mirror of
https://github.com/ChainSafe/lodestar.git
synced 2026-01-09 07:38:03 -05:00
Review gossipsub handlers (#2803)
* Handle onAttestation error * Simplify gossip validation fns * Move gossip topic handling to validate functions * Re-org gossip handlers * Cleanup * Override validate function completely * Fix tests * Add StrictNoSign validation * Add gossipMeshPeersBySyncCommitteeSubnet metric * Handle multiple forks in meshPeers metrics * Update tests * Rename allForksAfterAltair * Fix merge issues * Fix merge issues in e2e tests
This commit is contained in:
@@ -6057,10 +6057,10 @@
|
||||
"pluginVersion": "7.4.5",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "lodestar_gossip_mesh_peers_by_type{gossipType!~\"beacon_attestation\"}",
|
||||
"expr": "lodestar_gossip_mesh_peers_by_type{type!~\"beacon_attestation\"}",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"legendFormat": "{{gossipType}}",
|
||||
"legendFormat": "{{type}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
|
||||
@@ -376,10 +376,7 @@ export function getValidatorApi({
|
||||
contributionAndProofs.map(async (contributionAndProof, i) => {
|
||||
try {
|
||||
// TODO: Validate in batch
|
||||
await validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof,
|
||||
validSignature: false,
|
||||
});
|
||||
await validateSyncCommitteeGossipContributionAndProof(chain, db, contributionAndProof);
|
||||
db.syncCommitteeContribution.add(contributionAndProof.message);
|
||||
await network.gossip.publishContributionAndProof(contributionAndProof);
|
||||
} catch (e) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import {CommitteeIndex, Epoch, Slot, ValidatorIndex} from "@chainsafe/lodestar-types";
|
||||
import {LodestarError} from "@chainsafe/lodestar-utils";
|
||||
import {GossipActionError} from "./gossipValidation";
|
||||
|
||||
export enum AttestationErrorCode {
|
||||
/**
|
||||
@@ -164,4 +164,4 @@ export type AttestationErrorType =
|
||||
| {code: AttestationErrorCode.INVALID_AGGREGATOR}
|
||||
| {code: AttestationErrorCode.INVALID_INDEXED_ATTESTATION};
|
||||
|
||||
export class AttestationError extends LodestarError<AttestationErrorType> {}
|
||||
export class AttestationError extends GossipActionError<AttestationErrorType> {}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {LodestarError} from "@chainsafe/lodestar-utils";
|
||||
import {GossipActionError} from "./gossipValidation";
|
||||
|
||||
export enum AttesterSlashingErrorCode {
|
||||
ALREADY_EXISTS = "ATTESTATION_SLASHING_ERROR_ALREADY_EXISTS",
|
||||
@@ -8,8 +8,4 @@ export type AttesterSlashingErrorType =
|
||||
| {code: AttesterSlashingErrorCode.ALREADY_EXISTS}
|
||||
| {code: AttesterSlashingErrorCode.INVALID; error: Error};
|
||||
|
||||
export class AttesterSlashingError extends LodestarError<AttesterSlashingErrorType> {
|
||||
constructor(type: AttesterSlashingErrorType) {
|
||||
super(type);
|
||||
}
|
||||
}
|
||||
export class AttesterSlashingError extends GossipActionError<AttesterSlashingErrorType> {}
|
||||
|
||||
@@ -2,6 +2,7 @@ import {Root, Slot, ValidatorIndex} from "@chainsafe/lodestar-types";
|
||||
import {LodestarError} from "@chainsafe/lodestar-utils";
|
||||
|
||||
import {IBlockJob, IChainSegmentJob} from "../interface";
|
||||
import {GossipActionError} from "./gossipValidation";
|
||||
|
||||
export enum BlockErrorCode {
|
||||
/**
|
||||
@@ -104,6 +105,8 @@ export type BlockErrorType =
|
||||
| {code: BlockErrorCode.BEACON_CHAIN_ERROR; error: Error}
|
||||
| {code: BlockErrorCode.KNOWN_BAD_BLOCK};
|
||||
|
||||
export class BlockGossipError extends GossipActionError<BlockErrorType> {}
|
||||
|
||||
export class BlockError extends LodestarError<BlockErrorType> {
|
||||
signedBlock: IBlockJob["signedBlock"];
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ export enum GossipAction {
|
||||
REJECT = "REJECT",
|
||||
}
|
||||
|
||||
export class GossipValidationError<T extends {code: string}> extends LodestarError<T> {
|
||||
export class GossipActionError<T extends {code: string}> extends LodestarError<T> {
|
||||
action: GossipAction;
|
||||
|
||||
constructor(action: GossipAction, type: T) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {LodestarError} from "@chainsafe/lodestar-utils";
|
||||
import {GossipActionError} from "./gossipValidation";
|
||||
|
||||
export enum ProposerSlashingErrorCode {
|
||||
ALREADY_EXISTS = "PROPOSER_SLASHING_ERROR_ALREADY_EXISTS",
|
||||
@@ -8,8 +8,4 @@ export type ProposerSlashingErrorType =
|
||||
| {code: ProposerSlashingErrorCode.ALREADY_EXISTS}
|
||||
| {code: ProposerSlashingErrorCode.INVALID; error: Error};
|
||||
|
||||
export class ProposerSlashingError extends LodestarError<ProposerSlashingErrorType> {
|
||||
constructor(type: ProposerSlashingErrorType) {
|
||||
super(type);
|
||||
}
|
||||
}
|
||||
export class ProposerSlashingError extends GossipActionError<ProposerSlashingErrorType> {}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import {altair, phase0, Slot} from "@chainsafe/lodestar-types";
|
||||
import {GossipAction, GossipValidationError} from "./gossipValidation";
|
||||
import {GossipActionError} from "./gossipValidation";
|
||||
|
||||
export enum SyncCommitteeErrorCode {
|
||||
NOT_CURRENT_SLOT = "SYNC_COMMITTEE_ERROR_NOT_CURRENT_SLOT",
|
||||
@@ -26,16 +26,4 @@ export interface ISyncCommitteeJob {
|
||||
validSignature: boolean;
|
||||
}
|
||||
|
||||
export interface IContributionAndProofJob {
|
||||
contributionAndProof: altair.SignedContributionAndProof;
|
||||
validSignature: boolean;
|
||||
}
|
||||
|
||||
export class SyncCommitteeError extends GossipValidationError<SyncCommitteeErrorType> {
|
||||
action: GossipAction;
|
||||
|
||||
constructor(action: GossipAction, type: SyncCommitteeErrorType) {
|
||||
super(action, type);
|
||||
this.action = action;
|
||||
}
|
||||
}
|
||||
export class SyncCommitteeError extends GossipActionError<SyncCommitteeErrorType> {}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {LodestarError} from "@chainsafe/lodestar-utils";
|
||||
import {GossipActionError} from "./gossipValidation";
|
||||
|
||||
export enum VoluntaryExitErrorCode {
|
||||
ALREADY_EXISTS = "VOLUNTARY_EXIT_ERROR_ALREADY_EXISTS",
|
||||
@@ -8,8 +8,4 @@ export type VoluntaryExitErrorType =
|
||||
| {code: VoluntaryExitErrorCode.ALREADY_EXISTS}
|
||||
| {code: VoluntaryExitErrorCode.INVALID; error: Error};
|
||||
|
||||
export class VoluntaryExitError extends LodestarError<VoluntaryExitErrorType> {
|
||||
constructor(type: VoluntaryExitErrorType) {
|
||||
super(type);
|
||||
}
|
||||
}
|
||||
export class VoluntaryExitError extends GossipActionError<VoluntaryExitErrorType> {}
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {IBeaconChain} from "..";
|
||||
import {getSelectionProofSignatureSet, getAggregateAndProofSignatureSet} from "./signatureSets";
|
||||
import {AttestationError, AttestationErrorCode} from "../errors";
|
||||
import {AttestationError, AttestationErrorCode, GossipAction} from "../errors";
|
||||
import {getCommitteeIndices, verifyHeadBlockAndTargetRoot, verifyPropagationSlotRange} from "./attestation";
|
||||
|
||||
export async function validateGossipAggregateAndProof(
|
||||
@@ -33,7 +33,7 @@ export async function validateGossipAggregateAndProof(
|
||||
|
||||
// [REJECT] The attestation's epoch matches its target -- i.e. attestation.data.target.epoch == compute_epoch_at_slot(attestation.data.slot)
|
||||
if (!ssz.Epoch.equals(targetEpoch, attEpoch)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.BAD_TARGET_EPOCH});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.BAD_TARGET_EPOCH});
|
||||
}
|
||||
|
||||
// [IGNORE] aggregate.data.slot is within the last ATTESTATION_PROPAGATION_SLOT_RANGE slots (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance)
|
||||
@@ -45,7 +45,11 @@ export async function validateGossipAggregateAndProof(
|
||||
// index aggregate_and_proof.aggregator_index for the epoch aggregate.data.target.epoch.
|
||||
const aggregatorIndex = aggregateAndProof.aggregatorIndex;
|
||||
if (chain.seenAggregators.isKnown(targetEpoch, aggregatorIndex)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN, targetEpoch, aggregatorIndex});
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN,
|
||||
targetEpoch,
|
||||
aggregatorIndex,
|
||||
});
|
||||
}
|
||||
|
||||
// [IGNORE] The block being voted for (attestation.data.beacon_block_root) has been seen (via both gossip
|
||||
@@ -57,7 +61,10 @@ export async function validateGossipAggregateAndProof(
|
||||
// > Altready check in `chain.forkChoice.hasBlock(attestation.data.beaconBlockRoot)`
|
||||
|
||||
const targetState = await chain.regen.getCheckpointState(attTarget).catch((e) => {
|
||||
throw new AttestationError({code: AttestationErrorCode.MISSING_ATTESTATION_TARGET_STATE, error: e as Error});
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.MISSING_ATTESTATION_TARGET_STATE,
|
||||
error: e as Error,
|
||||
});
|
||||
});
|
||||
|
||||
const committeeIndices = getCommitteeIndices(targetState, attSlot, attData.index);
|
||||
@@ -73,19 +80,19 @@ export async function validateGossipAggregateAndProof(
|
||||
// len(get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)) >= 1.
|
||||
if (attestingIndices.length < 1) {
|
||||
// missing attestation participants
|
||||
throw new AttestationError({code: AttestationErrorCode.EMPTY_AGGREGATION_BITFIELD});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.EMPTY_AGGREGATION_BITFIELD});
|
||||
}
|
||||
|
||||
// [REJECT] aggregate_and_proof.selection_proof selects the validator as an aggregator for the slot
|
||||
// -- i.e. is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof) returns True.
|
||||
if (!isAggregatorFromCommitteeLength(committeeIndices.length, aggregateAndProof.selectionProof)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.INVALID_AGGREGATOR});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_AGGREGATOR});
|
||||
}
|
||||
|
||||
// [REJECT] The aggregator's validator index is within the committee
|
||||
// -- i.e. aggregate_and_proof.aggregator_index in get_beacon_committee(state, aggregate.data.slot, aggregate.data.index).
|
||||
if (!committeeIndices.includes(aggregateAndProof.aggregatorIndex)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.AGGREGATOR_NOT_IN_COMMITTEE});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.AGGREGATOR_NOT_IN_COMMITTEE});
|
||||
}
|
||||
|
||||
// [REJECT] The aggregate_and_proof.selection_proof is a valid signature of the aggregate.data.slot
|
||||
@@ -99,14 +106,18 @@ export async function validateGossipAggregateAndProof(
|
||||
allForks.getIndexedAttestationSignatureSet(targetState, indexedAttestation),
|
||||
];
|
||||
if (!(await chain.bls.verifySignatureSets(signatureSets))) {
|
||||
throw new AttestationError({code: AttestationErrorCode.INVALID_SIGNATURE});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_SIGNATURE});
|
||||
}
|
||||
|
||||
// It's important to double check that the attestation still hasn't been observed, since
|
||||
// there can be a race-condition if we receive two attestations at the same time and
|
||||
// process them in different threads.
|
||||
if (chain.seenAggregators.isKnown(targetEpoch, aggregatorIndex)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN, targetEpoch, aggregatorIndex});
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN,
|
||||
targetEpoch,
|
||||
aggregatorIndex,
|
||||
});
|
||||
}
|
||||
|
||||
chain.seenAggregators.add(targetEpoch, aggregatorIndex);
|
||||
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
AggregationBitsErrorCode,
|
||||
} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {IBeaconChain} from "..";
|
||||
import {AttestationError, AttestationErrorCode} from "../errors";
|
||||
import {AttestationError, AttestationErrorCode, GossipAction} from "../errors";
|
||||
import {MAXIMUM_GOSSIP_CLOCK_DISPARITY_SEC} from "../../constants";
|
||||
|
||||
const {EpochContextError, EpochContextErrorCode, computeSubnetForSlot, getIndexedAttestationSignatureSet} = allForks;
|
||||
@@ -39,7 +39,7 @@ export async function validateGossipAttestation(
|
||||
|
||||
// [REJECT] The attestation's epoch matches its target -- i.e. attestation.data.target.epoch == compute_epoch_at_slot(attestation.data.slot)
|
||||
if (!ssz.Epoch.equals(targetEpoch, attEpoch)) {
|
||||
throw new AttestationError({
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.BAD_TARGET_EPOCH,
|
||||
});
|
||||
}
|
||||
@@ -58,7 +58,7 @@ export async function validateGossipAttestation(
|
||||
bitIndex = getSingleBitIndex(aggregationBits);
|
||||
} catch (e) {
|
||||
if (e instanceof AggregationBitsError && e.type.code === AggregationBitsErrorCode.NOT_EXACTLY_ONE_BIT_SET) {
|
||||
throw new AttestationError({code: AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET});
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
@@ -85,7 +85,10 @@ export async function validateGossipAttestation(
|
||||
// > Altready check in `verifyHeadBlockAndTargetRoot()`
|
||||
|
||||
const attestationTargetState = await chain.regen.getCheckpointState(attTarget).catch((e) => {
|
||||
throw new AttestationError({code: AttestationErrorCode.MISSING_ATTESTATION_TARGET_STATE, error: e as Error});
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.MISSING_ATTESTATION_TARGET_STATE,
|
||||
error: e as Error,
|
||||
});
|
||||
});
|
||||
|
||||
// [REJECT] The committee index is within the expected range
|
||||
@@ -98,7 +101,7 @@ export async function validateGossipAttestation(
|
||||
// -- i.e. len(attestation.aggregation_bits) == len(get_beacon_committee(state, data.slot, data.index)).
|
||||
// > TODO: Is this necessary? Lighthouse does not do this check
|
||||
if (aggregationBits.length !== committeeIndices.length) {
|
||||
throw new AttestationError({code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS});
|
||||
}
|
||||
|
||||
// LH > verify_middle_checks
|
||||
@@ -112,7 +115,7 @@ export async function validateGossipAttestation(
|
||||
// which may be pre-computed along with the committee information for the signature check.
|
||||
const expectedSubnet = computeSubnetForSlot(attestationTargetState, attSlot, attIndex);
|
||||
if (subnet !== null && subnet !== expectedSubnet) {
|
||||
throw new AttestationError({
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.INVALID_SUBNET_ID,
|
||||
received: subnet,
|
||||
expected: expectedSubnet,
|
||||
@@ -122,7 +125,11 @@ export async function validateGossipAttestation(
|
||||
// [IGNORE] There has been no other valid attestation seen on an attestation subnet that has an
|
||||
// identical attestation.data.target.epoch and participating validator index.
|
||||
if (chain.seenAttesters.isKnown(targetEpoch, validatorIndex)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.ATTESTATION_ALREADY_KNOWN, targetEpoch, validatorIndex});
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.ATTESTATION_ALREADY_KNOWN,
|
||||
targetEpoch,
|
||||
validatorIndex,
|
||||
});
|
||||
}
|
||||
|
||||
// [REJECT] The signature of attestation is valid.
|
||||
@@ -133,7 +140,7 @@ export async function validateGossipAttestation(
|
||||
};
|
||||
const signatureSet = getIndexedAttestationSignatureSet(attestationTargetState, indexedAttestation);
|
||||
if (!(await chain.bls.verifySignatureSets([signatureSet]))) {
|
||||
throw new AttestationError({code: AttestationErrorCode.INVALID_SIGNATURE});
|
||||
throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_SIGNATURE});
|
||||
}
|
||||
|
||||
// Now that the attestation has been fully verified, store that we have received a valid attestation from this validator.
|
||||
@@ -142,7 +149,11 @@ export async function validateGossipAttestation(
|
||||
// there can be a race-condition if we receive two attestations at the same time and
|
||||
// process them in different threads.
|
||||
if (chain.seenAttesters.isKnown(targetEpoch, validatorIndex)) {
|
||||
throw new AttestationError({code: AttestationErrorCode.ATTESTATION_ALREADY_KNOWN, targetEpoch, validatorIndex});
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.ATTESTATION_ALREADY_KNOWN,
|
||||
targetEpoch,
|
||||
validatorIndex,
|
||||
});
|
||||
}
|
||||
|
||||
chain.seenAttesters.add(targetEpoch, validatorIndex);
|
||||
@@ -167,10 +178,18 @@ export function verifyPropagationSlotRange(chain: IBeaconChain, attestationSlot:
|
||||
0
|
||||
);
|
||||
if (attestationSlot < earliestPermissibleSlot) {
|
||||
throw new AttestationError({code: AttestationErrorCode.PAST_SLOT, earliestPermissibleSlot, attestationSlot});
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.PAST_SLOT,
|
||||
earliestPermissibleSlot,
|
||||
attestationSlot,
|
||||
});
|
||||
}
|
||||
if (attestationSlot > latestPermissibleSlot) {
|
||||
throw new AttestationError({code: AttestationErrorCode.FUTURE_SLOT, latestPermissibleSlot, attestationSlot});
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.FUTURE_SLOT,
|
||||
latestPermissibleSlot,
|
||||
attestationSlot,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -206,7 +225,7 @@ function verifyHeadBlockIsKnown(chain: IBeaconChain, beaconBlockRoot: Root): IBl
|
||||
|
||||
const headBlock = chain.forkChoice.getBlock(beaconBlockRoot);
|
||||
if (headBlock === null) {
|
||||
throw new AttestationError({
|
||||
throw new AttestationError(GossipAction.IGNORE, {
|
||||
code: AttestationErrorCode.UNKNOWN_BEACON_BLOCK_ROOT,
|
||||
root: beaconBlockRoot.valueOf() as Uint8Array,
|
||||
});
|
||||
@@ -234,7 +253,7 @@ function verifyAttestationTargetRoot(headBlock: IBlockSummary, targetRoot: Root,
|
||||
//
|
||||
// Reference:
|
||||
// https://github.com/ethereum/eth2.0-specs/pull/2001#issuecomment-699246659
|
||||
throw new AttestationError({
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.INVALID_TARGET_ROOT,
|
||||
targetRoot: targetRoot.valueOf() as Uint8Array,
|
||||
expected: null,
|
||||
@@ -253,7 +272,7 @@ function verifyAttestationTargetRoot(headBlock: IBlockSummary, targetRoot: Root,
|
||||
|
||||
if (!ssz.Root.equals(expectedTargetRoot, targetRoot)) {
|
||||
// Reject any attestation with an invalid target root.
|
||||
throw new AttestationError({
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.INVALID_TARGET_ROOT,
|
||||
targetRoot: targetRoot.valueOf() as Uint8Array,
|
||||
expected: expectedTargetRoot,
|
||||
@@ -271,7 +290,10 @@ export function getCommitteeIndices(
|
||||
return attestationTargetState.getBeaconCommittee(attestationSlot, attestationIndex);
|
||||
} catch (e) {
|
||||
if (e instanceof EpochContextError && e.type.code === EpochContextErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE) {
|
||||
throw new AttestationError({code: AttestationErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE, index: attestationIndex});
|
||||
throw new AttestationError(GossipAction.REJECT, {
|
||||
code: AttestationErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE,
|
||||
index: attestationIndex,
|
||||
});
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import {phase0, allForks} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {ssz, ValidatorIndex} from "@chainsafe/lodestar-types";
|
||||
import {IBeaconChain} from "..";
|
||||
import {AttesterSlashingError, AttesterSlashingErrorCode} from "../errors/attesterSlashingError";
|
||||
import {AttesterSlashingError, AttesterSlashingErrorCode, GossipAction} from "../errors";
|
||||
import {IBeaconDb} from "../../db";
|
||||
import {arrayIntersection, sszEqualPredicate} from "../../util/objects";
|
||||
|
||||
@@ -17,7 +17,7 @@ export async function validateGossipAttesterSlashing(
|
||||
);
|
||||
|
||||
if (await db.attesterSlashing.hasAll(attesterSlashedIndices)) {
|
||||
throw new AttesterSlashingError({
|
||||
throw new AttesterSlashingError(GossipAction.IGNORE, {
|
||||
code: AttesterSlashingErrorCode.ALREADY_EXISTS,
|
||||
});
|
||||
}
|
||||
@@ -28,7 +28,7 @@ export async function validateGossipAttesterSlashing(
|
||||
// verifySignature = false, verified in batch below
|
||||
allForks.assertValidAttesterSlashing(state, attesterSlashing, false);
|
||||
} catch (e) {
|
||||
throw new AttesterSlashingError({
|
||||
throw new AttesterSlashingError(GossipAction.REJECT, {
|
||||
code: AttesterSlashingErrorCode.INVALID,
|
||||
error: e as Error,
|
||||
});
|
||||
@@ -36,7 +36,7 @@ export async function validateGossipAttesterSlashing(
|
||||
|
||||
const signatureSets = allForks.getAttesterSlashingSignatureSets(state, attesterSlashing);
|
||||
if (!(await chain.bls.verifySignatureSets(signatureSets))) {
|
||||
throw new AttesterSlashingError({
|
||||
throw new AttesterSlashingError(GossipAction.REJECT, {
|
||||
code: AttesterSlashingErrorCode.INVALID,
|
||||
error: Error("Invalid signature"),
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@ import {IBeaconChain, IBlockJob} from "..";
|
||||
import {IBeaconDb} from "../../db";
|
||||
import {computeStartSlotAtEpoch} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {allForks} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {BlockError, BlockErrorCode} from "../errors";
|
||||
import {BlockGossipError, BlockErrorCode, GossipAction} from "../errors";
|
||||
|
||||
export async function validateGossipBlock(
|
||||
config: IChainForkConfig,
|
||||
@@ -18,12 +18,16 @@ export async function validateGossipBlock(
|
||||
const finalizedSlot = computeStartSlotAtEpoch(finalizedCheckpoint.epoch);
|
||||
// block is too old
|
||||
if (blockSlot <= finalizedSlot) {
|
||||
throw new BlockError(block, {code: BlockErrorCode.WOULD_REVERT_FINALIZED_SLOT, blockSlot, finalizedSlot});
|
||||
throw new BlockGossipError(GossipAction.IGNORE, {
|
||||
code: BlockErrorCode.WOULD_REVERT_FINALIZED_SLOT,
|
||||
blockSlot,
|
||||
finalizedSlot,
|
||||
});
|
||||
}
|
||||
|
||||
const currentSlotWithGossipDisparity = chain.clock.currentSlotWithGossipDisparity;
|
||||
if (currentSlotWithGossipDisparity < blockSlot) {
|
||||
throw new BlockError(block, {
|
||||
throw new BlockGossipError(GossipAction.IGNORE, {
|
||||
code: BlockErrorCode.FUTURE_SLOT,
|
||||
currentSlot: currentSlotWithGossipDisparity,
|
||||
blockSlot,
|
||||
@@ -35,23 +39,23 @@ export async function validateGossipBlock(
|
||||
|
||||
const existingBlock = await db.block.get(blockRoot);
|
||||
if (existingBlock?.message.proposerIndex === block.message.proposerIndex) {
|
||||
throw new BlockError(block, {code: BlockErrorCode.REPEAT_PROPOSAL, proposer: block.message.proposerIndex});
|
||||
}
|
||||
|
||||
let blockState;
|
||||
try {
|
||||
// getBlockSlotState also checks for whether the current finalized checkpoint is an ancestor of the block. as a result, we throw an IGNORE (whereas the spec says we should REJECT for this scenario). this is something we should change this in the future to make the code airtight to the spec.
|
||||
blockState = await chain.regen.getBlockSlotState(block.message.parentRoot, block.message.slot);
|
||||
} catch (e) {
|
||||
throw new BlockError(block, {
|
||||
code: BlockErrorCode.PARENT_UNKNOWN,
|
||||
parentRoot: block.message.parentRoot.valueOf() as Uint8Array,
|
||||
throw new BlockGossipError(GossipAction.IGNORE, {
|
||||
code: BlockErrorCode.REPEAT_PROPOSAL,
|
||||
proposer: block.message.proposerIndex,
|
||||
});
|
||||
}
|
||||
|
||||
// getBlockSlotState also checks for whether the current finalized checkpoint is an ancestor of the block. as a result, we throw an IGNORE (whereas the spec says we should REJECT for this scenario). this is something we should change this in the future to make the code airtight to the spec.
|
||||
const blockState = await chain.regen.getBlockSlotState(block.message.parentRoot, block.message.slot).catch(() => {
|
||||
throw new BlockGossipError(GossipAction.IGNORE, {
|
||||
code: BlockErrorCode.PARENT_UNKNOWN,
|
||||
parentRoot: block.message.parentRoot.valueOf() as Uint8Array,
|
||||
});
|
||||
});
|
||||
|
||||
const signatureSet = allForks.getProposerSignatureSet(blockState, block);
|
||||
if (!(await chain.bls.verifySignatureSets([signatureSet]))) {
|
||||
throw new BlockError(block, {code: BlockErrorCode.PROPOSAL_SIGNATURE_INVALID});
|
||||
throw new BlockGossipError(GossipAction.REJECT, {code: BlockErrorCode.PROPOSAL_SIGNATURE_INVALID});
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -60,7 +64,7 @@ export async function validateGossipBlock(
|
||||
throw Error("INCORRECT_PROPOSER");
|
||||
}
|
||||
} catch (error) {
|
||||
throw new BlockError(block, {
|
||||
throw new BlockGossipError(GossipAction.REJECT, {
|
||||
code: BlockErrorCode.INCORRECT_PROPOSER,
|
||||
blockProposer: block.message.proposerIndex,
|
||||
});
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
export * from "./block";
|
||||
export * from "./attestation";
|
||||
export * from "./aggregateAndProof";
|
||||
export * from "./attestation";
|
||||
export * from "./attesterSlashing";
|
||||
export * from "./block";
|
||||
export * from "./proposerSlashing";
|
||||
export * from "./syncCommittee";
|
||||
export * from "./syncCommitteeContributionAndProof";
|
||||
export * from "./voluntaryExit";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {phase0, allForks} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {IBeaconChain} from "..";
|
||||
import {ProposerSlashingError, ProposerSlashingErrorCode} from "../errors/proposerSlashingError";
|
||||
import {ProposerSlashingError, ProposerSlashingErrorCode, GossipAction} from "../errors";
|
||||
import {IBeaconDb} from "../../db";
|
||||
|
||||
export async function validateGossipProposerSlashing(
|
||||
@@ -9,7 +9,7 @@ export async function validateGossipProposerSlashing(
|
||||
proposerSlashing: phase0.ProposerSlashing
|
||||
): Promise<void> {
|
||||
if (await db.proposerSlashing.has(proposerSlashing.signedHeader1.message.proposerIndex)) {
|
||||
throw new ProposerSlashingError({
|
||||
throw new ProposerSlashingError(GossipAction.IGNORE, {
|
||||
code: ProposerSlashingErrorCode.ALREADY_EXISTS,
|
||||
});
|
||||
}
|
||||
@@ -20,7 +20,7 @@ export async function validateGossipProposerSlashing(
|
||||
// verifySignature = false, verified in batch below
|
||||
allForks.assertValidProposerSlashing(state, proposerSlashing, false);
|
||||
} catch (e) {
|
||||
throw new ProposerSlashingError({
|
||||
throw new ProposerSlashingError(GossipAction.REJECT, {
|
||||
code: ProposerSlashingErrorCode.INVALID,
|
||||
error: e as Error,
|
||||
});
|
||||
@@ -28,7 +28,7 @@ export async function validateGossipProposerSlashing(
|
||||
|
||||
const signatureSets = allForks.getProposerSlashingSignatureSets(state, proposerSlashing);
|
||||
if (!(await chain.bls.verifySignatureSets(signatureSets))) {
|
||||
throw new ProposerSlashingError({
|
||||
throw new ProposerSlashingError(GossipAction.REJECT, {
|
||||
code: ProposerSlashingErrorCode.INVALID,
|
||||
error: Error("Invalid signature"),
|
||||
});
|
||||
|
||||
@@ -2,13 +2,10 @@ import {CachedBeaconState, computeSyncPeriodAtSlot} from "@chainsafe/lodestar-be
|
||||
import {SYNC_COMMITTEE_SIZE, SYNC_COMMITTEE_SUBNET_COUNT} from "@chainsafe/lodestar-params";
|
||||
import {allForks, altair} from "@chainsafe/lodestar-types";
|
||||
import {IBeaconDb} from "../../db";
|
||||
import {GossipAction, ISyncCommitteeJob, SyncCommitteeError, SyncCommitteeErrorCode} from "../errors";
|
||||
import {GossipAction, SyncCommitteeError, SyncCommitteeErrorCode} from "../errors";
|
||||
import {IBeaconChain} from "../interface";
|
||||
import {getSyncCommitteeSignatureSet} from "./signatureSets";
|
||||
|
||||
/** TODO: Do this much better to be able to access this property in the handler */
|
||||
export type SyncCommitteeSignatureIndexed = altair.SyncCommitteeMessage & {indexInSubCommittee: number};
|
||||
|
||||
type IndexInSubCommittee = number;
|
||||
|
||||
/**
|
||||
@@ -17,17 +14,12 @@ type IndexInSubCommittee = number;
|
||||
export async function validateGossipSyncCommittee(
|
||||
chain: IBeaconChain,
|
||||
db: IBeaconDb,
|
||||
job: ISyncCommitteeJob,
|
||||
syncCommittee: altair.SyncCommitteeMessage,
|
||||
subnet: number
|
||||
): Promise<void> {
|
||||
const {signature: syncCommittee, validSignature} = job;
|
||||
|
||||
): Promise<{indexInSubCommittee: IndexInSubCommittee}> {
|
||||
const headState = chain.getHeadState();
|
||||
const indexInSubCommittee = validateGossipSyncCommitteeExceptSig(chain, headState, subnet, syncCommittee);
|
||||
|
||||
// TODO: Do this much better to be able to access this property in the handler
|
||||
(syncCommittee as SyncCommitteeSignatureIndexed).indexInSubCommittee = indexInSubCommittee;
|
||||
|
||||
// [IGNORE] The signature's slot is for the current slot, i.e. sync_committee_signature.slot == current_slot.
|
||||
// > Checked in validateGossipSyncCommitteeExceptSig()
|
||||
|
||||
@@ -46,13 +38,13 @@ export async function validateGossipSyncCommittee(
|
||||
// Note this validation implies the validator is part of the broader current sync committee along with the correct subcommittee.
|
||||
// > Checked in validateGossipSyncCommitteeExceptSig()
|
||||
|
||||
if (!validSignature) {
|
||||
// [REJECT] The signature is valid for the message beacon_block_root for the validator referenced by validator_index.
|
||||
await validateSyncCommitteeSigOnly(chain, headState, syncCommittee);
|
||||
}
|
||||
// [REJECT] The signature is valid for the message beacon_block_root for the validator referenced by validator_index.
|
||||
await validateSyncCommitteeSigOnly(chain, headState, syncCommittee);
|
||||
|
||||
// Register this valid item as seen
|
||||
db.syncCommittee.seen(subnet, syncCommittee);
|
||||
|
||||
return {indexInSubCommittee};
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import {CachedBeaconState, isSyncCommitteeAggregator} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {altair} from "@chainsafe/lodestar-types";
|
||||
import {IBeaconDb} from "../../db";
|
||||
import {GossipAction, IContributionAndProofJob, SyncCommitteeError, SyncCommitteeErrorCode} from "../errors";
|
||||
import {GossipAction, SyncCommitteeError, SyncCommitteeErrorCode} from "../errors";
|
||||
import {IBeaconChain} from "../interface";
|
||||
import {validateGossipSyncCommitteeExceptSig} from "./syncCommittee";
|
||||
import {
|
||||
@@ -16,9 +16,8 @@ import {
|
||||
export async function validateSyncCommitteeGossipContributionAndProof(
|
||||
chain: IBeaconChain,
|
||||
db: IBeaconDb,
|
||||
job: IContributionAndProofJob
|
||||
signedContributionAndProof: altair.SignedContributionAndProof
|
||||
): Promise<void> {
|
||||
const signedContributionAndProof = job.contributionAndProof;
|
||||
const contributionAndProof = signedContributionAndProof.message;
|
||||
const contribution = contributionAndProof.contribution;
|
||||
const subCommitteeIndex = contribution.subCommitteeIndex;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {phase0, allForks} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {IBeaconChain} from "..";
|
||||
import {VoluntaryExitError, VoluntaryExitErrorCode} from "../errors/voluntaryExitError";
|
||||
import {VoluntaryExitError, VoluntaryExitErrorCode, GossipAction} from "../errors";
|
||||
import {IBeaconDb} from "../../db";
|
||||
|
||||
export async function validateGossipVoluntaryExit(
|
||||
@@ -9,7 +9,7 @@ export async function validateGossipVoluntaryExit(
|
||||
voluntaryExit: phase0.SignedVoluntaryExit
|
||||
): Promise<void> {
|
||||
if (await db.voluntaryExit.has(voluntaryExit.message.validatorIndex)) {
|
||||
throw new VoluntaryExitError({
|
||||
throw new VoluntaryExitError(GossipAction.IGNORE, {
|
||||
code: VoluntaryExitErrorCode.ALREADY_EXISTS,
|
||||
});
|
||||
}
|
||||
@@ -23,7 +23,7 @@ export async function validateGossipVoluntaryExit(
|
||||
// verifySignature = false, verified in batch below
|
||||
allForks.assertValidVoluntaryExit(state, voluntaryExit, false);
|
||||
} catch (e) {
|
||||
throw new VoluntaryExitError({
|
||||
throw new VoluntaryExitError(GossipAction.REJECT, {
|
||||
code: VoluntaryExitErrorCode.INVALID,
|
||||
error: e as Error,
|
||||
});
|
||||
@@ -31,7 +31,7 @@ export async function validateGossipVoluntaryExit(
|
||||
|
||||
const signatureSet = allForks.getVoluntaryExitSignatureSet(state, voluntaryExit);
|
||||
if (!(await chain.bls.verifySignatureSets([signatureSet]))) {
|
||||
throw new VoluntaryExitError({
|
||||
throw new VoluntaryExitError(GossipAction.REJECT, {
|
||||
code: VoluntaryExitErrorCode.INVALID,
|
||||
error: Error("Invalid signature"),
|
||||
});
|
||||
|
||||
@@ -69,15 +69,36 @@ export function createLodestarMetrics(
|
||||
help: "Total number of unique peers that have had a connection with",
|
||||
}),
|
||||
|
||||
gossipMeshPeersByType: register.gauge<"gossipType">({
|
||||
gossipMeshPeersByType: register.gauge<"type" | "fork">({
|
||||
name: "lodestar_gossip_mesh_peers_by_type",
|
||||
help: "Number of connected mesh peers per gossip type",
|
||||
labelNames: ["gossipType"],
|
||||
labelNames: ["type", "fork"],
|
||||
}),
|
||||
gossipMeshPeersByBeaconAttestationSubnet: register.gauge<"subnet">({
|
||||
gossipMeshPeersByBeaconAttestationSubnet: register.gauge<"subnet" | "fork">({
|
||||
name: "lodestar_gossip_mesh_peers_by_beacon_attestation_subnet",
|
||||
help: "Number of connected mesh peers per beacon attestation subnet",
|
||||
labelNames: ["subnet"],
|
||||
labelNames: ["subnet", "fork"],
|
||||
}),
|
||||
gossipMeshPeersBySyncCommitteeSubnet: register.gauge<"subnet" | "fork">({
|
||||
name: "lodestar_gossip_mesh_peers_by_sync_committee_subnet",
|
||||
help: "Number of connected mesh peers per sync committee subnet",
|
||||
labelNames: ["subnet", "fork"],
|
||||
}),
|
||||
|
||||
gossipValidationAccept: register.gauge<"topic">({
|
||||
name: "lodestar_gossip_validation_accept",
|
||||
help: "Count of total gossip validation accept",
|
||||
labelNames: ["topic"],
|
||||
}),
|
||||
gossipValidationIgnore: register.gauge<"topic">({
|
||||
name: "lodestar_gossip_validation_ignore",
|
||||
help: "Count of total gossip validation ignore",
|
||||
labelNames: ["topic"],
|
||||
}),
|
||||
gossipValidationReject: register.gauge<"topic">({
|
||||
name: "lodestar_gossip_validation_reject",
|
||||
help: "Count of total gossip validation reject",
|
||||
labelNames: ["topic"],
|
||||
}),
|
||||
|
||||
gossipValidationQueueLength: register.gauge<"topic">({
|
||||
|
||||
@@ -14,7 +14,7 @@ import {Epoch} from "@chainsafe/lodestar-types";
|
||||
* 0 fork-2 fork fork+2 oo
|
||||
* ```
|
||||
*/
|
||||
const FORK_EPOCH_LOOKAHEAD = 2;
|
||||
export const FORK_EPOCH_LOOKAHEAD = 2;
|
||||
|
||||
/**
|
||||
* Return the list of `ForkName`s meant to be active at `epoch`
|
||||
@@ -40,38 +40,6 @@ export function getActiveForks(config: IChainForkConfig, epoch: Epoch): ForkName
|
||||
return [prevFork, nextFork];
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to run hooks at the start and end of the fork transition, with `FORK_EPOCH_LOOKAHEAD`
|
||||
*/
|
||||
export function runForkTransitionHooks(
|
||||
config: IChainForkConfig,
|
||||
epoch: Epoch,
|
||||
hooks: {
|
||||
/** ONLY ONCE: Two epoch before the fork run this function */
|
||||
beforeForkTransition(nextFork: ForkName): void;
|
||||
/** ONLY ONCE: Two epochs after the fork run this function */
|
||||
afterForkTransition(prevFork: ForkName): void;
|
||||
}
|
||||
): void {
|
||||
// Compute prev and next fork shifted, so next fork is still next at forkEpoch + FORK_EPOCH_LOOKAHEAD
|
||||
const forks = getCurrentAndNextFork(config, epoch - FORK_EPOCH_LOOKAHEAD - 1);
|
||||
|
||||
// Only when fork is scheduled
|
||||
if (forks.nextFork) {
|
||||
const prevFork = forks.currentFork.name;
|
||||
const nextFork = forks.nextFork.name;
|
||||
const forkEpoch = forks.nextFork.epoch;
|
||||
|
||||
if (epoch === forkEpoch - FORK_EPOCH_LOOKAHEAD) {
|
||||
hooks.beforeForkTransition(nextFork);
|
||||
}
|
||||
|
||||
if (epoch === forkEpoch + FORK_EPOCH_LOOKAHEAD) {
|
||||
hooks.afterForkTransition(prevFork);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the currentFork and nextFork given a fork schedule and `epoch`
|
||||
*/
|
||||
|
||||
@@ -1,137 +1,139 @@
|
||||
import {compress, uncompress} from "snappyjs";
|
||||
import {intToBytes} from "@chainsafe/lodestar-utils";
|
||||
import {hash} from "@chainsafe/ssz";
|
||||
import {GOSSIP_MSGID_LENGTH, MESSAGE_DOMAIN_INVALID_SNAPPY, MESSAGE_DOMAIN_VALID_SNAPPY} from "./constants";
|
||||
import {GossipEncoding, IGossipMessage} from "./interface";
|
||||
import {
|
||||
DEFAULT_ENCODING,
|
||||
GOSSIP_MSGID_LENGTH,
|
||||
MESSAGE_DOMAIN_INVALID_SNAPPY,
|
||||
MESSAGE_DOMAIN_VALID_SNAPPY,
|
||||
} from "./constants";
|
||||
import {GossipEncoding, GossipTopic} from "./interface";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
|
||||
export function getTopicEncoding(topic: string): GossipEncoding {
|
||||
if (topic.endsWith(GossipEncoding.ssz)) {
|
||||
return GossipEncoding.ssz;
|
||||
}
|
||||
|
||||
if (topic.endsWith(GossipEncoding.ssz_snappy)) {
|
||||
return GossipEncoding.ssz_snappy;
|
||||
}
|
||||
|
||||
throw `Unknown gossip encoding "${topic.split("/").pop()}"`;
|
||||
export interface IUncompressCache {
|
||||
uncompress(input: Uint8Array): Uint8Array;
|
||||
}
|
||||
|
||||
export function decodeMessageData(encoding: GossipEncoding, data: Uint8Array): Uint8Array {
|
||||
export class UncompressCache implements IUncompressCache {
|
||||
private cache = new WeakMap<Uint8Array, Uint8Array>();
|
||||
|
||||
uncompress(input: Uint8Array): Uint8Array {
|
||||
let uncompressed = this.cache.get(input);
|
||||
if (!uncompressed) {
|
||||
uncompressed = uncompress(input);
|
||||
this.cache.set(input, uncompressed);
|
||||
}
|
||||
return uncompressed;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode message using `IUncompressCache`. Message will have been uncompressed before to compute the msgId.
|
||||
* We must re-use that result to prevent uncompressing the object again here.
|
||||
*/
|
||||
export function decodeMessageData(
|
||||
encoding: GossipEncoding,
|
||||
msgData: Uint8Array,
|
||||
uncompressCache: IUncompressCache
|
||||
): Uint8Array {
|
||||
switch (encoding) {
|
||||
case GossipEncoding.ssz_snappy:
|
||||
return uncompress(data);
|
||||
|
||||
case GossipEncoding.ssz:
|
||||
return data;
|
||||
return uncompressCache.uncompress(msgData);
|
||||
|
||||
default:
|
||||
throw new Error(`Unsupported encoding ${encoding}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function encodeMessageData(encoding: GossipEncoding, data: Uint8Array): Uint8Array {
|
||||
export function encodeMessageData(encoding: GossipEncoding, msgData: Uint8Array): Uint8Array {
|
||||
switch (encoding) {
|
||||
case GossipEncoding.ssz_snappy:
|
||||
return compress(data);
|
||||
|
||||
case GossipEncoding.ssz:
|
||||
return data;
|
||||
return compress(msgData);
|
||||
|
||||
default:
|
||||
throw new Error(`Unsupported encoding ${encoding}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function getMessageDecoder(encoding: GossipEncoding): (message: IGossipMessage) => Uint8Array {
|
||||
switch (encoding) {
|
||||
case GossipEncoding.ssz_snappy:
|
||||
return (message) => {
|
||||
if (message.uncompressed) {
|
||||
return message.uncompressed;
|
||||
} else {
|
||||
return uncompress(message.data);
|
||||
}
|
||||
};
|
||||
|
||||
case GossipEncoding.ssz:
|
||||
return (message) => message.data;
|
||||
|
||||
default:
|
||||
throw new Error(`unsupported encoding ${encoding}`);
|
||||
/**
|
||||
* Function to compute message id for all forks.
|
||||
*/
|
||||
export function computeMsgId(
|
||||
topic: GossipTopic,
|
||||
topicStr: string,
|
||||
msgData: Uint8Array,
|
||||
uncompressCache: IUncompressCache
|
||||
): Uint8Array {
|
||||
switch (topic.fork) {
|
||||
case ForkName.phase0:
|
||||
return computeMsgIdPhase0(topic, msgData, uncompressCache);
|
||||
case ForkName.altair:
|
||||
return computeMsgIdAltair(topic, topicStr, msgData, uncompressCache);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to compute message id for phase0.
|
||||
* Computing the message id requires uncompressing data, if applicable
|
||||
* Return both the computed message id and uncompressed data
|
||||
* ```
|
||||
* SHA256(MESSAGE_DOMAIN_VALID_SNAPPY + snappy_decompress(message.data))[:20]
|
||||
* ```
|
||||
*/
|
||||
export function computeMsgIdPhase0(topic: string, data: Uint8Array): {msgId: Uint8Array; uncompressed?: Uint8Array} {
|
||||
const encoding = getTopicEncoding(topic);
|
||||
|
||||
let dataToHash: Uint8Array;
|
||||
let uncompressed: Uint8Array | undefined;
|
||||
switch (encoding) {
|
||||
export function computeMsgIdPhase0(
|
||||
topic: GossipTopic,
|
||||
msgData: Uint8Array,
|
||||
uncompressCache: IUncompressCache
|
||||
): Uint8Array {
|
||||
switch (topic.encoding ?? DEFAULT_ENCODING) {
|
||||
case GossipEncoding.ssz_snappy:
|
||||
try {
|
||||
uncompressed = uncompress(data);
|
||||
dataToHash = Buffer.concat([MESSAGE_DOMAIN_VALID_SNAPPY, uncompressed]);
|
||||
const uncompressed = uncompressCache.uncompress(msgData);
|
||||
return hashGossipMsgData(MESSAGE_DOMAIN_VALID_SNAPPY, uncompressed);
|
||||
} catch (e) {
|
||||
dataToHash = Buffer.concat([MESSAGE_DOMAIN_INVALID_SNAPPY, data]);
|
||||
return hashGossipMsgData(MESSAGE_DOMAIN_INVALID_SNAPPY, msgData);
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
uncompressed = data;
|
||||
dataToHash = data;
|
||||
break;
|
||||
}
|
||||
|
||||
return {
|
||||
msgId: hash(dataToHash).slice(0, GOSSIP_MSGID_LENGTH),
|
||||
uncompressed,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to compute message id for altair.
|
||||
*
|
||||
* ```
|
||||
* SHA256(
|
||||
* MESSAGE_DOMAIN_VALID_SNAPPY +
|
||||
* uint_to_bytes(uint64(len(message.topic))) +
|
||||
* message.topic +
|
||||
* snappy_decompress(message.data)
|
||||
* )[:20]
|
||||
* ```
|
||||
* https://github.com/ethereum/eth2.0-specs/blob/v1.1.0-alpha.7/specs/altair/p2p-interface.md#topics-and-messages
|
||||
* Computing the message id requires uncompressing data, if applicable
|
||||
* Return both the computed message id and uncompressed data
|
||||
*/
|
||||
export function computeMsgIdAltair(topic: string, data: Uint8Array): {msgId: Uint8Array; uncompressed?: Uint8Array} {
|
||||
const encoding = getTopicEncoding(topic);
|
||||
|
||||
let dataToHash: Uint8Array;
|
||||
let uncompressed: Uint8Array | undefined;
|
||||
switch (encoding) {
|
||||
export function computeMsgIdAltair(
|
||||
topic: GossipTopic,
|
||||
topicStr: string,
|
||||
msgData: Uint8Array,
|
||||
uncompressCache: IUncompressCache
|
||||
): Uint8Array {
|
||||
switch (topic.encoding ?? DEFAULT_ENCODING) {
|
||||
case GossipEncoding.ssz_snappy:
|
||||
try {
|
||||
uncompressed = uncompress(data);
|
||||
dataToHash = Buffer.concat([
|
||||
const uncompressed = uncompressCache.uncompress(msgData);
|
||||
return hashGossipMsgData(
|
||||
MESSAGE_DOMAIN_VALID_SNAPPY,
|
||||
intToBytes(topic.length, 8),
|
||||
Buffer.from(topic),
|
||||
uncompressed,
|
||||
]);
|
||||
intToBytes(topicStr.length, 8),
|
||||
Buffer.from(topicStr),
|
||||
uncompressed
|
||||
);
|
||||
} catch (e) {
|
||||
dataToHash = Buffer.concat([
|
||||
return hashGossipMsgData(
|
||||
MESSAGE_DOMAIN_INVALID_SNAPPY,
|
||||
intToBytes(topic.length, 8),
|
||||
Buffer.from(topic),
|
||||
data,
|
||||
]);
|
||||
intToBytes(topicStr.length, 8),
|
||||
Buffer.from(topicStr),
|
||||
msgData
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
uncompressed = data;
|
||||
dataToHash = data;
|
||||
break;
|
||||
}
|
||||
|
||||
return {
|
||||
msgId: hash(dataToHash).slice(0, GOSSIP_MSGID_LENGTH),
|
||||
uncompressed,
|
||||
};
|
||||
}
|
||||
|
||||
function hashGossipMsgData(...dataArrToHash: Uint8Array[]): Uint8Array {
|
||||
return hash(Buffer.concat(dataArrToHash)).slice(0, GOSSIP_MSGID_LENGTH);
|
||||
}
|
||||
|
||||
@@ -1,31 +1,36 @@
|
||||
/* eslint-disable @typescript-eslint/naming-convention */
|
||||
import Gossipsub from "libp2p-gossipsub";
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {InMessage} from "libp2p-interfaces/src/pubsub";
|
||||
import Libp2p from "libp2p";
|
||||
import {AbortSignal} from "@chainsafe/abort-controller";
|
||||
import {IChainForkConfig} from "@chainsafe/lodestar-config";
|
||||
import {ATTESTATION_SUBNET_COUNT} from "@chainsafe/lodestar-params";
|
||||
import {ATTESTATION_SUBNET_COUNT, ForkName, SYNC_COMMITTEE_SUBNET_COUNT} from "@chainsafe/lodestar-params";
|
||||
import {allForks, altair, phase0} from "@chainsafe/lodestar-types";
|
||||
import {ILogger, toJson} from "@chainsafe/lodestar-utils";
|
||||
import {computeEpochAtSlot} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {computeStartSlotAtEpoch} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
|
||||
import {IMetrics} from "../../metrics";
|
||||
import {GossipHandlerFn, GossipObject, GossipTopic, GossipType, IGossipMessage, TopicValidatorFnMap} from "./interface";
|
||||
import {msgIdToString, getMsgId, messageIsValid} from "./utils";
|
||||
import {getGossipSSZSerializer, parseGossipTopic, stringifyGossipTopic} from "./topic";
|
||||
import {encodeMessageData} from "./encoding";
|
||||
import {GossipTopic, GossipTopicMap, GossipType, GossipTypeMap} from "./interface";
|
||||
import {getGossipSSZType, GossipTopicCache, stringifyGossipTopic} from "./topic";
|
||||
import {computeMsgId, encodeMessageData, UncompressCache} from "./encoding";
|
||||
import {DEFAULT_ENCODING} from "./constants";
|
||||
import {GossipValidationError} from "./errors";
|
||||
import {ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {prepareGossipMsg} from "./message";
|
||||
import {IForkDigestContext} from "../../util/forkDigestContext";
|
||||
import {GOSSIP_MAX_SIZE} from "../../constants";
|
||||
import {createValidatorFnsByTopic} from "./validation/validatorFnsByTopic";
|
||||
import {createValidatorFnsByType} from "./validation";
|
||||
import {GossipHandlers} from "./handlers";
|
||||
import {Map2d, Map2dArr} from "../../util/map";
|
||||
|
||||
interface IGossipsubModules {
|
||||
config: IChainForkConfig;
|
||||
libp2p: Libp2p;
|
||||
validatorFns: TopicValidatorFnMap;
|
||||
forkDigestContext: IForkDigestContext;
|
||||
logger: ILogger;
|
||||
metrics: IMetrics | null;
|
||||
signal: AbortSignal;
|
||||
forkDigestContext: IForkDigestContext;
|
||||
gossipHandlers: GossipHandlers;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -45,21 +50,11 @@ export class Eth2Gossipsub extends Gossipsub {
|
||||
private readonly config: IChainForkConfig;
|
||||
private readonly forkDigestContext: IForkDigestContext;
|
||||
private readonly logger: ILogger;
|
||||
private readonly metrics: IMetrics | null;
|
||||
/**
|
||||
* Cached gossip objects
|
||||
*
|
||||
* Objects are deserialized during validation. If they pass validation, they get added here for later processing
|
||||
*/
|
||||
private gossipObjects: Map<string, GossipObject>;
|
||||
/**
|
||||
* Cached gossip topic objects
|
||||
*/
|
||||
private gossipTopics: Map<string, GossipTopic>;
|
||||
/**
|
||||
* Timeout for logging status message
|
||||
*/
|
||||
private statusInterval?: NodeJS.Timeout;
|
||||
|
||||
// Internal caches
|
||||
private readonly gossipTopicCache: GossipTopicCache;
|
||||
private readonly uncompressCache = new UncompressCache();
|
||||
private readonly msgIdCache = new WeakMap<InMessage, Uint8Array>();
|
||||
|
||||
constructor(modules: IGossipsubModules) {
|
||||
// Gossipsub parameters defined here:
|
||||
@@ -72,30 +67,43 @@ export class Eth2Gossipsub extends Gossipsub {
|
||||
Dhi: 12,
|
||||
Dlazy: 6,
|
||||
});
|
||||
this.config = modules.config;
|
||||
this.forkDigestContext = modules.forkDigestContext;
|
||||
this.logger = modules.logger;
|
||||
this.metrics = modules.metrics;
|
||||
const {config, forkDigestContext, logger, metrics, signal, gossipHandlers} = modules;
|
||||
this.config = config;
|
||||
this.forkDigestContext = forkDigestContext;
|
||||
this.logger = logger;
|
||||
this.gossipTopicCache = new GossipTopicCache(forkDigestContext);
|
||||
|
||||
this.gossipObjects = new Map<string, GossipObject>();
|
||||
this.gossipTopics = new Map<string, GossipTopic>();
|
||||
// Note: We use the validator functions as handlers. No handler will be registered to gossipsub.
|
||||
// libp2p-js layer will emit the message to an EventEmitter that won't be listened by anyone.
|
||||
// TODO: Force to ensure there's a validatorFunction attached to every received topic.
|
||||
const validatorFnsByType = createValidatorFnsByType(gossipHandlers, {
|
||||
config,
|
||||
logger,
|
||||
uncompressCache: this.uncompressCache,
|
||||
gossipTopicCache: this.gossipTopicCache,
|
||||
metrics,
|
||||
signal,
|
||||
});
|
||||
|
||||
for (const [topic, validatorFn] of modules.validatorFns.entries()) {
|
||||
this.topicValidators.set(topic, validatorFn);
|
||||
const validatorFnsByTopic = createValidatorFnsByTopic(config, forkDigestContext, validatorFnsByType);
|
||||
|
||||
// Register validator functions for all topics, forks and encodings
|
||||
for (const [topicStr, validatorFn] of validatorFnsByTopic.entries()) {
|
||||
this.topicValidators.set(topicStr, validatorFn);
|
||||
}
|
||||
|
||||
if (metrics) {
|
||||
metrics.gossipMeshPeersByType.addCollect(() => this.onScrapeMetrics(metrics));
|
||||
}
|
||||
}
|
||||
|
||||
start(): void {
|
||||
super.start();
|
||||
this.statusInterval = setInterval(this.logSubscriptions, 12000);
|
||||
}
|
||||
|
||||
stop(): void {
|
||||
try {
|
||||
super.stop();
|
||||
if (this.statusInterval) {
|
||||
clearInterval(this.statusInterval);
|
||||
}
|
||||
} catch (error) {
|
||||
if ((error as GossipValidationError).code !== "ERR_HEARTBEAT_NO_RUNNING") {
|
||||
throw error;
|
||||
@@ -106,55 +114,71 @@ export class Eth2Gossipsub extends Gossipsub {
|
||||
/**
|
||||
* @override Use eth2 msg id and cache results to the msg
|
||||
*/
|
||||
getMsgId(msg: IGossipMessage): Uint8Array {
|
||||
return getMsgId(msg, this.forkDigestContext);
|
||||
getMsgId(msg: InMessage): Uint8Array {
|
||||
let msgId = this.msgIdCache.get(msg);
|
||||
if (!msgId) {
|
||||
const topicStr = msg.topicIDs[0];
|
||||
const topic = this.gossipTopicCache.getTopic(topicStr);
|
||||
msgId = computeMsgId(topic, topicStr, msg.data, this.uncompressCache);
|
||||
this.msgIdCache.set(msg, msgId);
|
||||
}
|
||||
return msgId;
|
||||
}
|
||||
|
||||
/**
|
||||
* @override
|
||||
* @override https://github.com/ChainSafe/js-libp2p-gossipsub/blob/3c3c46595f65823fcd7900ed716f43f76c6b355c/ts/index.ts#L436
|
||||
* @override https://github.com/libp2p/js-libp2p-interfaces/blob/ff3bd10704a4c166ce63135747e3736915b0be8d/src/pubsub/index.js#L513
|
||||
* Note: this does not call super. All logic is re-implemented below
|
||||
*/
|
||||
async validate(message: IGossipMessage): Promise<void> {
|
||||
async validate(message: InMessage): Promise<void> {
|
||||
try {
|
||||
// messages must have a single topicID
|
||||
const topicStr = (message.topicIDs || [])[0];
|
||||
|
||||
// message sanity check
|
||||
if (!messageIsValid(message)) {
|
||||
throw null;
|
||||
if (!topicStr || message.topicIDs.length > 1) {
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT, "Not exactly one topicID");
|
||||
}
|
||||
// get GossipTopic and GossipObject, set on IGossipMessage
|
||||
const gossipTopic = this.getGossipTopic(message.topicIDs[0]);
|
||||
prepareGossipMsg(message, gossipTopic);
|
||||
if (!message.data) {
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT, "No message.data");
|
||||
}
|
||||
if (message.data.length > GOSSIP_MAX_SIZE) {
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT, "message.data too big");
|
||||
}
|
||||
|
||||
if (message.from || message.signature || message.key || message.seqno) {
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT, "StrictNoSigning invalid");
|
||||
}
|
||||
|
||||
// We use 'StrictNoSign' policy, no need to validate message signature
|
||||
|
||||
// Ensure we have a validate function associated with all topics.
|
||||
// Otherwise super.validate() blindly accepts the object.
|
||||
const validatorFn = this.topicValidators.get(topicStr);
|
||||
if (!validatorFn) {
|
||||
this.logger.error("No gossip validatorFn", {topic: topicStr});
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE, `No gossip validatorFn for topic ${topicStr}`);
|
||||
}
|
||||
|
||||
// No error here means that the incoming object is valid
|
||||
await validatorFn(topicStr, message);
|
||||
} catch (e) {
|
||||
const err = new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
// must set gossip scores manually, since this usually happens in super.validate
|
||||
this.score.rejectMessage(message, err.code);
|
||||
this.gossipTracer.rejectMessage(message, err.code);
|
||||
// JobQueue may throw non-typed errors
|
||||
const code = e instanceof GossipValidationError ? e.code : ERR_TOPIC_VALIDATOR_IGNORE;
|
||||
this.score.rejectMessage(message, code);
|
||||
this.gossipTracer.rejectMessage(message, code);
|
||||
throw e;
|
||||
}
|
||||
|
||||
await super.validate(message); // No error here means that the incoming object is valid
|
||||
|
||||
//`message.gossipObject` must have been set ^ so that we can cache the deserialized gossip object
|
||||
if (message.gossipObject) {
|
||||
this.gossipObjects.set(msgIdToString(this.getMsgId(message)), message.gossipObject);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @override
|
||||
* See https://github.com/libp2p/js-libp2p-interfaces/blob/v0.5.2/src/pubsub/index.js#L428
|
||||
*
|
||||
* Instead of emitting `InMessage`, emit `GossipObject`
|
||||
* Our handlers are attached on the validator functions, so no need to emit the objects internally.
|
||||
*/
|
||||
_emitMessage(message: InMessage): void {
|
||||
const topic = message.topicIDs[0];
|
||||
const msgIdStr = msgIdToString(this.getMsgId(message));
|
||||
const gossipObject = this.gossipObjects.get(msgIdStr);
|
||||
if (gossipObject) {
|
||||
this.gossipObjects.delete(msgIdStr);
|
||||
}
|
||||
// Only messages that are currently subscribed and have properly been cached are emitted
|
||||
if (this.subscriptions.has(topic) && gossipObject) {
|
||||
this.emit(topic, gossipObject);
|
||||
}
|
||||
_emitMessage(): void {
|
||||
// Objects are handled in the validator functions, no need to do anything here
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -164,143 +188,158 @@ export class Eth2Gossipsub extends Gossipsub {
|
||||
*
|
||||
* See https://github.com/libp2p/js-libp2p-interfaces/blob/v0.8.3/src/pubsub/index.js#L720
|
||||
*/
|
||||
unsubscribe(topic: string): void {
|
||||
unsubscribe(topicStr: string): void {
|
||||
if (!this.started) {
|
||||
throw new Error("Pubsub is not started");
|
||||
}
|
||||
|
||||
if (this.subscriptions.has(topic)) {
|
||||
this.subscriptions.delete(topic);
|
||||
this.peers.forEach((_, id) => this._sendSubscriptions(id, [topic], false));
|
||||
if (this.subscriptions.has(topicStr)) {
|
||||
this.subscriptions.delete(topicStr);
|
||||
this.peers.forEach((_, id) => this._sendSubscriptions(id, [topicStr], false));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Publish a `GossipObject` on a `GossipTopic`
|
||||
*/
|
||||
async publishObject(topic: GossipTopic, object: GossipObject): Promise<void> {
|
||||
this.logger.verbose("Publish to topic", toJson(topic));
|
||||
await this.publish(
|
||||
this.getGossipTopicString(topic),
|
||||
encodeMessageData(topic.encoding ?? DEFAULT_ENCODING, getGossipSSZSerializer(topic)(object))
|
||||
);
|
||||
async publishObject<K extends GossipType>(topic: GossipTopicMap[K], object: GossipTypeMap[K]): Promise<void> {
|
||||
const topicStr = this.getGossipTopicString(topic);
|
||||
this.logger.verbose("Publish to topic", {topic: topicStr});
|
||||
const sszType = getGossipSSZType(topic);
|
||||
const messageData = (sszType.serialize as (object: GossipTypeMap[GossipType]) => Uint8Array)(object);
|
||||
await this.publish(topicStr, encodeMessageData(topic.encoding ?? DEFAULT_ENCODING, messageData));
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to a `GossipTopic`
|
||||
*/
|
||||
subscribeTopic(topic: GossipTopic): void {
|
||||
this.logger.verbose("Subscribe to topic", toJson(topic));
|
||||
this.subscribe(this.getGossipTopicString(topic));
|
||||
const topicStr = this.getGossipTopicString(topic);
|
||||
// Register known topicStr
|
||||
this.gossipTopicCache.setTopic(topicStr, topic);
|
||||
|
||||
this.logger.verbose("Subscribe to gossipsub topic", {topic: topicStr});
|
||||
this.subscribe(topicStr);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsubscribe to a `GossipTopic`
|
||||
*/
|
||||
unsubscribeTopic(topic: GossipTopic): void {
|
||||
this.logger.verbose("Unsubscribe to topic", toJson(topic));
|
||||
this.unsubscribe(this.getGossipTopicString(topic));
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach a handler to a `GossipTopic`
|
||||
*/
|
||||
handleTopic(topic: GossipTopic, handler: GossipHandlerFn): void {
|
||||
this.on(this.getGossipTopicString(topic), handler);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a handler from a `GossipTopic`
|
||||
*/
|
||||
unhandleTopic(topic: GossipTopic, handler: GossipHandlerFn): void {
|
||||
this.off(this.getGossipTopicString(topic), handler);
|
||||
const topicStr = this.getGossipTopicString(topic);
|
||||
this.logger.verbose("Unsubscribe to gossipsub topic", {topic: topicStr});
|
||||
this.unsubscribe(topicStr);
|
||||
}
|
||||
|
||||
async publishBeaconBlock(signedBlock: allForks.SignedBeaconBlock): Promise<void> {
|
||||
const fork = this.config.getForkName(signedBlock.message.slot);
|
||||
|
||||
await this.publishObject({type: GossipType.beacon_block, fork}, signedBlock);
|
||||
await this.publishObject<GossipType.beacon_block>({type: GossipType.beacon_block, fork}, signedBlock);
|
||||
}
|
||||
|
||||
async publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise<void> {
|
||||
const fork = this.config.getForkName(aggregateAndProof.message.aggregate.data.slot);
|
||||
await this.publishObject({type: GossipType.beacon_aggregate_and_proof, fork}, aggregateAndProof);
|
||||
await this.publishObject<GossipType.beacon_aggregate_and_proof>(
|
||||
{type: GossipType.beacon_aggregate_and_proof, fork},
|
||||
aggregateAndProof
|
||||
);
|
||||
}
|
||||
|
||||
async publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise<void> {
|
||||
const fork = this.config.getForkName(attestation.data.slot);
|
||||
await this.publishObject({type: GossipType.beacon_attestation, fork, subnet}, attestation);
|
||||
await this.publishObject<GossipType.beacon_attestation>(
|
||||
{type: GossipType.beacon_attestation, fork, subnet},
|
||||
attestation
|
||||
);
|
||||
}
|
||||
|
||||
async publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise<void> {
|
||||
const fork = this.config.getForkName(computeEpochAtSlot(voluntaryExit.message.epoch));
|
||||
await this.publishObject({type: GossipType.voluntary_exit, fork}, voluntaryExit);
|
||||
const fork = this.config.getForkName(computeStartSlotAtEpoch(voluntaryExit.message.epoch));
|
||||
await this.publishObject<GossipType.voluntary_exit>({type: GossipType.voluntary_exit, fork}, voluntaryExit);
|
||||
}
|
||||
|
||||
async publishProposerSlashing(proposerSlashing: phase0.ProposerSlashing): Promise<void> {
|
||||
const fork = this.config.getForkName(proposerSlashing.signedHeader1.message.slot);
|
||||
await this.publishObject({type: GossipType.proposer_slashing, fork}, proposerSlashing);
|
||||
await this.publishObject<GossipType.proposer_slashing>(
|
||||
{type: GossipType.proposer_slashing, fork},
|
||||
proposerSlashing
|
||||
);
|
||||
}
|
||||
|
||||
async publishAttesterSlashing(attesterSlashing: phase0.AttesterSlashing): Promise<void> {
|
||||
const fork = this.config.getForkName(attesterSlashing.attestation1.data.slot);
|
||||
await this.publishObject({type: GossipType.attester_slashing, fork}, attesterSlashing);
|
||||
await this.publishObject<GossipType.attester_slashing>(
|
||||
{type: GossipType.attester_slashing, fork},
|
||||
attesterSlashing
|
||||
);
|
||||
}
|
||||
|
||||
async publishSyncCommitteeSignature(signature: altair.SyncCommitteeMessage, subnet: number): Promise<void> {
|
||||
const fork = this.config.getForkName(signature.slot);
|
||||
await this.publishObject({type: GossipType.sync_committee, fork, subnet}, signature);
|
||||
await this.publishObject<GossipType.sync_committee>({type: GossipType.sync_committee, fork, subnet}, signature);
|
||||
}
|
||||
|
||||
async publishContributionAndProof(contributionAndProof: altair.SignedContributionAndProof): Promise<void> {
|
||||
const fork = this.config.getForkName(contributionAndProof.message.contribution.slot);
|
||||
await this.publishObject({type: GossipType.sync_committee_contribution_and_proof, fork}, contributionAndProof);
|
||||
await this.publishObject<GossipType.sync_committee_contribution_and_proof>(
|
||||
{type: GossipType.sync_committee_contribution_and_proof, fork},
|
||||
contributionAndProof
|
||||
);
|
||||
}
|
||||
|
||||
private getGossipTopicString(topic: GossipTopic): string {
|
||||
return stringifyGossipTopic(this.forkDigestContext, topic);
|
||||
}
|
||||
|
||||
private getGossipTopic(topicString: string): GossipTopic {
|
||||
let topic = this.gossipTopics.get(topicString);
|
||||
if (topic == null) {
|
||||
topic = parseGossipTopic(this.forkDigestContext, topicString);
|
||||
this.gossipTopics.set(topicString, topic);
|
||||
}
|
||||
return topic;
|
||||
}
|
||||
private onScrapeMetrics(metrics: IMetrics): void {
|
||||
// Pre-aggregate results by fork so we can fill the remaining metrics with 0
|
||||
const peersByTypeByFork = new Map2d<ForkName, GossipType, number>();
|
||||
const peersByBeaconAttSubnetByFork = new Map2dArr<ForkName, number>();
|
||||
const peersByBeaconSyncSubnetByFork = new Map2dArr<ForkName, number>();
|
||||
|
||||
private logSubscriptions = (): void => {
|
||||
if (this.metrics) {
|
||||
// beacon attestation mesh gets counted separately so we can track mesh peers by subnet
|
||||
// zero out all gossip type & subnet choices, so the dashboard will register them
|
||||
for (const gossipType of Object.values(GossipType)) {
|
||||
this.metrics.gossipMeshPeersByType.set({gossipType}, 0);
|
||||
}
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
this.metrics.gossipMeshPeersByBeaconAttestationSubnet.set({subnet: subnetLabel(subnet)}, 0);
|
||||
}
|
||||
// loop through all mesh entries, count each set size
|
||||
for (const [topicString, peers] of this.mesh.entries()) {
|
||||
const topic = this.getGossipTopic(topicString);
|
||||
if (topic.type === GossipType.beacon_attestation) {
|
||||
this.metrics.gossipMeshPeersByBeaconAttestationSubnet.set({subnet: subnetLabel(topic.subnet)}, peers.size);
|
||||
} else {
|
||||
this.metrics.gossipMeshPeersByType.set({gossipType: topic.type}, peers.size);
|
||||
}
|
||||
// loop through all mesh entries, count each set size
|
||||
for (const [topicString, peers] of this.mesh.entries()) {
|
||||
// Ignore topics with 0 peers. May prevent overriding after a fork
|
||||
if (peers.size === 0) continue;
|
||||
|
||||
const topic = this.gossipTopicCache.getTopic(topicString);
|
||||
if (topic.type === GossipType.beacon_attestation) {
|
||||
peersByBeaconAttSubnetByFork.set(topic.fork, topic.subnet, peers.size);
|
||||
} else if (topic.type === GossipType.sync_committee) {
|
||||
peersByBeaconSyncSubnetByFork.set(topic.fork, topic.subnet, peers.size);
|
||||
} else {
|
||||
peersByTypeByFork.set(topic.fork, topic.type, peers.size);
|
||||
}
|
||||
}
|
||||
this.logger.verbose("Current gossip subscriptions", {
|
||||
subscriptions: Array.from(this.subscriptions),
|
||||
});
|
||||
};
|
||||
|
||||
// beacon attestation mesh gets counted separately so we can track mesh peers by subnet
|
||||
// zero out all gossip type & subnet choices, so the dashboard will register them
|
||||
for (const [fork, peersByType] of peersByTypeByFork.map.entries()) {
|
||||
for (const type of Object.values(GossipType)) {
|
||||
metrics.gossipMeshPeersByType.set({fork, type}, peersByType.get(type) ?? 0);
|
||||
}
|
||||
}
|
||||
for (const [fork, peersByBeaconAttSubnet2] of peersByBeaconAttSubnetByFork.map.entries()) {
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
metrics.gossipMeshPeersByBeaconAttestationSubnet.set(
|
||||
{fork, subnet: attSubnetLabel(subnet)},
|
||||
peersByBeaconAttSubnet2[subnet] ?? 0
|
||||
);
|
||||
}
|
||||
}
|
||||
for (const [fork, peersByBeaconSyncSubnet2] of peersByBeaconSyncSubnetByFork.map.entries()) {
|
||||
for (let subnet = 0; subnet < SYNC_COMMITTEE_SUBNET_COUNT; subnet++) {
|
||||
// SYNC_COMMITTEE_SUBNET_COUNT is < 9, no need to prepend a 0 to the label
|
||||
metrics.gossipMeshPeersBySyncCommitteeSubnet.set({fork, subnet}, peersByBeaconSyncSubnet2[subnet] ?? 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Left pad subnets to two characters. Assumes ATTESTATION_SUBNET_COUNT < 99
|
||||
* Otherwise grafana sorts the mesh peers chart as: [1,11,12,13,...]
|
||||
*/
|
||||
function subnetLabel(subnet: number): string {
|
||||
function attSubnetLabel(subnet: number): string {
|
||||
if (subnet > 9) return String(subnet);
|
||||
else return `0${subnet}`;
|
||||
}
|
||||
|
||||
@@ -1,210 +0,0 @@
|
||||
import {allForks, altair, Epoch, phase0} from "@chainsafe/lodestar-types";
|
||||
import {ATTESTATION_SUBNET_COUNT, ForkName, SYNC_COMMITTEE_SUBNET_COUNT} from "@chainsafe/lodestar-params";
|
||||
import {IChainForkConfig} from "@chainsafe/lodestar-config";
|
||||
import {computeEpochAtSlot} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {SyncCommitteeSignatureIndexed} from "../../chain/validation/syncCommittee";
|
||||
import {getActiveForks, runForkTransitionHooks} from "../forks";
|
||||
import {ChainEvent, IBeaconChain} from "../../chain";
|
||||
import {IBeaconDb} from "../../db";
|
||||
import {GossipHandlerFn, GossipTopic, GossipType} from ".";
|
||||
import {Eth2Gossipsub} from "./gossipsub";
|
||||
import {IAttnetsService} from "../subnets";
|
||||
|
||||
/**
|
||||
* Registers handlers to all possible gossip topics and forks.
|
||||
* Other components control when to subscribe and unsubcribe.
|
||||
*/
|
||||
export class GossipHandler {
|
||||
private readonly topicHandlers: {topic: GossipTopic; handler: GossipHandlerFn}[] = [];
|
||||
private subscribedForks = new Set<ForkName>();
|
||||
|
||||
constructor(
|
||||
private readonly config: IChainForkConfig,
|
||||
private readonly chain: IBeaconChain,
|
||||
private readonly gossip: Eth2Gossipsub,
|
||||
private readonly attnetsService: IAttnetsService,
|
||||
private readonly db: IBeaconDb,
|
||||
private readonly logger: ILogger
|
||||
) {
|
||||
this.registerGossipHandlers();
|
||||
this.chain.emitter.on(ChainEvent.clockEpoch, this.onEpoch);
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.chain.emitter.off(ChainEvent.clockEpoch, this.onEpoch);
|
||||
for (const {topic, handler} of this.topicHandlers) {
|
||||
this.gossip.unhandleTopic(topic, handler);
|
||||
}
|
||||
}
|
||||
|
||||
get isSubscribedToCoreTopics(): boolean {
|
||||
return this.subscribedForks.size > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to all gossip events. Safe to call multiple times
|
||||
*/
|
||||
subscribeCoreTopics(): void {
|
||||
if (!this.isSubscribedToCoreTopics) {
|
||||
this.logger.info("Subscribed gossip core topics");
|
||||
}
|
||||
|
||||
const currentEpoch = computeEpochAtSlot(this.chain.forkChoice.getHead().slot);
|
||||
for (const fork of getActiveForks(this.config, currentEpoch)) {
|
||||
this.subscribeCoreTopicsAtFork(fork);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsubscribe from all gossip events. Safe to call multiple times
|
||||
*/
|
||||
unsubscribeCoreTopics(): void {
|
||||
for (const fork of this.subscribedForks.values()) {
|
||||
this.unsubscribeCoreTopicsAtFork(fork);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle forks
|
||||
|
||||
private onEpoch = (epoch: Epoch): void => {
|
||||
try {
|
||||
// Don't subscribe to new fork if the node is not subscribed to any topic
|
||||
if (!this.isSubscribedToCoreTopics) {
|
||||
return;
|
||||
}
|
||||
|
||||
runForkTransitionHooks(this.config, epoch, {
|
||||
beforeForkTransition: (nextFork) => {
|
||||
this.logger.info("Suscribing gossip core topics to next fork", {nextFork});
|
||||
this.subscribeCoreTopicsAtFork(nextFork);
|
||||
},
|
||||
afterForkTransition: (prevFork) => {
|
||||
this.logger.info("Unsuscribing gossip core topics from prev fork", {prevFork});
|
||||
this.unsubscribeCoreTopicsAtFork(prevFork);
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
this.logger.error("Error on BeaconGossipHandler.onEpoch", {epoch}, e);
|
||||
}
|
||||
};
|
||||
|
||||
// Gossip handlers
|
||||
|
||||
private onBlock = (block: allForks.SignedBeaconBlock): void => {
|
||||
this.logger.verbose("Received gossip block", {slot: block.message.slot});
|
||||
this.chain.receiveBlock(block);
|
||||
};
|
||||
|
||||
private onAggregatedAttestation = async (aggregate: phase0.SignedAggregateAndProof): Promise<void> => {
|
||||
await this.db.aggregateAndProof.add(aggregate.message);
|
||||
};
|
||||
|
||||
private onAttesterSlashing = async (attesterSlashing: phase0.AttesterSlashing): Promise<void> => {
|
||||
await this.db.attesterSlashing.add(attesterSlashing);
|
||||
};
|
||||
|
||||
private onProposerSlashing = async (proposerSlashing: phase0.ProposerSlashing): Promise<void> => {
|
||||
await this.db.proposerSlashing.add(proposerSlashing);
|
||||
};
|
||||
|
||||
private onVoluntaryExit = async (exit: phase0.SignedVoluntaryExit): Promise<void> => {
|
||||
await this.db.voluntaryExit.add(exit);
|
||||
};
|
||||
|
||||
private onSyncCommitteeContribution = async (contribution: altair.SignedContributionAndProof): Promise<void> => {
|
||||
this.db.syncCommitteeContribution.add(contribution.message);
|
||||
};
|
||||
|
||||
private onAttestation = async (subnet: number, attestation: phase0.Attestation): Promise<void> => {
|
||||
// TODO: Review if it's really necessary to check shouldProcessAttestation()
|
||||
if (this.attnetsService.shouldProcess(subnet, attestation.data.slot)) {
|
||||
this.chain.attestationPool.add(attestation);
|
||||
}
|
||||
};
|
||||
|
||||
private onSyncCommitteeSignature = async (subnet: number, signature: altair.SyncCommitteeMessage): Promise<void> => {
|
||||
// Note: not calling checking `syncnetsService.shouldProcess()` here since the validators will always aggregate
|
||||
|
||||
// TODO: Do this much better to be able to access this property in the handler
|
||||
const indexInSubCommittee = (signature as SyncCommitteeSignatureIndexed).indexInSubCommittee;
|
||||
this.db.syncCommittee.add(subnet, signature, indexInSubCommittee);
|
||||
};
|
||||
|
||||
private subscribeCoreTopicsAtFork = (fork: ForkName): void => {
|
||||
if (this.subscribedForks.has(fork)) return;
|
||||
this.subscribedForks.add(fork);
|
||||
|
||||
this.gossip.subscribeTopic({type: GossipType.beacon_block, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.beacon_aggregate_and_proof, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.voluntary_exit, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.proposer_slashing, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.attester_slashing, fork});
|
||||
if (fork === ForkName.altair) {
|
||||
this.gossip.subscribeTopic({type: GossipType.sync_committee_contribution_and_proof, fork});
|
||||
}
|
||||
};
|
||||
|
||||
private unsubscribeCoreTopicsAtFork = (fork: ForkName): void => {
|
||||
if (!this.subscribedForks.has(fork)) return;
|
||||
this.subscribedForks.delete(fork);
|
||||
|
||||
this.gossip.unsubscribeTopic({type: GossipType.beacon_block, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.beacon_aggregate_and_proof, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.voluntary_exit, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.proposer_slashing, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.attester_slashing, fork});
|
||||
if (fork === ForkName.altair) {
|
||||
this.gossip.unsubscribeTopic({type: GossipType.sync_committee_contribution_and_proof, fork});
|
||||
}
|
||||
};
|
||||
|
||||
private registerGossipHandlers(): void {
|
||||
const allForkNames = Object.values(this.config.forks).map((fork) => fork.name);
|
||||
// TODO: Compute all forks after altair including altair
|
||||
const allForksAfterAltair = [ForkName.altair];
|
||||
|
||||
const topicHandlers = [
|
||||
{type: GossipType.beacon_block, forks: allForkNames, handler: this.onBlock},
|
||||
{type: GossipType.beacon_aggregate_and_proof, forks: allForkNames, handler: this.onAggregatedAttestation},
|
||||
{type: GossipType.voluntary_exit, forks: allForkNames, handler: this.onVoluntaryExit},
|
||||
{type: GossipType.proposer_slashing, forks: allForkNames, handler: this.onProposerSlashing},
|
||||
{type: GossipType.attester_slashing, forks: allForkNames, handler: this.onAttesterSlashing},
|
||||
// Note: Calling .handleTopic() does not subscribe. Safe to do in any fork
|
||||
{
|
||||
type: GossipType.sync_committee_contribution_and_proof,
|
||||
forks: allForksAfterAltair,
|
||||
handler: this.onSyncCommitteeContribution,
|
||||
},
|
||||
];
|
||||
|
||||
for (const {type, forks, handler} of topicHandlers) {
|
||||
for (const fork of forks) {
|
||||
const topic = {type, fork} as GossipTopic;
|
||||
this.gossip.handleTopic(topic, handler as GossipHandlerFn);
|
||||
this.topicHandlers.push({topic, handler: handler as GossipHandlerFn});
|
||||
}
|
||||
}
|
||||
|
||||
for (const fork of allForkNames) {
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
const topic = {type: GossipType.beacon_attestation, fork, subnet};
|
||||
const handlerWrapped = (async (attestation: phase0.Attestation): Promise<void> =>
|
||||
await this.onAttestation(subnet, attestation)) as GossipHandlerFn;
|
||||
// Note: Calling .handleTopic() does not subscribe. Safe to do in any fork// TODO: Only subscribe after altair
|
||||
this.gossip.handleTopic(topic, handlerWrapped);
|
||||
this.topicHandlers.push({topic, handler: handlerWrapped});
|
||||
}
|
||||
}
|
||||
|
||||
for (const fork of allForksAfterAltair) {
|
||||
for (let subnet = 0; subnet < SYNC_COMMITTEE_SUBNET_COUNT; subnet++) {
|
||||
const topic = {type: GossipType.sync_committee, fork, subnet};
|
||||
const handlerWrapped = (async (signature: altair.SyncCommitteeMessage): Promise<void> =>
|
||||
await this.onSyncCommitteeSignature(subnet, signature)) as GossipHandlerFn;
|
||||
this.gossip.handleTopic(topic, handlerWrapped);
|
||||
this.topicHandlers.push({topic, handler: handlerWrapped});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
179
packages/lodestar/src/network/gossip/handlers/index.ts
Normal file
179
packages/lodestar/src/network/gossip/handlers/index.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
import {IBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {IMetrics} from "../../../metrics";
|
||||
import {OpSource} from "../../../metrics/validatorMonitor";
|
||||
import {IBeaconDb} from "../../../db";
|
||||
import {IBeaconChain} from "../../../chain";
|
||||
import {BlockError, BlockErrorCode} from "../../../chain/errors";
|
||||
import {GossipTopicMap, GossipType, GossipTypeMap} from "../interface";
|
||||
import {
|
||||
validateGossipAggregateAndProof,
|
||||
validateGossipAttestation,
|
||||
validateGossipAttesterSlashing,
|
||||
validateGossipBlock,
|
||||
validateGossipProposerSlashing,
|
||||
validateGossipSyncCommittee,
|
||||
validateSyncCommitteeGossipContributionAndProof,
|
||||
validateGossipVoluntaryExit,
|
||||
} from "../../../chain/validation";
|
||||
|
||||
export type GossipHandlerFn = (object: GossipTypeMap[GossipType], topic: GossipTopicMap[GossipType]) => Promise<void>;
|
||||
export type GossipHandlers = {
|
||||
[K in GossipType]: (object: GossipTypeMap[K], topic: GossipTopicMap[K]) => Promise<void>;
|
||||
};
|
||||
|
||||
type ValidatorFnsModules = {
|
||||
chain: IBeaconChain;
|
||||
config: IBeaconConfig;
|
||||
db: IBeaconDb;
|
||||
logger: ILogger;
|
||||
metrics: IMetrics | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Gossip handlers perform validation + handling in a single function.
|
||||
* - This gossip handlers MUST only be registered as validator functions. No handler is registered for any topic.
|
||||
* - All `chain/validation/*` functions MUST throw typed GossipActionError instances so they gossip action is captured
|
||||
* by `getGossipValidatorFn()` try catch block.
|
||||
* - This gossip handlers should not let any handling errors propagate to the caller. Only validation errors must be thrown.
|
||||
*
|
||||
* Note: `libp2p/js-libp2p-interfaces` would normally indicate to register separate validator functions and handler functions.
|
||||
* This approach is not suitable for us because:
|
||||
* - We do expensive processing on the object in the validator function that we need to re-use in the handler function.
|
||||
* - The validator function produces extra data that is needed for the handler function. Making this data available in
|
||||
* the handler function scope is hard to achieve without very hacky strategies
|
||||
* - Eth2.0 gossipsub protocol strictly defined a single topic for message
|
||||
*/
|
||||
export function getGossipHandlers(modules: ValidatorFnsModules): GossipHandlers {
|
||||
const {chain, db, config, metrics, logger} = modules;
|
||||
|
||||
return {
|
||||
[GossipType.beacon_block]: async (signedBlock) => {
|
||||
const seenTimestampSec = Date.now() / 1000;
|
||||
|
||||
try {
|
||||
await validateGossipBlock(config, chain, db, {
|
||||
signedBlock,
|
||||
reprocess: false,
|
||||
prefinalized: false,
|
||||
validSignatures: false,
|
||||
validProposerSignature: false,
|
||||
});
|
||||
|
||||
metrics?.registerBeaconBlock(OpSource.api, seenTimestampSec, signedBlock.message);
|
||||
|
||||
// Handler
|
||||
|
||||
try {
|
||||
chain.receiveBlock(signedBlock);
|
||||
} catch (e) {
|
||||
logger.error("Error receiving block", {}, e);
|
||||
}
|
||||
} catch (e) {
|
||||
if (
|
||||
e instanceof BlockError &&
|
||||
(e.type.code === BlockErrorCode.FUTURE_SLOT || e.type.code === BlockErrorCode.PARENT_UNKNOWN)
|
||||
) {
|
||||
chain.receiveBlock(signedBlock);
|
||||
}
|
||||
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
|
||||
[GossipType.beacon_aggregate_and_proof]: async (signedAggregateAndProof) => {
|
||||
const seenTimestampSec = Date.now() / 1000;
|
||||
|
||||
const indexedAtt = await validateGossipAggregateAndProof(chain, signedAggregateAndProof);
|
||||
|
||||
metrics?.registerAggregatedAttestation(OpSource.gossip, seenTimestampSec, signedAggregateAndProof, indexedAtt);
|
||||
|
||||
// TODO: Add DoS resistant pending attestation pool
|
||||
// switch (e.type.code) {
|
||||
// case AttestationErrorCode.FUTURE_SLOT:
|
||||
// chain.pendingAttestations.putBySlot(e.type.attestationSlot, attestation);
|
||||
// break;
|
||||
// case AttestationErrorCode.UNKNOWN_TARGET_ROOT:
|
||||
// case AttestationErrorCode.UNKNOWN_BEACON_BLOCK_ROOT:
|
||||
// chain.pendingAttestations.putByBlock(e.type.root, attestation);
|
||||
// break;
|
||||
// }
|
||||
|
||||
// Handler
|
||||
|
||||
db.aggregateAndProof.add(signedAggregateAndProof.message).catch((e) => {
|
||||
logger.error("Error adding aggregateAndProof to pool", {}, e);
|
||||
});
|
||||
},
|
||||
|
||||
[GossipType.beacon_attestation]: async (attestation, {subnet}) => {
|
||||
const seenTimestampSec = Date.now() / 1000;
|
||||
|
||||
const {indexedAttestation} = await validateGossipAttestation(chain, attestation, subnet);
|
||||
|
||||
metrics?.registerUnaggregatedAttestation(OpSource.gossip, seenTimestampSec, indexedAttestation);
|
||||
|
||||
// Handler
|
||||
|
||||
try {
|
||||
chain.attestationPool.add(attestation);
|
||||
} catch (e) {
|
||||
logger.error("Error adding attestation to pool", {subnet}, e);
|
||||
}
|
||||
},
|
||||
|
||||
[GossipType.voluntary_exit]: async (voluntaryExit) => {
|
||||
await validateGossipVoluntaryExit(chain, db, voluntaryExit);
|
||||
|
||||
// Handler
|
||||
|
||||
db.voluntaryExit.add(voluntaryExit).catch((e) => {
|
||||
logger.error("Error adding attesterSlashing to pool", {}, e);
|
||||
});
|
||||
},
|
||||
|
||||
[GossipType.proposer_slashing]: async (proposerSlashing) => {
|
||||
await validateGossipProposerSlashing(chain, db, proposerSlashing);
|
||||
|
||||
// Handler
|
||||
|
||||
db.proposerSlashing.add(proposerSlashing).catch((e) => {
|
||||
logger.error("Error adding attesterSlashing to pool", {}, e);
|
||||
});
|
||||
},
|
||||
|
||||
[GossipType.attester_slashing]: async (attesterSlashing) => {
|
||||
await validateGossipAttesterSlashing(chain, db, attesterSlashing);
|
||||
|
||||
// Handler
|
||||
|
||||
db.attesterSlashing.add(attesterSlashing).catch((e) => {
|
||||
logger.error("Error adding attesterSlashing to pool", {}, e);
|
||||
});
|
||||
},
|
||||
|
||||
[GossipType.sync_committee_contribution_and_proof]: async (contributionAndProof) => {
|
||||
await validateSyncCommitteeGossipContributionAndProof(chain, db, contributionAndProof);
|
||||
|
||||
// Handler
|
||||
|
||||
try {
|
||||
db.syncCommitteeContribution.add(contributionAndProof.message);
|
||||
} catch (e) {
|
||||
logger.error("Error adding to contributionAndProof pool", {}, e);
|
||||
}
|
||||
},
|
||||
|
||||
[GossipType.sync_committee]: async (syncCommittee, {subnet}) => {
|
||||
const {indexInSubCommittee} = await validateGossipSyncCommittee(chain, db, syncCommittee, subnet);
|
||||
|
||||
// Handler
|
||||
|
||||
try {
|
||||
db.syncCommittee.add(subnet, syncCommittee, indexInSubCommittee);
|
||||
} catch (e) {
|
||||
logger.error("Error adding to syncCommittee pool", {subnet}, e);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1,8 +1,3 @@
|
||||
export * from "./gossipsub";
|
||||
export * from "./validator";
|
||||
export {Eth2Gossipsub} from "./gossipsub";
|
||||
export {GossipHandlers, getGossipHandlers} from "./handlers";
|
||||
export * from "./interface";
|
||||
export * from "./topic";
|
||||
export * from "./constants";
|
||||
export * from "./errors";
|
||||
export * from "./encoding";
|
||||
export * from "./utils";
|
||||
|
||||
@@ -12,8 +12,6 @@ import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {InMessage} from "libp2p-interfaces/src/pubsub";
|
||||
import {IBeaconChain} from "../../chain";
|
||||
import {NetworkEvent} from "../events";
|
||||
import {IBeaconDb} from "../../db";
|
||||
import {IMetrics} from "../../metrics";
|
||||
|
||||
export enum GossipType {
|
||||
// phase0
|
||||
@@ -29,7 +27,6 @@ export enum GossipType {
|
||||
}
|
||||
|
||||
export enum GossipEncoding {
|
||||
ssz = "ssz",
|
||||
ssz_snappy = "ssz_snappy",
|
||||
}
|
||||
|
||||
@@ -91,7 +88,7 @@ export type GossipFnByType = {
|
||||
export type GossipFn = GossipFnByType[keyof GossipFnByType];
|
||||
|
||||
export interface IGossipEvents {
|
||||
[topic: string]: GossipFn;
|
||||
[topicStr: string]: GossipFn;
|
||||
[NetworkEvent.gossipHeartbeat]: () => void;
|
||||
[NetworkEvent.gossipStart]: () => void;
|
||||
[NetworkEvent.gossipStop]: () => void;
|
||||
@@ -111,62 +108,11 @@ export interface IGossipModules {
|
||||
* https://github.com/ethereum/eth2.0-specs/blob/dev/specs/phase0/p2p-interface.md#global-topics
|
||||
*/
|
||||
|
||||
export type GossipObject =
|
||||
| allForks.SignedBeaconBlock
|
||||
| phase0.SignedAggregateAndProof
|
||||
| phase0.Attestation
|
||||
| phase0.SignedVoluntaryExit
|
||||
| phase0.ProposerSlashing
|
||||
| phase0.AttesterSlashing
|
||||
| altair.SignedContributionAndProof
|
||||
| altair.SyncCommitteeMessage;
|
||||
|
||||
export type GossipHandlerFn = (gossipObject: GossipObject) => Promise<void> | void;
|
||||
|
||||
export type GossipSerializer = (obj: GossipObject) => Uint8Array;
|
||||
|
||||
export type GossipDeserializer = (buf: Uint8Array) => GossipObject;
|
||||
|
||||
export interface IObjectValidatorModules {
|
||||
chain: IBeaconChain;
|
||||
config: IBeaconConfig;
|
||||
db: IBeaconDb;
|
||||
logger: ILogger;
|
||||
metrics: IMetrics | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Top-level type for gossip validation functions
|
||||
*
|
||||
* js-libp2p-gossipsub expects validation functions that look like this
|
||||
*/
|
||||
export type TopicValidatorFn = (topic: string, message: InMessage) => Promise<void>;
|
||||
export type GossipValidatorFn = (topicStr: string, message: InMessage) => Promise<void>;
|
||||
|
||||
/**
|
||||
* Map of TopicValidatorFn by topic string. What js-libp2p-gossipsub requires
|
||||
*/
|
||||
export type TopicValidatorFnMap = Map<string, TopicValidatorFn>;
|
||||
|
||||
/**
|
||||
* Overridden `InMessage`
|
||||
*
|
||||
* Possibly includes cached msgId, uncompressed message data, deserialized data
|
||||
*/
|
||||
export interface IGossipMessage extends InMessage {
|
||||
/**
|
||||
* Cached message id
|
||||
*/
|
||||
msgId?: Uint8Array;
|
||||
/**
|
||||
* Cached uncompressed data
|
||||
*/
|
||||
uncompressed?: Uint8Array;
|
||||
/**
|
||||
* deserialized data
|
||||
*/
|
||||
gossipObject?: GossipObject;
|
||||
/**
|
||||
* gossip topic
|
||||
*/
|
||||
gossipTopic?: GossipTopic;
|
||||
}
|
||||
export type ValidatorFnsByType = {[K in GossipType]: GossipValidatorFn};
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {GossipValidationError} from "./errors";
|
||||
import {decodeMessageData} from "./encoding";
|
||||
import {getGossipSSZDeserializer} from "./topic";
|
||||
import {IGossipMessage, GossipTypeMap, GossipTopicMap, GossipType, GossipTopic, GossipEncoding} from "./interface";
|
||||
|
||||
/**
|
||||
* Mutates the `IGossipMessage` `message` so `parseGossipMsg()` can use it
|
||||
*/
|
||||
export function prepareGossipMsg(message: IGossipMessage, gossipTopic: GossipTopic): void {
|
||||
// get GossipTopic and GossipObject, set on IGossipMessage
|
||||
const messageData = decodeMessageData(gossipTopic.encoding as GossipEncoding, message.data);
|
||||
const gossipObject = getGossipSSZDeserializer(gossipTopic)(messageData);
|
||||
// Lodestar ObjectValidatorFns rely on these properties being set
|
||||
message.gossipObject = gossipObject;
|
||||
message.gossipTopic = gossipTopic;
|
||||
}
|
||||
|
||||
export function parseGossipMsg<K extends GossipType>(
|
||||
msg: IGossipMessage
|
||||
): {gossipTopic: GossipTopicMap[K]; gossipObject: GossipTypeMap[K]} {
|
||||
const gossipTopic = msg.gossipTopic as GossipTopicMap[K];
|
||||
const gossipObject = msg.gossipObject as GossipTypeMap[K];
|
||||
if (gossipTopic == null || gossipObject == null) {
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
}
|
||||
return {gossipTopic, gossipObject};
|
||||
}
|
||||
@@ -2,14 +2,36 @@
|
||||
* @module network/gossip
|
||||
*/
|
||||
|
||||
import {ContainerType} from "@chainsafe/ssz";
|
||||
import {ssz} from "@chainsafe/lodestar-types";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {IForkDigestContext, toHexStringNoPrefix} from "../../util/forkDigestContext";
|
||||
import {GossipType, GossipTopic, GossipEncoding} from "./interface";
|
||||
import {DEFAULT_ENCODING} from "./constants";
|
||||
import {GossipEncoding, GossipDeserializer, GossipObject, GossipSerializer, GossipType, GossipTopic} from "./interface";
|
||||
|
||||
const gossipTopicRegex = new RegExp("^/eth2/(\\w+)/(\\w+)/(\\w+)");
|
||||
export interface IGossipTopicCache {
|
||||
getTopic(topicStr: string): GossipTopic;
|
||||
}
|
||||
|
||||
export class GossipTopicCache implements IGossipTopicCache {
|
||||
private topicsByTopicStr = new Map<string, Required<GossipTopic>>();
|
||||
|
||||
constructor(private readonly forkDigestContext: IForkDigestContext) {}
|
||||
|
||||
getTopic(topicStr: string): GossipTopic {
|
||||
let topic = this.topicsByTopicStr.get(topicStr);
|
||||
if (topic === undefined) {
|
||||
topic = parseGossipTopic(this.forkDigestContext, topicStr);
|
||||
// TODO: Consider just throwing here. We should only receive messages from known subscribed topics
|
||||
this.topicsByTopicStr.set(topicStr, topic);
|
||||
}
|
||||
return topic;
|
||||
}
|
||||
|
||||
setTopic(topicStr: string, topic: GossipTopic): void {
|
||||
if (!this.topicsByTopicStr.has(topicStr)) {
|
||||
this.topicsByTopicStr.set(topicStr, {encoding: DEFAULT_ENCODING, ...topic});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stringify a GossipTopic into a spec-ed formated topic string
|
||||
@@ -40,22 +62,35 @@ function stringifyGossipTopicType(topic: GossipTopic): string {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a gossip string to a fork.
|
||||
* A gossip topic has the format
|
||||
* ```ts
|
||||
* /eth2/$FORK_DIGEST/$GOSSIP_TYPE/$ENCODING
|
||||
* ```
|
||||
*/
|
||||
export function getForkFromGossipTopic(forkDigestContext: IForkDigestContext, topicStr: string): ForkName {
|
||||
const matches = topicStr.match(gossipTopicRegex);
|
||||
if (matches === null) {
|
||||
throw Error(`Must match regex ${gossipTopicRegex}`);
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/explicit-function-return-type
|
||||
export function getGossipSSZType(topic: GossipTopic) {
|
||||
switch (topic.type) {
|
||||
case GossipType.beacon_block:
|
||||
// beacon_block is updated in altair to support the updated SignedBeaconBlock type
|
||||
return ssz[topic.fork].SignedBeaconBlock;
|
||||
case GossipType.beacon_aggregate_and_proof:
|
||||
return ssz.phase0.SignedAggregateAndProof;
|
||||
case GossipType.beacon_attestation:
|
||||
return ssz.phase0.Attestation;
|
||||
case GossipType.proposer_slashing:
|
||||
return ssz.phase0.ProposerSlashing;
|
||||
case GossipType.attester_slashing:
|
||||
return ssz.phase0.AttesterSlashing;
|
||||
case GossipType.voluntary_exit:
|
||||
return ssz.phase0.SignedVoluntaryExit;
|
||||
case GossipType.sync_committee_contribution_and_proof:
|
||||
return ssz.altair.SignedContributionAndProof;
|
||||
case GossipType.sync_committee:
|
||||
return ssz.altair.SyncCommitteeMessage;
|
||||
default:
|
||||
throw new Error(`No ssz gossip type for ${(topic as GossipTopic).type}`);
|
||||
}
|
||||
|
||||
return forkDigestContext.forkDigest2ForkName(matches[1]);
|
||||
}
|
||||
|
||||
// Parsing
|
||||
|
||||
const gossipTopicRegex = new RegExp("^/eth2/(\\w+)/(\\w+)/(\\w+)");
|
||||
|
||||
/**
|
||||
* Parse a `GossipTopic` object from its stringified form.
|
||||
* A gossip topic has the format
|
||||
@@ -63,7 +98,7 @@ export function getForkFromGossipTopic(forkDigestContext: IForkDigestContext, to
|
||||
* /eth2/$FORK_DIGEST/$GOSSIP_TYPE/$ENCODING
|
||||
* ```
|
||||
*/
|
||||
export function parseGossipTopic(forkDigestContext: IForkDigestContext, topicStr: string): GossipTopic {
|
||||
export function parseGossipTopic(forkDigestContext: IForkDigestContext, topicStr: string): Required<GossipTopic> {
|
||||
try {
|
||||
const matches = topicStr.match(gossipTopicRegex);
|
||||
if (matches === null) {
|
||||
@@ -107,7 +142,6 @@ export function parseGossipTopic(forkDigestContext: IForkDigestContext, topicStr
|
||||
*/
|
||||
function parseEncodingStr(encodingStr: string): GossipEncoding {
|
||||
switch (encodingStr) {
|
||||
case GossipEncoding.ssz:
|
||||
case GossipEncoding.ssz_snappy:
|
||||
return encodingStr;
|
||||
|
||||
@@ -115,56 +149,3 @@ function parseEncodingStr(encodingStr: string): GossipEncoding {
|
||||
throw Error(`Unknown encoding ${encodingStr}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function getGossipSSZType<T extends GossipObject>(topic: GossipTopic): ContainerType<T> {
|
||||
switch (topic.type) {
|
||||
case GossipType.beacon_block:
|
||||
// beacon_block is updated in altair to support the updated SignedBeaconBlock type
|
||||
return (ssz[topic.fork].SignedBeaconBlock as unknown) as ContainerType<T>;
|
||||
case GossipType.beacon_aggregate_and_proof:
|
||||
return (ssz.phase0.SignedAggregateAndProof as unknown) as ContainerType<T>;
|
||||
case GossipType.beacon_attestation:
|
||||
return (ssz.phase0.Attestation as unknown) as ContainerType<T>;
|
||||
case GossipType.proposer_slashing:
|
||||
return (ssz.phase0.ProposerSlashing as unknown) as ContainerType<T>;
|
||||
case GossipType.attester_slashing:
|
||||
return (ssz.phase0.AttesterSlashing as unknown) as ContainerType<T>;
|
||||
case GossipType.voluntary_exit:
|
||||
return (ssz.phase0.SignedVoluntaryExit as unknown) as ContainerType<T>;
|
||||
case GossipType.sync_committee_contribution_and_proof:
|
||||
return (ssz.altair.SignedContributionAndProof as unknown) as ContainerType<T>;
|
||||
case GossipType.sync_committee:
|
||||
return (ssz.altair.SyncCommitteeMessage as unknown) as ContainerType<T>;
|
||||
default:
|
||||
throw new Error(`No ssz gossip type for ${(topic as GossipTopic).type}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ssz deserialize function for a gossip topic
|
||||
*/
|
||||
export function getGossipSSZDeserializer(topic: GossipTopic): GossipDeserializer {
|
||||
const sszType = getGossipSSZType(topic);
|
||||
|
||||
switch (topic.type) {
|
||||
case GossipType.beacon_block:
|
||||
case GossipType.beacon_aggregate_and_proof:
|
||||
// all other gossip can be deserialized to struct
|
||||
return sszType.createTreeBackedFromBytes.bind(sszType);
|
||||
case GossipType.beacon_attestation:
|
||||
case GossipType.proposer_slashing:
|
||||
case GossipType.attester_slashing:
|
||||
case GossipType.voluntary_exit:
|
||||
case GossipType.sync_committee_contribution_and_proof:
|
||||
case GossipType.sync_committee:
|
||||
return sszType.deserialize.bind(sszType);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a ssz serialize function for a gossip topic
|
||||
*/
|
||||
export function getGossipSSZSerializer(topic: GossipTopic): GossipSerializer {
|
||||
const sszType = getGossipSSZType(topic);
|
||||
return sszType.serialize.bind(sszType);
|
||||
}
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
/**
|
||||
* @module network/gossip
|
||||
*/
|
||||
|
||||
import {InMessage} from "libp2p-interfaces/src/pubsub";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {IGossipMessage} from "./interface";
|
||||
import {computeMsgIdAltair, computeMsgIdPhase0} from "./encoding";
|
||||
import {GOSSIP_MAX_SIZE} from "../../constants";
|
||||
import {IForkDigestContext} from "../../util/forkDigestContext";
|
||||
import {getForkFromGossipTopic} from "./topic";
|
||||
|
||||
/**
|
||||
* Basic sanity check on gossip message
|
||||
*/
|
||||
export function messageIsValid(message: InMessage): boolean | undefined {
|
||||
return message.topicIDs && message.topicIDs.length === 1 && message.data && message.data.length <= GOSSIP_MAX_SIZE;
|
||||
}
|
||||
|
||||
export function msgIdToString(msgId: Uint8Array): string {
|
||||
return Buffer.from(msgId).toString("base64");
|
||||
}
|
||||
|
||||
export function getMsgId(msg: IGossipMessage, forkDigestContext: IForkDigestContext): Uint8Array {
|
||||
const topic = msg.topicIDs[0];
|
||||
const fork = getForkFromGossipTopic(forkDigestContext, topic);
|
||||
if (!msg.msgId) {
|
||||
const {msgId, uncompressed} =
|
||||
fork === ForkName.phase0 ? computeMsgIdPhase0(topic, msg.data) : computeMsgIdAltair(topic, msg.data);
|
||||
msg.msgId = msgId;
|
||||
msg.uncompressed = uncompressed;
|
||||
}
|
||||
return msg.msgId;
|
||||
}
|
||||
107
packages/lodestar/src/network/gossip/validation/index.ts
Normal file
107
packages/lodestar/src/network/gossip/validation/index.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {AbortSignal} from "@chainsafe/abort-controller";
|
||||
import {IChainForkConfig} from "@chainsafe/lodestar-config";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {ILogger, mapValues} from "@chainsafe/lodestar-utils";
|
||||
import {IMetrics} from "../../../metrics";
|
||||
import {getGossipSSZType, GossipTopicCache} from "../topic";
|
||||
import {GossipHandlers, GossipHandlerFn} from "../handlers";
|
||||
import {GossipType, GossipValidatorFn, ValidatorFnsByType} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
import {GossipActionError, GossipAction} from "../../../chain/errors";
|
||||
import {decodeMessageData, UncompressCache} from "../encoding";
|
||||
import {wrapWithQueue} from "./queue";
|
||||
import {DEFAULT_ENCODING} from "../constants";
|
||||
import {getGossipAcceptMetadataByType, GetGossipAcceptMetadataFn} from "./onAccept";
|
||||
|
||||
type ValidatorFnModules = {
|
||||
config: IChainForkConfig;
|
||||
logger: ILogger;
|
||||
metrics: IMetrics | null;
|
||||
uncompressCache: UncompressCache;
|
||||
gossipTopicCache: GossipTopicCache;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns GossipValidatorFn for each GossipType, given GossipHandlerFn indexed by type.
|
||||
*
|
||||
* @see getGossipHandlers for reasoning on why GossipHandlerFn are used for gossip validation.
|
||||
*/
|
||||
export function createValidatorFnsByType(
|
||||
gossipHandlers: GossipHandlers,
|
||||
modules: ValidatorFnModules & {signal: AbortSignal}
|
||||
): ValidatorFnsByType {
|
||||
return mapValues(gossipHandlers, (gossipHandler, type) => {
|
||||
const gossipValidatorFn = getGossipValidatorFn(gossipHandler, type, modules);
|
||||
|
||||
return wrapWithQueue(gossipValidatorFn, type, modules.signal, modules.metrics);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a GossipSub validator function from a GossipHandlerFn. GossipHandlerFn may throw GossipActionError if one
|
||||
* or more validation conditions from the eth2.0-specs#p2p-interface are not satisfied.
|
||||
*
|
||||
* This function receives a string topic and a binary message `InMessage` and deserializes both using caches.
|
||||
* - The topic string should be known in advance and pre-computed
|
||||
* - The message.data should already by uncompressed when computing its msgID
|
||||
*
|
||||
* All logging and metrics associated with gossip object validation should happen in this function. We want to know
|
||||
* - In debug logs what objects are we processing, the result and some succint metadata
|
||||
* - In metrics what's the throughput and ratio of accept/ignore/reject per type
|
||||
*
|
||||
* @see getGossipHandlers for reasoning on why GossipHandlerFn are used for gossip validation.
|
||||
*/
|
||||
function getGossipValidatorFn<K extends GossipType>(
|
||||
gossipHandler: GossipHandlers[K],
|
||||
type: K,
|
||||
modules: ValidatorFnModules
|
||||
): GossipValidatorFn {
|
||||
const {config, logger, metrics, uncompressCache, gossipTopicCache} = modules;
|
||||
const getGossipObjectAcceptMetadata = getGossipAcceptMetadataByType[type] as GetGossipAcceptMetadataFn;
|
||||
|
||||
return async function (topicStr, gossipMsg) {
|
||||
try {
|
||||
const topic = gossipTopicCache.getTopic(topicStr);
|
||||
const encoding = topic.encoding ?? DEFAULT_ENCODING;
|
||||
|
||||
// Deserialize object from bytes ONLY after being picked up from the validation queue
|
||||
let gossipObject;
|
||||
try {
|
||||
const sszType = getGossipSSZType(topic);
|
||||
const messageData = decodeMessageData(encoding, gossipMsg.data, uncompressCache);
|
||||
gossipObject =
|
||||
// TODO: Review if it's really necessary to deserialize this as TreeBacked
|
||||
topic.type === GossipType.beacon_block || topic.type === GossipType.beacon_aggregate_and_proof
|
||||
? sszType.createTreeBackedFromBytes(messageData)
|
||||
: sszType.deserialize(messageData);
|
||||
} catch (e) {
|
||||
// TODO: Log the error or do something better with it
|
||||
throw new GossipActionError(GossipAction.REJECT, {code: (e as Error).message});
|
||||
}
|
||||
|
||||
await (gossipHandler as GossipHandlerFn)(gossipObject, topic);
|
||||
|
||||
const metadata = getGossipObjectAcceptMetadata(config, gossipObject, topic);
|
||||
logger.debug(`gossip - ${type} - accept`, metadata);
|
||||
metrics?.gossipValidationAccept.inc({topic: type}, 1);
|
||||
} catch (e) {
|
||||
if (!(e instanceof GossipActionError)) {
|
||||
logger.error(`Gossip validation ${type} threw a non-GossipActionError`, {}, e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE, (e as Error).message);
|
||||
}
|
||||
|
||||
switch (e.action) {
|
||||
case GossipAction.IGNORE:
|
||||
logger.debug(`gossip - ${type} - ignore`, e.type as Json);
|
||||
metrics?.gossipValidationIgnore.inc({topic: type}, 1);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE, e.message);
|
||||
|
||||
case GossipAction.REJECT:
|
||||
logger.debug(`gossip - ${type} - reject`, e.type as Json);
|
||||
metrics?.gossipValidationReject.inc({topic: type}, 1);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT, e.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
56
packages/lodestar/src/network/gossip/validation/onAccept.ts
Normal file
56
packages/lodestar/src/network/gossip/validation/onAccept.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import {IChainForkConfig} from "@chainsafe/lodestar-config";
|
||||
import {Json, toHexString} from "@chainsafe/ssz";
|
||||
import {GossipType, GossipTypeMap, GossipTopicTypeMap} from "../interface";
|
||||
|
||||
export type GetGossipAcceptMetadataFn = (
|
||||
config: IChainForkConfig,
|
||||
object: GossipTypeMap[GossipType],
|
||||
topic: GossipTopicTypeMap[GossipType]
|
||||
) => Json;
|
||||
export type GetGossipAcceptMetadataFns = {
|
||||
[K in GossipType]: (config: IChainForkConfig, object: GossipTypeMap[K], topic: GossipTopicTypeMap[K]) => Json;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return succint but meaningful data about accepted gossip objects.
|
||||
* This data is logged at the debug level extremely frequently so it must be short.
|
||||
*/
|
||||
export const getGossipAcceptMetadataByType: GetGossipAcceptMetadataFns = {
|
||||
[GossipType.beacon_block]: (config, signedBlock) => ({
|
||||
slot: signedBlock.message.slot,
|
||||
root: toHexString(config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message)),
|
||||
}),
|
||||
[GossipType.beacon_aggregate_and_proof]: (config, aggregateAndProof) => {
|
||||
const {data} = aggregateAndProof.message.aggregate;
|
||||
return {
|
||||
slot: data.slot,
|
||||
index: data.index,
|
||||
};
|
||||
},
|
||||
[GossipType.beacon_attestation]: (config, attestation, topic) => ({
|
||||
slot: attestation.data.slot,
|
||||
subnet: topic.subnet,
|
||||
index: attestation.data.index,
|
||||
}),
|
||||
[GossipType.voluntary_exit]: (config, voluntaryExit) => ({
|
||||
validatorIndex: voluntaryExit.message.validatorIndex,
|
||||
}),
|
||||
[GossipType.proposer_slashing]: (config, proposerSlashing) => ({
|
||||
proposerIndex: proposerSlashing.signedHeader1.message.proposerIndex,
|
||||
}),
|
||||
[GossipType.attester_slashing]: (config, attesterSlashing) => ({
|
||||
slot1: attesterSlashing.attestation1.data.slot,
|
||||
slot2: attesterSlashing.attestation2.data.slot,
|
||||
}),
|
||||
[GossipType.sync_committee_contribution_and_proof]: (config, contributionAndProof) => {
|
||||
const {contribution} = contributionAndProof.message;
|
||||
return {
|
||||
slot: contribution.slot,
|
||||
index: contribution.subCommitteeIndex,
|
||||
};
|
||||
},
|
||||
[GossipType.sync_committee]: (config, syncCommitteeSignature, topic) => ({
|
||||
slot: syncCommitteeSignature.slot,
|
||||
subnet: topic.subnet,
|
||||
}),
|
||||
};
|
||||
58
packages/lodestar/src/network/gossip/validation/queue.ts
Normal file
58
packages/lodestar/src/network/gossip/validation/queue.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import {AbortSignal} from "@chainsafe/abort-controller";
|
||||
import {IMetrics} from "../../../metrics";
|
||||
import {JobQueue, JobQueueOpts, QueueType} from "../../../util/queue";
|
||||
import {GossipType, GossipValidatorFn} from "../interface";
|
||||
|
||||
/**
|
||||
* Numbers from https://github.com/sigp/lighthouse/blob/b34a79dc0b02e04441ba01fd0f304d1e203d877d/beacon_node/network/src/beacon_processor/mod.rs#L69
|
||||
*/
|
||||
const gossipQueueOpts: {[K in GossipType]: Pick<JobQueueOpts, "maxLength" | "type" | "maxConcurrency">} = {
|
||||
[GossipType.beacon_block]: {maxLength: 1024, type: QueueType.FIFO},
|
||||
// this is different from lighthouse's, there are more gossip aggregate_and_proof than gossip block
|
||||
[GossipType.beacon_aggregate_and_proof]: {maxLength: 4096, type: QueueType.LIFO, maxConcurrency: 16},
|
||||
[GossipType.beacon_attestation]: {maxLength: 16384, type: QueueType.LIFO, maxConcurrency: 64},
|
||||
[GossipType.voluntary_exit]: {maxLength: 4096, type: QueueType.FIFO},
|
||||
[GossipType.proposer_slashing]: {maxLength: 4096, type: QueueType.FIFO},
|
||||
[GossipType.attester_slashing]: {maxLength: 4096, type: QueueType.FIFO},
|
||||
[GossipType.sync_committee_contribution_and_proof]: {maxLength: 4096, type: QueueType.LIFO},
|
||||
[GossipType.sync_committee]: {maxLength: 4096, type: QueueType.LIFO},
|
||||
};
|
||||
|
||||
/**
|
||||
* Wraps a GossipValidatorFn with a queue, to limit the processing of gossip objects by type.
|
||||
*
|
||||
* A queue here is essential to protect against DOS attacks, where a peer may send many messages at once.
|
||||
* Queues also protect the node against overloading. If the node gets bussy with an expensive epoch transition,
|
||||
* it may buffer too many gossip objects causing an Out of memory (OOM) error. With a queue the node will reject
|
||||
* new objects to fit its current throughput.
|
||||
*
|
||||
* Queues may buffer objects by
|
||||
* - topic '/eth2/0011aabb/beacon_attestation_0/ssz_snappy'
|
||||
* - type `GossipType.beacon_attestation`
|
||||
* - all objects in one queue
|
||||
*
|
||||
* By topic is too specific, so by type groups all similar objects in the same queue. All in the same won't allow
|
||||
* to customize different queue behaviours per object type (see `gossipQueueOpts`).
|
||||
*/
|
||||
export function wrapWithQueue(
|
||||
gossipValidatorFn: GossipValidatorFn,
|
||||
type: GossipType,
|
||||
signal: AbortSignal,
|
||||
metrics: IMetrics | null
|
||||
): GossipValidatorFn {
|
||||
const jobQueue = new JobQueue(
|
||||
{signal, ...gossipQueueOpts[type]},
|
||||
metrics
|
||||
? {
|
||||
length: metrics.gossipValidationQueueLength.child({topic: type}),
|
||||
droppedJobs: metrics.gossipValidationQueueDroppedJobs.child({topic: type}),
|
||||
jobTime: metrics.gossipValidationQueueJobTime.child({topic: type}),
|
||||
jobWaitTime: metrics.gossipValidationQueueJobWaitTime.child({topic: type}),
|
||||
}
|
||||
: undefined
|
||||
);
|
||||
|
||||
return async function (topicStr, gossipMsg) {
|
||||
await jobQueue.push(async () => gossipValidatorFn(topicStr, gossipMsg));
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
import {ATTESTATION_SUBNET_COUNT, ForkName, SYNC_COMMITTEE_SUBNET_COUNT} from "@chainsafe/lodestar-params";
|
||||
import {IChainForkConfig} from "@chainsafe/lodestar-config";
|
||||
import {IForkDigestContext} from "../../../util/forkDigestContext";
|
||||
import {stringifyGossipTopic} from "../topic";
|
||||
import {DEFAULT_ENCODING} from "../constants";
|
||||
import {GossipType, GossipValidatorFn, GossipTopic, ValidatorFnsByType} from "../interface";
|
||||
|
||||
/**
|
||||
* Associate a GossipValidator function to every possible topic that the node may subscribe too.
|
||||
*
|
||||
* GossipSub gets validator functions from the Map Eth2Gossipsub.topicValidators by message topicStr.
|
||||
* https://github.com/libp2p/js-libp2p-interfaces/blob/ff3bd10704a4c166ce63135747e3736915b0be8d/src/pubsub/index.js#L525
|
||||
*
|
||||
* Eth2Gossipsub MUST customize the validate() function above to ensure all validator functions are registered.
|
||||
*
|
||||
* Note: topics of the same type share validator functions
|
||||
* ```ts
|
||||
* '/eth2/0011aabb/beacon_attestation_0/ssz_snappy' -> ValidatorFnsByType[GossipType.beacon_attestation]
|
||||
* '/eth2/0011aabb/beacon_attestation_1/ssz_snappy' -> ValidatorFnsByType[GossipType.beacon_attestation]
|
||||
* ```
|
||||
*/
|
||||
export function createValidatorFnsByTopic(
|
||||
config: IChainForkConfig,
|
||||
forkDigestContext: IForkDigestContext,
|
||||
validatorFnsByType: ValidatorFnsByType
|
||||
): Map<string, GossipValidatorFn> {
|
||||
const validatorFnsByTopic = new Map<string, GossipValidatorFn>();
|
||||
|
||||
const encoding = DEFAULT_ENCODING;
|
||||
const allForkNames = Object.values(config.forks).map((fork) => fork.name);
|
||||
const allForksAfterPhase0 = allForkNames.filter((fork) => fork !== ForkName.phase0);
|
||||
|
||||
const staticGossipTypes = [
|
||||
{type: GossipType.beacon_block, forks: allForkNames},
|
||||
{type: GossipType.beacon_aggregate_and_proof, forks: allForkNames},
|
||||
{type: GossipType.voluntary_exit, forks: allForkNames},
|
||||
{type: GossipType.proposer_slashing, forks: allForkNames},
|
||||
{type: GossipType.attester_slashing, forks: allForkNames},
|
||||
// Note: Calling .handleTopic() does not subscribe. Safe to do in any fork
|
||||
{type: GossipType.sync_committee_contribution_and_proof, forks: allForksAfterPhase0},
|
||||
];
|
||||
|
||||
for (const {type, forks} of staticGossipTypes) {
|
||||
for (const fork of forks) {
|
||||
const topic = {type, fork, encoding} as Required<GossipTopic>;
|
||||
const topicStr = stringifyGossipTopic(forkDigestContext, topic);
|
||||
validatorFnsByTopic.set(topicStr, validatorFnsByType[type]);
|
||||
}
|
||||
}
|
||||
|
||||
for (const fork of allForkNames) {
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
const topic = {type: GossipType.beacon_attestation, fork, subnet, encoding};
|
||||
const topicStr = stringifyGossipTopic(forkDigestContext, topic);
|
||||
const topicValidatorFn = validatorFnsByType[GossipType.beacon_attestation];
|
||||
validatorFnsByTopic.set(topicStr, topicValidatorFn);
|
||||
}
|
||||
}
|
||||
|
||||
for (const fork of allForksAfterPhase0) {
|
||||
for (let subnet = 0; subnet < SYNC_COMMITTEE_SUBNET_COUNT; subnet++) {
|
||||
const topic = {type: GossipType.sync_committee, fork, subnet, encoding};
|
||||
const topicStr = stringifyGossipTopic(forkDigestContext, topic);
|
||||
const topicValidatorFn = validatorFnsByType[GossipType.sync_committee];
|
||||
validatorFnsByTopic.set(topicStr, topicValidatorFn);
|
||||
}
|
||||
}
|
||||
|
||||
return validatorFnsByTopic;
|
||||
}
|
||||
@@ -1,133 +0,0 @@
|
||||
import {AbortSignal} from "@chainsafe/abort-controller";
|
||||
import {ATTESTATION_SUBNET_COUNT, ForkName} from "@chainsafe/lodestar-params";
|
||||
import {mapValues} from "@chainsafe/lodestar-utils";
|
||||
import {IMetrics} from "../../metrics";
|
||||
import {JobQueue, JobQueueOpts, QueueType} from "../../util/queue";
|
||||
import {stringifyGossipTopic} from "./topic";
|
||||
import {DEFAULT_ENCODING} from "./constants";
|
||||
import {validatorFns} from "./validatorFns";
|
||||
import {parseGossipMsg} from "./message";
|
||||
import {
|
||||
GossipType,
|
||||
TopicValidatorFn,
|
||||
IObjectValidatorModules,
|
||||
GossipTopic,
|
||||
TopicValidatorFnMap,
|
||||
GossipTopicMap,
|
||||
GossipTypeMap,
|
||||
} from "./interface";
|
||||
|
||||
// Numbers from https://github.com/sigp/lighthouse/blob/b34a79dc0b02e04441ba01fd0f304d1e203d877d/beacon_node/network/src/beacon_processor/mod.rs#L69
|
||||
const gossipQueueOpts: {[K in GossipType]: Pick<JobQueueOpts, "maxLength" | "type" | "maxConcurrency">} = {
|
||||
[GossipType.beacon_block]: {maxLength: 1024, type: QueueType.FIFO},
|
||||
// this is different from lighthouse's, there are more gossip aggregate_and_proof than gossip block
|
||||
[GossipType.beacon_aggregate_and_proof]: {maxLength: 4096, type: QueueType.LIFO, maxConcurrency: 16},
|
||||
[GossipType.beacon_attestation]: {maxLength: 16384, type: QueueType.LIFO, maxConcurrency: 64},
|
||||
[GossipType.voluntary_exit]: {maxLength: 4096, type: QueueType.FIFO},
|
||||
[GossipType.proposer_slashing]: {maxLength: 4096, type: QueueType.FIFO},
|
||||
[GossipType.attester_slashing]: {maxLength: 4096, type: QueueType.FIFO},
|
||||
[GossipType.sync_committee_contribution_and_proof]: {maxLength: 4096, type: QueueType.LIFO},
|
||||
[GossipType.sync_committee]: {maxLength: 4096, type: QueueType.LIFO},
|
||||
};
|
||||
|
||||
export function createTopicValidatorFnMap(
|
||||
modules: IObjectValidatorModules,
|
||||
metrics: IMetrics | null,
|
||||
signal: AbortSignal
|
||||
): TopicValidatorFnMap {
|
||||
const wrappedValidatorFns = mapValues(validatorFns, (validatorFn, type) =>
|
||||
wrapWithQueue(validatorFn as ValidatorFn<typeof type>, modules, {signal, ...gossipQueueOpts[type]}, metrics, type)
|
||||
);
|
||||
|
||||
return createValidatorFnsByTopic(modules, wrappedValidatorFns);
|
||||
}
|
||||
|
||||
/**
|
||||
* Intermediate type for gossip validation functions.
|
||||
* Gossip validation functions defined with this signature are easier to unit test
|
||||
*/
|
||||
export type ValidatorFn<K extends GossipType> = (
|
||||
modules: IObjectValidatorModules,
|
||||
topic: GossipTopicMap[K],
|
||||
object: GossipTypeMap[K]
|
||||
) => Promise<void>;
|
||||
|
||||
/**
|
||||
* Wraps an ObjectValidatorFn as a TopicValidatorFn
|
||||
* See TopicValidatorFn here https://github.com/libp2p/js-libp2p-interfaces/blob/v0.5.2/src/pubsub/index.js#L529
|
||||
*/
|
||||
export function wrapWithQueue<K extends GossipType>(
|
||||
validatorFn: ValidatorFn<K>,
|
||||
modules: IObjectValidatorModules,
|
||||
queueOpts: JobQueueOpts,
|
||||
metrics: IMetrics | null,
|
||||
type: GossipType
|
||||
): TopicValidatorFn {
|
||||
const jobQueue = new JobQueue(
|
||||
queueOpts,
|
||||
metrics
|
||||
? {
|
||||
length: metrics.gossipValidationQueueLength.child({topic: type}),
|
||||
droppedJobs: metrics.gossipValidationQueueDroppedJobs.child({topic: type}),
|
||||
jobTime: metrics.gossipValidationQueueJobTime.child({topic: type}),
|
||||
jobWaitTime: metrics.gossipValidationQueueJobWaitTime.child({topic: type}),
|
||||
}
|
||||
: undefined
|
||||
);
|
||||
return async function (_topicStr, gossipMsg) {
|
||||
const {gossipTopic, gossipObject} = parseGossipMsg<K>(gossipMsg);
|
||||
await jobQueue.push(async () => await validatorFn(modules, gossipTopic, gossipObject));
|
||||
};
|
||||
}
|
||||
|
||||
// Gossip validation functions are wrappers around chain-level validation functions
|
||||
// With a few additional elements:
|
||||
//
|
||||
// - Gossip error handling - chain-level validation throws eg: `BlockErrorCode` with many possible error types.
|
||||
// Gossip validation functions instead throw either "ignore" or "reject" errors.
|
||||
//
|
||||
// - Logging - chain-level validation has no logging.
|
||||
// For gossip, its useful to know, via logs/metrics, when gossip is received/ignored/rejected.
|
||||
//
|
||||
// - Gossip type conversion - Gossip validation functions operate on messages of binary data.
|
||||
// This data must be deserialized into the proper type, determined by the topic (fork digest)
|
||||
// This deserialization must have happened prior to the topic validator running.
|
||||
|
||||
export function createValidatorFnsByTopic(
|
||||
modules: IObjectValidatorModules,
|
||||
validatorFnsByType: {[K in GossipType]: TopicValidatorFn}
|
||||
): TopicValidatorFnMap {
|
||||
const validatorFnsByTopic = new Map<string, TopicValidatorFn>();
|
||||
const staticGossipTypes: GossipType[] = [
|
||||
GossipType.beacon_block,
|
||||
GossipType.beacon_aggregate_and_proof,
|
||||
GossipType.voluntary_exit,
|
||||
GossipType.proposer_slashing,
|
||||
GossipType.attester_slashing,
|
||||
];
|
||||
|
||||
// TODO: other fork topics should get added here
|
||||
// phase0
|
||||
const fork = ForkName.phase0;
|
||||
|
||||
for (const type of staticGossipTypes) {
|
||||
const topic = {type, fork, encoding: DEFAULT_ENCODING} as GossipTopic;
|
||||
const topicString = stringifyGossipTopic(modules.chain.forkDigestContext, topic);
|
||||
validatorFnsByTopic.set(topicString, validatorFnsByType[type]);
|
||||
}
|
||||
|
||||
// create an entry for every committee subnet - phase0
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
const topic = {
|
||||
type: GossipType.beacon_attestation,
|
||||
fork,
|
||||
encoding: DEFAULT_ENCODING,
|
||||
subnet,
|
||||
} as GossipTopic;
|
||||
const topicString = stringifyGossipTopic(modules.chain.forkDigestContext, topic);
|
||||
const topicValidatorFn = validatorFnsByType[GossipType.beacon_attestation];
|
||||
validatorFnsByTopic.set(topicString, topicValidatorFn);
|
||||
}
|
||||
|
||||
return validatorFnsByTopic;
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {phase0} from "@chainsafe/lodestar-types";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {validateGossipAggregateAndProof} from "../../../chain/validation";
|
||||
import {AttestationError, AttestationErrorCode} from "../../../chain/errors";
|
||||
import {IObjectValidatorModules, GossipTopic} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
import {OpSource} from "../../../metrics/validatorMonitor";
|
||||
|
||||
export async function validateAggregatedAttestation(
|
||||
{chain, logger, metrics}: IObjectValidatorModules,
|
||||
_topic: GossipTopic,
|
||||
signedAggregateAndProof: phase0.SignedAggregateAndProof
|
||||
): Promise<void> {
|
||||
const seenTimestampSec = Date.now() / 1000;
|
||||
|
||||
try {
|
||||
const indexedAtt = await validateGossipAggregateAndProof(chain, signedAggregateAndProof);
|
||||
logger.debug("gossip - AggregateAndProof - accept");
|
||||
|
||||
metrics?.registerAggregatedAttestation(OpSource.gossip, seenTimestampSec, signedAggregateAndProof, indexedAtt);
|
||||
} catch (e) {
|
||||
if (!(e instanceof AttestationError)) {
|
||||
logger.error("Gossip aggregate and proof validation threw a non-AttestationError", e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
// TODO: Add DoS resistant pending attestation pool
|
||||
// switch (e.type.code) {
|
||||
// case AttestationErrorCode.FUTURE_SLOT:
|
||||
// chain.pendingAttestations.putBySlot(e.type.attestationSlot, attestation);
|
||||
// break;
|
||||
// case AttestationErrorCode.UNKNOWN_TARGET_ROOT:
|
||||
// case AttestationErrorCode.UNKNOWN_BEACON_BLOCK_ROOT:
|
||||
// chain.pendingAttestations.putByBlock(e.type.root, attestation);
|
||||
// break;
|
||||
// }
|
||||
|
||||
switch (e.type.code) {
|
||||
case AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS:
|
||||
case AttestationErrorCode.KNOWN_BAD_BLOCK:
|
||||
case AttestationErrorCode.AGGREGATOR_NOT_IN_COMMITTEE:
|
||||
case AttestationErrorCode.INVALID_SIGNATURE:
|
||||
case AttestationErrorCode.INVALID_AGGREGATOR:
|
||||
case AttestationErrorCode.INVALID_INDEXED_ATTESTATION:
|
||||
logger.debug("gossip - AggregateAndProof - reject", e.type);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
|
||||
case AttestationErrorCode.FUTURE_SLOT: // IGNORE
|
||||
case AttestationErrorCode.PAST_SLOT:
|
||||
case AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN:
|
||||
case AttestationErrorCode.MISSING_ATTESTATION_TARGET_STATE:
|
||||
default:
|
||||
logger.debug("gossip - AggregateAndProof - ignore", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {phase0} from "@chainsafe/lodestar-types";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {validateGossipAttestation} from "../../../chain/validation";
|
||||
import {AttestationError, AttestationErrorCode} from "../../../chain/errors";
|
||||
import {IObjectValidatorModules, GossipTopicMap, GossipType} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
import {OpSource} from "../../../metrics/validatorMonitor";
|
||||
|
||||
export async function validateCommitteeAttestation(
|
||||
{chain, logger, metrics}: IObjectValidatorModules,
|
||||
topic: GossipTopicMap[GossipType.beacon_attestation],
|
||||
attestation: phase0.Attestation
|
||||
): Promise<void> {
|
||||
const seenTimestampSec = Date.now() / 1000;
|
||||
const subnet = topic.subnet;
|
||||
|
||||
try {
|
||||
const {indexedAttestation} = await validateGossipAttestation(chain, attestation, subnet);
|
||||
logger.debug("gossip - Attestation - accept", {subnet});
|
||||
|
||||
metrics?.registerUnaggregatedAttestation(OpSource.gossip, seenTimestampSec, indexedAttestation);
|
||||
} catch (e) {
|
||||
if (!(e instanceof AttestationError)) {
|
||||
logger.error("Gossip attestation validation threw a non-AttestationError", e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
// TODO: Add DoS resistant pending attestation pool
|
||||
// switch (e.type.code) {
|
||||
// case AttestationErrorCode.FUTURE_SLOT:
|
||||
// chain.pendingAttestations.putBySlot(e.type.attestationSlot, attestation);
|
||||
// break;
|
||||
// case AttestationErrorCode.UNKNOWN_TARGET_ROOT:
|
||||
// case AttestationErrorCode.UNKNOWN_BEACON_BLOCK_ROOT:
|
||||
// chain.pendingAttestations.putByBlock(e.type.root, attestation);
|
||||
// break;
|
||||
// }
|
||||
|
||||
switch (e.type.code) {
|
||||
case AttestationErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE:
|
||||
case AttestationErrorCode.INVALID_SUBNET_ID:
|
||||
case AttestationErrorCode.BAD_TARGET_EPOCH:
|
||||
case AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET:
|
||||
case AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS:
|
||||
case AttestationErrorCode.INVALID_SIGNATURE:
|
||||
case AttestationErrorCode.KNOWN_BAD_BLOCK:
|
||||
case AttestationErrorCode.TARGET_BLOCK_NOT_AN_ANCESTOR_OF_LMD_BLOCK:
|
||||
logger.debug("gossip - Attestation - reject", e.type);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
|
||||
case AttestationErrorCode.PAST_SLOT:
|
||||
case AttestationErrorCode.FUTURE_SLOT:
|
||||
case AttestationErrorCode.ATTESTATION_ALREADY_KNOWN:
|
||||
default:
|
||||
logger.debug("gossip - Attestation - ignore", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {phase0} from "@chainsafe/lodestar-types";
|
||||
import {validateGossipAttesterSlashing} from "../../../chain/validation";
|
||||
import {AttesterSlashingError, AttesterSlashingErrorCode} from "../../../chain/errors";
|
||||
import {IObjectValidatorModules, GossipTopic} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
|
||||
export async function validateAttesterSlashing(
|
||||
{chain, db, logger}: IObjectValidatorModules,
|
||||
_topic: GossipTopic,
|
||||
attesterSlashing: phase0.AttesterSlashing
|
||||
): Promise<void> {
|
||||
try {
|
||||
await validateGossipAttesterSlashing(chain, db, attesterSlashing);
|
||||
logger.debug("gossip - AttesterSlashing - accept");
|
||||
} catch (e) {
|
||||
if (!(e instanceof AttesterSlashingError)) {
|
||||
logger.error("Gossip attester slashing validation threw a non-AttesterSlashingError", e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
switch (e.type.code) {
|
||||
case AttesterSlashingErrorCode.INVALID:
|
||||
logger.debug("gossip - AttesterSlashing - reject", (e.type as unknown) as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
|
||||
case AttesterSlashingErrorCode.ALREADY_EXISTS:
|
||||
default:
|
||||
logger.debug("gossip - AttesterSlashing - ignore", e.type);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,55 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {allForks} from "@chainsafe/lodestar-types";
|
||||
import {Json, toHexString} from "@chainsafe/ssz";
|
||||
import {validateGossipBlock} from "../../../chain/validation";
|
||||
import {BlockError, BlockErrorCode} from "../../../chain/errors";
|
||||
import {IObjectValidatorModules, GossipTopic} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
import {OpSource} from "../../../metrics/validatorMonitor";
|
||||
|
||||
export async function validateBeaconBlock(
|
||||
{chain, db, config, logger, metrics}: IObjectValidatorModules,
|
||||
_topic: GossipTopic,
|
||||
signedBlock: allForks.SignedBeaconBlock
|
||||
): Promise<void> {
|
||||
const seenTimestampSec = Date.now() / 1000;
|
||||
|
||||
try {
|
||||
await validateGossipBlock(config, chain, db, {
|
||||
signedBlock,
|
||||
reprocess: false,
|
||||
prefinalized: false,
|
||||
validSignatures: false,
|
||||
validProposerSignature: false,
|
||||
});
|
||||
logger.debug("gossip - Block - accept", {
|
||||
root: toHexString(config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message)),
|
||||
slot: signedBlock.message.slot,
|
||||
});
|
||||
|
||||
metrics?.registerBeaconBlock(OpSource.api, seenTimestampSec, signedBlock.message);
|
||||
} catch (e) {
|
||||
if (!(e instanceof BlockError)) {
|
||||
logger.error("Gossip block validation threw a non-BlockError", e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
switch (e.type.code) {
|
||||
case BlockErrorCode.PROPOSAL_SIGNATURE_INVALID:
|
||||
case BlockErrorCode.INCORRECT_PROPOSER:
|
||||
case BlockErrorCode.KNOWN_BAD_BLOCK:
|
||||
logger.debug("gossip - Block - reject", e.type);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
|
||||
case BlockErrorCode.FUTURE_SLOT:
|
||||
case BlockErrorCode.PARENT_UNKNOWN: // IGNORE
|
||||
chain.receiveBlock(signedBlock);
|
||||
/** eslit-disable-next-line no-fallthrough */
|
||||
case BlockErrorCode.WOULD_REVERT_FINALIZED_SLOT:
|
||||
case BlockErrorCode.REPEAT_PROPOSAL:
|
||||
default:
|
||||
logger.debug("gossip - Block - ignore", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
import {GossipType} from "../interface";
|
||||
import {validateAggregatedAttestation} from "./aggregatedAttestation";
|
||||
import {validateCommitteeAttestation} from "./attestation";
|
||||
import {validateAttesterSlashing} from "./attesterSlashing";
|
||||
import {validateBeaconBlock} from "./block";
|
||||
import {validateProposerSlashing} from "./proposerSlashing";
|
||||
import {validateSyncCommitteeContribution} from "./syncCommitteeContribution";
|
||||
import {validateSyncCommittee} from "./syncCommittee";
|
||||
import {validateVoluntaryExit} from "./voluntaryExit";
|
||||
|
||||
/**
 * Map from gossip topic type to its object-validator function.
 * Must cover every member of GossipType so each subscribed topic has a validator.
 */
export const validatorFns = {
  [GossipType.beacon_block]: validateBeaconBlock,
  [GossipType.beacon_aggregate_and_proof]: validateAggregatedAttestation,
  [GossipType.beacon_attestation]: validateCommitteeAttestation,
  [GossipType.voluntary_exit]: validateVoluntaryExit,
  [GossipType.proposer_slashing]: validateProposerSlashing,
  [GossipType.attester_slashing]: validateAttesterSlashing,
  [GossipType.sync_committee_contribution_and_proof]: validateSyncCommitteeContribution,
  [GossipType.sync_committee]: validateSyncCommittee,
};
|
||||
@@ -1,34 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {phase0} from "@chainsafe/lodestar-types";
|
||||
import {validateGossipProposerSlashing} from "../../../chain/validation";
|
||||
import {ProposerSlashingError, ProposerSlashingErrorCode} from "../../../chain/errors";
|
||||
import {IObjectValidatorModules, GossipTopic} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
|
||||
export async function validateProposerSlashing(
|
||||
{chain, db, logger}: IObjectValidatorModules,
|
||||
_topic: GossipTopic,
|
||||
proposerSlashing: phase0.ProposerSlashing
|
||||
): Promise<void> {
|
||||
try {
|
||||
await validateGossipProposerSlashing(chain, db, proposerSlashing);
|
||||
logger.debug("gossip - ProposerSlashing - accept");
|
||||
} catch (e) {
|
||||
if (!(e instanceof ProposerSlashingError)) {
|
||||
logger.error("Gossip proposer slashing validation threw a non-ProposerSlashingError", e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
switch (e.type.code) {
|
||||
case ProposerSlashingErrorCode.INVALID:
|
||||
logger.debug("gossip - ProposerSlashing - reject", (e.type as unknown) as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
|
||||
case ProposerSlashingErrorCode.ALREADY_EXISTS:
|
||||
default:
|
||||
logger.debug("gossip - ProposerSlashing - ignore", e.type);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
import {altair} from "@chainsafe/lodestar-types";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {GossipType, GossipValidationError} from "..";
|
||||
import {GossipAction, ISyncCommitteeJob, SyncCommitteeError} from "../../../chain/errors";
|
||||
import {validateGossipSyncCommittee} from "../../../chain/validation/syncCommittee";
|
||||
import {GossipTopicMap, IObjectValidatorModules} from "../interface";
|
||||
|
||||
/**
|
||||
* Validate messages from `sync_committee_{subnet_id}` channels
|
||||
*/
|
||||
export async function validateSyncCommittee(
|
||||
{chain, db, logger}: IObjectValidatorModules,
|
||||
{subnet}: GossipTopicMap[GossipType.sync_committee],
|
||||
syncCommittee: altair.SyncCommitteeMessage
|
||||
): Promise<void> {
|
||||
const metadata = {subnet, slot: syncCommittee.slot};
|
||||
try {
|
||||
const syncCommitteeJob: ISyncCommitteeJob = {signature: syncCommittee, validSignature: false};
|
||||
await validateGossipSyncCommittee(chain, db, syncCommitteeJob, subnet);
|
||||
logger.debug("gossip - sync_committee - accept", metadata);
|
||||
} catch (e) {
|
||||
if (!(e instanceof SyncCommitteeError)) {
|
||||
logger.error("gossip - sync_committee - non-SyncCommitteeError", metadata, e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
if (e.action === GossipAction.REJECT) {
|
||||
logger.debug("gossip - sync_committee - reject", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
} else {
|
||||
logger.debug("gossip - sync_committee - ignore", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
import {altair} from "@chainsafe/lodestar-types";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {GossipValidationError} from "../errors";
|
||||
import {GossipAction, IContributionAndProofJob, SyncCommitteeError} from "../../../chain/errors";
|
||||
import {validateSyncCommitteeGossipContributionAndProof} from "../../../chain/validation/syncCommitteeContributionAndProof";
|
||||
import {GossipTopic, IObjectValidatorModules} from "../interface";
|
||||
|
||||
/**
|
||||
* Validate messages from `sync_committee_contribution_and_proof`
|
||||
*/
|
||||
export async function validateSyncCommitteeContribution(
|
||||
{chain, db, logger}: IObjectValidatorModules,
|
||||
_topic: GossipTopic,
|
||||
contributionAndProof: altair.SignedContributionAndProof
|
||||
): Promise<void> {
|
||||
const metadata = {slot: contributionAndProof.message.contribution.slot};
|
||||
try {
|
||||
const contributionAndProofJob: IContributionAndProofJob = {contributionAndProof, validSignature: false};
|
||||
await validateSyncCommitteeGossipContributionAndProof(chain, db, contributionAndProofJob);
|
||||
logger.debug("gossip - sync_committee_contribution_and_proof - accept", metadata);
|
||||
} catch (e) {
|
||||
if (!(e instanceof SyncCommitteeError)) {
|
||||
logger.error("gossip - sync_committee_contribution_and_proof - non-SyncCommitteeError", metadata, e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
if (e.action === GossipAction.REJECT) {
|
||||
logger.debug("gossip - sync_committee_contribution_and_proof - reject", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
} else {
|
||||
logger.debug("gossip - sync_committee_contribution_and_proof - ignore", e.type as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {Json} from "@chainsafe/ssz";
|
||||
import {phase0} from "@chainsafe/lodestar-types";
|
||||
import {validateGossipVoluntaryExit} from "../../../chain/validation";
|
||||
import {VoluntaryExitError, VoluntaryExitErrorCode} from "../../../chain/errors";
|
||||
import {IObjectValidatorModules, GossipTopic} from "../interface";
|
||||
import {GossipValidationError} from "../errors";
|
||||
|
||||
export async function validateVoluntaryExit(
|
||||
{chain, db, logger}: IObjectValidatorModules,
|
||||
_topic: GossipTopic,
|
||||
voluntaryExit: phase0.SignedVoluntaryExit
|
||||
): Promise<void> {
|
||||
try {
|
||||
await validateGossipVoluntaryExit(chain, db, voluntaryExit);
|
||||
logger.debug("gossip - VoluntaryExit - accept");
|
||||
} catch (e) {
|
||||
if (!(e instanceof VoluntaryExitError)) {
|
||||
logger.error("Gossip voluntary exit validation threw a non-VoluntaryExitError", e);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
|
||||
switch (e.type.code) {
|
||||
case VoluntaryExitErrorCode.INVALID:
|
||||
logger.debug("gossip - VoluntaryExit - reject", (e.type as unknown) as Json);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
|
||||
case VoluntaryExitErrorCode.ALREADY_EXISTS:
|
||||
default:
|
||||
logger.debug("gossip - VoluntaryExit - ignore", e.type);
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,8 @@
|
||||
/**
|
||||
* @module network
|
||||
*/
|
||||
export * from "./events";
|
||||
export * from "./interface";
|
||||
export * from "./network";
|
||||
export * from "./nodejs";
|
||||
export * from "./gossip";
|
||||
export * from "./reqresp";
|
||||
export * from "./util";
|
||||
export * from "./peers";
|
||||
|
||||
@@ -47,6 +47,7 @@ export interface INetwork {
|
||||
// Service
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
close(): void;
|
||||
|
||||
// Debug
|
||||
connectToPeer(peer: PeerId, multiaddr: Multiaddr[]): Promise<void>;
|
||||
|
||||
@@ -8,22 +8,24 @@ import Multiaddr from "multiaddr";
|
||||
import {AbortSignal} from "@chainsafe/abort-controller";
|
||||
import {IBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {Discv5Discovery, ENR} from "@chainsafe/discv5";
|
||||
import {computeEpochAtSlot} from "@chainsafe/lodestar-beacon-state-transition";
|
||||
import {Epoch} from "@chainsafe/lodestar-types";
|
||||
import {IMetrics} from "../metrics";
|
||||
import {ReqResp, IReqResp, IReqRespOptions} from "./reqresp";
|
||||
import {ChainEvent, IBeaconChain, IBeaconClock} from "../chain";
|
||||
import {IBeaconDb} from "../db";
|
||||
import {INetworkOptions} from "./options";
|
||||
import {INetwork} from "./interface";
|
||||
import {IBeaconChain, IBeaconClock} from "../chain";
|
||||
import {ReqResp, IReqResp, IReqRespOptions, ReqRespHandlers} from "./reqresp";
|
||||
import {Eth2Gossipsub, GossipType, GossipHandlers} from "./gossip";
|
||||
import {MetadataController} from "./metadata";
|
||||
import {Discv5Discovery, ENR} from "@chainsafe/discv5";
|
||||
import {getActiveForks, getCurrentAndNextFork, FORK_EPOCH_LOOKAHEAD} from "./forks";
|
||||
import {IPeerMetadataStore, Libp2pPeerMetadataStore} from "./peers/metastore";
|
||||
import {PeerManager} from "./peers/peerManager";
|
||||
import {IPeerRpcScoreStore, PeerRpcScoreStore} from "./peers";
|
||||
import {IBeaconDb} from "../db";
|
||||
import {createTopicValidatorFnMap, Eth2Gossipsub} from "./gossip";
|
||||
import {IReqRespHandler} from "./reqresp/handlers";
|
||||
import {INetworkEventBus, NetworkEventBus} from "./events";
|
||||
import {AttnetsService, SyncnetsService, CommitteeSubscription} from "./subnets";
|
||||
import {GossipHandler} from "./gossip/handler";
|
||||
|
||||
interface INetworkModules {
|
||||
config: IBeaconConfig;
|
||||
@@ -32,7 +34,8 @@ interface INetworkModules {
|
||||
metrics: IMetrics | null;
|
||||
chain: IBeaconChain;
|
||||
db: IBeaconDb;
|
||||
reqRespHandler: IReqRespHandler;
|
||||
reqRespHandlers: ReqRespHandlers;
|
||||
gossipHandlers: GossipHandlers;
|
||||
signal: AbortSignal;
|
||||
}
|
||||
|
||||
@@ -46,19 +49,22 @@ export class Network implements INetwork {
|
||||
peerMetadata: IPeerMetadataStore;
|
||||
peerRpcScores: IPeerRpcScoreStore;
|
||||
|
||||
private readonly gossipHandler: GossipHandler;
|
||||
private readonly peerManager: PeerManager;
|
||||
private readonly libp2p: LibP2p;
|
||||
private readonly logger: ILogger;
|
||||
private readonly config: IBeaconConfig;
|
||||
private readonly clock: IBeaconClock;
|
||||
private readonly chain: IBeaconChain;
|
||||
|
||||
private subscribedForks = new Set<ForkName>();
|
||||
|
||||
constructor(opts: INetworkOptions & IReqRespOptions, modules: INetworkModules) {
|
||||
const {config, libp2p, logger, metrics, chain, db, reqRespHandler, signal} = modules;
|
||||
const {config, libp2p, logger, metrics, chain, reqRespHandlers, gossipHandlers, signal} = modules;
|
||||
this.libp2p = libp2p;
|
||||
this.logger = logger;
|
||||
this.config = config;
|
||||
this.clock = chain.clock;
|
||||
this.chain = chain;
|
||||
const networkEventBus = new NetworkEventBus();
|
||||
const metadata = new MetadataController({}, {config, chain, logger});
|
||||
const peerMetadata = new Libp2pPeerMetadataStore(libp2p.peerStore.metadataBook);
|
||||
@@ -72,7 +78,7 @@ export class Network implements INetwork {
|
||||
config,
|
||||
libp2p,
|
||||
forkDigestContext: chain.forkDigestContext,
|
||||
reqRespHandler,
|
||||
reqRespHandlers,
|
||||
peerMetadata,
|
||||
metadata,
|
||||
peerRpcScores,
|
||||
@@ -82,17 +88,20 @@ export class Network implements INetwork {
|
||||
},
|
||||
opts
|
||||
);
|
||||
|
||||
this.gossip = new Eth2Gossipsub({
|
||||
config,
|
||||
libp2p,
|
||||
validatorFns: createTopicValidatorFnMap({config, chain, db, logger, metrics}, metrics, signal),
|
||||
logger,
|
||||
forkDigestContext: chain.forkDigestContext,
|
||||
metrics,
|
||||
signal,
|
||||
gossipHandlers,
|
||||
forkDigestContext: chain.forkDigestContext,
|
||||
});
|
||||
|
||||
this.attnetsService = new AttnetsService(config, chain, this.gossip, metadata, logger);
|
||||
this.syncnetsService = new SyncnetsService(config, chain, this.gossip, metadata, logger);
|
||||
|
||||
this.peerManager = new PeerManager(
|
||||
{
|
||||
libp2p,
|
||||
@@ -110,12 +119,13 @@ export class Network implements INetwork {
|
||||
opts
|
||||
);
|
||||
|
||||
this.gossipHandler = new GossipHandler(config, chain, this.gossip, this.attnetsService, db, logger);
|
||||
this.chain.emitter.on(ChainEvent.clockEpoch, this.onEpoch);
|
||||
modules.signal.addEventListener("abort", () => this.close(), {once: true});
|
||||
}
|
||||
|
||||
/** Destroy this instance. Can only be called once. */
|
||||
close(): void {
|
||||
this.gossipHandler.close();
|
||||
this.chain.emitter.off(ChainEvent.clockEpoch, this.onEpoch);
|
||||
}
|
||||
|
||||
async start(): Promise<void> {
|
||||
@@ -133,7 +143,6 @@ export class Network implements INetwork {
|
||||
async stop(): Promise<void> {
|
||||
// Must goodbye and disconnect before stopping libp2p
|
||||
await this.peerManager.goodbyeAndDisconnectAllPeers();
|
||||
this.gossipHandler.close();
|
||||
this.peerManager.stop();
|
||||
this.metadata.stop();
|
||||
this.gossip.stop();
|
||||
@@ -189,16 +198,31 @@ export class Network implements INetwork {
|
||||
this.peerManager.reStatusPeers(peers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to all gossip events. Safe to call multiple times
|
||||
*/
|
||||
subscribeGossipCoreTopics(): void {
|
||||
this.gossipHandler.subscribeCoreTopics();
|
||||
if (!this.isSubscribedToGossipCoreTopics()) {
|
||||
this.logger.info("Subscribed gossip core topics");
|
||||
}
|
||||
|
||||
const currentEpoch = computeEpochAtSlot(this.chain.forkChoice.getHead().slot);
|
||||
for (const fork of getActiveForks(this.config, currentEpoch)) {
|
||||
this.subscribeCoreTopicsAtFork(fork);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsubscribe from all gossip events. Safe to call multiple times
|
||||
*/
|
||||
unsubscribeGossipCoreTopics(): void {
|
||||
this.gossipHandler.unsubscribeCoreTopics();
|
||||
for (const fork of this.subscribedForks.values()) {
|
||||
this.unsubscribeCoreTopicsAtFork(fork);
|
||||
}
|
||||
}
|
||||
|
||||
isSubscribedToGossipCoreTopics(): boolean {
|
||||
return this.gossipHandler.isSubscribedToCoreTopics;
|
||||
return this.subscribedForks.size > 0;
|
||||
}
|
||||
|
||||
// Debug
|
||||
@@ -211,4 +235,68 @@ export class Network implements INetwork {
|
||||
async disconnectPeer(peer: PeerId): Promise<void> {
|
||||
await this.libp2p.hangUp(peer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle subscriptions through fork transitions, @see FORK_EPOCH_LOOKAHEAD
|
||||
*/
|
||||
private onEpoch = (epoch: Epoch): void => {
|
||||
try {
|
||||
// Compute prev and next fork shifted, so next fork is still next at forkEpoch + FORK_EPOCH_LOOKAHEAD
|
||||
const forks = getCurrentAndNextFork(this.config, epoch - FORK_EPOCH_LOOKAHEAD - 1);
|
||||
|
||||
// Only when a new fork is scheduled
|
||||
if (forks.nextFork) {
|
||||
const prevFork = forks.currentFork.name;
|
||||
const nextFork = forks.nextFork.name;
|
||||
const forkEpoch = forks.nextFork.epoch;
|
||||
|
||||
// Before fork transition
|
||||
if (epoch === forkEpoch - FORK_EPOCH_LOOKAHEAD) {
|
||||
this.logger.info("Suscribing gossip topics to next fork", {nextFork});
|
||||
// Don't subscribe to new fork if the node is not subscribed to any topic
|
||||
if (this.isSubscribedToGossipCoreTopics()) this.subscribeCoreTopicsAtFork(nextFork);
|
||||
this.attnetsService.subscribeSubnetsToNextFork(nextFork);
|
||||
this.syncnetsService.subscribeSubnetsToNextFork(nextFork);
|
||||
}
|
||||
|
||||
// After fork transition
|
||||
if (epoch === forkEpoch + FORK_EPOCH_LOOKAHEAD) {
|
||||
this.logger.info("Unsuscribing gossip topics from prev fork", {prevFork});
|
||||
this.unsubscribeCoreTopicsAtFork(prevFork);
|
||||
this.attnetsService.unsubscribeSubnetsFromPrevFork(prevFork);
|
||||
this.syncnetsService.unsubscribeSubnetsFromPrevFork(prevFork);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
this.logger.error("Error on BeaconGossipHandler.onEpoch", {epoch}, e);
|
||||
}
|
||||
};
|
||||
|
||||
private subscribeCoreTopicsAtFork = (fork: ForkName): void => {
|
||||
if (this.subscribedForks.has(fork)) return;
|
||||
this.subscribedForks.add(fork);
|
||||
|
||||
this.gossip.subscribeTopic({type: GossipType.beacon_block, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.beacon_aggregate_and_proof, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.voluntary_exit, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.proposer_slashing, fork});
|
||||
this.gossip.subscribeTopic({type: GossipType.attester_slashing, fork});
|
||||
if (fork === ForkName.altair) {
|
||||
this.gossip.subscribeTopic({type: GossipType.sync_committee_contribution_and_proof, fork});
|
||||
}
|
||||
};
|
||||
|
||||
private unsubscribeCoreTopicsAtFork = (fork: ForkName): void => {
|
||||
if (!this.subscribedForks.has(fork)) return;
|
||||
this.subscribedForks.delete(fork);
|
||||
|
||||
this.gossip.unsubscribeTopic({type: GossipType.beacon_block, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.beacon_aggregate_and_proof, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.voluntary_exit, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.proposer_slashing, fork});
|
||||
this.gossip.unsubscribeTopic({type: GossipType.attester_slashing, fork});
|
||||
if (fork === ForkName.altair) {
|
||||
this.gossip.unsubscribeTopic({type: GossipType.sync_committee_contribution_and_proof, fork});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -4,34 +4,28 @@ import {IBeaconDb} from "../../../db";
|
||||
import {onBeaconBlocksByRange} from "./beaconBlocksByRange";
|
||||
import {onBeaconBlocksByRoot} from "./beaconBlocksByRoot";
|
||||
|
||||
export interface IReqRespHandler {
|
||||
export type ReqRespHandlers = {
|
||||
onStatus(): AsyncIterable<phase0.Status>;
|
||||
onBeaconBlocksByRange(req: phase0.BeaconBlocksByRangeRequest): AsyncIterable<allForks.SignedBeaconBlock>;
|
||||
onBeaconBlocksByRoot(req: phase0.BeaconBlocksByRootRequest): AsyncIterable<allForks.SignedBeaconBlock>;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* The ReqRespHandler module handles app-level requests / responses from other peers,
|
||||
* fetching state from the chain and database as needed.
|
||||
*/
|
||||
export class ReqRespHandler implements IReqRespHandler {
|
||||
private db: IBeaconDb;
|
||||
private chain: IBeaconChain;
|
||||
export function getReqRespHandlers({db, chain}: {db: IBeaconDb; chain: IBeaconChain}): ReqRespHandlers {
|
||||
return {
|
||||
async *onStatus() {
|
||||
yield chain.getStatus();
|
||||
},
|
||||
|
||||
constructor({db, chain}: {db: IBeaconDb; chain: IBeaconChain}) {
|
||||
this.db = db;
|
||||
this.chain = chain;
|
||||
}
|
||||
async *onBeaconBlocksByRange(req) {
|
||||
yield* onBeaconBlocksByRange(req, chain, db);
|
||||
},
|
||||
|
||||
async *onStatus(): AsyncIterable<phase0.Status> {
|
||||
yield this.chain.getStatus();
|
||||
}
|
||||
|
||||
async *onBeaconBlocksByRange(req: phase0.BeaconBlocksByRangeRequest): AsyncIterable<allForks.SignedBeaconBlock> {
|
||||
yield* onBeaconBlocksByRange(req, this.chain, this.db);
|
||||
}
|
||||
|
||||
async *onBeaconBlocksByRoot(req: phase0.BeaconBlocksByRootRequest): AsyncIterable<allForks.SignedBeaconBlock> {
|
||||
yield* onBeaconBlocksByRoot(req, this.chain, this.db);
|
||||
}
|
||||
async *onBeaconBlocksByRoot(req) {
|
||||
yield* onBeaconBlocksByRoot(req, chain, db);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export {ReqResp, IReqRespOptions} from "./reqResp";
|
||||
export {IReqRespHandler, ReqRespHandler} from "./handlers";
|
||||
export {ReqRespHandlers, getReqRespHandlers} from "./handlers";
|
||||
export * from "./interface";
|
||||
export {RequestTypedContainer} from "./types"; // To type-safe reqResp event listeners
|
||||
export {Encoding as ReqRespEncoding, Method as ReqRespMethod} from "./types"; // Expose enums renamed
|
||||
|
||||
@@ -8,7 +8,7 @@ import {IForkDigestContext} from "../../util/forkDigestContext";
|
||||
import {IPeerMetadataStore, IPeerRpcScoreStore} from "../peers";
|
||||
import {MetadataController} from "../metadata";
|
||||
import {INetworkEventBus} from "../events";
|
||||
import {IReqRespHandler} from "./handlers";
|
||||
import {ReqRespHandlers} from "./handlers";
|
||||
import {IMetrics} from "../../metrics";
|
||||
|
||||
export interface IReqResp {
|
||||
@@ -31,7 +31,7 @@ export interface IReqRespModules {
|
||||
logger: ILogger;
|
||||
forkDigestContext: IForkDigestContext;
|
||||
metadata: MetadataController;
|
||||
reqRespHandler: IReqRespHandler;
|
||||
reqRespHandlers: ReqRespHandlers;
|
||||
peerMetadata: IPeerMetadataStore;
|
||||
peerRpcScores: IPeerRpcScoreStore;
|
||||
networkEventBus: INetworkEventBus;
|
||||
|
||||
@@ -19,7 +19,7 @@ import {IPeerMetadataStore, IPeerRpcScoreStore} from "../peers";
|
||||
import {assertSequentialBlocksInRange, formatProtocolId} from "./utils";
|
||||
import {MetadataController} from "../metadata";
|
||||
import {INetworkEventBus, NetworkEvent} from "../events";
|
||||
import {IReqRespHandler} from "./handlers";
|
||||
import {ReqRespHandlers} from "./handlers";
|
||||
import {IMetrics} from "../../metrics";
|
||||
import {RequestError, RequestErrorCode} from "./request";
|
||||
import {
|
||||
@@ -45,7 +45,7 @@ export class ReqResp implements IReqResp {
|
||||
private libp2p: LibP2p;
|
||||
private logger: ILogger;
|
||||
private forkDigestContext: IForkDigestContext;
|
||||
private reqRespHandler: IReqRespHandler;
|
||||
private reqRespHandlers: ReqRespHandlers;
|
||||
private metadataController: MetadataController;
|
||||
private peerMetadata: IPeerMetadataStore;
|
||||
private peerRpcScores: IPeerRpcScoreStore;
|
||||
@@ -61,7 +61,7 @@ export class ReqResp implements IReqResp {
|
||||
this.libp2p = modules.libp2p;
|
||||
this.logger = modules.logger;
|
||||
this.forkDigestContext = modules.forkDigestContext;
|
||||
this.reqRespHandler = modules.reqRespHandler;
|
||||
this.reqRespHandlers = modules.reqRespHandlers;
|
||||
this.peerMetadata = modules.peerMetadata;
|
||||
this.metadataController = modules.metadata;
|
||||
this.peerRpcScores = modules.peerRpcScores;
|
||||
@@ -226,13 +226,13 @@ export class ReqResp implements IReqResp {
|
||||
// Don't bubble Ping, Metadata, and, Goodbye requests to the app layer
|
||||
|
||||
case Method.Status:
|
||||
yield* this.reqRespHandler.onStatus();
|
||||
yield* this.reqRespHandlers.onStatus();
|
||||
break;
|
||||
case Method.BeaconBlocksByRange:
|
||||
yield* this.reqRespHandler.onBeaconBlocksByRange(requestTyped.body);
|
||||
yield* this.reqRespHandlers.onBeaconBlocksByRange(requestTyped.body);
|
||||
break;
|
||||
case Method.BeaconBlocksByRoot:
|
||||
yield* this.reqRespHandler.onBeaconBlocksByRoot(requestTyped.body);
|
||||
yield* this.reqRespHandlers.onBeaconBlocksByRoot(requestTyped.body);
|
||||
break;
|
||||
|
||||
default:
|
||||
|
||||
@@ -3,6 +3,7 @@ import {IChainForkConfig} from "@chainsafe/lodestar-config";
|
||||
import {
|
||||
ATTESTATION_SUBNET_COUNT,
|
||||
EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION,
|
||||
ForkName,
|
||||
RANDOM_SUBNETS_PER_VALIDATOR,
|
||||
SLOTS_PER_EPOCH,
|
||||
} from "@chainsafe/lodestar-params";
|
||||
@@ -13,7 +14,7 @@ import {ChainEvent, IBeaconChain} from "../../chain";
|
||||
import {Eth2Gossipsub, GossipType} from "../gossip";
|
||||
import {MetadataController} from "../metadata";
|
||||
import {SubnetMap, RequestedSubnet} from "../peers/utils";
|
||||
import {getActiveForks, runForkTransitionHooks} from "../forks";
|
||||
import {getActiveForks} from "../forks";
|
||||
import {IAttnetsService, CommitteeSubscription} from "./interface";
|
||||
|
||||
/**
|
||||
@@ -113,6 +114,22 @@ export class AttnetsService implements IAttnetsService {
|
||||
return this.subscriptionsCommittee.isActiveAtSlot(subnet, slot);
|
||||
}
|
||||
|
||||
/** Call ONLY ONCE: Two epoch before the fork, re-subscribe all existing random subscriptions to the new fork */
|
||||
subscribeSubnetsToNextFork(nextFork: ForkName): void {
|
||||
this.logger.info("Suscribing to random attnets to next fork", {nextFork});
|
||||
for (const subnet of this.subscriptionsRandom.getAll()) {
|
||||
this.gossip.subscribeTopic({type: gossipType, fork: nextFork, subnet});
|
||||
}
|
||||
}
|
||||
|
||||
/** Call ONLY ONCE: Two epochs after the fork, un-subscribe all subnets from the old fork */
|
||||
unsubscribeSubnetsFromPrevFork(prevFork: ForkName): void {
|
||||
this.logger.info("Unsuscribing to random attnets from prev fork", {prevFork});
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
this.gossip.unsubscribeTopic({type: gossipType, fork: prevFork, subnet});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run per slot.
|
||||
*/
|
||||
@@ -132,23 +149,6 @@ export class AttnetsService implements IAttnetsService {
|
||||
const slot = computeStartSlotAtEpoch(epoch);
|
||||
this.unsubscribeExpiredRandomSubnets(slot);
|
||||
this.pruneExpiredKnownValidators(slot);
|
||||
|
||||
runForkTransitionHooks(this.config, epoch, {
|
||||
beforeForkTransition: (nextFork) => {
|
||||
this.logger.info("Suscribing to random attnets to next fork", {nextFork});
|
||||
// ONLY ONCE: Two epoch before the fork, re-subscribe all existing random subscriptions to the new fork
|
||||
for (const subnet of this.subscriptionsRandom.getAll()) {
|
||||
this.gossip.subscribeTopic({type: gossipType, fork: nextFork, subnet});
|
||||
}
|
||||
},
|
||||
afterForkTransition: (prevFork) => {
|
||||
this.logger.info("Unsuscribing to random attnets from prev fork", {prevFork});
|
||||
// ONLY ONCE: Two epochs after the fork, un-subscribe all subnets from the old fork
|
||||
for (let subnet = 0; subnet < ATTESTATION_SUBNET_COUNT; subnet++) {
|
||||
this.gossip.unsubscribeTopic({type: gossipType, fork: prevFork, subnet});
|
||||
}
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
this.logger.error("Error on AttnetsService.onEpoch", {epoch}, e);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {Slot, ValidatorIndex} from "@chainsafe/lodestar-types";
|
||||
|
||||
/** Generic CommitteeSubscription for both beacon attnets subs and syncnets subs */
|
||||
@@ -13,6 +14,8 @@ export interface ISubnetsService {
|
||||
stop(): void;
|
||||
addCommitteeSubscriptions(subscriptions: CommitteeSubscription[]): void;
|
||||
getActiveSubnets(): number[];
|
||||
subscribeSubnetsToNextFork(nextFork: ForkName): void;
|
||||
unsubscribeSubnetsFromPrevFork(prevFork: ForkName): void;
|
||||
}
|
||||
|
||||
export interface IAttnetsService extends ISubnetsService {
|
||||
|
||||
@@ -4,7 +4,7 @@ import {ForkName, SYNC_COMMITTEE_SUBNET_COUNT} from "@chainsafe/lodestar-params"
|
||||
import {Epoch, ssz} from "@chainsafe/lodestar-types";
|
||||
import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {ChainEvent, IBeaconChain} from "../../chain";
|
||||
import {getActiveForks, runForkTransitionHooks} from "../forks";
|
||||
import {getActiveForks} from "../forks";
|
||||
import {Eth2Gossipsub, GossipType} from "../gossip";
|
||||
import {MetadataController} from "../metadata";
|
||||
import {SubnetMap} from "../peers/utils";
|
||||
@@ -68,6 +68,24 @@ export class SyncnetsService implements ISubnetsService {
|
||||
this.updateMetadata();
|
||||
}
|
||||
|
||||
/** Call ONLY ONCE: Two epoch before the fork, re-subscribe all existing random subscriptions to the new fork */
|
||||
subscribeSubnetsToNextFork(nextFork: ForkName): void {
|
||||
if (nextFork === ForkName.altair) return;
|
||||
this.logger.info("Suscribing to random attnets to next fork", {nextFork});
|
||||
for (const subnet of this.subscriptionsCommittee.getAll()) {
|
||||
this.gossip.subscribeTopic({type: gossipType, fork: nextFork, subnet});
|
||||
}
|
||||
}
|
||||
|
||||
/** Call ONLY ONCE: Two epochs after the fork, un-subscribe all subnets from the old fork */
|
||||
unsubscribeSubnetsFromPrevFork(prevFork: ForkName): void {
|
||||
if (prevFork === ForkName.phase0) return;
|
||||
this.logger.info("Unsuscribing to random attnets from prev fork", {prevFork});
|
||||
for (let subnet = 0; subnet < SYNC_COMMITTEE_SUBNET_COUNT; subnet++) {
|
||||
this.gossip.unsubscribeTopic({type: gossipType, fork: prevFork, subnet});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run per epoch, clean-up operations that are not urgent
|
||||
*/
|
||||
@@ -76,26 +94,6 @@ export class SyncnetsService implements ISubnetsService {
|
||||
const slot = computeStartSlotAtEpoch(epoch);
|
||||
// Unsubscribe to a committee subnet from subscriptionsCommittee.
|
||||
this.unsubscribeSubnets(this.subscriptionsCommittee.getExpired(slot));
|
||||
|
||||
// Fork transition for altair -> nextFork
|
||||
runForkTransitionHooks(this.config, epoch, {
|
||||
beforeForkTransition: (nextFork) => {
|
||||
if (nextFork === ForkName.altair) return;
|
||||
this.logger.info("Suscribing to random attnets to next fork", {nextFork});
|
||||
// ONLY ONCE: Two epoch before the fork, re-subscribe all existing random subscriptions to the new fork
|
||||
for (const subnet of this.subscriptionsCommittee.getAll()) {
|
||||
this.gossip.subscribeTopic({type: gossipType, fork: nextFork, subnet});
|
||||
}
|
||||
},
|
||||
afterForkTransition: (prevFork) => {
|
||||
if (prevFork === ForkName.phase0) return;
|
||||
this.logger.info("Unsuscribing to random attnets from prev fork", {prevFork});
|
||||
// ONLY ONCE: Two epochs after the fork, un-subscribe all subnets from the old fork
|
||||
for (let subnet = 0; subnet < SYNC_COMMITTEE_SUBNET_COUNT; subnet++) {
|
||||
this.gossip.unsubscribeTopic({type: gossipType, fork: prevFork, subnet});
|
||||
}
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
this.logger.error("Error on SyncnetsService.onEpoch", {epoch}, e);
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ import {ILogger} from "@chainsafe/lodestar-utils";
|
||||
import {Api} from "@chainsafe/lodestar-api";
|
||||
|
||||
import {IBeaconDb} from "../db";
|
||||
import {INetwork, Network, ReqRespHandler} from "../network";
|
||||
import {INetwork, Network, getReqRespHandlers, getGossipHandlers} from "../network";
|
||||
import {BeaconSync, IBeaconSync} from "../sync";
|
||||
import {BeaconChain, IBeaconChain, initBeaconMetrics} from "../chain";
|
||||
import {createMetrics, IMetrics, HttpMetricsServer} from "../metrics";
|
||||
@@ -144,7 +144,8 @@ export class BeaconNode {
|
||||
metrics,
|
||||
chain,
|
||||
db,
|
||||
reqRespHandler: new ReqRespHandler({db, chain}),
|
||||
reqRespHandlers: getReqRespHandlers({db, chain}),
|
||||
gossipHandlers: getGossipHandlers({chain, config, db, logger, metrics}),
|
||||
signal,
|
||||
});
|
||||
const sync = new BeaconSync(opts.sync, {
|
||||
|
||||
39
packages/lodestar/src/util/map.ts
Normal file
39
packages/lodestar/src/util/map.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
/**
|
||||
* 2 dimensions Es6 Map
|
||||
*/
|
||||
export class Map2d<K1, K2, V> {
|
||||
readonly map = new Map<K1, Map<K2, V>>();
|
||||
|
||||
get(k1: K1, k2: K2): V | undefined {
|
||||
return this.map.get(k1)?.get(k2);
|
||||
}
|
||||
|
||||
set(k1: K1, k2: K2, v: V): void {
|
||||
let map2 = this.map.get(k1);
|
||||
if (!map2) {
|
||||
map2 = new Map<K2, V>();
|
||||
this.map.set(k1, map2);
|
||||
}
|
||||
map2.set(k2, v);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 2 dimensions Es6 Map + regular array
|
||||
*/
|
||||
export class Map2dArr<K1, V> {
|
||||
readonly map = new Map<K1, V[]>();
|
||||
|
||||
get(k1: K1, idx: number): V | undefined {
|
||||
return this.map.get(k1)?.[idx];
|
||||
}
|
||||
|
||||
set(k1: K1, idx: number, v: V): void {
|
||||
let arr = this.map.get(k1);
|
||||
if (!arr) {
|
||||
arr = [];
|
||||
this.map.set(k1, arr);
|
||||
}
|
||||
arr[idx] = v;
|
||||
}
|
||||
}
|
||||
168
packages/lodestar/test/e2e/network/gossipsub.test.ts
Normal file
168
packages/lodestar/test/e2e/network/gossipsub.test.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import sinon from "sinon";
|
||||
import {expect} from "chai";
|
||||
import {AbortController} from "@chainsafe/abort-controller";
|
||||
import {createIBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {config} from "@chainsafe/lodestar-config/default";
|
||||
import {phase0, ssz} from "@chainsafe/lodestar-types";
|
||||
import {sleep} from "@chainsafe/lodestar-utils";
|
||||
|
||||
import {getReqRespHandlers, Network} from "../../../src/network";
|
||||
import {INetworkOptions} from "../../../src/network/options";
|
||||
import {GossipHandlers} from "../../../src/network/gossip/handlers";
|
||||
import {GossipType} from "../../../src/network/gossip";
|
||||
|
||||
import {generateEmptySignedBlock} from "../../utils/block";
|
||||
import {MockBeaconChain} from "../../utils/mocks/chain/chain";
|
||||
import {createNode} from "../../utils/network";
|
||||
import {generateState} from "../../utils/state";
|
||||
import {StubbedBeaconDb} from "../../utils/stub";
|
||||
import {connect, onPeerConnect} from "../../utils/network";
|
||||
import {testLogger} from "../../utils/logger";
|
||||
|
||||
const multiaddr = "/ip4/127.0.0.1/tcp/0";
|
||||
|
||||
const opts: INetworkOptions = {
|
||||
maxPeers: 1,
|
||||
targetPeers: 1,
|
||||
bootMultiaddrs: [],
|
||||
localMultiaddrs: [],
|
||||
};
|
||||
|
||||
describe("network", function () {
|
||||
if (this.timeout() < 15 * 1000) this.timeout(15 * 1000);
|
||||
|
||||
const logger = testLogger();
|
||||
|
||||
const afterEachCallbacks: (() => Promise<void> | void)[] = [];
|
||||
afterEach(async () => {
|
||||
while (afterEachCallbacks.length > 0) {
|
||||
const callback = afterEachCallbacks.pop();
|
||||
if (callback) await callback();
|
||||
}
|
||||
});
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
|
||||
async function mockModules(gossipHandlersPartial?: Partial<GossipHandlers>) {
|
||||
const controller = new AbortController();
|
||||
|
||||
const block = generateEmptySignedBlock();
|
||||
const state = generateState({
|
||||
finalizedCheckpoint: {
|
||||
epoch: 0,
|
||||
root: ssz.phase0.BeaconBlock.hashTreeRoot(block.message),
|
||||
},
|
||||
});
|
||||
|
||||
const beaconConfig = createIBeaconConfig(config, state.genesisValidatorsRoot);
|
||||
const chain = new MockBeaconChain({genesisTime: 0, chainId: 0, networkId: BigInt(0), state, config});
|
||||
const db = new StubbedBeaconDb(sinon, config);
|
||||
const reqRespHandlers = getReqRespHandlers({db, chain});
|
||||
const gossipHandlers = gossipHandlersPartial as GossipHandlers;
|
||||
|
||||
const [libp2pA, libp2pB] = await Promise.all([createNode(multiaddr), createNode(multiaddr)]);
|
||||
const loggerA = testLogger("A");
|
||||
const loggerB = testLogger("B");
|
||||
|
||||
const modules = {
|
||||
config: beaconConfig,
|
||||
chain,
|
||||
db,
|
||||
reqRespHandlers,
|
||||
gossipHandlers,
|
||||
signal: controller.signal,
|
||||
metrics: null,
|
||||
};
|
||||
const netA = new Network(opts, {...modules, libp2p: libp2pA, logger: loggerA});
|
||||
const netB = new Network(opts, {...modules, libp2p: libp2pB, logger: loggerB});
|
||||
|
||||
await Promise.all([netA.start(), netB.start()]);
|
||||
|
||||
afterEachCallbacks.push(async () => {
|
||||
chain.close();
|
||||
controller.abort();
|
||||
await Promise.all([netA.stop(), netB.stop()]);
|
||||
sinon.restore();
|
||||
});
|
||||
|
||||
return {netA, netB, chain, controller};
|
||||
}
|
||||
|
||||
it("Publish and receive a voluntaryExit", async function () {
|
||||
let onVoluntaryExit: (ve: phase0.SignedVoluntaryExit) => void;
|
||||
const onVoluntaryExitPromise = new Promise<phase0.SignedVoluntaryExit>((resolve) => (onVoluntaryExit = resolve));
|
||||
|
||||
const {netA, netB, controller} = await mockModules({
|
||||
[GossipType.voluntary_exit]: async (voluntaryExit) => {
|
||||
onVoluntaryExit(voluntaryExit);
|
||||
},
|
||||
});
|
||||
|
||||
await Promise.all([onPeerConnect(netA), onPeerConnect(netB), connect(netA, netB.peerId, netB.localMultiaddrs)]);
|
||||
expect(Array.from(netA.getConnectionsByPeer().values()).length).to.equal(1);
|
||||
expect(Array.from(netB.getConnectionsByPeer().values()).length).to.equal(1);
|
||||
|
||||
netA.subscribeGossipCoreTopics();
|
||||
netB.subscribeGossipCoreTopics();
|
||||
|
||||
// Wait to have a peer connected to a topic
|
||||
while (!controller.signal.aborted) {
|
||||
await sleep(500);
|
||||
const topicStr = Array.from(netA.gossip.mesh.keys())[0];
|
||||
const peersOnTopic = netA.gossip.mesh.get(topicStr);
|
||||
if (peersOnTopic && peersOnTopic?.size > 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const voluntaryExit = ssz.phase0.SignedVoluntaryExit.defaultValue();
|
||||
await netA.gossip.publishVoluntaryExit(voluntaryExit);
|
||||
|
||||
const receivedVoluntaryExit = await onVoluntaryExitPromise;
|
||||
expect(receivedVoluntaryExit).to.deep.equal(voluntaryExit);
|
||||
});
|
||||
|
||||
it("Publish and receive 1000 voluntaryExits", async function () {
|
||||
const receivedVoluntaryExits: phase0.SignedVoluntaryExit[] = [];
|
||||
|
||||
const {netA, netB, controller} = await mockModules({
|
||||
[GossipType.voluntary_exit]: async (voluntaryExit) => {
|
||||
receivedVoluntaryExits.push(voluntaryExit);
|
||||
},
|
||||
});
|
||||
|
||||
await Promise.all([onPeerConnect(netA), onPeerConnect(netB), connect(netA, netB.peerId, netB.localMultiaddrs)]);
|
||||
expect(Array.from(netA.getConnectionsByPeer().values()).length).to.equal(1);
|
||||
expect(Array.from(netB.getConnectionsByPeer().values()).length).to.equal(1);
|
||||
|
||||
netA.subscribeGossipCoreTopics();
|
||||
netB.subscribeGossipCoreTopics();
|
||||
|
||||
// Wait to have a peer connected to a topic
|
||||
while (!controller.signal.aborted) {
|
||||
await sleep(500);
|
||||
const topicStr = Array.from(netA.gossip.mesh.keys())[0];
|
||||
const peersOnTopic = netA.gossip.mesh.get(topicStr);
|
||||
if (peersOnTopic && peersOnTopic?.size > 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const msgCount = 1000;
|
||||
|
||||
for (let i = 0; i < msgCount; i++) {
|
||||
const voluntaryExit = ssz.phase0.SignedVoluntaryExit.defaultValue();
|
||||
voluntaryExit.message.epoch = i;
|
||||
netA.gossip.publishVoluntaryExit(voluntaryExit).catch((e) => {
|
||||
logger.error("Error on publishVoluntaryExit", {}, e);
|
||||
});
|
||||
}
|
||||
|
||||
// Wait to receive all the messages. A timeout error will happen otherwise
|
||||
while (!controller.signal.aborted) {
|
||||
await sleep(500);
|
||||
if (receivedVoluntaryExits.length >= msgCount) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -4,11 +4,12 @@ import {AbortController} from "@chainsafe/abort-controller";
|
||||
|
||||
import PeerId from "peer-id";
|
||||
import {Discv5Discovery, ENR} from "@chainsafe/discv5";
|
||||
import {createIBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {config} from "@chainsafe/lodestar-config/default";
|
||||
import {phase0, ssz} from "@chainsafe/lodestar-types";
|
||||
import {sleep} from "@chainsafe/lodestar-utils";
|
||||
|
||||
import {Network, NetworkEvent, ReqRespHandler, ReqRespMethod} from "../../../src/network";
|
||||
import {Network, NetworkEvent, ReqRespMethod, getReqRespHandlers} from "../../../src/network";
|
||||
import {INetworkOptions} from "../../../src/network/options";
|
||||
import {GoodByeReasonCode} from "../../../src/constants";
|
||||
|
||||
@@ -20,7 +21,7 @@ import {StubbedBeaconDb} from "../../utils/stub";
|
||||
import {connect, disconnect, onPeerConnect, onPeerDisconnect} from "../../utils/network";
|
||||
import {testLogger} from "../../utils/logger";
|
||||
import {CommitteeSubscription} from "../../../src/network/subnets";
|
||||
import {createIBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {GossipHandlers} from "../../../src/network/gossip";
|
||||
|
||||
const multiaddr = "/ip4/127.0.0.1/tcp/0";
|
||||
|
||||
@@ -57,13 +58,22 @@ describe("network", function () {
|
||||
const beaconConfig = createIBeaconConfig(config, state.genesisValidatorsRoot);
|
||||
const chain = new MockBeaconChain({genesisTime: 0, chainId: 0, networkId: BigInt(0), state, config: beaconConfig});
|
||||
const db = new StubbedBeaconDb(sinon, config);
|
||||
const reqRespHandler = new ReqRespHandler({db, chain});
|
||||
const reqRespHandlers = getReqRespHandlers({db, chain});
|
||||
const gossipHandlers = {} as GossipHandlers;
|
||||
|
||||
const [libp2pA, libp2pB] = await Promise.all([createNode(multiaddr), createNode(multiaddr)]);
|
||||
const loggerA = testLogger("A");
|
||||
const loggerB = testLogger("B");
|
||||
|
||||
const modules = {config: beaconConfig, chain, db, reqRespHandler, signal: controller.signal, metrics: null};
|
||||
const modules = {
|
||||
config: beaconConfig,
|
||||
chain,
|
||||
db,
|
||||
reqRespHandlers,
|
||||
gossipHandlers,
|
||||
signal: controller.signal,
|
||||
metrics: null,
|
||||
};
|
||||
const netA = new Network(opts, {...modules, libp2p: libp2pA, logger: loggerA});
|
||||
const netB = new Network(opts, {...modules, libp2p: libp2pB, logger: loggerB});
|
||||
|
||||
@@ -177,4 +187,15 @@ describe("network", function () {
|
||||
expect(peer.toB58String()).to.equal(netA.peerId.toB58String(), "netA must be the goodbye requester");
|
||||
expect(goodbye).to.equal(BigInt(GoodByeReasonCode.CLIENT_SHUTDOWN), "goodbye reason must be CLIENT_SHUTDOWN");
|
||||
});
|
||||
|
||||
it("Should subscribe to gossip core topics on demand", async () => {
|
||||
const {netA} = await mockModules();
|
||||
|
||||
expect(netA.gossip.subscriptions.size).to.equal(0);
|
||||
netA.subscribeGossipCoreTopics();
|
||||
expect(netA.gossip.subscriptions.size).to.equal(5);
|
||||
netA.unsubscribeGossipCoreTopics();
|
||||
expect(netA.gossip.subscriptions.size).to.equal(0);
|
||||
netA.close();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -60,15 +60,15 @@ describe("network / peers / PeerManager", function () {
|
||||
const peerMetadata = new Libp2pPeerMetadataStore(libp2p.peerStore.metadataBook);
|
||||
const peerRpcScores = new PeerRpcScoreStore(peerMetadata);
|
||||
const networkEventBus = new NetworkEventBus();
|
||||
/* eslint-disable @typescript-eslint/no-empty-function */
|
||||
const mockSubnetsService: IAttnetsService = {
|
||||
getActiveSubnets: () => [],
|
||||
shouldProcess: () => true,
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
addCommitteeSubscriptions: () => {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
start: () => {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
stop: () => {},
|
||||
subscribeSubnetsToNextFork: () => {},
|
||||
unsubscribeSubnetsFromPrevFork: () => {},
|
||||
};
|
||||
|
||||
const peerManager = new PeerManager(
|
||||
|
||||
@@ -3,13 +3,15 @@ import chai, {expect} from "chai";
|
||||
import chaiAsPromised from "chai-as-promised";
|
||||
import {AbortController} from "@chainsafe/abort-controller";
|
||||
import PeerId from "peer-id";
|
||||
import {createIBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {config} from "@chainsafe/lodestar-config/default";
|
||||
import {sleep as _sleep} from "@chainsafe/lodestar-utils";
|
||||
import {altair, phase0, ssz} from "@chainsafe/lodestar-types";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {createPeerId, IReqRespOptions, Network, prettyPrintPeerId} from "../../../src/network";
|
||||
import {INetworkOptions} from "../../../src/network/options";
|
||||
import {Method, Encoding} from "../../../src/network/reqresp/types";
|
||||
import {IReqRespHandler} from "../../../src/network/reqresp/handlers";
|
||||
import {ReqRespHandlers} from "../../../src/network/reqresp/handlers";
|
||||
import {RequestError, RequestErrorCode} from "../../../src/network/reqresp/request";
|
||||
import {IRequestErrorMetadata} from "../../../src/network/reqresp/request/errors";
|
||||
import {testLogger} from "../../utils/logger";
|
||||
@@ -21,8 +23,7 @@ import {generateEmptySignedBlock} from "../../utils/block";
|
||||
import {expectRejectedWithLodestarError} from "../../utils/errors";
|
||||
import {connect, onPeerConnect} from "../../utils/network";
|
||||
import {StubbedBeaconDb} from "../../utils/stub";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {createIBeaconConfig} from "@chainsafe/lodestar-config";
|
||||
import {GossipHandlers} from "../../../src/network/gossip";
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
|
||||
@@ -59,7 +60,7 @@ describe("network / ReqResp", function () {
|
||||
}
|
||||
|
||||
async function createAndConnectPeers(
|
||||
reqRespHandlerPartial?: Partial<IReqRespHandler>,
|
||||
reqRespHandlersPartial?: Partial<ReqRespHandlers>,
|
||||
reqRespOpts?: IReqRespOptions
|
||||
): Promise<[Network, Network]> {
|
||||
const controller = new AbortController();
|
||||
@@ -70,14 +71,25 @@ describe("network / ReqResp", function () {
|
||||
const notImplemented = async function* <T>(): AsyncIterable<T> {
|
||||
throw Error("not implemented");
|
||||
};
|
||||
const reqRespHandler: IReqRespHandler = {
|
||||
|
||||
const reqRespHandlers: ReqRespHandlers = {
|
||||
onStatus: notImplemented,
|
||||
onBeaconBlocksByRange: notImplemented,
|
||||
onBeaconBlocksByRoot: notImplemented,
|
||||
...reqRespHandlerPartial,
|
||||
...reqRespHandlersPartial,
|
||||
};
|
||||
|
||||
const gossipHandlers = {} as GossipHandlers;
|
||||
const opts = {...networkOptsDefault, ...reqRespOpts};
|
||||
const modules = {config: beaconConfig, db, chain, reqRespHandler, signal: controller.signal, metrics: null};
|
||||
const modules = {
|
||||
config: beaconConfig,
|
||||
db,
|
||||
chain,
|
||||
reqRespHandlers,
|
||||
gossipHandlers,
|
||||
signal: controller.signal,
|
||||
metrics: null,
|
||||
};
|
||||
const netA = new Network(opts, {...modules, libp2p: libp2pA, logger: testLogger("A")});
|
||||
const netB = new Network(opts, {...modules, libp2p: libp2pB, logger: testLogger("B")});
|
||||
await Promise.all([netA.start(), netB.start()]);
|
||||
|
||||
@@ -76,7 +76,7 @@ describe("gossip block validation", function () {
|
||||
epoch: 0,
|
||||
root: Buffer.alloc(32),
|
||||
});
|
||||
regenStub.getBlockSlotState.throws();
|
||||
regenStub.getBlockSlotState.rejects();
|
||||
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipBlock(config, chainStub, dbStub, job),
|
||||
|
||||
@@ -57,10 +57,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
|
||||
const signedContributionAndProof = generateSignedContributionAndProof({contribution: {slot: 1}});
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.NOT_CURRENT_SLOT
|
||||
);
|
||||
});
|
||||
@@ -70,10 +67,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
forkChoiceStub.hasBlock.returns(false);
|
||||
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.UNKNOWN_BEACON_BLOCK_ROOT
|
||||
);
|
||||
});
|
||||
@@ -85,10 +79,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
forkChoiceStub.hasBlock.returns(true);
|
||||
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.INVALID_SUB_COMMITTEE_INDEX
|
||||
);
|
||||
});
|
||||
@@ -100,10 +91,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
chain.getHeadState.returns(headState);
|
||||
db.syncCommitteeContribution.has.returns(true);
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.SYNC_COMMITTEE_ALREADY_KNOWN
|
||||
);
|
||||
});
|
||||
@@ -116,10 +104,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
chain.getHeadState.returns(headState);
|
||||
isSyncCommitteeAggregatorStub.returns(false);
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.INVALID_AGGREGATOR
|
||||
);
|
||||
});
|
||||
@@ -136,10 +121,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
const headState = await generateCachedStateWithPubkeys({slot: currentSlot}, config, true);
|
||||
chain.getHeadState.returns(headState);
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.AGGREGATOR_PUBKEY_UNKNOWN
|
||||
);
|
||||
});
|
||||
@@ -153,10 +135,7 @@ describe("Sync Committee Contribution And Proof validation", function () {
|
||||
chain.getHeadState.returns(headState);
|
||||
chain.bls = {verifySignatureSets: async () => false};
|
||||
await expectRejectedWithLodestarError(
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, {
|
||||
contributionAndProof: signedContributionAndProof,
|
||||
validSignature: false,
|
||||
}),
|
||||
validateSyncCommitteeGossipContributionAndProof(chain, db, signedContributionAndProof),
|
||||
SyncCommitteeErrorCode.INVALID_SIGNATURE
|
||||
);
|
||||
});
|
||||
|
||||
@@ -62,7 +62,7 @@ describe("Sync Committee Signature validation", function () {
|
||||
|
||||
const syncCommittee = generateSyncCommitteeSignature({slot: 1});
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipSyncCommittee(chain, db, {signature: syncCommittee, validSignature: false}, 0),
|
||||
validateGossipSyncCommittee(chain, db, syncCommittee, 0),
|
||||
SyncCommitteeErrorCode.NOT_CURRENT_SLOT
|
||||
);
|
||||
});
|
||||
@@ -71,7 +71,7 @@ describe("Sync Committee Signature validation", function () {
|
||||
const syncCommittee = generateSyncCommitteeSignature({slot: currentSlot});
|
||||
forkChoiceStub.hasBlock.returns(false);
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipSyncCommittee(chain, db, {signature: syncCommittee, validSignature: false}, 0),
|
||||
validateGossipSyncCommittee(chain, db, syncCommittee, 0),
|
||||
SyncCommitteeErrorCode.UNKNOWN_BEACON_BLOCK_ROOT
|
||||
);
|
||||
});
|
||||
@@ -86,7 +86,7 @@ describe("Sync Committee Signature validation", function () {
|
||||
chain.getHeadState.returns(headState);
|
||||
db.syncCommittee.has.returns(true);
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipSyncCommittee(chain, db, {signature: syncCommittee, validSignature: false}, 0),
|
||||
validateGossipSyncCommittee(chain, db, syncCommittee, 0),
|
||||
SyncCommitteeErrorCode.SYNC_COMMITTEE_ALREADY_KNOWN
|
||||
);
|
||||
});
|
||||
@@ -99,7 +99,7 @@ describe("Sync Committee Signature validation", function () {
|
||||
chain.getHeadState.returns(headState);
|
||||
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipSyncCommittee(chain, db, {signature: syncCommittee, validSignature: false}, 0),
|
||||
validateGossipSyncCommittee(chain, db, syncCommittee, 0),
|
||||
SyncCommitteeErrorCode.VALIDATOR_NOT_IN_SYNC_COMMITTEE
|
||||
);
|
||||
});
|
||||
@@ -115,7 +115,7 @@ describe("Sync Committee Signature validation", function () {
|
||||
const headState = generateCachedState({slot: currentSlot}, config, true);
|
||||
chain.getHeadState.returns(headState);
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipSyncCommittee(chain, db, {signature: syncCommittee, validSignature: false}, 0),
|
||||
validateGossipSyncCommittee(chain, db, syncCommittee, 0),
|
||||
SyncCommitteeErrorCode.INVALID_SUB_COMMITTEE_INDEX
|
||||
);
|
||||
});
|
||||
@@ -132,7 +132,7 @@ describe("Sync Committee Signature validation", function () {
|
||||
chain.getHeadState.returns(headState);
|
||||
chain.bls = {verifySignatureSets: async () => false};
|
||||
await expectRejectedWithLodestarError(
|
||||
validateGossipSyncCommittee(chain, db, {signature: syncCommittee, validSignature: false}, 0),
|
||||
validateGossipSyncCommittee(chain, db, syncCommittee, 0),
|
||||
SyncCommitteeErrorCode.INVALID_SIGNATURE
|
||||
);
|
||||
});
|
||||
|
||||
@@ -129,17 +129,12 @@ describe("AttnetsService", function () {
|
||||
});
|
||||
|
||||
it("should prepare for a hard fork", async () => {
|
||||
const altairEpoch = config.forks.altair.epoch;
|
||||
service.addCommitteeSubscriptions([subscription]);
|
||||
// run every epoch (or any num slots < 150)
|
||||
while (chain.clock.currentSlot < altairEpoch * SLOTS_PER_EPOCH) {
|
||||
// avoid known validator expiry
|
||||
service.addCommitteeSubscriptions([subscription]);
|
||||
sandbox.clock.tick(SLOTS_PER_EPOCH * SECONDS_PER_SLOT * 1000);
|
||||
}
|
||||
|
||||
// Run the pre-fork transition
|
||||
service.subscribeSubnetsToNextFork(ForkName.altair);
|
||||
|
||||
// Should have already subscribed to both forks
|
||||
|
||||
const forkTransitionSubscribeCalls = gossipStub.subscribeTopic.getCalls().map((call) => call.args[0]);
|
||||
const subToPhase0 = forkTransitionSubscribeCalls.find((topic) => topic.fork === ForkName.phase0);
|
||||
const subToAltair = forkTransitionSubscribeCalls.find((topic) => topic.fork === ForkName.altair);
|
||||
@@ -147,11 +142,7 @@ describe("AttnetsService", function () {
|
||||
if (!subToAltair) throw Error("Must subscribe to one subnet on altair");
|
||||
|
||||
// Advance through the fork transition so it un-subscribes from all phase0 subs
|
||||
|
||||
while (chain.clock.currentSlot < (altairEpoch + EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION) * SLOTS_PER_EPOCH) {
|
||||
service.addCommitteeSubscriptions([subscription]);
|
||||
sandbox.clock.tick(SLOTS_PER_EPOCH * SECONDS_PER_SLOT * 1000);
|
||||
}
|
||||
service.unsubscribeSubnetsFromPrevFork(ForkName.phase0);
|
||||
|
||||
const forkTransitionUnSubscribeCalls = gossipStub.unsubscribeTopic.getCalls().map((call) => call.args[0]);
|
||||
const unsubbedPhase0Subnets = new Set<number>();
|
||||
|
||||
@@ -3,70 +3,101 @@ import sinon, {SinonStubbedInstance} from "sinon";
|
||||
import Libp2p from "libp2p";
|
||||
import {InMessage} from "libp2p-interfaces/src/pubsub";
|
||||
import {ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants";
|
||||
import {AbortController} from "@chainsafe/abort-controller";
|
||||
import {config} from "@chainsafe/lodestar-config/default";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {ssz} from "@chainsafe/lodestar-types";
|
||||
|
||||
import {
|
||||
Eth2Gossipsub,
|
||||
stringifyGossipTopic,
|
||||
GossipType,
|
||||
TopicValidatorFn,
|
||||
GossipValidationError,
|
||||
encodeMessageData,
|
||||
GossipEncoding,
|
||||
TopicValidatorFnMap,
|
||||
} from "../../../../src/network/gossip";
|
||||
import {Eth2Gossipsub, GossipHandlers, GossipType, GossipEncoding} from "../../../../src/network/gossip";
|
||||
import {stringifyGossipTopic} from "../../../../src/network/gossip/topic";
|
||||
import {ForkDigestContext} from "../../../../src/util/forkDigestContext";
|
||||
import {encodeMessageData} from "../../../../src/network/gossip/encoding";
|
||||
import {GossipValidationError} from "../../../../src/network/gossip/errors";
|
||||
|
||||
import {generateEmptySignedBlock} from "../../../utils/block";
|
||||
import {createNode} from "../../../utils/network";
|
||||
import {testLogger} from "../../../utils/logger";
|
||||
import {ForkDigestContext} from "../../../../src/util/forkDigestContext";
|
||||
import {GossipAction, GossipActionError} from "../../../../src/chain/errors";
|
||||
|
||||
describe("gossipsub", function () {
|
||||
describe("network / gossip / validation", function () {
|
||||
const logger = testLogger();
|
||||
const metrics = null;
|
||||
let validatorFns: TopicValidatorFnMap;
|
||||
let gossipSub: Eth2Gossipsub;
|
||||
const gossipType = GossipType.beacon_block;
|
||||
|
||||
let message: InMessage;
|
||||
let topicString: string;
|
||||
let libp2p: Libp2p;
|
||||
let forkDigestContext: SinonStubbedInstance<ForkDigestContext>;
|
||||
|
||||
let controller: AbortController;
|
||||
beforeEach(() => (controller = new AbortController()));
|
||||
afterEach(() => controller.abort());
|
||||
|
||||
beforeEach(async function () {
|
||||
forkDigestContext = sinon.createStubInstance(ForkDigestContext);
|
||||
forkDigestContext.forkName2ForkDigest.returns(Buffer.alloc(4, 1));
|
||||
forkDigestContext.forkDigest2ForkName.returns(ForkName.phase0);
|
||||
|
||||
const signedBlock = generateEmptySignedBlock();
|
||||
topicString = stringifyGossipTopic(forkDigestContext, {type: GossipType.beacon_block, fork: ForkName.phase0});
|
||||
topicString = stringifyGossipTopic(forkDigestContext, {type: gossipType, fork: ForkName.phase0});
|
||||
message = {
|
||||
data: encodeMessageData(GossipEncoding.ssz_snappy, ssz.phase0.SignedBeaconBlock.serialize(signedBlock)),
|
||||
receivedFrom: "0",
|
||||
topicIDs: [topicString],
|
||||
};
|
||||
|
||||
validatorFns = new Map<string, TopicValidatorFn>();
|
||||
const multiaddr = "/ip4/127.0.0.1/tcp/0";
|
||||
libp2p = await createNode(multiaddr);
|
||||
});
|
||||
|
||||
it("should throw on failed validation", async () => {
|
||||
validatorFns.set(topicString, () => {
|
||||
throw new GossipValidationError(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
const gossipHandlersPartial: Partial<GossipHandlers> = {
|
||||
[gossipType]: async () => {
|
||||
throw new GossipActionError(GossipAction.REJECT, {code: "TEST_ERROR"});
|
||||
},
|
||||
};
|
||||
|
||||
const gossipSub = new Eth2Gossipsub({
|
||||
config,
|
||||
gossipHandlers: gossipHandlersPartial as GossipHandlers,
|
||||
logger,
|
||||
forkDigestContext,
|
||||
libp2p,
|
||||
metrics,
|
||||
signal: controller.signal,
|
||||
});
|
||||
gossipSub = new Eth2Gossipsub({config, validatorFns, logger, forkDigestContext, libp2p, metrics});
|
||||
|
||||
try {
|
||||
await gossipSub.validate(message);
|
||||
assert.fail("Expect error here");
|
||||
} catch (e) {
|
||||
expect((e as GossipValidationError).code).to.be.equal(ERR_TOPIC_VALIDATOR_REJECT);
|
||||
expect({
|
||||
message: (e as Error).message,
|
||||
code: (e as GossipValidationError).code,
|
||||
}).to.deep.equal({
|
||||
message: "TEST_ERROR",
|
||||
code: ERR_TOPIC_VALIDATOR_REJECT,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it("should not throw on successful validation", async () => {
|
||||
gossipSub = new Eth2Gossipsub({config, validatorFns, logger, forkDigestContext, libp2p, metrics});
|
||||
const gossipHandlersPartial: Partial<GossipHandlers> = {
|
||||
[gossipType]: async () => {
|
||||
//
|
||||
},
|
||||
};
|
||||
|
||||
const gossipSub = new Eth2Gossipsub({
|
||||
config,
|
||||
gossipHandlers: gossipHandlersPartial as GossipHandlers,
|
||||
logger,
|
||||
forkDigestContext,
|
||||
libp2p,
|
||||
metrics,
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
await gossipSub.validate(message);
|
||||
// no error means pass validation
|
||||
});
|
||||
|
||||
@@ -1,139 +0,0 @@
|
||||
import sinon, {SinonStubbedInstance} from "sinon";
|
||||
import {expect} from "chai";
|
||||
import {config} from "@chainsafe/lodestar-config/default";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {ssz} from "@chainsafe/lodestar-types";
|
||||
|
||||
import {BeaconChain, ChainEventEmitter, IBeaconChain} from "../../../../src/chain";
|
||||
import {INetwork, Network} from "../../../../src/network";
|
||||
import {
|
||||
Eth2Gossipsub,
|
||||
stringifyGossipTopic,
|
||||
GossipEncoding,
|
||||
GossipType,
|
||||
encodeMessageData,
|
||||
TopicValidatorFn,
|
||||
} from "../../../../src/network/gossip";
|
||||
import {GossipHandler} from "../../../../src/network/gossip/handler";
|
||||
|
||||
import {StubbedBeaconDb} from "../../../utils/stub";
|
||||
import {testLogger} from "../../../utils/logger";
|
||||
import {createNode} from "../../../utils/network";
|
||||
import {ForkDigestContext, toHexStringNoPrefix} from "../../../../src/util/forkDigestContext";
|
||||
import {generateBlockSummary} from "../../../utils/block";
|
||||
import {IForkChoice} from "@chainsafe/lodestar-fork-choice";
|
||||
import {IAttnetsService} from "../../../../src/network/subnets";
|
||||
|
||||
describe("gossip handler", function () {
|
||||
const logger = testLogger();
|
||||
const attnetsService = {} as IAttnetsService;
|
||||
let forkDigestContext: SinonStubbedInstance<ForkDigestContext>;
|
||||
let chainStub: SinonStubbedInstance<IBeaconChain>;
|
||||
let networkStub: SinonStubbedInstance<INetwork>;
|
||||
let gossipsub: Eth2Gossipsub;
|
||||
let dbStub: StubbedBeaconDb;
|
||||
|
||||
beforeEach(async function () {
|
||||
forkDigestContext = sinon.createStubInstance(ForkDigestContext);
|
||||
chainStub = sinon.createStubInstance(BeaconChain);
|
||||
chainStub.emitter = new ChainEventEmitter();
|
||||
chainStub.getHeadForkName.returns(ForkName.phase0);
|
||||
chainStub.forkDigestContext = forkDigestContext;
|
||||
chainStub.forkChoice = {getHead: () => generateBlockSummary()} as IForkChoice;
|
||||
networkStub = sinon.createStubInstance(Network);
|
||||
const multiaddr = "/ip4/127.0.0.1/tcp/0";
|
||||
const libp2p = await createNode(multiaddr);
|
||||
gossipsub = new Eth2Gossipsub({
|
||||
config,
|
||||
libp2p,
|
||||
validatorFns: new Map<string, TopicValidatorFn>(),
|
||||
logger,
|
||||
forkDigestContext,
|
||||
metrics: null,
|
||||
});
|
||||
networkStub.gossip = gossipsub;
|
||||
gossipsub.start();
|
||||
dbStub = new StubbedBeaconDb(sinon);
|
||||
const phase0ForkDigestBuf = Buffer.alloc(4, 1);
|
||||
const altairForkDigestBuf = Buffer.alloc(4, 2);
|
||||
const phase0ForkDigestHex = toHexStringNoPrefix(Buffer.alloc(4, 1));
|
||||
const altairForkDigestHex = toHexStringNoPrefix(Buffer.alloc(4, 2));
|
||||
forkDigestContext.forkName2ForkDigest.withArgs(ForkName.phase0).returns(phase0ForkDigestBuf);
|
||||
forkDigestContext.forkName2ForkDigest.withArgs(ForkName.altair).returns(altairForkDigestBuf);
|
||||
forkDigestContext.forkDigest2ForkName.withArgs(phase0ForkDigestHex).returns(ForkName.phase0);
|
||||
forkDigestContext.forkDigest2ForkName.withArgs(phase0ForkDigestBuf).returns(ForkName.phase0);
|
||||
forkDigestContext.forkDigest2ForkName.withArgs(altairForkDigestHex).returns(ForkName.altair);
|
||||
forkDigestContext.forkDigest2ForkName.withArgs(altairForkDigestBuf).returns(ForkName.altair);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
sinon.restore();
|
||||
});
|
||||
|
||||
it("should subscribe/unsubscribe on start/stop", function () {
|
||||
const handler = new GossipHandler(config, chainStub, gossipsub, attnetsService, dbStub, logger);
|
||||
expect(gossipsub.subscriptions.size).to.equal(0);
|
||||
handler.subscribeCoreTopics();
|
||||
expect(gossipsub.subscriptions.size).to.equal(5);
|
||||
handler.unsubscribeCoreTopics();
|
||||
expect(gossipsub.subscriptions.size).to.equal(0);
|
||||
handler.close();
|
||||
});
|
||||
|
||||
it("should handle incoming gossip objects", async function () {
|
||||
const handler = new GossipHandler(config, chainStub, gossipsub, attnetsService, dbStub, logger);
|
||||
handler.subscribeCoreTopics();
|
||||
const fork = ForkName.phase0;
|
||||
const {
|
||||
SignedBeaconBlock,
|
||||
SignedAggregateAndProof,
|
||||
SignedVoluntaryExit,
|
||||
ProposerSlashing,
|
||||
AttesterSlashing,
|
||||
} = ssz.phase0;
|
||||
|
||||
await gossipsub._processRpcMessage({
|
||||
data: encodeMessageData(GossipEncoding.ssz_snappy, SignedBeaconBlock.serialize(SignedBeaconBlock.defaultValue())),
|
||||
receivedFrom: "foo",
|
||||
topicIDs: [stringifyGossipTopic(forkDigestContext, {type: GossipType.beacon_block, fork})],
|
||||
});
|
||||
expect(chainStub.receiveBlock.calledOnce).to.be.true;
|
||||
|
||||
await gossipsub._processRpcMessage({
|
||||
data: encodeMessageData(
|
||||
GossipEncoding.ssz_snappy,
|
||||
SignedAggregateAndProof.serialize(SignedAggregateAndProof.defaultValue())
|
||||
),
|
||||
receivedFrom: "foo",
|
||||
topicIDs: [stringifyGossipTopic(forkDigestContext, {type: GossipType.beacon_aggregate_and_proof, fork})],
|
||||
});
|
||||
expect(dbStub.aggregateAndProof.add.calledOnce).to.be.true;
|
||||
|
||||
await gossipsub._processRpcMessage({
|
||||
data: encodeMessageData(
|
||||
GossipEncoding.ssz_snappy,
|
||||
SignedVoluntaryExit.serialize(SignedVoluntaryExit.defaultValue())
|
||||
),
|
||||
receivedFrom: "foo",
|
||||
topicIDs: [stringifyGossipTopic(forkDigestContext, {type: GossipType.voluntary_exit, fork})],
|
||||
});
|
||||
expect(dbStub.voluntaryExit.add.calledOnce).to.be.true;
|
||||
|
||||
await gossipsub._processRpcMessage({
|
||||
data: encodeMessageData(GossipEncoding.ssz_snappy, ProposerSlashing.serialize(ProposerSlashing.defaultValue())),
|
||||
receivedFrom: "foo",
|
||||
topicIDs: [stringifyGossipTopic(forkDigestContext, {type: GossipType.proposer_slashing, fork})],
|
||||
});
|
||||
expect(dbStub.proposerSlashing.add.calledOnce).to.be.true;
|
||||
|
||||
await gossipsub._processRpcMessage({
|
||||
data: encodeMessageData(GossipEncoding.ssz_snappy, AttesterSlashing.serialize(AttesterSlashing.defaultValue())),
|
||||
receivedFrom: "foo",
|
||||
topicIDs: [stringifyGossipTopic(forkDigestContext, {type: GossipType.attester_slashing, fork})],
|
||||
});
|
||||
expect(dbStub.attesterSlashing.add.calledOnce).to.be.true;
|
||||
|
||||
handler.unsubscribeCoreTopics();
|
||||
handler.close();
|
||||
});
|
||||
});
|
||||
@@ -2,53 +2,84 @@ import {expect} from "chai";
|
||||
import {config} from "@chainsafe/lodestar-config/default";
|
||||
import {ForkName} from "@chainsafe/lodestar-params";
|
||||
import {ssz} from "@chainsafe/lodestar-types";
|
||||
import {
|
||||
parseGossipTopic,
|
||||
stringifyGossipTopic,
|
||||
GossipType,
|
||||
GossipEncoding,
|
||||
GossipTopicMap,
|
||||
getForkFromGossipTopic,
|
||||
} from "../../../../src/network/gossip";
|
||||
import {GossipType, GossipEncoding, GossipTopicMap} from "../../../../src/network/gossip";
|
||||
import {ForkDigestContext} from "../../../../src/util/forkDigestContext";
|
||||
import {parseGossipTopic, stringifyGossipTopic} from "../../../../src/network/gossip/topic";
|
||||
|
||||
describe("GossipTopic", function () {
|
||||
describe("network / gossip / topic", function () {
|
||||
const genesisValidatorsRoot = ssz.Root.defaultValue();
|
||||
const forkDigestContext = new ForkDigestContext(config, genesisValidatorsRoot);
|
||||
const encoding = GossipEncoding.ssz_snappy;
|
||||
|
||||
// Enforce with Typescript that we test all GossipType
|
||||
const testCases: {[K in GossipType]: GossipTopicMap[K][]} = {
|
||||
[GossipType.beacon_block]: [{type: GossipType.beacon_block, fork: ForkName.phase0, encoding}],
|
||||
const testCases: {[K in GossipType]: {topic: GossipTopicMap[K]; topicStr: string}[]} = {
|
||||
[GossipType.beacon_block]: [
|
||||
{
|
||||
topic: {type: GossipType.beacon_block, fork: ForkName.phase0, encoding},
|
||||
topicStr: "/eth2/18ae4ccb/beacon_block/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.beacon_aggregate_and_proof]: [
|
||||
{type: GossipType.beacon_aggregate_and_proof, fork: ForkName.phase0, encoding},
|
||||
{
|
||||
topic: {type: GossipType.beacon_aggregate_and_proof, fork: ForkName.phase0, encoding},
|
||||
topicStr: "/eth2/18ae4ccb/beacon_aggregate_and_proof/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.beacon_attestation]: [
|
||||
{type: GossipType.beacon_attestation, fork: ForkName.phase0, subnet: 5, encoding},
|
||||
{
|
||||
topic: {type: GossipType.beacon_attestation, fork: ForkName.phase0, subnet: 5, encoding},
|
||||
topicStr: "/eth2/18ae4ccb/beacon_attestation_5/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.voluntary_exit]: [
|
||||
{
|
||||
topic: {type: GossipType.voluntary_exit, fork: ForkName.phase0, encoding},
|
||||
topicStr: "/eth2/18ae4ccb/voluntary_exit/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.proposer_slashing]: [
|
||||
{
|
||||
topic: {type: GossipType.proposer_slashing, fork: ForkName.phase0, encoding},
|
||||
topicStr: "/eth2/18ae4ccb/proposer_slashing/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.attester_slashing]: [
|
||||
{
|
||||
topic: {type: GossipType.attester_slashing, fork: ForkName.phase0, encoding},
|
||||
topicStr: "/eth2/18ae4ccb/attester_slashing/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.voluntary_exit]: [{type: GossipType.voluntary_exit, fork: ForkName.phase0, encoding}],
|
||||
[GossipType.proposer_slashing]: [{type: GossipType.proposer_slashing, fork: ForkName.phase0, encoding}],
|
||||
[GossipType.attester_slashing]: [{type: GossipType.attester_slashing, fork: ForkName.phase0, encoding}],
|
||||
[GossipType.sync_committee_contribution_and_proof]: [
|
||||
{type: GossipType.sync_committee_contribution_and_proof, fork: ForkName.altair, encoding},
|
||||
{
|
||||
topic: {type: GossipType.sync_committee_contribution_and_proof, fork: ForkName.altair, encoding},
|
||||
topicStr: "/eth2/8e04f66f/sync_committee_contribution_and_proof/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.sync_committee]: [
|
||||
{
|
||||
topic: {type: GossipType.sync_committee, fork: ForkName.altair, subnet: 5, encoding},
|
||||
topicStr: "/eth2/8e04f66f/sync_committee_5/ssz_snappy",
|
||||
},
|
||||
],
|
||||
[GossipType.sync_committee]: [{type: GossipType.sync_committee, fork: ForkName.altair, subnet: 5, encoding}],
|
||||
};
|
||||
|
||||
for (const topics of Object.values(testCases)) {
|
||||
if (topics.length === 0) throw Error("Must have a least 1 testCase for each GossipType");
|
||||
|
||||
for (const topic of topics) {
|
||||
it(`should round trip encode/decode gossip topic ${topic.type} ${topic.fork} ${topic.encoding}`, async () => {
|
||||
const topicString = stringifyGossipTopic(forkDigestContext, topic);
|
||||
const outputTopic = parseGossipTopic(forkDigestContext, topicString);
|
||||
for (const {topic, topicStr} of topics) {
|
||||
it(`should encode gossip topic ${topic.type} ${topic.fork} ${topic.encoding}`, async () => {
|
||||
const topicStrRes = stringifyGossipTopic(forkDigestContext, topic);
|
||||
expect(topicStrRes).to.equal(topicStr);
|
||||
});
|
||||
|
||||
it(`should decode gossip topic ${topicStr}`, async () => {
|
||||
const outputTopic = parseGossipTopic(forkDigestContext, topicStr);
|
||||
expect(outputTopic).to.deep.equal(topic);
|
||||
expect(getForkFromGossipTopic(forkDigestContext, topicString)).to.be.equal(topic.fork, "Incorrect fork");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const topicStrings: string[] = [
|
||||
const badTopicStrings: string[] = [
|
||||
// completely invalid
|
||||
"/different/protocol/entirely",
|
||||
// invalid fork digest
|
||||
@@ -63,9 +94,9 @@ describe("GossipTopic", function () {
|
||||
// invalid encoding
|
||||
"/eth2/18ae4ccb/beacon_attestation_5/ssz_supersnappy",
|
||||
];
|
||||
for (const topicString of topicStrings) {
|
||||
it(`should fail to decode invalid gossip topic string ${topicString}`, async () => {
|
||||
expect(() => parseGossipTopic(forkDigestContext, topicString), topicString).to.throw();
|
||||
for (const topicStr of badTopicStrings) {
|
||||
it(`should fail to decode invalid gossip topic string ${topicStr}`, async () => {
|
||||
expect(() => parseGossipTopic(forkDigestContext, topicStr), topicStr).to.throw();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user