diff --git a/configs/mainnet/lightclient_patch.yaml b/configs/mainnet/lightclient_patch.yaml index 64c05a720..a9ddc16f6 100644 --- a/configs/mainnet/lightclient_patch.yaml +++ b/configs/mainnet/lightclient_patch.yaml @@ -2,12 +2,22 @@ CONFIG_NAME: "mainnet" +# Updated penalty values +# --------------------------------------------------------------- +# 3 * 2**24 (= 50,331,648) +HF1_INACTIVITY_PENALTY_QUOTIENT: 50331648 +# 2**6 (= 64) +HF1_MIN_SLASHING_PENALTY_QUOTIENT: 64 +# 2 +HF1_PROPORTIONAL_SLASHING_MULTIPLIER: 2 + + # Misc # --------------------------------------------------------------- # 2**10 (=1,024) SYNC_COMMITTEE_SIZE: 1024 # 2**6 (=64) -SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE: 64 +SYNC_SUBCOMMITTEE_SIZE: 64 # Time parameters diff --git a/configs/minimal/lightclient_patch.yaml b/configs/minimal/lightclient_patch.yaml index afe7d897e..56ce34591 100644 --- a/configs/minimal/lightclient_patch.yaml +++ b/configs/minimal/lightclient_patch.yaml @@ -2,12 +2,22 @@ CONFIG_NAME: "minimal" +# Updated penalty values +# --------------------------------------------------------------- +# 3 * 2**24 (= 50,331,648) +HF1_INACTIVITY_PENALTY_QUOTIENT: 50331648 +# 2**6 (= 64) +HF1_MIN_SLASHING_PENALTY_QUOTIENT: 64 +# 2 +HF1_PROPORTIONAL_SLASHING_MULTIPLIER: 2 + + # Misc # --------------------------------------------------------------- # [customized] SYNC_COMMITTEE_SIZE: 32 # [customized] -SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE: 16 +SYNC_SUBCOMMITTEE_SIZE: 16 # Time parameters diff --git a/setup.py b/setup.py index 6b8520075..aa74fcb05 100644 --- a/setup.py +++ b/setup.py @@ -449,7 +449,7 @@ class PySpecCommand(Command): specs/phase1/beacon-chain.md specs/phase1/shard-transition.md specs/phase1/fork-choice.md - specs/phase1/phase1-fork.md + specs/phase1/fork.md specs/phase1/shard-fork-choice.md specs/phase1/validator.md """ @@ -460,7 +460,7 @@ class PySpecCommand(Command): specs/phase0/validator.md specs/phase0/weak-subjectivity.md specs/lightclient/beacon-chain.md - specs/lightclient/lightclient-fork.md + specs/lightclient/fork.md """ # TODO: add specs/lightclient/sync-protocol.md back when the GeneralizedIndex helpers are included.
else: @@ -562,13 +562,12 @@ setup( url="https://github.com/ethereum/eth2.0-specs", include_package_data=False, package_data={'configs': ['*.yaml'], - 'specs': ['**/*.md'], 'eth2spec': ['VERSION.txt']}, package_dir={ "eth2spec": "tests/core/pyspec/eth2spec", "configs": "configs", - "specs": "specs" + "specs": "specs", }, packages=find_packages(where='tests/core/pyspec') + ['configs', 'specs'], py_modules=["eth2spec"], @@ -577,16 +576,17 @@ setup( extras_require={ "test": ["pytest>=4.4", "pytest-cov", "pytest-xdist"], "lint": ["flake8==3.7.7", "mypy==0.750"], + "generator": ["python-snappy==0.5.4"], }, install_requires=[ "eth-utils>=1.3.0,<2", "eth-typing>=2.1.0,<3.0.0", "pycryptodome==3.9.4", - "py_ecc==5.1.0", - "milagro_bls_binding==1.6.2", + "py_ecc==5.2.0", + "milagro_bls_binding==1.6.3", "dataclasses==0.6", "remerkleable==0.1.18", "ruamel.yaml==0.16.5", - "lru-dict==1.1.6" + "lru-dict==1.1.6", ] ) diff --git a/specs/lightclient/beacon-chain.md b/specs/lightclient/beacon-chain.md index fff8726bf..bc070908a 100644 --- a/specs/lightclient/beacon-chain.md +++ b/specs/lightclient/beacon-chain.md @@ -9,10 +9,11 @@ - [Introduction](#introduction) - [Custom types](#custom-types) - [Constants](#constants) - - [Validator action flags](#validator-action-flags) - - [Participation rewards](#participation-rewards) + - [Participation flag indices](#participation-flag-indices) + - [Participation flag fractions](#participation-flag-fractions) - [Misc](#misc) - [Configuration](#configuration) + - [Updated penalty values](#updated-penalty-values) - [Misc](#misc-1) - [Time parameters](#time-parameters) - [Domain types](#domain-types) @@ -26,7 +27,9 @@ - [`Predicates`](#predicates) - [`eth2_fast_aggregate_verify`](#eth2_fast_aggregate_verify) - [Misc](#misc-2) - - [`flags_and_numerators`](#flags_and_numerators) + - [`get_flag_indices_and_numerators`](#get_flag_indices_and_numerators) + - [`add_flag`](#add_flag) + - [`has_flag`](#has_flag) - [Beacon state accessors](#beacon-state-accessors) - [`get_sync_committee_indices`](#get_sync_committee_indices) - [`get_sync_committee`](#get_sync_committee) @@ -34,60 +37,60 @@ - [`get_unslashed_participating_indices`](#get_unslashed_participating_indices) - [`get_flag_deltas`](#get_flag_deltas) - [New `get_inactivity_penalty_deltas`](#new-get_inactivity_penalty_deltas) + - [Beacon state mutators](#beacon-state-mutators) + - [New `slash_validator`](#new-slash_validator) - [Block processing](#block-processing) - - [New `process_attestation`](#new-process_attestation) + - [Modified `process_attestation`](#modified-process_attestation) - [New `process_deposit`](#new-process_deposit) - [Sync committee processing](#sync-committee-processing) - [Epoch processing](#epoch-processing) - - [New `process_justification_and_finalization`](#new-process_justification_and_finalization) - - [New `process_rewards_and_penalties`](#new-process_rewards_and_penalties) - - [Sync committee updates](#sync-committee-updates) + - [Justification and finalization](#justification-and-finalization) + - [Rewards and penalties](#rewards-and-penalties) + - [Slashings](#slashings) - [Participation flags updates](#participation-flags-updates) + - [Sync committee updates](#sync-committee-updates) ## Introduction -This is a patch implementing the first hard fork to the beacon chain, tentatively named HF1 pending a permanent name. It has three main features: +This is a patch implementing the first hard fork to the beacon chain, tentatively named HF1 pending a permanent name. 
+It has four main features: * Light client support via sync committees * Incentive accounting reforms, reducing spec complexity and [TODO] reducing the cost of processing chains that have very little or zero participation for a long span of epochs +* Update penalty configuration values, moving them toward their planned maximally punitive configuration * Fork choice rule changes to address weaknesses recently discovered in the existing fork choice ## Custom types | Name | SSZ equivalent | Description | | - | - | - | -| `ValidatorFlag` | `uint8` | Bitflags to track validator actions with | +| `ParticipationFlags` | `uint8` | A succinct representation of 8 boolean participation flags | ## Constants -### Validator action flags - -This is formatted as an enum, with values `2**i` that can be combined as bit-flags. -The `0` value is reserved as default. Remaining bits in `ValidatorFlag` may be used in future hardforks. - -**Note**: Unlike Phase0, a `TIMELY_TARGET_FLAG` does not necessarily imply a `TIMELY_SOURCE_FLAG` -due to the varying slot delay requirements of each. +### Participation flag indices | Name | Value | | - | - | -| `TIMELY_HEAD_FLAG` | `ValidatorFlag(2**0)` (= 1) | -| `TIMELY_SOURCE_FLAG` | `ValidatorFlag(2**1)` (= 2) | -| `TIMELY_TARGET_FLAG` | `ValidatorFlag(2**2)` (= 4) | +| `TIMELY_HEAD_FLAG_INDEX` | `0` | +| `TIMELY_SOURCE_FLAG_INDEX` | `1` | +| `TIMELY_TARGET_FLAG_INDEX` | `2` | -### Participation rewards +### Participation flag fractions | Name | Value | | - | - | -| `TIMELY_HEAD_NUMERATOR` | `12` | -| `TIMELY_SOURCE_NUMERATOR` | `12` | -| `TIMELY_TARGET_NUMERATOR` | `32` | -| `REWARD_DENOMINATOR` | `64` | +| `TIMELY_HEAD_FLAG_NUMERATOR` | `12` | +| `TIMELY_SOURCE_FLAG_NUMERATOR` | `12` | +| `TIMELY_TARGET_FLAG_NUMERATOR` | `32` | +| `FLAG_DENOMINATOR` | `64` | -The reward fractions add up to 7/8, leaving the remaining 1/8 for proposer rewards and other future micro-rewards. +**Note**: The participation flag fractions add up to 7/8. +The remaining 1/8 is for proposer incentives and other future micro-incentives. ### Misc @@ -97,12 +100,24 @@ The reward fractions add up to 7/8, leaving the remaining 1/8 for proposer rewar ## Configuration +### Updated penalty values + +This patch updates a few configuration values to move penalty constants toward their final, maximum security values. + +*Note*: The spec does *not* override previous configuration values but instead creates new values and replaces usage throughout. + +| Name | Value | +| - | - | +| `HF1_INACTIVITY_PENALTY_QUOTIENT` | `uint64(3 * 2**24)` (= 50,331,648) | +| `HF1_MIN_SLASHING_PENALTY_QUOTIENT` | `uint64(2**6)` (= 64) | +| `HF1_PROPORTIONAL_SLASHING_MULTIPLIER` | `uint64(2)` | + ### Misc | Name | Value | | - | - | -| `SYNC_COMMITTEE_SIZE` | `uint64(2**10)` (= 1024) | -| `SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE` | `uint64(2**6)` (= 64) | +| `SYNC_COMMITTEE_SIZE` | `uint64(2**10)` (= 1,024) | +| `SYNC_SUBCOMMITTEE_SIZE` | `uint64(2**6)` (= 64) | ### Time parameters @@ -126,10 +141,19 @@ order and append any additional fields to the end.
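For intuition, the new configuration values above relate as follows (a standalone sketch in plain Python, not spec code): the flag numerators sum to 7/8 of `FLAG_DENOMINATOR`, and a sync committee of `SYNC_COMMITTEE_SIZE` pubkeys splits into `SYNC_COMMITTEE_SIZE // SYNC_SUBCOMMITTEE_SIZE` aggregate pubkeys.

```python
# Sanity checks of the constants introduced above (plain Python, independent of the spec code).
HF1_INACTIVITY_PENALTY_QUOTIENT = 3 * 2**24
assert HF1_INACTIVITY_PENALTY_QUOTIENT == 50_331_648

TIMELY_HEAD_FLAG_NUMERATOR = 12
TIMELY_SOURCE_FLAG_NUMERATOR = 12
TIMELY_TARGET_FLAG_NUMERATOR = 32
FLAG_DENOMINATOR = 64
numerator_sum = TIMELY_HEAD_FLAG_NUMERATOR + TIMELY_SOURCE_FLAG_NUMERATOR + TIMELY_TARGET_FLAG_NUMERATOR
assert numerator_sum == 56 and numerator_sum / FLAG_DENOMINATOR == 7 / 8  # 1/8 remains for the proposer

SYNC_COMMITTEE_SIZE = 2**10
SYNC_SUBCOMMITTEE_SIZE = 2**6
assert SYNC_COMMITTEE_SIZE // SYNC_SUBCOMMITTEE_SIZE == 16  # length of `pubkey_aggregates` below
```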
#### `BeaconBlockBody` ```python -class BeaconBlockBody(phase0.BeaconBlockBody): +class BeaconBlockBody(Container): + randao_reveal: BLSSignature + eth1_data: Eth1Data # Eth1 data vote + graffiti: Bytes32 # Arbitrary data + # Operations + proposer_slashings: List[ProposerSlashing, MAX_PROPOSER_SLASHINGS] + attester_slashings: List[AttesterSlashing, MAX_ATTESTER_SLASHINGS] + attestations: List[Attestation, MAX_ATTESTATIONS] + deposits: List[Deposit, MAX_DEPOSITS] + voluntary_exits: List[SignedVoluntaryExit, MAX_VOLUNTARY_EXITS] # Sync committee aggregate signature - sync_committee_bits: Bitvector[SYNC_COMMITTEE_SIZE] - sync_committee_signature: BLSSignature + sync_committee_bits: Bitvector[SYNC_COMMITTEE_SIZE] # [New in HF1] + sync_committee_signature: BLSSignature # [New in HF1] ``` #### `BeaconState` @@ -158,8 +182,8 @@ class BeaconState(Container): # Slashings slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR] # Per-epoch sums of slashed effective balances # Participation - previous_epoch_participation: List[ValidatorFlag, VALIDATOR_REGISTRY_LIMIT] - current_epoch_participation: List[ValidatorFlag, VALIDATOR_REGISTRY_LIMIT] + previous_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT] + current_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT] # Finality justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH] # Bit set for every recent justified epoch previous_justified_checkpoint: Checkpoint @@ -177,7 +201,7 @@ class BeaconState(Container): ```python class SyncCommittee(Container): pubkeys: Vector[BLSPubkey, SYNC_COMMITTEE_SIZE] - pubkey_aggregates: Vector[BLSPubkey, SYNC_COMMITTEE_SIZE // SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE] + pubkey_aggregates: Vector[BLSPubkey, SYNC_COMMITTEE_SIZE // SYNC_SUBCOMMITTEE_SIZE] ``` ## Helper functions @@ -198,25 +222,31 @@ def eth2_fast_aggregate_verify(pubkeys: Sequence[BLSPubkey], message: Bytes32, s ### Misc -#### `flags_and_numerators` +#### `get_flag_indices_and_numerators` ```python -def get_flags_and_numerators() -> Sequence[Tuple[ValidatorFlag, int]]: +def get_flag_indices_and_numerators() -> Sequence[Tuple[int, int]]: return ( - (TIMELY_HEAD_FLAG, TIMELY_HEAD_NUMERATOR), - (TIMELY_SOURCE_FLAG, TIMELY_SOURCE_NUMERATOR), - (TIMELY_TARGET_FLAG, TIMELY_TARGET_NUMERATOR) + (TIMELY_HEAD_FLAG_INDEX, TIMELY_HEAD_FLAG_NUMERATOR), + (TIMELY_SOURCE_FLAG_INDEX, TIMELY_SOURCE_FLAG_NUMERATOR), + (TIMELY_TARGET_FLAG_INDEX, TIMELY_TARGET_FLAG_NUMERATOR), ) ``` -```python -def add_validator_flags(flags: ValidatorFlag, add: ValidatorFlag) -> ValidatorFlag: - return flags | add -``` +#### `add_flag` ```python -def has_validator_flags(flags: ValidatorFlag, has: ValidatorFlag) -> bool: - return flags & has == has +def add_flag(flags: ParticipationFlags, flag_index: int) -> ParticipationFlags: + flag = ParticipationFlags(2**flag_index) + return flags | flag +``` + +#### `has_flag` + +```python +def has_flag(flags: ParticipationFlags, flag_index: int) -> bool: + flag = ParticipationFlags(2**flag_index) + return flags & flag == flag ``` ### Beacon state accessors @@ -257,8 +287,8 @@ def get_sync_committee(state: BeaconState, epoch: Epoch) -> SyncCommittee: validators = [state.validators[index] for index in indices] pubkeys = [validator.pubkey for validator in validators] aggregates = [ - bls.AggregatePKs(pubkeys[i:i + SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE]) - for i in range(0, len(pubkeys), SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE) + bls.AggregatePKs(pubkeys[i:i + SYNC_SUBCOMMITTEE_SIZE]) + for i in range(0, len(pubkeys), 
SYNC_SUBCOMMITTEE_SIZE) ] return SyncCommittee(pubkeys=pubkeys, pubkey_aggregates=aggregates) ``` @@ -277,19 +307,17 @@ def get_base_reward(state: BeaconState, index: ValidatorIndex) -> Gwei: #### `get_unslashed_participating_indices` ```python -def get_unslashed_participating_indices(state: BeaconState, flags: ValidatorFlag, epoch: Epoch) -> Set[ValidatorIndex]: +def get_unslashed_participating_indices(state: BeaconState, flag_index: int, epoch: Epoch) -> Set[ValidatorIndex]: """ - Retrieve the active validator indices of the given epoch, which are not slashed, and have all of the given flags. + Retrieve the active and unslashed validator indices for the given epoch and flag index. """ assert epoch in (get_previous_epoch(state), get_current_epoch(state)) if epoch == get_current_epoch(state): epoch_participation = state.current_epoch_participation else: epoch_participation = state.previous_epoch_participation - participating_indices = [ - index for index in get_active_validator_indices(state, epoch) - if has_validator_flags(epoch_participation[index], flags) - ] + active_validator_indices = get_active_validator_indices(state, epoch) + participating_indices = [i for i in active_validator_indices if has_flag(epoch_participation[i], flag_index)] return set(filter(lambda index: not state.validators[index].slashed, participating_indices)) ``` @@ -297,7 +325,7 @@ def get_unslashed_participating_indices(state: BeaconState, flags: ValidatorFlag ```python def get_flag_deltas(state: BeaconState, - flag: ValidatorFlag, + flag_index: int, numerator: uint64) -> Tuple[Sequence[Gwei], Sequence[Gwei]]: """ Compute the rewards and penalties associated with a particular duty, by scanning through the participation @@ -306,7 +334,7 @@ def get_flag_deltas(state: BeaconState, rewards = [Gwei(0)] * len(state.validators) penalties = [Gwei(0)] * len(state.validators) - unslashed_participating_indices = get_unslashed_participating_indices(state, flag, get_previous_epoch(state)) + unslashed_participating_indices = get_unslashed_participating_indices(state, flag_index, get_previous_epoch(state)) increment = EFFECTIVE_BALANCE_INCREMENT # Factored out from balances to avoid uint64 overflow unslashed_participating_increments = get_total_balance(state, unslashed_participating_indices) // increment active_increments = get_total_active_balance(state) // increment @@ -315,20 +343,21 @@ def get_flag_deltas(state: BeaconState, if index in unslashed_participating_indices: if is_in_inactivity_leak(state): # Optimal participation is fully rewarded to cancel the inactivity penalty - rewards[index] = base_reward * numerator // REWARD_DENOMINATOR + rewards[index] = base_reward * numerator // FLAG_DENOMINATOR else: rewards[index] = ( (base_reward * numerator * unslashed_participating_increments) - // (active_increments * REWARD_DENOMINATOR) + // (active_increments * FLAG_DENOMINATOR) ) else: - penalties[index] = base_reward * numerator // REWARD_DENOMINATOR + penalties[index] = base_reward * numerator // FLAG_DENOMINATOR return rewards, penalties ``` #### New `get_inactivity_penalty_deltas` -*Note*: The function `get_inactivity_penalty_deltas` is modified in the selection of matching target indices and the removal of `BASE_REWARDS_PER_EPOCH`. +*Note*: The function `get_inactivity_penalty_deltas` is modified in the selection of matching target indices +and the removal of `BASE_REWARDS_PER_EPOCH`. 
```python def get_inactivity_penalty_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence[Gwei]]: @@ -339,21 +368,56 @@ def get_inactivity_penalty_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], S """ penalties = [Gwei(0) for _ in range(len(state.validators))] if is_in_inactivity_leak(state): - reward_numerator_sum = sum(numerator for (_, numerator) in get_flags_and_numerators()) + reward_numerator_sum = sum(numerator for (_, numerator) in get_flag_indices_and_numerators()) matching_target_attesting_indices = get_unslashed_participating_indices( - state, TIMELY_TARGET_FLAG, get_previous_epoch(state) + state, TIMELY_TARGET_FLAG_INDEX, get_previous_epoch(state) ) for index in get_eligible_validator_indices(state): # If validator is performing optimally this cancels all attestation rewards for a neutral balance - penalties[index] += Gwei(get_base_reward(state, index) * reward_numerator_sum // REWARD_DENOMINATOR) + penalties[index] += Gwei(get_base_reward(state, index) * reward_numerator_sum // FLAG_DENOMINATOR) if index not in matching_target_attesting_indices: effective_balance = state.validators[index].effective_balance - penalties[index] += Gwei(effective_balance * get_finality_delay(state) // INACTIVITY_PENALTY_QUOTIENT) + penalties[index] += Gwei( + effective_balance * get_finality_delay(state) + // HF1_INACTIVITY_PENALTY_QUOTIENT + ) rewards = [Gwei(0) for _ in range(len(state.validators))] return rewards, penalties ``` +### Beacon state mutators + +#### New `slash_validator` + +*Note*: The function `slash_validator` is modified +with the substitution of `MIN_SLASHING_PENALTY_QUOTIENT` with `HF1_MIN_SLASHING_PENALTY_QUOTIENT`. + +```python +def slash_validator(state: BeaconState, + slashed_index: ValidatorIndex, + whistleblower_index: ValidatorIndex=None) -> None: + """ + Slash the validator with index ``slashed_index``. + """ + epoch = get_current_epoch(state) + initiate_validator_exit(state, slashed_index) + validator = state.validators[slashed_index] + validator.slashed = True + validator.withdrawable_epoch = max(validator.withdrawable_epoch, Epoch(epoch + EPOCHS_PER_SLASHINGS_VECTOR)) + state.slashings[epoch % EPOCHS_PER_SLASHINGS_VECTOR] += validator.effective_balance + decrease_balance(state, slashed_index, validator.effective_balance // HF1_MIN_SLASHING_PENALTY_QUOTIENT) + + # Apply proposer and whistleblower rewards + proposer_index = get_beacon_proposer_index(state) + if whistleblower_index is None: + whistleblower_index = proposer_index + whistleblower_reward = Gwei(validator.effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT) + proposer_reward = Gwei(whistleblower_reward // PROPOSER_REWARD_QUOTIENT) + increase_balance(state, proposer_index, proposer_reward) + increase_balance(state, whistleblower_index, Gwei(whistleblower_reward - proposer_reward)) +``` + ### Block processing ```python @@ -361,12 +425,11 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None: process_block_header(state, block) process_randao(state, block.body) process_eth1_data(state, block.body) - process_operations(state, block.body) - # Light client support - process_sync_committee(state, block.body) + process_operations(state, block.body) # [Modified in HF1] + process_sync_committee(state, block.body) # [New in HF1] ``` -#### New `process_attestation` +#### Modified `process_attestation` *Note*: The function `process_attestation` is modified to do incentive accounting with epoch participation flags. 
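As a rough intuition for the accounting below, here is a standalone sketch that mirrors the `add_flag`/`has_flag` helpers defined earlier, using a plain Python int in place of the spec's `ParticipationFlags` uint8 type:

```python
# Minimal sketch of participation-flag accounting (plain Python, not spec code).
TIMELY_HEAD_FLAG_INDEX, TIMELY_SOURCE_FLAG_INDEX, TIMELY_TARGET_FLAG_INDEX = 0, 1, 2

def add_flag(flags: int, flag_index: int) -> int:
    return flags | (2**flag_index)

def has_flag(flags: int, flag_index: int) -> bool:
    flag = 2**flag_index
    return flags & flag == flag

flags = 0b0000_0000  # a validator starts the epoch with no flags set
flags = add_flag(flags, TIMELY_SOURCE_FLAG_INDEX)
flags = add_flag(flags, TIMELY_TARGET_FLAG_INDEX)
assert flags == 0b0000_0110
assert has_flag(flags, TIMELY_TARGET_FLAG_INDEX)
assert not has_flag(flags, TIMELY_HEAD_FLAG_INDEX)
# `process_attestation` only credits a flag (and the proposer reward numerator) the first
# time it is set for a validator in an epoch, because `has_flag` guards the update.
```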
@@ -397,25 +460,25 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Verify signature assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) - # Participation flags - participation_flags = [] + # Participation flag indices + participation_flag_indices = [] if is_matching_head and is_matching_target and state.slot <= data.slot + MIN_ATTESTATION_INCLUSION_DELAY: - participation_flags.append(TIMELY_HEAD_FLAG) + participation_flag_indices.append(TIMELY_HEAD_FLAG_INDEX) if is_matching_source and state.slot <= data.slot + integer_squareroot(SLOTS_PER_EPOCH): - participation_flags.append(TIMELY_SOURCE_FLAG) + participation_flag_indices.append(TIMELY_SOURCE_FLAG_INDEX) if is_matching_target and state.slot <= data.slot + SLOTS_PER_EPOCH: - participation_flags.append(TIMELY_TARGET_FLAG) + participation_flag_indices.append(TIMELY_TARGET_FLAG_INDEX) # Update epoch participation flags proposer_reward_numerator = 0 for index in get_attesting_indices(state, data, attestation.aggregation_bits): - for flag, numerator in get_flags_and_numerators(): - if flag in participation_flags and not has_validator_flags(epoch_participation[index], flag): - epoch_participation[index] = add_validator_flags(epoch_participation[index], flag) - proposer_reward_numerator += get_base_reward(state, index) * numerator + for flag_index, flag_numerator in get_flag_indices_and_numerators(): + if flag_index in participation_flag_indices and not has_flag(epoch_participation[index], flag_index): + epoch_participation[index] = add_flag(epoch_participation[index], flag_index) + proposer_reward_numerator += get_base_reward(state, index) * flag_numerator # Reward proposer - proposer_reward = Gwei(proposer_reward_numerator // (REWARD_DENOMINATOR * PROPOSER_REWARD_QUOTIENT)) + proposer_reward = Gwei(proposer_reward_numerator // (FLAG_DENOMINATOR * PROPOSER_REWARD_QUOTIENT)) increase_balance(state, get_beacon_proposer_index(state), proposer_reward) ``` @@ -457,8 +520,8 @@ def process_deposit(state: BeaconState, deposit: Deposit) -> None: state.validators.append(get_validator_from_deposit(state, deposit)) state.balances.append(amount) # [Added in hf-1] Initialize empty participation flags for new validator - state.previous_epoch_participation.append(ValidatorFlag(0)) - state.current_epoch_participation.append(ValidatorFlag(0)) + state.previous_epoch_participation.append(ParticipationFlags(0b0000_0000)) + state.current_epoch_participation.append(ParticipationFlags(0b0000_0000)) else: # Increase balance by deposit amount index = ValidatorIndex(validator_pubkeys.index(pubkey)) @@ -480,40 +543,38 @@ def process_sync_committee(state: BeaconState, body: BeaconBlockBody) -> None: assert eth2_fast_aggregate_verify(participant_pubkeys, signing_root, body.sync_committee_signature) # Reward sync committee participants - total_proposer_reward = Gwei(0) + proposer_rewards = Gwei(0) active_validator_count = uint64(len(get_active_validator_indices(state, get_current_epoch(state)))) for participant_index in participant_indices: - base_reward = get_base_reward(state, participant_index) proposer_reward = get_proposer_reward(state, participant_index) + proposer_rewards += proposer_reward + base_reward = get_base_reward(state, participant_index) max_participant_reward = base_reward - proposer_reward - reward = Gwei(max_participant_reward * active_validator_count // len(committee_indices) // SLOTS_PER_EPOCH) + reward = Gwei(max_participant_reward * active_validator_count // 
(len(committee_indices) * SLOTS_PER_EPOCH)) increase_balance(state, participant_index, reward) - total_proposer_reward += proposer_reward # Reward beacon proposer - increase_balance(state, get_beacon_proposer_index(state), total_proposer_reward) + increase_balance(state, get_beacon_proposer_index(state), proposer_rewards) ``` ### Epoch processing ```python def process_epoch(state: BeaconState) -> None: - process_justification_and_finalization(state) # [Updated in HF1] - process_rewards_and_penalties(state) # [Updated in HF1] + process_justification_and_finalization(state) # [Modified in HF1] + process_rewards_and_penalties(state) # [Modified in HF1] process_registry_updates(state) - process_slashings(state) + process_slashings(state) # [Modified in HF1] process_eth1_data_reset(state) process_effective_balance_updates(state) process_slashings_reset(state) process_randao_mixes_reset(state) process_historical_roots_update(state) - # [Removed in HF1] -- process_participation_record_updates(state) - # [Added in HF1] - process_participation_flag_updates(state) - process_sync_committee_updates(state) + process_participation_flag_updates(state) # [New in HF1] + process_sync_committee_updates(state) # [New in HF1] ``` -#### New `process_justification_and_finalization` +#### Justification and finalization *Note*: The function `process_justification_and_finalization` is modified with `matching_target_attestations` replaced by `matching_target_indices`. @@ -532,12 +593,12 @@ def process_justification_and_finalization(state: BeaconState) -> None: state.previous_justified_checkpoint = state.current_justified_checkpoint state.justification_bits[1:] = state.justification_bits[:JUSTIFICATION_BITS_LENGTH - 1] state.justification_bits[0] = 0b0 - matching_target_indices = get_unslashed_participating_indices(state, TIMELY_TARGET_FLAG, previous_epoch) + matching_target_indices = get_unslashed_participating_indices(state, TIMELY_TARGET_FLAG_INDEX, previous_epoch) if get_total_balance(state, matching_target_indices) * 3 >= get_total_active_balance(state) * 2: state.current_justified_checkpoint = Checkpoint(epoch=previous_epoch, root=get_block_root(state, previous_epoch)) state.justification_bits[1] = 0b1 - matching_target_indices = get_unslashed_participating_indices(state, TIMELY_TARGET_FLAG, current_epoch) + matching_target_indices = get_unslashed_participating_indices(state, TIMELY_TARGET_FLAG_INDEX, current_epoch) if get_total_balance(state, matching_target_indices) * 3 >= get_total_active_balance(state) * 2: state.current_justified_checkpoint = Checkpoint(epoch=current_epoch, root=get_block_root(state, current_epoch)) @@ -559,16 +620,19 @@ def process_justification_and_finalization(state: BeaconState) -> None: state.finalized_checkpoint = old_current_justified_checkpoint ``` -#### New `process_rewards_and_penalties` +#### Rewards and penalties -*Note*: The function `process_rewards_and_penalties` is modified to use participation flag deltas. +*Note*: The function `process_rewards_and_penalties` is modified to support the incentive reforms. 
```python def process_rewards_and_penalties(state: BeaconState) -> None: # No rewards are applied at the end of `GENESIS_EPOCH` because rewards are for work done in the previous epoch if get_current_epoch(state) == GENESIS_EPOCH: return - flag_deltas = [get_flag_deltas(state, flag, numerator) for (flag, numerator) in get_flags_and_numerators()] + flag_deltas = [ + get_flag_deltas(state, flag_index, flag_numerator) + for (flag_index, flag_numerator) in get_flag_indices_and_numerators() + ] deltas = flag_deltas + [get_inactivity_penalty_deltas(state)] for (rewards, penalties) in deltas: for index in range(len(state.validators)): @@ -576,26 +640,41 @@ def process_rewards_and_penalties(state: BeaconState) -> None: decrease_balance(state, ValidatorIndex(index), penalties[index]) ``` +#### Slashings + +*Note*: The function `process_slashings` is modified to use `HF1_PROPORTIONAL_SLASHING_MULTIPLIER`. + +```python +def process_slashings(state: BeaconState) -> None: + epoch = get_current_epoch(state) + total_balance = get_total_active_balance(state) + adjusted_total_slashing_balance = min(sum(state.slashings) * HF1_PROPORTIONAL_SLASHING_MULTIPLIER, total_balance) + for index, validator in enumerate(state.validators): + if validator.slashed and epoch + EPOCHS_PER_SLASHINGS_VECTOR // 2 == validator.withdrawable_epoch: + increment = EFFECTIVE_BALANCE_INCREMENT # Factored out from penalty numerator to avoid uint64 overflow + penalty_numerator = validator.effective_balance // increment * adjusted_total_slashing_balance + penalty = penalty_numerator // total_balance * increment + decrease_balance(state, ValidatorIndex(index), penalty) +``` + +#### Participation flags updates + +*Note*: The function `process_participation_flag_updates` is new. + +```python +def process_participation_flag_updates(state: BeaconState) -> None: + state.previous_epoch_participation = state.current_epoch_participation + state.current_epoch_participation = [ParticipationFlags(0b0000_0000) for _ in range(len(state.validators))] +``` + #### Sync committee updates +*Note*: The function `process_sync_committee_updates` is new. 
+ ```python def process_sync_committee_updates(state: BeaconState) -> None: - """ - Call to ``proces_sync_committee_updates`` added to ``process_epoch`` in HF1 - """ next_epoch = get_current_epoch(state) + Epoch(1) if next_epoch % EPOCHS_PER_SYNC_COMMITTEE_PERIOD == 0: state.current_sync_committee = state.next_sync_committee state.next_sync_committee = get_sync_committee(state, next_epoch + EPOCHS_PER_SYNC_COMMITTEE_PERIOD) ``` - -#### Participation flags updates - -```python -def process_participation_flag_updates(state: BeaconState) -> None: - """ - Call to ``process_participation_flag_updates`` added to ``process_epoch`` in HF1 - """ - state.previous_epoch_participation = state.current_epoch_participation - state.current_epoch_participation = [ValidatorFlag(0) for _ in range(len(state.validators))] -``` diff --git a/specs/lightclient/lightclient-fork.md b/specs/lightclient/fork.md similarity index 93% rename from specs/lightclient/lightclient-fork.md rename to specs/lightclient/fork.md index aa0171b86..157e67dc9 100644 --- a/specs/lightclient/lightclient-fork.md +++ b/specs/lightclient/fork.md @@ -66,9 +66,9 @@ def upgrade_to_lightclient_patch(pre: phase0.BeaconState) -> BeaconState: randao_mixes=pre.randao_mixes, # Slashings slashings=pre.slashings, - # Attestations - previous_epoch_participation=[ValidatorFlag(0) for _ in range(len(pre.validators))], - current_epoch_participation=[ValidatorFlag(0) for _ in range(len(pre.validators))], + # Participation + previous_epoch_participation=[ParticipationFlags(0) for _ in range(len(pre.validators))], + current_epoch_participation=[ParticipationFlags(0) for _ in range(len(pre.validators))], # Finality justification_bits=pre.justification_bits, previous_justified_checkpoint=pre.previous_justified_checkpoint, diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 249d08799..02a578fb8 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -292,7 +292,7 @@ If one or more validations fail while processing the items in order, return eith There are two primary global topics used to propagate beacon blocks (`beacon_block`) and aggregate attestations (`beacon_aggregate_and_proof`) to all nodes on the network. -There are three additional global topics are used to propagate lower frequency validator messages +There are three additional global topics that are used to propagate lower frequency validator messages (`voluntary_exit`, `proposer_slashing`, and `attester_slashing`). ##### `beacon_block` diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 09b9b402a..14a5ecbba 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -132,10 +132,10 @@ The `withdrawal_credentials` field must be such that: * `withdrawal_credentials[1:12] == b'\x00' * 11` * `withdrawal_credentials[12:] == eth1_withdrawal_address` -After the merge of eth1 into eth2, +After the merge of the current Ethereum application layer (Eth1) into the Beacon Chain (Eth2), withdrawals to `eth1_withdrawal_address` will be normal ETH transfers (with no payload other than the validator's ETH) triggered by a user transaction that will set the gas price and gas limit as well pay fees. -As long as the account or contract with address `eth1_withdrawal_address` can receive ETH transfers +As long as the account or contract with address `eth1_withdrawal_address` can receive ETH transfers, the future withdrawal protocol is agnostic to all other implementation details. 
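For intuition, a minimal sketch of assembling such 32-byte credentials under the constraints listed above (plain Python; `ETH1_ADDRESS_WITHDRAWAL_PREFIX` stands for the single prefix byte defined earlier in this section, with an assumed illustrative value):

```python
# Hedged sketch of building Eth1-address withdrawal credentials.
ETH1_ADDRESS_WITHDRAWAL_PREFIX = b'\x01'  # assumed value for illustration only

def eth1_withdrawal_credentials(eth1_withdrawal_address: bytes) -> bytes:
    assert len(eth1_withdrawal_address) == 20  # an Eth1 address is 20 bytes
    credentials = ETH1_ADDRESS_WITHDRAWAL_PREFIX + b'\x00' * 11 + eth1_withdrawal_address
    # The constraints listed above hold by construction:
    assert len(credentials) == 32
    assert credentials[1:12] == b'\x00' * 11
    assert credentials[12:] == eth1_withdrawal_address
    return credentials

# Example: credentials that would direct post-merge withdrawals to this Eth1 address.
print(eth1_withdrawal_credentials(bytes.fromhex('11' * 20)).hex())
```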
### Submit deposit diff --git a/specs/phase1/phase1-fork.md b/specs/phase1/fork.md similarity index 100% rename from specs/phase1/phase1-fork.md rename to specs/phase1/fork.md diff --git a/tests/core/gen_helpers/gen_from_tests/gen.py b/tests/core/gen_helpers/gen_from_tests/gen.py deleted file mode 100644 index 902b0954a..000000000 --- a/tests/core/gen_helpers/gen_from_tests/gen.py +++ /dev/null @@ -1,40 +0,0 @@ -from inspect import getmembers, isfunction -from typing import Any, Iterable - -from gen_base.gen_typing import TestCase - - -def generate_from_tests(runner_name: str, handler_name: str, src: Any, - fork_name: str, bls_active: bool = True) -> Iterable[TestCase]: - """ - Generate a list of test cases by running tests from the given src in generator-mode. - :param runner_name: to categorize the test in general as. - :param handler_name: to categorize the test specialization as. - :param src: to retrieve tests from (discovered using inspect.getmembers). - :param fork_name: to run tests against particular phase and/or fork. - (if multiple forks are applicable, indicate the last fork) - :param bls_active: optional, to override BLS switch preference. Defaults to True. - :return: an iterable of test cases. - """ - fn_names = [ - name for (name, _) in getmembers(src, isfunction) - if name.startswith('test_') - ] - print("generating test vectors from tests source: %s" % src.__name__) - for name in fn_names: - tfn = getattr(src, name) - - # strip off the `test_` - case_name = name - if case_name.startswith('test_'): - case_name = case_name[5:] - - yield TestCase( - fork_name=fork_name, - runner_name=runner_name, - handler_name=handler_name, - suite_name='pyspec_tests', - case_name=case_name, - # TODO: with_all_phases and other per-phase tooling, should be replaced with per-fork equivalent. - case_fn=lambda: tfn(generator_mode=True, phase=fork_name, bls_active=bls_active) - ) diff --git a/tests/core/gen_helpers/requirements.txt b/tests/core/gen_helpers/requirements.txt deleted file mode 100644 index e7cdd30ea..000000000 --- a/tests/core/gen_helpers/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -ruamel.yaml==0.16.5 -eth-utils==1.6.0 -pytest>=4.4 diff --git a/tests/core/gen_helpers/setup.py b/tests/core/gen_helpers/setup.py deleted file mode 100644 index e9fc1a787..000000000 --- a/tests/core/gen_helpers/setup.py +++ /dev/null @@ -1,11 +0,0 @@ -from distutils.core import setup - -setup( - name='gen_helpers', - packages=['gen_base', 'gen_from_tests'], - install_requires=[ - "ruamel.yaml==0.16.5", - "eth-utils==1.6.0", - "pytest>=4.4", - ] -) diff --git a/tests/core/pyspec/eth2spec/VERSION.txt b/tests/core/pyspec/eth2spec/VERSION.txt index afaf360d3..7f207341d 100644 --- a/tests/core/pyspec/eth2spec/VERSION.txt +++ b/tests/core/pyspec/eth2spec/VERSION.txt @@ -1 +1 @@ -1.0.0 \ No newline at end of file +1.0.1 \ No newline at end of file diff --git a/tests/core/gen_helpers/README.md b/tests/core/pyspec/eth2spec/gen_helpers/README.md similarity index 96% rename from tests/core/gen_helpers/README.md rename to tests/core/pyspec/eth2spec/gen_helpers/README.md index 20b48db83..d39ee66ae 100644 --- a/tests/core/gen_helpers/README.md +++ b/tests/core/pyspec/eth2spec/gen_helpers/README.md @@ -4,7 +4,7 @@ A util to quickly write new test suite generators with. -See [Generators documentation](../../generators/README.md) for integration details. +See [Generators documentation](../../../../generators/README.md) for integration details. 
Options: diff --git a/tests/core/gen_helpers/gen_base/__init__.py b/tests/core/pyspec/eth2spec/gen_helpers/__init__.py similarity index 100% rename from tests/core/gen_helpers/gen_base/__init__.py rename to tests/core/pyspec/eth2spec/gen_helpers/__init__.py diff --git a/tests/core/gen_helpers/gen_from_tests/__init__.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/__init__.py similarity index 100% rename from tests/core/gen_helpers/gen_from_tests/__init__.py rename to tests/core/pyspec/eth2spec/gen_helpers/gen_base/__init__.py diff --git a/tests/core/gen_helpers/gen_base/gen_runner.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py similarity index 92% rename from tests/core/gen_helpers/gen_base/gen_runner.py rename to tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py index a22073c00..f14f7e323 100644 --- a/tests/core/gen_helpers/gen_base/gen_runner.py +++ b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py @@ -8,11 +8,13 @@ from ruamel.yaml import ( YAML, ) -from gen_base.gen_typing import TestProvider +from snappy import compress from eth2spec.test import context from eth2spec.test.exceptions import SkippedTest +from .gen_typing import TestProvider + # Flag that the runner does NOT run test via pytest context.is_pytest = False @@ -119,10 +121,11 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]): print(f"generating tests with config '{config_name}' ...") for test_case in tprov.make_cases(): - case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \ - / Path(test_case.runner_name) / Path(test_case.handler_name) \ - / Path(test_case.suite_name) / Path(test_case.case_name) - + case_dir = ( + Path(output_dir) / Path(config_name) / Path(test_case.fork_name) + / Path(test_case.runner_name) / Path(test_case.handler_name) + / Path(test_case.suite_name) / Path(test_case.case_name) + ) if case_dir.exists(): if not args.force: print(f'Skipping already existing test: {case_dir}') @@ -180,7 +183,8 @@ def dump_yaml_fn(data: Any, name: str, file_mode: str, yaml_encoder: YAML): def dump_ssz_fn(data: AnyStr, name: str, file_mode: str): def dump(case_path: Path): - out_path = case_path / Path(name + '.ssz') + out_path = case_path / Path(name + '.ssz_snappy') + compressed = compress(data) with out_path.open(file_mode + 'b') as f: # write in raw binary mode - f.write(data) + f.write(compressed) return dump diff --git a/tests/core/gen_helpers/gen_base/gen_typing.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_typing.py similarity index 100% rename from tests/core/gen_helpers/gen_base/gen_typing.py rename to tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_typing.py diff --git a/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/__init__.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py new file mode 100644 index 000000000..c090869d5 --- /dev/null +++ b/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py @@ -0,0 +1,105 @@ +from importlib import reload, import_module +from inspect import getmembers, isfunction +from typing import Any, Callable, Dict, Iterable, Optional + +from eth2spec.config import config_util +from eth2spec.utils import bls +from eth2spec.test.context import ALL_CONFIGS, TESTGEN_FORKS, SpecForkName, ConfigName + +from eth2spec.gen_helpers.gen_base import gen_runner +from 
eth2spec.gen_helpers.gen_base.gen_typing import TestCase, TestProvider + + +def generate_from_tests(runner_name: str, handler_name: str, src: Any, + fork_name: SpecForkName, bls_active: bool = True, + phase: Optional[str]=None) -> Iterable[TestCase]: + """ + Generate a list of test cases by running tests from the given src in generator-mode. + :param runner_name: to categorize the test in general as. + :param handler_name: to categorize the test specialization as. + :param src: to retrieve tests from (discovered using inspect.getmembers). + :param fork_name: the folder name for these tests. + (if multiple forks are applicable, indicate the last fork) + :param bls_active: optional, to override BLS switch preference. Defaults to True. + :param phase: optional, to run tests against a particular spec version. Default to `fork_name` value. + :return: an iterable of test cases. + """ + fn_names = [ + name for (name, _) in getmembers(src, isfunction) + if name.startswith('test_') + ] + + if phase is None: + phase = fork_name + + print("generating test vectors from tests source: %s" % src.__name__) + for name in fn_names: + tfn = getattr(src, name) + + # strip off the `test_` + case_name = name + if case_name.startswith('test_'): + case_name = case_name[5:] + + yield TestCase( + fork_name=fork_name, + runner_name=runner_name, + handler_name=handler_name, + suite_name='pyspec_tests', + case_name=case_name, + # TODO: with_all_phases and other per-phase tooling, should be replaced with per-fork equivalent. + case_fn=lambda: tfn(generator_mode=True, phase=phase, bls_active=bls_active) + ) + + +def get_provider(create_provider_fn: Callable[[SpecForkName, str, str, ConfigName], TestProvider], + config_name: ConfigName, + fork_name: SpecForkName, + all_mods: Dict[str, Dict[str, str]]) -> Iterable[TestProvider]: + for key, mod_name in all_mods[fork_name].items(): + yield create_provider_fn( + fork_name=fork_name, + handler_name=key, + tests_src_mod_name=mod_name, + config_name=config_name, + ) + + +def get_create_provider_fn( + runner_name: str, config_name: ConfigName, specs: Iterable[Any] +) -> Callable[[SpecForkName, str, str, ConfigName], TestProvider]: + def prepare_fn(configs_path: str) -> str: + config_util.prepare_config(configs_path, config_name) + for spec in specs: + reload(spec) + bls.use_milagro() + return config_name + + def create_provider(fork_name: SpecForkName, handler_name: str, + tests_src_mod_name: str, config_name: ConfigName) -> TestProvider: + def cases_fn() -> Iterable[TestCase]: + tests_src = import_module(tests_src_mod_name) + return generate_from_tests( + runner_name=runner_name, + handler_name=handler_name, + src=tests_src, + fork_name=fork_name, + ) + + return TestProvider(prepare=prepare_fn, make_cases=cases_fn) + return create_provider + + +def run_state_test_generators(runner_name: str, specs: Iterable[Any], all_mods: Dict[str, Dict[str, str]]) -> None: + """ + Generate all available state tests of `TESTGEN_FORKS` forks of `ALL_CONFIGS` configs of the given runner. 
+ """ + for config_name in ALL_CONFIGS: + for fork_name in TESTGEN_FORKS: + if fork_name in all_mods: + gen_runner.run_generator(runner_name, get_provider( + create_provider_fn=get_create_provider_fn(runner_name, config_name, specs), + config_name=config_name, + fork_name=fork_name, + all_mods=all_mods, + )) diff --git a/tests/core/pyspec/eth2spec/test/context.py b/tests/core/pyspec/eth2spec/test/context.py index 4f07a790a..e197124c0 100644 --- a/tests/core/pyspec/eth2spec/test/context.py +++ b/tests/core/pyspec/eth2spec/test/context.py @@ -37,6 +37,10 @@ ALL_PHASES = (PHASE0, PHASE1, LIGHTCLIENT_PATCH) MAINNET = ConfigName('mainnet') MINIMAL = ConfigName('minimal') +ALL_CONFIGS = (MINIMAL, MAINNET) + +# The forks that output to the test vectors. +TESTGEN_FORKS = (PHASE0, LIGHTCLIENT_PATCH) # TODO: currently phases are defined as python modules. # It would be better if they would be more well-defined interfaces for stronger typing. @@ -78,7 +82,7 @@ def _prepare_state(balances_fn: Callable[[Any], Sequence[int]], threshold_fn: Ca # TODO: instead of upgrading a test phase0 genesis state we can also write a phase1 state helper. # Decide based on performance/consistency results later. state = phases[PHASE1].upgrade_to_phase1(state) - elif spec.fork == LIGHTCLIENT_PATCH: # not generalizing this just yet, unclear final spec fork/patch order. + elif spec.fork == LIGHTCLIENT_PATCH: state = phases[LIGHTCLIENT_PATCH].upgrade_to_lightclient_patch(state) return state @@ -336,12 +340,13 @@ def with_phases(phases, other_phases=None): available_phases = set(run_phases) if other_phases is not None: - available_phases += set(other_phases) + available_phases |= set(other_phases) # TODO: test state is dependent on phase0 but is immediately transitioned to phase1. # A new state-creation helper for phase 1 may be in place, and then phase1+ tests can run without phase0 available_phases.add(PHASE0) + # Populate all phases for multi-phase tests phase_dir = {} if PHASE0 in available_phases: phase_dir[PHASE0] = spec_phase0 diff --git a/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py b/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py index 87b4f5ca0..89acb3417 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py +++ b/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py @@ -1,8 +1,16 @@ +from eth2spec.test.context import is_post_lightclient_patch from eth2spec.test.helpers.block_header import sign_block_header from eth2spec.test.helpers.keys import pubkey_to_privkey from eth2spec.test.helpers.state import get_balance +def get_min_slashing_penalty_quotient(spec): + if is_post_lightclient_patch(spec): + return spec.HF1_MIN_SLASHING_PENALTY_QUOTIENT + else: + return spec.MIN_SLASHING_PENALTY_QUOTIENT + + def check_proposer_slashing_effect(spec, pre_state, state, slashed_index): slashed_validator = state.validators[slashed_index] assert slashed_validator.slashed @@ -10,7 +18,7 @@ def check_proposer_slashing_effect(spec, pre_state, state, slashed_index): assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH proposer_index = spec.get_beacon_proposer_index(state) - slash_penalty = state.validators[slashed_index].effective_balance // spec.MIN_SLASHING_PENALTY_QUOTIENT + slash_penalty = state.validators[slashed_index].effective_balance // get_min_slashing_penalty_quotient(spec) whistleblower_reward = state.validators[slashed_index].effective_balance // spec.WHISTLEBLOWER_REWARD_QUOTIENT if proposer_index != slashed_index: # slashed validator lost initial 
slash penalty diff --git a/tests/core/pyspec/eth2spec/test/helpers/rewards.py b/tests/core/pyspec/eth2spec/test/helpers/rewards.py index 2499bcffe..19ed3f691 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/rewards.py +++ b/tests/core/pyspec/eth2spec/test/helpers/rewards.py @@ -41,13 +41,13 @@ def run_deltas(spec, state): if is_post_lightclient_patch(spec): def get_source_deltas(state): - return spec.get_flag_deltas(state, spec.TIMELY_SOURCE_FLAG, spec.TIMELY_SOURCE_NUMERATOR) + return spec.get_flag_deltas(state, spec.TIMELY_SOURCE_FLAG_INDEX, spec.TIMELY_SOURCE_FLAG_NUMERATOR) def get_head_deltas(state): - return spec.get_flag_deltas(state, spec.TIMELY_HEAD_FLAG, spec.TIMELY_HEAD_NUMERATOR) + return spec.get_flag_deltas(state, spec.TIMELY_HEAD_FLAG_INDEX, spec.TIMELY_HEAD_FLAG_NUMERATOR) def get_target_deltas(state): - return spec.get_flag_deltas(state, spec.TIMELY_TARGET_FLAG, spec.TIMELY_TARGET_NUMERATOR) + return spec.get_flag_deltas(state, spec.TIMELY_TARGET_FLAG_INDEX, spec.TIMELY_TARGET_FLAG_NUMERATOR) yield from run_attestation_component_deltas( spec, @@ -74,13 +74,13 @@ def run_deltas(spec, state): yield from run_get_inactivity_penalty_deltas(spec, state) -def deltas_name_to_flag(spec, deltas_name): +def deltas_name_to_flag_index(spec, deltas_name): if 'source' in deltas_name: - return spec.TIMELY_SOURCE_FLAG + return spec.TIMELY_SOURCE_FLAG_INDEX elif 'head' in deltas_name: - return spec.TIMELY_HEAD_FLAG + return spec.TIMELY_HEAD_FLAG_INDEX elif 'target' in deltas_name: - return spec.TIMELY_TARGET_FLAG + return spec.TIMELY_TARGET_FLAG_INDEX raise ValueError("Wrong deltas_name %s" % deltas_name) @@ -98,7 +98,7 @@ def run_attestation_component_deltas(spec, state, component_delta_fn, matching_a matching_indices = spec.get_unslashed_attesting_indices(state, matching_attestations) else: matching_indices = spec.get_unslashed_participating_indices( - state, deltas_name_to_flag(spec, deltas_name), spec.get_previous_epoch(state) + state, deltas_name_to_flag_index(spec, deltas_name), spec.get_previous_epoch(state) ) eligible_indices = spec.get_eligible_validator_indices(state) @@ -187,9 +187,9 @@ def run_get_inactivity_penalty_deltas(spec, state): matching_attesting_indices = spec.get_unslashed_attesting_indices(state, matching_attestations) else: matching_attesting_indices = spec.get_unslashed_participating_indices( - state, spec.TIMELY_TARGET_FLAG, spec.get_previous_epoch(state) + state, spec.TIMELY_TARGET_FLAG_INDEX, spec.get_previous_epoch(state) ) - reward_numerator_sum = sum(numerator for (_, numerator) in spec.get_flags_and_numerators()) + reward_numerator_sum = sum(numerator for (_, numerator) in spec.get_flag_indices_and_numerators()) eligible_indices = spec.get_eligible_validator_indices(state) for index in range(len(state.validators)): @@ -205,7 +205,7 @@ def run_get_inactivity_penalty_deltas(spec, state): base_reward = spec.get_base_reward(state, index) base_penalty = cancel_base_rewards_per_epoch * base_reward - spec.get_proposer_reward(state, index) else: - base_penalty = spec.get_base_reward(state, index) * reward_numerator_sum // spec.REWARD_DENOMINATOR + base_penalty = spec.get_base_reward(state, index) * reward_numerator_sum // spec.FLAG_DENOMINATOR if not has_enough_for_reward(spec, state, index): assert penalties[index] == 0 @@ -314,7 +314,7 @@ def run_test_full_but_partial_participation(spec, state, rng=Random(5522)): else: for index in range(len(state.validators)): if rng.choice([True, False]): - state.previous_epoch_participation[index] = spec.ValidatorFlag(0) + 
state.previous_epoch_participation[index] = spec.ParticipationFlags(0b0000_0000) yield from run_deltas(spec, state) @@ -328,7 +328,7 @@ def run_test_partial(spec, state, fraction_filled): state.previous_epoch_attestations = state.previous_epoch_attestations[:num_attestations] else: for index in range(int(len(state.validators) * fraction_filled)): - state.previous_epoch_participation[index] = spec.ValidatorFlag(0) + state.previous_epoch_participation[index] = spec.ParticipationFlags(0b0000_0000) yield from run_deltas(spec, state) @@ -394,7 +394,7 @@ def run_test_some_very_low_effective_balances_that_did_not_attest(spec, state): else: index = 0 state.validators[index].effective_balance = 1 - state.previous_epoch_participation[index] = spec.ValidatorFlag(0) + state.previous_epoch_participation[index] = spec.ParticipationFlags(0b0000_0000) yield from run_deltas(spec, state) @@ -521,23 +521,24 @@ def run_test_full_random(spec, state, rng=Random(8020)): is_timely_correct_head = rng.randint(0, 2) != 0 flags = state.previous_epoch_participation[index] - def set_flag(f, v): + def set_flag(index, value): nonlocal flags - if v: - flags |= f + flag = spec.ParticipationFlags(2**index) + if value: + flags |= flag else: - flags &= 0xff ^ f + flags &= 0xff ^ flag - set_flag(spec.TIMELY_HEAD_FLAG, is_timely_correct_head) + set_flag(spec.TIMELY_HEAD_FLAG_INDEX, is_timely_correct_head) if is_timely_correct_head: # If timely head, then must be timely target - set_flag(spec.TIMELY_TARGET_FLAG, True) + set_flag(spec.TIMELY_TARGET_FLAG_INDEX, True) # If timely head, then must be timely source - set_flag(spec.TIMELY_SOURCE_FLAG, True) + set_flag(spec.TIMELY_SOURCE_FLAG_INDEX, True) else: # ~50% of remaining have bad target or not timely enough - set_flag(spec.TIMELY_TARGET_FLAG, rng.choice([True, False])) + set_flag(spec.TIMELY_TARGET_FLAG_INDEX, rng.choice([True, False])) # ~50% of remaining have bad source or not timely enough - set_flag(spec.TIMELY_SOURCE_FLAG, rng.choice([True, False])) + set_flag(spec.TIMELY_SOURCE_FLAG_INDEX, rng.choice([True, False])) state.previous_epoch_participation[index] = flags yield from run_deltas(spec, state) diff --git a/tests/core/pyspec/eth2spec/test/lightclient_patch/fork/__init__.py b/tests/core/pyspec/eth2spec/test/lightclient_patch/fork/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/pyspec/eth2spec/test/lightclient_patch/fork/test_fork.py b/tests/core/pyspec/eth2spec/test/lightclient_patch/fork/test_fork.py new file mode 100644 index 000000000..27d181510 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/lightclient_patch/fork/test_fork.py @@ -0,0 +1,113 @@ +from eth2spec.test.context import ( + PHASE0, LIGHTCLIENT_PATCH, + with_phases, + with_custom_state, + spec_test, with_state, + low_balances, misc_balances, large_validator_set, +) +from eth2spec.test.utils import with_meta_tags +from eth2spec.test.helpers.state import ( + next_epoch, + next_epoch_via_block, +) + + +HF1_FORK_TEST_META_TAGS = { + 'fork': 'altair', +} + + +def run_fork_test(post_spec, pre_state): + yield 'pre', pre_state + + post_state = post_spec.upgrade_to_lightclient_patch(pre_state) + + # Stable fields + stable_fields = [ + 'genesis_time', 'genesis_validators_root', 'slot', + # History + 'latest_block_header', 'block_roots', 'state_roots', 'historical_roots', + # Eth1 + 'eth1_data', 'eth1_data_votes', 'eth1_deposit_index', + # Registry + 'validators', 'balances', + # Randomness + 'randao_mixes', + # Slashings + 'slashings', + # Finality + 'justification_bits', 
'previous_justified_checkpoint', 'current_justified_checkpoint', 'finalized_checkpoint', + ] + for field in stable_fields: + assert getattr(pre_state, field) == getattr(post_state, field) + + # Modified fields + modified_fields = ['fork'] + for field in modified_fields: + assert getattr(pre_state, field) != getattr(post_state, field) + + assert pre_state.fork.current_version == post_state.fork.previous_version + assert post_state.fork.current_version == post_spec.LIGHTCLIENT_PATCH_FORK_VERSION + assert post_state.fork.epoch == post_spec.get_current_epoch(post_state) + + yield 'post', post_state + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@spec_test +@with_state +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_base_state(spec, phases, state): + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@spec_test +@with_state +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_next_epoch(spec, phases, state): + next_epoch(spec, state) + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@spec_test +@with_state +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_next_epoch_with_block(spec, phases, state): + next_epoch_via_block(spec, state) + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@spec_test +@with_state +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_many_next_epoch(spec, phases, state): + for _ in range(3): + next_epoch(spec, state) + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.EJECTION_BALANCE) +@spec_test +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_random_low_balances(spec, phases, state): + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.EJECTION_BALANCE) +@spec_test +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_random_misc_balances(spec, phases, state): + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) + + +@with_phases(phases=[PHASE0], other_phases=[LIGHTCLIENT_PATCH]) +@with_custom_state(balances_fn=large_validator_set, threshold_fn=lambda spec: spec.EJECTION_BALANCE) +@spec_test +@with_meta_tags(HF1_FORK_TEST_META_TAGS) +def test_fork_random_large_validator_set(spec, phases, state): + yield from run_fork_test(phases[LIGHTCLIENT_PATCH], state) diff --git a/tests/core/pyspec/eth2spec/test/lightclient_patch/sanity/__init__.py b/tests/core/pyspec/eth2spec/test/lightclient_patch/sanity/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/pyspec/eth2spec/test/phase0/block_processing/test_process_attester_slashing.py b/tests/core/pyspec/eth2spec/test/phase0/block_processing/test_process_attester_slashing.py index 82d490311..21d9363f7 100644 --- a/tests/core/pyspec/eth2spec/test/phase0/block_processing/test_process_attester_slashing.py +++ b/tests/core/pyspec/eth2spec/test/phase0/block_processing/test_process_attester_slashing.py @@ -4,6 +4,7 @@ from eth2spec.test.context import ( from eth2spec.test.helpers.attestations import sign_indexed_attestation from eth2spec.test.helpers.attester_slashings import 
get_valid_attester_slashing, \ get_indexed_attestation_participants, get_attestation_2_data, get_attestation_1_data +from eth2spec.test.helpers.proposer_slashings import get_min_slashing_penalty_quotient from eth2spec.test.helpers.state import ( get_balance, next_epoch_via_block, @@ -70,7 +71,7 @@ def run_attester_slashing_processing(spec, state, attester_slashing, valid=True) expected_balance = ( pre_proposer_balance + total_proposer_rewards - - pre_slashings[proposer_index] // spec.MIN_SLASHING_PENALTY_QUOTIENT + - pre_slashings[proposer_index] // get_min_slashing_penalty_quotient(spec) ) assert get_balance(state, proposer_index) == expected_balance diff --git a/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_justification_and_finalization.py b/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_justification_and_finalization.py index 89783f987..274d67134 100644 --- a/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_justification_and_finalization.py +++ b/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_justification_and_finalization.py @@ -78,10 +78,10 @@ def add_mock_attestations(spec, state, epoch, source, target, sufficient_support else: for i, index in enumerate(committee): if aggregation_bits[i]: - epoch_participation[index] |= spec.TIMELY_HEAD_FLAG - epoch_participation[index] |= spec.TIMELY_SOURCE_FLAG + epoch_participation[index] |= spec.ParticipationFlags(2**spec.TIMELY_HEAD_FLAG_INDEX) + epoch_participation[index] |= spec.ParticipationFlags(2**spec.TIMELY_SOURCE_FLAG_INDEX) if not messed_up_target: - epoch_participation[index] |= spec.TIMELY_TARGET_FLAG + epoch_participation[index] |= spec.ParticipationFlags(2**spec.TIMELY_TARGET_FLAG_INDEX) def get_checkpoints(spec, epoch): diff --git a/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_slashings.py b/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_slashings.py index 2e09f5c8a..34f1e89c6 100644 --- a/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_slashings.py +++ b/tests/core/pyspec/eth2spec/test/phase0/epoch_processing/test_process_slashings.py @@ -1,4 +1,4 @@ -from eth2spec.test.context import spec_state_test, with_all_phases +from eth2spec.test.context import spec_state_test, with_all_phases, is_post_lightclient_patch from eth2spec.test.helpers.epoch_processing import ( run_epoch_processing_with, run_epoch_processing_to ) @@ -23,12 +23,19 @@ def slash_validators(spec, state, indices, out_epochs): ] = total_slashed_balance +def get_slashing_multiplier(spec): + if is_post_lightclient_patch(spec): + return spec.HF1_PROPORTIONAL_SLASHING_MULTIPLIER + else: + return spec.PROPORTIONAL_SLASHING_MULTIPLIER + + @with_all_phases @spec_state_test def test_max_penalties(spec, state): # Slashed count to ensure that enough validators are slashed to induce maximum penalties slashed_count = min( - (len(state.validators) // spec.PROPORTIONAL_SLASHING_MULTIPLIER) + 1, + (len(state.validators) // get_slashing_multiplier(spec)) + 1, # Can't slash more than validator count! 
len(state.validators) ) @@ -40,7 +47,7 @@ def test_max_penalties(spec, state): total_balance = spec.get_total_active_balance(state) total_penalties = sum(state.slashings) - assert total_balance // spec.PROPORTIONAL_SLASHING_MULTIPLIER <= total_penalties + assert total_balance // get_slashing_multiplier(spec) <= total_penalties yield from run_process_slashings(spec, state) @@ -50,7 +57,30 @@ def test_max_penalties(spec, state): @with_all_phases @spec_state_test -def test_small_penalty(spec, state): +def test_low_penalty(spec, state): + # Slashed count is one tenth of validator set + slashed_count = (len(state.validators) // 10) + 1 + out_epoch = spec.get_current_epoch(state) + (spec.EPOCHS_PER_SLASHINGS_VECTOR // 2) + + slashed_indices = list(range(slashed_count)) + slash_validators(spec, state, slashed_indices, [out_epoch] * slashed_count) + + pre_state = state.copy() + + yield from run_process_slashings(spec, state) + + for i in slashed_indices: + assert 0 < state.balances[i] < pre_state.balances[i] + + +@with_all_phases +@spec_state_test +def test_minimal_penalty(spec, state): + # + # When very few slashings, the resulting slashing penalty gets rounded down + # to zero so the result of `process_slashings` is null + # + # Just the bare minimum for this one validator state.balances[0] = state.validators[0].effective_balance = spec.EJECTION_BALANCE # All the other validators get the maximum. @@ -74,11 +104,13 @@ def test_small_penalty(spec, state): expected_penalty = ( state.validators[0].effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT - * (3 * total_penalties) + * (get_slashing_multiplier(spec) * total_penalties) // total_balance * spec.EFFECTIVE_BALANCE_INCREMENT ) - assert state.balances[0] == pre_slash_balances[0] - expected_penalty + + assert expected_penalty == 0 + assert state.balances[0] == pre_slash_balances[0] @with_all_phases @@ -96,7 +128,7 @@ def test_scaled_penalties(spec, state): state.slashings[5] = base + (incr * 6) state.slashings[spec.EPOCHS_PER_SLASHINGS_VECTOR - 1] = base + (incr * 7) - slashed_count = len(state.validators) // (spec.PROPORTIONAL_SLASHING_MULTIPLIER + 1) + slashed_count = len(state.validators) // (get_slashing_multiplier(spec) + 1) assert slashed_count > 10 @@ -134,7 +166,7 @@ def test_scaled_penalties(spec, state): v = state.validators[i] expected_penalty = ( v.effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT - * (spec.PROPORTIONAL_SLASHING_MULTIPLIER * total_penalties) + * (get_slashing_multiplier(spec) * total_penalties) // (total_balance) * spec.EFFECTIVE_BALANCE_INCREMENT ) diff --git a/tests/core/pyspec/eth2spec/test/phase0/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/phase0/sanity/test_blocks.py index 1834b290f..98ffbd590 100644 --- a/tests/core/pyspec/eth2spec/test/phase0/sanity/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/phase0/sanity/test_blocks.py @@ -806,7 +806,7 @@ def test_attestation(spec, state): assert spec.hash_tree_root(state.previous_epoch_attestations) == pre_current_attestations_root else: for index in range(len(state.validators)): - assert state.current_epoch_participation[index] == 0 + assert state.current_epoch_participation[index] == spec.ParticipationFlags(0b0000_0000) assert spec.hash_tree_root(state.previous_epoch_participation) == pre_current_epoch_participation_root diff --git a/tests/core/pyspec/eth2spec/test/utils.py b/tests/core/pyspec/eth2spec/test/utils.py index 12ff68443..bad6c867b 100644 --- a/tests/core/pyspec/eth2spec/test/utils.py +++ b/tests/core/pyspec/eth2spec/test/utils.py @@ 
-1,5 +1,4 @@ from typing import Dict, Any -from eth2spec.debug.encode import encode from eth2spec.utils.ssz.ssz_typing import View from eth2spec.utils.ssz.ssz_impl import serialize @@ -39,24 +38,20 @@ def vector_test(description: str = None): if value is None: continue if isinstance(value, View): - yield key, 'data', encode(value) yield key, 'ssz', serialize(value) elif isinstance(value, bytes): - yield key, 'data', encode(value) yield key, 'ssz', value elif isinstance(value, list) and all([isinstance(el, (View, bytes)) for el in value]): for i, el in enumerate(value): if isinstance(el, View): - yield f'{key}_{i}', 'data', encode(el) yield f'{key}_{i}', 'ssz', serialize(el) elif isinstance(el, bytes): - yield f'{key}_{i}', 'data', encode(el) yield f'{key}_{i}', 'ssz', el yield f'{key}_count', 'meta', len(value) else: # Not a ssz value. # The data will now just be yielded as any python data, - # something that should be encodeable by the generator runner. + # something that should be encodable by the generator runner. yield key, 'data', value # check generator mode, may be None/else. diff --git a/tests/formats/README.md b/tests/formats/README.md index 36a5ec21b..7808538ad 100644 --- a/tests/formats/README.md +++ b/tests/formats/README.md @@ -132,21 +132,25 @@ Cases are split up too. This enables diffing of parts of the test case, tracking ### `<output part>` -E.g. `pre.yaml`, `deposit.yaml`, `post.yaml`. - -Diffing a `pre.yaml` and `post.yaml` provides all the information for testing, good for readability of the change. -Then the difference between pre and post can be compared to anything that changes the pre state, e.g. `deposit.yaml` - These files allow for custom formats for some parts of the test. E.g. something encoded in SSZ. +Or to avoid large files, the SSZ can be compressed with Snappy. +E.g. `pre.ssz_snappy`, `deposit.ssz_snappy`, `post.ssz_snappy`. -Some yaml files have copies, but formatted as raw SSZ bytes: `pre.ssz`, `deposit.ssz`, `post.ssz`. -The yaml files are intended to be deprecated, and clients should shift to ssz inputs for efficiency. -Deprecation will start once a viewer of SSZ test-cases is in place, to maintain a standard of readable test cases. -This also means that some clients can drop legacy YAML -> JSON/other -> SSZ work-arounds. -(These were implemented to support the uint64 YAML, hex strings, etc. Things that were not idiomatic to their language.) +Diffing a `pre.ssz_snappy` and `post.ssz_snappy` provides all the information for testing, when decompressed and decoded. +Then the difference between pre and post can be compared to anything that changes the pre state, e.g. `deposit.ssz_snappy`. + +YAML is generally used for test metadata, and for tests that do not use SSZ: e.g. shuffling and BLS tests. +In this case, there is no point in adding special SSZ types. And the size and efficiency of YAML is acceptable. + +#### Common output formats + +Between all types of tests, a few formats are common: + +- **`.yaml`**: A YAML file containing structured data to describe settings or test contents. +- **`.ssz`**: A file containing raw SSZ-encoded data. Previously widely used in tests, but replaced with the compressed variant. +- **`.ssz_snappy`**: Like `.ssz`, but compressed with Snappy block compression. + Snappy block compression is already applied to SSZ in Eth2 gossip, available in client implementations, and thus chosen as the compression method. -Yaml will not be deprecated for tests that do not use SSZ: e.g. shuffling and BLS tests.
-In this case, there is no work around for loading necessary anyway, and the size and efficiency of yaml is acceptable. #### Special output parts diff --git a/tests/formats/epoch_processing/README.md b/tests/formats/epoch_processing/README.md index 1f88b4832..6469682dc 100644 --- a/tests/formats/epoch_processing/README.md +++ b/tests/formats/epoch_processing/README.md @@ -15,18 +15,13 @@ description: string -- Optional description of test case, purely for debuggin bls_setting: int -- see general test-format spec. ``` -### `pre.yaml` +### `pre.ssz_snappy` -A YAML-encoded `BeaconState`, the state before running the epoch sub-transition. +A SSZ-snappy encoded `BeaconState`, the state before running the epoch sub-transition. -Also available as `pre.ssz`. +### `post.ssz_snappy` - -### `post.yaml` - -A YAML-encoded `BeaconState`, the state after applying the epoch sub-transition. - -Also available as `post.ssz`. +A SSZ-snappy encoded `BeaconState`, the state after applying the epoch sub-transition. ## Condition diff --git a/tests/formats/finality/README.md b/tests/formats/finality/README.md index da9108a6a..2d279b441 100644 --- a/tests/formats/finality/README.md +++ b/tests/formats/finality/README.md @@ -14,11 +14,10 @@ bls_setting: int -- see general test-format spec. blocks_count: int -- the number of blocks processed in this test. ``` -### `pre.yaml` +### `pre.ssz_snappy` -A YAML-encoded `BeaconState`, the state before running the block transitions. +A SSZ-snappy encoded `BeaconState`, the state before running the block transitions. -Also available as `pre.ssz`. ### `blocks_<index>.yaml` @@ -28,13 +28,11 @@ A series of files, with `<index>` in range `[0, blocks_count)`. Blocks need to b Each file is a YAML-encoded `SignedBeaconBlock`. -Each block is also available as `blocks_<index>.ssz` +Each block is also available as `blocks_<index>.ssz_snappy` -### `post.yaml` +### `post.ssz_snappy` -A YAML-encoded `BeaconState`, the state after applying the block transitions. - -Also available as `post.ssz`. +A SSZ-snappy encoded `BeaconState`, the state after applying the block transitions. ## Condition diff --git a/tests/formats/forks/README.md b/tests/formats/forks/README.md new file mode 100644 index 000000000..cbafd4e2a --- /dev/null +++ b/tests/formats/forks/README.md @@ -0,0 +1,43 @@ +# Forks + +The aim of the fork tests is to ensure that a pre-fork state can be transformed + into a valid post-fork state, utilizing the `upgrade` function found in the relevant `fork.md` spec. + +There is only one handler: `fork`. Each fork (after genesis) is handled with the same format, + and the particular fork boundary being tested is noted in `meta.yaml`. + +## Test case format + +### `meta.yaml` + +A yaml file to signify which fork boundary is being tested. + +```yaml +fork: str -- Fork being transitioned to +``` + +#### Fork strings + +Key of valid `fork` strings that might be found in `meta.yaml` + +| String ID | Pre-fork | Post-fork | Function | +| - | - | - | - | +| `altair` | Phase 0 | Altair | `upgrade_to_lightclient_patch` | + +### `pre.ssz_snappy` + +A SSZ-snappy encoded `BeaconState`, the state before running the fork transition. + +### `post.ssz_snappy` + +A SSZ-snappy encoded `BeaconState`, the state after applying the fork transition. + +*Note*: This type is the `BeaconState` after the fork and is *not* the same type as `pre`. + +## Processing + +To process this test, pass `pre` into the upgrade function defined by the `fork` in `meta.yaml`.
+ +## Condition + +The resulting state should match the expected `post`. diff --git a/tests/formats/genesis/initialization.md b/tests/formats/genesis/initialization.md index 428abb5bd..2e5452821 100644 --- a/tests/formats/genesis/initialization.md +++ b/tests/formats/genesis/initialization.md @@ -4,11 +4,9 @@ Tests the initialization of a genesis state based on Eth1 data. ## Test case format -### `eth1_block_hash.yaml` +### `eth1_block_hash.ssz_snappy` -A `Bytes32` hex encoded, with prefix 0x. The root of the Eth1 block. - -Also available as `eth1_block_hash.ssz`. +A SSZ-snappy encoded root of the Eth1 block. ### `eth1_timestamp.yaml` @@ -22,18 +20,15 @@ A yaml file to help read the deposit count: deposits_count: int -- Amount of deposits. ``` -### `deposits_<index>.yaml` +### `deposits_<index>.ssz_snappy` A series of files, with `<index>` in range `[0, deposits_count)`. Deposits need to be processed in order. -Each file is a YAML-encoded `Deposit` object. +Each file is a SSZ-snappy encoded `Deposit` object. -Each deposit is also available as `deposits_<index>.ssz`. +### `state.ssz_snappy` -### `state.yaml` +The expected genesis state. A SSZ-snappy encoded `BeaconState` object. -The expected genesis state. A YAML-encoded `BeaconState` object. - -Also available as `state.ssz`. ## Processing diff --git a/tests/formats/genesis/validity.md b/tests/formats/genesis/validity.md index 38f2b1b1f..1b3f79879 100644 --- a/tests/formats/genesis/validity.md +++ b/tests/formats/genesis/validity.md @@ -4,11 +4,10 @@ Tests if a genesis state is valid, i.e. if it counts as trigger to launch. ## Test case format -### `genesis.yaml` +### `genesis.ssz_snappy` -A `BeaconState`, the state to validate as genesis candidate. +A SSZ-snappy encoded `BeaconState`, the state to validate as genesis candidate. -Also available as `genesis.ssz`. ### `is_valid.yaml` diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index bb4636ec0..ca77ab966 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -12,23 +12,17 @@ description: string -- Optional description of test case, purely for debuggin bls_setting: int -- see general test-format spec. ``` -### `pre.yaml` +### `pre.ssz_snappy` -A YAML-encoded `BeaconState`, the state before applying the operation. +A SSZ-snappy encoded `BeaconState`, the state before applying the operation. -Also available as `pre.ssz`. +### `<operation-name>.ssz_snappy` -### `<operation-name>.yaml` +A SSZ-snappy encoded operation object, e.g. a `ProposerSlashing`, or `Deposit`. -A YAML-encoded operation object, e.g. a `ProposerSlashing`, or `Deposit`. +### `post.ssz_snappy` -Also available as `<operation-name>.ssz`. - -### `post.yaml` - -A YAML-encoded `BeaconState`, the state after applying the operation. No value if operation processing is aborted. - -Also available as `post.ssz`. +A SSZ-snappy encoded `BeaconState`, the state after applying the operation. No value if operation processing is aborted. ## Condition diff --git a/tests/formats/rewards/README.md b/tests/formats/rewards/README.md index b229d9f98..aee23c3e9 100644 --- a/tests/formats/rewards/README.md +++ b/tests/formats/rewards/README.md @@ -23,41 +23,29 @@ description: string -- Optional description of test case, purely for debuggin _Note_: No signature verification happens within rewards sub-functions. These tests can safely be run with or without BLS enabled. -### `pre.yaml` +### `pre.ssz_snappy` -A YAML-encoded `BeaconState`, the state before running the rewards sub-function.
+A SSZ-snappy encoded `BeaconState`, the state before running the rewards sub-function. -Also available as `pre.ssz`. +### `source_deltas.ssz_snappy` -### `source_deltas.yaml` +A SSZ-snappy encoded `Deltas` representing the rewards and penalties returned by the `get_source_deltas` function. -A YAML-encoded `Deltas` representing the rewards and penalties returned by the rewards the `get_source_deltas` function +### `target_deltas.ssz_snappy` -Also available as `source_deltas.ssz`. +A SSZ-snappy encoded `Deltas` representing the rewards and penalties returned by the `get_target_deltas` function. -### `target_deltas.yaml` +### `head_deltas.ssz_snappy` -A YAML-encoded `Deltas` representing the rewards and penalties returned by the rewards the `get_target_deltas` function +A SSZ-snappy encoded `Deltas` representing the rewards and penalties returned by the `get_head_deltas` function. -Also available as `target_deltas.ssz`. +### `inclusion_delay_deltas.ssz_snappy` -### `head_deltas.yaml` +A SSZ-snappy encoded `Deltas` representing the rewards and penalties returned by the `get_inclusion_delay_deltas` function. -A YAML-encoded `Deltas` representing the rewards and penalties returned by the rewards the `get_head_deltas` function +### `inactivity_penalty_deltas.ssz_snappy` -Also available as `head_deltas.ssz`. - -### `inclusion_delay_deltas.yaml` - -A YAML-encoded `Deltas` representing the rewards and penalties returned by the rewards the `get_inclusion_delay_deltas` function - -Also available as `inclusion_delay_deltas.ssz`. - -### `inactivity_penalty_deltas.yaml` - -A YAML-encoded `Deltas` representing the rewards and penalties returned by the rewards the `get_inactivity_penalty_deltas` function - -Also available as `inactivity_penalty_deltas.ssz`. +A SSZ-snappy encoded `Deltas` representing the rewards and penalties returned by the `get_inactivity_penalty_deltas` function. ## Condition diff --git a/tests/formats/sanity/blocks.md b/tests/formats/sanity/blocks.md index 44b37ed5e..a8b38ccae 100644 --- a/tests/formats/sanity/blocks.md +++ b/tests/formats/sanity/blocks.md @@ -14,27 +14,21 @@ blocks_count: int -- the number of blocks processed in this test. ``` -### `pre.yaml` +### `pre.ssz_snappy` -A YAML-encoded `BeaconState`, the state before running the block transitions. - -Also available as `pre.ssz`. +A SSZ-snappy encoded `BeaconState`, the state before running the block transitions. -### `blocks_<index>.yaml` +### `blocks_<index>.ssz_snappy` A series of files, with `<index>` in range `[0, blocks_count)`. Blocks need to be processed in order, following the main transition function (i.e. process slot and epoch transitions in between blocks as normal) -Each file is a YAML-encoded `SignedBeaconBlock`. +Each file is a SSZ-snappy encoded `SignedBeaconBlock`. -Each block is also available as `blocks_<index>.ssz` +### `post.ssz_snappy` -### `post.yaml` - -A YAML-encoded `BeaconState`, the state after applying the block transitions. - -Also available as `post.ssz`. +A SSZ-snappy encoded `BeaconState`, the state after applying the block transitions. ## Condition diff --git a/tests/formats/sanity/slots.md b/tests/formats/sanity/slots.md index 353287ee2..72a24da51 100644 --- a/tests/formats/sanity/slots.md +++ b/tests/formats/sanity/slots.md @@ -16,7 +16,7 @@ bls_setting: int -- see general test-format spec. A YAML-encoded `BeaconState`, the state before running the transitions. -Also available as `pre.ssz`. +Also available as `pre.ssz_snappy`.
### `slots.yaml` @@ -27,7 +27,7 @@ An integer. The amount of slots to process (i.e. the difference in slots between A YAML-encoded `BeaconState`, the state after applying the transitions. -Also available as `post.ssz`. +Also available as `post.ssz_snappy`. ### Processing diff --git a/tests/formats/ssz_generic/README.md b/tests/formats/ssz_generic/README.md index 68bdbc15f..85a507985 100644 --- a/tests/formats/ssz_generic/README.md +++ b/tests/formats/ssz_generic/README.md @@ -33,7 +33,7 @@ Each of the handlers encodes the SSZ type declaration in the file-name. See [Typ ### `valid` -Valid has 3 parts: `meta.yaml`, `serialized.ssz`, `value.yaml` +Valid has 3 parts: `meta.yaml`, `serialized.ssz_snappy`, `value.yaml` ### `meta.yaml` @@ -46,9 +46,9 @@ root: Bytes32 -- Hash-tree-root of the object The `Bytes32` is encoded as a string, hexadecimal encoding, prefixed with `0x`. -### `serialized.ssz` +### `serialized.ssz_snappy` -The serialized form of the object, as raw SSZ bytes. +The serialized form of the object, as snappy-compressed SSZ bytes. ### `value.yaml` @@ -64,7 +64,7 @@ The conditions are the same for each type: ## `invalid` -Test cases in the `invalid` suite only include the `serialized.ssz` +Test cases in the `invalid` suite only include the `serialized.ssz_snappy` #### Condition diff --git a/tests/formats/ssz_static/core.md b/tests/formats/ssz_static/core.md index d6f99a32b..09ff04e20 100644 --- a/tests/formats/ssz_static/core.md +++ b/tests/formats/ssz_static/core.md @@ -18,7 +18,7 @@ One can iterate over the handlers, and select the type based on the handler name Suites are then the same format, but each specialized in one randomization mode. Some randomization modes may only produce a single test case (e.g. the all-zeroes case). -The output parts are: `roots.yaml`, `serialized.ssz`, `value.yaml` +The output parts are: `roots.yaml`, `serialized.ssz_snappy`, `value.yaml` ### `roots.yaml` @@ -26,13 +26,13 @@ The output parts are: `roots.yaml`, `serialized.ssz`, `value.yaml` ``` root: bytes32 -- string, hash-tree-root of the value, hex encoded, with prefix 0x ``` -### `serialized.ssz` +### `serialized.ssz_snappy` -The raw encoded bytes. +The SSZ-snappy encoded bytes. ### `value.yaml` -The same value as `serialized.ssz`, represented as YAML. +The same value as `serialized.ssz_snappy`, represented as YAML. ## Condition diff --git a/tests/generators/README.md b/tests/generators/README.md index 077a8443c..6ccf9f118 100644 --- a/tests/generators/README.md +++ b/tests/generators/README.md @@ -36,7 +36,7 @@ Prerequisites: ### Cleaning -This removes the existing virtual environments (`/test_generators/<generator>/venv`) and generated tests (`/yaml_tests/`). +This removes the existing virtual environments (`/tests/generators/<generator>/venv`) and generated tests (`../eth2.0-spec-tests/tests`). ```bash make clean @@ -47,7 +47,7 @@ make clean This runs all of the generators. ```bash -make -j 4 gen_yaml_tests +make -j 4 generate_tests ``` The `-j N` flag makes the generators run in parallel, with `N` being the amount of cores. @@ -55,10 +55,10 @@ The `-j N` flag makes the generators run in parallel, with `N` being the amount ### Running a single generator -The makefile auto-detects generators in the `test_generators` directory and provides a tests-gen target for each generator. See example: +The makefile auto-detects generators in the `tests/generators` directory and provides a tests-gen target (gen_<generator>) for each generator.
See example: ```bash -make ./eth2.0-spec-tests/tests/shuffling/ +make gen_ssz_static ``` ## Developing a generator @@ -78,9 +78,8 @@ It's recommended to extend the base-generator. Create a `requirements.txt` in the root of your generator directory: ``` -../../core/gen_helpers -../../core/config_helpers -../../core/pyspec +pytest>=4.4 +../../../[generator] ``` The config helper and pyspec is optional, but preferred. We encourage generators to derive tests from the spec itself in order to prevent code duplication and outdated tests. @@ -103,7 +102,7 @@ Write a `main.py` file. The shuffling test generator is a good minimal starting ```python from eth2spec.phase0 import spec as spec from eth_utils import to_tuple -from gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing from preset_loader import loader from typing import Iterable @@ -163,35 +162,40 @@ To extend this, one could decide to parametrize the `shuffling_test_cases` funct Another example, to generate tests from pytests: ```python -def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: +from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch +from eth2spec.phase1 import spec as spec_phase1 +from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH - def prepare_fn(configs_path: str) -> str: - presets = loader.load_presets(configs_path, config_name) - spec_phase0.apply_constants_preset(presets) - spec_phase1.apply_constants_preset(presets) - return config_name +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators - def cases_fn() -> Iterable[gen_typing.TestCase]: - return generate_from_tests( - runner_name='epoch_processing', - handler_name=handler_name, - src=tests_src, - fork_name='phase0' - ) - return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) +specs = (spec_phase0, spec_lightclient_patch, spec_phase1) if __name__ == "__main__": - gen_runner.run_generator("epoch_processing", [ - create_provider('justification_and_finalization', test_process_justification_and_finalization, 'minimal'), - ... - ]) + phase_0_mods = {key: 'eth2spec.test.phase0.sanity.test_' + key for key in [ + 'blocks', + 'slots', + ]} + lightclient_patch_mods = {**{key: 'eth2spec.test.lightclient_patch.sanity.test_' + key for key in [ + 'blocks', + ]}, **phase_0_mods} # also run the previous phase 0 tests + phase_1_mods = {**{key: 'eth2spec.test.phase1.sanity.test_' + key for key in [ + 'blocks', # more phase 1 specific block tests + 'shard_blocks', + ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) + all_mods = { + PHASE0: phase_0_mods, + LIGHTCLIENT_PATCH: lightclient_patch_mods, + PHASE1: phase_1_mods, + } + + run_state_test_generators(runner_name="sanity", specs=specs, all_mods=all_mods) ``` -Here multiple phases load the configuration, and the stream of test cases is derived from a pytest file using the `generate_from_tests` utility. - +Here multiple phases load the configuration, and the stream of test cases is derived from a pytest file using the `eth2spec.gen_helpers.gen_from_tests.gen.run_state_test_generators` utility. Note that this helper generates all available tests of `TESTGEN_FORKS` forks of `ALL_CONFIGS` configs of the given runner. Recommendations: - You can have more than just one test provider. 
@@ -200,14 +204,13 @@ Recommendations: - Use config `minimal` for performance and simplicity, but also implement a suite with the `mainnet` config where necessary. - You may be able to write your test case provider in a way where it does not make assumptions on constants. If so, you can generate test cases with different configurations for the same scenario (see example). -- See [`tests/core/gen_helpers/README.md`](../core/gen_helpers/README.md) for command line options for generators. - +- See [`tests/core/pyspec/eth2spec/gen_helpers/README.md`](../core/pyspec/eth2spec/gen_helpers/README.md) for command line options for generators. ## How to add a new test generator To add a new test generator that builds `New Tests`: -1. Create a new directory `new_tests` within the `test_generators` directory. +1. Create a new directory `new_tests` within the `tests/generators` directory. Note that `new_tests` is also the name of the directory in which the tests will appear in the tests repository later. 2. Your generator is assumed to have a `requirements.txt` file, with any dependencies it may need. Leave it empty if your generator has none. @@ -216,8 +219,8 @@ To add a new test generator that builds `New Tests`: 4. Your generator is called with `-o some/file/path/for_testing/can/be_anything -c some/other/path/to_configs/`. The base generator helps you handle this; you only have to define test case providers. 5. Finally, add any linting or testing commands to the - [circleci config file](../.circleci/config.yml) if desired to increase code quality. - Or add it to the [`Makefile`](../Makefile), if it can be run locally. + [circleci config file](../../.circleci/config.yml) if desired to increase code quality. + Or add it to the [`Makefile`](../../Makefile), if it can be run locally. *Note*: You do not have to change the makefile. However, if necessary (e.g. not using Python, or mixing in other languages), submit an issue, and it can be a special case.
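To make the steps above concrete, a new generator's `main.py` can reuse the `run_state_test_generators` pattern introduced throughout this change. The sketch below is illustrative only: the `new_tests` runner name and the `eth2spec.test.phase0.new_tests.test_new` module are hypothetical placeholders, and the generator's `requirements.txt` would contain the same two lines (`pytest>=4.4` and `../../../[generator]`) as the other generators.

```python
# tests/generators/new_tests/main.py -- illustrative sketch, not part of this change.
from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH

specs = (spec_phase0, spec_lightclient_patch, spec_phase1)


if __name__ == "__main__":
    # Hypothetical pytest module holding the new tests.
    phase_0_mods = {'new': 'eth2spec.test.phase0.new_tests.test_new'}
    # No fork-specific variants yet, so reuse the phase 0 modules for the later forks.
    lightclient_patch_mods = phase_0_mods
    phase_1_mods = phase_0_mods

    all_mods = {
        PHASE0: phase_0_mods,
        LIGHTCLIENT_PATCH: lightclient_patch_mods,
        PHASE1: phase_1_mods,
    }

    run_state_test_generators(runner_name="new_tests", specs=specs, all_mods=all_mods)
```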
diff --git a/tests/generators/bls/main.py b/tests/generators/bls/main.py index 6552b8654..fecc2df7d 100644 --- a/tests/generators/bls/main.py +++ b/tests/generators/bls/main.py @@ -13,7 +13,7 @@ import milagro_bls_binding as milagro_bls from eth2spec.utils import bls from eth2spec.test.context import PHASE0 -from gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing def to_bytes(i): diff --git a/tests/generators/bls/requirements.txt b/tests/generators/bls/requirements.txt index 5f830773a..182248686 100644 --- a/tests/generators/bls/requirements.txt +++ b/tests/generators/bls/requirements.txt @@ -1,4 +1,2 @@ -py_ecc==5.1.0 -eth-utils==1.6.0 -../../core/gen_helpers -../../../ +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/epoch_processing/main.py b/tests/generators/epoch_processing/main.py index ea7639605..50a1e2b57 100644 --- a/tests/generators/epoch_processing/main.py +++ b/tests/generators/epoch_processing/main.py @@ -1,34 +1,11 @@ -from typing import Iterable - -from gen_base import gen_runner, gen_typing -from gen_from_tests.gen import generate_from_tests -from importlib import reload, import_module -from eth2spec.config import config_util +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch from eth2spec.phase1 import spec as spec_phase1 -from eth2spec.test.context import PHASE0, PHASE1 -from eth2spec.utils import bls +from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH -def create_provider(fork_name: str, handler_name: str, - tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider: - def prepare_fn(configs_path: str) -> str: - config_util.prepare_config(configs_path, config_name) - reload(spec_phase0) - reload(spec_phase1) - bls.use_milagro() - return config_name - - def cases_fn() -> Iterable[gen_typing.TestCase]: - tests_src = import_module(tests_src_mod_name) - return generate_from_tests( - runner_name='epoch_processing', - handler_name=handler_name, - src=tests_src, - fork_name=fork_name, - ) - - return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) +specs = (spec_phase0, spec_lightclient_patch, spec_phase1) if __name__ == "__main__": @@ -44,21 +21,22 @@ if __name__ == "__main__": 'historical_roots_update', 'participation_record_updates', ]} + lightclient_patch_mods = { + **{key: 'eth2spec.test.lightclient_patch.epoch_processing.test_process_' + key for key in [ + 'sync_committee_updates', + ]}, + **phase_0_mods, + } # also run the previous phase 0 tests phase_1_mods = {**{key: 'eth2spec.test.phase1.epoch_processing.test_process_' + key for key in [ 'reveal_deadlines', 'challenge_deadlines', 'custody_final_updates', ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) - gen_runner.run_generator(f"epoch_processing", [ - create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"epoch_processing", [ - create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"epoch_processing", [ - create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items() - ]) - gen_runner.run_generator(f"epoch_processing", [ - create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items() - ]) + all_mods = { + PHASE0: phase_0_mods, + 
LIGHTCLIENT_PATCH: lightclient_patch_mods, + PHASE1: phase_1_mods, + } + + run_state_test_generators(runner_name="epoch_processing", specs=specs, all_mods=all_mods) diff --git a/tests/generators/epoch_processing/requirements.txt b/tests/generators/epoch_processing/requirements.txt index b82314298..182248686 100644 --- a/tests/generators/epoch_processing/requirements.txt +++ b/tests/generators/epoch_processing/requirements.txt @@ -1,2 +1,2 @@ -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/finality/main.py b/tests/generators/finality/main.py index ef2d8293f..8b961f9f4 100644 --- a/tests/generators/finality/main.py +++ b/tests/generators/finality/main.py @@ -1,43 +1,23 @@ -from typing import Iterable -from importlib import reload - -from gen_base import gen_runner, gen_typing -from gen_from_tests.gen import generate_from_tests - -from eth2spec.test.context import PHASE0, PHASE1 -from eth2spec.test.phase0.finality import test_finality -from eth2spec.config import config_util +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch from eth2spec.phase1 import spec as spec_phase1 -from eth2spec.utils import bls +from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH -def create_provider(fork_name: str, handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: - - def prepare_fn(configs_path: str) -> str: - config_util.prepare_config(configs_path, config_name) - reload(spec_phase0) - reload(spec_phase1) - bls.use_milagro() - return config_name - - def cases_fn() -> Iterable[gen_typing.TestCase]: - return generate_from_tests( - runner_name='finality', - handler_name=handler_name, - src=tests_src, - fork_name=fork_name, - ) - - return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) +specs = (spec_phase0, spec_lightclient_patch, spec_phase1) if __name__ == "__main__": - # No additional phase 1 specific rewards tests, yet. - key = 'finality' - gen_runner.run_generator("finality", [ - create_provider(PHASE0, 'finality', test_finality, 'minimal'), - create_provider(PHASE0, 'finality', test_finality, 'mainnet'), - create_provider(PHASE1, 'finality', test_finality, 'minimal'), - create_provider(PHASE1, 'finality', test_finality, 'mainnet'), - ]) + phase_0_mods = {'finality': 'eth2spec.test.phase0.finality.test_finality'} + # No additional lightclient_patch or phase 1 specific finality tests, yet. 
+ lightclient_patch_mods = phase_0_mods + phase_1_mods = phase_0_mods + + all_mods = { + PHASE0: phase_0_mods, + LIGHTCLIENT_PATCH: lightclient_patch_mods, + PHASE1: phase_1_mods, + } + + run_state_test_generators(runner_name="finality", specs=specs, all_mods=all_mods) diff --git a/tests/generators/finality/requirements.txt b/tests/generators/finality/requirements.txt index b82314298..182248686 100644 --- a/tests/generators/finality/requirements.txt +++ b/tests/generators/finality/requirements.txt @@ -1,2 +1,2 @@ -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/forks/main.py b/tests/generators/forks/main.py new file mode 100644 index 000000000..190d4620c --- /dev/null +++ b/tests/generators/forks/main.py @@ -0,0 +1,38 @@ +from importlib import reload +from typing import Iterable + +from eth2spec.test.context import PHASE0, LIGHTCLIENT_PATCH, MINIMAL, MAINNET +from eth2spec.config import config_util +from eth2spec.test.lightclient_patch.fork import test_fork as test_altair_forks +from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch + +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests + + +def create_provider(tests_src, config_name: str, phase: str, fork_name: str) -> gen_typing.TestProvider: + + def prepare_fn(configs_path: str) -> str: + config_util.prepare_config(configs_path, config_name) + reload(spec_phase0) + reload(spec_lightclient_patch) + return config_name + + def cases_fn() -> Iterable[gen_typing.TestCase]: + return generate_from_tests( + runner_name='fork', + handler_name='fork', + src=tests_src, + fork_name=fork_name, + phase=phase, + ) + + return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) + + +if __name__ == "__main__": + gen_runner.run_generator("forks", [ + create_provider(test_altair_forks, MINIMAL, PHASE0, LIGHTCLIENT_PATCH), + create_provider(test_altair_forks, MAINNET, PHASE0, LIGHTCLIENT_PATCH), + ]) diff --git a/tests/generators/forks/requirements.txt b/tests/generators/forks/requirements.txt new file mode 100644 index 000000000..735f863fa --- /dev/null +++ b/tests/generators/forks/requirements.txt @@ -0,0 +1,2 @@ +pytest>=4.4 +../../../[generator] \ No newline at end of file diff --git a/tests/generators/genesis/main.py b/tests/generators/genesis/main.py index ce055b44a..854af6572 100644 --- a/tests/generators/genesis/main.py +++ b/tests/generators/genesis/main.py @@ -3,8 +3,8 @@ from typing import Iterable from eth2spec.test.context import PHASE0 from eth2spec.test.phase0.genesis import test_initialization, test_validity -from gen_base import gen_runner, gen_typing -from gen_from_tests.gen import generate_from_tests +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests from eth2spec.phase0 import spec as spec from importlib import reload from eth2spec.config import config_util diff --git a/tests/generators/genesis/requirements.txt b/tests/generators/genesis/requirements.txt index b82314298..182248686 100644 --- a/tests/generators/genesis/requirements.txt +++ b/tests/generators/genesis/requirements.txt @@ -1,2 +1,2 @@ -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 1acf45e47..00a58288f 100644 
--- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -1,34 +1,11 @@ -from typing import Iterable - -from gen_base import gen_runner, gen_typing -from gen_from_tests.gen import generate_from_tests -from importlib import reload, import_module -from eth2spec.config import config_util +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch from eth2spec.phase1 import spec as spec_phase1 -from eth2spec.test.context import PHASE0, PHASE1 -from eth2spec.utils import bls +from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH -def create_provider(fork_name: str, handler_name: str, - tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider: - def prepare_fn(configs_path: str) -> str: - config_util.prepare_config(configs_path, config_name) - reload(spec_phase0) - reload(spec_phase1) - bls.use_milagro() - return config_name - - def cases_fn() -> Iterable[gen_typing.TestCase]: - tests_src = import_module(tests_src_mod_name) - return generate_from_tests( - runner_name='operations', - handler_name=handler_name, - src=tests_src, - fork_name=fork_name, - ) - - return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) +specs = (spec_phase0, spec_lightclient_patch, spec_phase1) if __name__ == "__main__": @@ -40,6 +17,12 @@ if __name__ == "__main__": 'proposer_slashing', 'voluntary_exit', ]} + lightclient_patch_mods = { + **{key: 'eth2spec.test.lightclient_patch.block_processing.test_process_' + key for key in [ + 'sync_committee', + ]}, + **phase_0_mods, + } # also run the previous phase 0 tests phase_1_mods = {**{key: 'eth2spec.test.phase1.block_processing.test_process_' + key for key in [ 'attestation', 'chunk_challenge', @@ -49,15 +32,10 @@ if __name__ == "__main__": 'shard_transition', ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) - gen_runner.run_generator(f"operations", [ - create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"operations", [ - create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"operations", [ - create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items() - ]) - gen_runner.run_generator(f"operations", [ - create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items() - ]) + all_mods = { + PHASE0: phase_0_mods, + LIGHTCLIENT_PATCH: lightclient_patch_mods, + PHASE1: phase_1_mods, + } + + run_state_test_generators(runner_name="operations", specs=specs, all_mods=all_mods) diff --git a/tests/generators/operations/requirements.txt b/tests/generators/operations/requirements.txt index a6ea61aea..182248686 100644 --- a/tests/generators/operations/requirements.txt +++ b/tests/generators/operations/requirements.txt @@ -1,3 +1,2 @@ -eth-utils==1.6.0 -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/rewards/main.py b/tests/generators/rewards/main.py index 23d0633b0..4124587cc 100644 --- a/tests/generators/rewards/main.py +++ b/tests/generators/rewards/main.py @@ -1,34 +1,11 @@ -from typing import Iterable - -from gen_base import gen_runner, gen_typing -from gen_from_tests.gen import generate_from_tests -from importlib import reload, import_module -from eth2spec.config 
import config_util +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch from eth2spec.phase1 import spec as spec_phase1 -from eth2spec.test.context import PHASE0, PHASE1 -from eth2spec.utils import bls +from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH -def create_provider(fork_name: str, handler_name: str, - tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider: - def prepare_fn(configs_path: str) -> str: - config_util.prepare_config(configs_path, config_name) - reload(spec_phase0) - reload(spec_phase1) - bls.use_milagro() - return config_name - - def cases_fn() -> Iterable[gen_typing.TestCase]: - tests_src = import_module(tests_src_mod_name) - return generate_from_tests( - runner_name='rewards', - handler_name=handler_name, - src=tests_src, - fork_name=fork_name, - ) - - return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) +specs = (spec_phase0, spec_lightclient_patch, spec_phase1) if __name__ == "__main__": @@ -37,18 +14,14 @@ if __name__ == "__main__": 'leak', 'random', ]} - # No additional phase 1 specific rewards tests, yet. + # No additional lightclient_patch or phase 1 specific rewards tests, yet. + lightclient_patch_mods = phase_0_mods phase_1_mods = phase_0_mods - gen_runner.run_generator(f"rewards", [ - create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"rewards", [ - create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"rewards", [ - create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items() - ]) - gen_runner.run_generator(f"rewards", [ - create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items() - ]) + all_mods = { + PHASE0: phase_0_mods, + LIGHTCLIENT_PATCH: lightclient_patch_mods, + PHASE1: phase_1_mods, + } + + run_state_test_generators(runner_name="rewards", specs=specs, all_mods=all_mods) diff --git a/tests/generators/rewards/requirements.txt b/tests/generators/rewards/requirements.txt index b82314298..182248686 100644 --- a/tests/generators/rewards/requirements.txt +++ b/tests/generators/rewards/requirements.txt @@ -1,2 +1,2 @@ -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/sanity/main.py b/tests/generators/sanity/main.py index 83166f0cf..5155798ff 100644 --- a/tests/generators/sanity/main.py +++ b/tests/generators/sanity/main.py @@ -1,34 +1,12 @@ -from typing import Iterable - -from gen_base import gen_runner, gen_typing -from gen_from_tests.gen import generate_from_tests -from importlib import reload, import_module -from eth2spec.config import config_util from eth2spec.phase0 import spec as spec_phase0 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch from eth2spec.phase1 import spec as spec_phase1 -from eth2spec.test.context import PHASE0, PHASE1 -from eth2spec.utils import bls +from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH + +from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators -def create_provider(fork_name: str, handler_name: str, - tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider: - def prepare_fn(configs_path: str) -> str: - config_util.prepare_config(configs_path, config_name) - 
reload(spec_phase0) - reload(spec_phase1) - bls.use_milagro() - return config_name - - def cases_fn() -> Iterable[gen_typing.TestCase]: - tests_src = import_module(tests_src_mod_name) - return generate_from_tests( - runner_name='sanity', - handler_name=handler_name, - src=tests_src, - fork_name=fork_name, - ) - - return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) +specs = (spec_phase0, spec_lightclient_patch, spec_phase1) if __name__ == "__main__": @@ -36,20 +14,18 @@ if __name__ == "__main__": 'blocks', 'slots', ]} + lightclient_patch_mods = {**{key: 'eth2spec.test.lightclient_patch.sanity.test_' + key for key in [ + 'blocks', + ]}, **phase_0_mods} # also run the previous phase 0 tests phase_1_mods = {**{key: 'eth2spec.test.phase1.sanity.test_' + key for key in [ 'blocks', # more phase 1 specific block tests 'shard_blocks', ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) - gen_runner.run_generator(f"sanity", [ - create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"sanity", [ - create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() - ]) - gen_runner.run_generator(f"sanity", [ - create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items() - ]) - gen_runner.run_generator(f"sanity", [ - create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items() - ]) + all_mods = { + PHASE0: phase_0_mods, + LIGHTCLIENT_PATCH: lightclient_patch_mods, + PHASE1: phase_1_mods, + } + + run_state_test_generators(runner_name="sanity", specs=specs, all_mods=all_mods) diff --git a/tests/generators/sanity/requirements.txt b/tests/generators/sanity/requirements.txt index b82314298..182248686 100644 --- a/tests/generators/sanity/requirements.txt +++ b/tests/generators/sanity/requirements.txt @@ -1,2 +1,2 @@ -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/shuffling/main.py b/tests/generators/shuffling/main.py index 6069de77a..091207bca 100644 --- a/tests/generators/shuffling/main.py +++ b/tests/generators/shuffling/main.py @@ -2,7 +2,7 @@ from eth_utils import to_tuple from typing import Iterable from importlib import reload -from gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing from eth2spec.config import config_util from eth2spec.phase0 import spec as spec diff --git a/tests/generators/shuffling/requirements.txt b/tests/generators/shuffling/requirements.txt index a6ea61aea..182248686 100644 --- a/tests/generators/shuffling/requirements.txt +++ b/tests/generators/shuffling/requirements.txt @@ -1,3 +1,2 @@ -eth-utils==1.6.0 -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/ssz_generic/main.py b/tests/generators/ssz_generic/main.py index 8cfb2e3eb..737f8cda6 100644 --- a/tests/generators/ssz_generic/main.py +++ b/tests/generators/ssz_generic/main.py @@ -1,5 +1,5 @@ from typing import Iterable -from gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing import ssz_basic_vector import ssz_bitlist import ssz_bitvector diff --git a/tests/generators/ssz_generic/requirements.txt b/tests/generators/ssz_generic/requirements.txt index af061a3b1..182248686 100644 --- a/tests/generators/ssz_generic/requirements.txt +++ 
b/tests/generators/ssz_generic/requirements.txt @@ -1,3 +1,2 @@ -eth-utils==1.6.0 -../../core/gen_helpers -../../../ +pytest>=4.4 +../../../[generator] diff --git a/tests/generators/ssz_static/main.py b/tests/generators/ssz_static/main.py index 38ff18615..e21fc4141 100644 --- a/tests/generators/ssz_static/main.py +++ b/tests/generators/ssz_static/main.py @@ -3,13 +3,14 @@ from typing import Iterable from importlib import reload from inspect import getmembers, isclass -from gen_base import gen_runner, gen_typing +from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing from eth2spec.debug import random_value, encode from eth2spec.config import config_util from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase1 import spec as spec_phase1 -from eth2spec.test.context import PHASE0, PHASE1 +from eth2spec.lightclient_patch import spec as spec_lightclient_patch +from eth2spec.test.context import PHASE1, LIGHTCLIENT_PATCH, TESTGEN_FORKS, MINIMAL, MAINNET from eth2spec.utils.ssz.ssz_typing import Container from eth2spec.utils.ssz.ssz_impl import ( hash_tree_root, @@ -64,6 +65,7 @@ def create_provider(fork_name, config_name: str, seed: int, mode: random_value.R config_util.prepare_config(configs_path, config_name) reload(spec_phase0) reload(spec_phase1) + reload(spec_lightclient_patch) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: @@ -71,6 +73,8 @@ def create_provider(fork_name, config_name: str, seed: int, mode: random_value.R spec = spec_phase0 if fork_name == PHASE1: spec = spec_phase1 + if fork_name == LIGHTCLIENT_PATCH: + spec = spec_lightclient_patch for (i, (name, ssz_type)) in enumerate(get_spec_ssz_types(spec)): yield from ssz_static_cases(fork_name, seed * 1000 + i, name, ssz_type, mode, chaos, count) @@ -83,14 +87,14 @@ if __name__ == "__main__": settings = [] seed = 1 for mode in random_value.RandomizationMode: - settings.append((seed, "minimal", mode, False, 30)) + settings.append((seed, MINIMAL, mode, False, 30)) seed += 1 - settings.append((seed, "minimal", random_value.RandomizationMode.mode_random, True, 30)) + settings.append((seed, MINIMAL, random_value.RandomizationMode.mode_random, True, 30)) seed += 1 - settings.append((seed, "mainnet", random_value.RandomizationMode.mode_random, False, 5)) + settings.append((seed, MAINNET, random_value.RandomizationMode.mode_random, False, 5)) seed += 1 - for fork in [PHASE0, PHASE1]: + for fork in TESTGEN_FORKS: gen_runner.run_generator("ssz_static", [ create_provider(fork, config_name, seed, mode, chaos, cases_if_random) for (seed, config_name, mode, chaos, cases_if_random) in settings diff --git a/tests/generators/ssz_static/requirements.txt b/tests/generators/ssz_static/requirements.txt index b82314298..182248686 100644 --- a/tests/generators/ssz_static/requirements.txt +++ b/tests/generators/ssz_static/requirements.txt @@ -1,2 +1,2 @@ -../../core/gen_helpers -../../../ \ No newline at end of file +pytest>=4.4 +../../../[generator]
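For reference, consuming the `.ssz_snappy` outputs that these generators emit (for example, a fork test case as described in `tests/formats/forks/README.md` above) amounts to Snappy block decompression followed by SSZ decoding. The sketch below is a rough illustration under stated assumptions, not part of this change: it assumes the `python-snappy` bindings are available and that the pyspec types expose `decode_bytes`/`hash_tree_root` as used here; the paths and the helper names `load_ssz_snappy` and `check_fork_test_case` are made up for the example.

```python
# Illustrative consumer-side sketch of reading ssz_snappy test parts.
import snappy  # python-snappy bindings (assumed available)

from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch


def load_ssz_snappy(path, ssz_type):
    # Snappy block decompression, then SSZ decoding into the given pyspec type.
    with open(path, 'rb') as f:
        return ssz_type.decode_bytes(snappy.decompress(f.read()))


def check_fork_test_case(case_dir):
    # `pre` is a phase 0 BeaconState; `post` uses the post-fork BeaconState type,
    # matching the note in the forks test format.
    pre = load_ssz_snappy(case_dir + '/pre.ssz_snappy', spec_phase0.BeaconState)
    expected = load_ssz_snappy(case_dir + '/post.ssz_snappy', spec_lightclient_patch.BeaconState)
    post = spec_lightclient_patch.upgrade_to_lightclient_patch(pre)
    assert spec_lightclient_patch.hash_tree_root(post) == spec_lightclient_patch.hash_tree_root(expected)
```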