Merge branch 'dev'

This commit is contained in:
Danny Ryan
2021-06-24 13:36:01 -06:00
18 changed files with 269 additions and 186 deletions

View File

@@ -35,8 +35,7 @@ The current features are:
### Merge
The merge is still actively in R&D. The specifications outline a general direction for engineering work,
while the details are in review and may change.
The merge is still actively in development. The exact specification has not been formally accepted as final and details are still subject to change.
* Background material:
* An [ethresear.ch](https://ethresear.ch) post [describing the basic mechanism](https://ethresear.ch/t/the-eth1-eth2-transition/6265)

View File

@@ -223,7 +223,7 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str]) ->
if not _is_constant_id(name):
# Check for short type declarations
if value.startswith("uint") or value.startswith("Bytes") or value.startswith("ByteList"):
if value.startswith("uint") or value.startswith("Bytes") or value.startswith("ByteList") or value.startswith("Union"):
custom_types[name] = value
continue
@@ -495,7 +495,7 @@ class MergeSpecBuilder(Phase0SpecBuilder):
return super().imports(preset_name) + f'''
from typing import Protocol
from eth2spec.phase0 import {preset_name} as phase0
from eth2spec.utils.ssz.ssz_typing import Bytes20, ByteList, ByteVector, uint256
from eth2spec.utils.ssz.ssz_typing import Bytes20, ByteList, ByteVector, uint256, Union
'''
@classmethod
@@ -523,7 +523,7 @@ def get_pow_chain_head() -> PowBlock:
class NoopExecutionEngine(ExecutionEngine):
def new_block(self, execution_payload: ExecutionPayload) -> bool:
def on_payload(self, execution_payload: ExecutionPayload) -> bool:
return True
def set_head(self, block_hash: Hash32) -> bool:
@@ -532,7 +532,7 @@ class NoopExecutionEngine(ExecutionEngine):
def finalize_block(self, block_hash: Hash32) -> bool:
return True
def assemble_block(self, block_hash: Hash32, timestamp: uint64) -> ExecutionPayload:
def assemble_block(self, block_hash: Hash32, timestamp: uint64, random: Bytes32) -> ExecutionPayload:
raise NotImplementedError("no default block production")
@@ -553,6 +553,10 @@ spec_builders = {
}
def is_spec_defined_type(value: str) -> bool:
return value.startswith('ByteList') or value.startswith('Union')
def objects_to_spec(preset_name: str,
spec_object: SpecObject,
builder: SpecBuilder,
@@ -565,15 +569,15 @@ def objects_to_spec(preset_name: str,
[
f"class {key}({value}):\n pass\n"
for key, value in spec_object.custom_types.items()
if not value.startswith('ByteList')
if not is_spec_defined_type(value)
]
)
+ ('\n\n' if len([key for key, value in spec_object.custom_types.items() if value.startswith('ByteList')]) > 0 else '')
+ ('\n\n' if len([key for key, value in spec_object.custom_types.items() if is_spec_defined_type(value)]) > 0 else '')
+ '\n\n'.join(
[
f"{key} = {value}\n"
for key, value in spec_object.custom_types.items()
if value.startswith('ByteList')
if is_spec_defined_type(value)
]
)
)
@@ -1020,7 +1024,7 @@ setup(
"py_ecc==5.2.0",
"milagro_bls_binding==1.6.3",
"dataclasses==0.6",
"remerkleable==0.1.20",
"remerkleable==0.1.21",
RUAMEL_YAML_VERSION,
"lru-dict==1.1.6",
MARKO_VERSION,

View File

@@ -137,13 +137,14 @@ def get_sync_subcommittee_pubkeys(state: BeaconState, subcommittee_index: uint64
return sync_committee.pubkeys[i:i + sync_subcommittee_size]
```
- _[IGNORE]_ The contribution's slot is for the current slot (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance), i.e. `contribution.slot == current_slot`.
- _[IGNORE]_ The block being signed over (`contribution.beacon_block_root`) has been seen (via both gossip and non-gossip sources).
- _[IGNORE]_ The contribution's slot is for the current slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance), i.e. `contribution.slot == current_slot`.
- _[REJECT]_ The subcommittee index is in the allowed range, i.e. `contribution.subcommittee_index < SYNC_COMMITTEE_SUBNET_COUNT`.
- _[IGNORE]_ The sync committee contribution is the first valid contribution received for the aggregator with index `contribution_and_proof.aggregator_index` for the slot `contribution.slot` and subcommittee index `contribution.subcommittee_index`.
- _[REJECT]_ `contribution_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_sync_committee_aggregator(contribution_and_proof.selection_proof)` returns `True`.
- _[REJECT]_ The aggregator's validator index is in the declared subcommittee of the current sync committee --
i.e. `state.validators[contribution_and_proof.aggregator_index].pubkey in get_sync_subcommittee_pubkeys(state, contribution.subcommittee_index)`.
- _[IGNORE]_ The sync committee contribution is the first valid contribution received for the aggregator with index `contribution_and_proof.aggregator_index`
for the slot `contribution.slot` and subcommittee index `contribution.subcommittee_index`
(this requires maintaining a cache of size `SYNC_COMMITTEE_SIZE` for this topic that can be flushed after each slot).
- _[REJECT]_ The `contribution_and_proof.selection_proof` is a valid signature of the `SyncAggregatorSelectionData` derived from the `contribution` by the validator with index `contribution_and_proof.aggregator_index`.
- _[REJECT]_ The aggregator signature, `signed_contribution_and_proof.signature`, is valid.
- _[REJECT]_ The aggregate signature is valid for the message `beacon_block_root` and aggregate pubkey derived from the participation info in `aggregation_bits` for the subcommittee specified by the `contribution.subcommittee_index`.
@@ -158,12 +159,12 @@ The `sync_committee_{subnet_id}` topics are used to propagate unaggregated sync
The following validations MUST pass before forwarding the `sync_committee_message` on the network:
- _[IGNORE]_ The signature's slot is for the current slot (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance), i.e. `sync_committee_message.slot == current_slot`.
- _[IGNORE]_ The block being signed over (`sync_committee_message.beacon_block_root`) has been seen (via both gossip and non-gossip sources).
- _[IGNORE]_ There has been no other valid sync committee signature for the declared `slot` for the validator referenced by `sync_committee_message.validator_index`.
Note this validation is _per topic_ so that for a given `slot`, multiple messages could be forwarded with the same `validator_index` as long as the `subnet_id`s are distinct.
- _[IGNORE]_ The signature's slot is for the current slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance), i.e. `sync_committee_message.slot == current_slot`.
- _[REJECT]_ The `subnet_id` is valid for the given validator, i.e. `subnet_id in compute_subnets_for_sync_committee(state, sync_committee_message.validator_index)`.
Note this validation implies the validator is part of the broader current sync committee along with the correct subcommittee.
- _[IGNORE]_ There has been no other valid sync committee signature for the declared `slot` for the validator referenced by `sync_committee_message.validator_index`
(this requires maintaining a cache of size `SYNC_COMMITTEE_SIZE // SYNC_COMMITTEE_SUBNET_COUNT` for each subnet that can be flushed after each slot).
Note this validation is _per topic_ so that for a given `slot`, multiple messages could be forwarded with the same `validator_index` as long as the `subnet_id`s are distinct.
- _[REJECT]_ The `signature` is valid for the message `beacon_block_root` for the validator referenced by `validator_index`.
#### Sync committees and aggregation

View File

@@ -265,7 +265,7 @@ This process occurs each slot.
##### Prepare sync committee message
If a validator is in the current sync committee (i.e. `is_assigned_to_sync_committee()` above returns `True`), then for every `slot` in the current sync committee period, the validator should prepare a `SyncCommitteeMessage` for the previous slot (`slot - 1`) according to the logic in `get_sync_committee_message` as soon as they have determined the head block of `slot - 1`.
If a validator is in the current sync committee (i.e. `is_assigned_to_sync_committee()` above returns `True`), then for every `slot` in the current sync committee period, the validator should prepare a `SyncCommitteeMessage` for the previous slot (`slot - 1`) according to the logic in `get_sync_committee_message` as soon as they have determined the head block of `slot - 1`. This means that when assigned to `slot` a `SyncCommitteeMessage` is prepared and broadcast in `slot - 1` instead of `slot`.
This logic is triggered upon the same conditions as when producing an attestation.
Meaning, a sync committee member should produce and broadcast a `SyncCommitteeMessage` either when (a) the validator has received a valid block from the expected block proposer for the current `slot` or (b) one-third of the slot has transpired (`SECONDS_PER_SLOT / 3` seconds after the start of the slot) -- whichever comes first.

View File

@@ -1,6 +1,6 @@
# Ethereum 2.0 The Merge
**Warning:** This document is currently based on [Phase 0](../phase0/beacon-chain.md) but will be rebased to [Altair](../altair/beacon-chain.md) once the latter is shipped.
**Warning**: This document is currently based on [Phase 0](../phase0/beacon-chain.md) and will be rebased on [Altair](../altair/beacon-chain.md).
**Notice**: This document is a work-in-progress for researchers and implementers.
@@ -21,35 +21,36 @@
- [New containers](#new-containers)
- [`ExecutionPayload`](#executionpayload)
- [`ExecutionPayloadHeader`](#executionpayloadheader)
- [Protocols](#protocols)
- [`ExecutionEngine`](#executionengine)
- [`new_block`](#new_block)
- [Helper functions](#helper-functions)
- [Misc](#misc)
- [Predicates](#predicates)
- [`is_merge_complete`](#is_merge_complete)
- [`is_merge_block`](#is_merge_block)
- [`is_execution_enabled`](#is_execution_enabled)
- [`is_transition_completed`](#is_transition_completed)
- [`is_transition_block`](#is_transition_block)
- [`compute_time_at_slot`](#compute_time_at_slot)
- [Misc](#misc)
- [`compute_timestamp_at_slot`](#compute_timestamp_at_slot)
- [Beacon chain state transition function](#beacon-chain-state-transition-function)
- [Execution engine](#execution-engine)
- [`on_payload`](#on_payload)
- [Block processing](#block-processing)
- [Execution payload processing](#execution-payload-processing)
- [`process_execution_payload`](#process_execution_payload)
- [Initialize state for pure Merge testnets and test vectors](#initialize-state-for-pure-merge-testnets-and-test-vectors)
- [Execution payload processing](#execution-payload-processing)
- [`process_execution_payload`](#process_execution_payload)
- [Testing](#testing)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
## Introduction
This is a patch implementing the executable beacon chain proposal.
It enshrines transaction execution and validity as a first class citizen at the core of the beacon chain.
This patch adds transaction execution to the beacon chain as part of the Merge fork.
## Custom types
We define the following Python custom types for type hinting and readability:
*Note*: The `Transaction` type is a stub which is not final.
| Name | SSZ equivalent | Description |
| - | - | - |
| `OpaqueTransaction` | `ByteList[MAX_BYTES_PER_OPAQUE_TRANSACTION]` | a byte-list containing a single [typed transaction envelope](https://eips.ethereum.org/EIPS/eip-2718#opaque-byte-array-rather-than-an-rlp-array) structured as `TransactionType \|\| TransactionPayload` |
| `OpaqueTransaction` | `ByteList[MAX_BYTES_PER_OPAQUE_TRANSACTION]` | a [typed transaction envelope](https://eips.ethereum.org/EIPS/eip-2718#opaque-byte-array-rather-than-an-rlp-array) structured as `TransactionType \|\| TransactionPayload` |
| `Transaction` | `Union[OpaqueTransaction]` | a transaction |
## Constants
@@ -58,32 +59,26 @@ We define the following Python custom types for type hinting and readability:
| Name | Value |
| - | - |
| `MAX_BYTES_PER_OPAQUE_TRANSACTION` | `uint64(2**20)` (= 1,048,576) |
| `MAX_EXECUTION_TRANSACTIONS` | `uint64(2**14)` (= 16,384) |
| `MAX_TRANSACTIONS_PER_PAYLOAD` | `uint64(2**14)` (= 16,384) |
| `BYTES_PER_LOGS_BLOOM` | `uint64(2**8)` (= 256) |
## Containers
### Extended containers
*Note*: Extended SSZ containers inherit all fields from the parent in the original
order and append any additional fields to the end.
#### `BeaconBlockBody`
*Note*: `BeaconBlockBody` fields remain unchanged other than the addition of `execution_payload`.
```python
class BeaconBlockBody(phase0.BeaconBlockBody):
# Execution
execution_payload: ExecutionPayload # [New in Merge]
```
#### `BeaconState`
*Note*: `BeaconState` fields remain unchanged other than addition of `latest_execution_payload_header`.
```python
class BeaconState(phase0.BeaconState):
# Execution-layer
# Execution
latest_execution_payload_header: ExecutionPayloadHeader # [New in Merge]
```
@@ -91,103 +86,102 @@ class BeaconState(phase0.BeaconState):
#### `ExecutionPayload`
The execution payload included in a `BeaconBlockBody`.
```python
class ExecutionPayload(Container):
block_hash: Hash32 # Hash of execution block
# Execution block header fields
parent_hash: Hash32
coinbase: Bytes20
coinbase: Bytes20 # 'beneficiary' in the yellow paper
state_root: Bytes32
number: uint64
receipt_root: Bytes32 # 'receipts root' in the yellow paper
logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
random: Bytes32 # 'difficulty' in the yellow paper
block_number: uint64 # 'number' in the yellow paper
gas_limit: uint64
gas_used: uint64
timestamp: uint64
receipt_root: Bytes32
logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
transactions: List[OpaqueTransaction, MAX_EXECUTION_TRANSACTIONS]
# Extra payload fields
block_hash: Hash32 # Hash of execution block
transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD]
```
#### `ExecutionPayloadHeader`
The execution payload header included in a `BeaconState`.
*Note:* Holds execution payload data without transaction bodies.
```python
class ExecutionPayloadHeader(Container):
block_hash: Hash32 # Hash of execution block
# Execution block header fields
parent_hash: Hash32
coinbase: Bytes20
state_root: Bytes32
number: uint64
receipt_root: Bytes32
logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
random: Bytes32
block_number: uint64
gas_limit: uint64
gas_used: uint64
timestamp: uint64
receipt_root: Bytes32
logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
# Extra payload fields
block_hash: Hash32 # Hash of execution block
transactions_root: Root
```
## Protocols
### `ExecutionEngine`
The `ExecutionEngine` protocol separates the consensus and execution sub-systems.
The consensus implementation references an instance of this sub-system with `EXECUTION_ENGINE`.
The following methods are added to the `ExecutionEngine` protocol for use in the state transition:
#### `new_block`
Verifies the given `execution_payload` with respect to execution state transition, and persists changes if valid.
The body of this function is implementation dependent.
The Consensus API may be used to implement this with an external execution engine.
```python
def new_block(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
"""
Returns True if the ``execution_payload`` was verified and processed successfully, False otherwise.
"""
...
```
## Helper functions
### Misc
### Predicates
#### `is_merge_complete`
```python
def is_merge_complete(state: BeaconState) -> bool:
return state.latest_execution_payload_header != ExecutionPayloadHeader()
```
#### `is_merge_block`
```python
def is_merge_block(state: BeaconState, body: BeaconBlockBody) -> bool:
return not is_merge_complete(state) and body.execution_payload != ExecutionPayload()
```
#### `is_execution_enabled`
```python
def is_execution_enabled(state: BeaconState, block: BeaconBlock) -> bool:
return is_transition_completed(state) or is_transition_block(state, block)
def is_execution_enabled(state: BeaconState, body: BeaconBlockBody) -> bool:
return is_merge_block(state, body) or is_merge_complete(state)
```
#### `is_transition_completed`
### Misc
```python
def is_transition_completed(state: BeaconState) -> bool:
return state.latest_execution_payload_header != ExecutionPayloadHeader()
```
#### `is_transition_block`
```python
def is_transition_block(state: BeaconState, block: BeaconBlock) -> bool:
return not is_transition_completed(state) and block.body.execution_payload != ExecutionPayload()
```
#### `compute_time_at_slot`
#### `compute_timestamp_at_slot`
*Note*: This function is unsafe with respect to overflows and underflows.
```python
def compute_time_at_slot(state: BeaconState, slot: Slot) -> uint64:
def compute_timestamp_at_slot(state: BeaconState, slot: Slot) -> uint64:
slots_since_genesis = slot - GENESIS_SLOT
return uint64(state.genesis_time + slots_since_genesis * SECONDS_PER_SLOT)
```
## Beacon chain state transition function
### Execution engine
The implementation-dependent `ExecutionEngine` protocol encapsulates the execution sub-system logic via:
* a state object `self.execution_state` of type `ExecutionState`
* a state transition function `self.on_payload` which mutates `self.execution_state`
#### `on_payload`
```python
def on_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
"""
Returns ``True`` iff ``execution_payload`` is valid with respect to ``self.execution_state``.
"""
...
```
The above function is accessed through the `EXECUTION_ENGINE` module which instantiates the `ExecutionEngine` protocol.
### Block processing
```python
@@ -196,50 +190,49 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None:
process_randao(state, block.body)
process_eth1_data(state, block.body)
process_operations(state, block.body)
# Pre-merge, skip execution payload processing
if is_execution_enabled(state, block):
if is_execution_enabled(state, block.body):
process_execution_payload(state, block.body.execution_payload, EXECUTION_ENGINE) # [New in Merge]
```
#### Execution payload processing
### Execution payload processing
##### `process_execution_payload`
#### `process_execution_payload`
*Note:* This function depends on `process_randao` function call as it retrieves the most recent randao mix from the `state`. Implementations that are considering parallel processing of execution payload with respect to beacon chain state transition function should work around this dependency.
```python
def process_execution_payload(state: BeaconState,
execution_payload: ExecutionPayload,
execution_engine: ExecutionEngine) -> None:
"""
Note: This function is designed to be able to be run in parallel with the other `process_block` sub-functions
"""
if is_transition_completed(state):
assert execution_payload.parent_hash == state.latest_execution_payload_header.block_hash
assert execution_payload.number == state.latest_execution_payload_header.number + 1
assert execution_payload.timestamp == compute_time_at_slot(state, state.slot)
assert execution_engine.new_block(execution_payload)
def process_execution_payload(state: BeaconState, payload: ExecutionPayload, execution_engine: ExecutionEngine) -> None:
# Verify consistency of the parent hash, block number and random
if is_merge_complete(state):
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
assert payload.block_number == state.latest_execution_payload_header.block_number + uint64(1)
assert payload.random == get_randao_mix(state, get_current_epoch(state))
# Verify timestamp
assert payload.timestamp == compute_timestamp_at_slot(state, state.slot)
# Verify the execution payload is valid
assert execution_engine.on_payload(payload)
# Cache execution payload
state.latest_execution_payload_header = ExecutionPayloadHeader(
block_hash=execution_payload.block_hash,
parent_hash=execution_payload.parent_hash,
coinbase=execution_payload.coinbase,
state_root=execution_payload.state_root,
number=execution_payload.number,
gas_limit=execution_payload.gas_limit,
gas_used=execution_payload.gas_used,
timestamp=execution_payload.timestamp,
receipt_root=execution_payload.receipt_root,
logs_bloom=execution_payload.logs_bloom,
transactions_root=hash_tree_root(execution_payload.transactions),
parent_hash=payload.parent_hash,
coinbase=payload.coinbase,
state_root=payload.state_root,
receipt_root=payload.receipt_root,
logs_bloom=payload.logs_bloom,
random=payload.random,
block_number=payload.block_number,
gas_limit=payload.gas_limit,
gas_used=payload.gas_used,
timestamp=payload.timestamp,
block_hash=payload.block_hash,
transactions_root=hash_tree_root(payload.transactions),
)
```
## Initialize state for pure Merge testnets and test vectors
## Testing
This helper function is only for initializing the state for pure Merge testnets and tests.
*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure Merge testing only.
*Note*: The function `initialize_beacon_state_from_eth1` is modified: (1) using `MERGE_FORK_VERSION` as the current fork version, (2) utilizing the Merge `BeaconBlockBody` when constructing the initial `latest_block_header`, and (3) adding initial `latest_execution_payload_header`.
*Note*: The function `initialize_beacon_state_from_eth1` is modified to use `MERGE_FORK_VERSION` and initialize `latest_execution_payload_header`.
```python
def initialize_beacon_state_from_eth1(eth1_block_hash: Bytes32,
@@ -276,21 +269,10 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Bytes32,
# Set genesis validators root for domain separation and chain versioning
state.genesis_validators_root = hash_tree_root(state.validators)
# [New in Merge] Construct execution payload header
# Note: initialized with zero block height
state.latest_execution_payload_header = ExecutionPayloadHeader(
block_hash=eth1_block_hash,
parent_hash=Hash32(),
coinbase=Bytes20(),
state_root=Bytes32(),
number=uint64(0),
gas_limit=uint64(0),
gas_used=uint64(0),
timestamp=eth1_timestamp,
receipt_root=Bytes32(),
logs_bloom=ByteVector[BYTES_PER_LOGS_BLOOM](),
transactions_root=Root(),
)
# [New in Merge] Initialize the execution payload header (with block number set to 0)
state.latest_execution_payload_header.block_hash = eth1_block_hash
state.latest_execution_payload_header.timestamp = eth1_timestamp
state.latest_execution_payload_header.random = eth1_block_hash
return state
```

View File

@@ -127,7 +127,7 @@ def on_block(store: Store, signed_block: SignedBeaconBlock, transition_store: Tr
assert get_ancestor(store, block.parent_root, finalized_slot) == store.finalized_checkpoint.root
# [New in Merge]
if (transition_store is not None) and is_transition_block(pre_state, block):
if (transition_store is not None) and is_merge_block(pre_state, block):
# Delay consideration of block until PoW block is processed by the PoW node
pow_block = get_pow_block(block.body.execution_payload.parent_hash)
assert pow_block.is_processed

View File

@@ -49,7 +49,7 @@ The body of this function is implementation dependent.
The Consensus API may be used to implement this with an external execution engine.
```python
def assemble_block(self: ExecutionEngine, block_hash: Hash32, timestamp: uint64) -> ExecutionPayload:
def assemble_block(self: ExecutionEngine, block_hash: Hash32, timestamp: uint64, random: Bytes32) -> ExecutionPayload:
...
```
@@ -70,21 +70,34 @@ Let `get_pow_chain_head() -> PowBlock` be the function that returns the head of
* Set `block.body.execution_payload = get_execution_payload(state, transition_store, execution_engine)` where:
```python
def compute_randao_mix(state: BeaconState, randao_reveal: BLSSignature) -> Bytes32:
epoch = get_current_epoch(state)
return xor(get_randao_mix(state, epoch), hash(randao_reveal))
def produce_execution_payload(state: BeaconState,
parent_hash: Hash32,
randao_reveal: BLSSignature,
execution_engine: ExecutionEngine) -> ExecutionPayload:
timestamp = compute_timestamp_at_slot(state, state.slot)
randao_mix = compute_randao_mix(state, randao_reveal)
return execution_engine.assemble_block(parent_hash, timestamp, randao_mix)
def get_execution_payload(state: BeaconState,
transition_store: TransitionStore,
randao_reveal: BLSSignature,
execution_engine: ExecutionEngine) -> ExecutionPayload:
if not is_transition_completed(state):
if not is_merge_complete(state):
pow_block = get_pow_chain_head()
if not is_valid_terminal_pow_block(transition_store, pow_block):
# Pre-merge, empty payload
return ExecutionPayload()
else:
# Signify merge via producing on top of the last PoW block
timestamp = compute_time_at_slot(state, state.slot)
return execution_engine.assemble_block(pow_block.block_hash, timestamp)
return produce_execution_payload(state, pow_block.block_hash, randao_reveal, execution_engine)
# Post-merge, normal payload
execution_parent_hash = state.latest_execution_payload_header.block_hash
timestamp = compute_time_at_slot(state, state.slot)
return execution_engine.assemble_block(execution_parent_hash, timestamp)
parent_hash = state.latest_execution_payload_header.block_hash
return produce_execution_payload(state, parent_hash, randao_reveal, execution_engine)
```

View File

@@ -576,12 +576,12 @@ def update_pending_shard_work(state: BeaconState, attestation: Attestation) -> N
# TODO In Altair: set participation bit flag for voters of this early winning header
if pending_header.commitment == DataCommitment():
# The committee voted to not confirm anything
state.shard_buffer[buffer_index][attestation_shard].change(
state.shard_buffer[buffer_index][attestation_shard].status.change(
selector=SHARD_WORK_UNCONFIRMED,
value=None,
)
else:
state.shard_buffer[buffer_index][attestation_shard].change(
state.shard_buffer[buffer_index][attestation_shard].status.change(
selector=SHARD_WORK_CONFIRMED,
value=pending_header.commitment,
)
@@ -608,7 +608,7 @@ def process_shard_header(state: BeaconState, signed_header: SignedShardBlobHeade
assert committee_work.status.selector == SHARD_WORK_PENDING
# Check that this header is not yet in the pending list
current_headers: Sequence[PendingShardHeader] = committee_work.status.value
current_headers: List[PendingShardHeader, MAX_SHARD_HEADERS_PER_SHARD] = committee_work.status.value
header_root = hash_tree_root(header)
assert header_root not in [pending_header.root for pending_header in current_headers]
@@ -640,7 +640,7 @@ def process_shard_header(state: BeaconState, signed_header: SignedShardBlobHeade
)
# Include it in the pending list
state.shard_buffer[header.slot % SHARD_STATE_MEMORY_SLOTS][header.shard].append(pending_header)
current_headers.append(pending_header)
```
The degree proof works as follows. For a block `B` with length `l` (so `l` values in `[0...l - 1]`, seen as a polynomial `B(X)` which takes these values),
@@ -784,8 +784,8 @@ def reset_pending_shard_work(state: BeaconState) -> None:
for committee_index in range(committees_per_slot):
shard = (start_shard + committee_index) % active_shards
# a committee is available, initialize a pending shard-header list
committee_length = len(get_beacon_committee(state, slot, committee_index))
state.shard_buffer[buffer_index][shard].change(
committee_length = len(get_beacon_committee(state, slot, CommitteeIndex(committee_index)))
state.shard_buffer[buffer_index][shard].status.change(
selector=SHARD_WORK_PENDING,
value=List[PendingShardHeader, MAX_SHARD_HEADERS_PER_SHARD](
PendingShardHeader(

View File

@@ -1 +1 @@
1.1.0-alpha.7
1.1.0-alpha.8

View File

@@ -2,7 +2,7 @@ from typing import Any
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
uint, Container, List, boolean,
Vector, ByteVector, ByteList
Vector, ByteVector, ByteList, Union, View
)
@@ -27,5 +27,16 @@ def decode(data: Any, typ):
assert (data["hash_tree_root"][2:] ==
hash_tree_root(ret).hex())
return ret
elif issubclass(typ, Union):
selector = int(data["selector"])
options = typ.options()
value_typ = options[selector]
value: View
if value_typ is None: # handle the "nil" type case
assert data["value"] is None
value = None
else:
value = decode(data["value"], value_typ)
return typ(selector=selector, value=value)
else:
raise Exception(f"Type not recognized: data={data}, typ={typ}")

View File

@@ -1,7 +1,7 @@
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, serialize
from eth2spec.utils.ssz.ssz_typing import (
uint, boolean,
Bitlist, Bitvector, Container, Vector, List
Bitlist, Bitvector, Container, Vector, List, Union
)
@@ -31,5 +31,11 @@ def encode(value, include_hash_tree_roots=False):
if include_hash_tree_roots:
ret["hash_tree_root"] = '0x' + hash_tree_root(value).hex()
return ret
elif isinstance(value, Union):
inner_value = value.value()
return {
'selector': int(value.selector()),
'value': None if inner_value is None else encode(inner_value, include_hash_tree_roots)
}
else:
raise Exception(f"Type not recognized: value={value}, typ={type(value)}")

View File

@@ -5,7 +5,7 @@ from typing import Type
from eth2spec.utils.ssz.ssz_typing import (
View, BasicView, uint, Container, List, boolean,
Vector, ByteVector, ByteList, Bitlist, Bitvector
Vector, ByteVector, ByteList, Bitlist, Bitvector, Union
)
# in bytes
@@ -115,6 +115,22 @@ def get_random_ssz_object(rng: Random,
get_random_ssz_object(rng, field_type, max_bytes_length, max_list_length, mode, chaos)
for field_name, field_type in fields.items()
})
elif issubclass(typ, Union):
options = typ.options()
selector: int
if mode == RandomizationMode.mode_zero:
selector = 0
elif mode == RandomizationMode.mode_max:
selector = len(options) - 1
else:
selector = rng.randrange(0, len(options))
elem_type = options[selector]
elem: View
if elem_type is None:
elem = None
else:
elem = get_random_ssz_object(rng, elem_type, max_bytes_length, max_list_length, mode, chaos)
return typ(selector=selector, value=elem)
else:
raise Exception(f"Type not recognized: typ={typ}")

View File

@@ -93,13 +93,15 @@ def build_empty_block(spec, state, slot=None):
empty_block.body.eth1_data.deposit_count = state.eth1_deposit_index
empty_block.parent_root = parent_block_root
apply_randao_reveal(spec, state, empty_block)
if is_post_altair(spec):
empty_block.body.sync_aggregate.sync_committee_signature = spec.G2_POINT_AT_INFINITY
if is_post_merge(spec):
empty_block.body.execution_payload = build_empty_execution_payload(spec, state)
randao_mix = spec.compute_randao_mix(state, empty_block.body.randao_reveal)
empty_block.body.execution_payload = build_empty_execution_payload(spec, state, randao_mix)
apply_randao_reveal(spec, state, empty_block)
return empty_block

View File

@@ -1,22 +1,26 @@
def build_empty_execution_payload(spec, state):
def build_empty_execution_payload(spec, state, randao_mix=None):
"""
Assuming a pre-state of the same slot, build a valid ExecutionPayload without any transactions.
"""
latest = state.latest_execution_payload_header
timestamp = spec.compute_time_at_slot(state, state.slot)
empty_txs = spec.List[spec.OpaqueTransaction, spec.MAX_EXECUTION_TRANSACTIONS]()
empty_txs = spec.List[spec.Transaction, spec.MAX_TRANSACTIONS_PER_PAYLOAD]()
if randao_mix is None:
randao_mix = spec.get_randao_mix(state, spec.get_current_epoch(state))
payload = spec.ExecutionPayload(
block_hash=spec.Hash32(),
parent_hash=latest.block_hash,
coinbase=spec.Bytes20(),
state_root=latest.state_root, # no changes to the state
number=latest.number + 1,
receipt_root=b"no receipts here" + b"\x00" * 16, # TODO: root of empty MPT may be better.
logs_bloom=spec.ByteVector[spec.BYTES_PER_LOGS_BLOOM](), # TODO: zeroed logs bloom for empty logs ok?
block_number=latest.block_number + 1,
random=randao_mix,
gas_limit=latest.gas_limit, # retain same limit
gas_used=0, # empty block, 0 gas
timestamp=timestamp,
receipt_root=b"no receipts here" + b"\x00" * 16, # TODO: root of empty MPT may be better.
logs_bloom=spec.ByteVector[spec.BYTES_PER_LOGS_BLOOM](), # TODO: zeroed logs bloom for empty logs ok?
block_hash=spec.Hash32(),
transactions=empty_txs,
)
# TODO: real RLP + block hash logic would be nice, requires RLP and keccak256 dependency however.
@@ -27,16 +31,17 @@ def build_empty_execution_payload(spec, state):
def get_execution_payload_header(spec, execution_payload):
return spec.ExecutionPayloadHeader(
block_hash=execution_payload.block_hash,
parent_hash=execution_payload.parent_hash,
coinbase=execution_payload.coinbase,
state_root=execution_payload.state_root,
number=execution_payload.number,
receipt_root=execution_payload.receipt_root,
logs_bloom=execution_payload.logs_bloom,
random=execution_payload.random,
block_number=execution_payload.block_number,
gas_limit=execution_payload.gas_limit,
gas_used=execution_payload.gas_used,
timestamp=execution_payload.timestamp,
receipt_root=execution_payload.receipt_root,
logs_bloom=execution_payload.logs_bloom,
block_hash=execution_payload.block_hash,
transactions_root=spec.hash_tree_root(execution_payload.transactions)
)

View File

@@ -73,12 +73,13 @@ def check_proposer_slashing_effect(spec, pre_state, state, slashed_index, block=
def get_valid_proposer_slashing(spec, state, random_root=b'\x99' * 32,
slashed_index=None, signed_1=False, signed_2=False):
slashed_index=None, slot=None, signed_1=False, signed_2=False):
if slashed_index is None:
current_epoch = spec.get_current_epoch(state)
slashed_index = spec.get_active_validator_indices(state, current_epoch)[-1]
privkey = pubkey_to_privkey[state.validators[slashed_index].pubkey]
slot = state.slot
if slot is None:
slot = state.slot
header_1 = spec.BeaconBlockHeader(
slot=slot,

View File

@@ -25,7 +25,7 @@ def run_execution_payload_processing(spec, state, execution_payload, valid=True,
called_new_block = False
class TestEngine(spec.NoopExecutionEngine):
def new_block(self, payload) -> bool:
def on_payload(self, payload) -> bool:
nonlocal called_new_block, execution_valid
called_new_block = True
assert payload == execution_payload
@@ -153,7 +153,7 @@ def test_bad_number_regular_payload(spec, state):
# execution payload
execution_payload = build_empty_execution_payload(spec, state)
execution_payload.number = execution_payload.number + 1
execution_payload.block_number = execution_payload.block_number + 1
yield from run_execution_payload_processing(spec, state, execution_payload, valid=False)
@@ -168,7 +168,7 @@ def test_bad_everything_regular_payload(spec, state):
# execution payload
execution_payload = build_empty_execution_payload(spec, state)
execution_payload.parent_hash = spec.Hash32()
execution_payload.number = execution_payload.number + 1
execution_payload.block_number = execution_payload.block_number + 1
yield from run_execution_payload_processing(spec, state, execution_payload, valid=False)

View File

@@ -6,8 +6,10 @@ from eth2spec.test.context import (
low_balances, misc_balances,
)
from eth2spec.test.helpers.attestations import sign_indexed_attestation
from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing, \
get_indexed_attestation_participants, get_attestation_2_data, get_attestation_1_data
from eth2spec.test.helpers.attester_slashings import (
get_valid_attester_slashing, get_valid_attester_slashing_by_indices,
get_indexed_attestation_participants, get_attestation_2_data, get_attestation_1_data,
)
from eth2spec.test.helpers.proposer_slashings import get_min_slashing_penalty_quotient
from eth2spec.test.helpers.state import (
get_balance,
@@ -126,6 +128,39 @@ def test_success_already_exited_recent(spec, state):
yield from run_attester_slashing_processing(spec, state, attester_slashing)
@with_all_phases
@spec_state_test
@always_bls
def test_success_proposer_index_slashed(spec, state):
# Transition past genesis slot because generally doesn't have a proposer
next_epoch_via_block(spec, state)
proposer_index = spec.get_beacon_proposer_index(state)
attester_slashing = get_valid_attester_slashing_by_indices(
spec, state,
[proposer_index],
signed_1=True, signed_2=True,
)
yield from run_attester_slashing_processing(spec, state, attester_slashing)
@with_all_phases
@spec_state_test
def test_success_attestation_from_future(spec, state):
# Transition state to future to enable generation of a "future" attestation
future_state = state.copy()
next_epoch_via_block(spec, future_state)
# Generate slashing using the future state
attester_slashing = get_valid_attester_slashing(
spec, future_state,
slot=state.slot + 5, # Slot is in the future wrt `state`
signed_1=True, signed_2=True
)
yield from run_attester_slashing_processing(spec, state, attester_slashing)
@with_all_phases
@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@spec_test

View File

@@ -35,6 +35,14 @@ def run_proposer_slashing_processing(spec, state, proposer_slashing, valid=True)
@with_all_phases
@spec_state_test
def test_success(spec, state):
proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True)
yield from run_proposer_slashing_processing(spec, state, proposer_slashing)
@with_all_phases
@spec_state_test
def test_success_slashed_and_proposer_index_the_same(spec, state):
# Get proposer for next slot
block = build_empty_block_for_next_slot(spec, state)
proposer_index = block.proposer_index
@@ -49,8 +57,8 @@ def test_success(spec, state):
@with_all_phases
@spec_state_test
def test_success_slashed_and_proposer_index_the_same(spec, state):
proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True)
def test_success_block_header_from_future(spec, state):
proposer_slashing = get_valid_proposer_slashing(spec, state, slot=state.slot + 5, signed_1=True, signed_2=True)
yield from run_proposer_slashing_processing(spec, state, proposer_slashing)