mirror of
https://github.com/vacp2p/linea-besu.git
synced 2026-01-09 07:27:55 -05:00
Due to subtle differences between Bytes32 and UInt256 the changes failed burn-in testing and are being reverted. Signed-off-by: Danno Ferrin <danno.ferrin@swirldslabs.com> Signed-off-by: Danno Ferrin <danno.ferrin@swirldslabs.com>
This commit is contained in:
@@ -2,7 +2,6 @@
|
||||
## 22.10.0
|
||||
### Breaking Changes
|
||||
- Version 22.10.0 will require Java 17 to build and run.
|
||||
- Internal and interface APIs relating to storage have migrated from `UInt256` to `Bytes32` [#4562](https://github.com/hyperledger/besu/pull/4562)
|
||||
|
||||
### Additions and Improvements
|
||||
- Updated jackson-databind library to version 2.13.4.2 addressing [CVE-2022-42003](https://nvd.nist.gov/vuln/detail/CVE-2022-42003)
|
||||
|
||||
@@ -27,7 +27,7 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public class TestPrivacyGroupGenesisProvider implements PrivacyGroupGenesisProvider {
|
||||
private boolean genesisEnabled = false;
|
||||
@@ -50,7 +50,7 @@ public class TestPrivacyGroupGenesisProvider implements PrivacyGroupGenesisProvi
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Bytes32, Bytes32> getStorage() {
|
||||
public Map<UInt256, UInt256> getStorage() {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
|
||||
@@ -99,7 +99,7 @@ public class PrivGetPrivateTransactionAcceptanceTest extends ParameterizedEnclav
|
||||
|
||||
@Test
|
||||
public void nonExistentHashReturnsNull() {
|
||||
alice.getBesu().verify(priv.getPrivateTransactionReturnsNull(Hash.ZERO_HASH));
|
||||
alice.getBesu().verify(priv.getPrivateTransactionReturnsNull(Hash.ZERO));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -21,7 +21,6 @@ import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.core.PrivacyParameters.DEFAULT_PRIVACY;
|
||||
|
||||
import org.hyperledger.besu.crypto.KeyPair;
|
||||
@@ -213,7 +212,7 @@ public class MultiTenancyAcceptanceTest extends AcceptanceTestBase {
|
||||
public void privDistributeRawTransactionSuccessShouldReturnEnclaveKey()
|
||||
throws JsonProcessingException {
|
||||
final String enclaveResponseKeyBytes =
|
||||
Bytes.fromBase64String(PARTICIPANT_ENCLAVE_KEY1).toString();
|
||||
Bytes.wrap(Bytes.fromBase64String(PARTICIPANT_ENCLAVE_KEY1)).toString();
|
||||
|
||||
retrievePrivacyGroupEnclaveStub();
|
||||
sendEnclaveStub(PARTICIPANT_ENCLAVE_KEY1);
|
||||
@@ -250,7 +249,8 @@ public class MultiTenancyAcceptanceTest extends AcceptanceTestBase {
|
||||
final String privateTxRlp = getRLPOutput(validSignedPrivateTransaction).encoded().toHexString();
|
||||
|
||||
retrieveEeaPrivacyGroupEnclaveStub(validSignedPrivateTransaction);
|
||||
sendEnclaveStub(ZERO_32.toBase64String()); // can be any value, as we are stubbing the enclave
|
||||
sendEnclaveStub(
|
||||
Bytes32.ZERO.toBase64String()); // can be any value, as we are stubbing the enclave
|
||||
receiveEnclaveStubEea(validSignedPrivateTransaction);
|
||||
|
||||
final String privateFrom = validSignedPrivateTransaction.getPrivateFrom().toBase64String();
|
||||
|
||||
@@ -218,7 +218,7 @@ public class RestoreState implements Runnable {
|
||||
throw new RuntimeException("Unexpected storage trie entry length " + len);
|
||||
}
|
||||
final Bytes32 storageTrieKey = Bytes32.wrap(trieInput.readBytes());
|
||||
final Bytes storageTrieValue = trieInput.readBytes();
|
||||
final Bytes storageTrieValue = Bytes.wrap(trieInput.readBytes());
|
||||
final RestoreVisitor<Bytes> storageTrieWriteVisitor =
|
||||
new RestoreVisitor<>(t -> t, storageTrieValue, storagePersistVisitor);
|
||||
storageRoot = storageRoot.accept(storageTrieWriteVisitor, bytesToPath(storageTrieKey));
|
||||
|
||||
@@ -94,10 +94,10 @@ public class BesuControllerBuilderTest {
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO_HASH.toHexString());
|
||||
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
|
||||
when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
|
||||
when(genesisConfigFile.getExtraData()).thenReturn(Bytes.EMPTY.toHexString());
|
||||
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO_HASH.toHexString());
|
||||
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
|
||||
when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
|
||||
when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
|
||||
when(genesisConfigOptions.getThanosBlockNumber()).thenReturn(OptionalLong.empty());
|
||||
|
||||
@@ -104,17 +104,17 @@ public class MergeBesuControllerBuilderTest {
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO_HASH.toHexString());
|
||||
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
|
||||
when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
|
||||
when(genesisConfigFile.getExtraData()).thenReturn(Bytes.EMPTY.toHexString());
|
||||
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO_HASH.toHexString());
|
||||
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
|
||||
when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
|
||||
when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
|
||||
when(genesisConfigOptions.getCheckpointOptions()).thenReturn(checkpointConfigOptions);
|
||||
when(genesisConfigOptions.getTerminalTotalDifficulty())
|
||||
.thenReturn((Optional.of(UInt256.valueOf(100L))));
|
||||
when(genesisConfigOptions.getThanosBlockNumber()).thenReturn(OptionalLong.empty());
|
||||
when(genesisConfigOptions.getTerminalBlockHash()).thenReturn(Optional.of(Hash.ZERO_HASH));
|
||||
when(genesisConfigOptions.getTerminalBlockHash()).thenReturn(Optional.of(Hash.ZERO));
|
||||
when(genesisConfigOptions.getTerminalBlockNumber()).thenReturn(OptionalLong.of(1L));
|
||||
when(storageProvider.createBlockchainStorage(any()))
|
||||
.thenReturn(
|
||||
|
||||
@@ -94,10 +94,10 @@ public class QbftBesuControllerBuilderTest {
|
||||
@Before
|
||||
public void setup() {
|
||||
// besu controller setup
|
||||
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO_HASH.toHexString());
|
||||
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
|
||||
when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
|
||||
when(genesisConfigFile.getExtraData()).thenReturn(Bytes.EMPTY.toHexString());
|
||||
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO_HASH.toHexString());
|
||||
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
|
||||
when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
|
||||
when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
|
||||
when(genesisConfigOptions.getCheckpointOptions()).thenReturn(checkpointConfigOptions);
|
||||
|
||||
@@ -90,7 +90,7 @@ public class BlockHeaderValidationRulesetFactory {
|
||||
}
|
||||
|
||||
var mixHashRule =
|
||||
new ConstantFieldValidationRule<>("MixHash", BlockHeader::getMixHash, Hash.ZERO_HASH);
|
||||
new ConstantFieldValidationRule<>("MixHash", BlockHeader::getMixHash, Hash.ZERO);
|
||||
var voteValidationRule = new VoteValidationRule();
|
||||
var cliqueTimestampRule = new TimestampMoreRecentThanParent(secondsBetweenBlocks);
|
||||
|
||||
|
||||
@@ -88,7 +88,7 @@ public class CliqueBlockCreator extends AbstractBlockCreator {
|
||||
final BlockHeaderBuilder builder =
|
||||
BlockHeaderBuilder.create()
|
||||
.populateFrom(sealableBlockHeader)
|
||||
.mixHash(Hash.ZERO_HASH)
|
||||
.mixHash(Hash.ZERO)
|
||||
.blockHeaderFunctions(blockHeaderFunctions);
|
||||
|
||||
final Optional<ValidatorVote> vote = determineCliqueVote(sealableBlockHeader);
|
||||
|
||||
@@ -128,15 +128,15 @@ public class CliqueBlockHashingTest {
|
||||
LogsBloomFilter.fromHexString(
|
||||
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"));
|
||||
builder.coinbase(Address.fromHexString("0x0000000000000000000000000000000000000000"));
|
||||
builder.mixHash(Hash.ZERO_HASH);
|
||||
builder.mixHash(Hash.ZERO);
|
||||
builder.nonce(0);
|
||||
builder.number(0);
|
||||
builder.parentHash(Hash.ZERO_HASH);
|
||||
builder.receiptsRoot(Hash.ZERO_HASH);
|
||||
builder.ommersHash(Hash.ZERO_HASH);
|
||||
builder.stateRoot(Hash.ZERO_HASH);
|
||||
builder.parentHash(Hash.ZERO);
|
||||
builder.receiptsRoot(Hash.ZERO);
|
||||
builder.ommersHash(Hash.ZERO);
|
||||
builder.stateRoot(Hash.ZERO);
|
||||
builder.timestamp(1492009146);
|
||||
builder.transactionsRoot(Hash.ZERO_HASH);
|
||||
builder.transactionsRoot(Hash.ZERO);
|
||||
|
||||
builder.blockHeaderFunctions(new CliqueBlockHeaderFunctions());
|
||||
|
||||
|
||||
@@ -239,7 +239,7 @@ public class NodeCanProduceNextBlockTest {
|
||||
final CliqueContext cliqueContext = new CliqueContext(validatorProvider, null, blockInterface);
|
||||
cliqueProtocolContext = new ProtocolContext(blockChain, null, cliqueContext);
|
||||
|
||||
headerBuilder.parentHash(Hash.ZERO_HASH).number(3);
|
||||
headerBuilder.parentHash(Hash.ZERO).number(3);
|
||||
final BlockHeader parentHeader =
|
||||
TestHelpers.createCliqueSignedBlockHeader(headerBuilder, otherNodeKeyPair, validatorList);
|
||||
|
||||
@@ -262,7 +262,7 @@ public class NodeCanProduceNextBlockTest {
|
||||
final CliqueContext cliqueContext = new CliqueContext(validatorProvider, null, blockInterface);
|
||||
cliqueProtocolContext = new ProtocolContext(blockChain, null, cliqueContext);
|
||||
|
||||
headerBuilder.parentHash(Hash.ZERO_HASH).number(3);
|
||||
headerBuilder.parentHash(Hash.ZERO).number(3);
|
||||
final BlockHeader parentHeader = headerBuilder.buildHeader();
|
||||
assertThat(
|
||||
CliqueHelpers.addressIsAllowedToProduceNextBlock(
|
||||
|
||||
@@ -85,8 +85,7 @@ public class CliqueMiningCoordinatorTest {
|
||||
public void setup() {
|
||||
|
||||
headerTestFixture.number(1);
|
||||
Block genesisBlock =
|
||||
createEmptyBlock(0, Hash.ZERO_HASH, proposerKeys); // not normally signed but ok
|
||||
Block genesisBlock = createEmptyBlock(0, Hash.ZERO, proposerKeys); // not normally signed but ok
|
||||
blockChain = createInMemoryBlockchain(genesisBlock);
|
||||
|
||||
when(validatorProvider.getValidatorsAfterBlock(any())).thenReturn(validators);
|
||||
|
||||
@@ -72,7 +72,7 @@ public class VoteTallyCacheTest extends VoteTallyCacheTestBase {
|
||||
final VoteTallyCache cache =
|
||||
new VoteTallyCache(blockChain, tallyUpdater, new EpochManager(30_000), blockInterface);
|
||||
|
||||
final Block orphanBlock = createEmptyBlock(4, Hash.ZERO_HASH);
|
||||
final Block orphanBlock = createEmptyBlock(4, Hash.ZERO);
|
||||
|
||||
assertThatExceptionOfType(UncheckedExecutionException.class)
|
||||
.isThrownBy(() -> cache.getVoteTallyAfterBlock(orphanBlock.getHeader()))
|
||||
|
||||
@@ -62,7 +62,7 @@ public class VoteTallyCacheTestBase {
|
||||
}
|
||||
headerBuilder.extraData(Bytes.wrap(new byte[32]));
|
||||
|
||||
genesisBlock = createEmptyBlock(0, Hash.ZERO_HASH);
|
||||
genesisBlock = createEmptyBlock(0, Hash.ZERO);
|
||||
|
||||
blockChain = createInMemoryBlockchain(genesisBlock);
|
||||
|
||||
|
||||
@@ -274,7 +274,7 @@ public class TestContextBuilder {
|
||||
headerTestFixture.ommersHash(Hash.EMPTY_LIST_HASH);
|
||||
headerTestFixture.nonce(0);
|
||||
headerTestFixture.timestamp(0);
|
||||
headerTestFixture.parentHash(Hash.ZERO_HASH);
|
||||
headerTestFixture.parentHash(Hash.ZERO);
|
||||
headerTestFixture.gasLimit(5000);
|
||||
headerTestFixture.coinbase(coinbase);
|
||||
|
||||
|
||||
@@ -119,13 +119,13 @@ public class SpuriousBehaviourTest {
|
||||
peers.getProposer().injectProposal(roundId, proposedBlock);
|
||||
peers.verifyMessagesReceived(expectedPrepare);
|
||||
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO_HASH);
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO);
|
||||
peers.verifyNoMessagesReceived();
|
||||
|
||||
peers.prepareForNonProposing(roundId, proposedBlock.getHash());
|
||||
peers.verifyMessagesReceived(expectedCommit);
|
||||
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO_HASH);
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO);
|
||||
assertThat(context.getCurrentChainHeight()).isEqualTo(0);
|
||||
|
||||
peers.commitForNonProposing(roundId, proposedBlock.getHash());
|
||||
@@ -145,7 +145,7 @@ public class SpuriousBehaviourTest {
|
||||
|
||||
// nonProposer-2 will generate an invalid seal
|
||||
final ValidatorPeer badSealPeer = peers.getNonProposing(2);
|
||||
final SECPSignature illegalSeal = badSealPeer.getnodeKey().sign(Hash.ZERO_HASH);
|
||||
final SECPSignature illegalSeal = badSealPeer.getnodeKey().sign(Hash.ZERO);
|
||||
|
||||
badSealPeer.injectCommit(roundId, proposedBlock.getHash(), illegalSeal);
|
||||
assertThat(context.getCurrentChainHeight()).isEqualTo(0);
|
||||
|
||||
@@ -41,7 +41,7 @@ public class IbftGetValidatorsByBlockHashTest {
|
||||
|
||||
private static final String ETH_METHOD = "ibft_getValidatorsByBlockHash";
|
||||
private static final String JSON_RPC_VERSION = "2.0";
|
||||
private static final String ZERO_HASH = String.valueOf(Hash.ZERO_HASH);
|
||||
private static final String ZERO_HASH = String.valueOf(Hash.ZERO);
|
||||
|
||||
@Mock private Blockchain blockchain;
|
||||
@Mock private BlockHeader blockHeader;
|
||||
@@ -62,7 +62,7 @@ public class IbftGetValidatorsByBlockHashTest {
|
||||
|
||||
@Test
|
||||
public void shouldReturnListOfValidatorsFromBlock() {
|
||||
when(blockchain.getBlockHeader(Hash.ZERO_HASH)).thenReturn(Optional.of(blockHeader));
|
||||
when(blockchain.getBlockHeader(Hash.ZERO)).thenReturn(Optional.of(blockHeader));
|
||||
final List<Address> addresses = Collections.singletonList(Address.ID);
|
||||
final List<String> expectedOutput = Collections.singletonList(Address.ID.toString());
|
||||
when(bftBlockInterface.validatorsInBlock(blockHeader)).thenReturn(addresses);
|
||||
|
||||
@@ -104,7 +104,7 @@ public class IbftControllerTest {
|
||||
when(bftFinalState.getValidators()).thenReturn(ImmutableList.of(validator));
|
||||
|
||||
when(chainHeadBlockHeader.getNumber()).thenReturn(3L);
|
||||
when(chainHeadBlockHeader.getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(chainHeadBlockHeader.getHash()).thenReturn(Hash.ZERO);
|
||||
|
||||
when(blockHeightManager.getParentBlockHeader()).thenReturn(chainHeadBlockHeader);
|
||||
when(blockHeightManager.getChainHeight()).thenReturn(4L); // one great than blockchain
|
||||
@@ -203,7 +203,7 @@ public class IbftControllerTest {
|
||||
ibftController.start();
|
||||
long chainHeadHeight = chainHeadBlockHeader.getNumber();
|
||||
when(nextBlock.getNumber()).thenReturn(chainHeadHeight);
|
||||
when(nextBlock.getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(nextBlock.getHash()).thenReturn(Hash.ZERO);
|
||||
final NewChainHead sameHeightBlock = new NewChainHead(nextBlock);
|
||||
ibftController.handleNewBlockEvent(sameHeightBlock);
|
||||
verify(blockHeightManagerFactory, times(1)).create(any()); // initial creation
|
||||
|
||||
@@ -72,8 +72,7 @@ public class SignedDataValidatorTest {
|
||||
|
||||
@Test
|
||||
public void receivingAPrepareMessageBeforeProposalFails() {
|
||||
final Prepare prepareMsg =
|
||||
proposerMessageFactory.createPrepare(roundIdentifier, Hash.ZERO_HASH);
|
||||
final Prepare prepareMsg = proposerMessageFactory.createPrepare(roundIdentifier, Hash.ZERO);
|
||||
|
||||
assertThat(validator.validatePrepare(prepareMsg.getSignedPayload())).isFalse();
|
||||
}
|
||||
@@ -82,7 +81,7 @@ public class SignedDataValidatorTest {
|
||||
public void receivingACommitMessageBeforeProposalFails() {
|
||||
final Commit commitMsg =
|
||||
proposerMessageFactory.createCommit(
|
||||
roundIdentifier, Hash.ZERO_HASH, proposerKey.sign(block.getHash()));
|
||||
roundIdentifier, Hash.ZERO, proposerKey.sign(block.getHash()));
|
||||
|
||||
assertThat(validator.validateCommit(commitMsg.getSignedPayload())).isFalse();
|
||||
}
|
||||
|
||||
@@ -645,7 +645,7 @@ public class MergeCoordinator implements MergeMiningCoordinator, BadChainListene
|
||||
header -> {
|
||||
// if block is PoW, return ZERO hash
|
||||
if (header.getDifficulty().greaterThan(Difficulty.ZERO)) {
|
||||
return Hash.ZERO_HASH;
|
||||
return Hash.ZERO;
|
||||
} else {
|
||||
return header.getHash();
|
||||
}
|
||||
|
||||
@@ -16,7 +16,6 @@ package org.hyperledger.besu.consensus.merge.blockcreation;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.fail;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider.createInMemoryBlockchain;
|
||||
import static org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider.createInMemoryWorldStateArchive;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
@@ -191,7 +190,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(),
|
||||
System.currentTimeMillis() / 1000,
|
||||
ZERO_32,
|
||||
Bytes32.ZERO,
|
||||
suggestedFeeRecipient);
|
||||
|
||||
ArgumentCaptor<Block> block = ArgumentCaptor.forClass(Block.class);
|
||||
@@ -225,7 +224,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(),
|
||||
System.currentTimeMillis() / 1000,
|
||||
ZERO_32,
|
||||
Bytes32.ZERO,
|
||||
suggestedFeeRecipient);
|
||||
|
||||
blockCreationTask.get();
|
||||
@@ -267,7 +266,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(),
|
||||
System.currentTimeMillis() / 1000,
|
||||
ZERO_32,
|
||||
Bytes32.ZERO,
|
||||
suggestedFeeRecipient);
|
||||
|
||||
blockCreationTask.get();
|
||||
@@ -297,7 +296,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(),
|
||||
System.currentTimeMillis() / 1000,
|
||||
ZERO_32,
|
||||
Bytes32.ZERO,
|
||||
suggestedFeeRecipient);
|
||||
|
||||
try {
|
||||
@@ -338,7 +337,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(),
|
||||
System.currentTimeMillis() / 1000,
|
||||
ZERO_32,
|
||||
Bytes32.ZERO,
|
||||
suggestedFeeRecipient);
|
||||
|
||||
waitForBlockCreationInProgress.await();
|
||||
@@ -379,13 +378,13 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
|
||||
var payloadId1 =
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(), timestamp, ZERO_32, suggestedFeeRecipient);
|
||||
genesisState.getBlock().getHeader(), timestamp, Bytes32.ZERO, suggestedFeeRecipient);
|
||||
|
||||
final CompletableFuture<Void> task1 = blockCreationTask;
|
||||
|
||||
var payloadId2 =
|
||||
coordinator.preparePayload(
|
||||
genesisState.getBlock().getHeader(), timestamp, ZERO_32, suggestedFeeRecipient);
|
||||
genesisState.getBlock().getHeader(), timestamp, Bytes32.ZERO, suggestedFeeRecipient);
|
||||
|
||||
assertThat(payloadId1).isEqualTo(payloadId2);
|
||||
|
||||
@@ -409,7 +408,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void childTimestampExceedsParentsFails() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader parentHeader = nextBlockHeader(terminalHeader);
|
||||
Block parent = new Block(parentHeader, BlockBody.empty());
|
||||
@@ -441,7 +440,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void latestValidAncestorDescendsFromTerminal() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader parentHeader = nextBlockHeader(terminalHeader);
|
||||
Block parent = new Block(parentHeader, BlockBody.empty());
|
||||
@@ -449,7 +448,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
// if latest valid ancestor is PoW, then latest valid hash should be Hash.ZERO
|
||||
var lvh = this.coordinator.getLatestValidAncestor(parentHeader);
|
||||
assertThat(lvh).isPresent();
|
||||
assertThat(lvh.get()).isEqualTo(Hash.ZERO_HASH);
|
||||
assertThat(lvh.get()).isEqualTo(Hash.ZERO);
|
||||
|
||||
sendNewPayloadAndForkchoiceUpdate(parent, Optional.empty(), terminalHeader.getHash());
|
||||
BlockHeader childHeader = nextBlockHeader(parentHeader);
|
||||
@@ -466,7 +465,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void latestValidAncestorDescendsFromFinalizedBlock() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader grandParentHeader = nextBlockHeader(terminalHeader);
|
||||
Block grandParent = new Block(grandParentHeader, BlockBody.empty());
|
||||
@@ -474,7 +473,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
// if latest valid ancestor is PoW, then latest valid hash should be Hash.ZERO
|
||||
var lvh = this.coordinator.getLatestValidAncestor(grandParentHeader);
|
||||
assertThat(lvh).isPresent();
|
||||
assertThat(lvh.get()).isEqualTo(Hash.ZERO_HASH);
|
||||
assertThat(lvh.get()).isEqualTo(Hash.ZERO);
|
||||
|
||||
sendNewPayloadAndForkchoiceUpdate(grandParent, Optional.empty(), terminalHeader.getHash());
|
||||
BlockHeader parentHeader = nextBlockHeader(grandParentHeader);
|
||||
@@ -500,7 +499,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void updateForkChoiceShouldPersistFirstFinalizedBlockHash() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader firstFinalizedHeader = nextBlockHeader(terminalHeader);
|
||||
Block firstFinalizedBlock = new Block(firstFinalizedHeader, BlockBody.empty());
|
||||
@@ -571,7 +570,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void updateForkChoiceShouldPersistLastFinalizedBlockHash() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader prevFinalizedHeader = nextBlockHeader(terminalHeader);
|
||||
Block prevFinalizedBlock = new Block(prevFinalizedHeader, BlockBody.empty());
|
||||
@@ -764,7 +763,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void invalidPayloadShouldReturnErrorAndUpdateForkchoiceState() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader prevFinalizedHeader = nextBlockHeader(terminalHeader);
|
||||
Block prevFinalizedBlock = new Block(prevFinalizedHeader, BlockBody.empty());
|
||||
@@ -788,7 +787,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
lastFinalizedBlock.getHash(),
|
||||
Optional.of(
|
||||
new PayloadAttributes(
|
||||
headBlockHeader.getTimestamp() - 1, Hash.ZERO_HASH, Address.ZERO)));
|
||||
headBlockHeader.getTimestamp() - 1, Hash.ZERO, Address.ZERO)));
|
||||
|
||||
assertThat(res.isValid()).isFalse();
|
||||
assertThat(res.getStatus()).isEqualTo(ForkchoiceResult.Status.INVALID_PAYLOAD_ATTRIBUTES);
|
||||
@@ -803,7 +802,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
public void forkchoiceUpdateShouldIgnoreAncestorOfChainHead() {
|
||||
BlockHeader terminalHeader = terminalPowBlock();
|
||||
sendNewPayloadAndForkchoiceUpdate(
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO_HASH);
|
||||
new Block(terminalHeader, BlockBody.empty()), Optional.empty(), Hash.ZERO);
|
||||
|
||||
BlockHeader parentHeader = nextBlockHeader(terminalHeader);
|
||||
Block parent = new Block(parentHeader, BlockBody.empty());
|
||||
@@ -816,11 +815,10 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
ForkchoiceResult res =
|
||||
coordinator.updateForkChoice(
|
||||
parentHeader,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
terminalHeader.getHash(),
|
||||
Optional.of(
|
||||
new PayloadAttributes(
|
||||
parentHeader.getTimestamp() + 1, Hash.ZERO_HASH, Address.ZERO)));
|
||||
new PayloadAttributes(parentHeader.getTimestamp() + 1, Hash.ZERO, Address.ZERO)));
|
||||
|
||||
assertThat(res.getStatus()).isEqualTo(ForkchoiceResult.Status.IGNORE_UPDATE_TO_OLD_HEAD);
|
||||
assertThat(res.getNewHead().isEmpty()).isTrue();
|
||||
@@ -839,7 +837,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
coordinator
|
||||
.updateForkChoice(
|
||||
block.getHeader(),
|
||||
finalizedHeader.map(BlockHeader::getHash).orElse(Hash.ZERO_HASH),
|
||||
finalizedHeader.map(BlockHeader::getHash).orElse(Hash.ZERO),
|
||||
safeHash,
|
||||
Optional.empty())
|
||||
.isValid())
|
||||
@@ -896,7 +894,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
.number(0L)
|
||||
.difficulty(hasTerminalPoW ? mockTTD : Difficulty.ZERO)
|
||||
.buildHeader());
|
||||
when(terminal.getParentHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(terminal.getParentHash()).thenReturn(Hash.ZERO);
|
||||
|
||||
// return decreasing numbered blocks:
|
||||
final var invocations = new AtomicLong(chainDepth);
|
||||
@@ -913,7 +911,7 @@ public class MergeCoordinatorTest implements MergeGenesisConfigHelper {
|
||||
|
||||
// mock total difficulty for isTerminalProofOfWorkBlock invocation:
|
||||
when(mockBlockchain.getTotalDifficultyByHash(any())).thenReturn(Optional.of(Difficulty.ZERO));
|
||||
when(mockBlockchain.getBlockHeader(Hash.ZERO_HASH)).thenReturn(Optional.empty());
|
||||
when(mockBlockchain.getBlockHeader(Hash.ZERO)).thenReturn(Optional.empty());
|
||||
|
||||
var mockContext = mock(MergeContext.class);
|
||||
when(mockContext.getTerminalTotalDifficulty()).thenReturn(mockTTD);
|
||||
|
||||
@@ -43,7 +43,7 @@ public class PayloadIdentifierTest {
|
||||
public void conversionCoverage() {
|
||||
var idTest =
|
||||
PayloadIdentifier.forPayloadParams(
|
||||
Hash.ZERO_HASH, 1337L, Bytes32.random(), Address.fromHexString("0x42"));
|
||||
Hash.ZERO, 1337L, Bytes32.random(), Address.fromHexString("0x42"));
|
||||
assertThat(new PayloadIdentifier(idTest.getAsBigInteger().longValue())).isEqualTo(idTest);
|
||||
assertThat(new PayloadIdentifier(idTest.getAsBigInteger().longValue())).isEqualTo(idTest);
|
||||
}
|
||||
|
||||
@@ -355,7 +355,7 @@ public class TestContextBuilder {
|
||||
headerTestFixture.ommersHash(Hash.EMPTY_LIST_HASH);
|
||||
headerTestFixture.nonce(0);
|
||||
headerTestFixture.timestamp(0);
|
||||
headerTestFixture.parentHash(Hash.ZERO_HASH);
|
||||
headerTestFixture.parentHash(Hash.ZERO);
|
||||
headerTestFixture.gasLimit(5000);
|
||||
headerTestFixture.coinbase(coinbase);
|
||||
|
||||
|
||||
@@ -120,13 +120,13 @@ public class SpuriousBehaviourTest {
|
||||
peers.getProposer().injectProposal(roundId, proposedBlock);
|
||||
peers.verifyMessagesReceived(expectedPrepare);
|
||||
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO_HASH);
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO);
|
||||
peers.verifyNoMessagesReceived();
|
||||
|
||||
peers.prepareForNonProposing(roundId, proposedBlock.getHash());
|
||||
peers.verifyMessagesReceived(expectedCommit);
|
||||
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO_HASH);
|
||||
peers.prepareForNonProposing(roundId, Hash.ZERO);
|
||||
assertThat(context.getCurrentChainHeight()).isEqualTo(0);
|
||||
|
||||
peers.commitForNonProposing(roundId, proposedBlock);
|
||||
@@ -146,7 +146,7 @@ public class SpuriousBehaviourTest {
|
||||
|
||||
// nonProposer-2 will generate an invalid seal
|
||||
final ValidatorPeer badSealPeer = peers.getNonProposing(2);
|
||||
final SECPSignature illegalSeal = badSealPeer.getnodeKey().sign(Hash.ZERO_HASH);
|
||||
final SECPSignature illegalSeal = badSealPeer.getnodeKey().sign(Hash.ZERO);
|
||||
|
||||
badSealPeer.injectCommit(roundId, proposedBlock.getHash(), illegalSeal);
|
||||
assertThat(context.getCurrentChainHeight()).isEqualTo(0);
|
||||
|
||||
@@ -42,7 +42,7 @@ public class QbftGetValidatorsByBlockHashTest {
|
||||
|
||||
private static final String ETH_METHOD = "qbft_getValidatorsByBlockHash";
|
||||
private static final String JSON_RPC_VERSION = "2.0";
|
||||
private static final String ZERO_HASH = String.valueOf(Hash.ZERO_HASH);
|
||||
private static final String ZERO_HASH = String.valueOf(Hash.ZERO);
|
||||
|
||||
@Mock private Blockchain blockchain;
|
||||
@Mock private BlockHeader blockHeader;
|
||||
@@ -63,7 +63,7 @@ public class QbftGetValidatorsByBlockHashTest {
|
||||
|
||||
@Test
|
||||
public void shouldReturnListOfValidatorsFromBlock() {
|
||||
when(blockchain.getBlockHeader(Hash.ZERO_HASH)).thenReturn(Optional.of(blockHeader));
|
||||
when(blockchain.getBlockHeader(Hash.ZERO)).thenReturn(Optional.of(blockHeader));
|
||||
final List<Address> addresses = Collections.singletonList(Address.ID);
|
||||
final List<String> expectedOutput = Collections.singletonList(Address.ID.toString());
|
||||
when(validatorProvider.getValidatorsForBlock(any())).thenReturn(addresses);
|
||||
|
||||
@@ -41,7 +41,7 @@ public class CommitTest {
|
||||
final CommitPayload commitPayload =
|
||||
new CommitPayload(
|
||||
new ConsensusRoundIdentifier(1, 1),
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
SignatureAlgorithmFactory.getInstance()
|
||||
.createSignature(BigInteger.ONE, BigInteger.ONE, (byte) 0));
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ public class PrepareTest {
|
||||
final Address addr = Util.publicKeyToAddress(nodeKey.getPublicKey());
|
||||
|
||||
final PreparePayload preparePayload =
|
||||
new PreparePayload(new ConsensusRoundIdentifier(1, 1), Hash.ZERO_HASH);
|
||||
new PreparePayload(new ConsensusRoundIdentifier(1, 1), Hash.ZERO);
|
||||
|
||||
final SignedData<PreparePayload> signedPreparePayload =
|
||||
SignedData.create(preparePayload, nodeKey.sign(preparePayload.hashForSignature()));
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.consensus.qbft.messagewrappers;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.consensus.common.bft.BftExtraData;
|
||||
import org.hyperledger.besu.consensus.common.bft.BftExtraDataCodec;
|
||||
@@ -39,6 +38,7 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.junit.Test;
|
||||
|
||||
public class ProposalTest {
|
||||
@@ -46,7 +46,7 @@ public class ProposalTest {
|
||||
|
||||
private static final BftExtraData extraData =
|
||||
new BftExtraData(
|
||||
ZERO_32, Collections.emptyList(), Optional.empty(), 1, Collections.emptyList());
|
||||
Bytes32.ZERO, Collections.emptyList(), Optional.empty(), 1, Collections.emptyList());
|
||||
|
||||
private static final Block BLOCK =
|
||||
new Block(
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.consensus.qbft.messagewrappers;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.consensus.common.bft.BftExtraData;
|
||||
import org.hyperledger.besu.consensus.common.bft.BftExtraDataCodec;
|
||||
@@ -38,13 +37,14 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.junit.Test;
|
||||
|
||||
public class RoundChangeTest {
|
||||
private static final BftExtraDataCodec bftExtraDataCodec = new QbftExtraDataCodec();
|
||||
private static final BftExtraData extraData =
|
||||
new BftExtraData(
|
||||
ZERO_32, Collections.emptyList(), Optional.empty(), 1, Collections.emptyList());
|
||||
Bytes32.ZERO, Collections.emptyList(), Optional.empty(), 1, Collections.emptyList());
|
||||
|
||||
private static final Block BLOCK =
|
||||
new Block(
|
||||
|
||||
@@ -108,7 +108,7 @@ public class QbftControllerTest {
|
||||
when(bftFinalState.getValidators()).thenReturn(ImmutableList.of(validator));
|
||||
|
||||
when(chainHeadBlockHeader.getNumber()).thenReturn(3L);
|
||||
when(chainHeadBlockHeader.getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(chainHeadBlockHeader.getHash()).thenReturn(Hash.ZERO);
|
||||
|
||||
when(blockHeightManager.getParentBlockHeader()).thenReturn(chainHeadBlockHeader);
|
||||
when(blockHeightManager.getChainHeight()).thenReturn(4L); // one great than blockchain
|
||||
@@ -208,7 +208,7 @@ public class QbftControllerTest {
|
||||
qbftController.start();
|
||||
long chainHeadHeight = chainHeadBlockHeader.getNumber();
|
||||
when(nextBlock.getNumber()).thenReturn(chainHeadHeight);
|
||||
when(nextBlock.getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(nextBlock.getHash()).thenReturn(Hash.ZERO);
|
||||
final NewChainHead sameHeightBlock = new NewChainHead(nextBlock);
|
||||
qbftController.handleNewBlockEvent(sameHeightBlock);
|
||||
verify(blockHeightManagerFactory, times(1)).create(any()); // initial creation
|
||||
|
||||
@@ -74,7 +74,7 @@ public class ForkingValidatorProviderTest {
|
||||
@Before
|
||||
public void setup() {
|
||||
headerBuilder.extraData(Bytes.wrap(new byte[32]));
|
||||
Block genesisBlock = createEmptyBlock(0, Hash.ZERO_HASH);
|
||||
Block genesisBlock = createEmptyBlock(0, Hash.ZERO);
|
||||
Block block_1 = createEmptyBlock(1, genesisBlock.getHeader().getHash());
|
||||
Block block_2 = createEmptyBlock(2, block_1.getHeader().getHash());
|
||||
genesisHeader = genesisBlock.getHeader();
|
||||
|
||||
@@ -62,7 +62,7 @@ public class TransactionValidatorProviderTest {
|
||||
@Before
|
||||
public void setup() {
|
||||
forksSchedule = new ForksSchedule<>(List.of(createContractForkSpec(0L, CONTRACT_ADDRESS)));
|
||||
genesisBlock = createEmptyBlock(0, Hash.ZERO_HASH);
|
||||
genesisBlock = createEmptyBlock(0, Hash.ZERO);
|
||||
blockChain = createInMemoryBlockchain(genesisBlock);
|
||||
headerBuilder.extraData(Bytes.wrap(new byte[32]));
|
||||
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.datatypes;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
|
||||
public class Constants {
|
||||
|
||||
/** Constant representing uninitialized or emptied storage values */
|
||||
public static final Bytes32 ZERO_32 = Bytes32.wrap(new byte[32]);
|
||||
|
||||
private Constants() {
|
||||
// non-instantiable class
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.datatypes;
|
||||
|
||||
import static org.hyperledger.besu.crypto.Hash.keccak256;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.ethereum.rlp.RLP;
|
||||
|
||||
@@ -27,7 +26,7 @@ import org.apache.tuweni.bytes.DelegatingBytes32;
|
||||
/** A 32-bytes hash value as used in Ethereum blocks, that is the result of the KEC algorithm. */
|
||||
public class Hash extends DelegatingBytes32 implements org.hyperledger.besu.plugin.data.Hash {
|
||||
|
||||
public static final Hash ZERO_HASH = new Hash(ZERO_32);
|
||||
public static final Hash ZERO = new Hash(Bytes32.ZERO);
|
||||
|
||||
/**
|
||||
* Hash of an RLP encoded trie hash with no content, or
|
||||
|
||||
@@ -47,7 +47,7 @@ public class EthGetBlockByHashIntegrationTest {
|
||||
private final JsonRpcResponseUtils responseUtils = new JsonRpcResponseUtils();
|
||||
private final String ETH_METHOD = "eth_getBlockByHash";
|
||||
private final String JSON_RPC_VERSION = "2.0";
|
||||
private final String ZERO_HASH = String.valueOf(Hash.ZERO_HASH);
|
||||
private final String ZERO_HASH = String.valueOf(Hash.ZERO);
|
||||
|
||||
@BeforeAll
|
||||
public static void setUpOnce() throws Exception {
|
||||
|
||||
@@ -151,7 +151,7 @@ public class PrivGetPrivateTransactionIntegrationTest {
|
||||
final PrivGetPrivateTransaction privGetPrivateTransaction =
|
||||
new PrivGetPrivateTransaction(privacyController, privacyIdProvider);
|
||||
|
||||
final Hash blockHash = Hash.ZERO_HASH;
|
||||
final Hash blockHash = Hash.ZERO;
|
||||
final Transaction pmt = spy(privateMarkerTransaction());
|
||||
when(blockchain.getTransactionByHash(eq(pmt.getHash()))).thenReturn(Optional.of(pmt));
|
||||
when(blockchain.getTransactionLocation(eq(pmt.getHash())))
|
||||
|
||||
@@ -14,8 +14,6 @@
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.api.graphql.internal.pojoadapter;
|
||||
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.datatypes.Wei;
|
||||
|
||||
@@ -55,6 +53,6 @@ public class EmptyAccountAdapter extends AccountAdapter {
|
||||
|
||||
@Override
|
||||
public Optional<Bytes32> getStorage(final DataFetchingEnvironment environment) {
|
||||
return Optional.of(ZERO_32);
|
||||
return Optional.of(Bytes32.ZERO);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,8 +14,6 @@
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods;
|
||||
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.datatypes.Hash;
|
||||
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
|
||||
@@ -88,7 +86,7 @@ public class DebugAccountRange implements JsonRpcMethod {
|
||||
.get()
|
||||
.streamAccounts(Bytes32.fromHexStringLenient(addressHash), maxResults + 1)
|
||||
.collect(Collectors.toList());
|
||||
Bytes32 nextKey = ZERO_32;
|
||||
Bytes32 nextKey = Bytes32.ZERO;
|
||||
if (accounts.size() == maxResults + 1) {
|
||||
nextKey = accounts.get(maxResults).getAddressHash();
|
||||
accounts.remove(maxResults);
|
||||
|
||||
@@ -33,7 +33,7 @@ import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public class EthGetProof extends AbstractBlockParameterOrBlockHashMethod {
|
||||
public EthGetProof(final BlockchainQueries blockchain) {
|
||||
@@ -56,7 +56,7 @@ public class EthGetProof extends AbstractBlockParameterOrBlockHashMethod {
|
||||
final JsonRpcRequestContext requestContext, final Hash blockHash) {
|
||||
|
||||
final Address address = requestContext.getRequiredParameter(0, Address.class);
|
||||
final List<Bytes32> storageKeys = getStorageKeys(requestContext);
|
||||
final List<UInt256> storageKeys = getStorageKeys(requestContext);
|
||||
|
||||
final Optional<WorldState> worldState = getBlockchainQueries().getWorldState(blockHash);
|
||||
|
||||
@@ -86,9 +86,9 @@ public class EthGetProof extends AbstractBlockParameterOrBlockHashMethod {
|
||||
return (JsonRpcResponse) handleParamTypes(requestContext);
|
||||
}
|
||||
|
||||
private List<Bytes32> getStorageKeys(final JsonRpcRequestContext request) {
|
||||
private List<UInt256> getStorageKeys(final JsonRpcRequestContext request) {
|
||||
return Arrays.stream(request.getRequiredParameter(1, String[].class))
|
||||
.map(Bytes32::fromHexString)
|
||||
.map(UInt256::fromHexString)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.BlockParame
|
||||
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.UInt256Parameter;
|
||||
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public class EthGetStorageAt extends AbstractBlockParameterOrBlockHashMethod {
|
||||
public EthGetStorageAt(final BlockchainQueries blockchainQueries) {
|
||||
@@ -43,11 +43,11 @@ public class EthGetStorageAt extends AbstractBlockParameterOrBlockHashMethod {
|
||||
@Override
|
||||
protected String resultByBlockHash(final JsonRpcRequestContext request, final Hash blockHash) {
|
||||
final Address address = request.getRequiredParameter(0, Address.class);
|
||||
final Bytes32 position = request.getRequiredParameter(1, UInt256Parameter.class).getValue();
|
||||
final UInt256 position = request.getRequiredParameter(1, UInt256Parameter.class).getValue();
|
||||
return blockchainQueries
|
||||
.get()
|
||||
.storageAt(address, position, blockHash)
|
||||
.map(Bytes32::toHexString)
|
||||
.map(UInt256::toHexString)
|
||||
.orElse(null);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,7 +90,7 @@ public class EngineExchangeTransitionConfiguration extends ExecutionEngineJsonRp
|
||||
mergeContextOptional
|
||||
.map(c -> c.getTerminalTotalDifficulty())
|
||||
.orElse(FALLBACK_TTD_DEFAULT),
|
||||
maybeTerminalPoWBlockHeader.map(BlockHeader::getHash).orElse(Hash.ZERO_HASH),
|
||||
maybeTerminalPoWBlockHeader.map(BlockHeader::getHash).orElse(Hash.ZERO),
|
||||
maybeTerminalPoWBlockHeader.map(BlockHeader::getNumber).orElse(0L));
|
||||
|
||||
if (!localTransitionConfiguration
|
||||
|
||||
@@ -94,7 +94,7 @@ public class EngineForkchoiceUpdated extends ExecutionEngineJsonRpcMethod {
|
||||
INVALID,
|
||||
mergeCoordinator
|
||||
.getLatestValidHashOfBadBlock(forkChoice.getHeadBlockHash())
|
||||
.orElse(Hash.ZERO_HASH),
|
||||
.orElse(Hash.ZERO),
|
||||
null,
|
||||
Optional.of(forkChoice.getHeadBlockHash() + " is an invalid block")));
|
||||
}
|
||||
@@ -122,7 +122,7 @@ public class EngineForkchoiceUpdated extends ExecutionEngineJsonRpcMethod {
|
||||
requestId,
|
||||
new EngineUpdateForkchoiceResult(
|
||||
INVALID,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
null,
|
||||
Optional.of(newHead.get() + " did not descend from terminal block")));
|
||||
}
|
||||
|
||||
@@ -161,7 +161,7 @@ public class EngineNewPayload extends ExecutionEngineJsonRpcMethod {
|
||||
blockParam,
|
||||
mergeCoordinator
|
||||
.getLatestValidHashOfBadBlock(blockParam.getBlockHash())
|
||||
.orElse(Hash.ZERO_HASH),
|
||||
.orElse(Hash.ZERO),
|
||||
INVALID,
|
||||
"Block already present in bad block manager.");
|
||||
}
|
||||
@@ -204,7 +204,7 @@ public class EngineNewPayload extends ExecutionEngineJsonRpcMethod {
|
||||
return respondWithInvalid(
|
||||
reqId,
|
||||
blockParam,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
INVALID,
|
||||
newBlockHeader.getHash() + " did not descend from terminal block");
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
|
||||
import com.google.common.base.MoreObjects;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public class DebugStorageRangeAtResult implements JsonRpcResult {
|
||||
|
||||
@@ -87,17 +88,17 @@ public class DebugStorageRangeAtResult implements JsonRpcResult {
|
||||
|
||||
public StorageEntry(final AccountStorageEntry entry, final boolean shortValues) {
|
||||
if (shortValues) {
|
||||
this.value = entry.getValue().trimLeadingZeros().toHexString();
|
||||
this.value = entry.getValue().toMinimalBytes().toHexString();
|
||||
this.key =
|
||||
entry
|
||||
.getKey()
|
||||
.map(Bytes32::trimLeadingZeros)
|
||||
.map(UInt256::toMinimalBytes)
|
||||
.map(Bytes::toHexString)
|
||||
.map(s -> "0x".equals(s) ? "0x00" : s)
|
||||
.orElse(null);
|
||||
} else {
|
||||
this.value = entry.getValue().toHexString();
|
||||
this.key = entry.getKey().map(Bytes32::toHexString).orElse(null);
|
||||
this.key = entry.getKey().map(UInt256::toHexString).orElse(null);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ import java.util.TreeMap;
|
||||
import com.fasterxml.jackson.annotation.JsonGetter;
|
||||
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
@JsonPropertyOrder({"pc", "op", "gas", "gasCost", "depth", "stack", "memory", "storage"})
|
||||
public class StructLog {
|
||||
@@ -59,7 +59,7 @@ public class StructLog {
|
||||
reason = traceFrame.getRevertReason().map(Bytes::toShortHexString).orElse(null);
|
||||
}
|
||||
|
||||
private static Map<String, String> formatStorage(final Map<Bytes32, Bytes32> storage) {
|
||||
private static Map<String, String> formatStorage(final Map<UInt256, UInt256> storage) {
|
||||
final Map<String, String> formattedStorage = new TreeMap<>();
|
||||
storage.forEach(
|
||||
(key, value) ->
|
||||
|
||||
@@ -21,17 +21,17 @@ import java.util.stream.Collectors;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonGetter;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public class StorageEntryProof {
|
||||
|
||||
private final Bytes32 key;
|
||||
private final UInt256 key;
|
||||
|
||||
private final Bytes32 value;
|
||||
private final UInt256 value;
|
||||
|
||||
private final List<Bytes> storageProof;
|
||||
|
||||
public StorageEntryProof(final Bytes32 key, final Bytes32 value, final List<Bytes> storageProof) {
|
||||
public StorageEntryProof(final UInt256 key, final UInt256 value, final List<Bytes> storageProof) {
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
this.storageProof = storageProof;
|
||||
|
||||
@@ -16,8 +16,6 @@
|
||||
|
||||
package org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.tracing.diff;
|
||||
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.TransactionTrace;
|
||||
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.tracing.Trace;
|
||||
@@ -35,7 +33,7 @@ import java.util.TreeMap;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public class StateDiffGenerator {
|
||||
|
||||
@@ -61,18 +59,18 @@ public class StateDiffGenerator {
|
||||
|
||||
// calculate storage diff
|
||||
final Map<String, DiffNode> storageDiff = new TreeMap<>();
|
||||
for (final Map.Entry<Bytes32, Bytes32> entry :
|
||||
for (final Map.Entry<UInt256, UInt256> entry :
|
||||
((UpdateTrackingAccount<?>) updatedAccount)
|
||||
.getUpdatedStorage()
|
||||
.entrySet()) { // FIXME cast
|
||||
final Bytes32 newValue = entry.getValue();
|
||||
final UInt256 newValue = entry.getValue();
|
||||
if (rootAccount == null) {
|
||||
if (!ZERO_32.equals(newValue)) {
|
||||
if (!UInt256.ZERO.equals(newValue)) {
|
||||
storageDiff.put(
|
||||
entry.getKey().toHexString(), new DiffNode(null, newValue.toHexString()));
|
||||
}
|
||||
} else {
|
||||
final Bytes32 originalValue = rootAccount.getStorageValue(entry.getKey());
|
||||
final UInt256 originalValue = rootAccount.getStorageValue(entry.getKey());
|
||||
if (!originalValue.equals(newValue)) {
|
||||
storageDiff.put(
|
||||
entry.getKey().toHexString(),
|
||||
|
||||
@@ -256,7 +256,7 @@ public class VmTraceGenerator {
|
||||
.ifPresent(
|
||||
stack ->
|
||||
IntStream.range(0, currentTraceFrame.getStackItemsProduced())
|
||||
.mapToObj(i -> stack[stack.length - i - 1].trimLeadingZeros())
|
||||
.mapToObj(i -> Bytes.wrap(stack[stack.length - i - 1]).trimLeadingZeros())
|
||||
.map(value -> Quantity.create(UInt256.fromHexString(value.toHexString())))
|
||||
.forEach(report::addPush));
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.ethereum.api.query;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.api.query.cache.TransactionLogBloomCacher.BLOCKS_PER_BLOOM_CACHE;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
@@ -57,7 +56,7 @@ import java.util.stream.IntStream;
|
||||
import java.util.stream.LongStream;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -168,8 +167,8 @@ public class BlockchainQueries {
|
||||
* @param blockNumber The blockNumber that is being queried.
|
||||
* @return The value at the storage index being queried.
|
||||
*/
|
||||
public Optional<Bytes32> storageAt(
|
||||
final Address address, final Bytes32 storageIndex, final long blockNumber) {
|
||||
public Optional<UInt256> storageAt(
|
||||
final Address address, final UInt256 storageIndex, final long blockNumber) {
|
||||
final Hash blockHash =
|
||||
getBlockHeaderByNumber(blockNumber).map(BlockHeader::getHash).orElse(Hash.EMPTY);
|
||||
|
||||
@@ -184,10 +183,10 @@ public class BlockchainQueries {
|
||||
* @param blockHash The blockHash that is being queried.
|
||||
* @return The value at the storage index being queried.
|
||||
*/
|
||||
public Optional<Bytes32> storageAt(
|
||||
final Address address, final Bytes32 storageIndex, final Hash blockHash) {
|
||||
public Optional<UInt256> storageAt(
|
||||
final Address address, final UInt256 storageIndex, final Hash blockHash) {
|
||||
return fromAccount(
|
||||
address, blockHash, account -> account.getStorageValue(storageIndex), ZERO_32);
|
||||
address, blockHash, account -> account.getStorageValue(storageIndex), UInt256.ZERO);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,7 +18,6 @@ package org.hyperledger.besu.ethereum.api.query;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static com.google.common.base.Preconditions.checkState;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.config.JsonUtil;
|
||||
import org.hyperledger.besu.ethereum.chain.Blockchain;
|
||||
@@ -108,7 +107,7 @@ public class StateBackupService {
|
||||
this.backupDir = backupDir.orElse(this.backupDir);
|
||||
backupStatus.targetBlock = block;
|
||||
backupStatus.compressed = compress;
|
||||
backupStatus.currentAccount = ZERO_32;
|
||||
backupStatus.currentAccount = Bytes32.ZERO;
|
||||
scheduler.scheduleComputationTask(
|
||||
() -> {
|
||||
try {
|
||||
@@ -186,7 +185,7 @@ public class StateBackupService {
|
||||
"Backup Block must be within blockchain");
|
||||
backupStatus.targetBlock = block;
|
||||
backupStatus.compressed = compress;
|
||||
backupStatus.currentAccount = ZERO_32;
|
||||
backupStatus.currentAccount = Bytes32.ZERO;
|
||||
|
||||
backupChainData();
|
||||
backupLeaves();
|
||||
|
||||
@@ -515,8 +515,8 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
|
||||
final String mockBalance = "0x35";
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.accountBalance(eq(address), eq(Hash.ZERO_HASH)))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.accountBalance(eq(address), eq(Hash.ZERO)))
|
||||
.thenReturn(Optional.of(Wei.fromHexString(mockBalance)));
|
||||
|
||||
final String id = "123";
|
||||
@@ -549,8 +549,8 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
|
||||
final Wei mockBalance = Wei.of(0);
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.accountBalance(eq(address), eq(Hash.ZERO_HASH)))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.accountBalance(eq(address), eq(Hash.ZERO)))
|
||||
.thenReturn(Optional.of(mockBalance));
|
||||
|
||||
final String id = "123";
|
||||
@@ -1866,8 +1866,8 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
|
||||
final String mockStorage = "0x0000000000000000000000000000000000000000000000000000000000000001";
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.storageAt(eq(address), eq(UInt256.ZERO), eq(Hash.ZERO_HASH)))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.storageAt(eq(address), eq(UInt256.ZERO), eq(Hash.ZERO)))
|
||||
.thenReturn(Optional.of(UInt256.fromHexString(mockStorage)));
|
||||
|
||||
final String id = "88";
|
||||
@@ -1903,8 +1903,8 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
|
||||
final String mockStorage = "0x0000000000000000000000000000000000000000000000000000000000000006";
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.storageAt(eq(address), eq(UInt256.ONE), eq(Hash.ZERO_HASH)))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.storageAt(eq(address), eq(UInt256.ONE), eq(Hash.ZERO)))
|
||||
.thenReturn(Optional.of(UInt256.fromHexString(mockStorage)));
|
||||
|
||||
final String id = "88";
|
||||
|
||||
@@ -304,8 +304,8 @@ public class FilterManagerLogFilterTest {
return new LogWithMetadata(
0,
100L,
Hash.ZERO_HASH,
Hash.ZERO_HASH,
Hash.ZERO,
Hash.ZERO,
0,
Address.fromHexString("0x0"),
Bytes.EMPTY,

@@ -79,8 +79,7 @@ class DebugAccountAtTest {
|
||||
void testBlockNotFoundResponse() {
|
||||
Mockito.when(blockchainQueries.blockByHash(any())).thenReturn(Optional.empty());
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
final JsonRpcResponse response = debugAccountAt.response(request);
|
||||
@@ -95,8 +94,7 @@ class DebugAccountAtTest {
|
||||
Mockito.when(blockchainQueries.blockByHash(any())).thenReturn(Optional.of(blockWithMetadata));
|
||||
Mockito.when(blockWithMetadata.getTransactions()).thenReturn(Collections.emptyList());
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
final JsonRpcResponse response = debugAccountAt.response(request);
|
||||
@@ -112,8 +110,7 @@ class DebugAccountAtTest {
|
||||
Mockito.when(blockWithMetadata.getTransactions())
|
||||
.thenReturn(Collections.singletonList(transactionWithMetadata));
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), -1, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), -1, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
final JsonRpcResponse response = debugAccountAt.response(request);
|
||||
@@ -129,8 +126,7 @@ class DebugAccountAtTest {
|
||||
Mockito.when(blockWithMetadata.getTransactions())
|
||||
.thenReturn(Collections.singletonList(transactionWithMetadata));
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), 2, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), 2, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
final JsonRpcResponse response = debugAccountAt.response(request);
|
||||
@@ -146,8 +142,7 @@ class DebugAccountAtTest {
|
||||
Mockito.when(blockWithMetadata.getTransactions())
|
||||
.thenReturn(Collections.singletonList(transactionWithMetadata));
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
final JsonRpcResponse response = debugAccountAt.response(request);
|
||||
@@ -161,8 +156,7 @@ class DebugAccountAtTest {
|
||||
void testNoAccountFoundResponse() {
|
||||
setupMockTransaction();
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
|
||||
@@ -191,8 +185,7 @@ class DebugAccountAtTest {
|
||||
Mockito.when(account.getBalance()).thenReturn(balance);
|
||||
Mockito.when(account.getCodeHash()).thenReturn(codeHash);
|
||||
|
||||
final Object[] params =
|
||||
new Object[] {Hash.ZERO_HASH.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final Object[] params = new Object[] {Hash.ZERO.toHexString(), 0, Address.ZERO.toHexString()};
|
||||
final JsonRpcRequestContext request =
|
||||
new JsonRpcRequestContext(new JsonRpcRequest("2.0", "debug_accountAt", params));
|
||||
final JsonRpcSuccessResponse response =
|
||||
@@ -222,6 +215,6 @@ class DebugAccountAtTest {
|
||||
.thenReturn(Collections.singletonList(transactionTrace));
|
||||
Mockito.when(transactionTrace.getTransaction()).thenReturn(transaction);
|
||||
Mockito.when(transactionWithMetadata.getTransaction()).thenReturn(transaction);
|
||||
Mockito.when(transaction.getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
Mockito.when(transaction.getHash()).thenReturn(Hash.ZERO);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,8 +81,8 @@ public class EthCallTest {
|
||||
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO))
|
||||
.thenReturn(Optional.of(mock(BlockHeader.class)));
|
||||
when(transactionSimulator.process(any(), any(), any(), any())).thenReturn(Optional.empty());
|
||||
|
||||
@@ -103,8 +103,8 @@ public class EthCallTest {
|
||||
mockTransactionProcessorSuccessResult(Bytes.of());
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO))
|
||||
.thenReturn(Optional.of(mock(BlockHeader.class)));
|
||||
|
||||
final JsonRpcResponse response = method.response(request);
|
||||
@@ -121,8 +121,8 @@ public class EthCallTest {
|
||||
mockTransactionProcessorSuccessResult(Bytes.of(1));
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO))
|
||||
.thenReturn(Optional.of(mock(BlockHeader.class)));
|
||||
|
||||
final JsonRpcResponse response = method.response(request);
|
||||
@@ -136,27 +136,27 @@ public class EthCallTest {
|
||||
final JsonRpcRequestContext request = ethCallRequest(callParameter(), "latest");
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO))
|
||||
.thenReturn(Optional.of(mock(BlockHeader.class)));
|
||||
when(transactionSimulator.process(any(), any(), any(), any())).thenReturn(Optional.empty());
|
||||
|
||||
method.response(request);
|
||||
|
||||
verify(blockchainQueries).getBlockHeaderByHash(eq(Hash.ZERO_HASH));
|
||||
verify(blockchainQueries).getBlockHeaderByHash(eq(Hash.ZERO));
|
||||
verify(transactionSimulator).process(any(), any(), any(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldUseCorrectBlockNumberWhenEarliest() {
|
||||
final JsonRpcRequestContext request = ethCallRequest(callParameter(), "earliest");
|
||||
when(blockchainQueries.getBlockHashByNumber(anyLong())).thenReturn(Optional.of(Hash.ZERO_HASH));
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
when(blockchainQueries.getBlockHashByNumber(anyLong())).thenReturn(Optional.of(Hash.ZERO));
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO))
|
||||
.thenReturn(Optional.of(mock(BlockHeader.class)));
|
||||
when(transactionSimulator.process(any(), any(), any(), any())).thenReturn(Optional.empty());
|
||||
method.response(request);
|
||||
|
||||
verify(blockchainQueries).getBlockHeaderByHash(eq(Hash.ZERO_HASH));
|
||||
verify(blockchainQueries).getBlockHeaderByHash(eq(Hash.ZERO));
|
||||
verify(transactionSimulator).process(any(), any(), any(), any());
|
||||
}
|
||||
|
||||
@@ -164,14 +164,14 @@ public class EthCallTest {
|
||||
public void shouldUseCorrectBlockNumberWhenSpecified() {
|
||||
final JsonRpcRequestContext request = ethCallRequest(callParameter(), Quantity.create(13L));
|
||||
when(blockchainQueries.headBlockNumber()).thenReturn(14L);
|
||||
when(blockchainQueries.getBlockHashByNumber(anyLong())).thenReturn(Optional.of(Hash.ZERO_HASH));
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
when(blockchainQueries.getBlockHashByNumber(anyLong())).thenReturn(Optional.of(Hash.ZERO));
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO))
|
||||
.thenReturn(Optional.of(mock(BlockHeader.class)));
|
||||
when(transactionSimulator.process(any(), any(), any(), any())).thenReturn(Optional.empty());
|
||||
|
||||
method.response(request);
|
||||
|
||||
verify(blockchainQueries).getBlockHeaderByHash(eq(Hash.ZERO_HASH));
|
||||
verify(blockchainQueries).getBlockHeaderByHash(eq(Hash.ZERO));
|
||||
verify(transactionSimulator).process(any(), any(), any(), any());
|
||||
}
|
||||
|
||||
@@ -233,9 +233,8 @@ public class EthCallTest {
|
||||
when(blockHeader.getBaseFee()).thenReturn(baseFee);
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO_HASH))
|
||||
.thenReturn(Optional.of(blockHeader));
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.getBlockHeaderByHash(Hash.ZERO)).thenReturn(Optional.of(blockHeader));
|
||||
|
||||
method.response(request);
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ public class EthGetBlockByHashTest {
|
||||
private EthGetBlockByHash method;
|
||||
private final String JSON_RPC_VERSION = "2.0";
|
||||
private final String ETH_METHOD = "eth_getBlockByHash";
|
||||
private final String ZERO_HASH = String.valueOf(Hash.ZERO_HASH);
|
||||
private final String ZERO_HASH = String.valueOf(Hash.ZERO);
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
@@ -109,12 +109,10 @@ public class EthGetFilterChangesTest {
|
||||
@Test
|
||||
public void shouldReturnHashesWhenFilterManagerFindsBlockFilterWithHashes() {
|
||||
final JsonRpcRequestContext request = requestWithParams("0x1");
|
||||
when(filterManager.blockChanges("0x1")).thenReturn(Lists.newArrayList(Hash.ZERO_HASH));
|
||||
when(filterManager.blockChanges("0x1")).thenReturn(Lists.newArrayList(Hash.ZERO));
|
||||
|
||||
final List<String> expectedHashes =
|
||||
Lists.newArrayList(Hash.ZERO_HASH).stream()
|
||||
.map(Hash::toString)
|
||||
.collect(Collectors.toList());
|
||||
Lists.newArrayList(Hash.ZERO).stream().map(Hash::toString).collect(Collectors.toList());
|
||||
final JsonRpcResponse expectedResponse = new JsonRpcSuccessResponse(null, expectedHashes);
|
||||
|
||||
final JsonRpcResponse response = method.response(request);
|
||||
@@ -138,13 +136,10 @@ public class EthGetFilterChangesTest {
|
||||
public void shouldReturnHashesWhenFilterManagerFindsPendingTransactionFilterWithHashes() {
|
||||
final JsonRpcRequestContext request = requestWithParams("0x1");
|
||||
when(filterManager.blockChanges(anyString())).thenReturn(null);
|
||||
when(filterManager.pendingTransactionChanges("0x1"))
|
||||
.thenReturn(Lists.newArrayList(Hash.ZERO_HASH));
|
||||
when(filterManager.pendingTransactionChanges("0x1")).thenReturn(Lists.newArrayList(Hash.ZERO));
|
||||
|
||||
final List<String> expectedHashes =
|
||||
Lists.newArrayList(Hash.ZERO_HASH).stream()
|
||||
.map(Hash::toString)
|
||||
.collect(Collectors.toList());
|
||||
Lists.newArrayList(Hash.ZERO).stream().map(Hash::toString).collect(Collectors.toList());
|
||||
final JsonRpcResponse expectedResponse = new JsonRpcSuccessResponse(null, expectedHashes);
|
||||
|
||||
final JsonRpcResponse response = method.response(request);
|
||||
@@ -203,8 +198,8 @@ public class EthGetFilterChangesTest {
|
||||
return new LogWithMetadata(
|
||||
0,
|
||||
100L,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
Hash.ZERO,
|
||||
0,
|
||||
Address.fromHexString("0x0"),
|
||||
Bytes.EMPTY,
|
||||
|
||||
@@ -132,8 +132,8 @@ public class EthGetFilterLogsTest {
|
||||
new LogWithMetadata(
|
||||
0,
|
||||
100L,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
Hash.ZERO,
|
||||
0,
|
||||
Address.fromHexString("0x0"),
|
||||
Bytes.EMPTY,
|
||||
|
||||
@@ -86,7 +86,7 @@ class EthGetProofTest {
|
||||
final JsonRpcRequestContext request = requestWithParams(null, null, "latest");
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
|
||||
Assertions.assertThatThrownBy(() -> method.response(request))
|
||||
.isInstanceOf(InvalidJsonRpcParameters.class)
|
||||
@@ -98,7 +98,7 @@ class EthGetProofTest {
|
||||
final JsonRpcRequestContext request = requestWithParams(address.toString(), null, "latest");
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
|
||||
Assertions.assertThatThrownBy(() -> method.response(request))
|
||||
.isInstanceOf(InvalidJsonRpcParameters.class)
|
||||
|
||||
@@ -134,7 +134,7 @@ public class EthGetTransactionByHashTest {
|
||||
org.hyperledger.besu.ethereum.core.Transaction.readFrom(
|
||||
Bytes.fromHexString(VALID_TRANSACTION));
|
||||
final TransactionWithMetadata transactionWithMetadata =
|
||||
new TransactionWithMetadata(transaction, 1, Optional.empty(), Hash.ZERO_HASH, 0);
|
||||
new TransactionWithMetadata(transaction, 1, Optional.empty(), Hash.ZERO, 0);
|
||||
|
||||
when(pendingTransactions.getTransactionByHash(eq(transaction.getHash())))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
@@ -150,8 +150,7 @@ class EthGetTransactionCountTest {
|
||||
private void mockGetTransactionCount(final Address address, final long transactionCount) {
|
||||
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
|
||||
when(blockchainQueries.getBlockchain().getChainHead()).thenReturn(chainHead);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO_HASH);
|
||||
when(blockchainQueries.getTransactionCount(address, Hash.ZERO_HASH))
|
||||
.thenReturn(transactionCount);
|
||||
when(blockchainQueries.getBlockchain().getChainHead().getHash()).thenReturn(Hash.ZERO);
|
||||
when(blockchainQueries.getTransactionCount(address, Hash.ZERO)).thenReturn(transactionCount);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ public class EthGetUncleByBlockHashAndIndexTest {
|
||||
private final TransactionTestFixture transactionTestFixture = new TransactionTestFixture();
|
||||
|
||||
private EthGetUncleByBlockHashAndIndex method;
|
||||
private final Hash zeroHash = Hash.ZERO_HASH;
|
||||
private final Hash zeroHash = Hash.ZERO;
|
||||
|
||||
@Mock private BlockchainQueries blockchainQueries;
|
||||
|
||||
@@ -172,7 +172,7 @@ public class EthGetUncleByBlockHashAndIndexTest {
|
||||
}
|
||||
|
||||
final List<Hash> ommers = new ArrayList<>();
|
||||
ommers.add(Hash.ZERO_HASH);
|
||||
ommers.add(Hash.ZERO);
|
||||
|
||||
return new BlockWithMetadata<>(header, transactions, ommers, header.getDifficulty(), 0);
|
||||
}
|
||||
|
||||
@@ -149,7 +149,7 @@ public class EthGetUncleByBlockNumberAndIndexTest {
|
||||
}
|
||||
|
||||
final List<Hash> ommers = new ArrayList<>();
|
||||
ommers.add(Hash.ZERO_HASH);
|
||||
ommers.add(Hash.ZERO);
|
||||
|
||||
return new BlockWithMetadata<>(header, transactions, ommers, header.getDifficulty(), 0);
|
||||
}
|
||||
|
||||
@@ -175,7 +175,7 @@ public class EthNewFilterTest {
|
||||
null,
|
||||
Collections.emptyList(),
|
||||
Collections.emptyList(),
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
null,
|
||||
null);
|
||||
|
||||
|
||||
@@ -103,7 +103,7 @@ public class EthSubmitWorkTest {
|
||||
final JsonRpcRequestContext request =
|
||||
requestWithParams(
|
||||
Bytes.ofUnsignedLong(expectedFirstOutput.getNonce()).trimLeadingZeros().toHexString(),
|
||||
expectedFirstOutput.getPowHash().toHexString(),
|
||||
Bytes.wrap(expectedFirstOutput.getPowHash()).toHexString(),
|
||||
expectedFirstOutput.getMixHash().toHexString());
|
||||
final JsonRpcResponse expectedResponse =
|
||||
new JsonRpcSuccessResponse(request.getRequest().getId(), true);
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
@@ -49,6 +48,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import io.vertx.core.Vertx;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -92,7 +92,7 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
var response =
|
||||
resp(
|
||||
new EngineExchangeTransitionConfigurationParameter(
|
||||
"0", Hash.ZERO_HASH.toHexString(), new UnsignedLongParameter(1L)));
|
||||
"0", Hash.ZERO.toHexString(), new UnsignedLongParameter(1L)));
|
||||
|
||||
var result = fromSuccessResp(response);
|
||||
assertThat(result.getTerminalTotalDifficulty()).isEqualTo(Difficulty.of(1337L));
|
||||
@@ -109,11 +109,11 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
var response =
|
||||
resp(
|
||||
new EngineExchangeTransitionConfigurationParameter(
|
||||
"0", Hash.ZERO_HASH.toHexString(), new UnsignedLongParameter(0L)));
|
||||
"0", Hash.ZERO.toHexString(), new UnsignedLongParameter(0L)));
|
||||
|
||||
var result = fromSuccessResp(response);
|
||||
assertThat(result.getTerminalTotalDifficulty()).isEqualTo(Difficulty.of(1337L));
|
||||
assertThat(result.getTerminalBlockHash()).isEqualTo(Hash.ZERO_HASH);
|
||||
assertThat(result.getTerminalBlockHash()).isEqualTo(Hash.ZERO);
|
||||
assertThat(result.getTerminalBlockNumber()).isEqualTo(0L);
|
||||
verify(engineCallListener, times(1)).executionEngineCalled();
|
||||
}
|
||||
@@ -123,7 +123,7 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
var response =
|
||||
resp(
|
||||
new EngineExchangeTransitionConfigurationParameter(
|
||||
"0", Hash.ZERO_HASH.toHexString(), new UnsignedLongParameter(0L)));
|
||||
"0", Hash.ZERO.toHexString(), new UnsignedLongParameter(0L)));
|
||||
|
||||
var result = fromSuccessResp(response);
|
||||
assertThat(result.getTerminalTotalDifficulty())
|
||||
@@ -132,7 +132,7 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
new BigInteger(
|
||||
"115792089237316195423570985008687907853269984665640564039457584007913129638912",
|
||||
10)));
|
||||
assertThat(result.getTerminalBlockHash()).isEqualTo(Hash.ZERO_HASH);
|
||||
assertThat(result.getTerminalBlockHash()).isEqualTo(Hash.ZERO);
|
||||
assertThat(result.getTerminalBlockNumber()).isEqualTo(0L);
|
||||
verify(engineCallListener, times(1)).executionEngineCalled();
|
||||
}
|
||||
@@ -181,11 +181,11 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
public void shouldAlwaysReturnResultsInHex() throws JsonProcessingException {
|
||||
var mapper = new ObjectMapper();
|
||||
var mockResult =
|
||||
new EngineExchangeTransitionConfigurationResult(Difficulty.ZERO, Hash.ZERO_HASH, 0L);
|
||||
new EngineExchangeTransitionConfigurationResult(Difficulty.ZERO, Hash.ZERO, 0L);
|
||||
|
||||
assertThat(mockResult.getTerminalBlockNumberAsString()).isEqualTo("0x0");
|
||||
assertThat(mockResult.getTerminalTotalDifficultyAsString()).isEqualTo("0x0");
|
||||
assertThat(mockResult.getTerminalBlockHashAsString()).isEqualTo(Hash.ZERO_HASH.toHexString());
|
||||
assertThat(mockResult.getTerminalBlockHashAsString()).isEqualTo(Hash.ZERO.toHexString());
|
||||
|
||||
String json = mapper.writeValueAsString(mockResult);
|
||||
var res = mapper.readValue(json, Map.class);
|
||||
@@ -199,11 +199,11 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
public void shouldStripLeadingZeros() throws JsonProcessingException {
|
||||
var mapper = new ObjectMapper();
|
||||
var mockResult =
|
||||
new EngineExchangeTransitionConfigurationResult(Difficulty.ZERO, Hash.ZERO_HASH, 100);
|
||||
new EngineExchangeTransitionConfigurationResult(Difficulty.ZERO, Hash.ZERO, 100);
|
||||
|
||||
assertThat(mockResult.getTerminalBlockNumberAsString()).isEqualTo("0x64");
|
||||
assertThat(mockResult.getTerminalTotalDifficultyAsString()).isEqualTo("0x0");
|
||||
assertThat(mockResult.getTerminalBlockHashAsString()).isEqualTo(Hash.ZERO_HASH.toHexString());
|
||||
assertThat(mockResult.getTerminalBlockHashAsString()).isEqualTo(Hash.ZERO.toHexString());
|
||||
|
||||
String json = mapper.writeValueAsString(mockResult);
|
||||
var res = mapper.readValue(json, Map.class);
|
||||
@@ -247,7 +247,7 @@ public class EngineExchangeTransitionConfigurationTest {
|
||||
0,
|
||||
Bytes.EMPTY,
|
||||
Wei.ZERO,
|
||||
ZERO_32,
|
||||
Bytes32.ZERO,
|
||||
0,
|
||||
new BlockHeaderFunctions() {
|
||||
@Override
|
||||
|
||||
@@ -115,11 +115,11 @@ public class EngineForkchoiceUpdatedTest {
|
||||
.thenReturn(Optional.of(mockHeader));
|
||||
when(mergeCoordinator.latestValidAncestorDescendsFromTerminal(mockHeader)).thenReturn(false);
|
||||
assertSuccessWithPayloadForForkchoiceResult(
|
||||
new EngineForkchoiceUpdatedParameter(mockHeader.getHash(), Hash.ZERO_HASH, Hash.ZERO_HASH),
|
||||
new EngineForkchoiceUpdatedParameter(mockHeader.getHash(), Hash.ZERO, Hash.ZERO),
|
||||
Optional.empty(),
|
||||
mock(ForkchoiceResult.class),
|
||||
INVALID,
|
||||
Optional.of(Hash.ZERO_HASH));
|
||||
Optional.of(Hash.ZERO));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -132,7 +132,7 @@ public class EngineForkchoiceUpdatedTest {
|
||||
|
||||
assertSuccessWithPayloadForForkchoiceResult(
|
||||
new EngineForkchoiceUpdatedParameter(
|
||||
mockHeader.getHash(), Hash.ZERO_HASH, mockHeader.getParentHash()),
|
||||
mockHeader.getHash(), Hash.ZERO, mockHeader.getParentHash()),
|
||||
Optional.empty(),
|
||||
mock(ForkchoiceResult.class),
|
||||
INVALID,
|
||||
@@ -153,7 +153,7 @@ public class EngineForkchoiceUpdatedTest {
|
||||
when(mergeCoordinator.latestValidAncestorDescendsFromTerminal(mockHeader)).thenReturn(true);
|
||||
|
||||
assertSuccessWithPayloadForForkchoiceResult(
|
||||
new EngineForkchoiceUpdatedParameter(mockHeader.getHash(), Hash.ZERO_HASH, Hash.ZERO_HASH),
|
||||
new EngineForkchoiceUpdatedParameter(mockHeader.getHash(), Hash.ZERO, Hash.ZERO),
|
||||
Optional.empty(),
|
||||
ForkchoiceResult.withResult(Optional.empty(), Optional.of(mockHeader)),
|
||||
VALID);
|
||||
@@ -208,8 +208,7 @@ public class EngineForkchoiceUpdatedTest {
|
||||
when(mergeCoordinator.isDescendantOf(any(), any())).thenReturn(true);
|
||||
|
||||
assertSuccessWithPayloadForForkchoiceResult(
|
||||
new EngineForkchoiceUpdatedParameter(
|
||||
mockHeader.getHash(), Hash.ZERO_HASH, mockParent.getHash()),
|
||||
new EngineForkchoiceUpdatedParameter(mockHeader.getHash(), Hash.ZERO, mockParent.getHash()),
|
||||
Optional.empty(),
|
||||
ForkchoiceResult.withResult(Optional.of(mockParent), Optional.of(mockHeader)),
|
||||
VALID);
|
||||
@@ -240,8 +239,7 @@ public class EngineForkchoiceUpdatedTest {
|
||||
|
||||
var res =
|
||||
assertSuccessWithPayloadForForkchoiceResult(
|
||||
new EngineForkchoiceUpdatedParameter(
|
||||
mockHeader.getHash(), Hash.ZERO_HASH, Hash.ZERO_HASH),
|
||||
new EngineForkchoiceUpdatedParameter(mockHeader.getHash(), Hash.ZERO, Hash.ZERO),
|
||||
Optional.of(payloadParams),
|
||||
ForkchoiceResult.withResult(Optional.empty(), Optional.of(mockHeader)),
|
||||
VALID);
|
||||
@@ -309,7 +307,7 @@ public class EngineForkchoiceUpdatedTest {
|
||||
var resp =
|
||||
resp(
|
||||
new EngineForkchoiceUpdatedParameter(
|
||||
newHead.getBlockHash(), parent.getBlockHash(), Hash.ZERO_HASH),
|
||||
newHead.getBlockHash(), parent.getBlockHash(), Hash.ZERO),
|
||||
Optional.empty());
|
||||
|
||||
assertInvalidForkchoiceState(resp);
|
||||
@@ -417,7 +415,7 @@ public class EngineForkchoiceUpdatedTest {
|
||||
(JsonRpcSuccessResponse)
|
||||
resp(
|
||||
new EngineForkchoiceUpdatedParameter(
|
||||
mockHeader.getBlockHash(), Hash.ZERO_HASH, Hash.ZERO_HASH),
|
||||
mockHeader.getBlockHash(), Hash.ZERO, Hash.ZERO),
|
||||
Optional.of(payloadParams));
|
||||
|
||||
var forkchoiceRes = (EngineUpdateForkchoiceResult) resp.getResult();
|
||||
|
||||
@@ -58,7 +58,7 @@ public class EngineGetPayloadTest {
|
||||
private static final BlockResultFactory factory = new BlockResultFactory();
|
||||
private static final PayloadIdentifier mockPid =
|
||||
PayloadIdentifier.forPayloadParams(
|
||||
Hash.ZERO_HASH, 1337L, Bytes32.random(), Address.fromHexString("0x42"));
|
||||
Hash.ZERO, 1337L, Bytes32.random(), Address.fromHexString("0x42"));
|
||||
private static final BlockHeader mockHeader =
|
||||
new BlockHeaderTestFixture().prevRandao(Bytes32.random()).buildHeader();
|
||||
private static final Block mockBlock =
|
||||
@@ -108,7 +108,7 @@ public class EngineGetPayloadTest {
|
||||
var resp =
|
||||
resp(
|
||||
PayloadIdentifier.forPayloadParams(
|
||||
Hash.ZERO_HASH, 0L, Bytes32.random(), Address.fromHexString("0x42")));
|
||||
Hash.ZERO, 0L, Bytes32.random(), Address.fromHexString("0x42")));
|
||||
assertThat(resp).isInstanceOf(JsonRpcErrorResponse.class);
|
||||
verify(engineCallListener, times(1)).executionEngineCalled();
|
||||
}
|
||||
|
||||
@@ -195,7 +195,7 @@ public class EngineNewPayloadTest {
|
||||
var resp = resp(mockPayload(mockHeader, Collections.emptyList()));
|
||||
|
||||
EnginePayloadStatusResult res = fromSuccessResp(resp);
|
||||
assertThat(res.getLatestValidHash()).isEqualTo(Optional.of(Hash.ZERO_HASH));
|
||||
assertThat(res.getLatestValidHash()).isEqualTo(Optional.of(Hash.ZERO));
|
||||
assertThat(res.getStatusAsString()).isEqualTo(INVALID.name());
|
||||
verify(mergeCoordinator, atLeastOnce()).addBadBlock(any());
|
||||
verify(engineCallListener, times(1)).executionEngineCalled();
|
||||
@@ -364,7 +364,7 @@ public class EngineNewPayloadTest {
|
||||
var resp = resp(mockPayload(mockHeader, Collections.emptyList()));
|
||||
|
||||
EnginePayloadStatusResult res = fromSuccessResp(resp);
|
||||
assertThat(res.getLatestValidHash()).contains(Hash.ZERO_HASH);
|
||||
assertThat(res.getLatestValidHash()).contains(Hash.ZERO);
|
||||
assertThat(res.getStatusAsString()).isEqualTo(INVALID.name());
|
||||
assertThat(res.getError()).isEqualTo("Block already present in bad block manager.");
|
||||
verify(engineCallListener, times(1)).executionEngineCalled();
|
||||
|
||||
@@ -165,7 +165,7 @@ public class FilterParameterTest {
|
||||
+ ","
|
||||
+ TOPICS_TWO_THREE_ARRAY
|
||||
+ "], \"blockHash\": \""
|
||||
+ Hash.ZERO_HASH
|
||||
+ Hash.ZERO
|
||||
+ "\"}],\"id\":1}";
|
||||
|
||||
final String jsonUsingAlias =
|
||||
@@ -176,7 +176,7 @@ public class FilterParameterTest {
|
||||
+ ","
|
||||
+ TOPICS_TWO_THREE_ARRAY
|
||||
+ "], \"blockhash\": \""
|
||||
+ Hash.ZERO_HASH
|
||||
+ Hash.ZERO
|
||||
+ "\"}],\"id\":1}";
|
||||
|
||||
final JsonRpcRequestContext request = new JsonRpcRequestContext(readJsonAsJsonRpcRequest(json));
|
||||
@@ -204,7 +204,7 @@ public class FilterParameterTest {
|
||||
+ ","
|
||||
+ TOPICS_TWO_THREE_ARRAY
|
||||
+ "], \"blockHash\": \""
|
||||
+ Hash.ZERO_HASH
|
||||
+ Hash.ZERO
|
||||
+ "\"}],\"id\":1}";
|
||||
|
||||
final String jsonUsingAlias =
|
||||
@@ -215,7 +215,7 @@ public class FilterParameterTest {
|
||||
+ ","
|
||||
+ TOPICS_TWO_THREE_ARRAY
|
||||
+ "], \"blockhash\": \""
|
||||
+ Hash.ZERO_HASH
|
||||
+ Hash.ZERO
|
||||
+ "\"}],\"id\":1}";
|
||||
|
||||
final JsonRpcRequestContext request = new JsonRpcRequestContext(readJsonAsJsonRpcRequest(json));
|
||||
@@ -228,8 +228,7 @@ public class FilterParameterTest {
|
||||
requestUsingAlias.getRequiredParameter(0, FilterParameter.class);
|
||||
|
||||
assertThat(parsedFilterParameterUsingAlias.isValid()).isTrue();
|
||||
assertThat(parsedFilterParameterUsingAlias.getBlockHash())
|
||||
.isEqualTo(Optional.of(Hash.ZERO_HASH));
|
||||
assertThat(parsedFilterParameterUsingAlias.getBlockHash()).isEqualTo(Optional.of(Hash.ZERO));
|
||||
|
||||
// blockhash and blockHash should end up the same
|
||||
assertThat(parsedFilterParameter)
|
||||
|
||||
@@ -48,7 +48,7 @@ public class PrivGetCodeTest {
|
||||
@Mock private BlockchainQueries mockBlockchainQueries;
|
||||
@Mock private PrivacyIdProvider privacyIdProvider;
|
||||
|
||||
private final Hash latestBlockHash = Hash.ZERO_HASH;
|
||||
private final Hash latestBlockHash = Hash.ZERO;
|
||||
private final String enclavePublicKey = "A1aVtMxLCUHmBVHXoZzzBgPbW/wj5axDpW9X8l91SGo=";
|
||||
private final String privacyGroupId = "Ko2bVqD+nNlNYL5EE7y3IdOnviftjiizpjRt+HTuFBs=";
|
||||
private final Address contractAddress =
|
||||
|
||||
@@ -176,8 +176,8 @@ public class PrivGetFilterChangesTest {
|
||||
return new LogWithMetadata(
|
||||
0,
|
||||
100L,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
Hash.ZERO,
|
||||
0,
|
||||
Address.fromHexString("0x0"),
|
||||
Bytes.EMPTY,
|
||||
|
||||
@@ -145,8 +145,8 @@ public class PrivGetFilterLogsTest {
|
||||
return new LogWithMetadata(
|
||||
0,
|
||||
100L,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
Hash.ZERO,
|
||||
0,
|
||||
Address.fromHexString("0x0"),
|
||||
Bytes.EMPTY,
|
||||
|
||||
@@ -109,7 +109,7 @@ public class PrivGetLogsTest {
|
||||
null,
|
||||
Collections.emptyList(),
|
||||
Collections.emptyList(),
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
null,
|
||||
null);
|
||||
|
||||
@@ -218,8 +218,8 @@ public class PrivGetLogsTest {
|
||||
return new LogWithMetadata(
|
||||
logIndex,
|
||||
100L,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
Hash.ZERO,
|
||||
0,
|
||||
Address.fromHexString("0x0"),
|
||||
Bytes.EMPTY,
|
||||
|
||||
@@ -100,7 +100,7 @@ public class PrivNewFilterTest {
|
||||
null,
|
||||
Collections.emptyList(),
|
||||
Collections.emptyList(),
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
null,
|
||||
null);
|
||||
|
||||
|
||||
@@ -110,8 +110,8 @@ public class TransactionTracerTest {
|
||||
when(blockHeader.getNumber()).thenReturn(12L);
|
||||
when(blockHeader.getHash()).thenReturn(blockHash);
|
||||
when(blockHeader.getParentHash()).thenReturn(previousBlockHash);
|
||||
when(previousBlockHeader.getStateRoot()).thenReturn(Hash.ZERO_HASH);
|
||||
when(worldStateArchive.getMutable(Hash.ZERO_HASH, null, false))
|
||||
when(previousBlockHeader.getStateRoot()).thenReturn(Hash.ZERO);
|
||||
when(worldStateArchive.getMutable(Hash.ZERO, null, false))
|
||||
.thenReturn(Optional.of(mutableWorldState));
|
||||
when(protocolSchedule.getByBlockNumber(12)).thenReturn(protocolSpec);
|
||||
when(protocolSpec.getTransactionProcessor()).thenReturn(transactionProcessor);
|
||||
|
||||
@@ -46,7 +46,7 @@ public class TransactionCompleteResultTest {
|
||||
.createTransaction(gen.generateKeyPair()),
|
||||
0L,
|
||||
Optional.of(Wei.of(7L)),
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
0));
|
||||
|
||||
assertThat(zeroPriorityFeeTx.getMaxFeePerGas()).isEqualTo("0x1");
|
||||
@@ -59,8 +59,7 @@ public class TransactionCompleteResultTest {
|
||||
final Transaction transaction = gen.transaction(TransactionType.EIP1559);
|
||||
TransactionCompleteResult tcr =
|
||||
new TransactionCompleteResult(
|
||||
new TransactionWithMetadata(
|
||||
transaction, 0L, Optional.of(Wei.of(7L)), Hash.ZERO_HASH, 0));
|
||||
new TransactionWithMetadata(transaction, 0L, Optional.of(Wei.of(7L)), Hash.ZERO, 0));
|
||||
assertThat(tcr.getMaxFeePerGas()).isNotEmpty();
|
||||
assertThat(tcr.getMaxPriorityFeePerGas()).isNotEmpty();
|
||||
assertThat(tcr.getGasPrice()).isNotEmpty();
|
||||
@@ -74,8 +73,7 @@ public class TransactionCompleteResultTest {
|
||||
final Transaction transaction = gen.transaction(TransactionType.FRONTIER);
|
||||
TransactionCompleteResult tcr =
|
||||
new TransactionCompleteResult(
|
||||
new TransactionWithMetadata(
|
||||
transaction, 0L, Optional.of(Wei.of(7L)), Hash.ZERO_HASH, 0));
|
||||
new TransactionWithMetadata(transaction, 0L, Optional.of(Wei.of(7L)), Hash.ZERO, 0));
|
||||
assertThat(tcr.getMaxFeePerGas()).isNull();
|
||||
assertThat(tcr.getMaxPriorityFeePerGas()).isNull();
|
||||
assertThat(tcr.getGasPrice()).isNotEmpty();
|
||||
@@ -89,7 +87,7 @@ public class TransactionCompleteResultTest {
|
||||
final Transaction transaction = gen.transaction(TransactionType.FRONTIER);
|
||||
TransactionCompleteResult tcr =
|
||||
new TransactionCompleteResult(
|
||||
new TransactionWithMetadata(transaction, 0L, Optional.empty(), Hash.ZERO_HASH, 0));
|
||||
new TransactionWithMetadata(transaction, 0L, Optional.empty(), Hash.ZERO, 0));
|
||||
assertThat(tcr.getMaxFeePerGas()).isNull();
|
||||
assertThat(tcr.getMaxPriorityFeePerGas()).isNull();
|
||||
assertThat(tcr.getGasPrice()).isNotEmpty();
|
||||
|
||||
@@ -454,8 +454,8 @@ public class LogsSubscriptionServiceTest {
|
||||
return new LogWithMetadata(
|
||||
0,
|
||||
100L,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO_HASH,
|
||||
Hash.ZERO,
|
||||
Hash.ZERO,
|
||||
0,
|
||||
Address.fromHexString("0x0"),
|
||||
Bytes.EMPTY,
|
||||
|
||||
@@ -45,7 +45,7 @@ import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
@@ -199,7 +199,7 @@ public class BlockchainQueriesTest {
@Test
public void getAccountStorageBlockNumber() {
final List<Address> addresses = Arrays.asList(gen.address(), gen.address(), gen.address());
final List<Bytes32> storageKeys =
final List<UInt256> storageKeys =
Arrays.asList(gen.storageKey(), gen.storageKey(), gen.storageKey());
final BlockchainWithData data = setupBlockchain(3, addresses, storageKeys);
final BlockchainQueries queries = data.blockchainQueries;
@@ -213,7 +213,7 @@ public class BlockchainQueriesTest {
storageKeys.forEach(
storageKey -> {
final Account actualAccount0 = worldState0.get(address);
final Optional<Bytes32> result = queries.storageAt(address, storageKey, 2L);
final Optional<UInt256> result = queries.storageAt(address, storageKey, 2L);
assertThat(result).contains(actualAccount0.getStorageValue(storageKey));
}));

@@ -226,7 +226,7 @@ public class BlockchainQueriesTest {
storageKeys.forEach(
storageKey -> {
final Account actualAccount1 = worldState1.get(address);
final Optional<Bytes32> result = queries.storageAt(address, storageKey, 1L);
final Optional<UInt256> result = queries.storageAt(address, storageKey, 1L);
assertThat(result).contains(actualAccount1.getStorageValue(storageKey));
}));
}
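As exercised above, the reverted query API takes the storage slot as a UInt256 key and yields an Optional<UInt256>. A short usage sketch under that signature, with queries and address assumed to be the objects set up in the surrounding test:

    // read slot 0 of the account at block 2; empty when the account or slot is absent
    final Optional<UInt256> slot = queries.storageAt(address, UInt256.ZERO, 2L);
    final UInt256 value = slot.orElse(UInt256.ZERO);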
|
||||
@@ -359,7 +359,7 @@ public class BlockchainQueriesTest {
|
||||
final BlockchainWithData data = setupBlockchain(3);
|
||||
final BlockchainQueries queries = data.blockchainQueries;
|
||||
List<LogWithMetadata> logs =
|
||||
queries.matchingLogs(Hash.ZERO_HASH, new LogsQuery.Builder().build(), () -> true);
|
||||
queries.matchingLogs(Hash.ZERO, new LogsQuery.Builder().build(), () -> true);
|
||||
assertThat(logs).isEmpty();
|
||||
}
|
||||
|
||||
@@ -368,7 +368,7 @@ public class BlockchainQueriesTest {
|
||||
final BlockchainWithData data = setupBlockchain(3);
|
||||
final BlockchainQueries queries = data.blockchainQueries;
|
||||
|
||||
final Optional<BlockHeader> ommerOptional = queries.getOmmer(Hash.ZERO_HASH, 0);
|
||||
final Optional<BlockHeader> ommerOptional = queries.getOmmer(Hash.ZERO, 0);
|
||||
|
||||
assertThat(ommerOptional).isEmpty();
|
||||
}
|
||||
@@ -540,7 +540,7 @@ public class BlockchainQueriesTest {
|
||||
}
|
||||
|
||||
private BlockchainWithData setupBlockchain(
|
||||
final int blocksToAdd, final List<Address> accountsToSetup, final List<Bytes32> storageKeys) {
|
||||
final int blocksToAdd, final List<Address> accountsToSetup, final List<UInt256> storageKeys) {
|
||||
checkArgument(blocksToAdd >= 1, "Must add at least one block to the queries");
|
||||
|
||||
final WorldStateArchive worldStateArchive = createInMemoryWorldStateArchive();
|
||||
|
||||
@@ -236,7 +236,7 @@ public class PoWBlockCreatorTest {
|
||||
|
||||
final ProcessableBlockHeader header =
|
||||
BlockHeaderBuilder.create()
|
||||
.parentHash(Hash.ZERO_HASH)
|
||||
.parentHash(Hash.ZERO)
|
||||
.coinbase(BLOCK_1_COINBASE)
|
||||
.difficulty(Difficulty.ONE)
|
||||
.number(1)
|
||||
@@ -306,7 +306,7 @@ public class PoWBlockCreatorTest {
|
||||
|
||||
final ProcessableBlockHeader header =
|
||||
BlockHeaderBuilder.create()
|
||||
.parentHash(Hash.ZERO_HASH)
|
||||
.parentHash(Hash.ZERO)
|
||||
.coinbase(BLOCK_1_COINBASE)
|
||||
.difficulty(Difficulty.ONE)
|
||||
.number(1)
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.ethereum.blockcreation;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.core.MiningParameters.DEFAULT_REMOTE_SEALERS_LIMIT;
|
||||
import static org.hyperledger.besu.ethereum.core.MiningParameters.DEFAULT_REMOTE_SEALERS_TTL;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
@@ -29,6 +28,7 @@ import org.hyperledger.besu.ethereum.mainnet.PoWSolution;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
@@ -53,7 +53,7 @@ public class PoWMiningCoordinatorTest {
|
||||
syncState,
|
||||
DEFAULT_REMOTE_SEALERS_LIMIT,
|
||||
DEFAULT_REMOTE_SEALERS_TTL);
|
||||
final PoWSolution solution = new PoWSolution(1L, Hash.EMPTY, null, ZERO_32);
|
||||
final PoWSolution solution = new PoWSolution(1L, Hash.EMPTY, null, Bytes32.ZERO);
|
||||
|
||||
assertThat(miningCoordinator.isMining()).isFalse();
|
||||
assertThat(miningCoordinator.hashesPerSecond()).isEqualTo(Optional.empty());
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.ethereum.vm;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider.createInMemoryWorldStateArchive;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
@@ -66,7 +65,7 @@ public class EntriesFromIntegrationTest {
|
||||
}
|
||||
|
||||
final Map<Bytes32, AccountStorageEntry> values =
|
||||
account.storageEntriesFrom(ZERO_32, Integer.MAX_VALUE);
|
||||
account.storageEntriesFrom(Bytes32.ZERO, Integer.MAX_VALUE);
|
||||
assertThat(values).isEqualTo(expectedValues);
|
||||
}
|
||||
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
package org.hyperledger.besu.ethereum.worldstate;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider.createInMemoryBlockchain;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Hash;
|
||||
@@ -145,12 +144,13 @@ public class PrunerIntegrationTest {
|
||||
worldStateArchive.get(stateRoot, blockHeader.getHash()).get();
|
||||
// Traverse accounts and make sure all are accessible
|
||||
final int expectedAccounts = accountsPerBlock * i;
|
||||
final long accounts = markedState.streamAccounts(ZERO_32, expectedAccounts * 2).count();
|
||||
final long accounts =
|
||||
markedState.streamAccounts(Bytes32.ZERO, expectedAccounts * 2).count();
|
||||
assertThat(accounts).isEqualTo(expectedAccounts);
|
||||
// Traverse storage to ensure that all storage is accessible
|
||||
markedState
|
||||
.streamAccounts(ZERO_32, expectedAccounts * 2)
|
||||
.forEach(a -> a.storageEntriesFrom(ZERO_32, 1000));
|
||||
.streamAccounts(Bytes32.ZERO, expectedAccounts * 2)
|
||||
.forEach(a -> a.storageEntriesFrom(Bytes32.ZERO, 1000));
|
||||
}
|
||||
|
||||
// All other state roots should have been removed
|
||||
@@ -200,7 +200,7 @@ public class PrunerIntegrationTest {
|
||||
|
||||
// Collect storage roots and code
|
||||
stateTrie
|
||||
.entriesFrom(ZERO_32, 1000)
|
||||
.entriesFrom(Bytes32.ZERO, 1000)
|
||||
.forEach(
|
||||
(key, val) -> {
|
||||
final StateTrieAccountValue accountValue =
|
||||
|
||||
@@ -37,6 +37,7 @@ import java.util.Objects;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class BonsaiAccount implements MutableAccount, EvmAccount {
private final BonsaiWorldView context;
@@ -50,7 +51,7 @@ public class BonsaiAccount implements MutableAccount, EvmAccount {
private Hash storageRoot;
private Bytes code;

private final Map<Bytes32, Bytes32> updatedStorage = new HashMap<>();
private final Map<UInt256, UInt256> updatedStorage = new HashMap<>();

BonsaiAccount(
final BonsaiWorldView context,
@@ -203,12 +204,12 @@ public class BonsaiAccount implements MutableAccount, EvmAccount {
}

@Override
public Bytes32 getStorageValue(final Bytes32 key) {
public UInt256 getStorageValue(final UInt256 key) {
return context.getStorageValue(address, key);
}

@Override
public Bytes32 getOriginalStorageValue(final Bytes32 key) {
public UInt256 getOriginalStorageValue(final UInt256 key) {
return context.getPriorStorageValue(address, key);
}

@@ -232,7 +233,7 @@ public class BonsaiAccount implements MutableAccount, EvmAccount {
}

@Override
public void setStorageValue(final Bytes32 key, final Bytes32 value) {
public void setStorageValue(final UInt256 key, final UInt256 value) {
if (!mutable) {
throw new UnsupportedOperationException("Account is immutable");
}
@@ -245,7 +246,7 @@ public class BonsaiAccount implements MutableAccount, EvmAccount {
}

@Override
public Map<Bytes32, Bytes32> getUpdatedStorage() {
public Map<UInt256, UInt256> getUpdatedStorage() {
return updatedStorage;
}
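With the revert, BonsaiAccount again exposes storage as UInt256 keys and values, per the signatures above. A minimal sketch of reading and writing a slot through that interface, assuming account is a mutable BonsaiAccount already in scope:

    final UInt256 key = UInt256.ONE;
    final UInt256 current = account.getStorageValue(key);             // UInt256.ZERO when the slot is unset
    account.setStorageValue(key, current.add(UInt256.valueOf(41)));   // throws if the account is immutable
    final UInt256 pending = account.getUpdatedStorage().get(key);     // uncommitted writes are tracked here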
|
||||
|
||||
|
||||
@@ -16,8 +16,6 @@
|
||||
|
||||
package org.hyperledger.besu.ethereum.bonsai;
|
||||
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.datatypes.Hash;
|
||||
import org.hyperledger.besu.ethereum.chain.Blockchain;
|
||||
@@ -35,6 +33,7 @@ import java.util.stream.Stream;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
/** A World State backed first by trie log layer and then by another world state. */
|
||||
public class BonsaiLayeredWorldState implements MutableWorldState, BonsaiWorldView, WorldState {
|
||||
@@ -126,19 +125,19 @@ public class BonsaiLayeredWorldState implements MutableWorldState, BonsaiWorldVi
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bytes32 getStorageValue(final Address address, final Bytes32 key) {
|
||||
return getStorageValueBySlotHash(address, Hash.hash(key)).orElse(ZERO_32);
|
||||
public UInt256 getStorageValue(final Address address, final UInt256 key) {
|
||||
return getStorageValueBySlotHash(address, Hash.hash(key)).orElse(UInt256.ZERO);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<Bytes32> getStorageValueBySlotHash(final Address address, final Hash slotHash) {
|
||||
public Optional<UInt256> getStorageValueBySlotHash(final Address address, final Hash slotHash) {
|
||||
// this must be iterative and lambda light because the stack may blow up
|
||||
// mainly because we don't have tail calls.
|
||||
BonsaiLayeredWorldState currentLayer = this;
|
||||
while (currentLayer != null) {
|
||||
final Optional<Bytes32> maybeValue =
|
||||
final Optional<UInt256> maybeValue =
|
||||
currentLayer.trieLog.getStorageBySlotHash(address, slotHash);
|
||||
final Optional<Bytes32> maybePriorValue =
|
||||
final Optional<UInt256> maybePriorValue =
|
||||
currentLayer.trieLog.getPriorStorageBySlotHash(address, slotHash);
|
||||
if (currentLayer == this && maybeValue.isPresent()) {
|
||||
return maybeValue;
|
||||
@@ -159,7 +158,7 @@ public class BonsaiLayeredWorldState implements MutableWorldState, BonsaiWorldVi
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bytes32 getPriorStorageValue(final Address address, final Bytes32 key) {
|
||||
public UInt256 getPriorStorageValue(final Address address, final UInt256 key) {
|
||||
// This is the base layer for a block, all values are original.
|
||||
return getStorageValue(address, key);
|
||||
}
|
||||
@@ -178,7 +177,7 @@ public class BonsaiLayeredWorldState implements MutableWorldState, BonsaiWorldVi
|
||||
.forEach(
|
||||
entry -> {
|
||||
if (!results.containsKey(entry.getKey())) {
|
||||
final Bytes32 value = entry.getValue().getUpdated();
|
||||
final UInt256 value = entry.getValue().getUpdated();
|
||||
// yes, store the nulls. If it was deleted it should stay deleted
|
||||
results.put(entry.getKey(), value);
|
||||
}
|
||||
@@ -192,7 +191,7 @@ public class BonsaiLayeredWorldState implements MutableWorldState, BonsaiWorldVi
|
||||
final Account account = currentLayer.getNextWorldView().get().get(address);
|
||||
if (account != null) {
|
||||
account
|
||||
.storageEntriesFrom(Hash.ZERO_HASH, Integer.MAX_VALUE)
|
||||
.storageEntriesFrom(Hash.ZERO, Integer.MAX_VALUE)
|
||||
.forEach(
|
||||
(k, v) -> {
|
||||
if (!results.containsKey(k)) {
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
|
||||
package org.hyperledger.besu.ethereum.bonsai;
|
||||
|
||||
import static org.hyperledger.besu.datatypes.Constants.ZERO_32;
|
||||
import static org.hyperledger.besu.ethereum.bonsai.BonsaiAccount.fromRLP;
|
||||
import static org.hyperledger.besu.ethereum.bonsai.BonsaiWorldStateKeyValueStorage.WORLD_BLOCK_HASH_KEY;
|
||||
import static org.hyperledger.besu.ethereum.bonsai.BonsaiWorldStateKeyValueStorage.WORLD_ROOT_HASH_KEY;
|
||||
@@ -41,6 +40,7 @@ import javax.annotation.Nonnull;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -65,7 +65,7 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
|
||||
Hash.wrap(
|
||||
Bytes32.wrap(worldStateStorage.getWorldStateRootHash().orElse(Hash.EMPTY_TRIE_HASH)));
|
||||
worldStateBlockHash =
|
||||
Hash.wrap(Bytes32.wrap(worldStateStorage.getWorldStateBlockHash().orElse(Hash.ZERO_HASH)));
|
||||
Hash.wrap(Bytes32.wrap(worldStateStorage.getWorldStateBlockHash().orElse(Hash.ZERO)));
|
||||
updater = new BonsaiWorldStateUpdater(this);
|
||||
}
|
||||
|
||||
@@ -125,7 +125,7 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
|
||||
oldAccount.getStorageRoot(),
|
||||
Function.identity(),
|
||||
Function.identity());
|
||||
Map<Bytes32, Bytes> entriesToDelete = storageTrie.entriesFrom(ZERO_32, 256);
|
||||
Map<Bytes32, Bytes> entriesToDelete = storageTrie.entriesFrom(Bytes32.ZERO, 256);
|
||||
while (!entriesToDelete.isEmpty()) {
|
||||
entriesToDelete
|
||||
.keySet()
|
||||
@@ -133,7 +133,7 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
|
||||
k -> stateUpdater.removeStorageValueBySlotHash(Hash.hash(address), Hash.wrap(k)));
|
||||
if (entriesToDelete.size() == 256) {
|
||||
entriesToDelete.keySet().forEach(storageTrie::remove);
|
||||
entriesToDelete = storageTrie.entriesFrom(ZERO_32, 256);
|
||||
entriesToDelete = storageTrie.entriesFrom(Bytes32.ZERO, 256);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
@@ -142,7 +142,7 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
|
||||
|
||||
// second update account storage state. This must be done before updating the accounts so
|
||||
// that we can get the storage state hash
|
||||
for (final Map.Entry<Address, Map<Hash, BonsaiValue<Bytes32>>> storageAccountUpdate :
|
||||
for (final Map.Entry<Address, Map<Hash, BonsaiValue<UInt256>>> storageAccountUpdate :
|
||||
worldStateUpdater.getStorageToUpdate().entrySet()) {
|
||||
final Address updatedAddress = storageAccountUpdate.getKey();
|
||||
final Hash updatedAddressHash = Hash.hash(updatedAddress);
|
||||
@@ -161,11 +161,11 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
|
||||
|
||||
// for manicured tries and composting, collect branches here (not implemented)
|
||||
|
||||
for (final Map.Entry<Hash, BonsaiValue<Bytes32>> storageUpdate :
|
||||
for (final Map.Entry<Hash, BonsaiValue<UInt256>> storageUpdate :
|
||||
storageAccountUpdate.getValue().entrySet()) {
|
||||
final Hash keyHash = storageUpdate.getKey();
|
||||
final Bytes32 updatedStorage = storageUpdate.getValue().getUpdated();
|
||||
if (updatedStorage == null || updatedStorage.equals(ZERO_32)) {
|
||||
final UInt256 updatedStorage = storageUpdate.getValue().getUpdated();
|
||||
if (updatedStorage == null || updatedStorage.equals(UInt256.ZERO)) {
|
||||
stateUpdater.removeStorageValueBySlotHash(updatedAddressHash, keyHash);
|
||||
storageTrie.remove(keyHash);
|
||||
} else {
|
||||
@@ -372,19 +372,19 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
}

@Override
public Bytes32 getStorageValue(final Address address, final Bytes32 storageKey) {
return getStorageValueBySlotHash(address, Hash.hash(storageKey)).orElse(ZERO_32);
public UInt256 getStorageValue(final Address address, final UInt256 storageKey) {
return getStorageValueBySlotHash(address, Hash.hash(storageKey)).orElse(UInt256.ZERO);
}

@Override
public Optional<Bytes32> getStorageValueBySlotHash(final Address address, final Hash slotHash) {
public Optional<UInt256> getStorageValueBySlotHash(final Address address, final Hash slotHash) {
return worldStateStorage
.getStorageValueBySlotHash(Hash.hash(address), slotHash)
.map(Bytes32::leftPad);
.map(UInt256::fromBytes);
}

@Override
public Bytes32 getPriorStorageValue(final Address address, final Bytes32 storageKey) {
public UInt256 getPriorStorageValue(final Address address, final UInt256 storageKey) {
return getStorageValue(address, storageKey);
}

@@ -396,6 +396,6 @@ public class BonsaiPersistedWorldState implements MutableWorldState, BonsaiWorld
rootHash,
Function.identity(),
Function.identity());
return storageTrie.entriesFrom(ZERO_32, Integer.MAX_VALUE);
return storageTrie.entriesFrom(Bytes32.ZERO, Integer.MAX_VALUE);
}
}
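The storage read above is where the two representations meet: raw slot bytes from the backing store become a 32-byte word either via Bytes32::leftPad (the migrated code being removed) or via UInt256::fromBytes (restored here). A small sketch contrasting the two conversions on the same stored bytes; the comments describe assumed Tuweni behaviour, not something this diff states:

    final Bytes stored = Bytes.fromHexString("0x2a");   // a persisted slot value, often shorter than 32 bytes
    final Bytes32 asBytes = Bytes32.leftPad(stored);    // 0x00...2a, a plain 32-byte value with byte-wise equality
    final UInt256 asWord = UInt256.fromBytes(stored);   // the unsigned integer 42, with numeric equality and ordering
    // Both wrap the same 32 bytes, but they are distinct types with their own equals/compareTo,
    // so code that compares or maps across the two can behave differently.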
|
||||
|
||||
@@ -37,7 +37,7 @@ import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -263,7 +263,7 @@ public class BonsaiWorldStateArchive implements WorldStateArchive {
|
||||
public Optional<WorldStateProof> getAccountProof(
|
||||
final Hash worldStateRoot,
|
||||
final Address accountAddress,
|
||||
final List<Bytes32> accountStorageKeys) {
|
||||
final List<UInt256> accountStorageKeys) {
|
||||
// FIXME we can do proofs for layered tries and the persisted trie
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
@@ -16,8 +16,6 @@

package org.hyperledger.besu.ethereum.bonsai;

import static org.hyperledger.besu.datatypes.Constants.ZERO_32;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.Wei;
@@ -30,6 +28,7 @@ import org.hyperledger.besu.evm.worldstate.UpdateTrackingAccount;
import org.hyperledger.besu.evm.worldstate.WrappedEvmAccount;

import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -39,9 +38,11 @@ import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldView, BonsaiAccount>
implements BonsaiWorldView {
@@ -53,7 +54,7 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
// storage sub mapped by _hashed_ key. This is because in self_destruct calls we need to
// enumerate the old storage and delete it. Those are trie stored by hashed key by spec and the
// alternative was to keep a giant pre-image cache of the entire trie.
private Map<Address, Map<Hash, BonsaiValue<Bytes32>>> storageToUpdate = new ConcurrentHashMap<>();
private Map<Address, Map<Hash, BonsaiValue<UInt256>>> storageToUpdate = new ConcurrentHashMap<>();

BonsaiWorldStateUpdater(final BonsaiWorldView world) {
super(world);
@@ -70,6 +71,22 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
return copy;
}

@Override
public Account get(final Address address) {
return super.get(address);
}

@Override
protected UpdateTrackingAccount<BonsaiAccount> track(
final UpdateTrackingAccount<BonsaiAccount> account) {
return super.track(account);
}

@Override
public EvmAccount getAccount(final Address address) {
return super.getAccount(address);
}

@Override
public EvmAccount createAccount(final Address address, final long nonce, final Wei balance) {
BonsaiValue<BonsaiAccount> bonsaiValue = accountsToUpdate.get(address);
@@ -105,7 +122,7 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
return storageToClear;
}

Map<Address, Map<Hash, BonsaiValue<Bytes32>>> getStorageToUpdate() {
Map<Address, Map<Hash, BonsaiValue<UInt256>>> getStorageToUpdate() {
return storageToUpdate;
}

@@ -161,13 +178,13 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
}

// mark all updated storage as to be cleared
final Map<Hash, BonsaiValue<Bytes32>> deletedStorageUpdates =
final Map<Hash, BonsaiValue<UInt256>> deletedStorageUpdates =
storageToUpdate.computeIfAbsent(deletedAddress, k -> new HashMap<>());
final Iterator<Map.Entry<Hash, BonsaiValue<Bytes32>>> iter =
final Iterator<Map.Entry<Hash, BonsaiValue<UInt256>>> iter =
deletedStorageUpdates.entrySet().iterator();
while (iter.hasNext()) {
final Map.Entry<Hash, BonsaiValue<Bytes32>> updateEntry = iter.next();
final BonsaiValue<Bytes32> updatedSlot = updateEntry.getValue();
final Map.Entry<Hash, BonsaiValue<UInt256>> updateEntry = iter.next();
final BonsaiValue<UInt256> updatedSlot = updateEntry.getValue();
if (updatedSlot.getPrior() == null || updatedSlot.getPrior().isZero()) {
iter.remove();
} else {
@@ -184,7 +201,7 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
(keyHash, entryValue) -> {
final Hash slotHash = Hash.wrap(keyHash);
if (!deletedStorageUpdates.containsKey(slotHash)) {
final Bytes32 value = Bytes32.leftPad(RLP.decodeOne(entryValue));
final UInt256 value = UInt256.fromBytes(RLP.decodeOne(entryValue));
deletedStorageUpdates.put(slotHash, new BonsaiValue<>(value, null, true));
}
});
@@ -228,22 +245,24 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
pendingCode.setUpdated(updatedAccount.getCode());
}

final Map<Hash, BonsaiValue<Bytes32>> pendingStorageUpdates =
final Map<Hash, BonsaiValue<UInt256>> pendingStorageUpdates =
storageToUpdate.computeIfAbsent(updatedAddress, __ -> new HashMap<>());
if (tracked.getStorageWasCleared()) {
storageToClear.add(updatedAddress);
pendingStorageUpdates.clear();
}

final TreeSet<Map.Entry<Bytes32, Bytes32>> entries =
new TreeSet<>(Map.Entry.comparingByKey());
final TreeSet<Map.Entry<UInt256, UInt256>> entries =
new TreeSet<>(
Comparator.comparing(
(Function<Map.Entry<UInt256, UInt256>, UInt256>) Map.Entry::getKey));
entries.addAll(updatedAccount.getUpdatedStorage().entrySet());

for (final Map.Entry<Bytes32, Bytes32> storageUpdate : entries) {
final Bytes32 keyUInt = storageUpdate.getKey();
for (final Map.Entry<UInt256, UInt256> storageUpdate : entries) {
final UInt256 keyUInt = storageUpdate.getKey();
final Hash slotHash = Hash.hash(keyUInt);
final Bytes32 value = storageUpdate.getValue();
final BonsaiValue<Bytes32> pendingValue = pendingStorageUpdates.get(slotHash);
final UInt256 value = storageUpdate.getValue();
final BonsaiValue<UInt256> pendingValue = pendingStorageUpdates.get(slotHash);
if (pendingValue == null) {
pendingStorageUpdates.put(
slotHash, new BonsaiValue<>(updatedAccount.getOriginalStorageValue(keyUInt), value));
@@ -282,22 +301,22 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
}

@Override
public Bytes32 getStorageValue(final Address address, final Bytes32 storageKey) {
public UInt256 getStorageValue(final Address address, final UInt256 storageKey) {
// TODO maybe log the read into the trie layer?
final Hash slotHashBytes = Hash.hash(storageKey);
return getStorageValueBySlotHash(address, slotHashBytes).orElse(ZERO_32);
return getStorageValueBySlotHash(address, slotHashBytes).orElse(UInt256.ZERO);
}

@Override
public Optional<Bytes32> getStorageValueBySlotHash(final Address address, final Hash slotHash) {
final Map<Hash, BonsaiValue<Bytes32>> localAccountStorage = storageToUpdate.get(address);
public Optional<UInt256> getStorageValueBySlotHash(final Address address, final Hash slotHash) {
final Map<Hash, BonsaiValue<UInt256>> localAccountStorage = storageToUpdate.get(address);
if (localAccountStorage != null) {
final BonsaiValue<Bytes32> value = localAccountStorage.get(slotHash);
final BonsaiValue<UInt256> value = localAccountStorage.get(slotHash);
if (value != null) {
return Optional.ofNullable(value.getUpdated());
}
}
final Optional<Bytes32> valueUInt =
final Optional<UInt256> valueUInt =
wrappedWorldView().getStorageValueBySlotHash(address, slotHash);
valueUInt.ifPresent(
v ->
@@ -308,28 +327,28 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
}

@Override
public Bytes32 getPriorStorageValue(final Address address, final Bytes32 storageKey) {
public UInt256 getPriorStorageValue(final Address address, final UInt256 storageKey) {
// TODO maybe log the read into the trie layer?
final Map<Hash, BonsaiValue<Bytes32>> localAccountStorage = storageToUpdate.get(address);
final Map<Hash, BonsaiValue<UInt256>> localAccountStorage = storageToUpdate.get(address);
final Hash slotHash = Hash.hash(storageKey);
if (localAccountStorage != null) {
final BonsaiValue<Bytes32> value = localAccountStorage.get(slotHash);
final BonsaiValue<UInt256> value = localAccountStorage.get(slotHash);
if (value != null) {
if (value.isCleared()) {
return ZERO_32;
return UInt256.ZERO;
}
final Bytes32 updated = value.getUpdated();
final UInt256 updated = value.getUpdated();
if (updated != null) {
return updated;
}
final Bytes32 original = value.getPrior();
final UInt256 original = value.getPrior();
if (original != null) {
return original;
}
}
}
if (storageToClear.contains(address)) {
return ZERO_32;
return UInt256.ZERO;
}
return getStorageValue(address, storageKey);
}
@@ -381,10 +400,10 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
blockHash);
}

for (final Map.Entry<Address, Map<Hash, BonsaiValue<Bytes32>>> updatesStorage :
for (final Map.Entry<Address, Map<Hash, BonsaiValue<UInt256>>> updatesStorage :
storageToUpdate.entrySet()) {
final Address address = updatesStorage.getKey();
for (final Map.Entry<Hash, BonsaiValue<Bytes32>> slotUpdate :
for (final Map.Entry<Hash, BonsaiValue<UInt256>> slotUpdate :
updatesStorage.getValue().entrySet()) {
layer.addStorageChange(
address,
@@ -554,10 +573,10 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
}
}

private Map<Hash, BonsaiValue<Bytes32>> maybeCreateStorageMap(
final Map<Hash, BonsaiValue<Bytes32>> storageMap, final Address address) {
private Map<Hash, BonsaiValue<UInt256>> maybeCreateStorageMap(
final Map<Hash, BonsaiValue<UInt256>> storageMap, final Address address) {
if (storageMap == null) {
final Map<Hash, BonsaiValue<Bytes32>> newMap = new HashMap<>();
final Map<Hash, BonsaiValue<UInt256>> newMap = new HashMap<>();
storageToUpdate.put(address, newMap);
return newMap;
} else {
@@ -568,8 +587,8 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
private void rollStorageChange(
final Address address,
final Hash slotHash,
final Bytes32 expectedValue,
final Bytes32 replacementValue) {
final UInt256 expectedValue,
final UInt256 replacementValue) {
if (Objects.equals(expectedValue, replacementValue)) {
// non-change, a cached read.
return;
@@ -578,10 +597,10 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
// corner case on deletes, non-change
return;
}
final Map<Hash, BonsaiValue<Bytes32>> storageMap = storageToUpdate.get(address);
BonsaiValue<Bytes32> slotValue = storageMap == null ? null : storageMap.get(slotHash);
final Map<Hash, BonsaiValue<UInt256>> storageMap = storageToUpdate.get(address);
BonsaiValue<UInt256> slotValue = storageMap == null ? null : storageMap.get(slotHash);
if (slotValue == null) {
final Optional<Bytes32> storageValue =
final Optional<UInt256> storageValue =
wrappedWorldView().getStorageValueBySlotHash(address, slotHash);
if (storageValue.isPresent()) {
slotValue = new BonsaiValue<>(storageValue.get(), storageValue.get());
@@ -599,7 +618,7 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
address, slotHash));
}
} else {
final Bytes32 existingSlotValue = slotValue.getUpdated();
final UInt256 existingSlotValue = slotValue.getUpdated();
if ((expectedValue == null || expectedValue.isZero())
&& existingSlotValue != null
&& !existingSlotValue.isZero()) {
@@ -618,7 +637,7 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
existingSlotValue == null ? "null" : existingSlotValue.toShortHexString()));
}
if (replacementValue == null && slotValue.getPrior() == null) {
final Map<Hash, BonsaiValue<Bytes32>> thisStorageUpdate =
final Map<Hash, BonsaiValue<UInt256>> thisStorageUpdate =
maybeCreateStorageMap(storageMap, address);
thisStorageUpdate.remove(slotHash);
if (thisStorageUpdate.isEmpty()) {
@@ -630,10 +649,10 @@ public class BonsaiWorldStateUpdater extends AbstractWorldUpdater<BonsaiWorldVie
}
}

private boolean isSlotEquals(final Bytes32 expectedValue, final Bytes32 existingSlotValue) {
final Bytes32 sanitizedExpectedValue = (expectedValue == null) ? ZERO_32 : expectedValue;
final Bytes32 sanitizedExistingSlotValue =
(existingSlotValue == null) ? ZERO_32 : existingSlotValue;
private boolean isSlotEquals(final UInt256 expectedValue, final UInt256 existingSlotValue) {
final UInt256 sanitizedExpectedValue = (expectedValue == null) ? UInt256.ZERO : expectedValue;
final UInt256 sanitizedExistingSlotValue =
(existingSlotValue == null) ? UInt256.ZERO : existingSlotValue;
return Objects.equals(sanitizedExpectedValue, sanitizedExistingSlotValue);
}

@@ -26,6 +26,7 @@ import java.util.Optional;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public interface BonsaiWorldView extends WorldView {

@@ -33,11 +34,11 @@ public interface BonsaiWorldView extends WorldView {

Optional<Bytes> getStateTrieNode(Bytes location);

Bytes32 getStorageValue(Address address, Bytes32 key);
UInt256 getStorageValue(Address address, UInt256 key);

Optional<Bytes32> getStorageValueBySlotHash(Address address, Hash slotHash);
Optional<UInt256> getStorageValueBySlotHash(Address address, Hash slotHash);

Bytes32 getPriorStorageValue(Address address, Bytes32 key);
UInt256 getPriorStorageValue(Address address, UInt256 key);

/**
* Retrieve all the storage values of a account.

@@ -36,7 +36,7 @@ import java.util.function.Function;
import java.util.stream.Stream;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

/**
* This class encapsulates the changes that are done to transition one block to the next. This
@@ -50,7 +50,7 @@ public class TrieLogLayer {
private Hash blockHash;
private final Map<Address, BonsaiValue<StateTrieAccountValue>> accounts;
private final Map<Address, BonsaiValue<Bytes>> code;
private final Map<Address, Map<Hash, BonsaiValue<Bytes32>>> storage;
private final Map<Address, Map<Hash, BonsaiValue<UInt256>>> storage;
private boolean frozen = false;

TrieLogLayer() {
@@ -92,7 +92,7 @@ public class TrieLogLayer {
}

void addStorageChange(
final Address address, final Hash slotHash, final Bytes32 oldValue, final Bytes32 newValue) {
final Address address, final Hash slotHash, final UInt256 oldValue, final UInt256 newValue) {
checkState(!frozen, "Layer is Frozen");
storage
.computeIfAbsent(address, a -> new TreeMap<>())
@@ -136,13 +136,13 @@ public class TrieLogLayer {
if (input.nextIsNull()) {
input.skipNext();
} else {
final Map<Hash, BonsaiValue<Bytes32>> storageChanges = new TreeMap<>();
final Map<Hash, BonsaiValue<UInt256>> storageChanges = new TreeMap<>();
input.enterList();
while (!input.isEndOfCurrentList()) {
input.enterList();
final Hash slotHash = Hash.wrap(input.readBytes32());
final Bytes32 oldValue = nullOrValue(input, RLPInput::readBytes32Scalar);
final Bytes32 newValue = nullOrValue(input, RLPInput::readBytes32Scalar);
final UInt256 oldValue = nullOrValue(input, RLPInput::readUInt256Scalar);
final UInt256 newValue = nullOrValue(input, RLPInput::readUInt256Scalar);
storageChanges.put(slotHash, new BonsaiValue<>(oldValue, newValue));
input.leaveList();
}
@@ -190,16 +190,16 @@ public class TrieLogLayer {
codeChange.writeRlp(output, RLPOutput::writeBytes);
}

final Map<Hash, BonsaiValue<Bytes32>> storageChanges = storage.get(address);
final Map<Hash, BonsaiValue<UInt256>> storageChanges = storage.get(address);
if (storageChanges == null) {
output.writeNull();
} else {
output.startList();
for (final Map.Entry<Hash, BonsaiValue<Bytes32>> storageChangeEntry :
for (final Map.Entry<Hash, BonsaiValue<UInt256>> storageChangeEntry :
storageChanges.entrySet()) {
output.startList();
output.writeBytes(storageChangeEntry.getKey());
storageChangeEntry.getValue().writeInnerRlp(output, RLPOutput::writeBytesScalar);
storageChangeEntry.getValue().writeInnerRlp(output, RLPOutput::writeUInt256Scalar);
output.endList();
}
output.endList();
@@ -220,7 +220,7 @@ public class TrieLogLayer {
return code.entrySet().stream();
}

public Stream<Map.Entry<Address, Map<Hash, BonsaiValue<Bytes32>>>> streamStorageChanges() {
public Stream<Map.Entry<Address, Map<Hash, BonsaiValue<UInt256>>>> streamStorageChanges() {
return storage.entrySet().stream();
}

@@ -228,7 +228,7 @@ public class TrieLogLayer {
return storage.containsKey(address);
}

public Stream<Map.Entry<Hash, BonsaiValue<Bytes32>>> streamStorageChanges(final Address address) {
public Stream<Map.Entry<Hash, BonsaiValue<UInt256>>> streamStorageChanges(final Address address) {
return storage.getOrDefault(address, Map.of()).entrySet().stream();
}

@@ -249,13 +249,13 @@ public class TrieLogLayer {
return Optional.ofNullable(code.get(address)).map(BonsaiValue::getUpdated);
}

Optional<Bytes32> getPriorStorageBySlotHash(final Address address, final Hash slotHash) {
Optional<UInt256> getPriorStorageBySlotHash(final Address address, final Hash slotHash) {
return Optional.ofNullable(storage.get(address))
.map(i -> i.get(slotHash))
.map(BonsaiValue::getPrior);
}

Optional<Bytes32> getStorageBySlotHash(final Address address, final Hash slotHash) {
Optional<UInt256> getStorageBySlotHash(final Address address, final Hash slotHash) {
return Optional.ofNullable(storage.get(address))
.map(i -> i.get(slotHash))
.map(BonsaiValue::getUpdated);
@@ -294,11 +294,11 @@ public class TrieLogLayer {
}
}
sb.append("Storage").append("\n");
for (final Map.Entry<Address, Map<Hash, BonsaiValue<Bytes32>>> storage : storage.entrySet()) {
for (final Map.Entry<Address, Map<Hash, BonsaiValue<UInt256>>> storage : storage.entrySet()) {
sb.append(" : ").append(storage.getKey()).append("\n");
for (final Map.Entry<Hash, BonsaiValue<Bytes32>> slot : storage.getValue().entrySet()) {
final Bytes32 originalValue = slot.getValue().getPrior();
final Bytes32 updatedValue = slot.getValue().getUpdated();
for (final Map.Entry<Hash, BonsaiValue<UInt256>> slot : storage.getValue().entrySet()) {
final UInt256 originalValue = slot.getValue().getPrior();
final UInt256 updatedValue = slot.getValue().getUpdated();
sb.append(" : ").append(slot.getKey()).append("\n");
if (Objects.equals(originalValue, updatedValue)) {
sb.append(" = ")

@@ -14,8 +14,6 @@
*/
package org.hyperledger.besu.ethereum.core;

import static org.hyperledger.besu.datatypes.Constants.ZERO_32;

import org.hyperledger.besu.plugin.data.Quantity;

import java.math.BigInteger;
@@ -31,7 +29,7 @@ public final class Difficulty extends BaseUInt256Value<Difficulty> implements Qu

public static final Difficulty ONE = of(1);

public static final Difficulty MAX_VALUE = wrap(ZERO_32.not());
public static final Difficulty MAX_VALUE = wrap(Bytes32.ZERO.not());

Difficulty(final UInt256 value) {
super(value, Difficulty::new);

@@ -29,6 +29,7 @@ import java.util.OptionalLong;
import com.google.common.base.MoreObjects;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class TraceFrame {

@@ -45,7 +46,7 @@ public class TraceFrame {
private final Bytes outputData;
private final Optional<Bytes32[]> stack;
private final Optional<Bytes[]> memory;
private final Optional<Map<Bytes32, Bytes32>> storage;
private final Optional<Map<UInt256, UInt256>> storage;
private final WorldUpdater worldUpdater;
private final Optional<Bytes> revertReason;
private final Optional<Map<Address, Wei>> maybeRefunds;
@@ -73,7 +74,7 @@ public class TraceFrame {
final Bytes outputData,
final Optional<Bytes32[]> stack,
final Optional<Bytes[]> memory,
final Optional<Map<Bytes32, Bytes32>> storage,
final Optional<Map<UInt256, UInt256>> storage,
final WorldUpdater worldUpdater,
final Optional<Bytes> revertReason,
final Optional<Map<Address, Wei>> maybeRefunds,
@@ -165,7 +166,7 @@ public class TraceFrame {
return memory;
}

public Optional<Map<Bytes32, Bytes32>> getStorage() {
public Optional<Map<UInt256, UInt256>> getStorage() {
return storage;
}

@@ -135,7 +135,7 @@ public class PrivateTransactionSimulator {
publicWorldState.updater(),
disposablePrivateState.updater(),
header,
Hash.ZERO_HASH, // Corresponding PMT hash not needed as this private transaction doesn't
Hash.ZERO, // Corresponding PMT hash not needed as this private transaction doesn't
// exist
transaction,
protocolSpec.getMiningBeneficiaryCalculator().calculateBeneficiary(header),

@@ -14,8 +14,6 @@
*/
package org.hyperledger.besu.ethereum.privacy;

import static org.hyperledger.besu.datatypes.Constants.ZERO_32;

import org.hyperledger.besu.ethereum.processing.TransactionProcessingResult;
import org.hyperledger.besu.ethereum.rlp.BytesValueRLPOutput;
import org.hyperledger.besu.ethereum.rlp.RLPException;
@@ -36,7 +34,7 @@ public class VersionedPrivateTransaction {
privateTransaction,
result
.map(value -> Bytes32.fromHexStringLenient(value.getOutput().toHexString()))
.orElse(ZERO_32));
.orElse(Bytes32.ZERO));
}

public VersionedPrivateTransaction(

@@ -14,8 +14,6 @@
*/
package org.hyperledger.besu.ethereum.proof;

import static org.hyperledger.besu.datatypes.Constants.ZERO_32;

import org.hyperledger.besu.ethereum.rlp.RLP;
import org.hyperledger.besu.ethereum.trie.Proof;
import org.hyperledger.besu.ethereum.worldstate.StateTrieAccountValue;
@@ -27,7 +25,7 @@ import java.util.Optional;
import java.util.SortedMap;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class WorldStateProof {

@@ -35,12 +33,12 @@ public class WorldStateProof {

private final Proof<Bytes> accountProof;

private final Map<Bytes32, Proof<Bytes>> storageProofs;
private final Map<UInt256, Proof<Bytes>> storageProofs;

public WorldStateProof(
final StateTrieAccountValue stateTrieAccountValue,
final Proof<Bytes> accountProof,
final SortedMap<Bytes32, Proof<Bytes>> storageProofs) {
final SortedMap<UInt256, Proof<Bytes>> storageProofs) {
this.stateTrieAccountValue = stateTrieAccountValue;
this.accountProof = accountProof;
this.storageProofs = storageProofs;
@@ -54,20 +52,20 @@ public class WorldStateProof {
return accountProof.getProofRelatedNodes();
}

public List<Bytes32> getStorageKeys() {
public List<UInt256> getStorageKeys() {
return new ArrayList<>(storageProofs.keySet());
}

public Bytes32 getStorageValue(final Bytes32 key) {
public UInt256 getStorageValue(final UInt256 key) {
Optional<Bytes> value = storageProofs.get(key).getValue();
if (value.isEmpty()) {
return ZERO_32;
return UInt256.ZERO;
} else {
return RLP.input(value.get()).readBytes32Scalar();
return RLP.input(value.get()).readUInt256Scalar();
}
}

public List<Bytes> getStorageProof(final Bytes32 key) {
public List<Bytes> getStorageProof(final UInt256 key) {
return storageProofs.get(key).getProofRelatedNodes();
}
}

@@ -40,6 +40,7 @@ import java.util.function.Function;
import com.google.common.collect.Ordering;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class WorldStateProofProvider {

@@ -52,7 +53,7 @@ public class WorldStateProofProvider {
public Optional<WorldStateProof> getAccountProof(
final Hash worldStateRoot,
final Address accountAddress,
final List<Bytes32> accountStorageKeys) {
final List<UInt256> accountStorageKeys) {

if (!worldStateStorage.isWorldStateAvailable(worldStateRoot, null)) {
return Optional.empty();
@@ -67,20 +68,20 @@ public class WorldStateProofProvider {
.map(StateTrieAccountValue::readFrom)
.map(
account -> {
final SortedMap<Bytes32, Proof<Bytes>> storageProofs =
final SortedMap<UInt256, Proof<Bytes>> storageProofs =
getStorageProofs(accountHash, account, accountStorageKeys);
return new WorldStateProof(account, accountProof, storageProofs);
});
}
}

private SortedMap<Bytes32, Proof<Bytes>> getStorageProofs(
private SortedMap<UInt256, Proof<Bytes>> getStorageProofs(
final Hash accountHash,
final StateTrieAccountValue account,
final List<Bytes32> accountStorageKeys) {
final List<UInt256> accountStorageKeys) {
final MerklePatriciaTrie<Bytes32, Bytes> storageTrie =
newAccountStorageTrie(accountHash, account.getStorageRoot());
final NavigableMap<Bytes32, Proof<Bytes>> storageProofs = new TreeMap<>();
final NavigableMap<UInt256, Proof<Bytes>> storageProofs = new TreeMap<>();
accountStorageKeys.forEach(
key -> storageProofs.put(key, storageTrie.getValueWithProof(Hash.hash(key))));
return storageProofs;
@@ -113,7 +114,7 @@ public class WorldStateProofProvider {
final Bytes32 endKeyHash,
final Bytes32 rootHash,
final List<Bytes> proofs,
final NavigableMap<Bytes32, Bytes> keys) {
final TreeMap<Bytes32, Bytes> keys) {

// check if it's monotonic increasing
if (!Ordering.natural().isOrdered(keys.keySet())) {

@@ -23,6 +23,7 @@ import java.util.Optional;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class WorldStatePreimageKeyValueStorage implements WorldStatePreimageStorage {
private final KeyValueStorage keyValueStorage;
@@ -32,11 +33,12 @@ public class WorldStatePreimageKeyValueStorage implements WorldStatePreimageStor
}

@Override
public Optional<Bytes32> getStorageTrieKeyPreimage(final Bytes32 trieKey) {
public Optional<UInt256> getStorageTrieKeyPreimage(final Bytes32 trieKey) {
return keyValueStorage
.get(trieKey.toArrayUnsafe())
.filter(val -> val.length == Bytes32.SIZE)
.map(Bytes32::wrap);
.map(Bytes32::wrap)
.map(UInt256::fromBytes);
}

@Override
@@ -61,7 +63,7 @@ public class WorldStatePreimageKeyValueStorage implements WorldStatePreimageStor

@Override
public WorldStatePreimageStorage.Updater putStorageTrieKeyPreimage(
final Bytes32 trieKey, final Bytes32 preimage) {
final Bytes32 trieKey, final UInt256 preimage) {
transaction.put(trieKey.toArrayUnsafe(), preimage.toArrayUnsafe());
return this;
}

@@ -14,7 +14,7 @@
*/
package org.hyperledger.besu.ethereum.vm;

import static org.hyperledger.besu.datatypes.Hash.ZERO_HASH;
import static org.hyperledger.besu.datatypes.Hash.ZERO;

import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.chain.Blockchain;
@@ -57,6 +57,6 @@ public class BlockHashLookup implements Function<Long, Hash> {
hashByNumber.put(searchStartHeader.getNumber() - 1, searchStartHeader.getParentHash());
}
}
return hashByNumber.getOrDefault(blockNumber, ZERO_HASH);
return hashByNumber.getOrDefault(blockNumber, ZERO);
}
}

@@ -37,6 +37,7 @@ import java.util.TreeMap;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class DebugOperationTracer implements OperationTracer {

@@ -74,7 +75,7 @@ public class DebugOperationTracer implements OperationTracer {
if (lastFrame != null) {
lastFrame.setGasRemainingPostExecution(gasRemaining);
}
final Optional<Map<Bytes32, Bytes32>> storage = captureStorage(frame);
final Optional<Map<UInt256, UInt256>> storage = captureStorage(frame);
final Optional<Map<Address, Wei>> maybeRefunds =
frame.getRefunds().isEmpty() ? Optional.empty() : Optional.of(frame.getRefunds());
lastFrame =
@@ -188,12 +189,12 @@ public class DebugOperationTracer implements OperationTracer {
});
}

private Optional<Map<Bytes32, Bytes32>> captureStorage(final MessageFrame frame) {
private Optional<Map<UInt256, UInt256>> captureStorage(final MessageFrame frame) {
if (!options.isStorageEnabled()) {
return Optional.empty();
}
try {
final Map<Bytes32, Bytes32> storageContents =
final Map<UInt256, UInt256> storageContents =
new TreeMap<>(
frame
.getWorldUpdater()

@@ -14,8 +14,6 @@
*/
package org.hyperledger.besu.ethereum.worldstate;

import static org.hyperledger.besu.datatypes.Constants.ZERO_32;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.Wei;
@@ -48,6 +46,7 @@ import java.util.stream.Stream;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

public class DefaultMutableWorldState implements MutableWorldState {

@@ -58,7 +57,7 @@ public class DefaultMutableWorldState implements MutableWorldState {
private final Map<Address, MerklePatriciaTrie<Bytes32, Bytes>> updatedStorageTries =
new HashMap<>();
private final Map<Address, Bytes> updatedAccountCode = new HashMap<>();
private final Map<Bytes32, Bytes32> newStorageKeyPreimages = new HashMap<>();
private final Map<Bytes32, UInt256> newStorageKeyPreimages = new HashMap<>();
private final Map<Bytes32, Address> newAccountKeyPreimages = new HashMap<>();

public DefaultMutableWorldState(
@@ -204,16 +203,16 @@ public class DefaultMutableWorldState implements MutableWorldState {
stateUpdater.commit();
}

private Optional<Bytes32> getStorageTrieKeyPreimage(final Bytes32 trieKey) {
private Optional<UInt256> getStorageTrieKeyPreimage(final Bytes32 trieKey) {
return Optional.ofNullable(newStorageKeyPreimages.get(trieKey))
.or(() -> preimageStorage.getStorageTrieKeyPreimage(trieKey));
}

private static Bytes32 convertToBytes32(final Bytes value) {
private static UInt256 convertToUInt256(final Bytes value) {
// TODO: we could probably have an optimized method to decode a single scalar since it's used
// pretty often.
final RLPInput in = RLP.input(value);
return Bytes32.leftPad(in.readBytes());
return in.readUInt256Scalar();
}

private Optional<Address> getAccountTrieKeyPreimage(final Bytes32 trieKey) {
@@ -300,15 +299,15 @@ public class DefaultMutableWorldState implements MutableWorldState {
}

@Override
public Bytes32 getStorageValue(final Bytes32 key) {
public UInt256 getStorageValue(final UInt256 key) {
return storageTrie()
.get(Hash.hash(key))
.map(DefaultMutableWorldState::convertToBytes32)
.orElse(ZERO_32);
.map(DefaultMutableWorldState::convertToUInt256)
.orElse(UInt256.ZERO);
}

@Override
public Bytes32 getOriginalStorageValue(final Bytes32 key) {
public UInt256 getOriginalStorageValue(final UInt256 key) {
return getStorageValue(key);
}

@@ -322,7 +321,7 @@ public class DefaultMutableWorldState implements MutableWorldState {
(key, value) -> {
final AccountStorageEntry entry =
AccountStorageEntry.create(
convertToBytes32(value), key, getStorageTrieKeyPreimage(key));
convertToUInt256(value), key, getStorageTrieKeyPreimage(key));
storageEntries.put(key, entry);
});
return storageEntries;
@@ -401,7 +400,7 @@ public class DefaultMutableWorldState implements MutableWorldState {
if (freshState) {
wrapped.updatedStorageTries.remove(updated.getAddress());
}
final Map<Bytes32, Bytes32> updatedStorage = updated.getUpdatedStorage();
final Map<UInt256, UInt256> updatedStorage = updated.getUpdatedStorage();
if (!updatedStorage.isEmpty()) {
// Apply any storage updates
final MerklePatriciaTrie<Bytes32, Bytes> storageTrie =
@@ -409,21 +408,21 @@ public class DefaultMutableWorldState implements MutableWorldState {
? wrapped.newAccountStorageTrie(Hash.EMPTY_TRIE_HASH)
: origin.storageTrie();
wrapped.updatedStorageTries.put(updated.getAddress(), storageTrie);
final TreeSet<Map.Entry<Bytes32, Bytes32>> entries =
final TreeSet<Map.Entry<UInt256, UInt256>> entries =
new TreeSet<>(
Comparator.comparing(
(Function<Map.Entry<Bytes32, Bytes32>, Bytes32>) Map.Entry::getKey));
(Function<Map.Entry<UInt256, UInt256>, UInt256>) Map.Entry::getKey));
entries.addAll(updatedStorage.entrySet());

for (final Map.Entry<Bytes32, Bytes32> entry : entries) {
final Bytes32 value = entry.getValue();
for (final Map.Entry<UInt256, UInt256> entry : entries) {
final UInt256 value = entry.getValue();
final Hash keyHash = Hash.hash(entry.getKey());
if (value.isZero()) {
storageTrie.remove(keyHash);
} else {
wrapped.newStorageKeyPreimages.put(keyHash, entry.getKey());
storageTrie.put(
keyHash, RLP.encode(out -> out.writeBytes(entry.getValue().trimLeadingZeros())));
keyHash, RLP.encode(out -> out.writeBytes(entry.getValue().toMinimalBytes())));
}
}
storageRoot = Hash.wrap(storageTrie.getRootHash());

Some files were not shown because too many files have changed in this diff.