mirror of https://github.com/vacp2p/linea-besu.git (synced 2026-01-08 20:47:59 -05:00)
Merge branch 'main' into zkbesu
# Conflicts:
#	.github/workflows/release.yml
.github/ISSUE_TEMPLATE/release-checklist.md (new file, vendored, 35 lines)
@@ -0,0 +1,35 @@
---
name: Release Checklist
about: items to be completed for each release
title: ''
labels: ''
assignees: ''

---

- [ ] Confirm anything outstanding for release with other maintainers on #besu-release in Discord
- [ ] Notify maintainers about updating changelog for in-flight PRs
- [ ] Update changelog if necessary, and merge a PR for it to main
- [ ] Optional: for hotfixes, create a release branch and cherry-pick, e.g. `release-<version>-hotfix`
- [ ] Optional: create a PR into main from the hotfix branch to see the CI checks pass
- [ ] On the appropriate branch/commit, create a calver tag for the release candidate, format example: `24.4.0-RC2`
- [ ] Sign-off with team; confirm tag is correct in #besu-release in Discord
- [ ] Consensys staff start burn-in using the proposed release <version-RCX> tag
- [ ] Sign off burn-in; convey burn-in results in #besu-release in Discord
- [ ] Using the same git sha, create a calver tag for the FULL RELEASE, example format `24.4.0`
- [ ] Using the FULL RELEASE tag, create a release in github to trigger the workflows. Once published:
  - makes the release "latest" in github
  - this is now public and notifies subscribed users
  - publishes artefacts and version-specific docker tags
  - publishes the docker `latest` tag variants
- [ ] Draft homebrew PR
- [ ] Draft documentation release
- [ ] Ensure binary SHAs are correct on the release page
- [ ] Docker release startup test:
  - `docker run hyperledger/besu:<version>`
  - `docker run hyperledger/besu:<version>-arm64`
  - `docker run --platform linux/amd64 hyperledger/besu:<version>-amd64`
  - `docker run --pull=always hyperledger/besu:latest` (check version is <version>)
- [ ] Merge homebrew PR
- [ ] Publish Docs Release
- [ ] Social announcements
.github/workflows/container-security-scan.yml (new file, vendored, 44 lines)
@@ -0,0 +1,44 @@
name: container security scan

on:
  workflow_dispatch:
    inputs:
      tag:
        description: 'Container image tag'
        required: false
        default: 'develop'
  schedule:
    # Start of the hour is the busy time. Schedule it to run at 8:17am UTC
    - cron: '17 8 * * *'

jobs:
  scan-sarif:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11

      # Shell parameter expansion is not supported directly in a step.
      # Adding a separate step to set the image tag allows running
      # this workflow on a schedule as well as manually.
      - name: Set image tag
        id: tag
        run: |
          echo "TAG=${INPUT_TAG:-develop}" >> "$GITHUB_OUTPUT"
        env:
          INPUT_TAG: ${{ inputs.tag }}

      - name: Vulnerability scanner
        id: trivy
        uses: aquasecurity/trivy-action@595be6a0f6560a0a8fc419ddf630567fc623531d
        with:
          image-ref: hyperledger/besu:${{ steps.tag.outputs.TAG }}
          format: sarif
          output: 'trivy-results.sarif'

      # Check the vulnerabilities via the GitHub Security tab
      - name: Upload results
        uses: github/codeql-action/upload-sarif@23acc5c183826b7a8a97bce3cecc52db901f8251
        with:
          sarif_file: 'trivy-results.sarif'
CHANGELOG.md (26 lines changed)
@@ -1,5 +1,21 @@
# Changelog

## Next Release

### Breaking Changes

### Additions and Improvements
- Support for eth_maxPriorityFeePerGas [#5658](https://github.com/hyperledger/besu/issues/5658)
- Improve genesis state performance at startup [#6977](https://github.com/hyperledger/besu/pull/6977)
- Enable continuous profiling with default setting [#7006](https://github.com/hyperledger/besu/pull/7006)
- A full and up to date implementation of EOF for Prague [#7169](https://github.com/hyperledger/besu/pull/7169)
- Add Subnet-Based Peer Permissions. [#7168](https://github.com/hyperledger/besu/pull/7168)
- Reduce lock contention on transaction pool when building a block [#7180](https://github.com/hyperledger/besu/pull/7180)

### Bug fixes
- Validation errors ignored in accounts-allowlist and empty list [#7138](https://github.com/hyperledger/besu/issues/7138)
- Fix "Invalid block detected" for BFT chains using Bonsai DB [#7204](https://github.com/hyperledger/besu/pull/7204)

## 24.6.0

### Breaking Changes
@@ -13,7 +29,6 @@
- PKI-backed QBFT will be removed in a future version of Besu. Other forms of QBFT will remain unchanged.
- --Xbonsai-limit-trie-logs-enabled is deprecated, use --bonsai-limit-trie-logs-enabled instead
- --Xbonsai-trie-logs-pruning-window-size is deprecated, use --bonsai-trie-logs-pruning-window-size instead
- Receipt compaction will be enabled by default in a future version of Besu. After this change it will not be possible to downgrade to the previous Besu version.

### Additions and Improvements
- Add two counters to DefaultBlockchain in order to be able to calculate TPS and Mgas/s [#7105](https://github.com/hyperledger/besu/pull/7105)
@@ -22,10 +37,15 @@
- Promote experimental --Xbonsai-trie-logs-pruning-window-size to production-ready, --bonsai-trie-logs-pruning-window-size [#7192](https://github.com/hyperledger/besu/pull/7192)
- `admin_nodeInfo` JSON/RPC call returns the currently active EVM version [#7127](https://github.com/hyperledger/besu/pull/7127)
- Improve the selection of the most profitable built block [#7174](https://github.com/hyperledger/besu/pull/7174)
- Support for eth_maxPriorityFeePerGas [#5658](https://github.com/hyperledger/besu/issues/5658)

### Bug fixes
- Make `eth_gasPrice` aware of the base fee market [#7102](https://github.com/hyperledger/besu/pull/7102)
- Validation errors ignored in accounts-allowlist and empty list [#7138](https://github.com/hyperledger/besu/issues/7138)

### Download Links
https://github.com/hyperledger/besu/releases/tag/24.6.0
https://github.com/hyperledger/besu/releases/download/24.6.0/besu-24.6.0.tar.gz / sha256 fa86e5c6873718cd568e3326151ce06957a5e7546b52df79a831ea9e39b857ab
https://github.com/hyperledger/besu/releases/download/24.6.0/besu-24.6.0.zip / sha256 8b2d3a674cd7ead68b9ca68fea21e46d5ec9b278bbadc73f8c13c6a1e1bc0e4d

## 24.5.2

### Upcoming Breaking Changes
@@ -17,6 +17,7 @@ package org.hyperledger.besu.tests.acceptance.permissioning;
import org.hyperledger.besu.tests.acceptance.dsl.node.Node;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

public class NodeSmartContractPermissioningOutOfSyncAcceptanceTest
@@ -42,6 +43,7 @@ public class NodeSmartContractPermissioningOutOfSyncAcceptanceTest
  }

  @Test
  @Disabled("test is flaky #7108")
  public void addNodeToClusterAndVerifyNonBootNodePeerConnectionWorksAfterSync() {
    final long blockchainHeight = 25L;
    waitForBlockHeight(permissionedNodeA, blockchainHeight);

@@ -25,8 +25,10 @@ import java.util.List;
import javax.annotation.Nonnull;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

@Disabled("flaky test #7155")
public class NodesSmartContractPermissioningStaticNodesAcceptanceTest
    extends NodeSmartContractPermissioningAcceptanceTestBase {

@@ -30,7 +30,7 @@
  "withdrawalRequests": [
    {
      "sourceAddress": "0xa4664c40aacebd82a2db79f0ea36c06bc6a19adb",
      "validatorPublicKey": "0xb10a4a15bf67b328c9b101d09e5c6ee6672978fdad9ef0d9e2ceffaee99223555d8601f0cb3bcc4ce1af9864779a416e",
      "validatorPubkey": "0xb10a4a15bf67b328c9b101d09e5c6ee6672978fdad9ef0d9e2ceffaee99223555d8601f0cb3bcc4ce1af9864779a416e",
      "amount": "0x0"
    }
  ],

@@ -32,7 +32,7 @@
    {
      "sourceAddress": "0xa4664c40aacebd82a2db79f0ea36c06bc6a19adb",
      "amount": "0x0",
      "validatorPublicKey": "0xb10a4a15bf67b328c9b101d09e5c6ee6672978fdad9ef0d9e2ceffaee99223555d8601f0cb3bcc4ce1af9864779a416e"
      "validatorPubkey": "0xb10a4a15bf67b328c9b101d09e5c6ee6672978fdad9ef0d9e2ceffaee99223555d8601f0cb3bcc4ce1af9864779a416e"
    }
  ],
  "blockNumber": "0x2",

@@ -33,7 +33,7 @@
    {
      "sourceAddress": "0x23618e81e3f5cdf7f54c3d65f7fbc0abf5b21e8f",
      "amount": "0x0",
      "validatorPublicKey": "0x8706d19a62f28a6a6549f96c5adaebac9124a61d44868ec94f6d2d707c6a2f82c9162071231dfeb40e24bfde4ffdf243"
      "validatorPubkey": "0x8706d19a62f28a6a6549f96c5adaebac9124a61d44868ec94f6d2d707c6a2f82c9162071231dfeb40e24bfde4ffdf243"
    }
  ],
  "blockNumber": "0x4",

@@ -80,6 +80,7 @@ dependencies {
  implementation 'org.xerial.snappy:snappy-java'
  implementation 'tech.pegasys:jc-kzg-4844'
  implementation 'org.rocksdb:rocksdbjni'
  implementation 'commons-net:commons-net'

  runtimeOnly 'org.apache.logging.log4j:log4j-jul'
  runtimeOnly 'com.splunk.logging:splunk-library-javalogging'

@@ -89,6 +89,7 @@ import org.hyperledger.besu.ethereum.p2p.network.P2PNetwork;
import org.hyperledger.besu.ethereum.p2p.network.ProtocolManager;
import org.hyperledger.besu.ethereum.p2p.peers.DefaultPeer;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeDnsConfiguration;
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissionSubnet;
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissions;
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissionsDenylist;
import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration;
@@ -147,6 +148,7 @@ import com.google.common.base.Strings;
import graphql.GraphQL;
import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.units.bigints.UInt256;
import org.slf4j.Logger;
@@ -194,6 +196,7 @@ public class RunnerBuilder {
  private JsonRpcIpcConfiguration jsonRpcIpcConfiguration;
  private boolean legacyForkIdEnabled;
  private Optional<EnodeDnsConfiguration> enodeDnsConfiguration;
  private List<SubnetInfo> allowedSubnets = new ArrayList<>();

  /** Instantiates a new Runner builder. */
  public RunnerBuilder() {}
@@ -603,6 +606,17 @@ public class RunnerBuilder {
    return this;
  }

  /**
   * Add subnet configuration
   *
   * @param allowedSubnets the allowedSubnets
   * @return the runner builder
   */
  public RunnerBuilder allowedSubnets(final List<SubnetInfo> allowedSubnets) {
    this.allowedSubnets = allowedSubnets;
    return this;
  }

  /**
   * Build Runner instance.
   *
@@ -662,6 +676,10 @@ public class RunnerBuilder {
    final PeerPermissionsDenylist bannedNodes = PeerPermissionsDenylist.create();
    bannedNodeIds.forEach(bannedNodes::add);

    PeerPermissionSubnet peerPermissionSubnet = new PeerPermissionSubnet(allowedSubnets);
    final PeerPermissions defaultPeerPermissions =
        PeerPermissions.combine(peerPermissionSubnet, bannedNodes);

    final List<EnodeURL> bootnodes = discoveryConfiguration.getBootnodes();

    final Synchronizer synchronizer = besuController.getSynchronizer();
@@ -681,8 +699,8 @@ public class RunnerBuilder {
    final PeerPermissions peerPermissions =
        nodePermissioningController
            .map(nodePC -> new PeerPermissionsAdapter(nodePC, bootnodes, context.getBlockchain()))
            .map(nodePerms -> PeerPermissions.combine(nodePerms, bannedNodes))
            .orElse(bannedNodes);
            .map(nodePerms -> PeerPermissions.combine(nodePerms, defaultPeerPermissions))
            .orElse(defaultPeerPermissions);

    LOG.info("Detecting NAT service.");
    final boolean fallbackEnabled = natMethod == NatMethod.AUTO || natMethodFallbackEnabled;
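Read together, the two hunks above mean the subnet restriction is always part of the baseline permissions: it is combined with the ban list, and any optional node-permissioning controller is layered on top of that combination instead of on the ban list alone. A minimal sketch of that composition, using only calls that appear in this diff (the surrounding discovery and controller wiring is omitted, and the example subnet is illustrative):

```java
import java.util.List;

import org.apache.commons.net.util.SubnetUtils;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissionSubnet;
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissions;
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissionsDenylist;

// Normally populated from --net-restrict via RunnerBuilder#allowedSubnets.
final List<SubnetInfo> allowedSubnets = List.of(new SubnetUtils("10.0.0.0/8").getInfo());

final PeerPermissionsDenylist bannedNodes = PeerPermissionsDenylist.create();
final PeerPermissionSubnet peerPermissionSubnet = new PeerPermissionSubnet(allowedSubnets);

// The combined permissions consult both checks: a peer has to sit inside an
// allowed subnet and must not be on the denylist.
final PeerPermissions defaultPeerPermissions =
    PeerPermissions.combine(peerPermissionSubnet, bannedNodes);
```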
@@ -43,6 +43,7 @@ import org.hyperledger.besu.cli.config.NetworkName;
import org.hyperledger.besu.cli.config.ProfileName;
import org.hyperledger.besu.cli.converter.MetricCategoryConverter;
import org.hyperledger.besu.cli.converter.PercentageConverter;
import org.hyperledger.besu.cli.converter.SubnetInfoConverter;
import org.hyperledger.besu.cli.custom.JsonRPCAllowlistHostsProperty;
import org.hyperledger.besu.cli.error.BesuExecutionExceptionHandler;
import org.hyperledger.besu.cli.error.BesuParameterExceptionHandler;
@@ -245,6 +246,7 @@ import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.json.DecodeException;
import io.vertx.core.metrics.MetricsOptions;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.units.bigints.UInt256;
import org.slf4j.Logger;
@@ -530,6 +532,15 @@ public class BesuCommand implements DefaultCommandValues, Runnable {

      return autoDiscoveredDefaultIP;
    }

    @Option(
        names = {"--net-restrict"},
        arity = "1..*",
        split = ",",
        converter = SubnetInfoConverter.class,
        description =
            "Comma-separated list of allowed IP subnets (e.g., '192.168.1.0/24,10.0.0.0/8').")
    private List<SubnetInfo> allowedSubnets;
  }

  @Option(
@@ -1518,7 +1529,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
    }

    if (genesisConfigOptionsSupplier.get().getCancunTime().isPresent()
        || genesisConfigOptionsSupplier.get().getPragueTime().isPresent()) {
        || genesisConfigOptionsSupplier.get().getPragueTime().isPresent()
        || genesisConfigOptionsSupplier.get().getPragueEOFTime().isPresent()) {
      if (kzgTrustedSetupFile != null) {
        KZGPointEvalPrecompiledContract.init(kzgTrustedSetupFile);
      } else {
@@ -2336,6 +2348,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
            .storageProvider(keyValueStorageProvider(keyValueStorageName))
            .rpcEndpointService(rpcEndpointServiceImpl)
            .enodeDnsConfiguration(getEnodeDnsConfiguration())
            .allowedSubnets(p2PDiscoveryOptionGroup.allowedSubnets)
            .build();

    addShutdownHook(runner);

@@ -0,0 +1,36 @@
/*
 * Copyright contributors to Hyperledger Besu.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
package org.hyperledger.besu.cli.converter;

import org.apache.commons.net.util.SubnetUtils;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import picocli.CommandLine;

/** The SubnetInfo converter for CLI options. */
public class SubnetInfoConverter implements CommandLine.ITypeConverter<SubnetInfo> {
  /** Default Constructor. */
  public SubnetInfoConverter() {}

  /**
   * Converts an IP address with CIDR notation into SubnetInfo
   *
   * @param value The IP address with CIDR notation.
   * @return the SubnetInfo
   */
  @Override
  public SubnetInfo convert(final String value) {
    return new SubnetUtils(value).getInfo();
  }
}
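The converter itself is a thin wrapper over commons-net: each comma-separated value of `--net-restrict` becomes a `SubnetInfo` that can later answer range checks. A small usage sketch (the sample addresses are illustrative):

```java
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.hyperledger.besu.cli.converter.SubnetInfoConverter;

// What picocli does for each element of "--net-restrict 192.168.1.0/24,10.0.0.0/8":
SubnetInfo subnet = new SubnetInfoConverter().convert("192.168.1.0/24");

System.out.println(subnet.getCidrSignature());        // 192.168.1.0/24
System.out.println(subnet.isInRange("192.168.1.42")); // true  - inside the allowed range
System.out.println(subnet.isInRange("10.1.2.3"));     // false - outside the allowed range
```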
@@ -41,6 +41,7 @@ import org.hyperledger.besu.ethereum.chain.DefaultBlockchain;
import org.hyperledger.besu.ethereum.chain.GenesisState;
import org.hyperledger.besu.ethereum.chain.MutableBlockchain;
import org.hyperledger.besu.ethereum.chain.VariablesStorage;
import org.hyperledger.besu.ethereum.core.BlockHeader;
import org.hyperledger.besu.ethereum.core.Difficulty;
import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.hyperledger.besu.ethereum.core.PrivacyParameters;
@@ -552,30 +553,9 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
    prepForBuild();

    final ProtocolSchedule protocolSchedule = createProtocolSchedule();
    final GenesisState genesisState;

    final VariablesStorage variablesStorage = storageProvider.createVariablesStorage();

    Optional<Hash> genesisStateHash = Optional.empty();
    if (variablesStorage != null && this.genesisStateHashCacheEnabled) {
      genesisStateHash = variablesStorage.getGenesisStateHash();
    }

    if (genesisStateHash.isPresent()) {
      genesisState =
          GenesisState.fromConfig(genesisStateHash.get(), genesisConfigFile, protocolSchedule);
    } else {
      genesisState =
          GenesisState.fromConfig(dataStorageConfiguration, genesisConfigFile, protocolSchedule);
      if (variablesStorage != null) {
        VariablesStorage.Updater updater = variablesStorage.updater();
        if (updater != null) {
          updater.setGenesisStateHash(genesisState.getBlock().getHeader().getStateRoot());
          updater.commit();
        }
      }
    }

    final WorldStateStorageCoordinator worldStateStorageCoordinator =
        storageProvider.createWorldStateStorageCoordinator(dataStorageConfiguration);

@@ -583,6 +563,13 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
        storageProvider.createBlockchainStorage(
            protocolSchedule, variablesStorage, dataStorageConfiguration);

    final var maybeStoredGenesisBlockHash = blockchainStorage.getBlockHash(0L);

    final var genesisState =
        getGenesisState(
            maybeStoredGenesisBlockHash.flatMap(blockchainStorage::getBlockHeader),
            protocolSchedule);

    final MutableBlockchain blockchain =
        DefaultBlockchain.createMutable(
            genesisState.getBlock(),
@@ -591,7 +578,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
            reorgLoggingThreshold,
            dataDirectory.toString(),
            numberOfBlocksToCache);

    final BonsaiCachedMerkleTrieLoader bonsaiCachedMerkleTrieLoader =
        besuComponent
            .map(BesuComponent::getCachedMerkleTrieLoader)
@@ -601,7 +587,7 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
        createWorldStateArchive(
            worldStateStorageCoordinator, blockchain, bonsaiCachedMerkleTrieLoader);

    if (blockchain.getChainHeadBlockNumber() < 1) {
    if (maybeStoredGenesisBlockHash.isEmpty()) {
      genesisState.writeStateTo(worldStateArchive.getMutable());
    }

@@ -772,6 +758,24 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
        dataStorageConfiguration);
  }

  private GenesisState getGenesisState(
      final Optional<BlockHeader> maybeGenesisBlockHeader,
      final ProtocolSchedule protocolSchedule) {
    final Optional<Hash> maybeGenesisStateRoot =
        genesisStateHashCacheEnabled
            ? maybeGenesisBlockHeader.map(BlockHeader::getStateRoot)
            : Optional.empty();

    return maybeGenesisStateRoot
        .map(
            genesisStateRoot ->
                GenesisState.fromStorage(genesisStateRoot, genesisConfigFile, protocolSchedule))
        .orElseGet(
            () ->
                GenesisState.fromConfig(
                    dataStorageConfiguration, genesisConfigFile, protocolSchedule));
  }

  private TrieLogPruner createTrieLogPruner(
      final WorldStateKeyValueStorage worldStateStorage,
      final Blockchain blockchain,
@@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.hyperledger.besu.cli.config.EthNetworkConfig;
import org.hyperledger.besu.cli.config.NetworkName;
import org.hyperledger.besu.config.GenesisConfigFile;
import org.hyperledger.besu.config.GenesisConfigOptions;
@@ -138,7 +137,7 @@ public class ForkIdsNetworkConfigTest {
  @MethodSource("parameters")
  public void testForkId(final NetworkName chainName, final List<ForkId> expectedForkIds) {
    final GenesisConfigFile genesisConfigFile =
        GenesisConfigFile.fromConfig(EthNetworkConfig.jsonConfig(chainName));
        GenesisConfigFile.fromResource(chainName.getGenesisFile());
    final MilestoneStreamingTransitionProtocolSchedule schedule = createSchedule(genesisConfigFile);
    final GenesisState genesisState = GenesisState.fromConfig(genesisConfigFile, schedule);
    final Blockchain mockBlockchain = mock(Blockchain.class);

@@ -1217,6 +1217,28 @@ public class BesuCommandTest extends CommandTestAbstract {
        .contains("Invalid value for option '--fast-sync-min-peers': 'ten' is not an int");
  }

  @Test
  public void netRestrictParsedCorrectly() {
    final String subnet1 = "127.0.0.1/24";
    final String subnet2 = "10.0.0.1/24";
    parseCommand("--net-restrict", String.join(",", subnet1, subnet2));
    verify(mockRunnerBuilder).allowedSubnets(allowedSubnetsArgumentCaptor.capture());
    assertThat(allowedSubnetsArgumentCaptor.getValue().size()).isEqualTo(2);
    assertThat(allowedSubnetsArgumentCaptor.getValue().get(0).getCidrSignature())
        .isEqualTo(subnet1);
    assertThat(allowedSubnetsArgumentCaptor.getValue().get(1).getCidrSignature())
        .isEqualTo(subnet2);
  }

  @Test
  public void netRestrictInvalidShouldFail() {
    final String subnet = "127.0.0.1/abc";
    parseCommand("--net-restrict", subnet);
    Mockito.verifyNoInteractions(mockRunnerBuilder);
    assertThat(commandErrorOutput.toString(UTF_8))
        .contains("Invalid value for option '--net-restrict'");
  }

  @Test
  public void ethStatsOptionIsParsedCorrectly() {
    final String url = "besu-node:secret@host:443";

@@ -118,6 +118,7 @@ import io.opentelemetry.api.GlobalOpenTelemetry;
import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.json.JsonObject;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
@@ -262,6 +263,7 @@ public abstract class CommandTestAbstract {
  @Captor protected ArgumentCaptor<ApiConfiguration> apiConfigurationCaptor;

  @Captor protected ArgumentCaptor<EthstatsOptions> ethstatsOptionsArgumentCaptor;
  @Captor protected ArgumentCaptor<List<SubnetInfo>> allowedSubnetsArgumentCaptor;

  @BeforeEach
  public void initMocks() throws Exception {
@@ -356,6 +358,7 @@ public abstract class CommandTestAbstract {
    when(mockRunnerBuilder.legacyForkId(anyBoolean())).thenReturn(mockRunnerBuilder);
    when(mockRunnerBuilder.apiConfiguration(any())).thenReturn(mockRunnerBuilder);
    when(mockRunnerBuilder.enodeDnsConfiguration(any())).thenReturn(mockRunnerBuilder);
    when(mockRunnerBuilder.allowedSubnets(any())).thenReturn(mockRunnerBuilder);
    when(mockRunnerBuilder.build()).thenReturn(mockRunner);

    final SignatureAlgorithm signatureAlgorithm = SignatureAlgorithmFactory.getInstance();

@@ -0,0 +1,59 @@
/*
 * Copyright contributors to Hyperledger Besu.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
package org.hyperledger.besu.cli.converter;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.junit.jupiter.api.Test;

public class SubnetInfoConverterTest {

  @Test
  void testCreateIpRestrictionHandlerWithValidSubnets() {
    String subnet = "192.168.1.0/24";
    assertThat(parseSubnetRules(subnet).getCidrSignature()).isEqualTo(subnet);
  }

  @Test
  void testCreateIpRestrictionHandlerWithInvalidSubnet() {
    assertThrows(IllegalArgumentException.class, () -> parseSubnetRules("abc"));
  }

  @Test
  void testCreateIpRestrictionHandlerMissingCIDR() {
    assertThrows(IllegalArgumentException.class, () -> parseSubnetRules("192.168.1.0"));
  }

  @Test
  void testCreateIpRestrictionHandlerBigCIDR() {
    assertThrows(IllegalArgumentException.class, () -> parseSubnetRules("192.168.1.0:25"));
  }

  @Test
  void testCreateIpRestrictionHandlerWithInvalidCIDR() {
    assertThrows(IllegalArgumentException.class, () -> parseSubnetRules("192.168.1.0/abc"));
  }

  @Test
  void testCreateIpRestrictionHandlerWithEmptyString() {
    assertThrows(IllegalArgumentException.class, () -> parseSubnetRules(""));
  }

  private SubnetInfo parseSubnetRules(final String subnet) {
    return new SubnetInfoConverter().convert(subnet);
  }
}
@@ -51,6 +51,7 @@ engine-jwt-disabled=true
engine-jwt-secret="/tmp/jwt.hex"
required-blocks=["8675309=123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"]
discovery-dns-url="enrtree://AM5FCQLWIZX2QFPNJAP7VUERCCRNGRHWZG3YYHIUV7BVDQ5FDPRT2@nodes.example.org"
net-restrict=["none"]

# chain
network="MAINNET"

@@ -612,7 +612,9 @@ startScripts {
  defaultJvmOpts = applicationDefaultJvmArgs + [
    "-XX:G1ConcRefinementThreads=2",
    "-XX:G1HeapWastePercent=15",
    "-XX:MaxGCPauseMillis=100"
    "-XX:MaxGCPauseMillis=100",
    "-XX:StartFlightRecording,dumponexit=true,settings=default.jfc",
    "-Xlog:jfr*=off"
  ]
  unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/unixStartScript.txt")
  windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/windowsStartScript.txt")

@@ -0,0 +1,42 @@
/*
 * Copyright contributors to Hyperledger Besu.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
package org.hyperledger.besu.config;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Wei;

import java.util.Map;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

/**
 * Genesis account
 *
 * @param address of the account
 * @param nonce nonce of the account at genesis
 * @param balance balance of the account at genesis
 * @param code code of the account at genesis, can be null
 * @param storage storage of the account at genesis
 * @param privateKey of the account, only use for testing
 */
public record GenesisAccount(
    Address address,
    long nonce,
    Wei balance,
    Bytes code,
    Map<UInt256, UInt256> storage,
    Bytes32 privateKey) {}
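`GenesisAccount` replaces the lazily parsed, string-based `GenesisAllocation` (deleted below) with eagerly typed values. Purely as an illustration of how one `alloc` entry maps onto the record components (the address is the dev-genesis account exercised in the tests later in this diff; the other values are made up):

```java
import java.util.Map;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Wei;

GenesisAccount account =
    new GenesisAccount(
        Address.fromHexString("0xfe3b557e8fb62b89f4916b721be55ceb828dbd73"),
        0L,           // nonce
        Wei.of(1000), // balance
        null,         // code: null for a plain externally owned account
        Map.of(),     // storage
        null);        // privateKey: test-only, normally absent

System.out.println(account.address() + " balance=" + account.balance());
```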
@@ -1,109 +0,0 @@
/*
 * Copyright ConsenSys AG.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
package org.hyperledger.besu.config;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import com.fasterxml.jackson.databind.node.ObjectNode;

/** The Genesis allocation configuration. */
public class GenesisAllocation {
  private final String address;
  private final ObjectNode data;

  /**
   * Instantiates a new Genesis allocation.
   *
   * @param address the address
   * @param data the data
   */
  GenesisAllocation(final String address, final ObjectNode data) {
    this.address = address;
    this.data = data;
  }

  /**
   * Gets address.
   *
   * @return the address
   */
  public String getAddress() {
    return address;
  }

  /**
   * Gets private key.
   *
   * @return the private key
   */
  public Optional<String> getPrivateKey() {
    return Optional.ofNullable(JsonUtil.getString(data, "privatekey", null));
  }

  /**
   * Gets balance.
   *
   * @return the balance
   */
  public String getBalance() {
    return JsonUtil.getValueAsString(data, "balance", "0");
  }

  /**
   * Gets code.
   *
   * @return the code
   */
  public String getCode() {
    return JsonUtil.getString(data, "code", null);
  }

  /**
   * Gets nonce.
   *
   * @return the nonce
   */
  public String getNonce() {
    return JsonUtil.getValueAsString(data, "nonce", "0");
  }

  /**
   * Gets version.
   *
   * @return the version
   */
  public String getVersion() {
    return JsonUtil.getValueAsString(data, "version", null);
  }

  /**
   * Gets storage map.
   *
   * @return fields under storage as a map
   */
  public Map<String, String> getStorage() {
    final Map<String, String> map = new HashMap<>();
    JsonUtil.getObjectNode(data, "storage")
        .orElse(JsonUtil.createEmptyObjectNode())
        .fields()
        .forEachRemaining(
            (entry) -> {
              map.put(entry.getKey(), entry.getValue().asText());
            });
    return map;
  }
}
@@ -14,8 +14,6 @@
 */
package org.hyperledger.besu.config;

import static org.hyperledger.besu.config.JsonUtil.normalizeKeys;

import org.hyperledger.besu.datatypes.Wei;

import java.net.URL;
@@ -30,22 +28,23 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Streams;

/** The Genesis config file. */
public class GenesisConfigFile {

  /** The constant DEFAULT. */
  public static final GenesisConfigFile DEFAULT =
      new GenesisConfigFile(JsonUtil.createEmptyObjectNode());
      new GenesisConfigFile(new GenesisReader.FromObjectNode(JsonUtil.createEmptyObjectNode()));

  /** The constant BASEFEE_AT_GENESIS_DEFAULT_VALUE. */
  public static final Wei BASEFEE_AT_GENESIS_DEFAULT_VALUE = Wei.of(1_000_000_000L);

  private final GenesisReader loader;
  private final ObjectNode genesisRoot;

  private GenesisConfigFile(final ObjectNode config) {
    this.genesisRoot = config;
  private GenesisConfigFile(final GenesisReader loader) {
    this.loader = loader;
    this.genesisRoot = loader.getRoot();
  }

  /**
@@ -70,21 +69,31 @@ public class GenesisConfigFile {
  /**
   * Genesis file from resource.
   *
   * @param jsonResource the resource name
   * @param resourceName the resource name
   * @return the genesis config file
   */
  public static GenesisConfigFile fromResource(final String jsonResource) {
    return fromSource(GenesisConfigFile.class.getResource(jsonResource));
  public static GenesisConfigFile fromResource(final String resourceName) {
    return fromConfig(GenesisConfigFile.class.getResource(resourceName));
  }

  /**
   * From config genesis config file.
   *
   * @param jsonString the json string
   * @param jsonSource the json string
   * @return the genesis config file
   */
  public static GenesisConfigFile fromConfig(final String jsonString) {
    return fromConfig(JsonUtil.objectNodeFromString(jsonString, false));
  public static GenesisConfigFile fromConfig(final URL jsonSource) {
    return new GenesisConfigFile(new GenesisReader.FromURL(jsonSource));
  }

  /**
   * From config genesis config file.
   *
   * @param json the json string
   * @return the genesis config file
   */
  public static GenesisConfigFile fromConfig(final String json) {
    return fromConfig(JsonUtil.objectNodeFromString(json, false));
  }

  /**
@@ -94,7 +103,7 @@ public class GenesisConfigFile {
   * @return the genesis config file
   */
  public static GenesisConfigFile fromConfig(final ObjectNode config) {
    return new GenesisConfigFile(normalizeKeys(config));
    return new GenesisConfigFile(new GenesisReader.FromObjectNode(config));
  }

  /**
@@ -113,8 +122,7 @@ public class GenesisConfigFile {
   * @return the config options
   */
  public GenesisConfigOptions getConfigOptions(final Map<String, String> overrides) {
    final ObjectNode config =
        JsonUtil.getObjectNode(genesisRoot, "config").orElse(JsonUtil.createEmptyObjectNode());
    final ObjectNode config = loader.getConfig();

    Map<String, String> overridesRef = overrides;

@@ -134,15 +142,8 @@ public class GenesisConfigFile {
   *
   * @return the stream
   */
  public Stream<GenesisAllocation> streamAllocations() {
    return JsonUtil.getObjectNode(genesisRoot, "alloc").stream()
        .flatMap(
            allocations ->
                Streams.stream(allocations.fieldNames())
                    .map(
                        key ->
                            new GenesisAllocation(
                                key, JsonUtil.getObjectNode(allocations, key).get())));
  public Stream<GenesisAccount> streamAllocations() {
    return loader.streamAllocations();
  }

  /**
@@ -344,7 +345,7 @@ public class GenesisConfigFile {
        + "genesisRoot="
        + genesisRoot
        + ", allocations="
        + streamAllocations().map(GenesisAllocation::toString).collect(Collectors.joining(","))
        + loader.streamAllocations().map(GenesisAccount::toString).collect(Collectors.joining(","))
        + '}';
  }
}
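From the caller's point of view the only visible change is that `streamAllocations()` now yields `GenesisAccount` records instead of `GenesisAllocation` wrappers. A self-contained usage sketch with an inline genesis (illustrative values):

```java
import org.hyperledger.besu.config.GenesisConfigFile;

GenesisConfigFile config =
    GenesisConfigFile.fromConfig(
        "{\"config\":{\"chainId\":1337},"
            + "\"alloc\":{\"fe3b557e8fb62b89f4916b721be55ceb828dbd73\":{\"balance\":\"1000\"}}}");

// Allocations are streamed as typed records; nothing is kept around as raw JSON.
config
    .streamAllocations()
    .forEach(account -> System.out.println(account.address() + " -> " + account.balance()));
```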
@@ -263,6 +263,13 @@ public interface GenesisConfigOptions {
   */
  OptionalLong getPragueTime();

  /**
   * Gets Prague EOF time.
   *
   * @return the prague time
   */
  OptionalLong getPragueEOFTime();

  /**
   * Gets future eips time.
   *

@@ -0,0 +1,242 @@
/*
 * Copyright contributors to Hyperledger Besu.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
package org.hyperledger.besu.config;

import static org.hyperledger.besu.config.JsonUtil.normalizeKey;
import static org.hyperledger.besu.config.JsonUtil.normalizeKeys;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Wei;

import java.io.IOException;
import java.math.BigInteger;
import java.net.URL;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Streams;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;

interface GenesisReader {
  String CONFIG_FIELD = "config";
  String ALLOCATION_FIELD = "alloc";

  ObjectNode getRoot();

  ObjectNode getConfig();

  Stream<GenesisAccount> streamAllocations();

  class FromObjectNode implements GenesisReader {
    private final ObjectNode allocations;
    private final ObjectNode rootWithoutAllocations;

    public FromObjectNode(final ObjectNode root) {
      final var removedAllocations = root.remove(ALLOCATION_FIELD);
      this.allocations =
          removedAllocations != null
              ? (ObjectNode) removedAllocations
              : JsonUtil.createEmptyObjectNode();
      this.rootWithoutAllocations = normalizeKeys(root);
    }

    @Override
    public ObjectNode getRoot() {
      return rootWithoutAllocations;
    }

    @Override
    public ObjectNode getConfig() {
      return JsonUtil.getObjectNode(rootWithoutAllocations, CONFIG_FIELD)
          .orElse(JsonUtil.createEmptyObjectNode());
    }

    @Override
    public Stream<GenesisAccount> streamAllocations() {
      return Streams.stream(allocations.fields())
          .map(
              entry -> {
                final var on = normalizeKeys((ObjectNode) entry.getValue());
                return new GenesisAccount(
                    Address.fromHexString(entry.getKey()),
                    JsonUtil.getString(on, "nonce").map(ParserUtils::parseUnsignedLong).orElse(0L),
                    JsonUtil.getString(on, "balance")
                        .map(ParserUtils::parseBalance)
                        .orElse(Wei.ZERO),
                    JsonUtil.getBytes(on, "code", null),
                    ParserUtils.getStorageMap(on, "storage"),
                    JsonUtil.getBytes(on, "privatekey").map(Bytes32::wrap).orElse(null));
              });
    }
  }

  class FromURL implements GenesisReader {
    private final URL url;
    private final ObjectNode rootWithoutAllocations;

    public FromURL(final URL url) {
      this.url = url;
      this.rootWithoutAllocations =
          normalizeKeys(JsonUtil.objectNodeFromURL(url, false, ALLOCATION_FIELD));
    }

    @Override
    public ObjectNode getRoot() {
      return rootWithoutAllocations;
    }

    @Override
    public ObjectNode getConfig() {
      return JsonUtil.getObjectNode(rootWithoutAllocations, CONFIG_FIELD)
          .orElse(JsonUtil.createEmptyObjectNode());
    }

    @Override
    public Stream<GenesisAccount> streamAllocations() {
      final var parser = JsonUtil.jsonParserFromURL(url, false);

      try {
        parser.nextToken();
        while (parser.nextToken() != JsonToken.END_OBJECT) {
          if (ALLOCATION_FIELD.equals(parser.getCurrentName())) {
            parser.nextToken();
            parser.nextToken();
            break;
          } else {
            parser.skipChildren();
          }
        }
      } catch (final IOException e) {
        throw new RuntimeException(e);
      }

      return Streams.stream(new AllocationIterator(parser));
    }

    private static class AllocationIterator implements Iterator<GenesisAccount> {
      final JsonParser parser;

      public AllocationIterator(final JsonParser parser) {
        this.parser = parser;
      }

      @Override
      public boolean hasNext() {
        final var end = parser.currentToken() == JsonToken.END_OBJECT;
        if (end) {
          try {
            parser.close();
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        }
        return !end;
      }

      @Override
      public GenesisAccount next() {
        try {
          final Address address = Address.fromHexString(parser.currentName());
          long nonce = 0;
          Wei balance = Wei.ZERO;
          Bytes code = null;
          Map<UInt256, UInt256> storage = Map.of();
          Bytes32 privateKey = null;
          parser.nextToken(); // consume start object
          while (parser.nextToken() != JsonToken.END_OBJECT) {
            switch (normalizeKey(parser.currentName())) {
              case "nonce":
                parser.nextToken();
                nonce = ParserUtils.parseUnsignedLong(parser.getText());
                break;
              case "balance":
                parser.nextToken();
                balance = ParserUtils.parseBalance(parser.getText());
                break;
              case "code":
                parser.nextToken();
                code = Bytes.fromHexStringLenient(parser.getText());
                break;
              case "privatekey":
                parser.nextToken();
                privateKey = Bytes32.fromHexStringLenient(parser.getText());
                break;
              case "storage":
                parser.nextToken();
                storage = new HashMap<>();
                while (parser.nextToken() != JsonToken.END_OBJECT) {
                  final var key = UInt256.fromHexString(parser.currentName());
                  parser.nextToken();
                  final var value = UInt256.fromHexString(parser.getText());
                  storage.put(key, value);
                }
                break;
            }
            if (parser.currentToken() == JsonToken.START_OBJECT) {
              // ignore any unknown nested object
              parser.skipChildren();
            }
          }
          parser.nextToken();
          return new GenesisAccount(address, nonce, balance, code, storage, privateKey);
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }

  class ParserUtils {
    static long parseUnsignedLong(final String value) {
      String v = value.toLowerCase(Locale.US);
      if (v.startsWith("0x")) {
        v = v.substring(2);
      }
      return Long.parseUnsignedLong(v, 16);
    }

    static Wei parseBalance(final String balance) {
      final BigInteger val;
      if (balance.startsWith("0x")) {
        val = new BigInteger(1, Bytes.fromHexStringLenient(balance).toArrayUnsafe());
      } else {
        val = new BigInteger(balance);
      }

      return Wei.of(val);
    }

    static Map<UInt256, UInt256> getStorageMap(final ObjectNode json, final String key) {
      return JsonUtil.getObjectNode(json, key)
          .map(
              storageMap ->
                  Streams.stream(storageMap.fields())
                      .collect(
                          Collectors.toMap(
                              e -> UInt256.fromHexString(e.getKey()),
                              e -> UInt256.fromHexString(e.getValue().asText()))))
          .orElse(Map.of());
    }
  }
}
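`ParserUtils` (package-private, nested in `GenesisReader`) is shared by both readers and is deliberately forgiving about formats: nonces are always read as hexadecimal, with or without the `0x` prefix, while balances may be hex or decimal. Expected behaviour, called from the same package as the test at the end of this diff does:

```java
import org.hyperledger.besu.datatypes.Wei;

// Nonces are hex, with or without the 0x prefix.
long nonce = GenesisReader.ParserUtils.parseUnsignedLong("0x10");        // 16
long samePrefixless = GenesisReader.ParserUtils.parseUnsignedLong("10"); // also 16 (hex)

// Balances accept decimal or hex notation.
Wei decimal = GenesisReader.ParserUtils.parseBalance("1000"); // 1000 wei
Wei hex = GenesisReader.ParserUtils.parseBalance("0x3e8");    // also 1000 wei
```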
@@ -308,6 +308,11 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
    return getOptionalLong("praguetime");
  }

  @Override
  public OptionalLong getPragueEOFTime() {
    return getOptionalLong("pragueeoftime");
  }

  @Override
  public OptionalLong getFutureEipsTime() {
    return getOptionalLong("futureeipstime");
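Because genesis keys are normalised to lower case before lookup, a `pragueEOFTime` entry in the genesis `config` block is enough to expose the new milestone through `GenesisConfigOptions`. A small sketch, assuming the no-argument `getConfigOptions()` accessor is available (the timestamp is illustrative):

```java
import org.hyperledger.besu.config.GenesisConfigFile;
import org.hyperledger.besu.config.GenesisConfigOptions;

GenesisConfigOptions options =
    GenesisConfigFile.fromConfig("{\"config\":{\"pragueEOFTime\":1720000000}}")
        .getConfigOptions();

// Read via getOptionalLong("pragueeoftime"), so the key casing in the file does not matter.
System.out.println(options.getPragueEOFTime()); // OptionalLong[1720000000]
```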
@@ -467,6 +472,7 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
    getShanghaiTime().ifPresent(l -> builder.put("shanghaiTime", l));
    getCancunTime().ifPresent(l -> builder.put("cancunTime", l));
    getPragueTime().ifPresent(l -> builder.put("pragueTime", l));
    getPragueEOFTime().ifPresent(l -> builder.put("pragueEOFTime", l));
    getTerminalBlockNumber().ifPresent(l -> builder.put("terminalBlockNumber", l));
    getTerminalBlockHash().ifPresent(h -> builder.put("terminalBlockHash", h.toHexString()));
    getFutureEipsTime().ifPresent(l -> builder.put("futureEipsTime", l));
@@ -620,6 +626,7 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
        getShanghaiTime(),
        getCancunTime(),
        getPragueTime(),
        getPragueEOFTime(),
        getFutureEipsTime(),
        getExperimentalEipsTime());
    // when adding forks add an entry to ${REPO_ROOT}/config/src/test/resources/all_forks.json

@@ -23,17 +23,30 @@ import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.Set;
import java.util.function.Function;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonParser.Feature;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.filter.FilteringParserDelegate;
import com.fasterxml.jackson.core.filter.TokenFilter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.tuweni.bytes.Bytes;

/** The Json util class. */
public class JsonUtil {
  private static final JsonFactory JSON_FACTORY =
      JsonFactory.builder()
          .disable(JsonFactory.Feature.INTERN_FIELD_NAMES)
          .disable(JsonFactory.Feature.CANONICALIZE_FIELD_NAMES)
          .build();

  /** Default constructor. */
  private JsonUtil() {}

@@ -53,7 +66,7 @@ public class JsonUtil {
        entry -> {
          final String key = entry.getKey();
          final JsonNode value = entry.getValue();
          final String normalizedKey = key.toLowerCase(Locale.US);
          final String normalizedKey = normalizeKey(key);
          if (value instanceof ObjectNode) {
            normalized.set(normalizedKey, normalizeKeys((ObjectNode) value));
          } else if (value instanceof ArrayNode) {
@@ -65,6 +78,17 @@ public class JsonUtil {
    return normalized;
  }

  /**
   * Converts the key to lowercase for easier lookup. This is useful in cases such as the
   * 'genesis.json' file where all keys are assumed to be case insensitive.
   *
   * @param key the key to be normalized
   * @return key in lower case.
   */
  public static String normalizeKey(final String key) {
    return key.toLowerCase(Locale.US);
  }

  private static ArrayNode normalizeKeysInArray(final ArrayNode arrayNode) {
    final ArrayNode normalizedArray = JsonUtil.createEmptyArrayNode();
    arrayNode.forEach(
@@ -263,6 +287,35 @@ public class JsonUtil {
    return getBoolean(node, key).orElse(defaultValue);
  }

  /**
   * Gets Bytes.
   *
   * @param json the json
   * @param key the key
   * @return the Bytes
   */
  public static Optional<Bytes> getBytes(final ObjectNode json, final String key) {
    return getParsedValue(json, key, Bytes::fromHexString);
  }

  /**
   * Gets Wei.
   *
   * @param json the json
   * @param key the key
   * @param defaultValue the default value
   * @return the Wei
   */
  public static Bytes getBytes(final ObjectNode json, final String key, final Bytes defaultValue) {
    return getBytes(json, key).orElse(defaultValue);
  }

  private static <T> Optional<T> getParsedValue(
      final ObjectNode json, final String name, final Function<String, T> parser) {

    return getValue(json, name).map(JsonNode::asText).map(parser);
  }

  /**
   * Create empty object node object node.
   *
@@ -308,18 +361,75 @@ public class JsonUtil {
   *
   * @param jsonData the json data
   * @param allowComments true to allow comments
   * @param excludeFields names of the fields to not read
   * @return the object node
   */
  public static ObjectNode objectNodeFromString(
      final String jsonData, final boolean allowComments) {
    final ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(Feature.ALLOW_COMMENTS, allowComments);
      final String jsonData, final boolean allowComments, final String... excludeFields) {
    try {
      final JsonNode jsonNode = objectMapper.readTree(jsonData);
      return objectNodeFromParser(
          JSON_FACTORY.createParser(jsonData), allowComments, excludeFields);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Object node from string object node.
   *
   * @param jsonSource the json data
   * @param allowComments true to allow comments
   * @param excludeFields names of the fields to not read
   * @return the object node
   */
  public static ObjectNode objectNodeFromURL(
      final URL jsonSource, final boolean allowComments, final String... excludeFields) {
    try {
      return objectNodeFromParser(
          JSON_FACTORY.createParser(jsonSource).enable(Feature.AUTO_CLOSE_SOURCE),
          allowComments,
          excludeFields);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Get a JsonParser to parse JSON from URL.
   *
   * @param jsonSource the json source
   * @param allowComments true to allow comments
   * @return the json parser
   */
  public static JsonParser jsonParserFromURL(final URL jsonSource, final boolean allowComments) {
    try {
      return JSON_FACTORY
          .createParser(jsonSource)
          .enable(Feature.AUTO_CLOSE_SOURCE)
          .configure(Feature.ALLOW_COMMENTS, allowComments);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  private static ObjectNode objectNodeFromParser(
      final JsonParser baseParser, final boolean allowComments, final String... excludeFields) {
    try {
      final var parser =
          excludeFields.length > 0
              ? new FilteringParserDelegate(
                  baseParser,
                  new NameExcludeFilter(excludeFields),
                  TokenFilter.Inclusion.INCLUDE_ALL_AND_PATH,
                  true)
              : baseParser;
      parser.configure(Feature.ALLOW_COMMENTS, allowComments);

      final ObjectMapper objectMapper = new ObjectMapper();
      final JsonNode jsonNode = objectMapper.readTree(parser);
      validateType(jsonNode, JsonNodeType.OBJECT);
      return (ObjectNode) jsonNode;
    } catch (final IOException e) {
      // Reading directly from a string should not raise an IOException, just catch and rethrow
      throw new RuntimeException(e);
    }
  }
@@ -490,4 +600,30 @@ public class JsonUtil {
    }
    return true;
  }

  private static class NameExcludeFilter extends TokenFilter {
    private final Set<String> names;

    public NameExcludeFilter(final String... names) {
      this.names = Set.of(names);
    }

    @Override
    public TokenFilter includeProperty(final String name) {
      if (names.contains(name)) {
        return null;
      }
      return this;
    }

    @Override
    public boolean includeEmptyObject(final boolean contentsFiltered) {
      return !contentsFiltered;
    }

    @Override
    public boolean includeEmptyArray(final boolean contentsFiltered) {
      return !contentsFiltered;
    }
  }
}
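These `JsonUtil` additions exist so `GenesisReader.FromURL` can read a genesis file without ever materialising the potentially huge `alloc` section as a tree: once as an `ObjectNode` with `alloc` filtered out, and once as a raw streaming parser that walks the allocations. A short usage sketch (the class name and file path are illustrative, not part of the codebase):

```java
import java.net.URL;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.hyperledger.besu.config.JsonUtil;

public class GenesisJsonExample {
  public static void main(final String[] args) throws Exception {
    final URL genesisUrl = new URL("file:///tmp/genesis.json"); // illustrative path

    // Tree view of everything except the "alloc" field (kept out of memory).
    final ObjectNode rootWithoutAlloc = JsonUtil.objectNodeFromURL(genesisUrl, false, "alloc");

    // Separate streaming parser for walking the allocations one account at a time.
    final JsonParser parser = JsonUtil.jsonParserFromURL(genesisUrl, false);

    System.out.println(rootWithoutAlloc.fieldNames().hasNext() + " / " + parser.isClosed());
  }
}
```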
@@ -52,6 +52,7 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
  private OptionalLong shanghaiTime = OptionalLong.empty();
  private OptionalLong cancunTime = OptionalLong.empty();
  private OptionalLong pragueTime = OptionalLong.empty();
  private OptionalLong pragueEOFTime = OptionalLong.empty();
  private OptionalLong futureEipsTime = OptionalLong.empty();
  private OptionalLong experimentalEipsTime = OptionalLong.empty();
  private OptionalLong terminalBlockNumber = OptionalLong.empty();
@@ -255,6 +256,11 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
    return pragueTime;
  }

  @Override
  public OptionalLong getPragueEOFTime() {
    return pragueEOFTime;
  }

  @Override
  public OptionalLong getFutureEipsTime() {
    return futureEipsTime;
@@ -673,6 +679,18 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
    return this;
  }

  /**
   * PragueEOF time.
   *
   * @param timestamp the timestamp
   * @return the stub genesis config options
   */
  public StubGenesisConfigOptions pragueEOFTime(final long timestamp) {
    pragueTime = OptionalLong.of(timestamp);
    pragueEOFTime = pragueTime;
    return this;
  }

  /**
   * Future EIPs Time block.
   *

@@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.hyperledger.besu.config.GenesisConfigFile.fromConfig;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Wei;

import java.io.IOException;
@@ -50,7 +51,11 @@ class GenesisConfigFileTest {
    // Sanity check some basic properties to confirm this is the mainnet file.
    assertThat(config.getConfigOptions().isEthHash()).isTrue();
    assertThat(config.getConfigOptions().getChainId()).hasValue(MAINNET_CHAIN_ID);
    assertThat(config.streamAllocations().map(GenesisAllocation::getAddress))
    assertThat(
            config
                .streamAllocations()
                .map(GenesisAccount::address)
                .map(Address::toUnprefixedHexString))
        .contains(
            "000d836201318ec6899a67540690382780743280",
            "001762430ea9c3a26e5749afdb70da5f78ddbb8c",
@@ -63,7 +68,11 @@ class GenesisConfigFileTest {
    // Sanity check some basic properties to confirm this is the dev file.
    assertThat(config.getConfigOptions().isEthHash()).isTrue();
    assertThat(config.getConfigOptions().getChainId()).hasValue(DEVELOPMENT_CHAIN_ID);
    assertThat(config.streamAllocations().map(GenesisAllocation::getAddress))
    assertThat(
            config
                .streamAllocations()
                .map(GenesisAccount::address)
                .map(Address::toUnprefixedHexString))
        .contains(
            "fe3b557e8fb62b89f4916b721be55ceb828dbd73",
            "627306090abab3a6e1400e9345bc60c78a8bef57",
@@ -271,31 +280,41 @@ class GenesisConfigFileTest {
                + " }"
                + "}");

    final Map<String, GenesisAllocation> allocations =
    final Map<Address, GenesisAccount> allocations =
        config
            .streamAllocations()
            .collect(Collectors.toMap(GenesisAllocation::getAddress, Function.identity()));
    assertThat(allocations)
        .containsOnlyKeys(
            .collect(Collectors.toMap(GenesisAccount::address, Function.identity()));
    assertThat(allocations.keySet())
        .map(Address::toUnprefixedHexString)
        .containsOnly(
            "fe3b557e8fb62b89f4916b721be55ceb828dbd73",
            "627306090abab3a6e1400e9345bc60c78a8bef57",
            "f17f52151ebef6c7334fad080c5704d77216b732");
    final GenesisAllocation alloc1 = allocations.get("fe3b557e8fb62b89f4916b721be55ceb828dbd73");
    final GenesisAllocation alloc2 = allocations.get("627306090abab3a6e1400e9345bc60c78a8bef57");
    final GenesisAllocation alloc3 = allocations.get("f17f52151ebef6c7334fad080c5704d77216b732");
    final GenesisAccount alloc1 =
        allocations.get(Address.fromHexString("fe3b557e8fb62b89f4916b721be55ceb828dbd73"));
    final GenesisAccount alloc2 =
        allocations.get(Address.fromHexString("627306090abab3a6e1400e9345bc60c78a8bef57"));
    final GenesisAccount alloc3 =
        allocations.get(Address.fromHexString("f17f52151ebef6c7334fad080c5704d77216b732"));

    assertThat(alloc1.getBalance()).isEqualTo("0xad78ebc5ac6200000");
    assertThat(alloc2.getBalance()).isEqualTo("1000");
    assertThat(alloc3.getBalance()).isEqualTo("90000000000000000000000");
    assertThat(alloc3.getStorage()).hasSize(2);
    assertThat(alloc3.getStorage())
    assertThat(alloc1.balance())
        .isEqualTo(GenesisReader.ParserUtils.parseBalance("0xad78ebc5ac6200000"));
    assertThat(alloc2.balance()).isEqualTo(GenesisReader.ParserUtils.parseBalance("1000"));
    assertThat(alloc3.balance())
        .isEqualTo(GenesisReader.ParserUtils.parseBalance("90000000000000000000000"));
    assertThat(alloc3.storage()).hasSize(2);
    assertThat(alloc3.storage())
        .containsEntry(
            "0xc4c3a3f99b26e5e534b71d6f33ca6ea5c174decfb16dd7237c60eff9774ef4a4",
            "0x937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0");
    assertThat(alloc3.getStorage())
            UInt256.fromHexString(
                "0xc4c3a3f99b26e5e534b71d6f33ca6ea5c174decfb16dd7237c60eff9774ef4a4"),
            UInt256.fromHexString(
                "0x937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0")
|
||||
assertThat(alloc3.storage())
|
||||
.containsEntry(
|
||||
"0xc4c3a3f99b26e5e534b71d6f33ca6ea5c174decfb16dd7237c60eff9774ef4a3",
|
||||
"0x6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012");
|
||||
UInt256.fromHexString(
|
||||
"0xc4c3a3f99b26e5e534b71d6f33ca6ea5c174decfb16dd7237c60eff9774ef4a3"),
|
||||
UInt256.fromHexString(
|
||||
"0x6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -199,6 +199,13 @@ class GenesisConfigOptionsTest {
|
||||
assertThat(config.getPragueTime()).hasValue(1670470143);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldGetPragueEOFTime() {
|
||||
final GenesisConfigOptions config =
|
||||
fromConfigOptions(singletonMap("pragueEOFTime", 1670470143));
|
||||
assertThat(config.getPragueEOFTime()).hasValue(1670470143);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldGetFutureEipsTime() {
|
||||
final GenesisConfigOptions config = fromConfigOptions(singletonMap("futureEipsTime", 1337));
|
||||
@@ -232,6 +239,7 @@ class GenesisConfigOptionsTest {
|
||||
assertThat(config.getShanghaiTime()).isEmpty();
|
||||
assertThat(config.getCancunTime()).isEmpty();
|
||||
assertThat(config.getPragueTime()).isEmpty();
|
||||
assertThat(config.getPragueEOFTime()).isEmpty();
|
||||
assertThat(config.getFutureEipsTime()).isEmpty();
|
||||
assertThat(config.getExperimentalEipsTime()).isEmpty();
|
||||
}
|
||||
|
||||
@@ -0,0 +1,98 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.config;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hyperledger.besu.config.GenesisReader.ALLOCATION_FIELD;
|
||||
import static org.hyperledger.besu.config.GenesisReader.CONFIG_FIELD;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.datatypes.Wei;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
|
||||
public class GenesisReaderTest {
|
||||
private final ObjectMapper mapper = new ObjectMapper();
|
||||
|
||||
@Test
|
||||
public void readGenesisFromObjectNode() {
|
||||
final var configNode = mapper.createObjectNode();
|
||||
configNode.put("londonBlock", 1);
|
||||
final var allocNode = mapper.createObjectNode();
|
||||
allocNode.put(Address.BLS12_G2MUL.toUnprefixedHexString(), generateAllocation(Wei.ONE));
|
||||
final var rootNode = mapper.createObjectNode();
|
||||
rootNode.put("chainId", 12);
|
||||
rootNode.put(CONFIG_FIELD, configNode);
|
||||
rootNode.put(ALLOCATION_FIELD, allocNode);
|
||||
final var genesisReader = new GenesisReader.FromObjectNode(rootNode);
|
||||
|
||||
assertThat(genesisReader.getRoot().get("chainid").asInt()).isEqualTo(12);
|
||||
assertThat(genesisReader.getRoot().has(ALLOCATION_FIELD)).isFalse();
|
||||
assertThat(genesisReader.getConfig().get("londonblock").asInt()).isEqualTo(1);
|
||||
assertThat(genesisReader.streamAllocations())
|
||||
.containsExactly(new GenesisAccount(Address.BLS12_G2MUL, 0, Wei.ONE, null, Map.of(), null));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void readGenesisFromURL(@TempDir final Path folder) throws IOException {
|
||||
final String jsonStr =
|
||||
"""
|
||||
{
|
||||
"chainId":11,
|
||||
"config": {
|
||||
"londonBlock":1
|
||||
},
|
||||
"alloc": {
|
||||
"000d836201318ec6899a67540690382780743280": {
|
||||
"balance": "0xad78ebc5ac6200000"
|
||||
}
|
||||
},
|
||||
"gasLimit": "0x1"
|
||||
}
|
||||
""";
|
||||
|
||||
final var genesisFile = Files.writeString(folder.resolve("genesis.json"), jsonStr);
|
||||
|
||||
final var genesisReader = new GenesisReader.FromURL(genesisFile.toUri().toURL());
|
||||
|
||||
assertThat(genesisReader.getRoot().get("chainid").asInt()).isEqualTo(11);
|
||||
assertThat(genesisReader.getRoot().get("gaslimit").asText()).isEqualTo("0x1");
|
||||
assertThat(genesisReader.getRoot().has(ALLOCATION_FIELD)).isFalse();
|
||||
assertThat(genesisReader.getConfig().get("londonblock").asInt()).isEqualTo(1);
|
||||
assertThat(genesisReader.streamAllocations())
|
||||
.containsExactly(
|
||||
new GenesisAccount(
|
||||
Address.fromHexString("000d836201318ec6899a67540690382780743280"),
|
||||
0,
|
||||
Wei.fromHexString("0xad78ebc5ac6200000"),
|
||||
null,
|
||||
Map.of(),
|
||||
null));
|
||||
}
|
||||
|
||||
private ObjectNode generateAllocation(final Wei balance) {
|
||||
final ObjectNode entry = mapper.createObjectNode();
|
||||
entry.put("balance", balance.toShortHexString());
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
@@ -659,7 +659,24 @@ public class JsonUtilTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void objectNodeFromURL(@TempDir final Path folder) throws IOException {
|
||||
public void objectNodeFromString_excludingField() {
|
||||
final String jsonStr =
|
||||
"""
|
||||
{
|
||||
"a":1,
|
||||
"b":2,
|
||||
"c":3
|
||||
}
|
||||
""";
|
||||
|
||||
final ObjectNode result = JsonUtil.objectNodeFromString(jsonStr, false, "b");
|
||||
assertThat(result.get("a").asInt()).isEqualTo(1);
|
||||
assertThat(result.has("b")).isFalse();
|
||||
assertThat(result.get("c").asInt()).isEqualTo(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void objectNodeFromURL_excludingField(@TempDir final Path folder) throws IOException {
|
||||
final String jsonStr =
|
||||
"""
|
||||
{
|
||||
@@ -670,9 +687,9 @@ public class JsonUtilTest {
|
||||
""";
|
||||
final var genesisFile = Files.writeString(folder.resolve("genesis.json"), jsonStr);
|
||||
|
||||
final ObjectNode result = JsonUtil.objectNodeFromURL(genesisFile.toUri().toURL(), false);
|
||||
final ObjectNode result = JsonUtil.objectNodeFromURL(genesisFile.toUri().toURL(), false, "b");
|
||||
assertThat(result.get("a").asInt()).isEqualTo(1);
|
||||
assertThat(result.get("b").asInt()).isEqualTo(2);
|
||||
assertThat(result.has("b")).isFalse();
|
||||
assertThat(result.get("c").asInt()).isEqualTo(3);
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
*/
|
||||
package org.hyperledger.besu.consensus.merge.blockcreation;
|
||||
|
||||
import org.hyperledger.besu.config.GenesisAllocation;
|
||||
import org.hyperledger.besu.config.GenesisAccount;
|
||||
import org.hyperledger.besu.config.GenesisConfigFile;
|
||||
import org.hyperledger.besu.consensus.merge.MergeProtocolSchedule;
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
@@ -48,10 +48,7 @@ public interface MergeGenesisConfigHelper {
|
||||
}
|
||||
|
||||
default Stream<Address> genesisAllocations(final GenesisConfigFile configFile) {
|
||||
return configFile
|
||||
.streamAllocations()
|
||||
.map(GenesisAllocation::getAddress)
|
||||
.map(Address::fromHexString);
|
||||
return configFile.streamAllocations().map(GenesisAccount::address);
|
||||
}
|
||||
|
||||
default ProtocolSchedule getMergeProtocolSchedule() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 ConsenSys AG.
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
@@ -150,7 +150,7 @@ public class ProposalPayloadValidator {
|
||||
|
||||
final var validationResult =
|
||||
blockValidator.validateAndProcessBlock(
|
||||
protocolContext, block, HeaderValidationMode.LIGHT, HeaderValidationMode.FULL);
|
||||
protocolContext, block, HeaderValidationMode.LIGHT, HeaderValidationMode.FULL, false);
|
||||
|
||||
if (!validationResult.isSuccessful()) {
|
||||
LOG.info(
|
||||
|
||||
@@ -105,7 +105,8 @@ public class ProposalPayloadValidatorTest {
|
||||
eq(protocolContext),
|
||||
eq(block),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult(Optional.empty()));
|
||||
|
||||
assertThat(payloadValidator.validate(proposal.getSignedPayload())).isTrue();
|
||||
@@ -129,7 +130,8 @@ public class ProposalPayloadValidatorTest {
|
||||
eq(protocolContext),
|
||||
eq(block),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult(Optional.empty()));
|
||||
|
||||
assertThat(payloadValidator.validate(proposal.getSignedPayload())).isTrue();
|
||||
@@ -152,7 +154,8 @@ public class ProposalPayloadValidatorTest {
|
||||
eq(protocolContext),
|
||||
eq(block),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult("Failed"));
|
||||
|
||||
assertThat(payloadValidator.validate(proposal.getSignedPayload())).isFalse();
|
||||
@@ -228,7 +231,8 @@ public class ProposalPayloadValidatorTest {
|
||||
eq(protocolContext),
|
||||
eq(block),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult(Optional.empty()));
|
||||
|
||||
assertThat(payloadValidator.validate(proposal.getSignedPayload())).isFalse();
|
||||
@@ -262,7 +266,8 @@ public class ProposalPayloadValidatorTest {
|
||||
eq(protocolContext),
|
||||
eq(block),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult(Optional.empty()));
|
||||
when(cmsValidator.validate(eq(cms), eq(hashWithoutCms))).thenReturn(false);
|
||||
|
||||
@@ -297,7 +302,8 @@ public class ProposalPayloadValidatorTest {
|
||||
eq(protocolContext),
|
||||
eq(block),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult(Optional.empty()));
|
||||
when(cmsValidator.validate(eq(cms), eq(hashWithoutCms))).thenReturn(true);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 ConsenSys AG.
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
@@ -111,7 +111,8 @@ public class ProposalValidatorTest {
|
||||
eq(protocolContext),
|
||||
any(),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult(Optional.empty()));
|
||||
|
||||
when(protocolSchedule.getByBlockHeader(any())).thenReturn(protocolSpec);
|
||||
@@ -168,7 +169,8 @@ public class ProposalValidatorTest {
|
||||
eq(protocolContext),
|
||||
any(),
|
||||
eq(HeaderValidationMode.LIGHT),
|
||||
eq(HeaderValidationMode.FULL)))
|
||||
eq(HeaderValidationMode.FULL),
|
||||
eq(false)))
|
||||
.thenReturn(new BlockProcessingResult("Failed"));
|
||||
|
||||
assertThat(roundItem.messageValidator.validate(proposal)).isFalse();
|
||||
|
||||
@@ -28,16 +28,16 @@ import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
public class WithdrawalRequestParameter {
|
||||
|
||||
private final String sourceAddress;
|
||||
private final String validatorPublicKey;
|
||||
private final String validatorPubkey;
|
||||
private final String amount;
|
||||
|
||||
@JsonCreator
|
||||
public WithdrawalRequestParameter(
|
||||
@JsonProperty("sourceAddress") final String sourceAddress,
|
||||
@JsonProperty("validatorPublicKey") final String validatorPublicKey,
|
||||
@JsonProperty("validatorPubkey") final String validatorPubkey,
|
||||
@JsonProperty("amount") final String amount) {
|
||||
this.sourceAddress = sourceAddress;
|
||||
this.validatorPublicKey = validatorPublicKey;
|
||||
this.validatorPubkey = validatorPubkey;
|
||||
this.amount = amount;
|
||||
}
|
||||
|
||||
@@ -52,7 +52,7 @@ public class WithdrawalRequestParameter {
|
||||
public WithdrawalRequest toWithdrawalRequest() {
|
||||
return new WithdrawalRequest(
|
||||
Address.fromHexString(sourceAddress),
|
||||
BLSPublicKey.fromHexString(validatorPublicKey),
|
||||
BLSPublicKey.fromHexString(validatorPubkey),
|
||||
GWei.fromHexString(amount));
|
||||
}
|
||||
|
||||
@@ -62,8 +62,8 @@ public class WithdrawalRequestParameter {
|
||||
}
|
||||
|
||||
@JsonGetter
|
||||
public String getValidatorPublicKey() {
|
||||
return validatorPublicKey;
|
||||
public String getValidatorPubkey() {
|
||||
return validatorPubkey;
|
||||
}
|
||||
|
||||
@JsonGetter
|
||||
@@ -77,13 +77,13 @@ public class WithdrawalRequestParameter {
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final WithdrawalRequestParameter that = (WithdrawalRequestParameter) o;
|
||||
return Objects.equals(sourceAddress, that.sourceAddress)
|
||||
&& Objects.equals(validatorPublicKey, that.validatorPublicKey)
|
||||
&& Objects.equals(validatorPubkey, that.validatorPubkey)
|
||||
&& Objects.equals(amount, that.amount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(sourceAddress, validatorPublicKey, amount);
|
||||
return Objects.hash(sourceAddress, validatorPubkey, amount);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -92,8 +92,8 @@ public class WithdrawalRequestParameter {
|
||||
+ "sourceAddress='"
|
||||
+ sourceAddress
|
||||
+ '\''
|
||||
+ ", validatorPublicKey='"
|
||||
+ validatorPublicKey
|
||||
+ ", validatorPubkey='"
|
||||
+ validatorPubkey
|
||||
+ '\''
|
||||
+ ", amount='"
|
||||
+ amount
|
||||
|
||||
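As a side note (not part of the change itself): the hunks above rename the engine-API wire field from "validatorPublicKey" to "validatorPubkey". A hedged round-trip sketch, using dummy values, of what a caller now sends and reads back:

    // Illustrative only: dummy values; the point is the renamed "validatorPubkey" JSON key
    // and the matching getValidatorPubkey() accessor.
    void roundTripRenamedField() throws com.fasterxml.jackson.core.JsonProcessingException {
      final com.fasterxml.jackson.databind.ObjectMapper mapper =
          new com.fasterxml.jackson.databind.ObjectMapper();
      final WithdrawalRequestParameter param =
          mapper.readValue(
              "{\"sourceAddress\":\"0x01\",\"validatorPubkey\":\"0x02\",\"amount\":\"0x1\"}",
              WithdrawalRequestParameter.class);
      assert param.getValidatorPubkey().equals("0x02");
    }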
@@ -164,7 +164,7 @@ public class BlockTransactionSelector {
|
||||
public TransactionSelectionResults buildTransactionListForBlock() {
|
||||
LOG.atDebug()
|
||||
.setMessage("Transaction pool stats {}")
|
||||
.addArgument(blockSelectionContext.transactionPool().logStats())
|
||||
.addArgument(blockSelectionContext.transactionPool()::logStats)
|
||||
.log();
|
||||
timeLimitedSelection();
|
||||
LOG.atTrace()
|
||||
|
||||
@@ -17,12 +17,11 @@ package org.hyperledger.besu.ethereum.chain;
|
||||
import static java.util.Collections.emptyList;
|
||||
import static org.hyperledger.besu.ethereum.trie.common.GenesisWorldStateProvider.createGenesisWorldState;
|
||||
|
||||
import org.hyperledger.besu.config.GenesisAllocation;
|
||||
import org.hyperledger.besu.config.GenesisAccount;
|
||||
import org.hyperledger.besu.config.GenesisConfigFile;
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.datatypes.BlobGas;
|
||||
import org.hyperledger.besu.datatypes.Hash;
|
||||
import org.hyperledger.besu.datatypes.Wei;
|
||||
import org.hyperledger.besu.ethereum.core.Block;
|
||||
import org.hyperledger.besu.ethereum.core.BlockBody;
|
||||
import org.hyperledger.besu.ethereum.core.BlockHeader;
|
||||
@@ -38,29 +37,27 @@ import org.hyperledger.besu.evm.account.MutableAccount;
|
||||
import org.hyperledger.besu.evm.log.LogsBloomFilter;
|
||||
import org.hyperledger.besu.evm.worldstate.WorldUpdater;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.util.HashMap;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.OptionalLong;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.MoreObjects;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
|
||||
public final class GenesisState {
|
||||
|
||||
private final Block block;
|
||||
private final List<GenesisAccount> genesisAccounts;
|
||||
private final GenesisConfigFile genesisConfigFile;
|
||||
|
||||
private GenesisState(final Block block, final List<GenesisAccount> genesisAccounts) {
|
||||
private GenesisState(final Block block, final GenesisConfigFile genesisConfigFile) {
|
||||
this.block = block;
|
||||
this.genesisAccounts = genesisAccounts;
|
||||
this.genesisConfigFile = genesisConfigFile;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -75,24 +72,25 @@ public final class GenesisState {
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a {@link GenesisState} from a JSON string.
|
||||
* Construct a {@link GenesisState} from a URL
|
||||
*
|
||||
* @param dataStorageConfiguration A {@link DataStorageConfiguration} describing the storage
|
||||
* configuration
|
||||
* @param json A JSON string describing the genesis block
|
||||
* @param jsonSource A URL pointing to JSON genesis file
|
||||
* @param protocolSchedule A protocol Schedule associated with
|
||||
* @return A new {@link GenesisState}.
|
||||
*/
|
||||
public static GenesisState fromJson(
|
||||
@VisibleForTesting
|
||||
static GenesisState fromJsonSource(
|
||||
final DataStorageConfiguration dataStorageConfiguration,
|
||||
final String json,
|
||||
final URL jsonSource,
|
||||
final ProtocolSchedule protocolSchedule) {
|
||||
return fromConfig(
|
||||
dataStorageConfiguration, GenesisConfigFile.fromConfig(json), protocolSchedule);
|
||||
dataStorageConfiguration, GenesisConfigFile.fromConfig(jsonSource), protocolSchedule);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a {@link GenesisState} from a JSON object.
|
||||
* Construct a {@link GenesisState} from a genesis file object.
|
||||
*
|
||||
* @param config A {@link GenesisConfigFile} describing the genesis block.
|
||||
* @param protocolSchedule A protocol Schedule associated with
|
||||
@@ -108,41 +106,40 @@ public final class GenesisState {
|
||||
*
|
||||
* @param dataStorageConfiguration A {@link DataStorageConfiguration} describing the storage
|
||||
* configuration
|
||||
* @param config A {@link GenesisConfigFile} describing the genesis block.
|
||||
* @param genesisConfigFile A {@link GenesisConfigFile} describing the genesis block.
|
||||
* @param protocolSchedule A protocol Schedule associated with
|
||||
* @return A new {@link GenesisState}.
|
||||
*/
|
||||
public static GenesisState fromConfig(
|
||||
final DataStorageConfiguration dataStorageConfiguration,
|
||||
final GenesisConfigFile config,
|
||||
final GenesisConfigFile genesisConfigFile,
|
||||
final ProtocolSchedule protocolSchedule) {
|
||||
final List<GenesisAccount> genesisAccounts = parseAllocations(config).toList();
|
||||
final var genesisStateRoot =
|
||||
calculateGenesisStateRoot(dataStorageConfiguration, genesisConfigFile);
|
||||
final Block block =
|
||||
new Block(
|
||||
buildHeader(
|
||||
config,
|
||||
calculateGenesisStateHash(dataStorageConfiguration, genesisAccounts),
|
||||
protocolSchedule),
|
||||
buildBody(config));
|
||||
return new GenesisState(block, genesisAccounts);
|
||||
buildHeader(genesisConfigFile, genesisStateRoot, protocolSchedule),
|
||||
buildBody(genesisConfigFile));
|
||||
return new GenesisState(block, genesisConfigFile);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a {@link GenesisState} from a JSON object.
|
||||
*
|
||||
* @param genesisStateHash The hash of the genesis state.
|
||||
* @param config A {@link GenesisConfigFile} describing the genesis block.
|
||||
* @param genesisStateRoot The root of the genesis state.
|
||||
* @param genesisConfigFile A {@link GenesisConfigFile} describing the genesis block.
|
||||
* @param protocolSchedule A protocol Schedule associated with
|
||||
* @return A new {@link GenesisState}.
|
||||
*/
|
||||
public static GenesisState fromConfig(
|
||||
final Hash genesisStateHash,
|
||||
final GenesisConfigFile config,
|
||||
public static GenesisState fromStorage(
|
||||
final Hash genesisStateRoot,
|
||||
final GenesisConfigFile genesisConfigFile,
|
||||
final ProtocolSchedule protocolSchedule) {
|
||||
final List<GenesisAccount> genesisAccounts = parseAllocations(config).toList();
|
||||
final Block block =
|
||||
new Block(buildHeader(config, genesisStateHash, protocolSchedule), buildBody(config));
|
||||
return new GenesisState(block, genesisAccounts);
|
||||
new Block(
|
||||
buildHeader(genesisConfigFile, genesisStateRoot, protocolSchedule),
|
||||
buildBody(genesisConfigFile));
|
||||
return new GenesisState(block, genesisConfigFile);
|
||||
}
|
||||
|
||||
private static BlockBody buildBody(final GenesisConfigFile config) {
|
||||
@@ -164,31 +161,31 @@ public final class GenesisState {
|
||||
* @param target WorldView to write genesis state to
|
||||
*/
|
||||
public void writeStateTo(final MutableWorldState target) {
|
||||
writeAccountsTo(target, genesisAccounts, block.getHeader());
|
||||
writeAccountsTo(target, genesisConfigFile.streamAllocations(), block.getHeader());
|
||||
}
|
||||
|
||||
private static void writeAccountsTo(
|
||||
final MutableWorldState target,
|
||||
final List<GenesisAccount> genesisAccounts,
|
||||
final Stream<GenesisAccount> genesisAccounts,
|
||||
final BlockHeader rootHeader) {
|
||||
final WorldUpdater updater = target.updater();
|
||||
genesisAccounts.forEach(
|
||||
genesisAccount -> {
|
||||
final MutableAccount account = updater.getOrCreate(genesisAccount.address);
|
||||
account.setNonce(genesisAccount.nonce);
|
||||
account.setBalance(genesisAccount.balance);
|
||||
account.setCode(genesisAccount.code);
|
||||
genesisAccount.storage.forEach(account::setStorageValue);
|
||||
final MutableAccount account = updater.createAccount(genesisAccount.address());
|
||||
account.setNonce(genesisAccount.nonce());
|
||||
account.setBalance(genesisAccount.balance());
|
||||
account.setCode(genesisAccount.code());
|
||||
genesisAccount.storage().forEach(account::setStorageValue);
|
||||
});
|
||||
updater.commit();
|
||||
target.persist(rootHeader);
|
||||
}
|
||||
|
||||
private static Hash calculateGenesisStateHash(
|
||||
private static Hash calculateGenesisStateRoot(
|
||||
final DataStorageConfiguration dataStorageConfiguration,
|
||||
final List<GenesisAccount> genesisAccounts) {
|
||||
final GenesisConfigFile genesisConfigFile) {
|
||||
try (var worldState = createGenesisWorldState(dataStorageConfiguration)) {
|
||||
writeAccountsTo(worldState, genesisAccounts, null);
|
||||
writeAccountsTo(worldState, genesisConfigFile.streamAllocations(), null);
|
||||
return worldState.rootHash();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
@@ -265,10 +262,6 @@ public final class GenesisState {
|
||||
return withNiceErrorMessage("mixHash", genesis.getMixHash(), Hash::fromHexStringLenient);
|
||||
}
|
||||
|
||||
private static Stream<GenesisAccount> parseAllocations(final GenesisConfigFile genesis) {
|
||||
return genesis.streamAllocations().map(GenesisAccount::fromAllocation);
|
||||
}
|
||||
|
||||
private static long parseNonce(final GenesisConfigFile genesis) {
|
||||
return withNiceErrorMessage("nonce", genesis.getNonce(), GenesisState::parseUnsignedLong);
|
||||
}
|
||||
@@ -319,6 +312,14 @@ public final class GenesisState {
|
||||
if (pragueTimestamp.isPresent()) {
|
||||
return genesis.getTimestamp() >= pragueTimestamp.getAsLong();
|
||||
}
|
||||
return isPragueEOFAtGenesis(genesis);
|
||||
}
|
||||
|
||||
private static boolean isPragueEOFAtGenesis(final GenesisConfigFile genesis) {
|
||||
final OptionalLong pragueEOFTimestamp = genesis.getConfigOptions().getPragueEOFTime();
|
||||
if (pragueEOFTimestamp.isPresent()) {
|
||||
return genesis.getTimestamp() >= pragueEOFTimestamp.getAsLong();
|
||||
}
|
||||
return isFutureEipsTimeAtGenesis(genesis);
|
||||
}
|
||||
|
||||
@@ -340,75 +341,6 @@ public final class GenesisState {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return MoreObjects.toStringHelper(this)
|
||||
.add("block", block)
|
||||
.add("genesisAccounts", genesisAccounts)
|
||||
.toString();
|
||||
}
|
||||
|
||||
private static final class GenesisAccount {
|
||||
|
||||
final long nonce;
|
||||
final Address address;
|
||||
final Wei balance;
|
||||
final Map<UInt256, UInt256> storage;
|
||||
final Bytes code;
|
||||
|
||||
static GenesisAccount fromAllocation(final GenesisAllocation allocation) {
|
||||
return new GenesisAccount(
|
||||
allocation.getNonce(),
|
||||
allocation.getAddress(),
|
||||
allocation.getBalance(),
|
||||
allocation.getStorage(),
|
||||
allocation.getCode());
|
||||
}
|
||||
|
||||
private GenesisAccount(
|
||||
final String hexNonce,
|
||||
final String hexAddress,
|
||||
final String balance,
|
||||
final Map<String, String> storage,
|
||||
final String hexCode) {
|
||||
this.nonce = withNiceErrorMessage("nonce", hexNonce, GenesisState::parseUnsignedLong);
|
||||
this.address = withNiceErrorMessage("address", hexAddress, Address::fromHexString);
|
||||
this.balance = withNiceErrorMessage("balance", balance, this::parseBalance);
|
||||
this.code = hexCode != null ? Bytes.fromHexString(hexCode) : null;
|
||||
this.storage = parseStorage(storage);
|
||||
}
|
||||
|
||||
private Wei parseBalance(final String balance) {
|
||||
final BigInteger val;
|
||||
if (balance.startsWith("0x")) {
|
||||
val = new BigInteger(1, Bytes.fromHexStringLenient(balance).toArrayUnsafe());
|
||||
} else {
|
||||
val = new BigInteger(balance);
|
||||
}
|
||||
|
||||
return Wei.of(val);
|
||||
}
|
||||
|
||||
private Map<UInt256, UInt256> parseStorage(final Map<String, String> storage) {
|
||||
final Map<UInt256, UInt256> parsedStorage = new HashMap<>();
|
||||
storage.forEach(
|
||||
(key1, value1) -> {
|
||||
final UInt256 key = withNiceErrorMessage("storage key", key1, UInt256::fromHexString);
|
||||
final UInt256 value =
|
||||
withNiceErrorMessage("storage value", value1, UInt256::fromHexString);
|
||||
parsedStorage.put(key, value);
|
||||
});
|
||||
|
||||
return parsedStorage;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return MoreObjects.toStringHelper(this)
|
||||
.add("address", address)
|
||||
.add("nonce", nonce)
|
||||
.add("balance", balance)
|
||||
.add("storage", storage)
|
||||
.add("code", code)
|
||||
.toString();
|
||||
}
|
||||
return MoreObjects.toStringHelper(this).add("block", block).toString();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -189,6 +189,17 @@ public class MainnetProtocolSpecFactory {
|
||||
miningParameters);
|
||||
}
|
||||
|
||||
public ProtocolSpecBuilder pragueEOFDefinition(final GenesisConfigOptions genesisConfigOptions) {
|
||||
return MainnetProtocolSpecs.pragueEOFDefinition(
|
||||
chainId,
|
||||
contractSizeLimit,
|
||||
evmStackSize,
|
||||
isRevertReasonEnabled,
|
||||
genesisConfigOptions,
|
||||
evmConfiguration,
|
||||
miningParameters);
|
||||
}
|
||||
|
||||
/**
|
||||
* The "future" fork consists of EIPs that have been approved for Ethereum Mainnet but not
|
||||
* scheduled for a fork. This is also known as "Eligible For Inclusion" (EFI) or "Considered for
|
||||
|
||||
@@ -58,6 +58,7 @@ import org.hyperledger.besu.evm.gascalculator.HomesteadGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.IstanbulGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.LondonGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PetersburgGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PragueEOFGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.ShanghaiGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.SpuriousDragonGasCalculator;
|
||||
@@ -735,9 +736,6 @@ public abstract class MainnetProtocolSpecs {
|
||||
final GenesisConfigOptions genesisConfigOptions,
|
||||
final EvmConfiguration evmConfiguration,
|
||||
final MiningParameters miningParameters) {
|
||||
final int contractSizeLimit =
|
||||
configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
|
||||
final int stackSizeLimit = configStackSizeLimit.orElse(MessageFrame.DEFAULT_MAX_STACK_SIZE);
|
||||
|
||||
final Address depositContractAddress =
|
||||
genesisConfigOptions.getDepositContractAddress().orElse(DEFAULT_DEPOSIT_CONTRACT_ADDRESS);
|
||||
@@ -750,47 +748,64 @@ public abstract class MainnetProtocolSpecs {
            genesisConfigOptions,
            evmConfiguration,
            miningParameters)
        // EVM changes to support EOF EIPs (3670, 4200, 4750, 5450)
        // EIP-3074 AUTH and AUTHCALL gas
        .gasCalculator(PragueGasCalculator::new)
        // EIP-3074 AUTH and AUTHCALL
        .evmBuilder(
            (gasCalculator, jdCacheConfig) ->
                MainnetEVMs.prague(
                    gasCalculator, chainId.orElse(BigInteger.ZERO), evmConfiguration))
        // change contract call creator to accept EOF code

        // EIP-2537 BLS12-381 precompiles
        .precompileContractRegistryBuilder(MainnetPrecompiledContractRegistries::prague)

        // EIP-7002 Withdrawals / EIP-6110 Deposits / EIP-7685 Requests
        .requestsValidator(pragueRequestsValidator(depositContractAddress))
        // EIP-7002 Withdrawals / EIP-6110 Deposits / EIP-7685 Requests
        .requestProcessorCoordinator(pragueRequestsProcessors(depositContractAddress))

        // EIP-2935 Blockhash processor
        .blockHashProcessor(new PragueBlockHashProcessor())
        .name("Prague");
  }

static ProtocolSpecBuilder pragueEOFDefinition(
|
||||
final Optional<BigInteger> chainId,
|
||||
final OptionalInt configContractSizeLimit,
|
||||
final OptionalInt configStackSizeLimit,
|
||||
final boolean enableRevertReason,
|
||||
final GenesisConfigOptions genesisConfigOptions,
|
||||
final EvmConfiguration evmConfiguration,
|
||||
final MiningParameters miningParameters) {
|
||||
final int contractSizeLimit =
|
||||
configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
|
||||
|
||||
return pragueDefinition(
|
||||
chainId,
|
||||
configContractSizeLimit,
|
||||
configStackSizeLimit,
|
||||
enableRevertReason,
|
||||
genesisConfigOptions,
|
||||
evmConfiguration,
|
||||
miningParameters)
|
||||
// EIP-7692 EOF v1 Gas calculator
|
||||
.gasCalculator(PragueEOFGasCalculator::new)
|
||||
// EIP-7692 EOF v1 EVM and opcodes
|
||||
.evmBuilder(
|
||||
(gasCalculator, jdCacheConfig) ->
|
||||
MainnetEVMs.pragueEOF(
|
||||
gasCalculator, chainId.orElse(BigInteger.ZERO), evmConfiguration))
|
||||
// EIP-7698 EOF v1 creation transaction
|
||||
.contractCreationProcessorBuilder(
|
||||
(gasCalculator, evm) ->
|
||||
new ContractCreationProcessor(
|
||||
gasCalculator,
|
||||
evm,
|
||||
true,
|
||||
List.of(
|
||||
MaxCodeSizeRule.of(contractSizeLimit), EOFValidationCodeRule.of(1, false)),
|
||||
List.of(MaxCodeSizeRule.of(contractSizeLimit), EOFValidationCodeRule.of(1)),
|
||||
1,
|
||||
SPURIOUS_DRAGON_FORCE_DELETE_WHEN_EMPTY_ADDRESSES))
|
||||
// warm blockhash contract
.transactionProcessorBuilder(
|
||||
(gasCalculator,
|
||||
feeMarket,
|
||||
transactionValidator,
|
||||
contractCreationProcessor,
|
||||
messageCallProcessor) ->
|
||||
new MainnetTransactionProcessor(
|
||||
gasCalculator,
|
||||
transactionValidator,
|
||||
contractCreationProcessor,
|
||||
messageCallProcessor,
|
||||
true,
|
||||
true,
|
||||
stackSizeLimit,
|
||||
feeMarket,
|
||||
CoinbaseFeePriceCalculator.eip1559()))
|
||||
|
||||
// use prague precompiled contracts
|
||||
.precompileContractRegistryBuilder(MainnetPrecompiledContractRegistries::prague)
|
||||
.requestsValidator(pragueRequestsValidator(depositContractAddress))
|
||||
.requestProcessorCoordinator(pragueRequestsProcessors(depositContractAddress))
|
||||
.blockHashProcessor(new PragueBlockHashProcessor())
|
||||
.name("Prague");
|
||||
.name("PragueEOF");
|
||||
}
|
||||
|
||||
static ProtocolSpecBuilder futureEipsDefinition(
|
||||
@@ -803,7 +818,7 @@ public abstract class MainnetProtocolSpecs {
|
||||
final MiningParameters miningParameters) {
|
||||
final int contractSizeLimit =
|
||||
configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
|
||||
return pragueDefinition(
|
||||
return pragueEOFDefinition(
|
||||
chainId,
|
||||
configContractSizeLimit,
|
||||
configStackSizeLimit,
|
||||
@@ -823,8 +838,7 @@ public abstract class MainnetProtocolSpecs {
|
||||
gasCalculator,
|
||||
evm,
|
||||
true,
|
||||
List.of(
|
||||
MaxCodeSizeRule.of(contractSizeLimit), EOFValidationCodeRule.of(1, false)),
|
||||
List.of(MaxCodeSizeRule.of(contractSizeLimit), EOFValidationCodeRule.of(1)),
|
||||
1,
|
||||
SPURIOUS_DRAGON_FORCE_DELETE_WHEN_EMPTY_ADDRESSES))
|
||||
// use future configured precompiled contracts
|
||||
|
||||
@@ -32,8 +32,10 @@ import org.hyperledger.besu.ethereum.privacy.storage.PrivateMetadataUpdater;
|
||||
import org.hyperledger.besu.ethereum.processing.TransactionProcessingResult;
|
||||
import org.hyperledger.besu.ethereum.transaction.TransactionInvalidReason;
|
||||
import org.hyperledger.besu.ethereum.trie.MerkleTrieException;
|
||||
import org.hyperledger.besu.evm.Code;
|
||||
import org.hyperledger.besu.evm.account.Account;
|
||||
import org.hyperledger.besu.evm.account.MutableAccount;
|
||||
import org.hyperledger.besu.evm.code.CodeInvalid;
|
||||
import org.hyperledger.besu.evm.code.CodeV0;
|
||||
import org.hyperledger.besu.evm.frame.ExceptionalHaltReason;
|
||||
import org.hyperledger.besu.evm.frame.MessageFrame;
|
||||
@@ -382,13 +384,14 @@ public class MainnetTransactionProcessor {
|
||||
Address.contractAddress(senderAddress, sender.getNonce() - 1L);
|
||||
|
||||
final Bytes initCodeBytes = transaction.getPayload();
|
||||
Code code = contractCreationProcessor.getCodeFromEVMForCreation(initCodeBytes);
|
||||
initialFrame =
|
||||
commonMessageFrameBuilder
|
||||
.type(MessageFrame.Type.CONTRACT_CREATION)
|
||||
.address(contractAddress)
|
||||
.contract(contractAddress)
|
||||
.inputData(Bytes.EMPTY)
|
||||
.code(contractCreationProcessor.getCodeFromEVMUncached(initCodeBytes))
|
||||
.inputData(initCodeBytes.slice(code.getSize()))
|
||||
.code(code)
|
||||
.build();
|
||||
} else {
|
||||
@SuppressWarnings("OptionalGetWithoutIsPresent") // isContractCall tests isPresent
|
||||
@@ -415,12 +418,17 @@ public class MainnetTransactionProcessor {
|
||||
} else {
|
||||
initialFrame.setState(MessageFrame.State.EXCEPTIONAL_HALT);
|
||||
initialFrame.setExceptionalHaltReason(Optional.of(ExceptionalHaltReason.INVALID_CODE));
|
||||
validationResult =
|
||||
ValidationResult.invalid(
|
||||
TransactionInvalidReason.EOF_CODE_INVALID,
|
||||
((CodeInvalid) initialFrame.getCode()).getInvalidReason());
|
||||
}
|
||||
|
||||
if (initialFrame.getState() == MessageFrame.State.COMPLETED_SUCCESS) {
|
||||
worldUpdater.commit();
|
||||
} else {
|
||||
if (initialFrame.getExceptionalHaltReason().isPresent()) {
|
||||
if (initialFrame.getExceptionalHaltReason().isPresent()
|
||||
&& initialFrame.getCode().isValid()) {
|
||||
validationResult =
|
||||
ValidationResult.invalid(
|
||||
TransactionInvalidReason.EXECUTION_HALTED,
|
||||
|
||||
@@ -256,6 +256,7 @@ public class ProtocolScheduleBuilder {
|
||||
lastForkBlock = validateForkOrder("Shanghai", config.getShanghaiTime(), lastForkBlock);
|
||||
lastForkBlock = validateForkOrder("Cancun", config.getCancunTime(), lastForkBlock);
|
||||
lastForkBlock = validateForkOrder("Prague", config.getPragueTime(), lastForkBlock);
|
||||
lastForkBlock = validateForkOrder("PragueEOF", config.getPragueEOFTime(), lastForkBlock);
|
||||
lastForkBlock = validateForkOrder("FutureEips", config.getFutureEipsTime(), lastForkBlock);
|
||||
lastForkBlock =
|
||||
validateForkOrder("ExperimentalEips", config.getExperimentalEipsTime(), lastForkBlock);
|
||||
@@ -361,6 +362,7 @@ public class ProtocolScheduleBuilder {
|
||||
timestampMilestone(config.getShanghaiTime(), specFactory.shanghaiDefinition(config)),
|
||||
timestampMilestone(config.getCancunTime(), specFactory.cancunDefinition(config)),
|
||||
timestampMilestone(config.getPragueTime(), specFactory.pragueDefinition(config)),
|
||||
timestampMilestone(config.getPragueEOFTime(), specFactory.pragueEOFDefinition(config)),
|
||||
timestampMilestone(config.getFutureEipsTime(), specFactory.futureEipsDefinition(config)),
|
||||
timestampMilestone(
|
||||
config.getExperimentalEipsTime(), specFactory.experimentalEipsDefinition(config)),
|
||||
|
||||
@@ -25,6 +25,7 @@ import org.hyperledger.besu.ethereum.mainnet.TransactionValidatorFactory;
|
||||
import org.hyperledger.besu.ethereum.mainnet.ValidationResult;
|
||||
import org.hyperledger.besu.ethereum.processing.TransactionProcessingResult;
|
||||
import org.hyperledger.besu.ethereum.transaction.TransactionInvalidReason;
|
||||
import org.hyperledger.besu.evm.Code;
|
||||
import org.hyperledger.besu.evm.account.Account;
|
||||
import org.hyperledger.besu.evm.account.MutableAccount;
|
||||
import org.hyperledger.besu.evm.code.CodeV0;
|
||||
@@ -138,13 +139,14 @@ public class PrivateTransactionProcessor {
|
||||
privacyGroupId);
|
||||
|
||||
final Bytes initCodeBytes = transaction.getPayload();
|
||||
Code code = contractCreationProcessor.getCodeFromEVMForCreation(initCodeBytes);
|
||||
initialFrame =
|
||||
commonMessageFrameBuilder
|
||||
.type(MessageFrame.Type.CONTRACT_CREATION)
|
||||
.address(privateContractAddress)
|
||||
.contract(privateContractAddress)
|
||||
.inputData(Bytes.EMPTY)
|
||||
.code(contractCreationProcessor.getCodeFromEVMUncached(initCodeBytes))
|
||||
.inputData(initCodeBytes.slice(code.getSize()))
|
||||
.code(code)
|
||||
.build();
|
||||
} else {
|
||||
final Address to = transaction.getTo().get();
|
||||
|
||||
@@ -50,6 +50,7 @@ public enum TransactionInvalidReason {
|
||||
INVALID_BLOBS,
|
||||
PLUGIN_TX_POOL_VALIDATOR,
|
||||
EXECUTION_HALTED,
|
||||
EOF_CODE_INVALID,
|
||||
// Private Transaction Invalid Reasons
|
||||
PRIVATE_TRANSACTION_INVALID,
|
||||
PRIVATE_TRANSACTION_FAILED,
|
||||
|
||||
@@ -29,8 +29,6 @@ import org.hyperledger.besu.evm.account.Account;
|
||||
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.io.Resources;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.units.bigints.UInt256;
|
||||
import org.bouncycastle.util.encoders.Hex;
|
||||
@@ -64,12 +62,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
public void createFromJsonWithAllocs(final DataStorageConfiguration dataStorageConfiguration)
|
||||
throws Exception {
|
||||
public void createFromJsonWithAllocs(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(GenesisStateTest.class.getResource("genesis1.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesis1.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getStateRoot())
|
||||
@@ -95,12 +92,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void createFromJsonNoAllocs(final DataStorageConfiguration dataStorageConfiguration)
|
||||
throws Exception {
|
||||
void createFromJsonNoAllocs(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(GenesisStateTest.class.getResource("genesis2.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesis2.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getStateRoot()).isEqualTo(Hash.EMPTY_TRIE_HASH);
|
||||
@@ -114,12 +110,11 @@ final class GenesisStateTest {
|
||||
private void assertContractInvariants(
|
||||
final DataStorageConfiguration dataStorageConfiguration,
|
||||
final String sourceFile,
|
||||
final String blockHash)
|
||||
throws Exception {
|
||||
final String blockHash) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(GenesisStateTest.class.getResource(sourceFile), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource(sourceFile),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getHash()).isEqualTo(Hash.fromHexString(blockHash));
|
||||
@@ -141,8 +136,7 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void createFromJsonWithContract(final DataStorageConfiguration dataStorageConfiguration)
|
||||
throws Exception {
|
||||
void createFromJsonWithContract(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
assertContractInvariants(
|
||||
dataStorageConfiguration,
|
||||
"genesis3.json",
|
||||
@@ -151,13 +145,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void createFromJsonWithNonce(final DataStorageConfiguration dataStorageConfiguration)
|
||||
throws Exception {
|
||||
void createFromJsonWithNonce(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(
|
||||
GenesisStateTest.class.getResource("genesisNonce.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesisNonce.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getHash())
|
||||
@@ -168,13 +160,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void encodeOlympicBlock(final DataStorageConfiguration dataStorageConfiguration)
|
||||
throws Exception {
|
||||
void encodeOlympicBlock(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(
|
||||
GenesisStateTest.class.getResource("genesis-olympic.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesis-olympic.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BytesValueRLPOutput tmp = new BytesValueRLPOutput();
|
||||
genesisState.getBlock().writeTo(tmp);
|
||||
@@ -190,13 +180,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void genesisFromShanghai(final DataStorageConfiguration dataStorageConfiguration)
|
||||
throws Exception {
|
||||
void genesisFromShanghai(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(
|
||||
GenesisStateTest.class.getResource("genesis_shanghai.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesis_shanghai.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getHash())
|
||||
@@ -241,12 +229,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void genesisFromCancun(final DataStorageConfiguration dataStorageConfiguration) throws Exception {
|
||||
void genesisFromCancun(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(
|
||||
GenesisStateTest.class.getResource("genesis_cancun.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesis_cancun.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getHash())
|
||||
@@ -292,12 +279,11 @@ final class GenesisStateTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@ArgumentsSource(GenesisStateTestArguments.class)
|
||||
void genesisFromPrague(final DataStorageConfiguration dataStorageConfiguration) throws Exception {
|
||||
void genesisFromPrague(final DataStorageConfiguration dataStorageConfiguration) {
|
||||
final GenesisState genesisState =
|
||||
GenesisState.fromJson(
|
||||
GenesisState.fromJsonSource(
|
||||
dataStorageConfiguration,
|
||||
Resources.toString(
|
||||
GenesisStateTest.class.getResource("genesis_prague.json"), Charsets.UTF_8),
|
||||
GenesisStateTest.class.getResource("genesis_prague.json"),
|
||||
ProtocolScheduleFixture.MAINNET);
|
||||
final BlockHeader header = genesisState.getBlock().getHeader();
|
||||
assertThat(header.getHash())
|
||||
|
||||
@@ -20,7 +20,7 @@ import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.hyperledger.besu.config.GenesisAllocation;
|
||||
import org.hyperledger.besu.config.GenesisAccount;
|
||||
import org.hyperledger.besu.config.GenesisConfigFile;
|
||||
import org.hyperledger.besu.crypto.KeyPair;
|
||||
import org.hyperledger.besu.crypto.SECPPrivateKey;
|
||||
@@ -85,7 +85,6 @@ import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.apache.tuweni.bytes.Bytes32;
|
||||
@@ -98,10 +97,10 @@ public abstract class AbstractIsolationTests {
|
||||
protected ProtocolContext protocolContext;
|
||||
protected EthContext ethContext;
|
||||
protected EthScheduler ethScheduler = new DeterministicEthScheduler();
|
||||
final Function<String, KeyPair> asKeyPair =
|
||||
final Function<Bytes32, KeyPair> asKeyPair =
|
||||
key ->
|
||||
SignatureAlgorithmFactory.getInstance()
|
||||
.createKeyPair(SECPPrivateKey.create(Bytes32.fromHexString(key), "ECDSA"));
|
||||
.createKeyPair(SECPPrivateKey.create(key, "ECDSA"));
|
||||
protected final ProtocolSchedule protocolSchedule =
|
||||
MainnetProtocolSchedule.fromConfig(
|
||||
GenesisConfigFile.fromResource("/dev.json").getConfigOptions(),
|
||||
@@ -137,15 +136,16 @@ public abstract class AbstractIsolationTests {
|
||||
txPoolMetrics,
|
||||
transactionReplacementTester,
|
||||
new BlobCache(),
|
||||
MiningParameters.newDefault()));
|
||||
MiningParameters.newDefault()),
|
||||
ethScheduler);
|
||||
|
||||
protected final List<GenesisAllocation> accounts =
|
||||
protected final List<GenesisAccount> accounts =
|
||||
GenesisConfigFile.fromResource("/dev.json")
|
||||
.streamAllocations()
|
||||
.filter(ga -> ga.getPrivateKey().isPresent())
|
||||
.collect(Collectors.toList());
|
||||
.filter(ga -> ga.privateKey() != null)
|
||||
.toList();
|
||||
|
||||
KeyPair sender1 = asKeyPair.apply(accounts.get(0).getPrivateKey().get());
|
||||
KeyPair sender1 = Optional.ofNullable(accounts.get(0).privateKey()).map(asKeyPair).orElseThrow();
|
||||
TransactionPool transactionPool;
|
||||
|
||||
@TempDir private Path tempData;
|
||||
|
||||
@@ -39,7 +39,7 @@ public class BonsaiSnapshotIsolationTests extends AbstractIsolationTests {
|
||||
var postTruncatedWorldState = archive.getMutable(genesisState.getBlock().getHeader(), false);
|
||||
assertThat(postTruncatedWorldState).isEmpty();
|
||||
// assert that trying to access pre-worldstate does not segfault after truncating
|
||||
preTruncatedWorldState.get().get(Address.fromHexString(accounts.get(0).getAddress()));
|
||||
preTruncatedWorldState.get().get(accounts.get(0).address());
|
||||
assertThat(true).isTrue();
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright ConsenSys AG.
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
@@ -437,7 +437,7 @@ public class EthPeer implements Comparable<EthPeer> {
|
||||
localRequestManager -> localRequestManager.dispatchResponse(ethMessage),
|
||||
() -> {
|
||||
LOG.trace(
|
||||
"Message {} not expected has just been received for protocol {}, {} ",
|
||||
"Request message {} has just been received for protocol {}, peer {} ",
|
||||
messageCode,
|
||||
protocolName,
|
||||
this);
|
||||
|
||||
@@ -357,6 +357,7 @@ public class TransactionPoolFactory {
|
||||
miningParameters);
|
||||
}
|
||||
|
||||
return new LayeredPendingTransactions(transactionPoolConfiguration, pendingTransactionsSorter);
|
||||
return new LayeredPendingTransactions(
|
||||
transactionPoolConfiguration, pendingTransactionsSorter, ethScheduler);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,13 +24,13 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionAddedResult;
|
||||
import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfiguration;
|
||||
import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolMetrics;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* Holds the current set of executable pending transactions, that are candidate for inclusion on
|
||||
@@ -167,9 +167,25 @@ public abstract class AbstractPrioritizedTransactions extends AbstractSequential
|
||||
return remainingPromotionsPerType;
|
||||
}
|
||||
|
||||
  /**
   * Return the full content of this layer, organized as a list of sender pending txs. For each
   * sender, the collection of pending txs is ordered by ascending nonce.
   *
   * <p>Sender ordering detail: the sender of the most profitable tx comes first.
   *
   * @return a list of sender pending txs
   */
@Override
|
||||
public Stream<PendingTransaction> stream() {
|
||||
return orderByFee.descendingSet().stream();
|
||||
public List<SenderPendingTransactions> getBySender() {
|
||||
final var sendersToAdd = new HashSet<>(txsBySender.keySet());
|
||||
return orderByFee.descendingSet().stream()
|
||||
.map(PendingTransaction::getSender)
|
||||
.filter(sendersToAdd::remove)
|
||||
.map(
|
||||
sender ->
|
||||
new SenderPendingTransactions(
|
||||
sender, List.copyOf(txsBySender.get(sender).values())))
|
||||
.toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.eth.transactions.layered;
|
||||
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
import static org.hyperledger.besu.ethereum.eth.transactions.TransactionAddedResult.ADDED;
|
||||
import static org.hyperledger.besu.ethereum.eth.transactions.TransactionAddedResult.ALREADY_KNOWN;
|
||||
import static org.hyperledger.besu.ethereum.eth.transactions.TransactionAddedResult.REJECTED_UNDERPRICED_REPLACEMENT;
|
||||
@@ -54,7 +55,6 @@ import java.util.function.BiFunction;
|
||||
import java.util.function.BinaryOperator;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -138,6 +138,14 @@ public abstract class AbstractTransactionsLayer implements TransactionsLayer {
|
||||
|| nextLayer.contains(transaction);
|
||||
}
|
||||
|
||||
  /**
   * Return the full content of this layer, organized as a list of sender pending txs. For each
   * sender, the collection of pending txs is ordered by ascending nonce.
   *
   * @return a list of sender pending txs
   */
public abstract List<SenderPendingTransactions> getBySender();
|
||||
|
||||
@Override
|
||||
public List<PendingTransaction> getAll() {
|
||||
final List<PendingTransaction> allNextLayers = nextLayer.getAll();
|
||||
@@ -548,17 +556,17 @@ public abstract class AbstractTransactionsLayer implements TransactionsLayer {
|
||||
return priorityTxs;
|
||||
}
|
||||
|
||||
Stream<PendingTransaction> stream(final Address sender) {
return txsBySender.getOrDefault(sender, EMPTY_SENDER_TXS).values().stream();
}

@Override
public List<PendingTransaction> getAllFor(final Address sender) {
return Stream.concat(stream(sender), nextLayer.getAllFor(sender).stream()).toList();
public synchronized List<PendingTransaction> getAllFor(final Address sender) {
final var fromNextLayers = nextLayer.getAllFor(sender);
final var fromThisLayer = txsBySender.getOrDefault(sender, EMPTY_SENDER_TXS).values();
final var concatLayers =
new ArrayList<PendingTransaction>(fromThisLayer.size() + fromNextLayers.size());
concatLayers.addAll(fromThisLayer);
concatLayers.addAll(fromNextLayers);
return unmodifiableList(concatLayers);
}
|
||||
|
||||
abstract Stream<PendingTransaction> stream();
|
||||
|
||||
@Override
|
||||
public int count() {
|
||||
return pendingTransactions.size() + nextLayer.count();
|
||||
|
||||
@@ -27,6 +27,7 @@ import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.datatypes.Hash;
|
||||
import org.hyperledger.besu.ethereum.core.BlockHeader;
|
||||
import org.hyperledger.besu.ethereum.core.Transaction;
|
||||
import org.hyperledger.besu.ethereum.eth.manager.EthScheduler;
|
||||
import org.hyperledger.besu.ethereum.eth.transactions.PendingTransaction;
|
||||
import org.hyperledger.besu.ethereum.eth.transactions.PendingTransactionAddedListener;
|
||||
import org.hyperledger.besu.ethereum.eth.transactions.PendingTransactionDroppedListener;
|
||||
@@ -41,13 +42,10 @@ import org.hyperledger.besu.plugin.data.TransactionSelectionResult;
|
||||
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.OptionalLong;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.stream.Collector;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@@ -63,12 +61,15 @@ public class LayeredPendingTransactions implements PendingTransactions {
private static final Marker INVALID_TX_REMOVED = MarkerFactory.getMarker("INVALID_TX_REMOVED");
private final TransactionPoolConfiguration poolConfig;
private final AbstractPrioritizedTransactions prioritizedTransactions;
private final EthScheduler ethScheduler;

public LayeredPendingTransactions(
final TransactionPoolConfiguration poolConfig,
final AbstractPrioritizedTransactions prioritizedTransactions) {
final AbstractPrioritizedTransactions prioritizedTransactions,
final EthScheduler ethScheduler) {
this.poolConfig = poolConfig;
this.prioritizedTransactions = prioritizedTransactions;
this.ethScheduler = ethScheduler;
}
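The constructor now requires an EthScheduler, which the pool later uses (via scheduleTxWorkerTask) to drop invalidated transactions off the selection path. A minimal wiring sketch, mirroring the test setup further down in this diff; the three variables are assumed to already exist:

    // Sketch only: mirrors how the tests in this commit construct the pool.
    // poolConfig, prioritizedTransactions and ethScheduler are assumed to be available.
    final LayeredPendingTransactions pendingTransactions =
        new LayeredPendingTransactions(poolConfig, prioritizedTransactions, ethScheduler);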
@Override
|
||||
@@ -311,79 +312,57 @@ public class LayeredPendingTransactions implements PendingTransactions {
|
||||
}
|
||||
|
||||
@Override
// There's a small edge case here we could encounter.
// When we pass an upgrade block that has a new transaction type, we start allowing transactions
// of that new type into our pool.
// If we then reorg to a block lower than the upgrade block height _and_ we create a block, that
// block could end up with transactions of the new type.
// This seems like it would be very rare but worth it to document that we don't handle that case
// right now.
public synchronized void selectTransactions(
final PendingTransactions.TransactionSelector selector) {
public void selectTransactions(final PendingTransactions.TransactionSelector selector) {
|
||||
final List<PendingTransaction> invalidTransactions = new ArrayList<>();
|
||||
final Set<Hash> alreadyChecked = new HashSet<>();
|
||||
final Set<Address> skipSenders = new HashSet<>();
|
||||
final AtomicBoolean completed = new AtomicBoolean(false);
|
||||
|
||||
prioritizedTransactions.stream()
|
||||
.takeWhile(unused -> !completed.get())
|
||||
.filter(highPrioPendingTx -> !skipSenders.contains(highPrioPendingTx.getSender()))
|
||||
.peek(this::logSenderTxs)
|
||||
.forEach(
|
||||
highPrioPendingTx ->
|
||||
prioritizedTransactions.stream(highPrioPendingTx.getSender())
|
||||
.takeWhile(
|
||||
candidatePendingTx ->
|
||||
!skipSenders.contains(candidatePendingTx.getSender())
|
||||
&& !completed.get())
|
||||
.filter(
|
||||
candidatePendingTx ->
|
||||
!alreadyChecked.contains(candidatePendingTx.getHash())
|
||||
&& Long.compareUnsigned(
|
||||
candidatePendingTx.getNonce(), highPrioPendingTx.getNonce())
|
||||
<= 0)
|
||||
.forEach(
|
||||
candidatePendingTx -> {
|
||||
alreadyChecked.add(candidatePendingTx.getHash());
|
||||
final var res = selector.evaluateTransaction(candidatePendingTx);
|
||||
final List<SenderPendingTransactions> candidateTxsBySender;
synchronized (this) {
// selecting transactions for block creation is a potentially long operation,
// so we avoid holding the lock for the whole process and only lock while
// taking the snapshot of candidate transactions
candidateTxsBySender = prioritizedTransactions.getBySender();
}
|
||||
|
||||
LOG.atTrace()
|
||||
.setMessage("Selection result {} for transaction {}")
|
||||
.addArgument(res)
|
||||
.addArgument(candidatePendingTx::toTraceLog)
|
||||
.log();
|
||||
selection:
|
||||
for (final var senderTxs : candidateTxsBySender) {
|
||||
LOG.trace("highPrioSenderTxs {}", senderTxs);
|
||||
|
||||
if (res.discard()) {
|
||||
invalidTransactions.add(candidatePendingTx);
|
||||
logDiscardedTransaction(candidatePendingTx, res);
|
||||
}
|
||||
for (final var candidatePendingTx : senderTxs.pendingTransactions()) {
|
||||
final var selectionResult = selector.evaluateTransaction(candidatePendingTx);
|
||||
|
||||
if (res.stop()) {
|
||||
completed.set(true);
|
||||
}
|
||||
LOG.atTrace()
|
||||
.setMessage("Selection result {} for transaction {}")
|
||||
.addArgument(selectionResult)
|
||||
.addArgument(candidatePendingTx::toTraceLog)
|
||||
.log();
|
||||
|
||||
if (!res.selected()) {
|
||||
// avoid processing other txs from this sender if this one is skipped
|
||||
// since the following will not be selected due to the nonce gap
|
||||
skipSenders.add(candidatePendingTx.getSender());
|
||||
LOG.trace("Skipping tx from sender {}", candidatePendingTx.getSender());
|
||||
}
|
||||
}));
|
||||
if (selectionResult.discard()) {
|
||||
invalidTransactions.add(candidatePendingTx);
|
||||
logDiscardedTransaction(candidatePendingTx, selectionResult);
|
||||
}
|
||||
|
||||
invalidTransactions.forEach(
|
||||
invalidTx -> prioritizedTransactions.remove(invalidTx, INVALIDATED));
|
||||
}
|
||||
if (selectionResult.stop()) {
|
||||
LOG.trace("Stopping selection");
|
||||
break selection;
|
||||
}
|
||||
|
||||
private void logSenderTxs(final PendingTransaction highPrioPendingTx) {
|
||||
LOG.atTrace()
|
||||
.setMessage("highPrioPendingTx {}, senderTxs {}")
|
||||
.addArgument(highPrioPendingTx::toTraceLog)
|
||||
.addArgument(
|
||||
() ->
|
||||
prioritizedTransactions.stream(highPrioPendingTx.getSender())
|
||||
.map(PendingTransaction::toTraceLog)
|
||||
.collect(Collectors.joining(", ")))
|
||||
.log();
|
||||
if (!selectionResult.selected()) {
|
||||
// avoid processing other txs from this sender if this one is skipped
|
||||
// since the following will not be selected due to the nonce gap
|
||||
LOG.trace("Skipping remaining txs for sender {}", candidatePendingTx.getSender());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ethScheduler.scheduleTxWorkerTask(
|
||||
() ->
|
||||
invalidTransactions.forEach(
|
||||
invalidTx -> {
|
||||
synchronized (this) {
|
||||
prioritizedTransactions.remove(invalidTx, INVALIDATED);
|
||||
}
|
||||
}));
|
||||
}
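In short, the reworked selectTransactions takes a snapshot of the prioritized layer under the lock, walks each sender's nonce-ordered transactions outside the lock, and defers removal of discarded transactions to the EthScheduler's tx worker. A condensed paraphrase of that control flow, not the committed code verbatim (logging and the deferred removal are elided; names are illustrative):

    // Condensed paraphrase of the selection flow above; sketch only.
    final List<SenderPendingTransactions> snapshot;
    synchronized (this) {
      snapshot = prioritizedTransactions.getBySender(); // only the snapshot is taken under the lock
    }
    selection:
    for (final SenderPendingTransactions senderTxs : snapshot) {
      for (final PendingTransaction tx : senderTxs.pendingTransactions()) {
        final var result = selector.evaluateTransaction(tx);
        if (result.discard()) {
          invalidTransactions.add(tx); // actually removed later on the tx worker thread
        }
        if (result.stop()) {
          break selection; // selector is done: stop the whole selection
        }
        if (!result.selected()) {
          break; // later nonces from this sender would only create a gap
        }
      }
    }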
@Override
|
||||
|
||||
@@ -38,7 +38,6 @@ import java.util.TreeSet;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class ReadyTransactions extends AbstractSequentialTransactionsLayer {
|
||||
|
||||
@@ -137,11 +136,24 @@ public class ReadyTransactions extends AbstractSequentialTransactionsLayer {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
* Return the full content of this layer, organized as a list of sender pending txs. For each
* sender the collection of pending txs is ordered by nonce asc.
*
* <p>Returned sender list order detail: first the sender of the tx with the highest max gas
* price.
*
* @return a list of sender pending txs
*/
|
||||
@Override
|
||||
public Stream<PendingTransaction> stream() {
|
||||
public List<SenderPendingTransactions> getBySender() {
|
||||
return orderByMaxFee.descendingSet().stream()
|
||||
.map(PendingTransaction::getSender)
|
||||
.flatMap(sender -> txsBySender.get(sender).values().stream());
|
||||
.map(
|
||||
sender ->
|
||||
new SenderPendingTransactions(
|
||||
sender, List.copyOf(txsBySender.get(sender).values())))
|
||||
.toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.eth.transactions.layered;
|
||||
|
||||
import org.hyperledger.besu.datatypes.Address;
|
||||
import org.hyperledger.besu.ethereum.eth.transactions.PendingTransaction;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
* A list of pending transactions of a specific sender, ordered by nonce asc
*
* @param sender the sender
* @param pendingTransactions the list of pending transactions ordered by nonce asc
*/
public record SenderPendingTransactions(
Address sender, List<PendingTransaction> pendingTransactions) {

@Override
public String toString() {
return "Sender "
+ sender
+ " has "
+ pendingTransactions.size()
+ " pending transactions "
+ pendingTransactions.stream()
.map(PendingTransaction::toTraceLog)
.collect(Collectors.joining(",", "[", "]"));
}
}
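A small consumer-side sketch may help illustrate the contract of getBySender() together with this record: each sender appears exactly once, in the layer-specific priority order, and its transactions are already sorted by ascending nonce. The helper below is hypothetical and not part of the commit:

    // Hypothetical consumer of AbstractTransactionsLayer#getBySender(); illustration only.
    static void dumpLayer(final AbstractTransactionsLayer layer) {
      for (final SenderPendingTransactions senderTxs : layer.getBySender()) {
        System.out.println(senderTxs.sender() + " -> " + senderTxs.pendingTransactions().size() + " txs");
        for (final PendingTransaction tx : senderTxs.pendingTransactions()) {
          System.out.println("  nonce " + tx.getNonce()); // ascending within a sender
        }
      }
    }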
@@ -44,15 +44,19 @@ import java.util.TreeSet;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.google.common.collect.Iterables;
|
||||
|
||||
public class SparseTransactions extends AbstractTransactionsLayer {
|
||||
/**
|
||||
* Order sparse tx by priority flag and sequence asc, so that we pick for eviction txs that have
|
||||
* no priority and with the lowest sequence number (oldest) first.
|
||||
*/
|
||||
private final NavigableSet<PendingTransaction> sparseEvictionOrder =
|
||||
new TreeSet<>(
|
||||
Comparator.comparing(PendingTransaction::hasPriority)
|
||||
.thenComparing(PendingTransaction::getSequence));
|
||||
|
||||
private final Map<Address, Integer> gapBySender = new HashMap<>();
|
||||
private final List<SendersByPriority> orderByGap;
|
||||
|
||||
@@ -220,7 +224,8 @@ public class SparseTransactions extends AbstractTransactionsLayer {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove(final PendingTransaction invalidatedTx, final RemovalReason reason) {
|
||||
public synchronized void remove(
|
||||
final PendingTransaction invalidatedTx, final RemovalReason reason) {
|
||||
|
||||
final var senderTxs = txsBySender.get(invalidatedTx.getSender());
|
||||
if (senderTxs != null && senderTxs.containsKey(invalidatedTx.getNonce())) {
|
||||
@@ -312,9 +317,27 @@ public class SparseTransactions extends AbstractTransactionsLayer {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
* Return the full content of this layer, organized as a list of sender pending txs. For each
* sender the collection of pending txs is ordered by nonce asc.
*
* <p>Returned sender list order detail: first the sender of the tx that will be evicted last.
* So for example if the same sender has both the first and the last tx in the eviction order, it
* will be the first in the returned list, since we give precedence to txs that will be evicted
* later.
*
* @return a list of sender pending txs
*/
|
||||
@Override
|
||||
public Stream<PendingTransaction> stream() {
|
||||
return sparseEvictionOrder.descendingSet().stream();
|
||||
public List<SenderPendingTransactions> getBySender() {
|
||||
final var sendersToAdd = new HashSet<>(txsBySender.keySet());
|
||||
return sparseEvictionOrder.descendingSet().stream()
|
||||
.map(PendingTransaction::getSender)
|
||||
.filter(sendersToAdd::remove)
|
||||
.map(
|
||||
sender ->
|
||||
new SenderPendingTransactions(
|
||||
sender, List.copyOf(txsBySender.get(sender).values())))
|
||||
.toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -41,8 +41,6 @@ public interface TransactionsLayer {
|
||||
|
||||
boolean contains(Transaction transaction);
|
||||
|
||||
List<PendingTransaction> getAll();
|
||||
|
||||
TransactionAddedResult add(PendingTransaction pendingTransaction, int gap);
|
||||
|
||||
void remove(PendingTransaction pendingTransaction, RemovalReason reason);
|
||||
@@ -52,6 +50,10 @@ public interface TransactionsLayer {
|
||||
BlockHeader blockHeader,
|
||||
final Map<Address, Long> maxConfirmedNonceBySender);
|
||||
|
||||
List<PendingTransaction> getAll();
|
||||
|
||||
List<PendingTransaction> getAllFor(Address sender);
|
||||
|
||||
List<Transaction> getAllLocal();
|
||||
|
||||
List<Transaction> getAllPriority();
|
||||
@@ -93,8 +95,6 @@ public interface TransactionsLayer {
|
||||
|
||||
String logSender(Address sender);
|
||||
|
||||
List<PendingTransaction> getAllFor(Address sender);
|
||||
|
||||
enum RemovalReason {
|
||||
CONFIRMED,
|
||||
CROSS_LAYER_REPLACED,
|
||||
|
||||
@@ -58,7 +58,8 @@ public abstract class AbstractLayeredTransactionPoolTest extends AbstractTransac
|
||||
return new LayeredPendingTransactions(
|
||||
poolConfig,
|
||||
createPrioritizedTransactions(
|
||||
poolConfig, readyLayer, txPoolMetrics, transactionReplacementTester));
|
||||
poolConfig, readyLayer, txPoolMetrics, transactionReplacementTester),
|
||||
ethScheduler);
|
||||
}
|
||||
|
||||
protected abstract AbstractPrioritizedTransactions createPrioritizedTransactions(
|
||||
|
||||
@@ -170,14 +170,16 @@ public class LayeredPendingTransactionsTest extends BaseTransactionPoolTest {
|
||||
senderLimitedLayers = createLayers(senderLimitedConfig);
|
||||
smallLayers = createLayers(smallPoolConfig);
|
||||
|
||||
pendingTransactions = new LayeredPendingTransactions(poolConf, layers.prioritizedTransactions);
|
||||
pendingTransactions =
|
||||
new LayeredPendingTransactions(poolConf, layers.prioritizedTransactions, ethScheduler);
|
||||
|
||||
senderLimitedTransactions =
|
||||
new LayeredPendingTransactions(
|
||||
senderLimitedConfig, senderLimitedLayers.prioritizedTransactions);
|
||||
senderLimitedConfig, senderLimitedLayers.prioritizedTransactions, ethScheduler);
|
||||
|
||||
smallPendingTransactions =
|
||||
new LayeredPendingTransactions(smallPoolConfig, smallLayers.prioritizedTransactions);
|
||||
new LayeredPendingTransactions(
|
||||
smallPoolConfig, smallLayers.prioritizedTransactions, ethScheduler);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -199,7 +199,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
MiningParameters.newDefault().setMinTransactionGasPrice(MIN_GAS_PRICE));
|
||||
|
||||
final LayeredPendingTransactions pendingTransactions =
|
||||
new LayeredPendingTransactions(poolConfig, prioritizedTransactions);
|
||||
new LayeredPendingTransactions(poolConfig, prioritizedTransactions, ethScheduler);
|
||||
|
||||
scenario.execute(
|
||||
pendingTransactions,
|
||||
@@ -306,7 +306,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
Arguments.of(
|
||||
new Scenario("fill sparse 2")
|
||||
.addForSender(S1, 5, 3, 2)
|
||||
.expectedSparseForSender(S1, 5, 3, 2)),
|
||||
.expectedSparseForSender(S1, 2, 3, 5)),
|
||||
Arguments.of(
|
||||
new Scenario("overflow sparse 1")
|
||||
.addForSender(S1, 1, 2, 3, 4)
|
||||
@@ -315,13 +315,13 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
Arguments.of(
|
||||
new Scenario("overflow sparse 2")
|
||||
.addForSender(S1, 4, 2, 3, 1)
|
||||
.expectedSparseForSender(S1, 2, 3, 1)
|
||||
.expectedSparseForSender(S1, 1, 2, 3)
|
||||
.expectedDroppedForSender(S1, 4)),
|
||||
Arguments.of(
|
||||
new Scenario("overflow sparse 3")
|
||||
.addForSender(S1, 0, 4, 2, 3, 5)
|
||||
.expectedPrioritizedForSender(S1, 0)
|
||||
.expectedSparseForSender(S1, 4, 2, 3)
|
||||
.expectedSparseForSender(S1, 2, 3, 4)
|
||||
.expectedDroppedForSender(S1, 5)));
|
||||
}
|
||||
|
||||
@@ -334,7 +334,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
Arguments.of(
|
||||
new Scenario("add first sparse")
|
||||
.addForSenders(S1, 1, S2, 2)
|
||||
.expectedSparseForSenders(S1, 1, S2, 2)),
|
||||
.expectedSparseForSenders(S2, 2, S1, 1)),
|
||||
Arguments.of(
|
||||
new Scenario("fill prioritized 1")
|
||||
.addForSender(S1, 0, 1, 2)
|
||||
@@ -357,11 +357,11 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
.addForSenders(S1, 2, S2, 1)
|
||||
.expectedPrioritizedForSenders()
|
||||
.expectedReadyForSenders()
|
||||
.expectedSparseForSenders(S1, 2, S2, 1)
|
||||
.expectedSparseForSenders(S2, 1, S1, 2)
|
||||
.addForSenders(S2, 2, S1, 0)
|
||||
.expectedPrioritizedForSender(S1, 0)
|
||||
.expectedReadyForSenders()
|
||||
.expectedSparseForSenders(S1, 2, S2, 1, S2, 2)
|
||||
.expectedSparseForSenders(S2, 1, S2, 2, S1, 2)
|
||||
.addForSenders(S1, 1)
|
||||
.expectedPrioritizedForSenders(S1, 0, S1, 1, S1, 2)
|
||||
.expectedReadyForSenders()
|
||||
@@ -431,15 +431,15 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
.addForSenders(S2, 0, S3, 2, S1, 1)
|
||||
.expectedPrioritizedForSender(S2, 0)
|
||||
.expectedReadyForSenders()
|
||||
.expectedSparseForSenders(S3, 2, S1, 1)
|
||||
.expectedSparseForSenders(S1, 1, S3, 2)
|
||||
.addForSenders(S2, 1)
|
||||
.expectedPrioritizedForSenders(S2, 0, S2, 1)
|
||||
.expectedReadyForSenders()
|
||||
.expectedSparseForSenders(S3, 2, S1, 1)
|
||||
.expectedSparseForSenders(S1, 1, S3, 2)
|
||||
.addForSenders(S3, 0)
|
||||
.expectedPrioritizedForSenders(S3, 0, S2, 0, S2, 1)
|
||||
.expectedReadyForSenders()
|
||||
.expectedSparseForSenders(S3, 2, S1, 1)
|
||||
.expectedSparseForSenders(S1, 1, S3, 2)
|
||||
.addForSenders(S1, 0)
|
||||
.expectedPrioritizedForSenders(S3, 0, S2, 0, S2, 1)
|
||||
.expectedReadyForSenders(S1, 0, S1, 1)
|
||||
@@ -452,7 +452,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
.addForSenders(S4, 0, S4, 1, S3, 3)
|
||||
.expectedPrioritizedForSenders(S4, 0, S4, 1, S3, 0)
|
||||
.expectedReadyForSenders(S3, 1, S2, 0, S2, 1)
|
||||
.expectedSparseForSenders(S3, 2, S1, 1, S1, 0)
|
||||
.expectedSparseForSenders(S1, 0, S1, 1, S3, 2)
|
||||
// ToDo: non optimal discard, worth to improve?
|
||||
.expectedDroppedForSender(S3, 3)),
|
||||
Arguments.of(
|
||||
@@ -813,7 +813,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
Arguments.of(
|
||||
new Scenario("out of order sequence with gap 1")
|
||||
.addForSender(S1, 2, 1)
|
||||
.expectedSparseForSender(S1, 2, 1)
|
||||
.expectedSparseForSender(S1, 1, 2)
|
||||
.expectedNextNonceForSenders(S1, null)),
|
||||
Arguments.of(
|
||||
new Scenario("out of order sequence with gap 2")
|
||||
@@ -969,7 +969,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
Arguments.of(
|
||||
new Scenario("out of order sequence with gap 1")
|
||||
.addForSender(S1, 2, 1)
|
||||
.expectedSparseForSender(S1, 2, 1)
|
||||
.expectedSparseForSender(S1, 1, 2)
|
||||
.expectedSelectedTransactions()),
|
||||
Arguments.of(
|
||||
new Scenario("out of order sequence with gap 2")
|
||||
@@ -1073,8 +1073,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
.setAccountNonce(S1, 5)
|
||||
.addForSender(S1, 7)
|
||||
.expectedPrioritizedForSenders()
|
||||
// remember that sparse are checked by oldest first
|
||||
.expectedSparseForSender(S1, 8, 9, 7)));
|
||||
.expectedSparseForSender(S1, 7, 8, 9)));
|
||||
}
|
||||
|
||||
static Stream<Arguments> providerPrioritySenders() {
|
||||
@@ -1195,7 +1194,7 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
.addForSender(S3, 0)
|
||||
.expectedSparseForSender(S3, 0)
|
||||
.addForSender(SP1, 0)
|
||||
.expectedSparseForSenders(S3, 0, SP1, 0)
|
||||
.expectedSparseForSenders(SP1, 0, S3, 0)
|
||||
.confirmedForSenders(SP2, 0)
|
||||
.expectedPrioritizedForSender(SP2, 1, 2, 3)
|
||||
.expectedReadyForSenders(SP2, 4, SP2, 5, SP1, 0)
|
||||
@@ -1510,23 +1509,26 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
|
||||
private void assertExpectedPrioritized(
|
||||
final AbstractPrioritizedTransactions prioLayer, final List<PendingTransaction> expected) {
|
||||
assertThat(prioLayer.stream()).describedAs("Prioritized").containsExactlyElementsOf(expected);
|
||||
assertThat(prioLayer.getBySender())
|
||||
.describedAs("Prioritized")
|
||||
.flatExtracting(SenderPendingTransactions::pendingTransactions)
|
||||
.containsExactlyElementsOf(expected);
|
||||
}
|
||||
|
||||
private void assertExpectedReady(
|
||||
final ReadyTransactions readyLayer, final List<PendingTransaction> expected) {
|
||||
assertThat(readyLayer.stream()).describedAs("Ready").containsExactlyElementsOf(expected);
|
||||
assertThat(readyLayer.getBySender())
|
||||
.describedAs("Ready")
|
||||
.flatExtracting(SenderPendingTransactions::pendingTransactions)
|
||||
.containsExactlyElementsOf(expected);
|
||||
}
|
||||
|
||||
private void assertExpectedSparse(
|
||||
final SparseTransactions sparseLayer, final List<PendingTransaction> expected) {
|
||||
// sparse txs are returned from the most recent to the oldest, so reverse it to make writing
|
||||
// scenarios easier
|
||||
final var sortedExpected = new ArrayList<>(expected);
|
||||
Collections.reverse(sortedExpected);
|
||||
assertThat(sparseLayer.stream())
|
||||
assertThat(sparseLayer.getBySender())
|
||||
.describedAs("Sparse")
|
||||
.containsExactlyElementsOf(sortedExpected);
|
||||
.flatExtracting(SenderPendingTransactions::pendingTransactions)
|
||||
.containsExactlyElementsOf(expected);
|
||||
}
|
||||
|
||||
private void assertExpectedDropped(
|
||||
@@ -1587,7 +1589,9 @@ public class LayersTest extends BaseTransactionPoolTest {
|
||||
}
|
||||
actions.add(
|
||||
(pending, prio, ready, sparse, dropped) ->
|
||||
assertThat(prio.stream()).containsExactlyElementsOf(expectedSelected));
|
||||
assertThat(prio.getBySender())
|
||||
.flatExtracting(SenderPendingTransactions::pendingTransactions)
|
||||
.containsExactlyElementsOf(expectedSelected));
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
@@ -129,7 +129,7 @@ public class ReplayTest {
|
||||
final AbstractPrioritizedTransactions prioritizedTransactions =
|
||||
createLayers(poolConfig, txPoolMetrics, baseFeeMarket);
|
||||
final LayeredPendingTransactions pendingTransactions =
|
||||
new LayeredPendingTransactions(poolConfig, prioritizedTransactions);
|
||||
new LayeredPendingTransactions(poolConfig, prioritizedTransactions, ethScheduler);
|
||||
br.lines()
|
||||
.forEach(
|
||||
line -> {
|
||||
|
||||
@@ -17,8 +17,9 @@ package org.hyperledger.besu.evmtool;
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.hyperledger.besu.evmtool.CodeValidateSubCommand.COMMAND_NAME;
|
||||
|
||||
import org.hyperledger.besu.evm.Code;
|
||||
import org.hyperledger.besu.evm.code.CodeFactory;
|
||||
import org.hyperledger.besu.evm.code.CodeInvalid;
|
||||
import org.hyperledger.besu.evm.code.CodeV1Validation;
|
||||
import org.hyperledger.besu.evm.code.EOFLayout;
|
||||
import org.hyperledger.besu.util.LogConfigurator;
|
||||
|
||||
@@ -39,7 +40,7 @@ import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = COMMAND_NAME,
|
||||
description = "Execute an Ethereum State Test.",
|
||||
description = "Validates EVM code for fuzzing",
|
||||
mixinStandardHelpOptions = true,
|
||||
versionProvider = VersionProvider.class)
|
||||
public class CodeValidateSubCommand implements Runnable {
|
||||
@@ -109,24 +110,26 @@ public class CodeValidateSubCommand implements Runnable {
|
||||
} catch (RuntimeException re) {
|
||||
return "err: hex string -" + re + "\n";
|
||||
}
|
||||
if (codeBytes.size() == 0) {
|
||||
if (codeBytes.isEmpty()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
var layout = EOFLayout.parseEOF(codeBytes);
|
||||
EOFLayout layout = EOFLayout.parseEOF(codeBytes);
|
||||
if (!layout.isValid()) {
|
||||
return "err: layout - " + layout.getInvalidReason() + "\n";
|
||||
return "err: layout - " + layout.invalidReason() + "\n";
|
||||
}
|
||||
|
||||
var code = CodeFactory.createCode(codeBytes, 1, true);
|
||||
if (!code.isValid()) {
|
||||
return "err: " + ((CodeInvalid) code).getInvalidReason() + "\n";
|
||||
String error = CodeV1Validation.validate(layout);
|
||||
if (error != null) {
|
||||
return "err: " + error + "\n";
|
||||
}
|
||||
|
||||
Code code = CodeFactory.createCode(codeBytes, 1);
|
||||
|
||||
return "OK "
|
||||
+ IntStream.range(0, code.getCodeSectionCount())
|
||||
.mapToObj(code::getCodeSection)
|
||||
.map(cs -> layout.getContainer().slice(cs.getEntryPoint(), cs.getLength()))
|
||||
.map(cs -> layout.container().slice(cs.getEntryPoint(), cs.getLength()))
|
||||
.map(Bytes::toUnprefixedHexString)
|
||||
.collect(Collectors.joining(","))
|
||||
+ "\n";
|
||||
|
||||
@@ -0,0 +1,226 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.evmtool;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec.TestResult.failed;
|
||||
import static org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec.TestResult.passed;
|
||||
import static org.hyperledger.besu.evmtool.EOFTestSubCommand.COMMAND_NAME;
|
||||
|
||||
import org.hyperledger.besu.crypto.SignatureAlgorithmFactory;
|
||||
import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec;
|
||||
import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec.TestResult;
|
||||
import org.hyperledger.besu.evm.EvmSpecVersion;
|
||||
import org.hyperledger.besu.evm.code.CodeFactory;
|
||||
import org.hyperledger.besu.evm.code.CodeInvalid;
|
||||
import org.hyperledger.besu.evm.code.CodeV1;
|
||||
import org.hyperledger.besu.evm.code.CodeV1Validation;
|
||||
import org.hyperledger.besu.evm.code.EOFLayout;
|
||||
import org.hyperledger.besu.util.LogConfigurator;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JavaType;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = COMMAND_NAME,
|
||||
description = "Runs EOF validation reference tests",
|
||||
mixinStandardHelpOptions = true,
|
||||
versionProvider = VersionProvider.class)
|
||||
public class EOFTestSubCommand implements Runnable {
|
||||
public static final String COMMAND_NAME = "eof-test";
|
||||
@CommandLine.ParentCommand private final EvmToolCommand parentCommand;
|
||||
|
||||
// picocli does it magically
|
||||
@CommandLine.Parameters private final List<Path> eofTestFiles = new ArrayList<>();
|
||||
|
||||
@CommandLine.Option(
|
||||
names = {"--fork-name"},
|
||||
description = "Limit execution to one fork.")
|
||||
private String forkName = null;
|
||||
|
||||
@CommandLine.Option(
|
||||
names = {"--test-name"},
|
||||
description = "Limit execution to one test.")
|
||||
private String testVectorName = null;
|
||||
|
||||
public EOFTestSubCommand() {
|
||||
this(null);
|
||||
}
|
||||
|
||||
public EOFTestSubCommand(final EvmToolCommand parentCommand) {
|
||||
this.parentCommand = parentCommand;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
LogConfigurator.setLevel("", "OFF");
|
||||
// presume ethereum mainnet for reference and EOF tests
|
||||
SignatureAlgorithmFactory.setDefaultInstance();
|
||||
final ObjectMapper eofTestMapper = JsonUtils.createObjectMapper();
|
||||
|
||||
final JavaType javaType =
|
||||
eofTestMapper
|
||||
.getTypeFactory()
|
||||
.constructParametricType(Map.class, String.class, EOFTestCaseSpec.class);
|
||||
try {
|
||||
if (eofTestFiles.isEmpty()) {
|
||||
// if no EOF tests were specified use standard input to get filenames
|
||||
final BufferedReader in =
|
||||
new BufferedReader(new InputStreamReader(parentCommand.in, UTF_8));
|
||||
while (true) {
|
||||
final String fileName = in.readLine();
|
||||
if (fileName == null) {
|
||||
// reached end of file. Stop the loop.
|
||||
break;
|
||||
}
|
||||
final File file = new File(fileName);
|
||||
if (file.isFile()) {
|
||||
final Map<String, EOFTestCaseSpec> eofTests = eofTestMapper.readValue(file, javaType);
|
||||
executeEOFTest(file.toString(), eofTests);
|
||||
} else {
|
||||
parentCommand.out.println("File not found: " + fileName);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (final Path eofTestFile : eofTestFiles) {
|
||||
final Map<String, EOFTestCaseSpec> eofTests;
|
||||
if ("stdin".equals(eofTestFile.toString())) {
|
||||
eofTests = eofTestMapper.readValue(parentCommand.in, javaType);
|
||||
} else {
|
||||
eofTests = eofTestMapper.readValue(eofTestFile.toFile(), javaType);
|
||||
}
|
||||
executeEOFTest(eofTestFile.toString(), eofTests);
|
||||
}
|
||||
}
|
||||
} catch (final JsonProcessingException jpe) {
|
||||
parentCommand.out.println("File content error: " + jpe);
|
||||
} catch (final IOException e) {
|
||||
System.err.println("Unable to read EOF test file");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
|
||||
record TestExecutionResult(
|
||||
String fileName,
|
||||
String group,
|
||||
String name,
|
||||
String fork,
|
||||
boolean pass,
|
||||
String expectedError,
|
||||
String actualError) {}
|
||||
|
||||
private void executeEOFTest(final String fileName, final Map<String, EOFTestCaseSpec> eofTests) {
|
||||
List<TestExecutionResult> results = new ArrayList<>();
|
||||
|
||||
for (var testGroup : eofTests.entrySet()) {
|
||||
String groupName = testGroup.getKey();
|
||||
for (var testVector : testGroup.getValue().getVector().entrySet()) {
|
||||
String testName = testVector.getKey();
|
||||
if (testVectorName != null && !testVectorName.equals(testName)) {
|
||||
continue;
|
||||
}
|
||||
String code = testVector.getValue().code();
|
||||
for (var testResult : testVector.getValue().results().entrySet()) {
|
||||
String expectedForkName = testResult.getKey();
|
||||
if (forkName != null && !forkName.equals(expectedForkName)) {
|
||||
continue;
|
||||
}
|
||||
TestResult expectedResult = testResult.getValue();
|
||||
EvmSpecVersion evmVersion = EvmSpecVersion.fromName(expectedForkName);
|
||||
if (evmVersion == null) {
|
||||
results.add(
|
||||
new TestExecutionResult(
|
||||
fileName,
|
||||
groupName,
|
||||
testName,
|
||||
expectedForkName,
|
||||
false,
|
||||
"Valid fork name",
|
||||
"Unknown fork: " + expectedForkName));
|
||||
|
||||
continue;
|
||||
}
|
||||
TestResult actualResult;
|
||||
if (evmVersion.ordinal() < EvmSpecVersion.PRAGUE_EOF.ordinal()) {
|
||||
actualResult = failed("EOF_InvalidCode");
|
||||
} else {
|
||||
actualResult = considerCode(code);
|
||||
}
|
||||
results.add(
|
||||
new TestExecutionResult(
|
||||
fileName,
|
||||
groupName,
|
||||
testName,
|
||||
expectedForkName,
|
||||
actualResult.result() == expectedResult.result(),
|
||||
expectedResult.exception(),
|
||||
actualResult.exception()));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (TestExecutionResult result : results) {
|
||||
try {
|
||||
parentCommand.out.println(JsonUtils.createObjectMapper().writeValueAsString(result));
|
||||
} catch (JsonProcessingException e) {
|
||||
e.printStackTrace(parentCommand.out);
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public TestResult considerCode(final String hexCode) {
Bytes codeBytes;
try {
codeBytes =
Bytes.fromHexString(
hexCode.replaceAll("(^|\n)#[^\n]*($|\n)", "").replaceAll("[^0-9A-Za-z]", ""));
} catch (RuntimeException re) {
return failed(re.getMessage());
}
if (codeBytes.isEmpty()) {
return passed();
}

var layout = EOFLayout.parseEOF(codeBytes);
if (!layout.isValid()) {
return failed("layout - " + layout.invalidReason());
}

var code = CodeFactory.createCode(codeBytes, 1);
if (!code.isValid()) {
return failed("validate " + ((CodeInvalid) code).getInvalidReason());
}
if (code instanceof CodeV1 codeV1) {
var result = CodeV1Validation.validate(codeV1.getEofLayout());
if (result != null) {
return (failed("deep validate error: " + result));
}
}

return passed();
}
}
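Since considerCode is public, the validation pipeline can also be exercised directly, which is handy when debugging a single vector outside the JSON fixtures. A minimal sketch; the hex string is the CODE_BAD_MAGIC constant reused from the CLI tests in this commit, and the printing is illustrative:

    // Sketch only: drive the EOF validation pipeline without fixture files.
    EOFTestSubCommand subCommand = new EOFTestSubCommand();
    EOFTestCaseSpec.TestResult result =
        subCommand.considerCode("0xefffff 010004 020001-0001 040000 00 00800000 e4");
    // The bad magic fails EOFLayout.parseEOF, so the result carries a "layout - ..." message.
    System.out.println(result.result() + " : " + result.exception());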
@@ -87,6 +87,8 @@ import picocli.CommandLine.Option;
|
||||
BenchmarkSubCommand.class,
|
||||
B11rSubCommand.class,
|
||||
CodeValidateSubCommand.class,
|
||||
EOFTestSubCommand.class,
|
||||
PrettyPrintSubCommand.class,
|
||||
StateTestSubCommand.class,
|
||||
T8nSubCommand.class,
|
||||
T8nServerSubCommand.class
|
||||
@@ -140,6 +142,11 @@ public class EvmToolCommand implements Runnable {
|
||||
description = "Receiving address for this invocation.")
|
||||
private final Address receiver = Address.ZERO;
|
||||
|
||||
@Option(
|
||||
names = {"--create"},
|
||||
description = "Run call should be a create instead of a call operation.")
|
||||
private final Boolean createTransaction = false;
|
||||
|
||||
@Option(
|
||||
names = {"--contract"},
|
||||
paramLabel = "<address>",
|
||||
@@ -340,7 +347,7 @@ public class EvmToolCommand implements Runnable {
|
||||
.nonce(0)
|
||||
.gasPrice(Wei.ZERO)
|
||||
.gasLimit(Long.MAX_VALUE)
|
||||
.to(receiver)
|
||||
.to(createTransaction ? null : receiver)
|
||||
.value(Wei.ZERO)
|
||||
.payload(callData)
|
||||
.sender(sender)
|
||||
@@ -361,10 +368,10 @@ public class EvmToolCommand implements Runnable {
|
||||
}
|
||||
|
||||
final EVM evm = protocolSpec.getEvm();
|
||||
if (codeBytes.isEmpty()) {
|
||||
if (codeBytes.isEmpty() && !createTransaction) {
|
||||
codeBytes = component.getWorldState().get(receiver).getCode();
|
||||
}
|
||||
Code code = evm.getCode(Hash.hash(codeBytes), codeBytes);
|
||||
Code code = evm.getCodeForCreation(codeBytes);
|
||||
if (!code.isValid()) {
|
||||
out.println(((CodeInvalid) code).getInvalidReason());
|
||||
return;
|
||||
@@ -381,7 +388,9 @@ public class EvmToolCommand implements Runnable {
|
||||
|
||||
WorldUpdater updater = component.getWorldUpdater();
|
||||
updater.getOrCreate(sender);
|
||||
updater.getOrCreate(receiver);
|
||||
if (!createTransaction) {
|
||||
updater.getOrCreate(receiver);
|
||||
}
|
||||
var contractAccount = updater.getOrCreate(contract);
|
||||
contractAccount.setCode(codeBytes);
|
||||
|
||||
@@ -412,18 +421,23 @@ public class EvmToolCommand implements Runnable {
|
||||
.baseFee(component.getBlockchain().getChainHeadHeader().getBaseFee().orElse(null))
|
||||
.buildBlockHeader();
|
||||
|
||||
Address contractAddress =
|
||||
createTransaction ? Address.contractAddress(receiver, 0) : receiver;
|
||||
MessageFrame initialMessageFrame =
|
||||
MessageFrame.builder()
|
||||
.type(MessageFrame.Type.MESSAGE_CALL)
|
||||
.type(
|
||||
createTransaction
|
||||
? MessageFrame.Type.CONTRACT_CREATION
|
||||
: MessageFrame.Type.MESSAGE_CALL)
|
||||
.worldUpdater(updater.updater())
|
||||
.initialGas(txGas)
|
||||
.contract(Address.ZERO)
|
||||
.address(receiver)
|
||||
.contract(contractAddress)
|
||||
.address(contractAddress)
|
||||
.originator(sender)
|
||||
.sender(sender)
|
||||
.gasPrice(gasPriceGWei)
|
||||
.blobGasPrice(blobGasPrice)
|
||||
.inputData(callData)
|
||||
.inputData(createTransaction ? codeBytes.slice(code.getSize()) : callData)
|
||||
.value(ethValue)
|
||||
.apparentValue(ethValue)
|
||||
.code(code)
|
||||
|
||||
@@ -116,6 +116,9 @@ class MainnetGenesisFileModule extends GenesisFileModule {
|
||||
Map.entry(
|
||||
"prague",
|
||||
createSchedule(new StubGenesisConfigOptions().pragueTime(0).baseFeePerGas(0x0a))),
|
||||
Map.entry(
|
||||
"pragueeof",
|
||||
createSchedule(new StubGenesisConfigOptions().pragueEOFTime(0).baseFeePerGas(0x0a))),
|
||||
Map.entry(
|
||||
"futureeips",
|
||||
createSchedule(new StubGenesisConfigOptions().futureEipsTime(0).baseFeePerGas(0x0a))),
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.evmtool;
|
||||
|
||||
import static org.hyperledger.besu.evmtool.PrettyPrintSubCommand.COMMAND_NAME;
|
||||
|
||||
import org.hyperledger.besu.evm.code.CodeV1Validation;
|
||||
import org.hyperledger.besu.evm.code.EOFLayout;
|
||||
import org.hyperledger.besu.util.LogConfigurator;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = COMMAND_NAME,
|
||||
description = "Pretty Prints EOF Code",
|
||||
mixinStandardHelpOptions = true,
|
||||
versionProvider = VersionProvider.class)
|
||||
public class PrettyPrintSubCommand implements Runnable {
|
||||
public static final String COMMAND_NAME = "pretty-print";
|
||||
@CommandLine.ParentCommand private final EvmToolCommand parentCommand;
|
||||
|
||||
@CommandLine.Option(
names = {"-f", "--force"},
description = "Always print well formatted code, even if there is an error",
paramLabel = "<boolean>")
private final Boolean force = false;
|
||||
|
||||
// picocli does it magically
|
||||
@CommandLine.Parameters private final List<String> codeList = new ArrayList<>();
|
||||
|
||||
public PrettyPrintSubCommand() {
|
||||
this(null);
|
||||
}
|
||||
|
||||
public PrettyPrintSubCommand(final EvmToolCommand parentCommand) {
|
||||
this.parentCommand = parentCommand;
|
||||
}
|
||||
|
||||
@Override
public void run() {
LogConfigurator.setLevel("", "OFF");

for (var hexCode : codeList) {
Bytes container = Bytes.fromHexString(hexCode);
if (container.get(0) != ((byte) 0xef) && container.get(1) != 0) {
parentCommand.out.println(
"Pretty printing of legacy EVM is not supported. Patches welcome!");

} else {
EOFLayout layout = EOFLayout.parseEOF(container);
if (layout.isValid()) {
String validation = CodeV1Validation.validate(layout);
if (validation == null || force) {
layout.prettyPrint(parentCommand.out);
}
if (validation != null) {
parentCommand.out.println("EOF code is invalid - " + validation);
}
} else {
parentCommand.out.println("EOF layout is invalid - " + layout.invalidReason());
}
}
}
}
}
|
||||
@@ -308,7 +308,9 @@ public class StateTestSubCommand implements Runnable {
|
||||
"validationError",
|
||||
"Exception '" + spec.getExpectException() + "' was expected but did not occur");
|
||||
}
|
||||
|
||||
if (!result.getValidationResult().isValid()) {
|
||||
summaryLine.put("error", result.getValidationResult().getErrorMessage());
|
||||
}
|
||||
if (parentCommand.showJsonAlloc) {
|
||||
EvmToolCommand.dumpWorldState(worldState, parentCommand.out);
|
||||
}
|
||||
|
||||
@@ -31,6 +31,7 @@ import org.hyperledger.besu.evm.gascalculator.HomesteadGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.IstanbulGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.LondonGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PetersburgGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PragueEOFGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.ShanghaiGasCalculator;
|
||||
import org.hyperledger.besu.evm.precompile.PrecompiledContract;
|
||||
@@ -131,6 +132,8 @@ public abstract class BenchmarkExecutor {
|
||||
return switch (EvmSpecVersion.valueOf(fork.toUpperCase(Locale.ROOT))) {
|
||||
case HOMESTEAD -> new HomesteadGasCalculator();
|
||||
case FRONTIER -> new FrontierGasCalculator();
|
||||
case TANGERINE_WHISTLE -> null;
|
||||
case SPURIOUS_DRAGON -> null;
|
||||
case BYZANTIUM -> new ByzantiumGasCalculator();
|
||||
case CONSTANTINOPLE -> new ConstantinopleGasCalculator();
|
||||
case PETERSBURG -> new PetersburgGasCalculator();
|
||||
@@ -139,7 +142,9 @@ public abstract class BenchmarkExecutor {
|
||||
case LONDON, PARIS -> new LondonGasCalculator();
|
||||
case SHANGHAI -> new ShanghaiGasCalculator();
|
||||
case CANCUN -> new CancunGasCalculator();
|
||||
default -> new PragueGasCalculator();
|
||||
case PRAGUE -> new PragueGasCalculator();
|
||||
case PRAGUE_EOF, OSAKA, AMSTERDAM, BOGOTA, POLIS, BANGKOK, FUTURE_EIPS, EXPERIMENTAL_EIPS ->
|
||||
new PragueEOFGasCalculator();
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -24,24 +24,24 @@ import java.io.PrintStream;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import picocli.CommandLine;
|
||||
|
||||
public class CodeValidationSubCommandTest {
|
||||
class CodeValidationSubCommandTest {
|
||||
|
||||
static final String CODE_STOP_ONLY = "0xef0001 010004 020001-0001 030000 00 00000000 00";
|
||||
static final String CODE_RETF_ONLY = "0xef0001 010004 020001-0001 030000 00 00000000 e4";
|
||||
static final String CODE_BAD_MAGIC = "0xefffff 010004 020001-0001 030000 00 00000000 e4";
|
||||
static final String CODE_STOP_ONLY = "0xef0001 010004 020001-0001 040000 00 00800000 00";
|
||||
static final String CODE_RETURN_ONLY = "0xef0001 010004 020001-0003 040000 00 00800002 5f5ff3";
|
||||
static final String CODE_BAD_MAGIC = "0xefffff 010004 020001-0001 040000 00 00800000 e4";
|
||||
static final String CODE_INTERIOR_COMMENTS =
|
||||
"""
|
||||
0xef0001 010008 020002-000c-0002 030000 00
|
||||
0xef0001 010008 020002-0009-0002 040000 00
|
||||
# 7 inputs 1 output,
|
||||
00000007-07010007
|
||||
59-59-59-59-59-59-59-e30001-50-e4
|
||||
00800004-04010004
|
||||
59-59-59-59-e30001-50-00
|
||||
# No immediate data
|
||||
f1-e4""";
|
||||
f8-e4""";
|
||||
static final String CODE_MULTIPLE =
|
||||
CODE_STOP_ONLY + "\n" + CODE_BAD_MAGIC + "\n" + CODE_RETF_ONLY + "\n";
|
||||
CODE_STOP_ONLY + "\n" + CODE_BAD_MAGIC + "\n" + CODE_RETURN_ONLY + "\n";
|
||||
|
||||
@Test
|
||||
public void testSingleValidViaInput() {
|
||||
void testSingleValidViaInput() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(CODE_STOP_ONLY.getBytes(UTF_8));
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
@@ -51,7 +51,7 @@ public class CodeValidationSubCommandTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleInvalidViaInput() {
|
||||
void testSingleInvalidViaInput() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(CODE_BAD_MAGIC.getBytes(UTF_8));
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
@@ -61,7 +61,7 @@ public class CodeValidationSubCommandTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultipleViaInput() {
|
||||
void testMultipleViaInput() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(CODE_MULTIPLE.getBytes(UTF_8));
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
@@ -72,12 +72,12 @@ public class CodeValidationSubCommandTest {
|
||||
"""
|
||||
OK 00
|
||||
err: layout - EOF header byte 1 incorrect
|
||||
OK e4
|
||||
OK 5f5ff3
|
||||
""");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleValidViaCli() {
|
||||
void testSingleValidViaCli() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]);
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
@@ -89,7 +89,7 @@ public class CodeValidationSubCommandTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleInvalidViaCli() {
|
||||
void testSingleInvalidViaCli() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]);
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
@@ -101,37 +101,37 @@ public class CodeValidationSubCommandTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultipleViaCli() {
|
||||
void testMultipleViaCli() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]);
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
new CodeValidateSubCommand(bais, new PrintStream(baos));
|
||||
final CommandLine cmd = new CommandLine(codeValidateSubCommand);
|
||||
cmd.parseArgs(CODE_STOP_ONLY, CODE_BAD_MAGIC, CODE_RETF_ONLY);
|
||||
cmd.parseArgs(CODE_STOP_ONLY, CODE_BAD_MAGIC, CODE_RETURN_ONLY);
|
||||
codeValidateSubCommand.run();
|
||||
assertThat(baos.toString(UTF_8))
|
||||
.contains(
|
||||
"""
|
||||
OK 00
|
||||
err: layout - EOF header byte 1 incorrect
|
||||
OK e4
|
||||
OK 5f5ff3
|
||||
""");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCliEclipsesInput() {
|
||||
void testCliEclipsesInput() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(CODE_STOP_ONLY.getBytes(UTF_8));
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
new CodeValidateSubCommand(bais, new PrintStream(baos));
|
||||
final CommandLine cmd = new CommandLine(codeValidateSubCommand);
|
||||
cmd.parseArgs(CODE_RETF_ONLY);
|
||||
cmd.parseArgs(CODE_RETURN_ONLY);
|
||||
codeValidateSubCommand.run();
|
||||
assertThat(baos.toString(UTF_8)).contains("OK e4\n");
|
||||
assertThat(baos.toString(UTF_8)).contains("OK 5f5ff3\n");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInteriorCommentsSkipped() {
|
||||
void testInteriorCommentsSkipped() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]);
|
||||
final CodeValidateSubCommand codeValidateSubCommand =
|
||||
@@ -139,11 +139,11 @@ public class CodeValidationSubCommandTest {
|
||||
final CommandLine cmd = new CommandLine(codeValidateSubCommand);
|
||||
cmd.parseArgs(CODE_INTERIOR_COMMENTS);
|
||||
codeValidateSubCommand.run();
|
||||
assertThat(baos.toString(UTF_8)).contains("OK 59595959595959e3000150e4,f1e4\n");
|
||||
assertThat(baos.toString(UTF_8)).contains("OK 59595959e300015000,f8e4\n");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBlankLinesAndCommentsSkipped() {
|
||||
void testBlankLinesAndCommentsSkipped() {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final ByteArrayInputStream bais =
|
||||
new ByteArrayInputStream(("# comment\n\n#blank line\n\n" + CODE_MULTIPLE).getBytes(UTF_8));
|
||||
@@ -155,7 +155,7 @@ public class CodeValidationSubCommandTest {
|
||||
"""
|
||||
OK 00
|
||||
err: layout - EOF header byte 1 incorrect
|
||||
OK e4
|
||||
OK 5f5ff3
|
||||
""");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,6 +57,10 @@ public class EvmToolSpecTests {
|
||||
return findSpecFiles(new String[] {"b11r"});
|
||||
}
|
||||
|
||||
public static Object[][] prettyPrintTests() {
|
||||
return findSpecFiles(new String[] {"pretty-print"});
|
||||
}
|
||||
|
||||
public static Object[][] stateTestTests() {
|
||||
return findSpecFiles(new String[] {"state-test"});
|
||||
}
|
||||
@@ -110,7 +114,7 @@ public class EvmToolSpecTests {
|
||||
}
|
||||
|
||||
@ParameterizedTest(name = "{0}")
|
||||
@MethodSource({"b11rTests", "stateTestTests", "t8nTests", "traceTests"})
|
||||
@MethodSource({"b11rTests", "prettyPrintTests", "stateTestTests", "t8nTests", "traceTests"})
|
||||
void testBySpec(
|
||||
final String file,
|
||||
final JsonNode cliNode,
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"cli": [
|
||||
"pretty-print",
|
||||
"0xEF0001010004020001001304000000008000026000e20200030000fff65b5b00600160015500"
|
||||
],
|
||||
"stdin": "",
|
||||
"stdout": "0x # EOF\nef0001 # Magic and Version ( 1 )\n010004 # Types length ( 4 )\n020001 # Total code sections ( 1 )\n 0013 # Code section 0 , 19 bytes\n040000 # Data section length( 0 )\n 00 # Terminator (end of header)\n # Code section 0 types\n 00 # 0 inputs \n 80 # 0 outputs (Non-returning function)\n 0002 # max stack: 2\n # Code section 0 - in=0 out=non-returning height=2\n 6000 # [0] PUSH1(0)\ne20200030000fff6 # [2] RJUMPV(3,0,-10)\n 5b # [10] NOOP\n 5b # [11] NOOP\n 00 # [12] STOP\n 6001 # [13] PUSH1(1)\n 6001 # [15] PUSH1(1)\n 55 # [17] SSTORE\n 00 # [18] STOP\n # Data section (empty)\n"
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"cli": [
|
||||
"pretty-print",
|
||||
"0xef000101000402000100130300010043040000000080000436600060003736600060006000ec0060005500ef0001010004020001000b03000100200400000000800003366000600037366000ee00ef0001010004020001000d0400400000800002d10000600055d1002060015500"
|
||||
],
|
||||
"stdin": "",
|
||||
"stdout": "0x # EOF\nef0001 # Magic and Version ( 1 )\n010004 # Types length ( 4 )\n020001 # Total code sections ( 1 )\n 0013 # Code section 0 , 19 bytes\n030001 # Total subcontainers ( 1 )\n 0043 # Sub container 0, 67 byte\n040000 # Data section length( 0 )\n 00 # Terminator (end of header)\n # Code section 0 types\n 00 # 0 inputs \n 80 # 0 outputs (Non-returning function)\n 0004 # max stack: 4\n # Code section 0 - in=0 out=non-returning height=4\n 36 # [0] CALLDATASIZE\n 6000 # [1] PUSH1(0)\n 6000 # [3] PUSH1(0)\n 37 # [5] CALLDATACOPY\n 36 # [6] CALLDATASIZE\n 6000 # [7] PUSH1(0)\n 6000 # [9] PUSH1(0)\n 6000 # [11] PUSH1(0)\n ec00 # [13] EOFCREATE(0)\n 6000 # [15] PUSH1(0)\n 55 # [17] SSTORE\n 00 # [18] STOP\n # Subcontainer 0 starts here\n ef0001 # Magic and Version ( 1 )\n 010004 # Types length ( 4 )\n 020001 # Total code sections ( 1 )\n 000b # Code section 0 , 11 bytes\n 030001 # Total subcontainers ( 1 )\n 0020 # Sub container 0, 32 byte\n 040000 # Data section length( 0 ) \n 00 # Terminator (end of header)\n # Code section 0 types\n 00 # 0 inputs \n 80 # 0 outputs (Non-returning function)\n 0003 # max stack: 3\n # Code section 0 - in=0 out=non-returning height=3\n 36 # [0] CALLDATASIZE\n 6000 # [1] PUSH1(0)\n 6000 # [3] PUSH1(0)\n 37 # [5] CALLDATACOPY\n 36 # [6] CALLDATASIZE\n 6000 # [7] PUSH1(0)\n ee00 # [9] RETURNCONTRACT(0)\n # Subcontainer 0.0 starts here\n ef0001 # Magic and Version ( 1 )\n 010004 # Types length ( 4 )\n 020001 # Total code sections ( 1 )\n 000d # Code section 0 , 13 bytes\n 040040 # Data section length( 64 ) (actual size 0) \n 00 # Terminator (end of header)\n # Code section 0 types\n 00 # 0 inputs \n 80 # 0 outputs (Non-returning function)\n 0002 # max stack: 2\n # Code section 0 - in=0 out=non-returning height=2\n d10000 # [0] DATALOADN(0x0000)\n 6000 # [3] PUSH1(0)\n 55 # [5] SSTORE\n d10020 # [6] DATALOADN(0x0020)\n 6001 # [9] PUSH1(1)\n 55 # [11] SSTORE\n 00 # [12] STOP\n # Data section (empty)\n # Subcontainer 0.0 ends\n # Data section (empty)\n # Subcontainer 0 ends\n # Data section (empty)\n"
|
||||
}
|
||||
@@ -99,6 +99,6 @@
|
||||
{"pc":81,"op":72,"gas":"0x79bc22","gasCost":"0x2","memSize":0,"stack":["0x0","0x1","0x1","0x2","0x2","0xffff","0x1f4","0x78859e5b97166c486532b1595a673e9f9073643f1b519c6f18511b9913","0x2","0x389","0x0","0x0","0x1","0x0","0x3e3d6d5ff042148d326c1898713a76759ca273","0x44852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d","0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b"],"depth":1,"refund":0,"opName":"BASEFEE"},
|
||||
{"pc":82,"op":8,"gas":"0x79bc20","gasCost":"0x8","memSize":0,"stack":["0x0","0x1","0x1","0x2","0x2","0xffff","0x1f4","0x78859e5b97166c486532b1595a673e9f9073643f1b519c6f18511b9913","0x2","0x389","0x0","0x0","0x1","0x0","0x3e3d6d5ff042148d326c1898713a76759ca273","0x44852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d","0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b","0x10"],"depth":1,"refund":0,"opName":"ADDMOD"},
|
||||
{"pc":83,"op":62,"gas":"0x79bc18","gasCost":"0x0","memSize":0,"stack":["0x0","0x1","0x1","0x2","0x2","0xffff","0x1f4","0x78859e5b97166c486532b1595a673e9f9073643f1b519c6f18511b9913","0x2","0x389","0x0","0x0","0x1","0x0","0x3e3d6d5ff042148d326c1898713a76759ca273","0xb94f5374fce5edbc8e2a8697c15331677e6ebf1b"],"depth":1,"refund":0,"opName":"RETURNDATACOPY","error":"Out of bounds"},
|
||||
{"output":"","gasUsed":"0x7a1200","test":"00000936-mixed-1","fork":"Shanghai","d":0,"g":0,"v":0,"postHash":"0xd14c10ed22a1cfb642e374be985ac581c39f3969bd59249e0405aca3beb47a47","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":false}
|
||||
{"output":"","gasUsed":"0x7a1200","test":"00000936-mixed-1","fork":"Shanghai","d":0,"g":0,"v":0,"postHash":"0xd14c10ed22a1cfb642e374be985ac581c39f3969bd59249e0405aca3beb47a47","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":false,"error":"INVALID_RETURN_DATA_BUFFER_ACCESS"}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,86 @@
|
||||
{
|
||||
"cli": [
|
||||
"state-test",
|
||||
"stdin",
|
||||
"--trace",
|
||||
"--trace.memory",
|
||||
"--trace.stack",
|
||||
"--trace.returndata",
|
||||
"--notime"
|
||||
],
|
||||
"stdin": {
|
||||
"create-eof": {
|
||||
"env": {
|
||||
"currentCoinbase": "b94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||
"currentDifficulty": "0x20000",
|
||||
"currentRandom": "0x0000000000000000000000000000000000000000000000000000000000020000",
|
||||
"currentGasLimit": "0x26e1f476fe1e22",
|
||||
"currentNumber": "0x2",
|
||||
"currentTimestamp": "0x3e8",
|
||||
"previousHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"currentBaseFee": "0x10"
|
||||
},
|
||||
"pre": {
|
||||
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": {
|
||||
"code": "0x",
|
||||
"storage": {},
|
||||
"balance": "0xffffffffff",
|
||||
"nonce": "0x0"
|
||||
}
|
||||
},
|
||||
"transaction": {
|
||||
"gasPrice": "0x10",
|
||||
"nonce": "0x0",
|
||||
"to": null,
|
||||
"data": [
|
||||
"ef00010100040200010009030001001404000000008000035f355f5fa15f5fee00ef00010100040200010001040000000080000000c0de471fe5"
|
||||
],
|
||||
"gasLimit": [
|
||||
"0x7a1200"
|
||||
],
|
||||
"value": [
|
||||
"0xdbbe"
|
||||
],
|
||||
"secretKey": "0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8"
|
||||
},
|
||||
"out": "0x",
|
||||
"post": {
|
||||
"Prague": [
|
||||
{
|
||||
"hash": "0x1a8642a04dae90535f00f53d3a30284c4db051d508a653db89eb100ba9aecbf3",
|
||||
"logs": "0xf48b954a6a6f4ce6b28e4950b7027413f4bdc8f459df6003b6e8d7a1567c8940",
|
||||
"indexes": {
|
||||
"data": 0,
|
||||
"gas": 0,
|
||||
"value": 0
|
||||
}
|
||||
}
|
||||
],
|
||||
"Cancun": [
|
||||
{
|
||||
"hash": "0xaa80d89bc89f58da8de41d3894bd1a241896ff91f7a5964edaefb39e8e3a4a98",
|
||||
"logs": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
|
||||
"indexes": {
|
||||
"data": 0,
|
||||
"gas": 0,
|
||||
"value": 0
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"stdout": [
|
||||
{"pc":0,"section":0,"op":95,"gas":"0x794068","gasCost":"0x2","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":1,"section":0,"op":53,"gas":"0x794066","gasCost":"0x3","memSize":0,"stack":["0x0"],"depth":1,"refund":0,"opName":"CALLDATALOAD"},
|
||||
{"pc":2,"section":0,"op":95,"gas":"0x794063","gasCost":"0x2","memSize":0,"stack":["0xc0de471fe5000000000000000000000000000000000000000000000000000000"],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":3,"section":0,"op":95,"gas":"0x794061","gasCost":"0x2","memSize":0,"stack":["0xc0de471fe5000000000000000000000000000000000000000000000000000000","0x0"],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":4,"section":0,"op":161,"gas":"0x79405f","gasCost":"0x2ee","memSize":0,"stack":["0xc0de471fe5000000000000000000000000000000000000000000000000000000","0x0","0x0"],"depth":1,"refund":0,"opName":"LOG1"},
|
||||
{"pc":5,"section":0,"op":95,"gas":"0x793d71","gasCost":"0x2","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":6,"section":0,"op":95,"gas":"0x793d6f","gasCost":"0x2","memSize":0,"stack":["0x0"],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":7,"section":0,"op":238,"immediate":"0x00","gas":"0x793d6d","gasCost":"0x0","memSize":0,"stack":["0x0","0x0"],"depth":1,"refund":0,"opName":"RETURNCONTRACT"},
|
||||
{"output":"","gasUsed":"0xe433","test":"create-eof","fork":"Prague","d":0,"g":0,"v":0,"postHash":"0x1a8642a04dae90535f00f53d3a30284c4db051d508a653db89eb100ba9aecbf3","postLogsHash":"0xf48b954a6a6f4ce6b28e4950b7027413f4bdc8f459df6003b6e8d7a1567c8940","pass":true},
|
||||
{"pc":0,"op":239,"gas":"0x794068","gasCost":"0x0","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"INVALID","error":"Bad instruction"},
|
||||
{"output":"","gasUsed":"0x7a1200","test":"create-eof","fork":"Cancun","d":0,"g":0,"v":0,"postHash":"0xaa80d89bc89f58da8de41d3894bd1a241896ff91f7a5964edaefb39e8e3a4a98","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":true,"error":"INVALID_OPERATION"}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,78 @@
|
||||
{
|
||||
"cli": [
|
||||
"state-test",
|
||||
"stdin",
|
||||
"--trace",
|
||||
"--trace.memory",
|
||||
"--trace.stack",
|
||||
"--trace.returndata",
|
||||
"--notime"
|
||||
],
|
||||
"stdin": {
|
||||
"create-eof": {
|
||||
"env": {
|
||||
"currentCoinbase": "b94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||
"currentDifficulty": "0x20000",
|
||||
"currentRandom": "0x0000000000000000000000000000000000000000000000000000000000020000",
|
||||
"currentGasLimit": "0x26e1f476fe1e22",
|
||||
"currentNumber": "0x2",
|
||||
"currentTimestamp": "0x3e8",
|
||||
"previousHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"currentBaseFee": "0x10"
|
||||
},
|
||||
"pre": {
|
||||
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": {
|
||||
"code": "0x",
|
||||
"storage": {},
|
||||
"balance": "0xffffffffff",
|
||||
"nonce": "0x0"
|
||||
}
|
||||
},
|
||||
"transaction": {
|
||||
"gasPrice": "0x10",
|
||||
"nonce": "0x0",
|
||||
"to": null,
|
||||
"data": [
|
||||
"ef00011100040200010009030001001404000000008000035f355f5fa15f5fee00ef00010100040200010001040000000080000000c0de471fe5"
|
||||
],
|
||||
"gasLimit": [
|
||||
"0x7a1200"
|
||||
],
|
||||
"value": [
|
||||
"0xdbbe"
|
||||
],
|
||||
"secretKey": "0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8"
|
||||
},
|
||||
"out": "0x",
|
||||
"post": {
|
||||
"Prague": [
|
||||
{
|
||||
"hash": "0x1a8642a04dae90535f00f53d3a30284c4db051d508a653db89eb100ba9aecbf3",
|
||||
"logs": "0xf48b954a6a6f4ce6b28e4950b7027413f4bdc8f459df6003b6e8d7a1567c8940",
|
||||
"indexes": {
|
||||
"data": 0,
|
||||
"gas": 0,
|
||||
"value": 0
|
||||
}
|
||||
}
|
||||
],
|
||||
"Cancun": [
|
||||
{
|
||||
"hash": "0xaa80d89bc89f58da8de41d3894bd1a241896ff91f7a5964edaefb39e8e3a4a98",
|
||||
"logs": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
|
||||
"indexes": {
|
||||
"data": 0,
|
||||
"gas": 0,
|
||||
"value": 0
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"stdout": [
|
||||
{"output":"","gasUsed":"0xd198","test":"create-eof","fork":"Prague","d":0,"g":0,"v":0,"postHash":"0x2a9c58298ba5d4ec86ca682b9fcc9ff67c3fc44dbd39f85a2f9b74bfe4e5178e","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":false,"error":"Invalid EOF Layout: Expected kind 1 but read kind 17"},
|
||||
{"pc":0,"op":239,"gas":"0x794068","gasCost":"0x0","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"INVALID","error":"Bad instruction"},
|
||||
{"output":"","gasUsed":"0x7a1200","test":"create-eof","fork":"Cancun","d":0,"g":0,"v":0,"postHash":"0xaa80d89bc89f58da8de41d3894bd1a241896ff91f7a5964edaefb39e8e3a4a98","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":true,"error":"INVALID_OPERATION"}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"cli": [
|
||||
"--notime",
|
||||
"--json",
|
||||
"--create",
|
||||
"--code",
|
||||
"ef00010100040200010009030001001404000000008000035f355f5fa15f5fee00ef00010100040200010001040000000080000000c0de471fe5",
|
||||
"--coinbase",
|
||||
"4444588443C3A91288C5002483449ABA1054192B",
|
||||
"--fork",
|
||||
"pragueeof"
|
||||
],
|
||||
"stdin": "",
|
||||
"stdout": [
|
||||
{"pc":0,"section":0,"op":95,"gas":"0x2540be400","gasCost":"0x2","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":1,"section":0,"op":53,"gas":"0x2540be3fe","gasCost":"0x3","memSize":0,"stack":["0x0"],"depth":1,"refund":0,"opName":"CALLDATALOAD"},
|
||||
{"pc":2,"section":0,"op":95,"gas":"0x2540be3fb","gasCost":"0x2","memSize":0,"stack":["0xc0de471fe5000000000000000000000000000000000000000000000000000000"],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":3,"section":0,"op":95,"gas":"0x2540be3f9","gasCost":"0x2","memSize":0,"stack":["0xc0de471fe5000000000000000000000000000000000000000000000000000000","0x0"],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":4,"section":0,"op":161,"gas":"0x2540be3f7","gasCost":"0x2ee","memSize":0,"stack":["0xc0de471fe5000000000000000000000000000000000000000000000000000000","0x0","0x0"],"depth":1,"refund":0,"opName":"LOG1"},
|
||||
{"pc":5,"section":0,"op":95,"gas":"0x2540be109","gasCost":"0x2","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":6,"section":0,"op":95,"gas":"0x2540be107","gasCost":"0x2","memSize":0,"stack":["0x0"],"depth":1,"refund":0,"opName":"PUSH0"},
|
||||
{"pc":7,"section":0,"op":238,"immediate":"0x00","gas":"0x2540be105","gasCost":"0x0","memSize":0,"stack":["0x0","0x0"],"depth":1,"refund":0,"opName":"RETURNCONTRACT"},
|
||||
{"gasUser":"0x129b","gasTotal":"0x129b","output":"0x"}
|
||||
]
|
||||
}
|
||||
@@ -57,6 +57,7 @@ dependencies {
|
||||
implementation 'org.jetbrains.kotlin:kotlin-stdlib'
|
||||
implementation 'org.owasp.encoder:encoder'
|
||||
implementation 'org.xerial.snappy:snappy-java'
|
||||
implementation 'commons-net:commons-net'
|
||||
|
||||
annotationProcessor "org.immutables:value"
|
||||
implementation "org.immutables:value-annotations"
|
||||
|
||||
@@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.p2p.permissions;
|
||||
|
||||
import org.hyperledger.besu.ethereum.p2p.peers.Peer;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Manages peer permissions based on IP subnet restrictions.
|
||||
*
|
||||
* <p>This class extends {@link PeerPermissions} to implement access control based on IP subnets. It
|
||||
* allows for the configuration of permitted subnets and uses these configurations to determine
|
||||
* whether a peer should be allowed or denied access based on its IP address.
|
||||
*
|
||||
* <p>Note: If no subnets are specified, all peers are considered permitted by default.
|
||||
*
|
||||
* @see PeerPermissions
|
||||
*/
|
||||
public class PeerPermissionSubnet extends PeerPermissions {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(PeerPermissionSubnet.class);
|
||||
|
||||
private final List<SubnetInfo> allowedSubnets;
|
||||
|
||||
/**
|
||||
* Constructs a new {@code PeerPermissionSubnet} instance with specified allowed subnets.
|
||||
*
|
||||
* @param allowedSubnets A list of {@link SubnetInfo} objects representing the subnets that are
|
||||
* allowed to interact with the local node. Cannot be {@code null}.
|
||||
*/
|
||||
public PeerPermissionSubnet(final List<SubnetInfo> allowedSubnets) {
|
||||
this.allowedSubnets = allowedSubnets;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if a peer is permitted based on the configured subnets.
|
||||
*
|
||||
* <p>This method checks if the remote peer's IP address falls within any of the configured
|
||||
* allowed subnets. If the peer's IP is within any of the allowed subnets, it is permitted.
|
||||
* Otherwise, it is denied.
|
||||
*
|
||||
* @param localNode This parameter is not used in the current implementation.
|
||||
* @param remotePeer The remote peer to check. Its IP address is used to determine permission.
|
||||
   * @param action Ignored. If the peer is not allowed in the subnet, no actions are allowed.
|
||||
* @return {@code true} if the peer is permitted based on its IP address; {@code false} otherwise.
|
||||
*/
|
||||
@Override
|
||||
public boolean isPermitted(final Peer localNode, final Peer remotePeer, final Action action) {
|
||||
// If no subnets are specified, all peers are permitted
|
||||
if (allowedSubnets == null || allowedSubnets.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
String remotePeerHostAddress = remotePeer.getEnodeURL().getIpAsString();
|
||||
for (SubnetInfo subnet : allowedSubnets) {
|
||||
if (subnet.isInRange(remotePeerHostAddress)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
LOG.trace("Peer {} is not allowed in any of the configured subnets.", remotePeerHostAddress);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
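For orientation, here is a minimal sketch (not part of the commit) of the commons-net CIDR check that PeerPermissionSubnet delegates to; the class name and sample addresses are illustrative only:

import org.apache.commons.net.util.SubnetUtils;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;

public class SubnetCheckSketch {
  public static void main(final String[] args) {
    // Build the subnet descriptor once; isInRange performs the per-peer IP check
    // that PeerPermissionSubnet.isPermitted loops over.
    final SubnetInfo subnet = new SubnetUtils("10.0.0.0/24").getInfo();
    System.out.println(subnet.isInRange("10.0.0.42"));   // true  - inside the /24
    System.out.println(subnet.isInRange("192.168.1.5")); // false - outside the /24
  }
}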
@@ -29,13 +29,13 @@ import org.bouncycastle.jce.provider.BouncyCastleProvider;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
|
||||
@ExtendWith(VertxExtension.class)
|
||||
class DNSDaemonTest {
|
||||
private static final int EXPECTED_SEQ = 932;
|
||||
private static final String holeskyEnr =
|
||||
"enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.holesky.ethdisco.net";
|
||||
private final MockDnsServerVerticle mockDnsServerVerticle = new MockDnsServerVerticle();
|
||||
@@ -54,13 +54,24 @@ class DNSDaemonTest {
|
||||
|
||||
@Test
|
||||
@DisplayName("Test DNS Daemon with a mock DNS server")
|
||||
void testDNSDaemon(final Vertx vertx, final VertxTestContext testContext)
|
||||
throws InterruptedException {
|
||||
void testDNSDaemon(final Vertx vertx, final VertxTestContext testContext) {
|
||||
final Checkpoint checkpoint = testContext.checkpoint();
|
||||
dnsDaemon =
|
||||
new DNSDaemon(
|
||||
holeskyEnr,
|
||||
(seq, records) -> checkpoint.flag(),
|
||||
(seq, records) -> {
|
||||
if (seq != EXPECTED_SEQ) {
|
||||
testContext.failNow(
|
||||
String.format(
|
||||
"Expecting sequence to be %d in first pass but got: %d",
|
||||
EXPECTED_SEQ, seq));
|
||||
}
|
||||
if (records.size() != 115) {
|
||||
testContext.failNow(
|
||||
"Expecting 115 records in first pass but got: " + records.size());
|
||||
}
|
||||
checkpoint.flag();
|
||||
},
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
@@ -74,7 +85,6 @@ class DNSDaemonTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled("this test is flaky")
|
||||
@DisplayName("Test DNS Daemon with periodic lookup to a mock DNS server")
|
||||
void testDNSDaemonPeriodic(final Vertx vertx, final VertxTestContext testContext)
|
||||
throws InterruptedException {
|
||||
@@ -87,18 +97,28 @@ class DNSDaemonTest {
|
||||
(seq, records) -> {
|
||||
switch (pass.incrementAndGet()) {
|
||||
case 1:
|
||||
testContext.verify(
|
||||
() -> {
|
||||
assertThat(seq).isEqualTo(932);
|
||||
assertThat(records).hasSize(115);
|
||||
});
|
||||
if (seq != EXPECTED_SEQ) {
|
||||
testContext.failNow(
|
||||
String.format(
|
||||
"Expecting sequence to be %d in first pass but got: %d",
|
||||
EXPECTED_SEQ, seq));
|
||||
}
|
||||
if (records.size() != 115) {
|
||||
testContext.failNow(
|
||||
"Expecting 115 records in first pass but got: " + records.size());
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
testContext.verify(
|
||||
() -> {
|
||||
assertThat(seq).isEqualTo(932);
|
||||
assertThat(records).isEmpty();
|
||||
});
|
||||
if (seq != EXPECTED_SEQ) {
|
||||
testContext.failNow(
|
||||
String.format(
|
||||
"Expecting sequence to be %d in second pass but got: %d",
|
||||
EXPECTED_SEQ, seq));
|
||||
}
|
||||
if (!records.isEmpty()) {
|
||||
testContext.failNow(
|
||||
"Expecting 0 records in second pass but got: " + records.size());
|
||||
}
|
||||
break;
|
||||
default:
|
||||
testContext.failNow("Third pass is not expected");
|
||||
@@ -107,7 +127,7 @@ class DNSDaemonTest {
|
||||
},
|
||||
0,
|
||||
1, // initial delay
|
||||
300, // second lookup after 300 ms (due to Mock DNS server, we are very quick).
|
||||
3000, // second lookup after 3 seconds (the thread scheduling can be slower in CI)
|
||||
"localhost:" + mockDnsServerVerticle.port());
|
||||
|
||||
final DeploymentOptions options =
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.p2p.permissions;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
import org.hyperledger.besu.ethereum.p2p.peers.DefaultPeer;
|
||||
import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
|
||||
import org.hyperledger.besu.ethereum.p2p.peers.Peer;
|
||||
import org.hyperledger.besu.ethereum.p2p.permissions.PeerPermissions.Action;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.net.util.SubnetUtils;
|
||||
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
public class PeerPermissionsSubnetTest {
|
||||
|
||||
private final Peer remoteNode = createPeer();
|
||||
|
||||
@Test
|
||||
public void peerInSubnetRangeShouldBePermitted() {
|
||||
List<SubnetInfo> allowedSubnets = List.of(subnet("127.0.0.0/24"));
|
||||
PeerPermissionSubnet peerPermissionSubnet = new PeerPermissionSubnet(allowedSubnets);
|
||||
checkPermissions(peerPermissionSubnet, remoteNode, true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void peerInAtLeastOneSubnetRangeShouldBePermitted() {
|
||||
List<SubnetInfo> allowedSubnets = List.of(subnet("127.0.0.0/24"), subnet("10.0.0.1/24"));
|
||||
PeerPermissionSubnet peerPermissionSubnet = new PeerPermissionSubnet(allowedSubnets);
|
||||
checkPermissions(peerPermissionSubnet, remoteNode, true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void peerOutSubnetRangeShouldNotBePermitted() {
|
||||
List<SubnetInfo> allowedSubnets = List.of(subnet("10.0.0.0/24"));
|
||||
PeerPermissionSubnet peerPermissionSubnet = new PeerPermissionSubnet(allowedSubnets);
|
||||
checkPermissions(peerPermissionSubnet, remoteNode, false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void peerShouldBePermittedIfNoSubnets() {
|
||||
PeerPermissionSubnet peerPermissionSubnet = new PeerPermissionSubnet(List.of());
|
||||
checkPermissions(peerPermissionSubnet, remoteNode, true);
|
||||
}
|
||||
|
||||
private void checkPermissions(
|
||||
final PeerPermissions peerPermissions, final Peer remotePeer, final boolean expectedResult) {
|
||||
for (Action action : Action.values()) {
|
||||
assertThat(peerPermissions.isPermitted(createPeer(), remotePeer, action))
|
||||
.isEqualTo(expectedResult);
|
||||
}
|
||||
}
|
||||
|
||||
private SubnetInfo subnet(final String subnet) {
|
||||
return new SubnetUtils(subnet).getInfo();
|
||||
}
|
||||
|
||||
private Peer createPeer() {
|
||||
return DefaultPeer.fromEnodeURL(
|
||||
EnodeURLImpl.builder()
|
||||
.nodeId(Peer.randomId())
|
||||
.ipAddress("127.0.0.1")
|
||||
.discoveryAndListeningPorts(EnodeURLImpl.DEFAULT_LISTENING_PORT)
|
||||
.build());
|
||||
}
|
||||
}
|
||||
@@ -129,6 +129,21 @@ def generalstateRegressionReferenceTests = tasks.register("generalstateRegressio
|
||||
)
|
||||
}
|
||||
|
||||
def eofReferenceTests = tasks.register("eofReferenceTests") {
|
||||
final referenceTestsPath = "src/reference-test/external-resources/EOFTests"
|
||||
final generatedTestsPath = "$buildDir/generated/sources/reference-test/$name/java"
|
||||
inputs.files fileTree(referenceTestsPath),
|
||||
fileTree(generatedTestsPath)
|
||||
outputs.files generatedTestsPath
|
||||
generateTestFiles(
|
||||
fileTree(referenceTestsPath),
|
||||
file("src/reference-test/templates/EOFReferenceTest.java.template"),
|
||||
"EOFTests",
|
||||
"$generatedTestsPath/org/hyperledger/besu/ethereum/vm/eof",
|
||||
"EOFReferenceTest"
|
||||
)
|
||||
}
|
||||
|
||||
sourceSets {
|
||||
referenceTest {
|
||||
java {
|
||||
@@ -140,7 +155,8 @@ sourceSets {
|
||||
eipStateReferenceTests,
|
||||
executionSpecTests,
|
||||
generalstateReferenceTests,
|
||||
generalstateRegressionReferenceTests
|
||||
generalstateRegressionReferenceTests,
|
||||
eofReferenceTests
|
||||
}
|
||||
resources {
|
||||
srcDirs 'src/reference-test/resources',
|
||||
@@ -247,24 +263,20 @@ def generateTestFiles(FileTree jsonPath, File templateFile, String pathstrip, St
|
||||
mkdir(destination)
|
||||
def referenceTestTemplate = templateFile.text
|
||||
|
||||
// This is how many json files to include in each test file
|
||||
def fileSets = jsonPath.getFiles().collate(5)
|
||||
|
||||
fileSets.eachWithIndex { fileSet, idx ->
|
||||
def paths = []
|
||||
fileSet.each { testJsonFile ->
|
||||
def parentFile = testJsonFile.getParentFile()
|
||||
def parentPathFile = parentFile.getPath().substring(parentFile.getPath().indexOf(pathstrip))
|
||||
if (!testJsonFile.getName().toString().startsWith(".") && !excludedPath.contains(parentPathFile)) {
|
||||
def pathFile = testJsonFile.getPath()
|
||||
paths << pathFile.substring(pathFile.indexOf(pathstrip))
|
||||
}
|
||||
def paths = []
|
||||
jsonPath.getFiles().forEach { testJsonFile ->
|
||||
def parentFile = testJsonFile.getParentFile()
|
||||
def parentPathFile = parentFile.getPath().substring(parentFile.getPath().indexOf(pathstrip))
|
||||
if (!testJsonFile.getName().toString().startsWith(".") && !excludedPath.contains(parentPathFile)) {
|
||||
def pathFile = testJsonFile.getPath()
|
||||
paths << pathFile.substring(pathFile.indexOf(pathstrip))
|
||||
}
|
||||
}
|
||||
|
||||
paths.collate(5).eachWithIndex { tests, idx ->
|
||||
def testFile = file(destination + "/" + namePrefix + "_" + idx + ".java")
|
||||
|
||||
|
||||
def allPaths = '"' + paths.join('", "') + '"'
|
||||
def allPaths = '"' + tests.join('",\n "') + '"'
|
||||
|
||||
def testFileContents = referenceTestTemplate
|
||||
.replaceAll("%%TESTS_FILE%%", allPaths)
|
||||
|
||||
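As a rough Java approximation (an assumption for illustration, not taken from the build script) of what Groovy's collate(5) does above — splitting the list of test JSON paths into chunks of at most five per generated test class:

import java.util.ArrayList;
import java.util.List;

public class CollateSketch {
  // Split a list into consecutive chunks of at most `size` elements, like Groovy's collate(size).
  static <T> List<List<T>> collate(final List<T> items, final int size) {
    final List<List<T>> chunks = new ArrayList<>();
    for (int i = 0; i < items.size(); i += size) {
      chunks.add(items.subList(i, Math.min(i + size, items.size())));
    }
    return chunks;
  }

  public static void main(final String[] args) {
    System.out.println(collate(List.of("a", "b", "c", "d", "e", "f", "g"), 5));
    // prints [[a, b, c, d, e], [f, g]]
  }
}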
@@ -0,0 +1,53 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.referencetests;
|
||||
|
||||
import java.util.NavigableMap;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class EOFTestCaseSpec {
|
||||
|
||||
public record TestVector(
|
||||
@JsonProperty("code") String code,
|
||||
@JsonProperty("results") NavigableMap<String, TestResult> results) {}
|
||||
|
||||
public record TestResult(
|
||||
@JsonProperty("exception") String exception, @JsonProperty("result") boolean result) {
|
||||
public static TestResult TEST_RESULT_PASSED = new TestResult(null, true);
|
||||
|
||||
public static TestResult failed(final String exception) {
|
||||
return new TestResult(exception, false);
|
||||
}
|
||||
|
||||
public static TestResult passed() {
|
||||
return TEST_RESULT_PASSED;
|
||||
}
|
||||
}
|
||||
|
||||
NavigableMap<String, TestVector> vector;
|
||||
|
||||
@JsonCreator
|
||||
public EOFTestCaseSpec(@JsonProperty("vectors") final NavigableMap<String, TestVector> vector) {
|
||||
this.vector = vector;
|
||||
}
|
||||
|
||||
public NavigableMap<String, TestVector> getVector() {
|
||||
return vector;
|
||||
}
|
||||
}
|
||||
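A hedged illustration of the JSON shape this spec maps: a top-level "vectors" map whose entries carry "code" plus per-fork "results". The fixture below is made up, and it assumes a Jackson version with record support is on the classpath:

import com.fasterxml.jackson.databind.ObjectMapper;

import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec;

public class EOFTestCaseSpecSketch {
  public static void main(final String[] args) throws Exception {
    // Hypothetical minimal fixture matching the "vectors" -> {code, results} layout above.
    final String json =
        "{\"vectors\": {\"sample_vector\": {"
            + "\"code\": \"0xef0001\","
            + "\"results\": {\"Prague\": {\"exception\": \"EOF_InvalidCode\", \"result\": false}}}}}";
    final EOFTestCaseSpec spec = new ObjectMapper().readValue(json, EOFTestCaseSpec.class);
    System.out.println(
        spec.getVector().get("sample_vector").results().get("Prague").exception());
  }
}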
@@ -86,7 +86,7 @@ public class ReferenceTestProtocolSchedules {
|
||||
builder.put(
|
||||
"CancunToPragueAtTime15k",
|
||||
createSchedule(genesisStub.clone().cancunTime(0).pragueTime(15000)));
|
||||
builder.put("Prague", createSchedule(genesisStub.clone().pragueTime(0)));
|
||||
builder.put("Prague", createSchedule(genesisStub.clone().pragueEOFTime(0)));
|
||||
builder.put("Future_EIPs", createSchedule(genesisStub.clone().futureEipsTime(0)));
|
||||
builder.put("Experimental_EIPs", createSchedule(genesisStub.clone().experimentalEipsTime(0)));
|
||||
return new ReferenceTestProtocolSchedules(builder.build());
|
||||
|
||||
@@ -111,7 +111,7 @@ public class StateTestVersionedTransaction {
|
||||
this.maxFeePerGas = Optional.ofNullable(maxFeePerGas).map(Wei::fromHexString).orElse(null);
|
||||
this.maxPriorityFeePerGas =
|
||||
Optional.ofNullable(maxPriorityFeePerGas).map(Wei::fromHexString).orElse(null);
|
||||
this.to = to.isEmpty() ? null : Address.fromHexString(to);
|
||||
this.to = (to == null || to.isEmpty()) ? null : Address.fromHexString(to);
|
||||
|
||||
SignatureAlgorithm signatureAlgorithm = SignatureAlgorithmFactory.getInstance();
|
||||
this.keys =
|
||||
|
||||
@@ -0,0 +1,142 @@
|
||||
/*
|
||||
* Copyright contributors to Hyperledger Besu.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License.
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package org.hyperledger.besu.ethereum.eof;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec;
|
||||
import org.hyperledger.besu.evm.Code;
|
||||
import org.hyperledger.besu.evm.EvmSpecVersion;
|
||||
import org.hyperledger.besu.evm.code.CodeFactory;
|
||||
import org.hyperledger.besu.evm.code.CodeInvalid;
|
||||
import org.hyperledger.besu.evm.code.CodeV1;
|
||||
import org.hyperledger.besu.evm.code.CodeV1Validation;
|
||||
import org.hyperledger.besu.evm.code.EOFLayout;
|
||||
import org.hyperledger.besu.testutil.JsonTestParameters;
|
||||
|
||||
public class EOFReferenceTestTools {
|
||||
private static final List<String> EIPS_TO_RUN;
|
||||
|
||||
static {
|
||||
final String eips =
|
||||
System.getProperty("test.ethereum.eof.eips", "Prague,Osaka,Amsterdam,Bogota,Polis,Bangkok");
|
||||
EIPS_TO_RUN = Arrays.asList(eips.split(","));
|
||||
}
|
||||
|
||||
private static final JsonTestParameters<?, ?> params =
|
||||
JsonTestParameters.create(EOFTestCaseSpec.class, EOFTestCaseSpec.TestResult.class)
|
||||
.generator(
|
||||
(testName, fullPath, eofSpec, collector) -> {
|
||||
final Path path = Path.of(fullPath).getParent().getFileName();
|
||||
final String prefix = path + "/" + testName + "-";
|
||||
for (final Map.Entry<String, EOFTestCaseSpec.TestVector> entry :
|
||||
eofSpec.getVector().entrySet()) {
|
||||
final String name = entry.getKey();
|
||||
final Bytes code = Bytes.fromHexString(entry.getValue().code());
|
||||
for (final var result : entry.getValue().results().entrySet()) {
|
||||
final String eip = result.getKey();
|
||||
final boolean runTest = EIPS_TO_RUN.contains(eip);
|
||||
collector.add(
|
||||
prefix + eip + '[' + name + ']',
|
||||
fullPath,
|
||||
eip,
|
||||
code,
|
||||
result.getValue(),
|
||||
runTest);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
static {
|
||||
if (EIPS_TO_RUN.isEmpty()) {
|
||||
params.ignoreAll();
|
||||
}
|
||||
|
||||
// TXCREATE still in tests, but has been removed
|
||||
params.ignore("EOF1_undefined_opcodes_186");
|
||||
}
|
||||
|
||||
private EOFReferenceTestTools() {
|
||||
// utility class
|
||||
}
|
||||
|
||||
//
|
||||
public static Collection<Object[]> generateTestParametersForConfig(final String[] filePath) {
|
||||
return params.generate(filePath);
|
||||
}
|
||||
|
||||
public static void executeTest(
|
||||
final String fork, final Bytes code, final EOFTestCaseSpec.TestResult expected) {
|
||||
EvmSpecVersion evmVersion = EvmSpecVersion.fromName(fork);
|
||||
assertThat(evmVersion).isNotNull();
|
||||
|
||||
// hardwire in the magic byte transaction checks
|
||||
if (evmVersion.getMaxEofVersion() < 1) {
|
||||
assertThat(expected.exception()).isEqualTo("EOF_InvalidCode");
|
||||
} else {
|
||||
EOFLayout layout = EOFLayout.parseEOF(code);
|
||||
|
||||
if (layout.isValid()) {
|
||||
Code parsedCode = CodeFactory.createCode(code, evmVersion.getMaxEofVersion());
|
||||
assertThat(parsedCode.isValid())
|
||||
.withFailMessage(
|
||||
() ->
|
||||
EOFLayout.parseEOF(code).prettyPrint()
|
||||
+ "\nExpected exception :"
|
||||
+ expected.exception()
|
||||
+ " actual exception :"
|
||||
+ (parsedCode.isValid()
|
||||
? null
|
||||
: ((CodeInvalid) parsedCode).getInvalidReason()))
|
||||
.isEqualTo(expected.result());
|
||||
if (parsedCode instanceof CodeV1 codeV1) {
|
||||
var deepValidate = CodeV1Validation.validate(codeV1.getEofLayout());
|
||||
assertThat(deepValidate)
|
||||
.withFailMessage(
|
||||
() ->
|
||||
codeV1.prettyPrint()
|
||||
+ "\nExpected exception :"
|
||||
+ expected.exception()
|
||||
+ " actual exception :"
|
||||
+ (parsedCode.isValid() ? null : deepValidate))
|
||||
.isNull();
|
||||
}
|
||||
|
||||
if (expected.result()) {
|
||||
System.out.println(code);
|
||||
System.out.println(layout.writeContainer(null));
|
||||
assertThat(code)
|
||||
.withFailMessage("Container round trip failed")
|
||||
.isEqualTo(layout.writeContainer(null));
|
||||
}
|
||||
} else {
|
||||
assertThat(layout.isValid())
|
||||
.withFailMessage(
|
||||
() ->
|
||||
"Expected exception - "
|
||||
+ expected.exception()
|
||||
+ " actual exception - "
|
||||
+ (layout.isValid() ? null : layout.invalidReason()))
|
||||
.isEqualTo(expected.result());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -53,9 +53,9 @@ public class BlockchainReferenceTestTools {
|
||||
final String networks =
|
||||
System.getProperty(
|
||||
"test.ethereum.blockchain.eips",
|
||||
"FrontierToHomesteadAt5,HomesteadToEIP150At5,HomesteadToDaoAt5,EIP158ToByzantiumAt5,"
|
||||
"FrontierToHomesteadAt5,HomesteadToEIP150At5,HomesteadToDaoAt5,EIP158ToByzantiumAt5,CancunToPragueAtTime15k"
|
||||
+ "Frontier,Homestead,EIP150,EIP158,Byzantium,Constantinople,ConstantinopleFix,Istanbul,Berlin,"
|
||||
+ "London,Merge,Paris,Shanghai,Cancun,Prague,Osaka,Bogota,CancunToPragueAtTime15k");
|
||||
+ "London,Merge,Paris,Shanghai,Cancun,Prague,Osaka,Amsterdam,Bogota,Polis,Bangkok");
|
||||
NETWORKS_TO_RUN = Arrays.asList(networks.split(","));
|
||||
}
|
||||
|
||||
@@ -75,21 +75,22 @@ public class BlockchainReferenceTestTools {
|
||||
|
||||
// Consumes a huge amount of memory
|
||||
params.ignore("static_Call1MB1024Calldepth_d1g0v0_\\w+");
|
||||
params.ignore("ShanghaiLove_.*");
|
||||
params.ignore("ShanghaiLove_");
|
||||
|
||||
// Absurd amount of gas, doesn't run in parallel
|
||||
params.ignore("randomStatetest94_\\w+");
|
||||
|
||||
// Don't do time-consuming tests
|
||||
params.ignore("CALLBlake2f_MaxRounds.*");
|
||||
params.ignore("loopMul_*");
|
||||
params.ignore("CALLBlake2f_MaxRounds");
|
||||
params.ignore("loopMul_");
|
||||
|
||||
// Inconclusive fork choice rule, since in merge CL should be choosing forks and setting the
|
||||
// chain head.
|
||||
// Perfectly valid test pre-merge.
|
||||
params.ignore("UncleFromSideChain_(Merge|Paris|Shanghai|Cancun|Prague|Osaka|Bogota)");
|
||||
params.ignore(
|
||||
"UncleFromSideChain_(Merge|Paris|Shanghai|Cancun|Prague|Osaka|Amsterdam|Bogota|Polis|Bangkok)");
|
||||
|
||||
// EOF tests are written against an older version of the spec
|
||||
    // EOF tests don't have Prague stuff like deposits right now
|
||||
params.ignore("/stEOF/");
|
||||
|
||||
    // None of the Prague tests have withdrawals and deposits handling
|
||||
|
||||
@@ -66,7 +66,7 @@ public class GeneralStateReferenceTestTools {
|
||||
System.getProperty(
|
||||
"test.ethereum.state.eips",
|
||||
"Frontier,Homestead,EIP150,EIP158,Byzantium,Constantinople,ConstantinopleFix,Istanbul,Berlin,"
|
||||
+ "London,Merge,Paris,Shanghai,Cancun,Prague,Osaka,Bogota");
|
||||
+ "London,Merge,Paris,Shanghai,Cancun,Prague,Osaka,Amsterdam,Bogota,Polis,Bangkok");
|
||||
EIPS_TO_RUN = Arrays.asList(eips.split(","));
|
||||
}
|
||||
|
||||
|
||||
@@ -16,20 +16,20 @@ import static org.junit.jupiter.api.Assumptions.assumeTrue;
|
||||
/** The blockchain test operation testing framework entry point. */
|
||||
public class %%TESTS_NAME%% {
|
||||
|
||||
private static final String[] TEST_CONFIG_FILE_DIR_PATH = new String[] {%%TESTS_FILE%%};
|
||||
private static final String[] TEST_CONFIG_FILE_DIR_PATH =
|
||||
new String[] {
|
||||
%%TESTS_FILE%%
|
||||
};
|
||||
|
||||
public static Stream<Arguments> getTestParametersForConfig() {
|
||||
return generateTestParametersForConfig(TEST_CONFIG_FILE_DIR_PATH).stream().map(params ->
|
||||
Arguments.of(params[0], params[1], params[2])
|
||||
);
|
||||
return generateTestParametersForConfig(TEST_CONFIG_FILE_DIR_PATH).stream()
|
||||
.map(params -> Arguments.of(params[0], params[1], params[2]));
|
||||
}
|
||||
|
||||
@ParameterizedTest(name = "Name: {0}")
|
||||
@MethodSource("getTestParametersForConfig")
|
||||
public void execution(
|
||||
final String name,
|
||||
final BlockchainReferenceTestCaseSpec spec,
|
||||
final boolean runTest) {
|
||||
final String name, final BlockchainReferenceTestCaseSpec spec, final boolean runTest) {
|
||||
assumeTrue(runTest, "Test " + name + " was ignored");
|
||||
executeTest(spec);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
package org.hyperledger.besu.ethereum.vm.eof;
|
||||
|
||||
import static org.hyperledger.besu.ethereum.eof.EOFReferenceTestTools.executeTest;
|
||||
import static org.hyperledger.besu.ethereum.eof.EOFReferenceTestTools.generateTestParametersForConfig;
|
||||
import static org.junit.jupiter.api.Assumptions.assumeTrue;
|
||||
|
||||
import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
|
||||
|
||||
|
||||
/** The EOF reference test operation testing framework entry point. */
|
||||
public class %%TESTS_NAME%% {
|
||||
|
||||
private static final String[] TEST_CONFIG_FILE_DIR_PATH =
|
||||
new String[] {
|
||||
%%TESTS_FILE%%
|
||||
};
|
||||
|
||||
public static Stream<Arguments> getTestParametersForConfig() {
|
||||
return generateTestParametersForConfig(TEST_CONFIG_FILE_DIR_PATH).stream().map(Arguments::of);
|
||||
}
|
||||
|
||||
@ParameterizedTest(name = "Name: {0}")
|
||||
@MethodSource("getTestParametersForConfig")
|
||||
public void execution(
|
||||
final String name,
|
||||
final String fork,
|
||||
final Bytes code,
|
||||
final EOFTestCaseSpec.TestResult results,
|
||||
final boolean runTest) {
|
||||
assumeTrue(runTest, "Test " + name + " was ignored");
|
||||
executeTest(fork, code, results);
|
||||
}
|
||||
}
|
||||
@@ -17,20 +17,20 @@ import static org.junit.jupiter.api.Assumptions.assumeTrue;
|
||||
/** The general state test operation testing framework entry point. */
|
||||
public class %%TESTS_NAME%% {
|
||||
|
||||
private static final String[] TEST_CONFIG_FILE_DIR_PATH = new String[] {%%TESTS_FILE%%};
|
||||
private static final String[] TEST_CONFIG_FILE_DIR_PATH =
|
||||
new String[] {
|
||||
%%TESTS_FILE%%
|
||||
};
|
||||
|
||||
public static Stream<Arguments> getTestParametersForConfig() {
|
||||
return generateTestParametersForConfig(TEST_CONFIG_FILE_DIR_PATH).stream().map(params ->
|
||||
Arguments.of(params[0], params[1], params[2])
|
||||
);
|
||||
return generateTestParametersForConfig(TEST_CONFIG_FILE_DIR_PATH).stream()
|
||||
.map(params -> Arguments.of(params[0], params[1], params[2]));
|
||||
}
|
||||
|
||||
@ParameterizedTest(name = "Name: {0}")
|
||||
@MethodSource("getTestParametersForConfig")
|
||||
public void execution(
|
||||
final String name,
|
||||
final GeneralStateTestCaseEipSpec spec,
|
||||
final boolean runTest) {
|
||||
final String name, final GeneralStateTestCaseEipSpec spec, final boolean runTest) {
|
||||
assumeTrue(runTest, "Test " + name + " was ignored");
|
||||
executeTest(spec);
|
||||
}
|
||||
|
||||
@@ -17,6 +17,8 @@ package org.hyperledger.besu.evm;
|
||||
import org.hyperledger.besu.datatypes.Hash;
|
||||
import org.hyperledger.besu.evm.code.CodeSection;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.tuweni.bytes.Bytes;
|
||||
|
||||
/** Represents EVM code associated with an account. */
|
||||
@@ -30,6 +32,13 @@ public interface Code {
|
||||
*/
|
||||
int getSize();
|
||||
|
||||
/**
|
||||
   * Size of the data section in bytes. This is for the data section only, not the entire container.
|
||||
*
|
||||
   * @return size of the data section in bytes.
|
||||
*/
|
||||
int getDataSize();
|
||||
|
||||
/**
|
||||
* Get the bytes for the entire container, for example what EXTCODECOPY would want. For V0 it is
|
||||
* the same as getCodeBytes, for V1 it is the entire container, not just the data section.
|
||||
@@ -82,4 +91,63 @@ public interface Code {
|
||||
   * @return The version of the code.
|
||||
*/
|
||||
int getEofVersion();
|
||||
|
||||
/**
|
||||
* Returns the count of subcontainers, or zero if there are none or if the code version does not
|
||||
* support subcontainers.
|
||||
*
|
||||
   * @return The subcontainer count, or zero if not supported.
|
||||
*/
|
||||
int getSubcontainerCount();
|
||||
|
||||
/**
|
||||
* Returns the subcontainer at the selected index. If the container doesn't exist or is invalid,
|
||||
* an empty result is returned. Legacy code always returns empty.
|
||||
*
|
||||
* @param index the index in the container to return
|
||||
   * @param auxData any auxiliary data to append to the subcontainer code. If fetching an initcode
|
||||
* container, pass null.
|
||||
* @return Either the subcontainer, or empty.
|
||||
*/
|
||||
Optional<Code> getSubContainer(final int index, final Bytes auxData);
|
||||
|
||||
/**
|
||||
* Loads data from the appropriate data section
|
||||
*
|
||||
* @param offset Where within the data section to start copying
|
||||
* @param length how many bytes to copy
|
||||
* @return A slice of the code containing the requested data
|
||||
*/
|
||||
Bytes getData(final int offset, final int length);
|
||||
|
||||
/**
|
||||
* Read a signed 16-bit big-endian integer
|
||||
*
|
||||
* @param startIndex the index to start reading the integer in the code
|
||||
* @return a java int representing the 16-bit signed integer.
|
||||
*/
|
||||
int readBigEndianI16(final int startIndex);
|
||||
|
||||
/**
|
||||
* Read an unsigned 16 bit big-endian integer
|
||||
*
|
||||
* @param startIndex the index to start reading the integer in the code
|
||||
* @return a java int representing the 16-bit unsigned integer.
|
||||
*/
|
||||
int readBigEndianU16(final int startIndex);
|
||||
|
||||
/**
|
||||
* Read an unsigned 8-bit integer
|
||||
*
|
||||
* @param startIndex the index to start reading the integer in the code
|
||||
* @return a java int representing the 8-bit unsigned integer.
|
||||
*/
|
||||
int readU8(final int startIndex);
|
||||
|
||||
/**
|
||||
   * A more readable representation of the hex bytes, including whitespace and comments after hash marks
|
||||
*
|
||||
* @return The pretty printed code
|
||||
*/
|
||||
String prettyPrint();
|
||||
}
|
||||
|
||||
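For intuition about the readBigEndianI16/readBigEndianU16 accessors documented above, a standalone sketch of the equivalent arithmetic (illustrative only, not the repository's implementation):

public class BigEndianReadSketch {
  // Unsigned 16-bit big-endian read: high byte first, masked to avoid sign extension.
  static int readBigEndianU16(final byte[] code, final int startIndex) {
    return ((code[startIndex] & 0xFF) << 8) | (code[startIndex + 1] & 0xFF);
  }

  // Signed 16-bit big-endian read: the short cast widens the 16-bit value back to a signed int.
  static int readBigEndianI16(final byte[] code, final int startIndex) {
    return (short) (((code[startIndex] & 0xFF) << 8) | (code[startIndex + 1] & 0xFF));
  }

  public static void main(final String[] args) {
    final byte[] bytes = {(byte) 0xFF, (byte) 0xFE};
    System.out.println(readBigEndianU16(bytes, 0)); // 65534
    System.out.println(readBigEndianI16(bytes, 0)); // -2
  }
}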
@@ -365,6 +365,16 @@ public class EVM {
|
||||
* @return the code
|
||||
*/
|
||||
public Code getCodeUncached(final Bytes codeBytes) {
|
||||
return CodeFactory.createCode(codeBytes, evmSpecVersion.getMaxEofVersion(), false);
|
||||
return CodeFactory.createCode(codeBytes, evmSpecVersion.getMaxEofVersion());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets code for creation. Skips code cache and allows for extra data after EOF contracts.
|
||||
*
|
||||
* @param codeBytes the code bytes
|
||||
* @return the code
|
||||
*/
|
||||
public Code getCodeForCreation(final Bytes codeBytes) {
|
||||
return CodeFactory.createCode(codeBytes, evmSpecVersion.getMaxEofVersion(), false, true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,10 +50,18 @@ public enum EvmSpecVersion {
|
||||
CANCUN(0, true, "Cancun", "Finalized"),
|
||||
/** Prague evm spec version. */
|
||||
PRAGUE(0, false, "Prague", "In Development"),
|
||||
/** PragueEOF evm spec version. */
|
||||
PRAGUE_EOF(1, false, "PragueEOF", "Prague + EOF. In Development"),
|
||||
/** Osaka evm spec version. */
|
||||
OSAKA(0, false, "Osaka", "Placeholder"),
|
||||
OSAKA(1, false, "Osaka", "Placeholder"),
|
||||
  /** Amsterdam evm spec version. */
|
||||
AMSTERDAM(1, false, "Amsterdam", "Placeholder"),
|
||||
/** Bogota evm spec version. */
|
||||
BOGOTA(0, false, "Bogota", "Placeholder"),
|
||||
BOGOTA(1, false, "Bogota", "Placeholder"),
|
||||
/** Polis evm spec version. */
|
||||
POLIS(1, false, "Polis", "Placeholder"),
|
||||
  /** Bangkok evm spec version. */
|
||||
BANGKOK(1, false, "Bangkok", "Placeholder"),
|
||||
/** Development fork for unscheduled EIPs */
|
||||
FUTURE_EIPS(1, false, "Future_EIPs", "Development, for accepted and unscheduled EIPs"),
|
||||
/** Development fork for EIPs not accepted to Mainnet */
|
||||
@@ -147,6 +155,10 @@ public enum EvmSpecVersion {
|
||||
* @return the EVM spec version for that fork, or null if no fork matched.
|
||||
*/
|
||||
public static EvmSpecVersion fromName(final String name) {
|
||||
// TODO remove once PragueEOF settles
|
||||
if ("prague".equalsIgnoreCase(name)) {
|
||||
return EvmSpecVersion.PRAGUE_EOF;
|
||||
}
|
||||
for (var version : EvmSpecVersion.values()) {
|
||||
if (version.name().equalsIgnoreCase(name)) {
|
||||
return version;
|
||||
|
||||
@@ -24,6 +24,7 @@ import org.hyperledger.besu.evm.gascalculator.HomesteadGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.IstanbulGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.LondonGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PetersburgGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PragueEOFGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.ShanghaiGasCalculator;
|
||||
import org.hyperledger.besu.evm.gascalculator.SpuriousDragonGasCalculator;
|
||||
@@ -33,8 +34,6 @@ import org.hyperledger.besu.evm.operation.AddModOperation;
|
||||
import org.hyperledger.besu.evm.operation.AddOperation;
|
||||
import org.hyperledger.besu.evm.operation.AddressOperation;
|
||||
import org.hyperledger.besu.evm.operation.AndOperation;
|
||||
import org.hyperledger.besu.evm.operation.AuthCallOperation;
|
||||
import org.hyperledger.besu.evm.operation.AuthOperation;
|
||||
import org.hyperledger.besu.evm.operation.BalanceOperation;
|
||||
import org.hyperledger.besu.evm.operation.BaseFeeOperation;
|
||||
import org.hyperledger.besu.evm.operation.BlobBaseFeeOperation;
|
||||
@@ -55,15 +54,25 @@ import org.hyperledger.besu.evm.operation.CodeSizeOperation;
|
||||
import org.hyperledger.besu.evm.operation.CoinbaseOperation;
|
||||
import org.hyperledger.besu.evm.operation.Create2Operation;
|
||||
import org.hyperledger.besu.evm.operation.CreateOperation;
|
||||
import org.hyperledger.besu.evm.operation.DataCopyOperation;
|
||||
import org.hyperledger.besu.evm.operation.DataLoadNOperation;
|
||||
import org.hyperledger.besu.evm.operation.DataLoadOperation;
|
||||
import org.hyperledger.besu.evm.operation.DataSizeOperation;
|
||||
import org.hyperledger.besu.evm.operation.DelegateCallOperation;
|
||||
import org.hyperledger.besu.evm.operation.DifficultyOperation;
|
||||
import org.hyperledger.besu.evm.operation.DivOperation;
|
||||
import org.hyperledger.besu.evm.operation.DupNOperation;
|
||||
import org.hyperledger.besu.evm.operation.DupOperation;
|
||||
import org.hyperledger.besu.evm.operation.EOFCreateOperation;
|
||||
import org.hyperledger.besu.evm.operation.EqOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExchangeOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExpOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExtCallOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExtCodeCopyOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExtCodeHashOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExtCodeSizeOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExtDelegateCallOperation;
|
||||
import org.hyperledger.besu.evm.operation.ExtStaticCallOperation;
|
||||
import org.hyperledger.besu.evm.operation.GasLimitOperation;
|
||||
import org.hyperledger.besu.evm.operation.GasOperation;
|
||||
import org.hyperledger.besu.evm.operation.GasPriceOperation;
|
||||
@@ -71,6 +80,7 @@ import org.hyperledger.besu.evm.operation.GtOperation;
|
||||
import org.hyperledger.besu.evm.operation.InvalidOperation;
|
||||
import org.hyperledger.besu.evm.operation.IsZeroOperation;
|
||||
import org.hyperledger.besu.evm.operation.JumpDestOperation;
|
||||
import org.hyperledger.besu.evm.operation.JumpFOperation;
|
||||
import org.hyperledger.besu.evm.operation.JumpOperation;
|
||||
import org.hyperledger.besu.evm.operation.JumpiOperation;
|
||||
import org.hyperledger.besu.evm.operation.Keccak256Operation;
|
||||
@@ -98,7 +108,9 @@ import org.hyperledger.besu.evm.operation.RelativeJumpIfOperation;
|
||||
import org.hyperledger.besu.evm.operation.RelativeJumpOperation;
|
||||
import org.hyperledger.besu.evm.operation.RelativeJumpVectorOperation;
|
||||
import org.hyperledger.besu.evm.operation.RetFOperation;
|
||||
import org.hyperledger.besu.evm.operation.ReturnContractOperation;
|
||||
import org.hyperledger.besu.evm.operation.ReturnDataCopyOperation;
|
||||
import org.hyperledger.besu.evm.operation.ReturnDataLoadOperation;
|
||||
import org.hyperledger.besu.evm.operation.ReturnDataSizeOperation;
|
||||
import org.hyperledger.besu.evm.operation.ReturnOperation;
|
||||
import org.hyperledger.besu.evm.operation.RevertOperation;
|
||||
@@ -117,6 +129,7 @@ import org.hyperledger.besu.evm.operation.SignExtendOperation;
|
||||
import org.hyperledger.besu.evm.operation.StaticCallOperation;
|
||||
import org.hyperledger.besu.evm.operation.StopOperation;
|
||||
import org.hyperledger.besu.evm.operation.SubOperation;
|
||||
import org.hyperledger.besu.evm.operation.SwapNOperation;
|
||||
import org.hyperledger.besu.evm.operation.SwapOperation;
|
||||
import org.hyperledger.besu.evm.operation.TLoadOperation;
|
||||
import org.hyperledger.besu.evm.operation.TStoreOperation;
|
||||
@@ -1015,9 +1028,108 @@ public class MainnetEVMs {
|
||||
final BigInteger chainID) {
|
||||
registerCancunOperations(registry, gasCalculator, chainID);
|
||||
|
||||
// EIP-3074 AUTH and AUTHCALL
|
||||
registry.put(new AuthOperation(gasCalculator));
|
||||
registry.put(new AuthCallOperation(gasCalculator));
|
||||
// TODO add EOF operations here once PragueEOF is collapsed into Prague
|
||||
}
|
||||
|
||||
/**
|
||||
* PragueEOF evm.
|
||||
*
|
||||
* @param evmConfiguration the evm configuration
|
||||
* @return the evm
|
||||
*/
|
||||
public static EVM pragueEOF(final EvmConfiguration evmConfiguration) {
|
||||
return pragueEOF(DEV_NET_CHAIN_ID, evmConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
* PragueEOF evm.
|
||||
*
|
||||
* @param chainId the chain id
|
||||
* @param evmConfiguration the evm configuration
|
||||
* @return the evm
|
||||
*/
|
||||
public static EVM pragueEOF(final BigInteger chainId, final EvmConfiguration evmConfiguration) {
|
||||
return pragueEOF(new PragueEOFGasCalculator(), chainId, evmConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
* PragueEOF evm.
|
||||
*
|
||||
* @param gasCalculator the gas calculator
|
||||
* @param chainId the chain id
|
||||
* @param evmConfiguration the evm configuration
|
||||
* @return the evm
|
||||
*/
|
||||
public static EVM pragueEOF(
|
||||
final GasCalculator gasCalculator,
|
||||
final BigInteger chainId,
|
||||
final EvmConfiguration evmConfiguration) {
|
||||
return new EVM(
|
||||
pragueEOFOperations(gasCalculator, chainId),
|
||||
gasCalculator,
|
||||
evmConfiguration,
|
||||
EvmSpecVersion.PRAGUE_EOF);
|
||||
}
|
||||
|
||||
/**
|
||||
* Operation registry for PragueEOF's operations.
|
||||
*
|
||||
* @param gasCalculator the gas calculator
|
||||
* @param chainId the chain id
|
||||
* @return the operation registry
|
||||
*/
|
||||
public static OperationRegistry pragueEOFOperations(
|
||||
final GasCalculator gasCalculator, final BigInteger chainId) {
|
||||
OperationRegistry operationRegistry = new OperationRegistry();
|
||||
registerPragueEOFOperations(operationRegistry, gasCalculator, chainId);
|
||||
return operationRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register PragueEOF's operations.
|
||||
*
|
||||
* @param registry the registry
|
||||
* @param gasCalculator the gas calculator
|
||||
* @param chainID the chain id
|
||||
*/
|
||||
public static void registerPragueEOFOperations(
|
||||
final OperationRegistry registry,
|
||||
final GasCalculator gasCalculator,
|
||||
final BigInteger chainID) {
|
||||
registerPragueOperations(registry, gasCalculator, chainID);
|
||||
|
||||
// EIP-663 Unlimited Swap and Dup
|
||||
registry.put(new DupNOperation(gasCalculator));
|
||||
registry.put(new SwapNOperation(gasCalculator));
|
||||
registry.put(new ExchangeOperation(gasCalculator));
|
||||
|
||||
// EIP-4200 relative jump
|
||||
registry.put(new RelativeJumpOperation(gasCalculator));
|
||||
registry.put(new RelativeJumpIfOperation(gasCalculator));
|
||||
registry.put(new RelativeJumpVectorOperation(gasCalculator));
|
||||
|
||||
// EIP-4750 EOF Code Sections
|
||||
registry.put(new CallFOperation(gasCalculator));
|
||||
registry.put(new RetFOperation(gasCalculator));
|
||||
|
||||
// EIP-6209 JUMPF Instruction
|
||||
registry.put(new JumpFOperation(gasCalculator));
|
||||
|
||||
// EIP-7069 Revamped EOF Call
|
||||
registry.put(new ExtCallOperation(gasCalculator));
|
||||
registry.put(new ExtDelegateCallOperation(gasCalculator));
|
||||
registry.put(new ExtStaticCallOperation(gasCalculator));
|
||||
registry.put(new ReturnDataLoadOperation(gasCalculator));
|
||||
|
||||
// EIP-7480 EOF Data Section Access
|
||||
registry.put(new DataLoadOperation(gasCalculator));
|
||||
registry.put(new DataLoadNOperation(gasCalculator));
|
||||
registry.put(new DataSizeOperation(gasCalculator));
|
||||
registry.put(new DataCopyOperation(gasCalculator));
|
||||
|
||||
// EIP-7620 EOF Create and Return Contract operation
|
||||
registry.put(new EOFCreateOperation(gasCalculator));
|
||||
registry.put(new ReturnContractOperation(gasCalculator));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1085,7 +1197,75 @@ public class MainnetEVMs {
|
||||
final OperationRegistry registry,
|
||||
final GasCalculator gasCalculator,
|
||||
final BigInteger chainID) {
|
||||
registerPragueOperations(registry, gasCalculator, chainID);
|
||||
registerPragueEOFOperations(registry, gasCalculator, chainID);
|
||||
}
|
||||
|
||||
/**
|
||||
* Amsterdam evm.
|
||||
*
|
||||
* @param evmConfiguration the evm configuration
|
||||
* @return the evm
|
||||
*/
|
||||
public static EVM amsterdam(final EvmConfiguration evmConfiguration) {
|
||||
return amsterdam(DEV_NET_CHAIN_ID, evmConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
* Amsterdam evm.
|
||||
*
|
||||
* @param chainId the chain id
|
||||
* @param evmConfiguration the evm configuration
|
||||
* @return the evm
|
||||
*/
|
||||
public static EVM amsterdam(final BigInteger chainId, final EvmConfiguration evmConfiguration) {
|
||||
return amsterdam(new PragueGasCalculator(), chainId, evmConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
* Amsterdam evm.
|
||||
*
|
||||
* @param gasCalculator the gas calculator
|
||||
* @param chainId the chain id
|
||||
* @param evmConfiguration the evm configuration
|
||||
* @return the evm
|
||||
*/
|
||||
public static EVM amsterdam(
|
||||
final GasCalculator gasCalculator,
|
||||
final BigInteger chainId,
|
||||
final EvmConfiguration evmConfiguration) {
|
||||
return new EVM(
|
||||
amsterdamOperations(gasCalculator, chainId),
|
||||
gasCalculator,
|
||||
evmConfiguration,
|
||||
EvmSpecVersion.AMSTERDAM);
|
||||
}
|
||||
|
||||
/**
|
||||
* Operation registry for amsterdam's operations.
|
||||
*
|
||||
* @param gasCalculator the gas calculator
|
||||
* @param chainId the chain id
|
||||
* @return the operation registry
|
||||
*/
|
||||
public static OperationRegistry amsterdamOperations(
|
||||
final GasCalculator gasCalculator, final BigInteger chainId) {
|
||||
OperationRegistry operationRegistry = new OperationRegistry();
|
||||
    registerAmsterdamOperations(operationRegistry, gasCalculator, chainId);
    return operationRegistry;
  }

  /**
   * Register amsterdam operations.
   *
   * @param registry the registry
   * @param gasCalculator the gas calculator
   * @param chainID the chain id
   */
  public static void registerAmsterdamOperations(
      final OperationRegistry registry,
      final GasCalculator gasCalculator,
      final BigInteger chainID) {
    registerOsakaOperations(registry, gasCalculator, chainID);
  }

  /**
@@ -1153,7 +1333,143 @@ public class MainnetEVMs {
      final OperationRegistry registry,
      final GasCalculator gasCalculator,
      final BigInteger chainID) {
    registerOsakaOperations(registry, gasCalculator, chainID);
    registerAmsterdamOperations(registry, gasCalculator, chainID);
  }

  /**
   * Polis evm.
   *
   * @param evmConfiguration the evm configuration
   * @return the evm
   */
  public static EVM polis(final EvmConfiguration evmConfiguration) {
    return polis(DEV_NET_CHAIN_ID, evmConfiguration);
  }

  /**
   * Polis evm.
   *
   * @param chainId the chain id
   * @param evmConfiguration the evm configuration
   * @return the evm
   */
  public static EVM polis(final BigInteger chainId, final EvmConfiguration evmConfiguration) {
    return polis(new PragueGasCalculator(), chainId, evmConfiguration);
  }

  /**
   * Polis evm.
   *
   * @param gasCalculator the gas calculator
   * @param chainId the chain id
   * @param evmConfiguration the evm configuration
   * @return the evm
   */
  public static EVM polis(
      final GasCalculator gasCalculator,
      final BigInteger chainId,
      final EvmConfiguration evmConfiguration) {
    return new EVM(
        polisOperations(gasCalculator, chainId),
        gasCalculator,
        evmConfiguration,
        EvmSpecVersion.POLIS);
  }

  /**
   * Operation registry for Polis's operations.
   *
   * @param gasCalculator the gas calculator
   * @param chainId the chain id
   * @return the operation registry
   */
  public static OperationRegistry polisOperations(
      final GasCalculator gasCalculator, final BigInteger chainId) {
    OperationRegistry operationRegistry = new OperationRegistry();
    registerPolisOperations(operationRegistry, gasCalculator, chainId);
    return operationRegistry;
  }

  /**
   * Register polis operations.
   *
   * @param registry the registry
   * @param gasCalculator the gas calculator
   * @param chainID the chain id
   */
  public static void registerPolisOperations(
      final OperationRegistry registry,
      final GasCalculator gasCalculator,
      final BigInteger chainID) {
    registerBogotaOperations(registry, gasCalculator, chainID);
  }

  /**
   * Bangkok evm.
   *
   * @param evmConfiguration the evm configuration
   * @return the evm
   */
  public static EVM bangkok(final EvmConfiguration evmConfiguration) {
    return bangkok(DEV_NET_CHAIN_ID, evmConfiguration);
  }

  /**
   * Bangkok evm.
   *
   * @param chainId the chain id
   * @param evmConfiguration the evm configuration
   * @return the evm
   */
  public static EVM bangkok(final BigInteger chainId, final EvmConfiguration evmConfiguration) {
    return bangkok(new PragueGasCalculator(), chainId, evmConfiguration);
  }

  /**
   * Bangkok evm.
   *
   * @param gasCalculator the gas calculator
   * @param chainId the chain id
   * @param evmConfiguration the evm configuration
   * @return the evm
   */
  public static EVM bangkok(
      final GasCalculator gasCalculator,
      final BigInteger chainId,
      final EvmConfiguration evmConfiguration) {
    return new EVM(
        bangkokOperations(gasCalculator, chainId),
        gasCalculator,
        evmConfiguration,
        EvmSpecVersion.BANGKOK);
  }

  /**
   * Operation registry for bangkok's operations.
   *
   * @param gasCalculator the gas calculator
   * @param chainId the chain id
   * @return the operation registry
   */
  public static OperationRegistry bangkokOperations(
      final GasCalculator gasCalculator, final BigInteger chainId) {
    OperationRegistry operationRegistry = new OperationRegistry();
    registerBangkokOperations(operationRegistry, gasCalculator, chainId);
    return operationRegistry;
  }

  /**
   * Register bangkok operations.
   *
   * @param registry the registry
   * @param gasCalculator the gas calculator
   * @param chainID the chain id
   */
  public static void registerBangkokOperations(
      final OperationRegistry registry,
      final GasCalculator gasCalculator,
      final BigInteger chainID) {
    registerPolisOperations(registry, gasCalculator, chainID);
  }

  /**
@@ -1222,13 +1538,6 @@ public class MainnetEVMs {
      final GasCalculator gasCalculator,
      final BigInteger chainID) {
    registerBogotaOperations(registry, gasCalculator, chainID);

    // "big" EOF
    registry.put(new RelativeJumpOperation(gasCalculator));
    registry.put(new RelativeJumpIfOperation(gasCalculator));
    registry.put(new RelativeJumpVectorOperation(gasCalculator));
    registry.put(new CallFOperation(gasCalculator));
    registry.put(new RetFOperation(gasCalculator));
  }

  /**

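For orientation only: each new fork level above re-registers the previous fork's operations before adding its own (bangkok delegates to polis, polis to bogota, and so on), so building a registry for the newest level pulls in the whole chain. A minimal usage sketch under that assumption; the chain id is an illustrative value, and the import paths and EvmConfiguration.DEFAULT are assumed from the surrounding Besu code base, not shown in this diff:

import java.math.BigInteger;

import org.hyperledger.besu.evm.EVM;
import org.hyperledger.besu.evm.MainnetEVMs;
import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.evm.operation.OperationRegistry;

class BangkokEvmSketch {
  public static void main(final String[] args) {
    // Registry composed by delegation: bangkok -> polis -> bogota -> ... (per the diff above).
    OperationRegistry registry =
        MainnetEVMs.bangkokOperations(new PragueGasCalculator(), BigInteger.valueOf(1337));

    // Or build the EVM directly; DEV_NET_CHAIN_ID is used when no chain id is supplied.
    EVM evm = MainnetEVMs.bangkok(BigInteger.valueOf(1337), EvmConfiguration.DEFAULT);
  }
}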
@@ -14,8 +14,13 @@
 */
package org.hyperledger.besu.evm.code;

import static org.hyperledger.besu.evm.code.EOFLayout.EOFContainerMode.INITCODE;

import org.hyperledger.besu.evm.Code;

import javax.annotation.Nonnull;

import com.google.errorprone.annotations.InlineMe;
import org.apache.tuweni.bytes.Bytes;

/** The Code factory. */
@@ -33,24 +38,57 @@ public final class CodeFactory {
   *
   * @param bytes the bytes
   * @param maxEofVersion the max eof version
   * @param inCreateOperation the in create operation
   * @return the code
   */
  public static Code createCode(final Bytes bytes, final int maxEofVersion) {
    return createCode(bytes, maxEofVersion, false, false);
  }

  /**
   * Create Code.
   *
   * @param bytes the bytes
   * @param maxEofVersion the max eof version
   * @param legacyCreation Allow some corner cases. `EF` and not `EF00` code
   * @deprecated use the no boolean or two boolean variant
   * @return the code
   */
  @Deprecated(since = "24.4.1")
  @InlineMe(
      replacement = "CodeFactory.createCode(bytes, maxEofVersion, legacyCreation, false)",
      imports = "org.hyperledger.besu.evm.code.CodeFactory")
  public static Code createCode(
      final Bytes bytes, final int maxEofVersion, final boolean legacyCreation) {
    return createCode(bytes, maxEofVersion, legacyCreation, false);
  }

  /**
   * Create Code.
   *
   * @param bytes the bytes
   * @param maxEofVersion the max eof version
   * @param legacyCreation Allow some corner cases. `EF` and not `EF00` code
   * @param createTransaction This is in a create transaction, allow dangling data
   * @return the code
   */
  public static Code createCode(
      final Bytes bytes, final int maxEofVersion, final boolean inCreateOperation) {
      final Bytes bytes,
      final int maxEofVersion,
      final boolean legacyCreation,
      final boolean createTransaction) {
    if (maxEofVersion == 0) {
      return new CodeV0(bytes);
    } else if (maxEofVersion == 1) {
      int codeSize = bytes.size();
      if (codeSize > 0 && bytes.get(0) == EOF_LEAD_BYTE) {
        if (codeSize == 1 && !inCreateOperation) {
        if (codeSize == 1 && !legacyCreation) {
          return new CodeV0(bytes);
        }
        if (codeSize < 3) {
          return new CodeInvalid(bytes, "EOF Container too short");
        }
        if (bytes.get(1) != 0) {
          if (inCreateOperation) {
          if (legacyCreation) {
            // because some 0xef code made it to mainnet, this is only an error at contract create
            return new CodeInvalid(bytes, "Incorrect second byte");
          } else {
@@ -62,22 +100,11 @@ public final class CodeFactory {
          return new CodeInvalid(bytes, "Unsupported EOF Version: " + version);
        }

        final EOFLayout layout = EOFLayout.parseEOF(bytes);
        if (!layout.isValid()) {
          return new CodeInvalid(bytes, "Invalid EOF Layout: " + layout.getInvalidReason());
        final EOFLayout layout = EOFLayout.parseEOF(bytes, !createTransaction);
        if (createTransaction) {
          layout.containerMode().set(INITCODE);
        }

        final String codeValidationError = CodeV1Validation.validateCode(layout);
        if (codeValidationError != null) {
          return new CodeInvalid(bytes, "EOF Code Invalid : " + codeValidationError);
        }

        final String stackValidationError = CodeV1Validation.validateStack(layout);
        if (stackValidationError != null) {
          return new CodeInvalid(bytes, "EOF Code Invalid : " + stackValidationError);
        }

        return new CodeV1(layout);
        return createCode(layout, createTransaction);
      } else {
        return new CodeV0(bytes);
      }
@@ -85,4 +112,18 @@ public final class CodeFactory {
    return new CodeInvalid(bytes, "Unsupported max code version " + maxEofVersion);
  }
}

  @Nonnull
  static Code createCode(final EOFLayout layout, final boolean createTransaction) {
    if (!layout.isValid()) {
      return new CodeInvalid(layout.container(), "Invalid EOF Layout: " + layout.invalidReason());
    }

    final String validationError = CodeV1Validation.validate(layout);
    if (validationError != null) {
      return new CodeInvalid(layout.container(), "EOF Code Invalid : " + validationError);
    }

    return new CodeV1(layout);
  }
}

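A hedged sketch of how the reworked factory entry points might be driven. The bytecode here is a placeholder (not a valid EOF container), and only the methods visible in the diff above are used:

import org.apache.tuweni.bytes.Bytes;

import org.hyperledger.besu.evm.Code;
import org.hyperledger.besu.evm.code.CodeFactory;

class CodeFactorySketch {
  public static void main(final String[] args) {
    Bytes deployedBytecode = Bytes.fromHexString("0x6001600101");

    // Plain deployed code with EOF disabled (max version 0) always yields CodeV0.
    Code legacy = CodeFactory.createCode(deployedBytecode, 0);

    // Initcode from a create transaction: dangling data is tolerated and the
    // layout is marked INITCODE, per the new boolean added in this diff.
    Code initcode = CodeFactory.createCode(deployedBytecode, 1, false, true);

    System.out.println(legacy.isValid() + " / " + initcode.isValid());
  }
}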
@@ -16,14 +16,16 @@ package org.hyperledger.besu.evm.code;

import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.evm.Code;
import org.hyperledger.besu.evm.internal.Words;

import java.util.Optional;
import java.util.function.Supplier;

import com.google.common.base.Suppliers;
import org.apache.tuweni.bytes.Bytes;

/**
 * For code versions where code can be deemed "invalid" this represents a cachable instance of
 * For code versions where code can be deemed "invalid" this represents a cacheable instance of
 * invalid code. Note that EXTCODE operations can still access invalid code.
 */
public class CodeInvalid implements Code {
@@ -59,6 +61,11 @@ public class CodeInvalid implements Code {
    return codeBytes.size();
  }

  @Override
  public int getDataSize() {
    return 0;
  }

  @Override
  public Bytes getBytes() {
    return codeBytes;
@@ -91,6 +98,41 @@ public class CodeInvalid implements Code {

  @Override
  public int getEofVersion() {
    return -1;
    return Integer.MAX_VALUE;
  }

  @Override
  public int getSubcontainerCount() {
    return 0;
  }

  @Override
  public Optional<Code> getSubContainer(final int index, final Bytes auxData) {
    return Optional.empty();
  }

  @Override
  public Bytes getData(final int offset, final int length) {
    return Bytes.EMPTY;
  }

  @Override
  public int readBigEndianI16(final int index) {
    return Words.readBigEndianI16(index, codeBytes.toArrayUnsafe());
  }

  @Override
  public int readBigEndianU16(final int index) {
    return Words.readBigEndianU16(index, codeBytes.toArrayUnsafe());
  }

  @Override
  public int readU8(final int index) {
    return codeBytes.toArrayUnsafe()[index] & 0xff;
  }

  @Override
  public String prettyPrint() {
    return codeBytes.toHexString();
  }
}

@@ -36,6 +36,9 @@ public final class CodeSection {
  /** The byte offset from the beginning of the container that the section starts at */
  final int entryPoint;

  /** Is this a returning code section (i.e. contains RETF or JUMPF into a returning section)? */
  final boolean returning;

  /**
   * Instantiates a new Code section.
   *
@@ -53,7 +56,13 @@ public final class CodeSection {
      final int entryPoint) {
    this.length = length;
    this.inputs = inputs;
    this.outputs = outputs;
    if (outputs == 0x80) {
      this.outputs = 0;
      returning = false;
    } else {
      this.outputs = outputs;
      returning = true;
    }
    this.maxStackHeight = maxStackHeight;
    this.entryPoint = entryPoint;
  }
@@ -85,6 +94,15 @@ public final class CodeSection {
    return outputs;
  }

  /**
   * Does this code section have a RETF return anywhere?
   *
   * @return returning
   */
  public boolean isReturning() {
    return returning;
  }

  /**
   * Gets max stack height.
   *

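A small illustration of the non-returning sentinel handled above. This is an assumption-laden sketch: the constructor parameter order (length, inputs, outputs, maxStackHeight, entryPoint) is inferred from the assignments in the hunk, and the getOutputs accessor is taken from the later hunk; neither is spelled out in full here.

import org.hyperledger.besu.evm.code.CodeSection;

class CodeSectionSketch {
  public static void main(final String[] args) {
    // 0x80 in the type-section "outputs" slot marks a non-returning code section:
    // outputs is normalized to 0 and isReturning() reports false.
    CodeSection nonReturning = new CodeSection(10, 2, 0x80, 4, 0);
    CodeSection returning = new CodeSection(10, 2, 1, 4, 0);

    System.out.println(nonReturning.isReturning() + " outputs=" + nonReturning.getOutputs());
    System.out.println(returning.isReturning() + " outputs=" + returning.getOutputs());
  }
}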
@@ -16,8 +16,10 @@ package org.hyperledger.besu.evm.code;

import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.evm.Code;
import org.hyperledger.besu.evm.internal.Words;
import org.hyperledger.besu.evm.operation.JumpDestOperation;

import java.util.Optional;
import java.util.function.Supplier;

import com.google.common.base.MoreObjects;
@@ -57,15 +59,14 @@ public class CodeV0 implements Code {
   * Returns true if the object is equal to this; otherwise false.
   *
   * @param other The object to compare this with.
   * @return True if the object is equal to this; otherwise false.
   * @return True if the object is equal to this, otherwise false.
   */
  @Override
  public boolean equals(final Object other) {
    if (other == null) return false;
    if (other == this) return true;
    if (!(other instanceof CodeV0)) return false;
    if (!(other instanceof CodeV0 that)) return false;

    final CodeV0 that = (CodeV0) other;
    return this.bytes.equals(that.bytes);
  }

@@ -84,6 +85,11 @@ public class CodeV0 implements Code {
    return bytes.size();
  }

  @Override
  public int getDataSize() {
    return 0;
  }

  @Override
  public Bytes getBytes() {
    return bytes;
@@ -137,6 +143,21 @@ public class CodeV0 implements Code {
    return 0;
  }

  @Override
  public int getSubcontainerCount() {
    return 0;
  }

  @Override
  public Optional<Code> getSubContainer(final int index, final Bytes auxData) {
    return Optional.empty();
  }

  @Override
  public Bytes getData(final int offset, final int length) {
    return Bytes.EMPTY;
  }

  /**
   * Calculate jump destination.
   *
@@ -295,4 +316,24 @@ public class CodeV0 implements Code {
    }
    return bitmap;
  }

  @Override
  public int readBigEndianI16(final int index) {
    return Words.readBigEndianI16(index, bytes.toArrayUnsafe());
  }

  @Override
  public int readBigEndianU16(final int index) {
    return Words.readBigEndianU16(index, bytes.toArrayUnsafe());
  }

  @Override
  public int readU8(final int index) {
    return bytes.toArrayUnsafe()[index] & 0xff;
  }

  @Override
  public String prettyPrint() {
    return bytes.toHexString();
  }
}

@@ -18,12 +18,17 @@ import static com.google.common.base.Preconditions.checkArgument;

import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.evm.Code;
import org.hyperledger.besu.evm.internal.Words;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Supplier;

import com.google.common.base.Suppliers;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.MutableBytes;

/** The CodeV1. */
public class CodeV1 implements Code {
@@ -34,16 +39,16 @@ public class CodeV1 implements Code {
  /**
   * Instantiates a new CodeV1.
   *
   * @param layout the layout
   * @param eofLayout the layout
   */
  CodeV1(final EOFLayout layout) {
    this.eofLayout = layout;
    this.codeHash = Suppliers.memoize(() -> Hash.hash(eofLayout.getContainer()));
  CodeV1(final EOFLayout eofLayout) {
    this.eofLayout = eofLayout;
    this.codeHash = Suppliers.memoize(() -> Hash.hash(eofLayout.container()));
  }

  @Override
  public int getSize() {
    return eofLayout.getContainer().size();
    return eofLayout.container().size();
  }

  @Override
@@ -60,7 +65,7 @@ public class CodeV1 implements Code {

  @Override
  public Bytes getBytes() {
    return eofLayout.getContainer();
    return eofLayout.container();
  }

  @Override
@@ -80,7 +85,35 @@ public class CodeV1 implements Code {

  @Override
  public int getEofVersion() {
    return eofLayout.getVersion();
    return eofLayout.version();
  }

  @Override
  public int getSubcontainerCount() {
    return eofLayout.getSubcontainerCount();
  }

  @Override
  public Optional<Code> getSubContainer(final int index, final Bytes auxData) {
    EOFLayout subcontainerLayout = eofLayout.getSubcontainer(index);
    if (auxData != null && !auxData.isEmpty()) {
      Bytes subcontainerWithAuxData = subcontainerLayout.writeContainer(auxData);
      if (subcontainerWithAuxData == null) {
        return Optional.empty();
      }
      subcontainerLayout = EOFLayout.parseEOF(subcontainerWithAuxData);
    } else {
      // if no auxdata is added we must validate data is not truncated separately
      if (subcontainerLayout.dataLength() != subcontainerLayout.data().size()) {
        return Optional.empty();
      }
    }

    Code subContainerCode = CodeFactory.createCode(subcontainerLayout, auxData == null);

    return subContainerCode.isValid() && subContainerCode.getEofVersion() > 0
        ? Optional.of(subContainerCode)
        : Optional.empty();
  }

  @Override
@@ -95,4 +128,56 @@ public class CodeV1 implements Code {
  public int hashCode() {
    return Objects.hash(codeHash, eofLayout);
  }

  @Override
  public Bytes getData(final int offset, final int length) {
    Bytes data = eofLayout.data();
    int dataLen = data.size();
    if (offset > dataLen) {
      return Bytes.EMPTY;
    } else if ((offset + length) > dataLen) {
      byte[] result = new byte[length];
      MutableBytes mbytes = MutableBytes.wrap(result);
      data.slice(offset).copyTo(mbytes, 0);
      return Bytes.wrap(result);
    } else {
      return data.slice(offset, length);
    }
  }

  @Override
  public int getDataSize() {
    return eofLayout.data().size();
  }

  @Override
  public int readBigEndianI16(final int index) {
    return Words.readBigEndianI16(index, eofLayout.container().toArrayUnsafe());
  }

  @Override
  public int readBigEndianU16(final int index) {
    return Words.readBigEndianU16(index, eofLayout.container().toArrayUnsafe());
  }

  @Override
  public int readU8(final int index) {
    return eofLayout.container().toArrayUnsafe()[index] & 0xff;
  }

  @Override
  public String prettyPrint() {
    StringWriter sw = new StringWriter();
    eofLayout.prettyPrint(new PrintWriter(sw, true), "", "");
    return sw.toString();
  }

  /**
   * The EOFLayout object for the code
   *
   * @return the EOFLayout object for the parsed code
   */
  public EOFLayout getEofLayout() {
    return eofLayout;
  }
}

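The new CodeV1.getData above zero-pads reads that run past the end of the data section. A standalone sketch of that slicing behaviour, using only Tuweni Bytes rather than the Besu classes, so the semantics can be tried in isolation:

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.MutableBytes;

class PaddedDataReadSketch {
  // Mirrors the branching in CodeV1.getData: empty past the end, zero-padded when the
  // requested window only partially overlaps the data, plain slice otherwise.
  static Bytes paddedSlice(final Bytes data, final int offset, final int length) {
    int dataLen = data.size();
    if (offset > dataLen) {
      return Bytes.EMPTY;
    } else if ((offset + length) > dataLen) {
      byte[] result = new byte[length];
      MutableBytes mbytes = MutableBytes.wrap(result);
      data.slice(offset).copyTo(mbytes, 0);
      return Bytes.wrap(result);
    } else {
      return data.slice(offset, length);
    }
  }

  public static void main(final String[] args) {
    Bytes data = Bytes.fromHexString("0x0102030405");
    System.out.println(paddedSlice(data, 3, 4)); // 0x04050000 - padded with zeros
    System.out.println(paddedSlice(data, 9, 2)); // 0x (empty)
  }
}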
File diff suppressed because it is too large. Some files were not shown because too many files have changed in this diff.