Merge branch 'main' into zkbesu

# Conflicts:
#	build.gradle
Fabio Di Fabio
2024-08-27 15:05:02 +02:00
65 changed files with 2062 additions and 268 deletions

View File

@@ -29,9 +29,10 @@ assignees: ''
 - publishes the docker `latest` tag variants
 - [ ] Check binary SHAs are correct on the release page
 - [ ] Check "Container Verify" GitHub workflow has run successfully
-- [ ] Create besu-docs release - https://github.com/hyperledger/besu-docs/releases/new
-  - Copy release notes from besu
-  - If publishing the release in github doesn't automatically trigger this workflow, then manually run https://github.com/hyperledger/besu-docs/actions/workflows/update-version.yml
-- [ ] Create homebrew release - run GHA workflow directly https://github.com/hyperledger/homebrew-besu/actions/workflows/update-version.yml
+- [ ] Update the besu-docs version [update-version workflow](https://github.com/hyperledger/besu-docs/actions/workflows/update-version.yml)
+  - If the PR has not been automatically created, create the PR manually using the created branch `besu-version-<version>`
+- [ ] Create homebrew release using [update-version workflow](https://github.com/hyperledger/homebrew-besu/actions/workflows/update-version.yml)
+  - If the PR has not been automatically created, create the PR manually using the created branch `update-<version>`
+  - Run commands `brew tap hyperledger/besu && brew install besu` on MacOSX and verify latest version has been installed
 - [ ] Delete the burn-in nodes (unless required for further analysis eg performance)
 - [ ] Social announcements

View File

@@ -1,5 +1,14 @@
 # Changelog
+## [Unreleased]
+### Fixed
+- **DebugMetrics**: Fixed a `ClassCastException` occurring in `DebugMetrics` when handling nested metric structures. Previously, `Double` values within these structures were incorrectly cast to `Map` objects, leading to errors. This update allows for proper handling of both direct values and nested structures at the same level. Issue# [#7383](https://github.com/hyperledger/besu/pull/7383)
+- `evmtool` was not respecting the `--genesis` setting, resulting in unexpected trace results. [#7433](https://github.com/hyperledger/besu/pull/7433)
+### Tests
+- Added a comprehensive test case to reproduce the bug and verify the fix for the `ClassCastException` in `DebugMetrics`. This ensures that complex, dynamically nested metric structures can be handled without errors.
 ## Next release
 ### Upcoming Breaking Changes
@@ -11,12 +20,14 @@
 - Add 'inbound' field to admin_peers JSON-RPC Call [#7461](https://github.com/hyperledger/besu/pull/7461)
 - Add pending block header to `TransactionEvaluationContext` plugin API [#7483](https://github.com/hyperledger/besu/pull/7483)
 - Add bootnode to holesky config [#7500](https://github.com/hyperledger/besu/pull/7500)
+- Implement engine_getClientVersionV1
 ### Bug fixes
 - Fix tracing in precompiled contracts when halting for out of gas [#7318](https://github.com/hyperledger/besu/issues/7318)
 - Correctly release txpool save and restore lock in case of exceptions [#7473](https://github.com/hyperledger/besu/pull/7473)
 - Fix for `eth_gasPrice` could not retrieve block error [#7482](https://github.com/hyperledger/besu/pull/7482)
 ## 24.8.0
 ### Upcoming Breaking Changes
@@ -41,6 +52,7 @@
 - Correct entrypoint in Docker evmtool [#7430](https://github.com/hyperledger/besu/pull/7430)
 - Fix protocol schedule check for devnets [#7429](https://github.com/hyperledger/besu/pull/7429)
 - Fix behaviour when starting in a pre-merge network [#7431](https://github.com/hyperledger/besu/pull/7431)
+- Fix Null pointer from DNS daemon [#7505](https://github.com/hyperledger/besu/issues/7505)
 ## 24.7.1

View File

@@ -32,7 +32,9 @@ import org.hyperledger.besu.ethereum.core.MiningParameters;
 import org.hyperledger.besu.ethereum.core.PrivacyParameters;
 import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
 import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
+import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
 import org.hyperledger.besu.pki.keystore.KeyStoreWrapper;
+import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
 import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
 import org.hyperledger.besu.tests.acceptance.dsl.node.Node;
 import org.hyperledger.besu.tests.acceptance.dsl.node.RunnableNode;
@@ -476,7 +478,9 @@ public class BesuNodeFactory {
             .build());
   }
-  public BesuNode createIbft2Node(final String name, final boolean fixedPort) throws IOException {
+  public BesuNode createIbft2Node(
+      final String name, final boolean fixedPort, final DataStorageFormat storageFormat)
+      throws IOException {
     JsonRpcConfiguration rpcConfig = node.createJsonRpcWithIbft2EnabledConfig(false);
     rpcConfig.addRpcApi("ADMIN,TXPOOL");
     if (fixedPort) {
@@ -484,6 +488,7 @@ public class BesuNodeFactory {
           Math.abs(name.hashCode() % 60000)
              + 1024); // Generate a consistent port for p2p based on node name
     }
     BesuNodeConfigurationBuilder builder =
         new BesuNodeConfigurationBuilder()
             .name(name)
@@ -491,6 +496,10 @@ public class BesuNodeFactory {
             .jsonRpcConfiguration(rpcConfig)
             .webSocketConfiguration(node.createWebSocketEnabledConfig())
             .devMode(false)
+            .dataStorageConfiguration(
+                storageFormat == DataStorageFormat.FOREST
+                    ? DataStorageConfiguration.DEFAULT_FOREST_CONFIG
+                    : DataStorageConfiguration.DEFAULT_BONSAI_CONFIG)
             .genesisConfigProvider(GenesisConfigurationFactory::createIbft2GenesisConfig);
     if (fixedPort) {
       builder.p2pPort(
@@ -527,7 +536,9 @@ public class BesuNodeFactory {
     return createQbftNodeWithTLS(name, KeyStoreWrapper.KEYSTORE_TYPE_PKCS11);
   }
-  public BesuNode createQbftNode(final String name, final boolean fixedPort) throws IOException {
+  public BesuNode createQbftNode(
+      final String name, final boolean fixedPort, final DataStorageFormat storageFormat)
+      throws IOException {
     JsonRpcConfiguration rpcConfig = node.createJsonRpcWithQbftEnabledConfig(false);
     rpcConfig.addRpcApi("ADMIN,TXPOOL");
     if (fixedPort) {
@@ -543,6 +554,10 @@ public class BesuNodeFactory {
             .jsonRpcConfiguration(rpcConfig)
             .webSocketConfiguration(node.createWebSocketEnabledConfig())
             .devMode(false)
+            .dataStorageConfiguration(
+                storageFormat == DataStorageFormat.FOREST
+                    ? DataStorageConfiguration.DEFAULT_FOREST_CONFIG
+                    : DataStorageConfiguration.DEFAULT_BONSAI_CONFIG)
             .genesisConfigProvider(GenesisConfigurationFactory::createQbftGenesisConfig);
     if (fixedPort) {
       builder.p2pPort(

View File

@@ -14,6 +14,7 @@
  */
 package org.hyperledger.besu.tests.acceptance.bft;
+import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
 import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
 import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.BesuNodeFactory;
@@ -38,7 +39,9 @@ public class BftAcceptanceTestParameterization {
   @FunctionalInterface
   public interface NodeCreator {
-    BesuNode create(BesuNodeFactory factory, String name, boolean fixedPort) throws Exception;
+    BesuNode create(
+        BesuNodeFactory factory, String name, boolean fixedPort, DataStorageFormat storageFormat)
+        throws Exception;
   }
   @FunctionalInterface
@@ -64,11 +67,15 @@ public class BftAcceptanceTestParameterization {
   }
   public BesuNode createNode(BesuNodeFactory factory, String name) throws Exception {
-    return creatorFn.create(factory, name, false);
+    return creatorFn.create(factory, name, false, DataStorageFormat.FOREST);
   }
-  public BesuNode createNodeFixedPort(BesuNodeFactory factory, String name) throws Exception {
-    return creatorFn.create(factory, name, true);
+  public BesuNode createBonsaiNodeFixedPort(BesuNodeFactory factory, String name) throws Exception {
+    return creatorFn.create(factory, name, true, DataStorageFormat.BONSAI);
+  }
+  public BesuNode createForestNodeFixedPort(BesuNodeFactory factory, String name) throws Exception {
+    return creatorFn.create(factory, name, true, DataStorageFormat.FOREST);
   }
   public BesuNode createNodeWithValidators(

View File

@@ -60,10 +60,11 @@ public class BftMiningSoakTest extends ParameterizedBftTestBase {
     // in between certain steps. There should be no upper-limit to how long the test is run for
     assertThat(getTestDurationMins()).isGreaterThanOrEqualTo(MIN_TEST_TIME_MINS);
-    final BesuNode minerNode1 = nodeFactory.createNodeFixedPort(besu, "miner1");
-    final BesuNode minerNode2 = nodeFactory.createNodeFixedPort(besu, "miner2");
-    final BesuNode minerNode3 = nodeFactory.createNodeFixedPort(besu, "miner3");
-    final BesuNode minerNode4 = nodeFactory.createNodeFixedPort(besu, "miner4");
+    // Create a mix of Bonsai and Forest DB nodes
+    final BesuNode minerNode1 = nodeFactory.createBonsaiNodeFixedPort(besu, "miner1");
+    final BesuNode minerNode2 = nodeFactory.createForestNodeFixedPort(besu, "miner2");
+    final BesuNode minerNode3 = nodeFactory.createBonsaiNodeFixedPort(besu, "miner3");
+    final BesuNode minerNode4 = nodeFactory.createForestNodeFixedPort(besu, "miner4");
     // Each step should be given a minimum of 3 minutes to complete successfully. If the time
     // give to run the soak test results in a time-per-step lower than this then the time

View File

@@ -17,6 +17,8 @@ package org.hyperledger.besu;
 import org.hyperledger.besu.util.platform.PlatformDetector;
 import java.util.Optional;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 /**
  * Represent Besu information such as version, OS etc. Used with --version option and during Besu
@@ -24,9 +26,29 @@ import java.util.Optional;
  */
 public final class BesuInfo {
   private static final String CLIENT = "besu";
-  private static final String VERSION = BesuInfo.class.getPackage().getImplementationVersion();
   private static final String OS = PlatformDetector.getOS();
   private static final String VM = PlatformDetector.getVM();
+  private static final String VERSION;
+  private static final String COMMIT;
+  static {
+    String projectVersion = BesuInfo.class.getPackage().getImplementationVersion();
+    if (projectVersion == null) {
+      // protect against unset project version (e.g. unit tests being run, etc)
+      VERSION = null;
+      COMMIT = null;
+    } else {
+      Pattern pattern =
+          Pattern.compile("(?<version>\\d+\\.\\d+\\.?\\d?-?\\w*)-(?<commit>[0-9a-fA-F]{8})");
+      Matcher matcher = pattern.matcher(projectVersion);
+      if (matcher.find()) {
+        VERSION = matcher.group("version");
+        COMMIT = matcher.group("commit");
+      } else {
+        throw new RuntimeException("Invalid project version: " + projectVersion);
+      }
+    }
+  }
   private BesuInfo() {}
@@ -46,7 +68,7 @@ public final class BesuInfo {
   *     or "besu/v23.1.0/osx-aarch_64/corretto-java-19"
   */
  public static String version() {
-    return String.format("%s/v%s/%s/%s", CLIENT, VERSION, OS, VM);
+    return String.format("%s/v%s-%s/%s/%s", CLIENT, VERSION, COMMIT, OS, VM);
  }
  /**
@@ -57,7 +79,18 @@ public final class BesuInfo {
   */
  public static String nodeName(final Optional<String> maybeIdentity) {
    return maybeIdentity
-        .map(identity -> String.format("%s/%s/v%s/%s/%s", CLIENT, identity, VERSION, OS, VM))
+        .map(
+            identity ->
+                String.format("%s/%s/v%s-%s/%s/%s", CLIENT, identity, VERSION, COMMIT, OS, VM))
        .orElse(version());
  }
+  /**
+   * Generate the commit hash for this besu version
+   *
+   * @return the commit hash for this besu version
+   */
+  public static String commit() {
+    return COMMIT;
+  }
 }
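For context (not part of the diff): a minimal, standalone sketch of how the version-parsing regex introduced above splits a full implementation version into its version and commit parts. The sample input string is hypothetical.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionParseSketch {
  public static void main(final String[] args) {
    // Same pattern as the static initializer above; "24.8.0-develop-abcdef12" is a made-up sample.
    final Pattern pattern =
        Pattern.compile("(?<version>\\d+\\.\\d+\\.?\\d?-?\\w*)-(?<commit>[0-9a-fA-F]{8})");
    final Matcher matcher = pattern.matcher("24.8.0-develop-abcdef12");
    if (matcher.find()) {
      System.out.println(matcher.group("version")); // 24.8.0-develop
      System.out.println(matcher.group("commit")); // abcdef12
      // BesuInfo.version() would then render this as "besu/v24.8.0-develop-abcdef12/<os>/<vm>".
    }
  }
}
```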

View File

@@ -1291,6 +1291,8 @@ public class RunnerBuilder {
         new JsonRpcMethodsFactory()
             .methods(
                 BesuInfo.nodeName(identityString),
+                BesuInfo.shortVersion(),
+                BesuInfo.commit(),
                 ethNetworkConfig.networkId(),
                 besuController.getGenesisConfigOptions(),
                 network,

View File

@@ -74,17 +74,11 @@ public class SynchronizationServiceImpl implements SynchronizationService {
   @Override
   public void fireNewUnverifiedForkchoiceEvent(
       final Hash head, final Hash safeBlock, final Hash finalizedBlock) {
-    final MergeContext mergeContext = protocolContext.getConsensusContext(MergeContext.class);
-    if (mergeContext != null) {
-      mergeContext.fireNewUnverifiedForkchoiceEvent(head, safeBlock, finalizedBlock);
-      protocolContext.getBlockchain().setFinalized(finalizedBlock);
-      protocolContext.getBlockchain().setSafeBlock(safeBlock);
-    } else {
-      LOG.atWarn()
-          .setMessage(
-              "The merge context is unavailable, hence the fork choice event cannot be triggered")
-          .log();
-    }
+    protocolContext
+        .safeConsensusContext(MergeContext.class)
+        .ifPresent(mc -> mc.fireNewUnverifiedForkchoiceEvent(head, safeBlock, finalizedBlock));
+    protocolContext.getBlockchain().setFinalized(finalizedBlock);
+    protocolContext.getBlockchain().setSafeBlock(safeBlock);
   }
   @Override

View File

@@ -33,7 +33,8 @@ public final class BesuInfoTest {
   */
  @Test
  public void versionStringIsEthstatsFriendly() {
-    assertThat(BesuInfo.version()).matches("[^/]+/v(\\d+\\.\\d+\\.\\d+[^/]*|null)/[^/]+/[^/]+");
+    assertThat(BesuInfo.version())
+        .matches("[^/]+/v(\\d+\\.\\d+\\.\\d+[^/]*|null-null)/[^/]+/[^/]+");
  }
  /**
@@ -45,7 +46,7 @@ public final class BesuInfoTest {
  @Test
  public void noIdentityNodeNameIsEthstatsFriendly() {
    assertThat(BesuInfo.nodeName(Optional.empty()))
-        .matches("[^/]+/v(\\d+\\.\\d+\\.\\d+[^/]*|null)/[^/]+/[^/]+");
+        .matches("[^/]+/v(\\d+\\.\\d+\\.\\d+[^/]*|null-null)/[^/]+/[^/]+");
  }
  /**
@@ -58,6 +59,6 @@ public final class BesuInfoTest {
  @Test
  public void userIdentityNodeNameIsEthstatsFriendly() {
    assertThat(BesuInfo.nodeName(Optional.of("TestUserIdentity")))
-        .matches("[^/]+/[^/]+/v(\\d+\\.\\d+\\.\\d+[^/]*|null)/[^/]+/[^/]+");
+        .matches("[^/]+/[^/]+/v(\\d+\\.\\d+\\.\\d+[^/]*|null-null)/[^/]+/[^/]+");
  }
 }

View File

@@ -32,6 +32,10 @@ plugins {
   id 'maven-publish'
 }
+tasks.register('spotlessCheckFast') {
+  dependsOn subprojects.collect { it.tasks.withType(com.diffplug.gradle.spotless.SpotlessCheck) }
+}
 if (!JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_21)) {
   throw new GradleException("Java 21 or later is required to build Besu.\n" +
   " Detected version ${JavaVersion.current()}")
@@ -145,6 +149,10 @@ allprojects {
       url 'https://splunk.jfrog.io/splunk/ext-releases-local'
       content { includeGroupByRegex('com\\.splunk\\..*') }
     }
+    maven {
+      url 'https://gitlab.com/api/v4/projects/19871573/packages/maven'
+      content { includeGroupByRegex('com\\.gitlab\\.javafuzz(\\..*)?') }
+    }
     mavenCentral()
@@ -415,6 +423,12 @@ allprojects {
     options.addStringOption('Xwerror', '-html5')
     options.encoding = 'UTF-8'
   }
+  plugins.withType(JavaPlugin) {
+    tasks.withType(JavaCompile) {
+      it.dependsOn(rootProject.tasks.named('spotlessCheckFast'))
+    }
+  }
 }
 task deploy() {}
@@ -438,7 +452,7 @@ task checkMavenCoordinateCollisions {
 tasks.register('checkPluginAPIChanges', DefaultTask) {}
 checkPluginAPIChanges.dependsOn(':plugin-api:checkAPIChanges')
-check.dependsOn('checkPluginAPIChanges', 'checkMavenCoordinateCollisions')
+check.dependsOn('checkPluginAPIChanges', 'checkMavenCoordinateCollisions', 'spotlessCheckFast')
 subprojects {
@@ -790,7 +804,7 @@ task distDocker {
       dockerPlatform = "--platform ${project.getProperty('docker-platform')}"
       println "Building for platform ${project.getProperty('docker-platform')}"
     }
-    def gitDetails = getGitCommitDetails(7)
+    def gitDetails = getGitCommitDetails()
     executable shell
     workingDir dockerBuildDir
     args "-c", "docker buildx build ${dockerPlatform} --build-arg BUILD_DATE=${buildTime()} --build-arg VERSION=${dockerBuildVersion} --build-arg VCS_REF=${gitDetails.hash} -t ${image} --push ."
@@ -958,17 +972,13 @@ def buildTime() {
 def calculateVersion() {
   // Regex pattern for basic calendar versioning, with provision to omit patch rev
   def calVerPattern = ~/\d+\.\d+(\.\d+)?(-.*)?/
+  def gitDetails = getGitCommitDetails() // Adjust length as needed
   if (project.hasProperty('version') && (project.version =~ calVerPattern)) {
-    if (project.hasProperty('versionappendcommit') && project.versionappendcommit == "true") {
-      def gitDetails = getGitCommitDetails(7) // Adjust length as needed
-      return "${project.version}-${gitDetails.hash}"
-    }
-    return "${project.version}"
+    println("Generating project version using supplied version: ${project.version}-${gitDetails.hash}")
+    return "${project.version}-${gitDetails.hash}"
   } else {
     // If no version is supplied or it doesn't match the semantic versioning, calculate from git
-    println("Generating project version as supplied is version not semver: ${project.version}")
-    def gitDetails = getGitCommitDetails(7) // Adjust length as needed
+    println("Generating project version using date (${gitDetails.date}-develop-${gitDetails.hash}), as supplied version is not semver: ${project.version}")
     return "${gitDetails.date}-develop-${gitDetails.hash}"
   }
 }

View File

@@ -14,6 +14,9 @@
  */
 package org.hyperledger.besu.datatypes;
+import java.util.Comparator;
+import java.util.stream.Stream;
 /** Description and metadata for a hard fork */
 public interface HardforkId {
@@ -112,6 +115,19 @@
     public String description() {
       return description;
     }
+    /**
+     * The most recent finalized mainnet hardfork Besu supports. This will change across versions
+     * and will be updated after mainnet activations.
+     *
+     * @return the most recently activated mainnet spec.
+     */
+    public static MainnetHardforkId mostRecent() {
+      return Stream.of(MainnetHardforkId.values())
+          .filter(MainnetHardforkId::finalized)
+          .max(Comparator.naturalOrder())
+          .orElseThrow();
+    }
   }
   /** List of all Ethereum Classic hard forks. */
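For context (not part of the diff): a minimal usage sketch of the new helper, assuming MainnetHardforkId is the nested mainnet enum this hunk belongs to (its constants are not shown here).

```java
import org.hyperledger.besu.datatypes.HardforkId.MainnetHardforkId;

public class MostRecentForkSketch {
  public static void main(final String[] args) {
    // Picks the highest-ordinal finalized mainnet fork, per the stream/filter/max above.
    final MainnetHardforkId latest = MainnetHardforkId.mostRecent();
    System.out.println(latest + ": " + latest.description());
  }
}
```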

View File

@@ -21,4 +21,4 @@ Open the Zipkin UI by browsing to http://localhost:9411/
 You will be able to see the detail of your traces.
 References:
-* [OpenTelemetry Environment Variable Specification](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/sdk-environment-variables.md)
+* [OpenTelemetry Environment Variable Specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md)

View File

@@ -64,7 +64,9 @@ import io.vertx.core.VertxOptions;
 /** Provides a facade to construct the JSON-RPC component. */
 public class JsonRpcTestMethodsFactory {
-  private static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  private static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  private static final String CLIENT_VERSION = "0.1.0";
+  private static final String CLIENT_COMMIT = "12345678";
   private static final BigInteger NETWORK_ID = BigInteger.valueOf(123);
   private final BlockchainImporter importer;
@@ -175,7 +177,9 @@
     return new JsonRpcMethodsFactory()
         .methods(
+            CLIENT_NODE_NAME,
             CLIENT_VERSION,
+            CLIENT_COMMIT,
             NETWORK_ID,
             new StubGenesisConfigOptions(),
             peerDiscovery,

View File

@@ -63,6 +63,7 @@ public enum RpcMethod {
   ENGINE_FORKCHOICE_UPDATED_V2("engine_forkchoiceUpdatedV2"),
   ENGINE_FORKCHOICE_UPDATED_V3("engine_forkchoiceUpdatedV3"),
   ENGINE_EXCHANGE_TRANSITION_CONFIGURATION("engine_exchangeTransitionConfigurationV1"),
+  ENGINE_GET_CLIENT_VERSION_V1("engine_getClientVersionV1"),
   ENGINE_GET_PAYLOAD_BODIES_BY_HASH_V1("engine_getPayloadBodiesByHashV1"),
   ENGINE_GET_PAYLOAD_BODIES_BY_RANGE_V1("engine_getPayloadBodiesByRangeV1"),
   ENGINE_EXCHANGE_CAPABILITIES("engine_exchangeCapabilities"),

View File

@@ -71,7 +71,26 @@ public class DebugMetrics implements JsonRpcMethod {
   @SuppressWarnings("unchecked")
   private Map<String, Object> getNextMapLevel(
       final Map<String, Object> current, final String name) {
+    // Use compute to either return the existing map or create a new one
     return (Map<String, Object>)
-        current.computeIfAbsent(name, key -> new HashMap<String, Object>());
+        current.compute(
+            name,
+            (k, v) -> {
+              if (v instanceof Map) {
+                // If the value is already a Map, return it as is
+                return v;
+              } else {
+                // If the value is not a Map, create a new Map
+                Map<String, Object> newMap = new HashMap<>();
+                if (v != null) {
+                  // If v is not null and not a Map, we store it as a leaf value
+                  // If the original value was not null, store it under the "value" key
+                  // This handles cases where a metric value (e.g., Double) was previously stored
+                  // directly
+                  newMap.put("value", v);
+                }
+                return newMap;
+              }
+            });
   }
 }
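For context (not part of the diff): a standalone sketch of the compute-based behaviour above, showing how a scalar stored first is preserved under a "value" key once the same label later needs to become a nested level. The label names used here are hypothetical.

```java
import java.util.HashMap;
import java.util.Map;

public class NestedMetricSketch {
  @SuppressWarnings("unchecked")
  static Map<String, Object> nextLevel(final Map<String, Object> current, final String name) {
    // Mirrors the compute(...) lambda above: reuse an existing map, otherwise wrap any
    // previously stored scalar under "value" before descending.
    return (Map<String, Object>)
        current.compute(
            name,
            (k, v) -> {
              if (v instanceof Map) {
                return v;
              }
              final Map<String, Object> newMap = new HashMap<>();
              if (v != null) {
                newMap.put("value", v);
              }
              return newMap;
            });
  }

  public static void main(final String[] args) {
    final Map<String, Object> root = new HashMap<>();
    root.put("a", 1.0); // a scalar observation recorded first
    nextLevel(root, "a").put("b", 2.0); // a deeper observation arrives later
    System.out.println(root); // {a={value=1.0, b=2.0}} (entry order may vary)
  }
}
```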

View File

@@ -0,0 +1,57 @@
+/*
+ * Copyright contributors to Hyperledger Besu.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ */
+package org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine;
+import org.hyperledger.besu.ethereum.ProtocolContext;
+import org.hyperledger.besu.ethereum.api.jsonrpc.RpcMethod;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.ExecutionEngineJsonRpcMethod;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcResponse;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcSuccessResponse;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.EngineGetClientVersionResultV1;
+import io.vertx.core.Vertx;
+public class EngineGetClientVersionV1 extends ExecutionEngineJsonRpcMethod {
+  private static final String ENGINE_CLIENT_CODE = "BU";
+  private static final String ENGINE_CLIENT_NAME = "Besu";
+  private final String clientVersion;
+  private final String commit;
+  public EngineGetClientVersionV1(
+      final Vertx vertx,
+      final ProtocolContext protocolContext,
+      final EngineCallListener engineCallListener,
+      final String clientVersion,
+      final String commit) {
+    super(vertx, protocolContext, engineCallListener);
+    this.clientVersion = clientVersion;
+    this.commit = commit;
+  }
+  @Override
+  public String getName() {
+    return RpcMethod.ENGINE_GET_CLIENT_VERSION_V1.getMethodName();
+  }
+  @Override
+  public JsonRpcResponse syncResponse(final JsonRpcRequestContext request) {
+    return new JsonRpcSuccessResponse(
+        request.getRequest().getId(),
+        new EngineGetClientVersionResultV1(
+            ENGINE_CLIENT_CODE, ENGINE_CLIENT_NAME, clientVersion, commit));
+  }
+}

View File

@@ -0,0 +1,52 @@
+/*
+ * Copyright contributors to Hyperledger Besu.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ */
+package org.hyperledger.besu.ethereum.api.jsonrpc.internal.results;
+import com.fasterxml.jackson.annotation.JsonGetter;
+public class EngineGetClientVersionResultV1 {
+  private final String code;
+  private final String name;
+  private final String version;
+  private final String commit;
+  public EngineGetClientVersionResultV1(
+      final String code, final String name, final String version, final String commit) {
+    this.code = code;
+    this.name = name;
+    this.version = version;
+    this.commit = commit;
+  }
+  @JsonGetter(value = "code")
+  public String getCode() {
+    return code;
+  }
+  @JsonGetter(value = "name")
+  public String getName() {
+    return name;
+  }
+  @JsonGetter(value = "version")
+  public String getVersion() {
+    return version;
+  }
+  @JsonGetter(value = "commit")
+  public String getCommit() {
+    return commit;
+  }
+}
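For context (not part of the diff): a sketch of the JSON shape this result serialises to via its @JsonGetter accessors. The version and commit values are hypothetical, and property order may vary with Jackson configuration.

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.EngineGetClientVersionResultV1;

public class ClientVersionResultSketch {
  public static void main(final String[] args) throws Exception {
    final EngineGetClientVersionResultV1 result =
        new EngineGetClientVersionResultV1("BU", "Besu", "24.8.0", "abcdef12");
    // Produces a JSON object with the four annotated fields, e.g.
    // {"code":"BU","name":"Besu","version":"24.8.0","commit":"abcdef12"} (key order may vary)
    System.out.println(new ObjectMapper().writeValueAsString(result));
  }
}
```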

View File

@@ -23,6 +23,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineE
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineForkchoiceUpdatedV1;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineForkchoiceUpdatedV2;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineForkchoiceUpdatedV3;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetClientVersionV1;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadBodiesByHashV1;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadBodiesByRangeV1;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadV1;
@@ -57,13 +58,17 @@ public class ExecutionEngineJsonRpcMethods extends ApiGroupJsonRpcMethods {
   private final ProtocolContext protocolContext;
   private final EthPeers ethPeers;
   private final Vertx consensusEngineServer;
+  private final String clientVersion;
+  private final String commit;
   ExecutionEngineJsonRpcMethods(
       final MiningCoordinator miningCoordinator,
       final ProtocolSchedule protocolSchedule,
       final ProtocolContext protocolContext,
       final EthPeers ethPeers,
-      final Vertx consensusEngineServer) {
+      final Vertx consensusEngineServer,
+      final String clientVersion,
+      final String commit) {
     this.mergeCoordinator =
         Optional.ofNullable(miningCoordinator)
             .filter(mc -> mc.isCompatibleWithEngineApi())
@@ -72,6 +77,8 @@ public class ExecutionEngineJsonRpcMethods extends ApiGroupJsonRpcMethods {
     this.protocolContext = protocolContext;
     this.ethPeers = ethPeers;
     this.consensusEngineServer = consensusEngineServer;
+    this.clientVersion = clientVersion;
+    this.commit = commit;
   }
   @Override
@@ -147,7 +154,9 @@ public class ExecutionEngineJsonRpcMethods extends ApiGroupJsonRpcMethods {
               new EngineExchangeCapabilities(
                   consensusEngineServer, protocolContext, engineQosTimer),
               new EnginePreparePayloadDebug(
-                  consensusEngineServer, protocolContext, engineQosTimer, mergeCoordinator.get())));
+                  consensusEngineServer, protocolContext, engineQosTimer, mergeCoordinator.get()),
+              new EngineGetClientVersionV1(
+                  consensusEngineServer, protocolContext, engineQosTimer, clientVersion, commit)));
       if (protocolSchedule.anyMatch(p -> p.spec().getName().equalsIgnoreCase("cancun"))) {
         executionEngineApisSupported.add(

View File

@@ -54,7 +54,9 @@ import io.vertx.core.Vertx;
 public class JsonRpcMethodsFactory {
   public Map<String, JsonRpcMethod> methods(
+      final String clientNodeName,
       final String clientVersion,
+      final String commit,
       final BigInteger networkId,
       final GenesisConfigOptions genesisConfigOptions,
       final P2PNetwork p2pNetwork,
@@ -89,7 +91,7 @@ public class JsonRpcMethodsFactory {
     final List<JsonRpcMethods> availableApiGroups =
         List.of(
             new AdminJsonRpcMethods(
-                clientVersion,
+                clientNodeName,
                 networkId,
                 genesisConfigOptions,
                 p2pNetwork,
@@ -115,7 +117,9 @@ public class JsonRpcMethodsFactory {
                 protocolSchedule,
                 protocolContext,
                 ethPeers,
-                consensusEngineServer),
+                consensusEngineServer,
+                clientVersion,
+                commit),
             new EthJsonRpcMethods(
                 blockchainQueries,
                 synchronizer,
@@ -141,7 +145,7 @@ public class JsonRpcMethodsFactory {
                 filterManager),
             new PrivxJsonRpcMethods(
                 blockchainQueries, protocolSchedule, transactionPool, privacyParameters),
-            new Web3JsonRpcMethods(clientVersion),
+            new Web3JsonRpcMethods(clientNodeName),
             new TraceJsonRpcMethods(
                 blockchainQueries, protocolSchedule, protocolContext, apiConfiguration),
             new TxPoolJsonRpcMethods(transactionPool),

View File

@@ -75,7 +75,9 @@ public abstract class AbstractJsonRpcHttpServiceTest {
   protected BlockchainSetupUtil blockchainSetupUtil;
-  protected static String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  protected static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  protected static final String CLIENT_VERSION = "0.1.0";
+  protected static final String CLIENT_COMMIT = "12345678";
   protected static final BigInteger NETWORK_ID = BigInteger.valueOf(123);
   protected static final Collection<String> JSON_RPC_APIS =
       Arrays.asList(
@@ -168,7 +170,9 @@ public abstract class AbstractJsonRpcHttpServiceTest {
     return new JsonRpcMethodsFactory()
         .methods(
+            CLIENT_NODE_NAME,
             CLIENT_VERSION,
+            CLIENT_COMMIT,
             NETWORK_ID,
             new StubGenesisConfigOptions(),
             peerDiscoveryMock,

View File

@@ -58,13 +58,13 @@ public class AdminJsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
     final List<EthPeer> peerList = new ArrayList<>();
     final PeerInfo info1 =
         new PeerInfo(
-            4, CLIENT_VERSION, caps, 30302, Bytes.fromHexString(String.format("%0128x", 1)));
+            4, CLIENT_NODE_NAME, caps, 30302, Bytes.fromHexString(String.format("%0128x", 1)));
     final PeerInfo info2 =
         new PeerInfo(
-            4, CLIENT_VERSION, caps, 60302, Bytes.fromHexString(String.format("%0128x", 2)));
+            4, CLIENT_NODE_NAME, caps, 60302, Bytes.fromHexString(String.format("%0128x", 2)));
     final PeerInfo info3 =
         new PeerInfo(
-            4, CLIENT_VERSION, caps, 60303, Bytes.fromHexString(String.format("%0128x", 3)));
+            4, CLIENT_NODE_NAME, caps, 60303, Bytes.fromHexString(String.format("%0128x", 3)));
     final InetSocketAddress addr30301 = new InetSocketAddress("localhost", 30301);
     final InetSocketAddress addr30302 = new InetSocketAddress("localhost", 30302);
     final InetSocketAddress addr60301 = new InetSocketAddress("localhost", 60301);

View File

@@ -79,7 +79,9 @@ public class JsonRpcHttpServiceHostAllowlistTest {
   private static OkHttpClient client;
   private static String baseUrl;
   private static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");
-  private static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  private static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  private static final String CLIENT_VERSION = "0.1.0";
+  private static final String CLIENT_COMMIT = "12345678";
   private static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
   private final JsonRpcConfiguration jsonRpcConfig = createJsonRpcConfig();
@@ -100,7 +102,9 @@ public class JsonRpcHttpServiceHostAllowlistTest {
     rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 CHAIN_ID,
                 new StubGenesisConfigOptions(),
                 peerDiscoveryMock,

View File

@@ -100,7 +100,9 @@ public class JsonRpcHttpServiceLoginTest {
   protected static OkHttpClient client;
   protected static String baseUrl;
   protected static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");
-  protected static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  protected static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  protected static final String CLIENT_VERSION = "0.1.0";
+  protected static final String CLIENT_COMMIT = "12345678";
   protected static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
   protected static P2PNetwork peerDiscoveryMock;
   protected static BlockchainQueries blockchainQueries;
@@ -131,7 +133,9 @@ public class JsonRpcHttpServiceLoginTest {
     rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 CHAIN_ID,
                 genesisConfigOptions,
                 peerDiscoveryMock,

View File

@@ -94,7 +94,9 @@ public class JsonRpcHttpServiceRpcApisTest {
   private JsonRpcHttpService service;
   private static String baseUrl;
   private static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");
-  private static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  private static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  private static final String CLIENT_VERSION = "0.1.0";
+  private static final String CLIENT_COMMIT = "12345678";
   private static final BigInteger NETWORK_ID = BigInteger.valueOf(123);
   private JsonRpcConfiguration configuration;
   private static final List<String> netServices =
@@ -202,7 +204,9 @@ public class JsonRpcHttpServiceRpcApisTest {
     final Map<String, JsonRpcMethod> rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 NETWORK_ID,
                 new StubGenesisConfigOptions(),
                 mock(P2PNetwork.class),
@@ -310,7 +314,9 @@ public class JsonRpcHttpServiceRpcApisTest {
     final Map<String, JsonRpcMethod> rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 NETWORK_ID,
                 new StubGenesisConfigOptions(),
                 p2pNetwork,

View File

@@ -202,7 +202,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1127,7 +1127,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1143,7 +1143,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       final JsonObject json = new JsonObject(resp.body().string());
       testHelper.assertValidJsonRpcResult(json, id);
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1175,7 +1175,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, null);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1197,7 +1197,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1218,7 +1218,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1242,7 +1242,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1268,7 +1268,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1289,7 +1289,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       testHelper.assertValidJsonRpcResult(json, id);
       // Check result
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1353,7 +1353,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
       final JsonObject json = new JsonObject(resp.body().string());
       testHelper.assertValidJsonRpcResult(json, id);
       final String result = json.getString("result");
-      assertThat(result).isEqualTo(CLIENT_VERSION);
+      assertThat(result).isEqualTo(CLIENT_NODE_NAME);
     }
   }
@@ -1485,7 +1485,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
     // Check result web3_clientVersion
     final JsonObject jsonClientVersion = responses.get(clientVersionRequestId);
     testHelper.assertValidJsonRpcResult(jsonClientVersion, clientVersionRequestId);
-    assertThat(jsonClientVersion.getString("result")).isEqualTo(CLIENT_VERSION);
+    assertThat(jsonClientVersion.getString("result")).isEqualTo(CLIENT_NODE_NAME);
     // Check result unknown method
     final JsonObject jsonError = responses.get(brokenRequestId);
@@ -1540,7 +1540,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
     // Check result web3_clientVersion
     final JsonObject jsonClientVersion = responses.get(clientVersionRequestId);
     testHelper.assertValidJsonRpcResult(jsonClientVersion, clientVersionRequestId);
-    assertThat(jsonClientVersion.getString("result")).isEqualTo(CLIENT_VERSION);
+    assertThat(jsonClientVersion.getString("result")).isEqualTo(CLIENT_NODE_NAME);
     // Check invalid request
     final JsonObject jsonError = responses.get(invalidId);
@@ -1605,7 +1605,7 @@ public class JsonRpcHttpServiceTest extends JsonRpcHttpServiceTestBase {
     // Check result web3_clientVersion
     final JsonObject jsonClientVersion = responses.get(clientVersionRequestId);
     testHelper.assertValidJsonRpcResult(jsonClientVersion, clientVersionRequestId);
-    assertThat(jsonClientVersion.getString("result")).isEqualTo(CLIENT_VERSION);
+    assertThat(jsonClientVersion.getString("result")).isEqualTo(CLIENT_NODE_NAME);
     // Check result net_version
     final JsonObject jsonNetVersion = responses.get(netVersionRequestId);

View File

@@ -78,7 +78,9 @@ public class JsonRpcHttpServiceTestBase {
   protected static OkHttpClient client;
   protected static String baseUrl;
   protected static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");
-  protected static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  protected static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  protected static final String CLIENT_VERSION = "0.1.0";
+  protected static final String CLIENT_COMMIT = "12345678";
   protected static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
   protected static P2PNetwork peerDiscoveryMock;
   protected static EthPeers ethPeersMock;
@@ -108,7 +110,9 @@ public class JsonRpcHttpServiceTestBase {
     rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 CHAIN_ID,
                 new StubGenesisConfigOptions(),
                 peerDiscoveryMock,

View File

@@ -85,7 +85,9 @@ public class JsonRpcHttpServiceTlsClientAuthTest {
   protected static final Vertx vertx = Vertx.vertx();
   private static final String JSON_HEADER = "application/json; charset=utf-8";
-  private static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  private static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  private static final String CLIENT_VERSION = "0.1.0";
+  private static final String CLIENT_COMMIT = "12345678";
   private static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
   private static final NatService natService = new NatService(Optional.empty());
@@ -114,7 +116,9 @@ public class JsonRpcHttpServiceTlsClientAuthTest {
     rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 CHAIN_ID,
                 new StubGenesisConfigOptions(),
                 peerDiscoveryMock,

View File

@@ -75,7 +75,9 @@ class JsonRpcHttpServiceTlsMisconfigurationTest {
   protected static final Vertx vertx = Vertx.vertx();
-  private static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  private static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  private static final String CLIENT_VERSION = "0.1.0";
+  private static final String CLIENT_COMMIT = "12345678";
   private static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
   private static final NatService natService = new NatService(Optional.empty());
   private final SelfSignedP12Certificate besuCertificate = SelfSignedP12Certificate.create();
@@ -102,7 +104,9 @@ class JsonRpcHttpServiceTlsMisconfigurationTest {
     rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 CHAIN_ID,
                 new StubGenesisConfigOptions(),
                 peerDiscoveryMock,

View File

@@ -81,7 +81,9 @@ public class JsonRpcHttpServiceTlsTest {
   protected static final Vertx vertx = Vertx.vertx();
   private static final String JSON_HEADER = "application/json; charset=utf-8";
-  private static final String CLIENT_VERSION = "TestClientVersion/0.1.0";
+  private static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
+  private static final String CLIENT_VERSION = "0.1.0";
+  private static final String CLIENT_COMMIT = "12345678";
   private static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
   private static final NatService natService = new NatService(Optional.empty());
   private JsonRpcHttpService service;
@@ -103,7 +105,9 @@ public class JsonRpcHttpServiceTlsTest {
     rpcMethods =
         new JsonRpcMethodsFactory()
             .methods(
+                CLIENT_NODE_NAME,
                 CLIENT_VERSION,
+                CLIENT_COMMIT,
                 CHAIN_ID,
                 new StubGenesisConfigOptions(),
                 peerDiscoveryMock,

View File

@@ -16,6 +16,7 @@ package org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.hyperledger.besu.metrics.BesuMetricCategory.BLOCKCHAIN;
import static org.hyperledger.besu.metrics.BesuMetricCategory.PEERS; import static org.hyperledger.besu.metrics.BesuMetricCategory.PEERS;
import static org.hyperledger.besu.metrics.BesuMetricCategory.RPC; import static org.hyperledger.besu.metrics.BesuMetricCategory.RPC;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
@@ -28,6 +29,7 @@ import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.metrics.Observation; import org.hyperledger.besu.metrics.Observation;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import java.util.stream.Stream; import java.util.stream.Stream;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
@@ -84,6 +86,36 @@ public class DebugMetricsTest {
ImmutableMap.of("label2B", "value3"))))); ImmutableMap.of("label2B", "value3")))));
} }
@Test
public void shouldHandleDoubleValuesInNestedStructureWithoutClassCastException() {
// Tests fix for issue# 7383: debug_metrics method error
when(metricsSystem.streamObservations())
.thenReturn(
Stream.of(
// This creates a double value for "a"
new Observation(BLOCKCHAIN, "nested_metric", 1.0, List.of("a")),
// This attempts to create a nested structure under "a", which was previously a
// double
new Observation(BLOCKCHAIN, "nested_metric", 2.0, asList("a", "b")),
// This adds another level of nesting
new Observation(BLOCKCHAIN, "nested_metric", 3.0, asList("a", "b", "c"))));
assertResponse(
ImmutableMap.of(
BLOCKCHAIN.getName(),
ImmutableMap.of(
"nested_metric",
ImmutableMap.of(
"a",
ImmutableMap.of(
"value",
1.0,
"b",
ImmutableMap.of(
"value", 2.0,
"c", 3.0))))));
}
private void assertResponse(final ImmutableMap<String, Object> expectedResponse) { private void assertResponse(final ImmutableMap<String, Object> expectedResponse) {
final JsonRpcSuccessResponse response = (JsonRpcSuccessResponse) method.response(REQUEST); final JsonRpcSuccessResponse response = (JsonRpcSuccessResponse) method.response(REQUEST);
assertThat(response.getResult()).isEqualTo(expectedResponse); assertThat(response.getResult()).isEqualTo(expectedResponse);
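This test pins down the changelog's `ClassCastException` fix: a metric first observed as a bare double under label `a` later gains nested labels `a/b` and `a/b/c`, and the response builder must move the existing scalar under a `"value"` key instead of casting it to a `Map`. A hedged sketch of that merge rule using plain `HashMap`s, not Besu's actual `DebugMetrics` internals:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch: nest observation values by label path; when a scalar already occupies a node that
// now needs children, move it under a "value" key instead of casting it to a Map.
class NestedMetricsSketch {
  @SuppressWarnings("unchecked")
  static void put(final Map<String, Object> root, final List<String> labels, final double value) {
    Map<String, Object> node = root;
    for (int i = 0; i < labels.size() - 1; i++) {
      Object child = node.get(labels.get(i));
      if (child instanceof Map) {
        node = (Map<String, Object>) child;
      } else {
        Map<String, Object> next = new HashMap<>();
        if (child != null) {
          next.put("value", child); // a bare double previously stored at this level
        }
        node.put(labels.get(i), next);
        node = next;
      }
    }
    node.put(labels.get(labels.size() - 1), value);
  }

  public static void main(String[] args) {
    Map<String, Object> metrics = new HashMap<>();
    put(metrics, List.of("a"), 1.0);
    put(metrics, List.of("a", "b"), 2.0);
    put(metrics, List.of("a", "b", "c"), 3.0);
    // structure: {a={value=1.0, b={value=2.0, c=3.0}}} -- the same shape the test above expects
    System.out.println(metrics);
  }
}
```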

View File

@@ -0,0 +1,72 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.ethereum.ProtocolContext;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequest;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcSuccessResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.EngineGetClientVersionResultV1;
import io.vertx.core.Vertx;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
class EngineGetClientVersionV1Test {
private static final String ENGINE_CLIENT_CODE = "BU";
private static final String ENGINE_CLIENT_NAME = "Besu";
private static final String CLIENT_VERSION = "v25.6.7-dev-abcdef12";
private static final String COMMIT = "abcdef12";
private EngineGetClientVersionV1 getClientVersion;
@BeforeEach
void before() {
getClientVersion =
new EngineGetClientVersionV1(
Mockito.mock(Vertx.class),
Mockito.mock(ProtocolContext.class),
Mockito.mock(EngineCallListener.class),
CLIENT_VERSION,
COMMIT);
}
@Test
void testGetName() {
assertThat(getClientVersion.getName()).isEqualTo("engine_getClientVersionV1");
}
@Test
void testSyncResponse() {
JsonRpcRequestContext request = new JsonRpcRequestContext(new JsonRpcRequest("v", "m", null));
JsonRpcResponse actualResult = getClientVersion.syncResponse(request);
assertThat(actualResult).isInstanceOf(JsonRpcSuccessResponse.class);
JsonRpcSuccessResponse successResponse = (JsonRpcSuccessResponse) actualResult;
assertThat(successResponse.getResult()).isInstanceOf(EngineGetClientVersionResultV1.class);
EngineGetClientVersionResultV1 actualEngineGetClientVersionResultV1 =
(EngineGetClientVersionResultV1) successResponse.getResult();
assertThat(actualEngineGetClientVersionResultV1.getName()).isEqualTo(ENGINE_CLIENT_NAME);
assertThat(actualEngineGetClientVersionResultV1.getCode()).isEqualTo(ENGINE_CLIENT_CODE);
assertThat(actualEngineGetClientVersionResultV1.getVersion()).isEqualTo(CLIENT_VERSION);
assertThat(actualEngineGetClientVersionResultV1.getCommit()).isEqualTo(COMMIT);
}
}
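For context, a rough sketch of the wire-level exchange this new test covers. The result field names follow the `EngineGetClientVersionResultV1` getters asserted above; the request envelope, the caller's identity, and all concrete values are illustrative assumptions:

```java
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;

// Hedged sketch of an engine_getClientVersionV1 exchange; every concrete value is made up.
class EngineGetClientVersionV1Example {
  public static void main(String[] args) {
    JsonObject request = new JsonObject()
        .put("jsonrpc", "2.0")
        .put("id", 1)
        .put("method", "engine_getClientVersionV1")
        .put("params", new JsonArray().add(new JsonObject()
            .put("code", "XX")        // the caller's own client code (assumption)
            .put("name", "SomeClient")
            .put("version", "v1.0.0")
            .put("commit", "00000000")));

    JsonObject besuEntry = new JsonObject()
        .put("code", "BU")            // ENGINE_CLIENT_CODE
        .put("name", "Besu")          // ENGINE_CLIENT_NAME
        .put("version", "v25.6.7-dev-abcdef12")
        .put("commit", "abcdef12");

    System.out.println(request.encodePrettily());
    System.out.println(new JsonArray().add(besuEntry).encodePrettily());
  }
}
```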

View File

@@ -115,7 +115,9 @@ public class WebSocketServiceLoginTest {
protected static OkHttpClient client; protected static OkHttpClient client;
protected static String baseUrl; protected static String baseUrl;
protected static final MediaType JSON = MediaType.parse("application/json; charset=utf-8"); protected static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");
protected static final String CLIENT_VERSION = "TestClientVersion/0.1.0"; protected static final String CLIENT_NODE_NAME = "TestClientVersion/0.1.0";
protected static final String CLIENT_VERSION = "0.1.0";
protected static final String CLIENT_COMMIT = "12345678";
protected static final BigInteger CHAIN_ID = BigInteger.valueOf(123); protected static final BigInteger CHAIN_ID = BigInteger.valueOf(123);
protected static P2PNetwork peerDiscoveryMock; protected static P2PNetwork peerDiscoveryMock;
protected static BlockchainQueries blockchainQueries; protected static BlockchainQueries blockchainQueries;
@@ -167,7 +169,9 @@ public class WebSocketServiceLoginTest {
spy( spy(
new JsonRpcMethodsFactory() new JsonRpcMethodsFactory()
.methods( .methods(
CLIENT_NODE_NAME,
CLIENT_VERSION, CLIENT_VERSION,
CLIENT_COMMIT,
CHAIN_ID, CHAIN_ID,
genesisConfigOptions, genesisConfigOptions,
peerDiscoveryMock, peerDiscoveryMock,

View File

@@ -158,22 +158,23 @@ public class BlockHeader extends SealableBlockHeader
out.writeBytes(extraData); out.writeBytes(extraData);
out.writeBytes(mixHashOrPrevRandao); out.writeBytes(mixHashOrPrevRandao);
out.writeLong(nonce); out.writeLong(nonce);
if (baseFee != null) { do {
if (baseFee == null) break;
out.writeUInt256Scalar(baseFee); out.writeUInt256Scalar(baseFee);
}
if (withdrawalsRoot != null) { if (withdrawalsRoot == null) break;
out.writeBytes(withdrawalsRoot); out.writeBytes(withdrawalsRoot);
}
if (excessBlobGas != null && blobGasUsed != null) { if (excessBlobGas == null || blobGasUsed == null) break;
out.writeLongScalar(blobGasUsed); out.writeLongScalar(blobGasUsed);
out.writeUInt64Scalar(excessBlobGas); out.writeUInt64Scalar(excessBlobGas);
}
if (parentBeaconBlockRoot != null) { if (parentBeaconBlockRoot == null) break;
out.writeBytes(parentBeaconBlockRoot); out.writeBytes(parentBeaconBlockRoot);
}
if (requestsRoot != null) { if (requestsRoot == null) break;
out.writeBytes(requestsRoot); out.writeBytes(requestsRoot);
} } while (false);
out.endList(); out.endList();
} }
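The serialization of the optional trailing header fields switches from independent `if` blocks to a single `do { ... } while (false)` cascade: as soon as one optional field is absent, every later field is skipped too, so the encoded header can never contain gaps. A minimal standalone sketch of the pattern (field names and the list-of-strings stand-in are illustrative, not Besu's `RLPOutput` API):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the do/while(false) cascade: a missing field stops all later writes.
class TrailingFieldCascade {
  static List<String> encode(String baseFee, String withdrawalsRoot, String excessBlobGas) {
    List<String> out = new ArrayList<>();
    do {
      if (baseFee == null) break;
      out.add(baseFee);
      if (withdrawalsRoot == null) break;
      out.add(withdrawalsRoot);
      if (excessBlobGas == null) break;
      out.add(excessBlobGas);
    } while (false);
    return out;
  }

  public static void main(String[] args) {
    // withdrawalsRoot is absent, so excessBlobGas is skipped even though it is present
    System.out.println(encode("baseFee", null, "excessBlobGas")); // prints [baseFee]
  }
}
```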

View File

@@ -36,8 +36,6 @@ import java.io.InputStreamReader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.function.Supplier; import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.google.common.base.Suppliers; import com.google.common.base.Suppliers;
import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes;
@@ -175,12 +173,8 @@ public class CodeValidateSubCommand implements Runnable {
((CodeV1) code).getEofLayout().containerMode().get())) { ((CodeV1) code).getEofLayout().containerMode().get())) {
return "err: code is valid initcode. Runtime code expected"; return "err: code is valid initcode. Runtime code expected";
} else { } else {
return "OK " return "OK %d/%d/%d"
+ IntStream.range(0, code.getCodeSectionCount()) .formatted(code.getCodeSectionCount(), code.getSubcontainerCount(), code.getDataSize());
.mapToObj(code::getCodeSection)
.map(cs -> code.getBytes().slice(cs.getEntryPoint(), cs.getLength()))
.map(Bytes::toUnprefixedHexString)
.collect(Collectors.joining(","));
} }
} }
} }
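The success line of `code-validate` changes from dumping each code section's bytes to a compact summary. A tiny sketch of the new format string with made-up counts:

```java
// "OK <codeSections>/<subcontainers>/<dataSize>", so "OK 1/0/0" means one code section,
// no subcontainers, and an empty data section; the counts here are made up.
class OkLineFormat {
  public static void main(String[] args) {
    int codeSectionCount = 1;
    int subcontainerCount = 0;
    int dataSize = 0;
    System.out.println("OK %d/%d/%d".formatted(codeSectionCount, subcontainerCount, dataSize));
  }
}
```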

View File

@@ -478,7 +478,14 @@ public class EvmToolCommand implements Runnable {
.mixHash(Hash.ZERO) .mixHash(Hash.ZERO)
.nonce(0) .nonce(0)
.blockHeaderFunctions(new MainnetBlockHeaderFunctions()) .blockHeaderFunctions(new MainnetBlockHeaderFunctions())
.baseFee(component.getBlockchain().getChainHeadHeader().getBaseFee().orElse(null)) .baseFee(
component
.getBlockchain()
.getChainHeadHeader()
.getBaseFee()
.or(() -> genesisFileModule.providesGenesisConfigFile().getBaseFeePerGas())
.orElse(
protocolSpec.getFeeMarket().implementsBaseFee() ? Wei.of(0xa) : null))
.buildBlockHeader(); .buildBlockHeader();
Address contractAddress = Address contractAddress =
@@ -519,13 +526,12 @@ public class EvmToolCommand implements Runnable {
lastTime = stopwatch.elapsed().toNanos(); lastTime = stopwatch.elapsed().toNanos();
} }
if (lastLoop) { if (lastLoop) {
if (messageFrame.getExceptionalHaltReason().isPresent()) { messageFrame
out.println(messageFrame.getExceptionalHaltReason().get()); .getExceptionalHaltReason()
} .ifPresent(haltReason -> out.println(haltReason));
if (messageFrame.getRevertReason().isPresent()) { messageFrame
out.println( .getRevertReason()
new String(messageFrame.getRevertReason().get().toArrayUnsafe(), UTF_8)); .ifPresent(bytes -> out.println(new String(bytes.toArrayUnsafe(), UTF_8)));
}
} }
} }
} }
@@ -572,7 +578,7 @@ public class EvmToolCommand implements Runnable {
out.println("{"); out.println("{");
worldState worldState
.streamAccounts(Bytes32.ZERO, Integer.MAX_VALUE) .streamAccounts(Bytes32.ZERO, Integer.MAX_VALUE)
.sorted(Comparator.comparing(o -> o.getAddress().get().toHexString())) .sorted(Comparator.comparing(o -> o.getAddress().orElse(Address.ZERO).toHexString()))
.forEach( .forEach(
a -> { a -> {
var account = worldState.get(a.getAddress().get()); var account = worldState.get(a.getAddress().get());
@@ -585,7 +591,7 @@ public class EvmToolCommand implements Runnable {
.map( .map(
e -> e ->
Map.entry( Map.entry(
e.getKey().get(), e.getKey().orElse(UInt256.ZERO),
account.getStorageValue(UInt256.fromBytes(e.getKey().get())))) account.getStorageValue(UInt256.fromBytes(e.getKey().get()))))
.filter(e -> !e.getValue().isZero()) .filter(e -> !e.getValue().isZero())
.sorted(Map.Entry.comparingByKey()) .sorted(Map.Entry.comparingByKey())
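The base-fee wiring above now falls back through a chain of sources: the chain-head header, then the genesis file's `baseFeePerGas`, then a default of `0xa` when the active fee market prices a base fee, and otherwise no base fee at all. A simplified sketch of that resolution order using plain `Optional`s (method and parameter names here are illustrative, not Besu's API):

```java
import java.util.Optional;

// Illustrative only: the resolution order used above, with plain longs standing in for Wei.
class BaseFeeFallback {
  static Optional<Long> resolve(
      final Optional<Long> chainHeadBaseFee,
      final Optional<Long> genesisBaseFeePerGas,
      final boolean feeMarketImplementsBaseFee) {
    return chainHeadBaseFee
        .or(() -> genesisBaseFeePerGas)
        .or(() -> feeMarketImplementsBaseFee ? Optional.of(0xaL) : Optional.<Long>empty());
  }

  public static void main(String[] args) {
    System.out.println(resolve(Optional.empty(), Optional.empty(), true));  // Optional[10]
    System.out.println(resolve(Optional.empty(), Optional.empty(), false)); // Optional.empty
  }
}
```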

View File

@@ -18,6 +18,7 @@ import org.hyperledger.besu.cli.config.EthNetworkConfig;
import org.hyperledger.besu.cli.config.NetworkName; import org.hyperledger.besu.cli.config.NetworkName;
import org.hyperledger.besu.config.GenesisConfigFile; import org.hyperledger.besu.config.GenesisConfigFile;
import org.hyperledger.besu.config.GenesisConfigOptions; import org.hyperledger.besu.config.GenesisConfigOptions;
import org.hyperledger.besu.datatypes.HardforkId.MainnetHardforkId;
import org.hyperledger.besu.ethereum.chain.GenesisState; import org.hyperledger.besu.ethereum.chain.GenesisState;
import org.hyperledger.besu.ethereum.core.Block; import org.hyperledger.besu.ethereum.core.Block;
import org.hyperledger.besu.ethereum.core.BlockHeaderFunctions; import org.hyperledger.besu.ethereum.core.BlockHeaderFunctions;
@@ -28,6 +29,7 @@ import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.Files; import java.nio.file.Files;
import java.util.Locale;
import java.util.Optional; import java.util.Optional;
import javax.inject.Named; import javax.inject.Named;
import javax.inject.Singleton; import javax.inject.Singleton;
@@ -116,7 +118,7 @@ public class GenesisFileModule {
final JsonObject config = new JsonObject(); final JsonObject config = new JsonObject();
genesis.put("config", config); genesis.put("config", config);
config.put("chainId", 1337); config.put("chainId", 1337);
config.put("londonBlock", 0); config.put(MainnetHardforkId.mostRecent().toString().toLowerCase(Locale.ROOT) + "Time", 0);
genesis.put("baseFeePerGas", "0x3b9aca00"); genesis.put("baseFeePerGas", "0x3b9aca00");
genesis.put("gasLimit", "0x2540be400"); genesis.put("gasLimit", "0x2540be400");
genesis.put("difficulty", "0x0"); genesis.put("difficulty", "0x0");

View File

@@ -27,7 +27,6 @@ import org.hyperledger.besu.ethereum.mainnet.MainnetProtocolSchedule;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule; import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.ethereum.mainnet.ProtocolScheduleBuilder; import org.hyperledger.besu.ethereum.mainnet.ProtocolScheduleBuilder;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSpecAdapters; import org.hyperledger.besu.ethereum.mainnet.ProtocolSpecAdapters;
import org.hyperledger.besu.evm.EvmSpecVersion;
import org.hyperledger.besu.evm.internal.EvmConfiguration; import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem; import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
@@ -71,13 +70,12 @@ class MainnetGenesisFileModule extends GenesisFileModule {
} }
} }
var schedules = createSchedules(configOptions.getChainId().orElse(BigInteger.valueOf(1337))); if (fork.isPresent()) {
var schedule = var schedules = createSchedules(configOptions.getChainId().orElse(BigInteger.valueOf(1337)));
schedules.get( var schedule = schedules.get(fork.get().toLowerCase(Locale.getDefault()));
fork.orElse(EvmSpecVersion.defaultVersion().getName()) if (schedule != null) {
.toLowerCase(Locale.getDefault())); return schedule.get();
if (schedule != null) { }
return schedule.get();
} }
return MainnetProtocolSchedule.fromConfig( return MainnetProtocolSchedule.fromConfig(

View File

@@ -47,7 +47,7 @@ class CodeValidationSubCommandTest {
EvmToolCommand parentCommand = new EvmToolCommand(bais, new PrintWriter(baos, true, UTF_8)); EvmToolCommand parentCommand = new EvmToolCommand(bais, new PrintWriter(baos, true, UTF_8));
final CodeValidateSubCommand codeValidateSubCommand = new CodeValidateSubCommand(parentCommand); final CodeValidateSubCommand codeValidateSubCommand = new CodeValidateSubCommand(parentCommand);
codeValidateSubCommand.run(); codeValidateSubCommand.run();
assertThat(baos.toString(UTF_8)).contains("OK 00\n"); assertThat(baos.toString(UTF_8)).contains("OK 1/0/0\n");
} }
@Test @Test
@@ -57,7 +57,8 @@ class CodeValidationSubCommandTest {
EvmToolCommand parentCommand = new EvmToolCommand(bais, new PrintWriter(baos, true, UTF_8)); EvmToolCommand parentCommand = new EvmToolCommand(bais, new PrintWriter(baos, true, UTF_8));
final CodeValidateSubCommand codeValidateSubCommand = new CodeValidateSubCommand(parentCommand); final CodeValidateSubCommand codeValidateSubCommand = new CodeValidateSubCommand(parentCommand);
codeValidateSubCommand.run(); codeValidateSubCommand.run();
assertThat(baos.toString(UTF_8)).contains("err: layout - EOF header byte 1 incorrect\n"); assertThat(baos.toString(UTF_8))
.contains("err: layout - invalid_magic EOF header byte 1 incorrect\n");
} }
@Test @Test
@@ -70,9 +71,9 @@ class CodeValidationSubCommandTest {
assertThat(baos.toString(UTF_8)) assertThat(baos.toString(UTF_8))
.contains( .contains(
""" """
OK 00 OK 1/0/0
err: layout - EOF header byte 1 incorrect err: layout - invalid_magic EOF header byte 1 incorrect
OK 5f5ff3 OK 1/0/0
"""); """);
} }
@@ -85,7 +86,7 @@ class CodeValidationSubCommandTest {
final CommandLine cmd = new CommandLine(codeValidateSubCommand); final CommandLine cmd = new CommandLine(codeValidateSubCommand);
cmd.parseArgs(CODE_STOP_ONLY); cmd.parseArgs(CODE_STOP_ONLY);
codeValidateSubCommand.run(); codeValidateSubCommand.run();
assertThat(baos.toString(UTF_8)).contains("OK 00\n"); assertThat(baos.toString(UTF_8)).contains("OK 1/0/0\n");
} }
@Test @Test
@@ -97,7 +98,8 @@ class CodeValidationSubCommandTest {
final CommandLine cmd = new CommandLine(codeValidateSubCommand); final CommandLine cmd = new CommandLine(codeValidateSubCommand);
cmd.parseArgs(CODE_BAD_MAGIC); cmd.parseArgs(CODE_BAD_MAGIC);
codeValidateSubCommand.run(); codeValidateSubCommand.run();
assertThat(baos.toString(UTF_8)).contains("err: layout - EOF header byte 1 incorrect\n"); assertThat(baos.toString(UTF_8))
.contains("err: layout - invalid_magic EOF header byte 1 incorrect\n");
} }
@Test @Test
@@ -112,9 +114,9 @@ class CodeValidationSubCommandTest {
assertThat(baos.toString(UTF_8)) assertThat(baos.toString(UTF_8))
.contains( .contains(
""" """
OK 00 OK 1/0/0
err: layout - EOF header byte 1 incorrect err: layout - invalid_magic EOF header byte 1 incorrect
OK 5f5ff3 OK 1/0/0
"""); """);
} }
@@ -127,7 +129,7 @@ class CodeValidationSubCommandTest {
final CommandLine cmd = new CommandLine(codeValidateSubCommand); final CommandLine cmd = new CommandLine(codeValidateSubCommand);
cmd.parseArgs(CODE_RETURN_ONLY); cmd.parseArgs(CODE_RETURN_ONLY);
codeValidateSubCommand.run(); codeValidateSubCommand.run();
assertThat(baos.toString(UTF_8)).contains("OK 5f5ff3\n"); assertThat(baos.toString(UTF_8)).contains("OK 1/0/0\n");
} }
@Test @Test
@@ -139,7 +141,7 @@ class CodeValidationSubCommandTest {
final CommandLine cmd = new CommandLine(codeValidateSubCommand); final CommandLine cmd = new CommandLine(codeValidateSubCommand);
cmd.parseArgs(CODE_INTERIOR_COMMENTS); cmd.parseArgs(CODE_INTERIOR_COMMENTS);
codeValidateSubCommand.run(); codeValidateSubCommand.run();
assertThat(baos.toString(UTF_8)).contains("OK 59595959e300015000,f8e4\n"); assertThat(baos.toString(UTF_8)).contains("OK 2/0/0\n");
} }
@Test @Test
@@ -153,9 +155,9 @@ class CodeValidationSubCommandTest {
assertThat(baos.toString(UTF_8)) assertThat(baos.toString(UTF_8))
.isEqualTo( .isEqualTo(
""" """
OK 00 OK 1/0/0
err: layout - EOF header byte 1 incorrect err: layout - invalid_magic EOF header byte 1 incorrect
OK 5f5ff3 OK 1/0/0
"""); """);
} }
} }

View File

@@ -0,0 +1,7 @@
{
"cli": [
"code-validate"
],
"stdin": "0xef0001010004020001000b0300010014040004000080000436600060ff6000ec005000ef000101000402000100010400000000800000feda7ac0de",
"stdout": "OK 1/1/4\n"
}

View File

@@ -3,5 +3,5 @@
"code-validate" "code-validate"
], ],
"stdin": "ef00010100040200010001040000000080000000", "stdin": "ef00010100040200010001040000000080000000",
"stdout": "OK 00\n" "stdout": "OK 1/0/0\n"
} }

View File

@@ -4,5 +4,5 @@
"ef00010100040200010001040000000080000000ff" "ef00010100040200010001040000000080000000ff"
], ],
"stdin": "", "stdin": "",
"stdout": "EOF layout is invalid - Dangling data after end of all sections\n" "stdout": "EOF layout is invalid - invalid_section_bodies_size data after end of all sections\n"
} }

View File

@@ -71,7 +71,7 @@
} }
}, },
"stdout": [ "stdout": [
{"output":"","gasUsed":"0xd198","test":"create-eof","fork":"Prague","d":0,"g":0,"v":0,"postHash":"0x2a9c58298ba5d4ec86ca682b9fcc9ff67c3fc44dbd39f85a2f9b74bfe4e5178e","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":false,"error":"Invalid EOF Layout: Expected kind 1 but read kind 17"}, {"output":"","gasUsed":"0xd198","test":"create-eof","fork":"Prague","d":0,"g":0,"v":0,"postHash":"0x2a9c58298ba5d4ec86ca682b9fcc9ff67c3fc44dbd39f85a2f9b74bfe4e5178e","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":false,"error":"Invalid EOF Layout: unexpected_header_kind expected 1 actual 17"},
{"pc":0,"op":239,"gas":"0x794068","gasCost":"0x0","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"INVALID","error":"Bad instruction"}, {"pc":0,"op":239,"gas":"0x794068","gasCost":"0x0","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"INVALID","error":"Bad instruction"},
{"output":"","gasUsed":"0x7a1200","test":"create-eof","fork":"Cancun","d":0,"g":0,"v":0,"postHash":"0xaa80d89bc89f58da8de41d3894bd1a241896ff91f7a5964edaefb39e8e3a4a98","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":true,"error":"INVALID_OPERATION"} {"output":"","gasUsed":"0x7a1200","test":"create-eof","fork":"Cancun","d":0,"g":0,"v":0,"postHash":"0xaa80d89bc89f58da8de41d3894bd1a241896ff91f7a5964edaefb39e8e3a4a98","postLogsHash":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","pass":true,"error":"INVALID_OPERATION"}
] ]

View File

@@ -12,8 +12,8 @@
"stdin": "", "stdin": "",
"stdout": [ "stdout": [
{"pc":0,"section":0,"op":227,"immediate":"0x0002","gas":"0x2540be400","gasCost":"0x5","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"CALLF"}, {"pc":0,"section":0,"op":227,"immediate":"0x0002","gas":"0x2540be400","gasCost":"0x5","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"CALLF"},
{"pc":0,"section":2,"op":229,"immediate":"0x0002","gas":"0x2540be3fb","gasCost":"0x5","memSize":0,"stack":[],"depth":1,"fdepth":1,"refund":0,"opName":"JUMPF"}, {"pc":0,"section":2,"op":229,"immediate":"0x0002","gas":"0x2540be3fb","gasCost":"0x5","memSize":0,"stack":[],"depth":1,"functionDepth":1,"refund":0,"opName":"JUMPF"},
{"pc":0,"section":1,"op":228,"gas":"0x2540be3f6","gasCost":"0x3","memSize":0,"stack":[],"depth":1,"fdepth":1,"refund":0,"opName":"RETF"}, {"pc":0,"section":1,"op":228,"gas":"0x2540be3f6","gasCost":"0x3","memSize":0,"stack":[],"depth":1,"functionDepth":1,"refund":0,"opName":"RETF"},
{"pc":3,"section":0,"op":97,"immediate":"0x2015","gas":"0x2540be3f3","gasCost":"0x3","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"PUSH2"}, {"pc":3,"section":0,"op":97,"immediate":"0x2015","gas":"0x2540be3f3","gasCost":"0x3","memSize":0,"stack":[],"depth":1,"refund":0,"opName":"PUSH2"},
{"pc":6,"section":0,"op":96,"immediate":"0x01","gas":"0x2540be3f0","gasCost":"0x3","memSize":0,"stack":["0x2015"],"depth":1,"refund":0,"opName":"PUSH1"}, {"pc":6,"section":0,"op":96,"immediate":"0x01","gas":"0x2540be3f0","gasCost":"0x3","memSize":0,"stack":["0x2015"],"depth":1,"refund":0,"opName":"PUSH1"},
{"pc":8,"section":0,"op":85,"gas":"0x2540be3ed","gasCost":"0x5654","memSize":0,"stack":["0x2015","0x1"],"depth":1,"refund":0,"opName":"SSTORE"}, {"pc":8,"section":0,"op":85,"gas":"0x2540be3ed","gasCost":"0x5654","memSize":0,"stack":["0x2015","0x1"],"depth":1,"refund":0,"opName":"SSTORE"},

View File

@@ -243,7 +243,7 @@ public interface DNSEntry {
public ENRTreeLink(final String enrTreeLink) { public ENRTreeLink(final String enrTreeLink) {
final URI uri = URI.create(enrTreeLink); final URI uri = URI.create(enrTreeLink);
this.domainName = uri.getHost(); this.domainName = uri.getHost();
this.encodedPubKey = uri.getUserInfo(); this.encodedPubKey = uri.getUserInfo() == null ? "" : uri.getUserInfo();
this.pubKey = fromBase32(encodedPubKey); this.pubKey = fromBase32(encodedPubKey);
} }
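The guard above matters because `java.net.URI` returns `null` user-info when an `enrtree://` link carries no `publickey@` prefix, and feeding `null` into `fromBase32` would then fail. A small illustration with a made-up host and key:

```java
import java.net.URI;

// Host and key below are made up; only URI.getUserInfo() behaviour is being illustrated.
class EnrTreeLinkUserInfo {
  public static void main(String[] args) {
    URI withKey = URI.create("enrtree://AM5FNQLAFCCGPDB7T64UXR6JETN@nodes.example.org");
    URI withoutKey = URI.create("enrtree://nodes.example.org");
    System.out.println(withKey.getUserInfo());    // the base32-encoded public key
    System.out.println(withoutKey.getUserInfo()); // null -> the fix substitutes ""
  }
}
```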

View File

@@ -20,9 +20,11 @@ import java.nio.file.Path;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import com.google.common.base.Splitter;
import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes;
import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec; import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec;
@@ -31,9 +33,7 @@ import org.hyperledger.besu.ethereum.referencetests.ReferenceTestProtocolSchedul
import org.hyperledger.besu.evm.Code; import org.hyperledger.besu.evm.Code;
import org.hyperledger.besu.evm.EVM; import org.hyperledger.besu.evm.EVM;
import org.hyperledger.besu.evm.code.CodeInvalid; import org.hyperledger.besu.evm.code.CodeInvalid;
import org.hyperledger.besu.evm.code.CodeV1;
import org.hyperledger.besu.evm.code.EOFLayout; import org.hyperledger.besu.evm.code.EOFLayout;
import org.hyperledger.besu.evm.code.EOFLayout.EOFContainerMode;
import org.hyperledger.besu.testutil.JsonTestParameters; import org.hyperledger.besu.testutil.JsonTestParameters;
public class EOFReferenceTestTools { public class EOFReferenceTestTools {
@@ -124,6 +124,25 @@ public class EOFReferenceTestTools {
// hardwire in the magic byte transaction checks // hardwire in the magic byte transaction checks
if (evm.getMaxEOFVersion() < 1) { if (evm.getMaxEOFVersion() < 1) {
assertThat(expected.exception()).isEqualTo("EOF_InvalidCode"); assertThat(expected.exception()).isEqualTo("EOF_InvalidCode");
} else if (code.size() > evm.getEvmVersion().getMaxInitcodeSize()) {
// this check is in EOFCREATE and Transaction validator, but unit tests sniff it out.
assertThat(false)
.withFailMessage(
() ->
"No Expected exception, actual exception - container_size_above_limit "
+ code.size())
.isEqualTo(expected.result());
if (name.contains("eip7692")) {
// if the test is from EEST, validate the exception name.
assertThat("container_size_above_limit")
.withFailMessage(
() ->
"Expected exception: %s actual exception: %s %d"
.formatted(
expected.exception(), "container_size_above_limit ", code.size()))
.containsIgnoringCase(expected.exception().replace("EOFException.", ""));
}
} else { } else {
EOFLayout layout = EOFLayout.parseEOF(code); EOFLayout layout = EOFLayout.parseEOF(code);
@@ -134,39 +153,27 @@ public class EOFReferenceTestTools {
} else { } else {
parsedCode = evm.getCodeUncached(code); parsedCode = evm.getCodeUncached(code);
} }
if ("EOF_IncompatibleContainerKind".equals(expected.exception()) && parsedCode.isValid()) { if (expected.result()) {
EOFContainerMode expectedMode =
EOFContainerMode.valueOf(containerKind == null ? "RUNTIME" : containerKind);
EOFContainerMode containerMode =
((CodeV1) parsedCode).getEofLayout().containerMode().get();
EOFContainerMode actualMode =
containerMode == null ? EOFContainerMode.RUNTIME : containerMode;
assertThat(actualMode)
.withFailMessage("Code did not parse to valid containerKind of " + expectedMode)
.isNotEqualTo(expectedMode);
} else {
if (expected.result()) {
assertThat(parsedCode.isValid()) assertThat(parsedCode.isValid())
.withFailMessage( .withFailMessage(
() -> "Valid code failed with " + ((CodeInvalid) parsedCode).getInvalidReason()) () -> "Valid code failed with " + ((CodeInvalid) parsedCode).getInvalidReason())
.isTrue(); .isTrue();
} else { } else {
assertThat(parsedCode.isValid()) assertThat(parsedCode.isValid())
.withFailMessage("Invalid code expected " + expected.exception() + " but was valid") .withFailMessage("Invalid code expected " + expected.exception() + " but was valid")
.isFalse(); .isFalse();
if (name.contains("eip7692")) { if (name.contains("eip7692")) {
// if the test is from EEST, validate the exception name. // if the test is from EEST, validate the exception name.
assertThat(((CodeInvalid) parsedCode).getInvalidReason()) assertThat(((CodeInvalid) parsedCode).getInvalidReason())
.withFailMessage( .withFailMessage(
() -> () ->
"Expected exception :%s actual exception: %s" "Expected exception :%s actual exception: %s"
.formatted( .formatted(
expected.exception(), expected.exception(),
(parsedCode.isValid() (parsedCode.isValid()
? null ? null
: ((CodeInvalid) parsedCode).getInvalidReason()))) : ((CodeInvalid) parsedCode).getInvalidReason())))
.containsIgnoringCase(expected.exception().replace("EOFException.", "")); .containsIgnoringCase(expected.exception().replace("EOFException.", ""));
}
} }
} }
} else { } else {
@@ -178,6 +185,25 @@ public class EOFReferenceTestTools {
+ " actual exception - " + " actual exception - "
+ (layout.isValid() ? null : layout.invalidReason())) + (layout.isValid() ? null : layout.invalidReason()))
.isEqualTo(expected.result()); .isEqualTo(expected.result());
if (name.contains("eip7692")) {
// if the test is from EEST, validate the exception name.
boolean exceptionMatched = false;
for (String e : Splitter.on('|').split(expected.exception())) {
if (layout
.invalidReason()
.toLowerCase(Locale.ROOT)
.contains(e.replace("EOFException.", "").toLowerCase(Locale.ROOT))) {
exceptionMatched = true;
break;
}
}
assertThat(exceptionMatched)
.withFailMessage(
() ->
"Expected exception :%s actual exception: %s"
.formatted(expected.exception(), layout.invalidReason()))
.isTrue();
}
} }
} }
} }
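The EEST-specific assertions added above accept several exception names separated by `|` and strip the `EOFException.` prefix before a case-insensitive substring match against the layout's invalid reason. A standalone sketch of that matching rule (the strings are made up):

```java
import com.google.common.base.Splitter;
import java.util.Locale;

// Standalone sketch of the matching rule; the expected/actual strings are illustrative.
class ExceptionNameMatch {
  static boolean matches(final String expected, final String actualInvalidReason) {
    for (String e : Splitter.on('|').split(expected)) {
      if (actualInvalidReason
          .toLowerCase(Locale.ROOT)
          .contains(e.replace("EOFException.", "").toLowerCase(Locale.ROOT))) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    System.out.println(
        matches(
            "EOFException.invalid_magic|EOFException.invalid_version",
            "invalid_magic EOF header byte 1 incorrect")); // true
  }
}
```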

View File

@@ -151,11 +151,11 @@ public class GeneralStateReferenceTestTools {
.blobGasPricePerGas(blockHeader.getExcessBlobGas().orElse(BlobGas.ZERO)); .blobGasPricePerGas(blockHeader.getExcessBlobGas().orElse(BlobGas.ZERO));
final TransactionProcessingResult result = final TransactionProcessingResult result =
processor.processTransaction( processor.processTransaction(
worldStateUpdater, worldStateUpdater,
blockHeader, blockHeader,
transaction, transaction,
blockHeader.getCoinbase(), blockHeader.getCoinbase(),
new CachingBlockHashLookup(blockHeader, blockchain), new CachingBlockHashLookup(blockHeader, blockchain),
false, false,
TransactionValidationParams.processingBlock(), TransactionValidationParams.processingBlock(),
blobGasPrice); blobGasPrice);

View File

@@ -131,10 +131,10 @@ public record EOFLayout(
private static String readKind(final ByteArrayInputStream inputStream, final int expectedKind) { private static String readKind(final ByteArrayInputStream inputStream, final int expectedKind) {
int kind = inputStream.read(); int kind = inputStream.read();
if (kind == -1) { if (kind == -1) {
return "Improper section headers"; return "missing_headers_terminator Improper section headers";
} }
if (kind != expectedKind) { if (kind != expectedKind) {
return "Expected kind " + expectedKind + " but read kind " + kind; return "unexpected_header_kind expected " + expectedKind + " actual " + kind;
} }
return null; return null;
} }
@@ -217,7 +217,10 @@ public record EOFLayout(
// This ReferenceEquality check is correct // This ReferenceEquality check is correct
if ((strictSize || result != parsedContainer) if ((strictSize || result != parsedContainer)
&& step.container.size() != parsedContainer.container.size()) { && step.container.size() != parsedContainer.container.size()) {
return invalidLayout(container, parsedContainer.version, "subcontainer size mismatch"); return invalidLayout(
container,
parsedContainer.version,
"invalid_section_bodies_size subcontainer size mismatch");
} }
if (step.index >= 0) { if (step.index >= 0) {
step.parentSubcontainers[step.index] = parsedContainer; step.parentSubcontainers[step.index] = parsedContainer;
@@ -233,18 +236,18 @@ public record EOFLayout(
new ByteArrayInputStream(step.container.toArrayUnsafe()); new ByteArrayInputStream(step.container.toArrayUnsafe());
if (inputStream.available() < 3) { if (inputStream.available() < 3) {
return invalidLayout(step.container, -1, "EOF Container too small"); return invalidLayout(step.container, -1, "invalid_magic EOF Container too small");
} }
if (inputStream.read() != 0xEF) { if (inputStream.read() != 0xEF) {
return invalidLayout(step.container, -1, "EOF header byte 0 incorrect"); return invalidLayout(step.container, -1, "invalid_magic EOF header byte 0 incorrect");
} }
if (inputStream.read() != 0x0) { if (inputStream.read() != 0x0) {
return invalidLayout(step.container, -1, "EOF header byte 1 incorrect"); return invalidLayout(step.container, -1, "invalid_magic EOF header byte 1 incorrect");
} }
final int version = inputStream.read(); final int version = inputStream.read();
if (version > MAX_SUPPORTED_VERSION || version < 1) { if (version > MAX_SUPPORTED_VERSION || version < 1) {
return invalidLayout(step.container, version, "Unsupported EOF Version " + version); return invalidLayout(step.container, version, "invalid_version " + version);
} }
String error = readKind(inputStream, SECTION_TYPES); String error = readKind(inputStream, SECTION_TYPES);
@@ -252,8 +255,11 @@ public record EOFLayout(
return invalidLayout(step.container, version, error); return invalidLayout(step.container, version, error);
} }
int typesLength = readUnsignedShort(inputStream); int typesLength = readUnsignedShort(inputStream);
if (typesLength <= 0 || typesLength % 4 != 0) { if (typesLength % 4 != 0) {
return invalidLayout(step.container, version, "Invalid Types section size"); return invalidLayout(
step.container,
version,
"invalid_type_section_size Invalid Types section size (mod 4 != 0)");
} }
error = readKind(inputStream, SECTION_CODE); error = readKind(inputStream, SECTION_CODE);
@@ -262,28 +268,29 @@ public record EOFLayout(
} }
int codeSectionCount = readUnsignedShort(inputStream); int codeSectionCount = readUnsignedShort(inputStream);
if (codeSectionCount <= 0) { if (codeSectionCount <= 0) {
return invalidLayout(step.container, version, "Invalid Code section count");
}
if (codeSectionCount * 4 != typesLength) {
return invalidLayout( return invalidLayout(
step.container, step.container, version, "incomplete_section_number Too few code sections");
version,
"Type section length incompatible with code section count - 0x"
+ Integer.toHexString(codeSectionCount)
+ " * 4 != 0x"
+ Integer.toHexString(typesLength));
} }
if (codeSectionCount > 1024) { if (codeSectionCount > 1024) {
return invalidLayout( return invalidLayout(
step.container, step.container,
version, version,
"Too many code sections - 0x" + Integer.toHexString(codeSectionCount)); "too_many_code_sections - 0x" + Integer.toHexString(codeSectionCount));
}
if (codeSectionCount * 4 != typesLength) {
return invalidLayout(
step.container,
version,
"invalid_section_bodies_size Type section - 0x"
+ Integer.toHexString(codeSectionCount)
+ " * 4 != 0x"
+ Integer.toHexString(typesLength));
} }
int[] codeSectionSizes = new int[codeSectionCount]; int[] codeSectionSizes = new int[codeSectionCount];
for (int i = 0; i < codeSectionCount; i++) { for (int i = 0; i < codeSectionCount; i++) {
int size = readUnsignedShort(inputStream); int size = readUnsignedShort(inputStream);
if (size <= 0) { if (size <= 0) {
return invalidLayout(step.container, version, "Invalid Code section size for section " + i); return invalidLayout(step.container, version, "zero_section_size code " + i);
} }
codeSectionSizes[i] = size; codeSectionSizes[i] = size;
} }
@@ -303,7 +310,7 @@ public record EOFLayout(
return invalidLayout( return invalidLayout(
step.container, step.container,
version, version,
"Too many container sections - 0x" + Integer.toHexString(containerSectionCount)); "too_many_containers sections - 0x" + Integer.toHexString(containerSectionCount));
} }
containerSectionSizes = new int[containerSectionCount]; containerSectionSizes = new int[containerSectionCount];
for (int i = 0; i < containerSectionCount; i++) { for (int i = 0; i < containerSectionCount; i++) {
@@ -325,7 +332,7 @@ public record EOFLayout(
} }
int dataSize = readUnsignedShort(inputStream); int dataSize = readUnsignedShort(inputStream);
if (dataSize < 0) { if (dataSize < 0) {
return invalidLayout(step.container, version, "Invalid Data section size"); return invalidLayout(step.container, version, "incomplete_data_header");
} }
error = readKind(inputStream, SECTION_TERMINATOR); error = readKind(inputStream, SECTION_TERMINATOR);
@@ -340,11 +347,12 @@ public record EOFLayout(
typeData[i][2] = readUnsignedShort(inputStream); typeData[i][2] = readUnsignedShort(inputStream);
} }
if (typeData[codeSectionCount - 1][2] == -1) { if (typeData[codeSectionCount - 1][2] == -1) {
return invalidLayout(step.container, version, "Incomplete type section"); return invalidLayout(
step.container, version, "invalid_section_bodies_size Incomplete type section");
} }
if (typeData[0][0] != 0 || (typeData[0][1] & 0x7f) != 0) { if (typeData[0][0] != 0 || (typeData[0][1] & 0x7f) != 0) {
return invalidLayout( return invalidLayout(
step.container, version, "Code section does not have zero inputs and outputs"); step.container, version, "invalid_first_section_type must be zero input non-returning");
} }
CodeSection[] codeSections = new CodeSection[codeSectionCount]; CodeSection[] codeSections = new CodeSection[codeSectionCount];
int pos = // calculate pos in stream... int pos = // calculate pos in stream...
@@ -364,25 +372,28 @@ public record EOFLayout(
for (int i = 0; i < codeSectionCount; i++) { for (int i = 0; i < codeSectionCount; i++) {
int codeSectionSize = codeSectionSizes[i]; int codeSectionSize = codeSectionSizes[i];
if (inputStream.skip(codeSectionSize) != codeSectionSize) { if (inputStream.skip(codeSectionSize) != codeSectionSize) {
return invalidLayout(step.container, version, "Incomplete code section " + i); return invalidLayout(
step.container, version, "invalid_section_bodies_size code section " + i);
} }
if (typeData[i][0] > 0x7f) { if (typeData[i][0] > 0x7f) {
return invalidLayout( return invalidLayout(
step.container, step.container,
version, version,
"Type data input stack too large - 0x" + Integer.toHexString(typeData[i][0])); "inputs_outputs_num_above_limit Type data input stack too large - 0x"
+ Integer.toHexString(typeData[i][0]));
} }
if (typeData[i][1] > 0x80) { if (typeData[i][1] > 0x80) {
return invalidLayout( return invalidLayout(
step.container, step.container,
version, version,
"Type data output stack too large - 0x" + Integer.toHexString(typeData[i][1])); "inputs_outputs_num_above_limit - 0x" + Integer.toHexString(typeData[i][1]));
} }
if (typeData[i][2] > 0x3ff) { if (typeData[i][2] > 0x3ff) {
return invalidLayout( return invalidLayout(
step.container, step.container,
version, version,
"Type data max stack too large - 0x" + Integer.toHexString(typeData[i][2])); "max_stack_height_above_limit Type data max stack too large - 0x"
+ Integer.toHexString(typeData[i][2]));
} }
codeSections[i] = codeSections[i] =
new CodeSection(codeSectionSize, typeData[i][0], typeData[i][1], typeData[i][2], pos); new CodeSection(codeSectionSize, typeData[i][0], typeData[i][1], typeData[i][2], pos);
@@ -390,8 +401,7 @@ public record EOFLayout(
return invalidLayout( return invalidLayout(
step.container, step.container,
version, version,
"Code section at zero expected non-returning flag, but had return stack of " "invalid_first_section_type want 0x80 (non-returning flag) has " + typeData[0][1]);
+ typeData[0][1]);
} }
pos += codeSectionSize; pos += codeSectionSize;
} }
@@ -400,7 +410,7 @@ public record EOFLayout(
for (int i = 0; i < containerSectionCount; i++) { for (int i = 0; i < containerSectionCount; i++) {
int subcontainerSize = containerSectionSizes[i]; int subcontainerSize = containerSectionSizes[i];
if (subcontainerSize != inputStream.skip(subcontainerSize)) { if (subcontainerSize != inputStream.skip(subcontainerSize)) {
return invalidLayout(step.container, version, "incomplete subcontainer"); return invalidLayout(step.container, version, "invalid_section_bodies_size");
} }
Bytes subcontainer = step.container.slice(pos, subcontainerSize); Bytes subcontainer = step.container.slice(pos, subcontainerSize);
pos += subcontainerSize; pos += subcontainerSize;
@@ -413,7 +423,8 @@ public record EOFLayout(
Bytes completeContainer; Bytes completeContainer;
if (inputStream.read() != -1) { if (inputStream.read() != -1) {
if (step.strictSize) { if (step.strictSize) {
return invalidLayout(step.container, version, "Dangling data after end of all sections"); return invalidLayout(
step.container, version, "invalid_section_bodies_size data after end of all sections");
} else { } else {
completeContainer = step.container.slice(0, pos + dataSize); completeContainer = step.container.slice(0, pos + dataSize);
} }
@@ -422,7 +433,9 @@ public record EOFLayout(
} }
if (step.strictSize && dataSize != data.size()) { if (step.strictSize && dataSize != data.size()) {
return invalidLayout( return invalidLayout(
step.container, version, "Truncated data section when a complete section was required"); step.container,
version,
"toplevel_container_truncated Truncated data section when a complete section was required");
} }
return new EOFLayout(completeContainer, version, codeSections, subContainers, dataSize, data); return new EOFLayout(completeContainer, version, codeSections, subContainers, dataSize, data);
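Throughout this file the invalid-layout reasons are renamed so that each one starts with the lower_snake_case error code used by the reference tests, followed by optional human-readable detail. A quick illustration of the convention, using the same `^[a-zA-Z]+_.*` pattern that the updated `EOFLayoutTest` asserts further down:

```java
import java.util.List;

// Illustrative check: every invalid reason now begins with a reference-test error code.
class ErrorCodePrefixCheck {
  public static void main(String[] args) {
    List<String> reasons = List.of(
        "invalid_magic EOF header byte 1 incorrect",
        "zero_section_size code 0",
        "too_many_code_sections - 0x401");
    reasons.forEach(r -> System.out.println(r + " -> " + r.matches("^[a-zA-Z]+_.*")));
  }
}
```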

View File

@@ -216,7 +216,7 @@ public class StandardJsonTracer implements OperationTracer {
} }
sb.append("\"depth\":").append(depth).append(","); sb.append("\"depth\":").append(depth).append(",");
if (subdepth >= 1) { if (subdepth >= 1) {
sb.append("\"fdepth\":").append(subdepth).append(","); sb.append("\"functionDepth\":").append(subdepth).append(",");
} }
sb.append("\"refund\":").append(messageFrame.getGasRefund()).append(","); sb.append("\"refund\":").append(messageFrame.getGasRefund()).append(",");
sb.append("\"opName\":\"").append(currentOp.getName()).append("\""); sb.append("\"opName\":\"").append(currentOp.getName()).append("\"");

View File

@@ -100,7 +100,7 @@ class CodeV1Test {
assertThat(validationError) assertThat(validationError)
.isEqualTo( .isEqualTo(
"Invalid EOF container - Code section at zero expected non-returning flag, but had return stack of 0"); "Invalid EOF container - invalid_first_section_type want 0x80 (non-returning flag) has 0");
} }
@ParameterizedTest @ParameterizedTest
@@ -751,7 +751,7 @@ class CodeV1Test {
return Stream.of( return Stream.of(
Arguments.of( Arguments.of(
"0 outputs at section 0", "0 outputs at section 0",
"EOF Layout invalid - Code section at zero expected non-returning flag, but had return stack of 0", "EOF Layout invalid - invalid_first_section_type want 0x80 (non-returning flag) has 0",
0, 0,
List.of(List.of("e4", 0, 0, 0), List.of("e4", 0, 0, 0))), List.of(List.of("e4", 0, 0, 0), List.of("e4", 0, 0, 0))),
Arguments.of( Arguments.of(
@@ -766,12 +766,12 @@ class CodeV1Test {
List.of(List.of("00", 0, 0x80, 0), List.of("e4", 1, 1, 1), List.of("e4", 0, 0, 0))), List.of(List.of("00", 0, 0x80, 0), List.of("e4", 1, 1, 1), List.of("e4", 0, 0, 0))),
Arguments.of( Arguments.of(
"more than 0 outputs section 0", "more than 0 outputs section 0",
"EOF Layout invalid - Code section at zero expected non-returning flag, but had return stack of 0", "EOF Layout invalid - invalid_first_section_type want 0x80 (non-returning flag) has 0",
0, 0,
List.of(List.of("44 50 e4", 0, 0, 1), List.of("4400", 0, 1, 1))), List.of(List.of("44 50 e4", 0, 0, 1), List.of("4400", 0, 1, 1))),
Arguments.of( Arguments.of(
"more than 0 outputs section 0", "more than 0 outputs section 0",
"EOF Layout invalid - Code section at zero expected non-returning flag, but had return stack of 0", "EOF Layout invalid - invalid_first_section_type want 0x80 (non-returning flag) has 0",
1, 1,
List.of(List.of("00", 0, 0, 0), List.of("44 e4", 0, 1, 1))), List.of(List.of("00", 0, 0, 0), List.of("44 e4", 0, 1, 1))),
Arguments.of( Arguments.of(

View File

@@ -29,162 +29,187 @@ public class EOFLayoutTest {
public static Collection<Object[]> containersWithFormatErrors() { public static Collection<Object[]> containersWithFormatErrors() {
return Arrays.asList( return Arrays.asList(
new Object[][] { new Object[][] {
{"EF", "No magic", "EOF Container too small", -1}, {"EF", "No magic", "invalid_magic EOF Container too small", -1},
{"FFFFFF", "Wrong magic", "EOF header byte 0 incorrect", -1}, {"FFFFFF", "Wrong magic", "invalid_magic EOF header byte 0 incorrect", -1},
{"EFFF01010002020004006000AABBCCDD", "Invalid magic", "EOF header byte 1 incorrect", -1},
{"EF00", "No version", "EOF Container too small", -1},
{"EF0000010002020004006000AABBCCDD", "Invalid version", "Unsupported EOF Version 0", 0},
{"EF0002010002020004006000AABBCCDD", "Invalid version", "Unsupported EOF Version 2", 2},
{ {
"EF00FF010002020004006000AABBCCDD", "EFFF01010002020004006000AABBCCDD",
"Invalid version", "Invalid magic",
"Unsupported EOF Version 255", "invalid_magic EOF header byte 1 incorrect",
255 -1
}, },
{"EF0001", "No header", "Improper section headers", 1}, {"EF00", "No version", "invalid_magic EOF Container too small", -1},
{"EF0001 00", "No code section", "Expected kind 1 but read kind 0", 1}, {"EF0000010002020004006000AABBCCDD", "Invalid version", "invalid_version 0", 0},
{"EF0001 01", "No code section size", "Invalid Types section size", 1}, {"EF0002010002020004006000AABBCCDD", "Invalid version", "invalid_version 2", 2},
{"EF0001 0100", "Code section size incomplete", "Invalid Types section size", 1}, {"EF00FF010002020004006000AABBCCDD", "Invalid version", "invalid_version 255", 255},
{"EF0001 010004", "No section terminator", "Improper section headers", 1}, {"EF0001", "No header", "missing_headers_terminator Improper section headers", 1},
{"EF0001 010004 00", "No code section contents", "Expected kind 2 but read kind 0", 1}, {"EF0001 00", "No code section", "unexpected_header_kind expected 1 actual 0", 1},
{"EF0001 010004 02", "No code section count", "Invalid Code section count", 1},
{"EF0001 010004 0200", "Short code section count", "Invalid Code section count", 1},
{ {
"EF0001 010004 020001", "EF0001 01",
"No code section size", "No code section size",
"Invalid Code section size for section 0", "invalid_type_section_size Invalid Types section size (mod 4 != 0)",
1 1
}, },
{ {
"EF0001 010004 02000100", "EF0001 0100",
"Short code section size", "Code section size incomplete",
"Invalid Code section size for section 0", "invalid_type_section_size Invalid Types section size (mod 4 != 0)",
1 1
}, },
{
"EF0001 010004",
"No section terminator",
"missing_headers_terminator Improper section headers",
1
},
{
"EF0001 010004 00",
"No code section contents",
"unexpected_header_kind expected 2 actual 0",
1
},
{
"EF0001 010004 02",
"No code section count",
"incomplete_section_number Too few code sections",
1
},
{
"EF0001 010004 0200",
"Short code section count",
"incomplete_section_number Too few code sections",
1
},
{"EF0001 010004 020001", "No code section size", "zero_section_size code 0", 1},
{"EF0001 010004 02000100", "Short code section size", "zero_section_size code 0", 1},
{ {
"EF0001 010008 0200020001", "EF0001 010008 0200020001",
"No code section size multiple codes", "No code section size multiple codes",
"Invalid Code section size for section 1", "zero_section_size code 1",
1 1
}, },
{ {
"EF0001 010008 020002000100", "EF0001 010008 020002000100",
"No code section size multiple codes", "No code section size multiple codes",
"Invalid Code section size for section 1", "zero_section_size code 1",
1 1
}, },
{"EF0001 010004 0200010001 04", "No data section size", "Invalid Data section size", 1}, {"EF0001 010004 0200010001 04", "No data section size", "incomplete_data_header", 1},
{"EF0001 010004 0200010001 0400", "Short data section size", "incomplete_data_header", 1},
{ {
"EF0001 010004 0200010001 0400", "EF0001 010004 0200010001 040000",
"Short data section size", "No Terminator",
"Invalid Data section size", "missing_headers_terminator Improper section headers",
1
},
{
"EF0001 010004 0200010002 040000 00",
"No type section",
"invalid_section_bodies_size Incomplete type section",
1 1
}, },
{"EF0001 010004 0200010001 040000", "No Terminator", "Improper section headers", 1},
{"EF0001 010004 0200010002 040000 00", "No type section", "Incomplete type section", 1},
{ {
"EF0001 010004 0200010002 040001 040001 00 DA DA", "EF0001 010004 0200010002 040001 040001 00 DA DA",
"Duplicate data sections", "Duplicate data sections",
"Expected kind 0 but read kind 4", "unexpected_header_kind expected 0 actual 4",
1 1
}, },
{ {
"EF0001 010004 0200010002 040000 00 00", "EF0001 010004 0200010002 040000 00 00",
"Incomplete type section", "Incomplete type section",
"Incomplete type section", "invalid_section_bodies_size Incomplete type section",
1 1
}, },
{ {
"EF0001 010008 02000200020002 040000 00 00000000FE", "EF0001 010008 02000200020002 040000 00 00000000FE",
"Incomplete type section", "Incomplete type section",
"Incomplete type section", "invalid_section_bodies_size Incomplete type section",
1 1
}, },
{ {
"EF0001 010008 0200010001 040000 00 00000000 FE ", "EF0001 010008 0200010001 040000 00 00000000 FE ",
"Incorrect type section size", "Incorrect type section size",
"Type section length incompatible with code section count - 0x1 * 4 != 0x8", "invalid_section_bodies_size Type section - 0x1 * 4 != 0x8",
1 1
}, },
{ {
"EF0001 010008 02000200010001 040000 00 0100000000000000 FE FE", "EF0001 010008 02000200010001 040000 00 0100000000000000 FE FE",
"Incorrect section zero type input", "Incorrect section zero type input",
"Code section does not have zero inputs and outputs", "invalid_first_section_type must be zero input non-returning",
1 1
}, },
{ {
"EF0001 010008 02000200010001 040000 00 0001000000000000 FE FE", "EF0001 010008 02000200010001 040000 00 0001000000000000 FE FE",
"Incorrect section zero type output", "Incorrect section zero type output",
"Code section does not have zero inputs and outputs", "invalid_first_section_type must be zero input non-returning",
1 1
}, },
{ {
"EF0001 010004 0200010002 040000 00 00000000 ", "EF0001 010004 0200010002 040000 00 00000000 ",
"Incomplete code section", "Incomplete code section",
"Incomplete code section 0", "invalid_section_bodies_size code section 0",
1 1
}, },
{ {
"EF0001 010004 0200010002 040000 00 00000000 FE", "EF0001 010004 0200010002 040000 00 00000000 FE",
"Incomplete code section", "Incomplete code section",
"Incomplete code section 0", "invalid_section_bodies_size code section 0",
1 1
}, },
{ {
"EF0001 010008 02000200020002 040000 00 00800000 00000000 FEFE ", "EF0001 010008 02000200020002 040000 00 00800000 00000000 FEFE ",
"No code section multiple", "No code section multiple",
"Incomplete code section 1", "invalid_section_bodies_size code section 1",
1 1
}, },
{ {
"EF0001 010008 02000200020002 040000 00 00800000 00000000 FEFE FE", "EF0001 010008 02000200020002 040000 00 00800000 00000000 FEFE FE",
"Incomplete code section multiple", "Incomplete code section multiple",
"Incomplete code section 1", "invalid_section_bodies_size code section 1",
1 1
}, },
{ {
"EF0001 010004 0200010001 040003 00 00800000 FE DEADBEEF", "EF0001 010004 0200010001 040003 00 00800000 FE DEADBEEF",
"Excess data section", "Excess data section",
"Dangling data after end of all sections", "invalid_section_bodies_size data after end of all sections",
1 1
}, },
{ {
"EF0001 0200010001 040001 00 FE DA", "EF0001 0200010001 040001 00 FE DA",
"type section missing", "type section missing",
"Expected kind 1 but read kind 2", "unexpected_header_kind expected 1 actual 2",
1 1
}, },
{ {
"EF0001 010004 040001 00 00000000 DA", "EF0001 010004 040001 00 00000000 DA",
"code section missing", "code section missing",
"Expected kind 2 but read kind 4", "unexpected_header_kind expected 2 actual 4",
1 1
}, },
{ {
"EF0001 010004 0200010001 00 00000000 FE", "EF0001 010004 0200010001 00 00000000 FE",
"data section missing", "data section missing",
"Expected kind 4 but read kind 0", "unexpected_header_kind expected 4 actual 0",
1 1
}, },
{ {
"EF0001 040001 00 DA", "EF0001 040001 00 DA",
"type and code section missing", "type and code section missing",
"Expected kind 1 but read kind 4", "unexpected_header_kind expected 1 actual 4",
1 1
}, },
{ {
"EF0001 0200010001 00 FE", "EF0001 0200010001 00 FE",
"type and data section missing", "type and data section missing",
"Expected kind 1 but read kind 2", "unexpected_header_kind expected 1 actual 2",
1 1
}, },
{ {
"EF0001 010004 00 00000000", "EF0001 010004 00 00000000",
"code and data sections missing", "code and data sections missing",
"Expected kind 2 but read kind 0", "unexpected_header_kind expected 2 actual 0",
1 1
}, },
{"EF0001 00", "all sections missing", "Expected kind 1 but read kind 0", 1}, {"EF0001 00", "all sections missing", "unexpected_header_kind expected 1 actual 0", 1},
{ {
"EF0001 011004 020401" "EF0001 011004 020401"
+ " 0001".repeat(1025) + " 0001".repeat(1025)
@@ -192,18 +217,33 @@ public class EOFLayoutTest {
+ " 00000000".repeat(1025) + " 00000000".repeat(1025)
+ " FE".repeat(1025), + " FE".repeat(1025),
"no data section, 1025 code sections", "no data section, 1025 code sections",
"Too many code sections - 0x401", "too_many_code_sections - 0x401",
1
},
{
"ef000101000002000003000000",
"All kinds zero size",
"incomplete_section_number Too few code sections",
1
},
{
"ef0001010000020001000103000000ef",
"Zero type size ",
"invalid_section_bodies_size Type section - 0x1 * 4 != 0x0",
1 1
}, },
{"ef000101000002000003000000", "All kinds zero size", "Invalid Types section size", 1},
{"ef0001010000020001000103000000ef", "Zero type size ", "Invalid Types section size", 1},
{ {
"ef0001010004020001000003000000", "ef0001010004020001000003000000",
"Zero code section length", "Zero code section length",
"Invalid Code section size for section 0", "zero_section_size code 0",
1
},
{
"ef000101000402000003000000",
"Zero code sections",
"incomplete_section_number Too few code sections",
1 1
}, },
{"ef000101000402000003000000", "Zero code sections", "Invalid Code section count", 1},
}); });
} }
@@ -241,31 +281,31 @@ public class EOFLayoutTest {
{ {
"EF0001 010008 02000200020002 040000 00 0100000000000000", "EF0001 010008 02000200020002 040000 00 0100000000000000",
"Incorrect section zero type input", "Incorrect section zero type input",
"Code section does not have zero inputs and outputs", "invalid_first_section_type must be zero input non-returning",
1 1
}, },
{ {
"EF0001 010008 02000200020002 040000 00 0001000000000000", "EF0001 010008 02000200020002 040000 00 0001000000000000",
"Incorrect section zero type output", "Incorrect section zero type output",
"Code section does not have zero inputs and outputs", "invalid_first_section_type must be zero input non-returning",
1 1
}, },
{ {
"EF0001 010010 0200040001000200020002 040000 00 00800000 F0000000 00010000 02030000 FE 5000 3000 8000", "EF0001 010010 0200040001000200020002 040000 00 00800000 F0000000 00010000 02030000 FE 5000 3000 8000",
"inputs too large", "inputs too large",
"Type data input stack too large - 0xf0", "inputs_outputs_num_above_limit Type data input stack too large - 0xf0",
1 1
}, },
{ {
"EF0001 010010 0200040001000200020002 040000 00 00800000 01000000 00F00000 02030000 FE 5000 3000 8000", "EF0001 010010 0200040001000200020002 040000 00 00800000 01000000 00F00000 02030000 FE 5000 3000 8000",
"outputs too large", "outputs too large",
"Type data output stack too large - 0xf0", "inputs_outputs_num_above_limit - 0xf0",
1 1
}, },
{ {
"EF0001 010010 0200040001000200020002 040000 00 00000400 01000000 00010000 02030400 FE 5000 3000 8000", "EF0001 010010 0200040001000200020002 040000 00 00000400 01000000 00010000 02030400 FE 5000 3000 8000",
"stack too large", "stack too large",
"Type data max stack too large - 0x400", "max_stack_height_above_limit Type data max stack too large - 0x400",
1 1
}, },
{ {
@@ -336,13 +376,13 @@ public class EOFLayoutTest {
{ {
"EF00 01 010004 0200010001 0300010015 040000 00 00800000 00 (EF0001 010004 0200010001 040000 00 00800000 00ff)", "EF00 01 010004 0200010001 0300010015 040000 00 00800000 00 (EF0001 010004 0200010001 040000 00 00800000 00ff)",
"dangling data in subcontainer", "dangling data in subcontainer",
"subcontainer size mismatch", "invalid_section_bodies_size subcontainer size mismatch",
1 1
}, },
{ {
"EF00 01 010004 0200010001 0300010014 040000 00 00800000 00 (EF0001 010004 0200010001 040000 00 00800000 00ff)", "EF00 01 010004 0200010001 0300010014 040000 00 00800000 00 (EF0001 010004 0200010001 040000 00 00800000 00ff)",
"dangling data in container", "dangling data in container",
"Dangling data after end of all sections", "invalid_section_bodies_size data after end of all sections",
1 1
}, },
}); });
@@ -363,6 +403,11 @@ public class EOFLayoutTest {
final Bytes container = Bytes.fromHexString(containerString.replaceAll("[^a-fxA-F0-9]", "")); final Bytes container = Bytes.fromHexString(containerString.replaceAll("[^a-fxA-F0-9]", ""));
final EOFLayout layout = EOFLayout.parseEOF(container, true); final EOFLayout layout = EOFLayout.parseEOF(container, true);
if (failureReason != null) {
assertThat(failureReason)
.withFailMessage("Error string should start with a reference test error code")
.matches("^[a-zA-Z]+_.*");
}
assertThat(layout.version()).isEqualTo(expectedVersion); assertThat(layout.version()).isEqualTo(expectedVersion);
assertThat(layout.invalidReason()).isEqualTo(failureReason); assertThat(layout.invalidReason()).isEqualTo(failureReason);
assertThat(layout.container()).isEqualTo(container); assertThat(layout.container()).isEqualTo(container);

View File

@@ -546,6 +546,19 @@
<sha256 value="74da05b3ca50a8158101b7e12fbfbf902e011340f14bf31c1776cb51f96147f3" origin="Generated by Gradle"/> <sha256 value="74da05b3ca50a8158101b7e12fbfbf902e011340f14bf31c1776cb51f96147f3" origin="Generated by Gradle"/>
</artifact> </artifact>
</component> </component>
<component group="com.gitlab.javafuzz" name="core" version="1.26">
<artifact name="core-1.26.jar">
<sha256 value="c6c2a7a67fac12db6dd495181082b2cc3fa8fd30399287854119054dde58ba92" origin="Generated by Gradle"/>
</artifact>
<artifact name="core-1.26.pom">
<sha256 value="e218318c0edfea8c7f7030cbd2ffe9c7db206de39b16147d8a8a2a801515efd6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="com.gitlab.javafuzz" name="javafuzz" version="1.26">
<artifact name="javafuzz-1.26.pom">
<sha256 value="c5f521d9795c2bc11293ab08fbc563d453349b398b4fc5afe1388644abc392bf" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="com.google" name="google" version="5"> <component group="com.google" name="google" version="5">
<artifact name="google-5.pom"> <artifact name="google-5.pom">
<sha256 value="e09d345e73ca3fbca7f3e05f30deb74e9d39dd6b79a93fee8c511f23417b6828" origin="Generated by Gradle"/> <sha256 value="e09d345e73ca3fbca7f3e05f30deb74e9d39dd6b79a93fee8c511f23417b6828" origin="Generated by Gradle"/>
@@ -5393,6 +5406,14 @@
<sha256 value="74958acdde148f30bfa31ffc0858b62f71f63ccc2ceb4d8a8c67d7f428d43a2d" origin="Generated by Gradle"/> <sha256 value="74958acdde148f30bfa31ffc0858b62f71f63ccc2ceb4d8a8c67d7f428d43a2d" origin="Generated by Gradle"/>
</artifact> </artifact>
</component> </component>
<component group="org.mockito" name="mockito-inline" version="4.0.0">
<artifact name="mockito-inline-4.0.0.jar">
<sha256 value="ee52e1c299a632184fba274a9370993e09140429f5e516e6c5570fd6574b297f" origin="Generated by Gradle"/>
</artifact>
<artifact name="mockito-inline-4.0.0.pom">
<sha256 value="7ba6e072c76d24d3be8e9c9929c1115a69fa9c71d53d90865cb34cca6ccefb05" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.mockito" name="mockito-junit-jupiter" version="5.8.0"> <component group="org.mockito" name="mockito-junit-jupiter" version="5.8.0">
<artifact name="mockito-junit-jupiter-5.8.0.jar"> <artifact name="mockito-junit-jupiter-5.8.0.jar">
<sha256 value="9f6ccc29654335b92ac20e800eb44949772031711185bed6a44a5f8bd56e476b" origin="Generated by Gradle"/> <sha256 value="9f6ccc29654335b92ac20e800eb44949772031711185bed6a44a5f8bd56e476b" origin="Generated by Gradle"/>

View File

@@ -41,6 +41,8 @@ dependencyManagement {
dependency 'org.hyperledger.besu:besu-errorprone-checks:1.0.0' dependency 'org.hyperledger.besu:besu-errorprone-checks:1.0.0'
dependency 'com.gitlab.javafuzz:core:1.26'
dependency 'com.google.guava:guava:33.0.0-jre' dependency 'com.google.guava:guava:33.0.0-jre'
dependency 'com.graphql-java:graphql-java:21.5' dependency 'com.graphql-java:graphql-java:21.5'
@@ -153,8 +155,6 @@ dependencyManagement {
} }
dependency 'org.fusesource.jansi:jansi:2.4.1' dependency 'org.fusesource.jansi:jansi:2.4.1'
dependency 'org.openjdk.jol:jol-core:0.17'
dependency 'tech.pegasys:jc-kzg-4844:1.0.0'
dependencySet(group: 'org.hyperledger.besu', version: '0.9.4') { dependencySet(group: 'org.hyperledger.besu', version: '0.9.4') {
entry 'arithmetic' entry 'arithmetic'
@@ -173,6 +173,9 @@ dependencyManagement {
dependency 'org.java-websocket:Java-WebSocket:1.5.5' dependency 'org.java-websocket:Java-WebSocket:1.5.5'
dependency 'org.jacoco:org.jacoco.agent:0.8.11'
dependency 'org.jacoco:org.jacoco.core:0.8.11'
dependency 'org.jetbrains.kotlin:kotlin-stdlib:1.9.22' dependency 'org.jetbrains.kotlin:kotlin-stdlib:1.9.22'
dependencySet(group: 'org.junit.jupiter', version: '5.10.1') { dependencySet(group: 'org.junit.jupiter', version: '5.10.1') {
@@ -182,6 +185,8 @@ dependencyManagement {
entry 'junit-jupiter-params' entry 'junit-jupiter-params'
} }
dependency 'org.openjdk.jol:jol-core:0.17'
dependency 'org.junit.platform:junit-platform-runner:1.9.2' dependency 'org.junit.platform:junit-platform-runner:1.9.2'
dependency 'org.junit.vintage:junit-vintage-engine:5.10.1' dependency 'org.junit.vintage:junit-vintage-engine:5.10.1'
@@ -232,6 +237,8 @@ dependencyManagement {
dependency 'org.apache.maven:maven-artifact:3.9.6' dependency 'org.apache.maven:maven-artifact:3.9.6'
dependency 'tech.pegasys:jc-kzg-4844:1.0.0'
dependency 'tech.pegasys.discovery:discovery:22.12.0' dependency 'tech.pegasys.discovery:discovery:22.12.0'
} }
} }

View File

@@ -68,5 +68,6 @@ include 'privacy-contracts'
include 'services:kvstore' include 'services:kvstore'
include 'services:pipeline' include 'services:pipeline'
include 'services:tasks' include 'services:tasks'
include 'testfuzz'
include 'testutil' include 'testutil'
include 'util' include 'util'

29
testfuzz/README.md Normal file
View File

@@ -0,0 +1,29 @@
# BesuFuzz
BesuFuzz is where all of Besu's guided fuzzing tools live.
## eof-container
Performs differential fuzzing between Ethereum clients based on
the [txparse eofparse](https://github.com/holiman/txparse/blob/main/README.md#eof-parser-eofparse)
format. Note that only the initial `OK` and `err` values are used to determine if
there is a difference.
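Each `--client` entry is a command that speaks a line-oriented protocol: the harness writes one
hex-encoded container per line and expects a single reply line back, beginning with `OK` for a
valid container or `err:` otherwise. As a rough illustration, the sketch below shows what such a
client could look like when built on Besu's own `EOFLayout` parser; the class name
`ExampleEofParseClient` is hypothetical and not part of this change.
```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.tuweni.bytes.Bytes;
import org.hyperledger.besu.evm.code.EOFLayout;

/** Hypothetical eofparse-style client: one hex container in, one OK/err line out. */
public class ExampleEofParseClient {
  public static void main(final String[] args) throws Exception {
    BufferedReader in =
        new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8));
    String line;
    while ((line = in.readLine()) != null) {
      try {
        EOFLayout layout = EOFLayout.parseEOF(Bytes.fromHexString(line.trim()));
        // Only the leading OK/err token is compared by the fuzzer.
        System.out.println(layout.isValid() ? "OK" : "err: " + layout.invalidReason());
      } catch (RuntimeException e) {
        System.out.println("err: " + e.getMessage());
      }
    }
  }
}
```
A command like this would be registered with `--client=<name>=<command>`, in the same way as the
external parsers shown in the usage examples below.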
### Prototypical CLI Usage:
```shell
BesuFuzz eof-container \
--tests-dir=~/git/ethereum/tests/EOFTests \
--client=evm1=evmone-eofparse \
--client=revm=revme bytecode
```
### Prototypical Gradle Usage:
```shell
./gradlew fuzzEvmone fuzzReth
```
There are pre-written Gradle targets for `fuzzEthereumJS`, `fuzzEvmone`,
`fuzzGeth`, `fuzzNethermind`, and `fuzzReth`. Besu is always a fuzzing target.
The `fuzzAll` target will fuzz all clients.

148
testfuzz/build.gradle Normal file
View File

@@ -0,0 +1,148 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
apply plugin: 'application'
apply plugin: 'java-library'
apply plugin: 'jacoco'
jar {
archiveBaseName = 'besu-test-fuzz'
manifest {
attributes(
'Specification-Title': archiveBaseName,
'Specification-Version': project.version,
'Implementation-Title': archiveBaseName,
'Implementation-Version': calculateVersion()
)
}
}
dependencies {
implementation project(':besu')
implementation project(':crypto:algorithms')
implementation project(':datatypes')
implementation project(':ethereum:referencetests')
implementation project(':evm')
implementation project(':util')
implementation 'com.fasterxml.jackson.core:jackson-databind'
implementation 'com.gitlab.javafuzz:core'
implementation 'info.picocli:picocli'
implementation 'io.tmio:tuweni-bytes'
implementation 'org.jacoco:org.jacoco.agent'
implementation 'org.jacoco:org.jacoco.core'
}
application {
applicationName = 'BesuFuzz'
mainClass = 'org.hyperledger.besu.testfuzz.BesuFuzz'
applicationDefaultJvmArgs = [
'-javaagent:$APP_HOME/lib/jacocoagent.jar'
]
}
def corpusDir = "${buildDir}/generated/corpus"
tasks.register("runFuzzer", JavaExec) {
classpath = sourceSets.main.runtimeClasspath
mainClass = 'org.hyperledger.besu.testfuzz.BesuFuzz'
args = [
"eof-container",
"--tests-dir=${projectDir}/../ethereum/referencetests/src/reference-test/external-resources/EOFTests",
"--corpus-dir=${corpusDir}"
]
doFirst {
mkdir corpusDir
}
}
tasks.register("fuzzEvmone") {
doLast {
runFuzzer.args += "--client=evm1=evmone-eofparse"
}
finalizedBy("runFuzzer")
}
tasks.register("fuzzEthereumJS") {
doLast {
runFuzzer.args += "--client=etjs=tsx ../../../ethereumjs/ethereumjs-monorepo/packages/evm/scripts/eofContainerValidator.ts"
}
finalizedBy("runFuzzer")
}
tasks.register("fuzzGeth") {
doLast {
runFuzzer.args += "--client=geth=eofdump eofparser"
}
finalizedBy("runFuzzer")
}
tasks.register("fuzzNethermind") {
doLast {
runFuzzer.args += "--client=neth=netheofparse -x"
}
finalizedBy("runFuzzer")
}
tasks.register("fuzzReth") {
doLast {
runFuzzer.args += "--client=revm=revme bytecode"
}
finalizedBy("runFuzzer")
}
tasks.register("fuzzAll") {
dependsOn fuzzEvmone, fuzzEthereumJS, fuzzGeth, fuzzNethermind, fuzzReth
}
jacoco {
applyTo run
applyTo runFuzzer
}
// Copies jacoco into the lib directory
tasks.register("copyJacoco", Copy) {
// The jacocoagent.jar is embedded within the jar
from zipTree(configurations.jacocoAgent.singleFile).filter { it.name == 'jacocoagent.jar' }.singleFile
into layout.buildDirectory.dir("install/${application.applicationName}/lib")
}
installDist.finalizedBy copyJacoco
startScripts {
defaultJvmOpts = [
"-Dsecp256k1.randomize=false"
]
unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/src/main/scripts/unixStartScript.txt")
windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/src/main/scripts/windowsStartScript.txt")
doLast { tweakStartScript(startScripts) }
}
static def tweakStartScript(createScriptTask) {
def shortenWindowsClasspath = { line ->
line.replaceAll(/^set CLASSPATH=.*$/, "set CLASSPATH=%APP_HOME%/lib/*")
}
createScriptTask.unixScript.text = createScriptTask.unixScript.text.replace('BESU_HOME', '\$APP_HOME')
createScriptTask.windowsScript.text = createScriptTask.windowsScript.text.replace('BESU_HOME', '%~dp0..')
// Prevent the error originating from the 8191 chars limit on Windows
createScriptTask.windowsScript.text =
createScriptTask.windowsScript
.readLines()
.collect(shortenWindowsClasspath)
.join('\r\n')
}

View File

@@ -0,0 +1,38 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import org.hyperledger.besu.util.LogConfigurator;
/** The main entry point for BesuFuzz, the Besu guided fuzzing tool. */
public final class BesuFuzz {
/** Default constructor for the BesuFuzz class. */
public BesuFuzz() {
// this is here only for Javadoc linting
}
/**
* The main entry point for the BesuFuzz tool.
*
* @param args The command line arguments.
*/
public static void main(final String... args) {
LogConfigurator.setLevel("", "DEBUG");
final BesuFuzzCommand besuFuzzCommand = new BesuFuzzCommand();
besuFuzzCommand.execute(args);
}
}

View File

@@ -0,0 +1,78 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import static java.nio.charset.StandardCharsets.UTF_8;
import org.hyperledger.besu.util.LogConfigurator;
import java.io.InputStream;
import java.io.PrintWriter;
import picocli.CommandLine;
import picocli.CommandLine.Command;
/**
* This is the root command for the `BesuFuzz` command line tool. It is a collection of fuzzers that
* are guided by Besu's implementations.
*/
@Command(
description = "Executes Besu based fuzz tests",
abbreviateSynopsis = true,
name = "evm",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class,
sortOptions = false,
header = "Usage:",
synopsisHeading = "%n",
descriptionHeading = "%nDescription:%n%n",
optionListHeading = "%nOptions:%n",
footerHeading = "%n",
footer = "Hyperledger Besu is licensed under the Apache License 2.0",
subcommands = {EofContainerSubCommand.class})
@SuppressWarnings("java:S106")
public class BesuFuzzCommand implements Runnable {
PrintWriter out;
InputStream in;
/** Default Constructor */
BesuFuzzCommand() {
// this method is here only for JavaDoc linting
}
void execute(final String... args) {
execute(System.in, new PrintWriter(System.out, true, UTF_8), args);
}
void execute(final InputStream input, final PrintWriter output, final String[] args) {
final CommandLine commandLine = new CommandLine(this).setOut(output);
out = output;
in = input;
// don't require exact case to match enum values
commandLine.setCaseInsensitiveEnumValuesAllowed(true);
commandLine.setExecutionStrategy(new CommandLine.RunLast());
commandLine.execute(args);
}
@Override
public void run() {
LogConfigurator.setLevel("", "OFF");
System.out.println("No default command, please select a subcommand");
System.exit(1);
}
}

View File

@@ -0,0 +1,258 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import static org.hyperledger.besu.testfuzz.EofContainerSubCommand.COMMAND_NAME;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.referencetests.EOFTestCaseSpec;
import org.hyperledger.besu.evm.Code;
import org.hyperledger.besu.evm.EVM;
import org.hyperledger.besu.evm.MainnetEVMs;
import org.hyperledger.besu.evm.code.CodeInvalid;
import org.hyperledger.besu.evm.code.CodeV1;
import org.hyperledger.besu.evm.code.EOFLayout;
import org.hyperledger.besu.evm.code.EOFLayout.EOFContainerMode;
import org.hyperledger.besu.evm.internal.EvmConfiguration;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import com.fasterxml.jackson.core.JsonParser.Feature;
import com.fasterxml.jackson.core.util.DefaultIndenter;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.core.util.Separators;
import com.fasterxml.jackson.core.util.Separators.Spacing;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.gitlab.javafuzz.core.AbstractFuzzTarget;
import org.apache.tuweni.bytes.Bytes;
import picocli.CommandLine;
import picocli.CommandLine.Option;
/** Fuzzes the parsing and validation of an EOF container. */
@SuppressWarnings({"java:S106", "CallToPrintStackTrace"}) // we use lots the console, on purpose
@CommandLine.Command(
name = COMMAND_NAME,
description = "Fuzzes EOF container parsing and validation",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class)
public class EofContainerSubCommand extends AbstractFuzzTarget implements Runnable {
static final String COMMAND_NAME = "eof-container";
@Option(
names = {"--corpus-dir"},
paramLabel = "<directory>",
description = "Directory to store corpus files")
private final Path corpusDir = Path.of("corpus");
@Option(
names = {"--tests-dir"},
paramLabel = "<directory>",
description = "Directory where EOF tests references file tree lives")
private final Path testsDir = null;
@Option(
names = {"--client"},
paramLabel = "<directory>=<CLI>",
description = "Add a client for differential fuzzing")
private final Map<String, String> clients = new LinkedHashMap<>();
@CommandLine.ParentCommand private final BesuFuzzCommand parentCommand;
static final ObjectMapper eofTestMapper = createObjectMapper();
static final JavaType javaType =
eofTestMapper
.getTypeFactory()
.constructParametricType(Map.class, String.class, EOFTestCaseSpec.class);
List<ExternalClient> externalClients = new ArrayList<>();
EVM evm = MainnetEVMs.pragueEOF(EvmConfiguration.DEFAULT);
long validContainers;
long totalContainers;
/**
* Default constructor for the EofContainerSubCommand class. This constructor initializes the
* parentCommand to null.
*/
public EofContainerSubCommand() {
this(null);
}
/**
* Constructs a new EofContainerSubCommand with the specified parent command.
*
* @param parentCommand The parent command for this subcommand.
*/
public EofContainerSubCommand(final BesuFuzzCommand parentCommand) {
this.parentCommand = parentCommand;
}
private static ObjectMapper createObjectMapper() {
final ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setDefaultPrettyPrinter(
(new DefaultPrettyPrinter())
.withSeparators(
Separators.createDefaultInstance().withObjectFieldValueSpacing(Spacing.BOTH))
.withObjectIndenter(DefaultIndenter.SYSTEM_LINEFEED_INSTANCE.withIndent(" "))
.withArrayIndenter(DefaultIndenter.SYSTEM_LINEFEED_INSTANCE.withIndent(" ")));
objectMapper.disable(Feature.AUTO_CLOSE_SOURCE);
SimpleModule serializers = new SimpleModule("Serializers");
serializers.addSerializer(Address.class, ToStringSerializer.instance);
serializers.addSerializer(Bytes.class, ToStringSerializer.instance);
objectMapper.registerModule(serializers);
return objectMapper;
}
@Override
public void run() {
// load test dir into corpus dir
if (testsDir != null) {
File f = testsDir.toFile();
if (f.isDirectory()) {
try (var files = Files.walk(f.toPath(), Integer.MAX_VALUE)) {
files.forEach(
ff -> {
File file = ff.toFile();
if (file.isFile()) {
extractFile(file, corpusDir.toFile());
}
});
} catch (IOException e) {
parentCommand.out.println("Exception walking " + f + ": " + e.getMessage());
}
}
}
clients.forEach((k, v) -> externalClients.add(new StreamingClient(k, v.split(" "))));
System.out.println("Fuzzing client set: " + clients.keySet());
try {
new Fuzzer(this, corpusDir.toString(), this::fuzzStats).start();
} catch (NoSuchAlgorithmException
| ClassNotFoundException
| InvocationTargetException
| IllegalAccessException
| NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
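/** Extracts each test vector's code from an EOF reference test JSON file into the corpus directory. */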
private void extractFile(final File f, final File initialCorpus) {
final Map<String, EOFTestCaseSpec> eofTests;
try {
eofTests = eofTestMapper.readValue(f, javaType);
} catch (IOException e) {
// presume parse failed because it's a corpus file
return;
}
for (var entry : eofTests.entrySet()) {
int index = 0;
for (var vector : entry.getValue().getVector().entrySet()) {
try (FileOutputStream fos =
new FileOutputStream(
new File(
initialCorpus,
f.toPath().getFileName() + "_" + (index++) + "_" + vector.getKey()))) {
Bytes codeBytes = Bytes.fromHexString(vector.getValue().code());
evm.getCodeUncached(codeBytes);
fos.write(codeBytes.toArrayUnsafe());
} catch (IOException e) {
parentCommand.out.println("Invalid file " + f + ": " + e.getMessage());
e.printStackTrace();
System.exit(1);
}
}
}
}
@Override
public void fuzz(final byte[] bytes) {
Bytes eofUnderTest = Bytes.wrap(bytes);
String eofUnderTestHexString = eofUnderTest.toHexString();
Code code = evm.getCodeUncached(eofUnderTest);
Map<String, String> results = new LinkedHashMap<>();
boolean mismatch = false;
for (var client : externalClients) {
String value = client.differentialFuzz(eofUnderTestHexString);
results.put(client.getName(), value);
if (value == null || value.startsWith("fail: ")) {
mismatch = true; // if an external client fails, always report it as an error
}
}
boolean besuValid = false;
String besuReason;
if (!code.isValid()) {
besuReason = ((CodeInvalid) code).getInvalidReason();
} else if (code.getEofVersion() != 1) {
EOFLayout layout = EOFLayout.parseEOF(eofUnderTest);
if (layout.isValid()) {
besuReason = "Besu Parsing Error";
parentCommand.out.println(layout.version());
parentCommand.out.println(layout.invalidReason());
parentCommand.out.println(code.getEofVersion());
parentCommand.out.println(code.getClass().getName());
System.exit(1);
mismatch = true;
} else {
besuReason = layout.invalidReason();
}
} else if (EOFContainerMode.INITCODE.equals(
((CodeV1) code).getEofLayout().containerMode().get())) {
besuReason = "Code is initcode, not runtime";
} else {
besuReason = "OK";
besuValid = true;
}
for (var entry : results.entrySet()) {
mismatch =
mismatch
|| besuValid != entry.getValue().toUpperCase(Locale.getDefault()).startsWith("OK");
}
if (mismatch) {
parentCommand.out.println("besu: " + besuReason);
for (var entry : results.entrySet()) {
parentCommand.out.println(entry.getKey() + ": " + entry.getValue());
}
parentCommand.out.println("code: " + eofUnderTest.toUnprefixedHexString());
parentCommand.out.println("size: " + eofUnderTest.size());
parentCommand.out.println();
} else {
if (besuValid) {
validContainers++;
}
totalContainers++;
}
}
String fuzzStats() {
return " / %5.2f%% valid %d/%d"
.formatted((100.0 * validContainers) / totalContainers, validContainers, totalContainers);
}
}

View File

@@ -0,0 +1,22 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
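/** An external EOF parser that BesuFuzz runs alongside Besu for differential comparison. */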
interface ExternalClient {
String getName();
String differentialFuzz(String data);
}

View File

@@ -0,0 +1,239 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import org.hyperledger.besu.crypto.Hash;
import org.hyperledger.besu.crypto.MessageDigestFactory;
import java.io.ByteArrayInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
import com.gitlab.javafuzz.core.AbstractFuzzTarget;
import com.gitlab.javafuzz.core.Corpus;
import org.apache.tuweni.bytes.Bytes;
import org.jacoco.core.data.ExecutionData;
import org.jacoco.core.data.ExecutionDataReader;
import org.jacoco.core.data.IExecutionDataVisitor;
import org.jacoco.core.data.ISessionInfoVisitor;
import org.jacoco.core.data.SessionInfo;
/** Ported from javafuzz because JaCoCo APIs changed. */
@SuppressWarnings({"java:S106", "CallToPrintStackTrace"}) // we use lots the console, on purpose
public class Fuzzer {
private final AbstractFuzzTarget target;
private final Corpus corpus;
private final Object agent;
private final Method getExecutionDataMethod;
private long executionsInSample;
private long lastSampleTime;
private long totalExecutions;
private long totalCoverage;
Supplier<String> fuzzStats;
/**
* Create a new fuzzer
*
* @param target The target to fuzz
* @param dirs the list of corpus dirs and files, comma separated.
* @param fuzzStats additional fuzzing data from the client
* @throws ClassNotFoundException If Jacoco RT is not found (because jacocoagent.jar is not
* loaded)
* @throws NoSuchMethodException If the wrong version of Jacoco is loaded
* @throws InvocationTargetException If the wrong version of Jacoco is loaded
* @throws IllegalAccessException If the wrong version of Jacoco is loaded
* @throws NoSuchAlgorithmException If the SHA-256 crypto algo cannot be loaded.
*/
public Fuzzer(
final AbstractFuzzTarget target, final String dirs, final Supplier<String> fuzzStats)
throws ClassNotFoundException,
NoSuchMethodException,
InvocationTargetException,
IllegalAccessException,
NoSuchAlgorithmException {
this.target = target;
this.corpus = new Corpus(dirs);
this.fuzzStats = fuzzStats;
Class<?> c = Class.forName("org.jacoco.agent.rt.RT");
Method getAgentMethod = c.getMethod("getAgent");
this.agent = getAgentMethod.invoke(null);
this.getExecutionDataMethod = agent.getClass().getMethod("getExecutionData", boolean.class);
fileNameForBuffer(new byte[0]);
}
void writeCrash(final byte[] buf) {
Bytes hash = Hash.sha256(Bytes.wrap(buf));
String filepath = "crash-" + hash.toUnprefixedHexString();
try (FileOutputStream fos = new FileOutputStream(filepath)) {
fos.write(buf);
System.out.printf("crash was written to %s%n", filepath);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
void logStats(final String type) {
long rss =
(Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024;
long endTime = System.currentTimeMillis();
long execs_per_second = -1;
if ((endTime - this.lastSampleTime) != 0) {
execs_per_second = (this.executionsInSample * 1000 / (endTime - this.lastSampleTime));
}
this.lastSampleTime = endTime;
this.executionsInSample = 0;
System.out.printf(
"#%d %s cov: %d corp: %d exec/s: %d rss: %d MB %s%n",
this.totalExecutions,
type,
this.totalCoverage,
this.corpus.getLength(),
execs_per_second,
rss,
fuzzStats.get());
}
/**
* Runs the fuzzer until the VM is shut down
*
* @throws InvocationTargetException if the wrong version of jacoco is loaded
* @throws IllegalAccessException if the wrong version of jacoco is loaded
* @throws NoSuchAlgorithmException if our favorite hash algo is not loaded
*/
@SuppressWarnings("java:S2189") // the endless loop is on purpose
public void start()
throws InvocationTargetException, IllegalAccessException, NoSuchAlgorithmException {
System.out.printf("#0 READ units: %d%n", this.corpus.getLength());
this.totalCoverage = 0;
this.totalExecutions = 0;
this.executionsInSample = 0;
this.lastSampleTime = System.currentTimeMillis();
Map<String, Integer> hitMap = new HashMap<>();
while (true) {
byte[] buf = this.corpus.generateInput();
// The next version will run this in a different thread.
try {
this.target.fuzz(buf);
} catch (Exception e) {
e.printStackTrace(System.out);
this.writeCrash(buf);
System.exit(1);
break;
}
this.totalExecutions++;
this.executionsInSample++;
long newCoverage = getHitCount(hitMap);
if (newCoverage > this.totalCoverage) {
this.totalCoverage = newCoverage;
this.corpus.putBuffer(buf);
this.logStats("NEW");
// If you want hex strings of new hits, uncomment the following.
// String filename = fileNameForBuffer(buf);
// try (var pw =
// new PrintWriter(
// new BufferedWriter(
// new OutputStreamWriter(new FileOutputStream(filename), UTF_8)))) {
// pw.println(Bytes.wrap(buf).toHexString());
// System.out.println(filename);
// } catch (IOException e) {
// e.printStackTrace(System.out);
// }
} else if ((System.currentTimeMillis() - this.lastSampleTime) > 30000) {
this.logStats("PULSE");
}
}
}
private static String fileNameForBuffer(final byte[] buf) throws NoSuchAlgorithmException {
MessageDigest md = MessageDigestFactory.create(MessageDigestFactory.SHA256_ALG);
md.update(buf);
byte[] digest = md.digest();
return String.format("./new-%064x.hex", new BigInteger(1, digest));
}
private long getHitCount(final Map<String, Integer> hitMap)
throws IllegalAccessException, InvocationTargetException {
byte[] dumpData = (byte[]) this.getExecutionDataMethod.invoke(this.agent, false);
ExecutionDataReader edr = new ExecutionDataReader(new ByteArrayInputStream(dumpData));
HitCounter hc = new HitCounter(hitMap);
edr.setExecutionDataVisitor(hc);
edr.setSessionInfoVisitor(hc);
try {
edr.read();
} catch (IOException e) {
e.printStackTrace();
this.writeCrash(dumpData);
}
return hc.getHits();
}
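/** JaCoCo visitor that sums covered probes per class, ignoring the fuzz harness, BouncyCastle, and javafuzz classes. */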
static class HitCounter implements IExecutionDataVisitor, ISessionInfoVisitor {
long hits = 0;
Map<String, Integer> hitMap;
public HitCounter(final Map<String, Integer> hitMap) {
this.hitMap = hitMap;
}
@Override
public void visitClassExecution(final ExecutionData executionData) {
int hit = 0;
for (boolean b : executionData.getProbes()) {
if (executionData.getName().startsWith("org/hyperledger/besu/testfuzz/")
|| executionData.getName().startsWith("org/bouncycastle/")
|| executionData.getName().startsWith("com/gitlab/javafuzz/")) {
continue;
}
if (b) {
hit++;
}
}
String name = executionData.getName();
if (hitMap.containsKey(name)) {
if (hitMap.get(name) < hit) {
hitMap.put(name, hit);
}
} else {
hitMap.put(name, hit);
}
hits += hit;
}
public long getHits() {
return hits;
}
@Override
public void visitSessionInfo(final SessionInfo sessionInfo) {
// nothing to do. Data parser requires a session listener.
}
}
}

View File

@@ -0,0 +1,89 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
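/** External client that launches a fresh process per container and matches its output against the OK/err regexps. */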
@SuppressWarnings({"java:S106", "CallToPrintStackTrace"}) // we use lots the console, on purpose
class SingleQueryClient implements ExternalClient {
final String name;
String[] command;
Pattern okRegexp;
String okRegexpStr;
int okGroup;
Pattern failRegexp;
int failGroup;
String failRegexpStr;
public SingleQueryClient(
final String clientName,
final String okRegexp,
final int okGroup,
final String errorRegexp,
final int failGroup,
final String... command) {
this.name = clientName;
this.okRegexp = Pattern.compile(okRegexp);
this.okRegexpStr = okRegexp;
this.okGroup = okGroup;
this.failRegexp = Pattern.compile(errorRegexp);
this.failGroup = failGroup;
this.failRegexpStr = errorRegexp;
this.command = command;
}
@Override
public String getName() {
return name;
}
@Override
@SuppressWarnings("java:S2142")
public String differentialFuzz(final String data) {
if (!data.startsWith("0xef")) {
return "err: <harness> invalid_magic";
}
try {
List<String> localCommand = new ArrayList<>(command.length + 1);
localCommand.addAll(Arrays.asList(command));
localCommand.add(data);
Process p = new ProcessBuilder().command(localCommand).redirectErrorStream(true).start();
if (!p.waitFor(1, TimeUnit.SECONDS)) {
System.out.println("Process Hang for " + name);
return "fail: process took more than 1 sec " + p.pid();
}
String s = new String(p.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
Matcher m = okRegexp.matcher(s);
if (m.find()) {
return "OK " + m.group(okGroup);
}
m = failRegexp.matcher(s);
if (m.find()) {
return "err: " + m.group(failGroup);
}
return "fail: SingleClientQuery failed to get data";
} catch (InterruptedException | IOException e) {
e.printStackTrace();
return "fail: " + e.getMessage();
}
}
}

View File

@@ -0,0 +1,52 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
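/** External client that keeps one process running and exchanges a single line per container over stdin/stdout. */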
class StreamingClient implements ExternalClient {
final String name;
final BufferedReader reader;
final PrintWriter writer;
public StreamingClient(final String clientName, final String... command) {
try {
Process p = new ProcessBuilder().redirectErrorStream(true).command(command).start();
this.name = clientName;
this.reader = new BufferedReader(p.inputReader(StandardCharsets.UTF_8));
this.writer = new PrintWriter(p.getOutputStream(), true, StandardCharsets.UTF_8);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public String getName() {
return name;
}
@Override
public String differentialFuzz(final String data) {
try {
writer.println(data);
return reader.readLine();
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
}

View File

@@ -0,0 +1,47 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.testfuzz;
import org.hyperledger.besu.BesuInfo;
import picocli.CommandLine;
/**
* The VersionProvider class is responsible for providing the version of the Hyperledger Besu EVM
* tool. It implements the IVersionProvider interface from the picocli library.
*
* <p>The getVersion method returns a string array containing the version of the Hyperledger Besu
* EVM tool.
*/
public class VersionProvider implements CommandLine.IVersionProvider {
/**
* Default constructor for the VersionProvider class. This constructor does not perform any
* operations.
*/
public VersionProvider() {
// this constructor is here only for javadoc linting
}
/**
* This method returns the version of the Hyperledger Besu EVM tool.
*
* @return A string array containing the version of the Hyperledger Besu EVM tool.
*/
@Override
public String[] getVersion() {
return new String[] {"Hyperledger Besu evm " + BesuInfo.shortVersion()};
}
}

View File

@@ -0,0 +1,200 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## ${applicationName} start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: \$0 may be a link
PRG="\$0"
# Need this for relative symlinks.
while [ -h "\$PRG" ] ; do
ls=`ls -ld "\$PRG"`
link=`expr "\$ls" : '.*-> \\(.*\\)\$'`
if expr "\$link" : '/.*' > /dev/null; then
PRG="\$link"
else
PRG=`dirname "\$PRG"`"/\$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"\$PRG\"`/${appHomeRelativePath}" >/dev/null
APP_HOME="`pwd -P`"
cd "\$SAVED" >/dev/null
APP_NAME="${applicationName}"
APP_BASE_NAME=`basename "\$0"`
# Add default JVM options here. You can also use JAVA_OPTS and ${optsEnvironmentVar} to pass JVM options to this script.
DEFAULT_JVM_OPTS=${defaultJvmOpts}
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "\$*"
}
die () {
echo
echo "\$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MSYS* | MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$classpath
<% if ( mainClassName.startsWith('--module ') ) { %>MODULE_PATH=$modulePath<% } %>
# Determine the Java command to use to start the JVM.
if [ -n "\$JAVA_HOME" ] ; then
if [ -x "\$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="\$JAVA_HOME/jre/sh/java"
else
JAVACMD="\$JAVA_HOME/bin/java"
fi
if [ ! -x "\$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: \$JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "\$cygwin" = "false" -a "\$darwin" = "false" -a "\$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ \$? -eq 0 ] ; then
if [ "\$MAX_FD" = "maximum" -o "\$MAX_FD" = "max" ] ; then
MAX_FD="\$MAX_FD_LIMIT"
fi
ulimit -n \$MAX_FD
if [ \$? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: \$MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: \$MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if \$darwin; then
GRADLE_OPTS="\$GRADLE_OPTS \\"-Xdock:name=\$APP_NAME\\" \\"-Xdock:icon=\$APP_HOME/media/gradle.icns\\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "\$cygwin" = "true" -o "\$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "\$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "\$CLASSPATH"`
<% if ( mainClassName.startsWith('--module ') ) { %> MODULE_PATH=`cygpath --path --mixed "\$MODULE_PATH"`<% } %>
JAVACMD=`cygpath --unix "\$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in \$ROOTDIRSRAW ; do
ROOTDIRS="\$ROOTDIRS\$SEP\$dir"
SEP="|"
done
OURCYGPATTERN="(^(\$ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "\$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="\$OURCYGPATTERN|(\$GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "\$@" ; do
CHECK=`echo "\$arg"|egrep -c "\$OURCYGPATTERN" -`
CHECK2=`echo "\$arg"|egrep -c "^-"` ### Determine if an option
if [ \$CHECK -ne 0 ] && [ \$CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args\$i`=`cygpath --path --ignore --mixed "\$arg"`
else
eval `echo args\$i`="\"\$arg\""
fi
i=`expr \$i + 1`
done
case \$i in
0) set -- ;;
1) set -- "\$args0" ;;
2) set -- "\$args0" "\$args1" ;;
3) set -- "\$args0" "\$args1" "\$args2" ;;
4) set -- "\$args0" "\$args1" "\$args2" "\$args3" ;;
5) set -- "\$args0" "\$args1" "\$args2" "\$args3" "\$args4" ;;
6) set -- "\$args0" "\$args1" "\$args2" "\$args3" "\$args4" "\$args5" ;;
7) set -- "\$args0" "\$args1" "\$args2" "\$args3" "\$args4" "\$args5" "\$args6" ;;
8) set -- "\$args0" "\$args1" "\$args2" "\$args3" "\$args4" "\$args5" "\$args6" "\$args7" ;;
9) set -- "\$args0" "\$args1" "\$args2" "\$args3" "\$args4" "\$args5" "\$args6" "\$args7" "\$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\\\n "\$i" | sed "s/'/'\\\\\\\\''/g;1s/^/'/;\\\$s/\\\$/' \\\\\\\\/" ; done
echo " "
}
APP_ARGS=`save "\$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- -javaagent:\$APP_HOME/lib/jacocoagent.jar \$DEFAULT_JVM_OPTS \$JAVA_OPTS \$${optsEnvironmentVar} <% if ( appNameSystemProperty ) { %>"\"-D${appNameSystemProperty}=\$APP_BASE_NAME\"" <% } %>-classpath "\"\$CLASSPATH\"" <% if ( mainClassName.startsWith('--module ') ) { %>--module-path "\"\$MODULE_PATH\"" <% } %>${mainClassName} "\$APP_ARGS"
unset BESU_USING_JEMALLOC
if [ "\$darwin" = "false" -a "\$msys" = "false" ]; then
# check if jemalloc is available
TEST_JEMALLOC=\$(LD_PRELOAD=libjemalloc.so sh -c true 2>&1)
# if jemalloc is available the output is empty, otherwise the output has an error line
if [ -z "\$TEST_JEMALLOC" ]; then
export LD_PRELOAD=libjemalloc.so
export BESU_USING_JEMALLOC=true
else
# jemalloc not available, as fallback limit malloc to 2 arenas
export MALLOC_ARENA_MAX=2
fi
fi
exec "\$JAVACMD" "\$@"

View File

@@ -0,0 +1,91 @@
@rem
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem ${applicationName} startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.\
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%${appHomeRelativePath}
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and ${optsEnvironmentVar} to pass JVM options to this script.
set DEFAULT_JVM_OPTS=${defaultJvmOpts}
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=$classpath
<% if ( mainClassName.startsWith('--module ') ) { %>set MODULE_PATH=$modulePath<% } %>
@rem Execute ${applicationName}
"%JAVA_EXE%" -javaagent:%APP_HOME%/lib/jacocoagent.jar %DEFAULT_JVM_OPTS% %JAVA_OPTS% %${optsEnvironmentVar}% <% if ( appNameSystemProperty ) { %>"-D${appNameSystemProperty}=%APP_BASE_NAME%"<% } %> -classpath "%CLASSPATH%" <% if ( mainClassName.startsWith('--module ') ) { %>--module-path "%MODULE_PATH%" <% } %>${mainClassName} %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable ${exitEnvironmentVar} if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%${exitEnvironmentVar}%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega