Merge branch 'main' into zkbesu

Fabio Di Fabio
2024-03-25 11:07:26 +01:00
33 changed files with 216 additions and 121 deletions

View File

@@ -24,6 +24,7 @@
- Transaction call object to accept both `input` and `data` fields simultaneously if they are set to equal values [#6702](https://github.com/hyperledger/besu/pull/6702)
- `eth_call` for blob tx allows for empty `maxFeePerBlobGas` [#6731](https://github.com/hyperledger/besu/pull/6731)
- Extend error handling of plugin RPC methods [#6759](https://github.com/hyperledger/besu/pull/6759)
- Added engine_newPayloadV4 and engine_getPayloadV4 methods [#6783](https://github.com/hyperledger/besu/pull/6783)
### Bug fixes
- Fix txpool dump/restore race condition [#6665](https://github.com/hyperledger/besu/pull/6665)
@@ -442,7 +443,7 @@ https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/23.4.4/besu-23.4.4.z
- Early access - layered transaction pool implementation [#5290](https://github.com/hyperledger/besu/pull/5290)
- New RPC method `debug_getRawReceipts` [#5476](https://github.com/hyperledger/besu/pull/5476)
- Add TrieLogFactory plugin support [#5440](https://github.com/hyperledger/besu/pull/5440)
- Ignore `min-block-occupancy-ratio` option when on PoS networks, since in some cases it prevents blocks from being filled even when enough transactions are present [#5491](https://github.com/hyperledger/besu/pull/5491)
### Bug Fixes
- Fix eth_feeHistory response for the case in which blockCount is higher than highestBlock requested. [#5397](https://github.com/hyperledger/besu/pull/5397)
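For reference, a minimal, hedged sketch (not part of this commit) of calling the newly added engine_getPayloadV4 method over JSON-RPC from Java. The endpoint URL, JWT token, and payload id are placeholders; the request shape follows the engine/prague acceptance-test cases further down in this diff.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GetPayloadV4Example {
  public static void main(final String[] args) throws Exception {
    // Request body mirrors the engine/prague test cases; the payload id is illustrative.
    final String body =
        """
        {"jsonrpc":"2.0","id":1,"method":"engine_getPayloadV4","params":["0x282643b909febddf"]}""";

    final HttpRequest request =
        HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8551")) // assumed default engine API port
            .header("Content-Type", "application/json")
            .header("Authorization", "Bearer <jwt>") // the engine API normally requires JWT auth
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();

    final HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.body());
  }
}
```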

View File

@@ -22,13 +22,13 @@ import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.provider.Arguments;
public class ExecutionEngineEip6110AcceptanceTest extends AbstractJsonRpcTest {
private static final String GENESIS_FILE = "/jsonrpc/engine/eip6110/genesis.json";
private static final String TEST_CASE_PATH = "/jsonrpc/engine/eip6110/test-cases/";
public class ExecutionEnginePragueAcceptanceTest extends AbstractJsonRpcTest {
private static final String GENESIS_FILE = "/jsonrpc/engine/prague/genesis.json";
private static final String TEST_CASE_PATH = "/jsonrpc/engine/prague/test-cases/";
private static JsonRpcTestsContext testsContext;
public ExecutionEngineEip6110AcceptanceTest() {
public ExecutionEnginePragueAcceptanceTest() {
super(testsContext);
}

View File

@@ -14,7 +14,7 @@
"londonBlock":0,
"terminalTotalDifficulty":0,
"cancunTime":0,
"experimentalEipsTime":20,
"pragueTime":20,
"clique": {
"period": 5,
"epoch": 30000

View File

@@ -1,7 +1,7 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_getPayloadV6110",
"method": "engine_getPayloadV4",
"params": [
"0x282643b909febddf"
],

View File

@@ -1,7 +1,7 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_newPayloadV6110",
"method": "engine_newPayloadV4",
"params": [
{
"parentHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",

View File

@@ -1,7 +1,7 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_newPayloadV6110",
"method": "engine_newPayloadV4",
"params": [
{
"parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",

View File

@@ -1,7 +1,7 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_getPayloadV6110",
"method": "engine_getPayloadV4",
"params": [
"0x282643db882670cf"
],

View File

@@ -54,11 +54,11 @@ public enum RpcMethod {
ENGINE_GET_PAYLOAD_V1("engine_getPayloadV1"),
ENGINE_GET_PAYLOAD_V2("engine_getPayloadV2"),
ENGINE_GET_PAYLOAD_V3("engine_getPayloadV3"),
ENGINE_GET_PAYLOAD_V6110("engine_getPayloadV6110"),
ENGINE_GET_PAYLOAD_V4("engine_getPayloadV4"),
ENGINE_NEW_PAYLOAD_V1("engine_newPayloadV1"),
ENGINE_NEW_PAYLOAD_V2("engine_newPayloadV2"),
ENGINE_NEW_PAYLOAD_V3("engine_newPayloadV3"),
ENGINE_NEW_PAYLOAD_V6110("engine_newPayloadV6110"),
ENGINE_NEW_PAYLOAD_V4("engine_newPayloadV4"),
ENGINE_FORKCHOICE_UPDATED_V1("engine_forkchoiceUpdatedV1"),
ENGINE_FORKCHOICE_UPDATED_V2("engine_forkchoiceUpdatedV2"),
ENGINE_FORKCHOICE_UPDATED_V3("engine_forkchoiceUpdatedV3"),

View File

@@ -50,8 +50,8 @@ public class PluginJsonRpcMethod implements JsonRpcMethod {
final Object result = function.apply(() -> request.getRequest().getParams());
return new JsonRpcSuccessResponse(request.getRequest().getId(), result);
} catch (final PluginRpcEndpointException ex) {
final JsonRpcError error = new JsonRpcError(ex.getRpcMethodError(), ex.getMessage());
LOG.error("Error calling plugin JSON-RPC endpoint", ex);
final JsonRpcError error = new JsonRpcError(ex.getRpcMethodError(), ex.getData());
LOG.debug("Error calling plugin JSON-RPC endpoint", ex);
return new JsonRpcErrorResponse(request.getRequest().getId(), error);
}
}
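A minimal sketch (not part of this commit) of a plugin RPC endpoint that raises PluginRpcEndpointException with an attached data string; the catch block above now forwards ex.getData() into the JsonRpcError, and the RpcMethodError's decodeData (here a hypothetical uppercase transform, mirroring the test added later in this diff) shapes the final error. The class name EchoWithDataEndpoint is illustrative.

```java
import java.util.Locale;
import java.util.Optional;

import org.hyperledger.besu.plugin.services.exception.PluginRpcEndpointException;
import org.hyperledger.besu.plugin.services.rpc.PluginRpcRequest;
import org.hyperledger.besu.plugin.services.rpc.RpcMethodError;

public class EchoWithDataEndpoint {
  private static final RpcMethodError ERROR_WITH_DATA =
      new RpcMethodError() {
        @Override
        public int getCode() {
          return -2;
        }

        @Override
        public String getMessage() {
          return "Error with data";
        }

        @Override
        public Optional<String> decodeData(final String data) {
          // hypothetical decoding step: just uppercase the raw data
          return Optional.of(data.toUpperCase(Locale.US));
        }
      };

  public Object handle(final PluginRpcRequest request) {
    final Object input = request.getParams()[0];
    if ("data".equals(String.valueOf(input))) {
      // "abc" is the raw data carried by the exception and echoed in the error's "data" field
      throw new PluginRpcEndpointException(ERROR_WITH_DATA, "abc");
    }
    return input;
  }
}
```

With this wiring, the error object returned to the client would be `{"code":-2,"message":"Error with data: ABC","data":"abc"}`, matching the assertions added in PluginJsonRpcMethodTest below.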

View File

@@ -63,7 +63,6 @@ public class EngineExchangeCapabilities extends ExecutionEngineJsonRpcMethod {
.filter(e -> e.getMethodName().startsWith("engine_"))
.filter(e -> !e.equals(ENGINE_EXCHANGE_CAPABILITIES))
.filter(e -> !e.equals(ENGINE_PREPARE_PAYLOAD_DEBUG))
.filter(e -> !e.getMethodName().endsWith("6110"))
.map(RpcMethod::getMethodName)
.collect(Collectors.toList());

View File

@@ -32,11 +32,11 @@ import java.util.Optional;
import io.vertx.core.Vertx;
public class EngineGetPayloadV6110 extends AbstractEngineGetPayload {
public class EngineGetPayloadV4 extends AbstractEngineGetPayload {
private final Optional<ScheduledProtocolSpec.Hardfork> eip6110;
private final Optional<ScheduledProtocolSpec.Hardfork> prague;
public EngineGetPayloadV6110(
public EngineGetPayloadV4(
final Vertx vertx,
final ProtocolContext protocolContext,
final MergeMiningCoordinator mergeMiningCoordinator,
@@ -50,12 +50,12 @@ public class EngineGetPayloadV6110 extends AbstractEngineGetPayload {
mergeMiningCoordinator,
blockResultFactory,
engineCallListener);
this.eip6110 = schedule.hardforkFor(s -> s.fork().name().equalsIgnoreCase("ExperimentalEips"));
this.prague = schedule.hardforkFor(s -> s.fork().name().equalsIgnoreCase("Prague"));
}
@Override
public String getName() {
return RpcMethod.ENGINE_GET_PAYLOAD_V6110.getMethodName();
return RpcMethod.ENGINE_GET_PAYLOAD_V4.getMethodName();
}
@Override
@@ -66,22 +66,22 @@ public class EngineGetPayloadV6110 extends AbstractEngineGetPayload {
return new JsonRpcSuccessResponse(
request.getRequest().getId(),
blockResultFactory.payloadTransactionCompleteV6110(blockWithReceipts));
blockResultFactory.payloadTransactionCompleteV4(blockWithReceipts));
}
@Override
protected ValidationResult<RpcErrorType> validateForkSupported(final long blockTimestamp) {
if (protocolSchedule.isPresent()) {
if (eip6110.isPresent() && blockTimestamp >= eip6110.get().milestone()) {
if (prague.isPresent() && blockTimestamp >= prague.get().milestone()) {
return ValidationResult.valid();
} else {
return ValidationResult.invalid(
RpcErrorType.UNSUPPORTED_FORK,
"EIP-6110 configured to start at timestamp: " + eip6110.get().milestone());
"Prague configured to start at timestamp: " + prague.get().milestone());
}
} else {
return ValidationResult.invalid(
RpcErrorType.UNSUPPORTED_FORK, "Configuration error, no schedule for EIP-6110 fork set");
RpcErrorType.UNSUPPORTED_FORK, "Configuration error, no schedule for Prague fork set");
}
}
}

View File

@@ -29,11 +29,11 @@ import java.util.Optional;
import io.vertx.core.Vertx;
public class EngineNewPayloadV6110 extends AbstractEngineNewPayload {
public class EngineNewPayloadV4 extends AbstractEngineNewPayload {
private final Optional<ScheduledProtocolSpec.Hardfork> eip6110;
private final Optional<ScheduledProtocolSpec.Hardfork> prague;
public EngineNewPayloadV6110(
public EngineNewPayloadV4(
final Vertx vertx,
final ProtocolSchedule timestampSchedule,
final ProtocolContext protocolContext,
@@ -42,13 +42,12 @@ public class EngineNewPayloadV6110 extends AbstractEngineNewPayload {
final EngineCallListener engineCallListener) {
super(
vertx, timestampSchedule, protocolContext, mergeCoordinator, ethPeers, engineCallListener);
this.eip6110 =
timestampSchedule.hardforkFor(s -> s.fork().name().equalsIgnoreCase("ExperimentalEips"));
this.prague = timestampSchedule.hardforkFor(s -> s.fork().name().equalsIgnoreCase("prague"));
}
@Override
public String getName() {
return RpcMethod.ENGINE_NEW_PAYLOAD_V6110.getMethodName();
return RpcMethod.ENGINE_NEW_PAYLOAD_V4.getMethodName();
}
@Override
@@ -74,16 +73,16 @@ public class EngineNewPayloadV6110 extends AbstractEngineNewPayload {
@Override
protected ValidationResult<RpcErrorType> validateForkSupported(final long blockTimestamp) {
if (protocolSchedule.isPresent()) {
if (eip6110.isPresent() && blockTimestamp >= eip6110.get().milestone()) {
if (prague.isPresent() && blockTimestamp >= prague.get().milestone()) {
return ValidationResult.valid();
} else {
return ValidationResult.invalid(
RpcErrorType.UNSUPPORTED_FORK,
"EIP-6110 configured to start at timestamp: " + eip6110.get().milestone());
"Prague configured to start at timestamp: " + prague.get().milestone());
}
} else {
return ValidationResult.invalid(
RpcErrorType.UNSUPPORTED_FORK, "Configuration error, no schedule for EIP-6110 fork set");
RpcErrorType.UNSUPPORTED_FORK, "Configuration error, no schedule for Prague fork set");
}
}
}

View File

@@ -154,7 +154,7 @@ public class BlockResultFactory {
blobsBundleV1);
}
public EngineGetPayloadResultV6110 payloadTransactionCompleteV6110(
public EngineGetPayloadResultV4 payloadTransactionCompleteV4(
final BlockWithReceipts blockWithReceipts) {
final List<String> txs =
blockWithReceipts.getBlock().getBody().getTransactions().stream()
@@ -168,7 +168,7 @@ public class BlockResultFactory {
final BlobsBundleV1 blobsBundleV1 =
new BlobsBundleV1(blockWithReceipts.getBlock().getBody().getTransactions());
return new EngineGetPayloadResultV6110(
return new EngineGetPayloadResultV4(
blockWithReceipts.getHeader(),
txs,
blockWithReceipts.getBlock().getBody().getWithdrawals(),

View File

@@ -30,13 +30,13 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.tuweni.bytes.Bytes32;
@JsonPropertyOrder({"executionPayload", "blockValue", "blobsBundle", "shouldOverrideBuilder"})
public class EngineGetPayloadResultV6110 {
public class EngineGetPayloadResultV4 {
protected final PayloadResult executionPayload;
private final String blockValue;
private final BlobsBundleV1 blobsBundle;
private final boolean shouldOverrideBuilder;
public EngineGetPayloadResultV6110(
public EngineGetPayloadResultV4(
final BlockHeader header,
final List<String> transactions,
final Optional<List<Withdrawal>> withdrawals,

View File

@@ -28,11 +28,11 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineG
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadV1;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadV2;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadV3;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadV6110;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineGetPayloadV4;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineNewPayloadV1;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineNewPayloadV2;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineNewPayloadV3;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineNewPayloadV6110;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineNewPayloadV4;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EnginePreparePayloadDebug;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.engine.EngineQosTimer;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.BlockResultFactory;
@@ -160,9 +160,9 @@ public class ExecutionEngineJsonRpcMethods extends ApiGroupJsonRpcMethods {
protocolSchedule));
}
if (protocolSchedule.anyMatch(p -> p.spec().getName().equalsIgnoreCase("ExperimentalEips"))) {
if (protocolSchedule.anyMatch(p -> p.spec().getName().equalsIgnoreCase("prague"))) {
executionEngineApisSupported.add(
new EngineGetPayloadV6110(
new EngineGetPayloadV4(
consensusEngineServer,
protocolContext,
mergeCoordinator.get(),
@@ -171,7 +171,7 @@ public class ExecutionEngineJsonRpcMethods extends ApiGroupJsonRpcMethods {
protocolSchedule));
executionEngineApisSupported.add(
new EngineNewPayloadV6110(
new EngineNewPayloadV4(
consensusEngineServer,
protocolSchedule,
protocolContext,

View File

@@ -37,8 +37,16 @@ public class JsonRpcTestHelper {
}
protected void assertValidJsonRpcError(
final JsonObject json, final Object id, final int errorCode, final String errorMessage)
throws Exception {
final JsonObject json, final Object id, final int errorCode, final String errorMessage) {
assertValidJsonRpcError(json, id, errorCode, errorMessage, null);
}
protected void assertValidJsonRpcError(
final JsonObject json,
final Object id,
final int errorCode,
final String errorMessage,
final String data) {
// Check all expected fieldnames are set
final Set<String> fieldNames = json.fieldNames();
assertThat(fieldNames.size()).isEqualTo(3);
@@ -53,13 +61,19 @@ public class JsonRpcTestHelper {
// Check error format
final JsonObject error = json.getJsonObject("error");
final Set<String> errorFieldNames = error.fieldNames();
assertThat(errorFieldNames.size()).isEqualTo(2);
assertThat(errorFieldNames.size()).isEqualTo(data == null ? 2 : 3);
assertThat(errorFieldNames.contains("code")).isTrue();
assertThat(errorFieldNames.contains("message")).isTrue();
if (data != null) {
assertThat(errorFieldNames.contains("data")).isTrue();
}
// Check error field values
assertThat(error.getInteger("code")).isEqualTo(errorCode);
assertThat(error.getString("message")).isEqualTo(errorMessage);
if (data != null) {
assertThat(error.getString("data")).isEqualTo(data);
}
}
protected void assertIdMatches(final JsonObject json, final Object expectedId) {

View File

@@ -24,6 +24,9 @@ import org.hyperledger.besu.plugin.services.exception.PluginRpcEndpointException
import org.hyperledger.besu.plugin.services.rpc.PluginRpcRequest;
import org.hyperledger.besu.plugin.services.rpc.RpcMethodError;
import java.util.Locale;
import java.util.Optional;
import io.vertx.core.json.JsonObject;
import okhttp3.RequestBody;
import okhttp3.Response;
@@ -127,6 +130,25 @@ public class PluginJsonRpcMethodTest extends JsonRpcHttpServiceTestBase {
}
}
@Test
public void methodErrorWithDataShouldReturnErrorResponseWithDecodedData() throws Exception {
final var wrongParamContent =
"""
{"jsonrpc":"2.0","id":1,"method":"plugin_echo","params":["data"]}""";
try (var unused =
addRpcMethod(
"plugin_echo",
new PluginJsonRpcMethod("plugin_echo", PluginJsonRpcMethodTest::echoPluginRpcMethod))) {
final RequestBody body = RequestBody.create(wrongParamContent, JSON);
try (final Response resp = client.newCall(buildPostRequest(body)).execute()) {
assertThat(resp.code()).isEqualTo(200);
final JsonObject json = new JsonObject(resp.body().string());
testHelper.assertValidJsonRpcError(json, 1, -2, "Error with data: ABC", "abc");
}
}
}
@Test
public void unhandledExceptionShouldReturnInternalErrorResponse() throws Exception {
final var nullParam =
@@ -168,6 +190,29 @@ public class PluginJsonRpcMethodTest extends JsonRpcHttpServiceTestBase {
}
});
}
if (input.toString().equals("data")) {
throw new PluginRpcEndpointException(
new RpcMethodError() {
@Override
public int getCode() {
return -2;
}
@Override
public String getMessage() {
return "Error with data";
}
@Override
public Optional<String> decodeData(final String data) {
// just turn everything uppercase
return Optional.of(data.toUpperCase(Locale.US));
}
},
"abc");
}
return input;
}
}

View File

@@ -39,8 +39,10 @@ public class AbstractScheduledApiTest {
new ScheduledProtocolSpec.Hardfork("Shanghai", 20);
protected final ScheduledProtocolSpec.Hardfork cancunHardfork =
new ScheduledProtocolSpec.Hardfork("Cancun", 30);
protected final ScheduledProtocolSpec.Hardfork pragueHardfork =
new ScheduledProtocolSpec.Hardfork("Prague", 40);
protected final ScheduledProtocolSpec.Hardfork experimentalHardfork =
new ScheduledProtocolSpec.Hardfork("ExperimentalEips", 40);
new ScheduledProtocolSpec.Hardfork("ExperimentalEips", 50);
@Mock protected DefaultProtocolSchedule protocolSchedule;
@@ -74,6 +76,9 @@ public class AbstractScheduledApiTest {
lenient()
.when(protocolSchedule.hardforkFor(argThat(new HardforkMatcher(cancunHardfork))))
.thenReturn(Optional.of(cancunHardfork));
lenient()
.when(protocolSchedule.hardforkFor(argThat(new HardforkMatcher(pragueHardfork))))
.thenReturn(Optional.of(pragueHardfork));
lenient()
.when(protocolSchedule.hardforkFor(argThat(new HardforkMatcher(shanghaiHardfork))))
.thenReturn(Optional.of(shanghaiHardfork));

View File

@@ -32,7 +32,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.RpcMethod;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcErrorResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcSuccessResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.RpcErrorType;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.EngineGetPayloadResultV6110;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.EngineGetPayloadResultV4;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.Quantity;
import org.hyperledger.besu.ethereum.core.BlobTestFixture;
import org.hyperledger.besu.ethereum.core.Block;
@@ -59,9 +59,9 @@ import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(
MockitoExtension.class) // mocks in parent class may not be used, throwing unnecessary stubbing
public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
public class EngineGetPayloadV4Test extends AbstractEngineGetPayloadTest {
public EngineGetPayloadV6110Test() {
public EngineGetPayloadV4Test() {
super();
}
@@ -74,7 +74,7 @@ public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
.thenReturn(Optional.of(mockBlockWithReceiptsAndDeposits));
when(protocolContext.safeConsensusContext(Mockito.any())).thenReturn(Optional.of(mergeContext));
this.method =
new EngineGetPayloadV6110(
new EngineGetPayloadV4(
vertx,
protocolContext,
mergeMiningCoordinator,
@@ -86,24 +86,24 @@ public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
@Override
@Test
public void shouldReturnExpectedMethodName() {
assertThat(method.getName()).isEqualTo("engine_getPayloadV6110");
assertThat(method.getName()).isEqualTo("engine_getPayloadV4");
}
@Override
@Test
public void shouldReturnBlockForKnownPayloadId() {
BlockHeader eip6110Header =
BlockHeader header =
new BlockHeaderTestFixture()
.prevRandao(Bytes32.random())
.timestamp(experimentalHardfork.milestone() + 1)
.timestamp(pragueHardfork.milestone() + 1)
.excessBlobGas(BlobGas.of(10L))
.buildHeader();
// should return withdrawals, deposits and excessGas for a post-6110 block
PayloadIdentifier postEip6110Pid =
PayloadIdentifier payloadIdentifier =
PayloadIdentifier.forPayloadParams(
Hash.ZERO,
experimentalHardfork.milestone(),
pragueHardfork.milestone(),
Bytes32.random(),
Address.fromHexString("0x42"),
Optional.empty(),
@@ -124,10 +124,10 @@ public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
.createTransaction(senderKeys);
TransactionReceipt blobReceipt = mock(TransactionReceipt.class);
when(blobReceipt.getCumulativeGasUsed()).thenReturn(100L);
BlockWithReceipts postEip6110Block =
BlockWithReceipts block =
new BlockWithReceipts(
new Block(
eip6110Header,
header,
new BlockBody(
List.of(blobTx),
Collections.emptyList(),
@@ -135,23 +135,23 @@ public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
Optional.of(Collections.emptyList()))),
List.of(blobReceipt));
when(mergeContext.retrieveBlockById(postEip6110Pid)).thenReturn(Optional.of(postEip6110Block));
when(mergeContext.retrieveBlockById(payloadIdentifier)).thenReturn(Optional.of(block));
final var resp = resp(RpcMethod.ENGINE_GET_PAYLOAD_V6110.getMethodName(), postEip6110Pid);
final var resp = resp(RpcMethod.ENGINE_GET_PAYLOAD_V4.getMethodName(), payloadIdentifier);
assertThat(resp).isInstanceOf(JsonRpcSuccessResponse.class);
Optional.of(resp)
.map(JsonRpcSuccessResponse.class::cast)
.ifPresent(
r -> {
assertThat(r.getResult()).isInstanceOf(EngineGetPayloadResultV6110.class);
final EngineGetPayloadResultV6110 res = (EngineGetPayloadResultV6110) r.getResult();
assertThat(r.getResult()).isInstanceOf(EngineGetPayloadResultV4.class);
final EngineGetPayloadResultV4 res = (EngineGetPayloadResultV4) r.getResult();
assertThat(res.getExecutionPayload().getWithdrawals()).isNotNull();
assertThat(res.getExecutionPayload().getDeposits()).isNotNull();
assertThat(res.getExecutionPayload().getHash())
.isEqualTo(eip6110Header.getHash().toString());
.isEqualTo(header.getHash().toString());
assertThat(res.getBlockValue()).isEqualTo(Quantity.create(0));
assertThat(res.getExecutionPayload().getPrevRandao())
.isEqualTo(eip6110Header.getPrevRandao().map(Bytes32::toString).orElse(""));
.isEqualTo(header.getPrevRandao().map(Bytes32::toString).orElse(""));
// excessBlobGas: QUANTITY, 256 bits
String expectedQuantityOf10 = Bytes32.leftPad(Bytes.of(10)).toQuantityHexString();
assertThat(res.getExecutionPayload().getExcessBlobGas()).isNotEmpty();
@@ -163,7 +163,7 @@ public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
@Test
public void shouldReturnUnsupportedFork() {
final var resp = resp(RpcMethod.ENGINE_GET_PAYLOAD_V6110.getMethodName(), mockPid);
final var resp = resp(RpcMethod.ENGINE_GET_PAYLOAD_V4.getMethodName(), mockPid);
assertThat(resp).isInstanceOf(JsonRpcErrorResponse.class);
assertThat(((JsonRpcErrorResponse) resp).getErrorType())
@@ -172,6 +172,6 @@ public class EngineGetPayloadV6110Test extends AbstractEngineGetPayloadTest {
@Override
protected String getMethodName() {
return RpcMethod.ENGINE_GET_PAYLOAD_V6110.getMethodName();
return RpcMethod.ENGINE_GET_PAYLOAD_V4.getMethodName();
}
}

View File

@@ -41,7 +41,7 @@ import org.hyperledger.besu.ethereum.core.Deposit;
import org.hyperledger.besu.ethereum.core.Withdrawal;
import org.hyperledger.besu.ethereum.mainnet.BodyValidation;
import org.hyperledger.besu.ethereum.mainnet.DepositsValidator;
import org.hyperledger.besu.evm.gascalculator.CancunGasCalculator;
import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
import java.util.Collections;
import java.util.List;
@@ -54,17 +54,18 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class EngineNewPayloadV6110Test extends EngineNewPayloadV3Test {
public class EngineNewPayloadV4Test extends EngineNewPayloadV3Test {
private static final Address depositContractAddress =
Address.fromHexString("0x00000000219ab540356cbb839cbe05303d7705fa");
public EngineNewPayloadV6110Test() {}
public EngineNewPayloadV4Test() {}
@BeforeEach
@Override
public void before() {
super.before();
maybeParentBeaconBlockRoot = Optional.of(Bytes32.ZERO);
// TODO this should be using NewPayloadV4
this.method =
new EngineNewPayloadV3(
vertx,
@@ -73,10 +74,8 @@ public class EngineNewPayloadV6110Test extends EngineNewPayloadV3Test {
mergeCoordinator,
ethPeers,
engineCallListener);
lenient()
.when(protocolSchedule.hardforkFor(any()))
.thenReturn(Optional.of(super.cancunHardfork));
lenient().when(protocolSpec.getGasCalculator()).thenReturn(new CancunGasCalculator());
lenient().when(protocolSchedule.hardforkFor(any())).thenReturn(Optional.of(pragueHardfork));
lenient().when(protocolSpec.getGasCalculator()).thenReturn(new PragueGasCalculator());
}
@Override
@@ -171,7 +170,7 @@ public class EngineNewPayloadV6110Test extends EngineNewPayloadV3Test {
BlockHeader parentBlockHeader =
new BlockHeaderTestFixture()
.baseFeePerGas(Wei.ONE)
.timestamp(super.experimentalHardfork.milestone())
.timestamp(pragueHardfork.milestone())
.excessBlobGas(BlobGas.ZERO)
.blobGasUsed(0L)
.buildHeader();

View File

@@ -733,6 +733,9 @@ public abstract class MainnetProtocolSpecs {
final int contractSizeLimit =
configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
final Address depositContractAddress =
genesisConfigOptions.getDepositContractAddress().orElse(DEFAULT_DEPOSIT_CONTRACT_ADDRESS);
return cancunDefinition(
chainId,
configContractSizeLimit,
@@ -760,6 +763,7 @@ public abstract class MainnetProtocolSpecs {
SPURIOUS_DRAGON_FORCE_DELETE_WHEN_EMPTY_ADDRESSES))
// use prague precompiled contracts
.precompileContractRegistryBuilder(MainnetPrecompiledContractRegistries::prague)
.depositsValidator(new DepositsValidator.AllowedDeposits(depositContractAddress))
.name("Prague");
}
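For clarity, a minimal sketch (not from this commit) of the fallback introduced above: the Prague spec reads the deposit contract address from the genesis config options and defaults to the mainnet deposit contract when none is configured. The constant below is assumed to match DEFAULT_DEPOSIT_CONTRACT_ADDRESS.

```java
import java.util.Optional;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.mainnet.DepositsValidator;

final class DepositContractAddressDefaults {
  // Assumed to equal DEFAULT_DEPOSIT_CONTRACT_ADDRESS (the mainnet deposit contract).
  static final Address MAINNET_DEPOSIT_CONTRACT =
      Address.fromHexString("0x00000000219ab540356cbb839cbe05303d7705fa");

  // Mirrors genesisConfigOptions.getDepositContractAddress().orElse(DEFAULT_DEPOSIT_CONTRACT_ADDRESS)
  static Address resolve(final Optional<Address> configured) {
    return configured.orElse(MAINNET_DEPOSIT_CONTRACT);
  }

  // Block deposits are then validated against the configured contract, as registered above.
  static DepositsValidator pragueDepositsValidator(final Optional<Address> configured) {
    return new DepositsValidator.AllowedDeposits(resolve(configured));
  }
}
```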

View File

@@ -63,40 +63,7 @@ public class BackwardSyncAlgorithm implements BesuEvents.InitialSyncCompletionLi
public CompletableFuture<Void> pickNextStep() {
final Optional<Hash> firstHash = context.getBackwardChain().getFirstHashToAppend();
if (firstHash.isPresent()) {
final CompletableFuture<Void> syncStep = new CompletableFuture<>();
executeSyncStep(firstHash.get())
.whenComplete(
(result, error) -> {
if (error != null) {
if (error instanceof CompletionException
&& error.getCause() instanceof MaxRetriesReachedException) {
context.getBackwardChain().removeFromHashToAppend(firstHash.get());
LOG.atWarn()
.setMessage(
"Unable to retrieve block {} from any peer, with {} peers available. Could be a reorged block. Waiting for the next block from the consensus client to try again.")
.addArgument(firstHash.get())
.addArgument(context.getEthContext().getEthPeers().peerCount())
.addArgument(context.getBackwardChain().getFirstHashToAppend())
.log();
LOG.atDebug()
.setMessage("Removing hash {} from hashesToAppend")
.addArgument(firstHash.get())
.log();
syncStep.complete(null);
} else {
syncStep.completeExceptionally(error);
}
} else {
LOG.atDebug()
.setMessage("Backward sync target block is {}")
.addArgument(result::toLogString)
.log();
context.getBackwardChain().removeFromHashToAppend(firstHash.get());
context.getStatus().updateTargetHeight(result.getHeader().getNumber());
syncStep.complete(null);
}
});
return syncStep;
return handleSyncStep(firstHash.get());
}
if (!context.isReady()) {
return waitForReady();
@@ -137,6 +104,59 @@ public class BackwardSyncAlgorithm implements BesuEvents.InitialSyncCompletionLi
return executeBackwardAsync(firstAncestorHeader);
}
private CompletableFuture<Void> handleSyncStep(final Hash firstHash) {
final CompletableFuture<Void> syncStep = new CompletableFuture<>();
executeSyncStep(firstHash)
.whenComplete(
(result, error) -> {
if (error != null) {
handleSyncStepError(error, firstHash, syncStep);
} else {
handleSyncStepSuccess(result, firstHash, syncStep);
}
});
return syncStep;
}
private void handleSyncStepSuccess(
final Block result, final Hash firstHash, final CompletableFuture<Void> syncStep) {
if (result == null) {
LOG.atWarn().setMessage("Unexpected null result for hash {}").addArgument(firstHash).log();
syncStep.completeExceptionally(new BackwardSyncException("Unexpected null result", true));
} else {
LOG.atDebug()
.setMessage("Backward sync target block is {}")
.addArgument(result::toLogString)
.log();
context.getBackwardChain().removeFromHashToAppend(firstHash);
context.getStatus().updateTargetHeight(result.getHeader().getNumber());
syncStep.complete(null);
}
}
private void handleSyncStepError(
final Throwable error, final Hash firstHash, final CompletableFuture<Void> syncStep) {
if (error instanceof CompletionException
&& error.getCause() instanceof MaxRetriesReachedException) {
handleEthPeerMaxRetriesException(firstHash);
syncStep.complete(null);
} else {
syncStep.completeExceptionally(error);
}
}
private void handleEthPeerMaxRetriesException(final Hash firstHash) {
context.getBackwardChain().removeFromHashToAppend(firstHash);
LOG.atWarn()
.setMessage(
"Unable to retrieve block {} from any peer, with {} peers available. Could be a reorged block. Waiting for the next block from the consensus client to try again.")
.addArgument(firstHash)
.addArgument(context.getEthContext().getEthPeers().peerCount())
.addArgument(context.getBackwardChain().getFirstHashToAppend())
.log();
LOG.atDebug().setMessage("Removing hash {} from hashesToAppend").addArgument(firstHash).log();
}
@VisibleForTesting
public CompletableFuture<Void> executeProcessKnownAncestors() {
return new ProcessKnownAncestorsStep(context, context.getBackwardChain()).executeAsync();

View File

@@ -442,7 +442,6 @@ public class BackwardSyncContextTest {
}
}
@SuppressWarnings("BannedMethod")
@Test
public void whenBlockNotFoundInPeers_shouldRemoveBlockFromQueueAndProgressInNextSession() {
// This scenario can happen due to a reorg
@@ -466,7 +465,6 @@ public class BackwardSyncContextTest {
.isEqualTo(remoteBlockchain.getBlockByNumber(reorgBlockHeight).orElseThrow());
}
@SuppressWarnings("BannedMethod")
@Test
public void
whenBlockNotFoundInPeers_shouldRemoveBlockFromQueueAndProgressWithQueueInSameSession() {

View File

@@ -17,5 +17,6 @@ org.gradle.jvmargs=-Xmx4g \
--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED \
--add-opens jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
# Could be moved to sonar properties after https://sonarsource.atlassian.net/browse/SONARGRADL-134
systemProp.sonar.gradle.skipCompile=true
besu.run.args="--network=linea"

View File

@@ -69,7 +69,7 @@ Calculated : ${currentHash}
tasks.register('checkAPIChanges', FileStateChecker) {
description = "Checks that the API for the Plugin-API project does not change without deliberate thought"
files = sourceSets.main.allJava.files
knownHash = '0xiYCyr3M4oSrvqYXVkLgVDzlBg2T3fmrADub5tY5a0='
knownHash = '/FHIztl2tLW5Gzc0qnfEeuVQa6ljVfUce7YE6JLDdZU='
}
check.dependsOn('checkAPIChanges')

View File

@@ -20,6 +20,8 @@ import org.hyperledger.besu.plugin.services.rpc.RpcMethodError;
public class PluginRpcEndpointException extends RuntimeException {
/** The error */
private final RpcMethodError rpcMethodError;
/** The data associated with the exception */
private final String data;
/**
* Constructs a new PluginRpcEndpointException exception with the specified error.
@@ -27,20 +29,18 @@ public class PluginRpcEndpointException extends RuntimeException {
* @param rpcMethodError the error.
*/
public PluginRpcEndpointException(final RpcMethodError rpcMethodError) {
super();
this.rpcMethodError = rpcMethodError;
this(rpcMethodError, null);
}
/**
* Constructs a new PluginRpcEndpointException exception with the specified error and message.
*
* @param rpcMethodError the error.
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method).
* @param data the data associated with the exception that could be parsed to extract more
* information to return in the error response.
*/
public PluginRpcEndpointException(final RpcMethodError rpcMethodError, final String message) {
super(message);
this.rpcMethodError = rpcMethodError;
public PluginRpcEndpointException(final RpcMethodError rpcMethodError, final String data) {
this(rpcMethodError, data, null);
}
/**
@@ -48,16 +48,17 @@ public class PluginRpcEndpointException extends RuntimeException {
* cause.
*
* @param rpcMethodError the error.
* @param message the detail message (which is saved for later retrieval by the {@link
* #getMessage()} method).
* @param data the data associated with the exception that could be parsed to extract more
* information to return in the error response.
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()} method).
* (A {@code null} value is permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public PluginRpcEndpointException(
final RpcMethodError rpcMethodError, final String message, final Throwable cause) {
super(message, cause);
final RpcMethodError rpcMethodError, final String data, final Throwable cause) {
super(rpcMethodError.getMessage(), cause);
this.rpcMethodError = rpcMethodError;
this.data = data;
}
/**
@@ -68,4 +69,13 @@ public class PluginRpcEndpointException extends RuntimeException {
public RpcMethodError getRpcMethodError() {
return rpcMethodError;
}
/**
* Get the data associated with the exception
*
* @return data as string, could be null.
*/
public String getData() {
return data;
}
}