feat: remove switch-to-linea-besu, counter-v1, conflation-v1, file-manager related configs and their reference codes in L1DependentApp (#952)

* feat: remove switch-to-linea-besu, counter-v1, conflation-v1, file-manager related configs and their reference codes in L1DependentApp

* fix: spotless

* feat: remove traces-api-facade module

* feat: remove traces-counter-v1 from coordinator config and removed TracesFileManager, TracesRpcClientV1, TraceCounterV1WatcherClient classes

* feat: removed TracesCounterV1 and update tests

* feat: removed TracingModuleV1 and traces-limits-v1 toml files

* feat: removed TracesClientV1, TracesClientV2Adapter, and revised codes accordingly

* feat: removed FileManager in CoordinatorConfig and updated test case for assertConsecutiveBlocksRange

* feat: updated l2-node-besu docker-compose configs

* feat: rename traces-node-v2 to traces-node in all places

* feat: use prover v3 compression proofs for tests

* fix: spotless
This commit is contained in:
jonesho
2025-05-09 19:52:46 +08:00
committed by GitHub
parent 5b7a84185d
commit 47a9d07d4f
96 changed files with 444 additions and 7940 deletions

View File

@@ -51,7 +51,6 @@ jobs:
- '.github/workflows/build-and-publish.yml'
- '.github/workflows/main.yml'
- '.github/workflows/reuse-*.yml'
- 'config/common/traces-limits-v1.toml'
- 'config/common/traces-limits-v2.toml'
- 'config/coordinator/**'
- 'e2e/**'

View File

@@ -49,7 +49,6 @@ jobs:
- '.github/workflows/build-and-publish.yml'
- '.github/workflows/main.yml'
- '.github/workflows/reuse-*.yml'
- 'config/common/traces-limits-v1.toml'
- 'config/common/traces-limits-v2.toml'
- 'config/coordinator/**'
- 'e2e/**'

View File

@@ -147,7 +147,7 @@ jobs:
docker logs zkbesu-shomei --since 1h &>> docker_logs/zkbesu-shomei.txt || true
docker logs shomei-frontend --since 1h &>> docker_logs/shomei-frontend.txt || true
docker logs postman --since 1h &>> docker_logs/postman.txt || true
docker logs traces-node-v2 --since 1h &>> docker_logs/traces-node-v2.txt || true
docker logs traces-node --since 1h &>> docker_logs/traces-node.txt || true
docker logs l2-node-besu --since 1h &>> docker_logs/l2-node-besu.txt || true
docker logs transaction-exclusion-api --since 1h &>> docker_logs/transaction-exclusion-api.txt || true
docker logs sequencer --since 1h &>> docker_logs/sequencer.txt || true

View File

@@ -1,23 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="TracesApi" type="Application" factoryName="Application">
<option name="ALTERNATIVE_JRE_PATH" value="21" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="MAIN_CLASS_NAME" value="net.consensys.linea.traces.app.TracesAppMain" />
<module name="zkevm.traces-api-facade.app.main" />
<option name="PROGRAM_PARAMETERS" value="config/traces-api/traces-app-docker.config.toml config/traces-api/traces-app-local-dev.config.overrides.toml" />
<option name="VM_PARAMETERS" value="-Dvertx.configurationFile=config/traces-api/vertx-options.json -Dlog4j2.configurationFile=config/traces-api/log4j2-dev.xml" />
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
<option name="IS_PATH_MACRO_SUPPORTED" value="false" />
<option name="IS_IGNORE_MISSING_FILES" value="false" />
<option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
<ENTRIES>
<ENTRY IS_ENABLED="true" PARSER="runconfig" IS_EXECUTABLE="false" />
</ENTRIES>
</extension>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@@ -1,73 +0,0 @@
[traces-limits]
#
# Arithmetization module limits
#
ADD = 524288
BIN = 262144
#BIN_RT = 262144
EC_DATA = 4096
EUC = 1
EXT = 131072
HUB = 2097152
INSTRUCTION_DECODER = 512 # Ugly hack, TODO: @franklin
MMIO = 1048576
MMU = 524288
#MMU_ID = 256
MOD = 131072
MUL = 65536
MXP = 524288
#PHONEY_RLP = 65536 # can probably get lower
PUB_HASH = 32768
PUB_HASH_INFO = 8192
#OLD_PUB_HASH = 32768
#OLD_PUB_HASH_INFO = 8192
PUB_LOG = 16384
PUB_LOG_INFO = 16384
#RLP = 128
ROM = 1048576
ROM_LEX = 1048576
SHF = 65536
#SHF_RT = 262144
TX_RLP = 131072
TRM = 131072
WCP = 262144
#LOG_DATA = 262144
#LOG_INFO = 262144
RLP_ADDR = 262144
#RLP_TXN = 262144
RLP_TXRCPT = 262144
TXN_DATA = 262144
STP = 262144
#
# Block-specific limits
#
BLOCK_TX = 200 # max number of tx in an L2 block
BLOCK_L2L1LOGS = 16
BLOCK_KECCAK = 8192
BLOCK_L1SIZE = 1000000
#
# Precompiles limits
#
PRECOMPILE_ECRECOVER = 10000
PRECOMPILE_ECRECOVER_EFFECTIVE_CALL = 1000000
PRECOMPILE_SHA2 = 10000
PRECOMPILE_SHA2_BLOCKS = 1000000
PRECOMPILE_SHA2_EFFECTIVE_CALL = 1000000
PRECOMPILE_RIPEMD = 10000
PRECOMPILE_RIPEMD_EFFECTIVE_CALL = 1000000
PRECOMPILE_RIPEMD_BLOCKS = 1000000
PRECOMPILE_IDENTITY = 10000
PRECOMPILE_MODEXP = 10000
PRECOMPILE_MODEXP_EFFECTIVE_CALL = 1000
PRECOMPILE_ECADD = 10000
PRECOMPILE_ECADD_EFFECTIVE_CALL = 1000000
PRECOMPILE_ECMUL = 10000
PRECOMPILE_ECMUL_EFFECTIVE_CALL = 1000000
PRECOMPILE_ECPAIRING = 10000
PRECOMPILE_ECPAIRING_EFFECTIVE_CALL = 1000000
PRECOMPILE_ECPAIRING_MILLER_LOOP = 1000000
#PRECOMPILE_ECPAIRING_WEIGHTED = 10000
PRECOMPILE_BLAKE2F = 512
PRECOMPILE_BLAKE2F_ROUNDS = 1000000

View File

@@ -1,55 +0,0 @@
##
# This file specifies prover limit by each EVM module
# WARN: The prover/arithmetization team has the ownership of this.
# Changing these values may compromise the system.
# issue: https://github.com/ConsenSys/zkevm-monorepo/issues/525
##
[traces-limits]
#
# Arithmetization module limits
#
ADD = 524288
BIN = 262144
BIN_RT = 262144
EC_DATA = 4096
EXT = 131072
HUB = 2097152
INSTRUCTION_DECODER = 512
MMIO = 131072
MMU = 131072
MMU_ID = 131072
MOD = 131072
MUL = 65536
MXP = 524288
PHONEY_RLP = 32768
PUB_HASH = 32768
PUB_HASH_INFO = 32768
PUB_LOG = 16384
PUB_LOG_INFO = 16384
RLP = 512
ROM = 4194304
SHF = 65536
SHF_RT = 4096
TX_RLP = 131072
WCP = 262144
#
# Block-specific limits
#
BLOCK_TX = 200 # max number of tx in an L2 block
BLOCK_L2L1LOGS = 16
BLOCK_KECCAK = 8192
#
# Precompiles limits
#
PRECOMPILE_ECRECOVER = 10000
PRECOMPILE_SHA2 = 10000
PRECOMPILE_RIPEMD = 10000
PRECOMPILE_IDENTITY = 10000
PRECOMPILE_MODEXP = 10000
PRECOMPILE_ECADD = 10000
PRECOMPILE_ECMUL = 10000
PRECOMPILE_ECPAIRING = 10000
PRECOMPILE_BLAKE2F = 512

View File

@@ -1,16 +0,0 @@
# Can override any of these properties in the CLI as follows:
# -Dconfig.override.sequencer.engine-api=http://127.0.0.1:8650
#[sequencer]
#version="0.0.1"
#engine-api="http://traces-node:8550"
#eth-api="http://traces-node:8545"
# Config of Traces API Facade endpoint
[traces]
endpoints=["http://traces-api:8080/"]
[traces.file-manager]
polling-interval="PT0S"
[l1]
disabled=true

View File

@@ -10,16 +10,15 @@ fs-requests-directory = "/data/prover/v3/aggregation/requests"
fs-responses-directory = "/data/prover/v3/aggregation/responses"
[traces]
switch-to-linea-besu=true
blob-compressor-version="V1_0_1"
expected-traces-api-version-v2="beta-v2.1-rc14"
[traces.counters-v2]
endpoints=["http://traces-node-v2:8545/"]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.conflation-v2]
endpoints=["http://traces-node-v2:8545/"]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2

View File

@@ -56,30 +56,20 @@ fs-responses-directory = "/data/prover/v2/aggregation/responses"
#fs-responses-directory = "/data/prover/v3/aggregation/responses"
[traces]
switch-to-linea-besu=false
blob-compressor-version="V0_1_0"
raw-execution-traces-version="0.2.0"
expected-traces-api-version="0.2.0"
[traces.counters]
endpoints=["http://traces-api:8080/"]
request-limit-per-endpoint=2
expected-traces-api-version-v2="beta-v2.1-rc14"
[traces.counters-v2]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.conflation]
endpoints=["http://traces-api:8080/"]
request-limit-per-endpoint=2
[traces.conflation-v2]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.file-manager]
traces-file-extension="json.gz"
raw-traces-directory="/data/traces/raw"
non-canonical-raw-traces-directory="/data/traces/raw-non-canonical"
create-non-canonical-directory=true
polling-interval="PT1S"
traces-file-creation-wait-timeout="PT2M"
[state-manager]
version="2.3.0"
endpoints=["http://shomei:8888/"]

View File

@@ -12,25 +12,23 @@ endpoint="http://127.0.0.1:9000"
[prover]
[prover.execution]
fs-requests-directory = "tmp/local/prover/v2/execution/requests"
fs-responses-directory = "tmp/local/prover/v2/execution/responses"
fs-requests-directory = "tmp/local/prover/v3/execution/requests"
fs-responses-directory = "tmp/local/prover/v3/execution/responses"
[prover.blob-compression]
fs-requests-directory = "tmp/local/prover/v2/compression/requests"
fs-responses-directory = "tmp/local/prover/v2/compression/responses"
fs-requests-directory = "tmp/local/prover/v3/compression/requests"
fs-responses-directory = "tmp/local/prover/v3/compression/responses"
[prover.proof-aggregation]
fs-requests-directory = "tmp/local/prover/v2/aggregation/requests"
fs-responses-directory = "tmp/local/prover/v2/aggregation/responses"
# Config of Traces API Facade endpoint
[traces]
[traces.counters]
endpoints=["http://127.0.0.1:8080/"]
[traces.conflation]
endpoints=["http://127.0.0.1:8080/"]
[traces.file-manager]
traces-file-extension = "json.gz"
raw-traces-directory = "tmp/local/traces/raw"
non-canonical-raw-traces-directory = "tmp/local/traces/raw-non-canonical"
blob-compressor-version="V1_0_1"
expected-traces-api-version-v2="beta-v2.1-rc14"
[traces.counters-v2]
endpoints=["http://127.0.0.1:8745/"]
[traces.conflation-v2]
endpoints=["http://127.0.0.1:8745/"]
[state-manager]
endpoints=["http://127.0.0.1:8998/"]
@@ -54,7 +52,7 @@ finalized-block-tag="finalized"
earliestBlock=0
[l2]
rpc-endpoint="http://127.0.0.1:8645"
rpc-endpoint="http://127.0.0.1:8845"
blocks-to-finalization=0
[database]

View File

@@ -101,8 +101,8 @@ run {
.findAll { it.key.startsWith("config") }
.collect { "-D${it.key}=${it.value}" }
args = [
"--traces-limits",
"config/common/traces-limits-v1.toml",
"--traces-limits-v2",
"config/common/traces-limits-v2.toml",
"--smart-contract-errors",
"config/common/smart-contract-errors.toml",
"--gas-price-cap-time-of-day-multipliers",

View File

@@ -7,14 +7,12 @@ import com.github.michaelbull.result.get
import com.github.michaelbull.result.getOrElse
import com.sksamuel.hoplite.ConfigLoaderBuilder
import com.sksamuel.hoplite.addPathSource
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.zkevm.coordinator.app.config.BlockParameterDecoder
import net.consensys.zkevm.coordinator.app.config.CoordinatorConfig
import net.consensys.zkevm.coordinator.app.config.CoordinatorConfigTomlDto
import net.consensys.zkevm.coordinator.app.config.GasPriceCapTimeOfDayMultipliersConfig
import net.consensys.zkevm.coordinator.app.config.SmartContractErrorCodesConfig
import net.consensys.zkevm.coordinator.app.config.TracesLimitsV1ConfigFile
import net.consensys.zkevm.coordinator.app.config.TracesLimitsV2ConfigFile
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
@@ -63,18 +61,15 @@ inline fun <reified T : Any> loadConfigsAndLogErrors(
fun loadConfigsOrError(
coordinatorConfigFiles: List<Path>,
tracesLimitsFileV1: Path?,
tracesLimitsFileV2: Path?,
tracesLimitsFileV2: Path,
gasPriceCapTimeOfDayMultipliersFile: Path,
smartContractErrorsFile: Path,
logger: Logger = LogManager.getLogger("linea.coordinator.config")
): Result<CoordinatorConfigTomlDto, String> {
val coordinatorBaseConfigs =
loadConfigsAndLogErrors<CoordinatorConfigTomlDto>(coordinatorConfigFiles, "coordinator", logger)
val tracesLimitsV1Configs = tracesLimitsFileV1
?.let { loadConfigsAndLogErrors<TracesLimitsV1ConfigFile>(listOf(it), "traces limit v1", logger) }
val tracesLimitsV2Configs = tracesLimitsFileV2
?.let { loadConfigsAndLogErrors<TracesLimitsV2ConfigFile>(listOf(it), "traces limits v2", logger) }
val tracesLimitsV2Configs =
loadConfigsAndLogErrors<TracesLimitsV2ConfigFile>(listOf(tracesLimitsFileV2), "traces limits v2", logger)
val gasPriceCapTimeOfDayMultipliersConfig =
loadConfigsAndLogErrors<GasPriceCapTimeOfDayMultipliersConfig>(
listOf(gasPriceCapTimeOfDayMultipliersFile),
@@ -88,8 +83,7 @@ fun loadConfigsOrError(
)
val configError = listOf(
coordinatorBaseConfigs,
tracesLimitsV1Configs,
tracesLimitsV1Configs,
tracesLimitsV2Configs,
gasPriceCapTimeOfDayMultipliersConfig,
smartContractErrorsConfig
)
@@ -103,8 +97,7 @@ fun loadConfigsOrError(
val baseConfig = coordinatorBaseConfigs.get()!!
val finalConfig = baseConfig.copy(
conflation = baseConfig.conflation.copy(
_tracesLimitsV1 = tracesLimitsV1Configs?.get()?.tracesLimits?.let { TracesCountersV1(it) },
_tracesLimitsV2 = tracesLimitsV2Configs?.get()?.tracesLimits?.let { TracesCountersV2(it) },
_tracesLimitsV2 = tracesLimitsV2Configs.get()?.tracesLimits?.let { TracesCountersV2(it) },
_smartContractErrors = smartContractErrorsConfig.get()!!.smartContractErrors
),
l1DynamicGasPriceCapService = baseConfig.l1DynamicGasPriceCapService.copy(
@@ -118,15 +111,13 @@ fun loadConfigsOrError(
fun loadConfigs(
coordinatorConfigFiles: List<Path>,
tracesLimitsFileV1: Path?,
tracesLimitsFileV2: Path?,
tracesLimitsFileV2: Path,
gasPriceCapTimeOfDayMultipliersFile: Path,
smartContractErrorsFile: Path,
logger: Logger = LogManager.getLogger("linea.coordinator.config")
): CoordinatorConfig {
loadConfigsOrError(
coordinatorConfigFiles,
tracesLimitsFileV1,
tracesLimitsFileV2,
gasPriceCapTimeOfDayMultipliersFile,
smartContractErrorsFile,

View File

@@ -28,14 +28,6 @@ internal constructor(private val errorWriter: PrintWriter, private val startActi
@Parameters(paramLabel = "CONFIG.toml", description = ["Configuration files"])
private val configFiles: List<File>? = null
@CommandLine.Option(
names = ["--traces-limits"],
paramLabel = "<FILE>",
description = ["Prover traces limits"],
arity = "1"
)
private val tracesLimitsFile: File? = null
@CommandLine.Option(
names = ["--traces-limits-v2"],
paramLabel = "<FILE>",
@@ -78,8 +70,8 @@ internal constructor(private val errorWriter: PrintWriter, private val startActi
printUsage(errorWriter)
return 1
}
if (tracesLimitsFile == null && tracesLimitsV2File == null) {
errorWriter.println("Please provide traces-limits or traces-limits-v2 file!")
if (tracesLimitsV2File == null) {
errorWriter.println("Please provide traces-limits-v2 file!")
printUsage(errorWriter)
return 1
}
@@ -102,8 +94,7 @@ internal constructor(private val errorWriter: PrintWriter, private val startActi
val configs = linea.coordinator.config.loadConfigs(
coordinatorConfigFiles = configFiles.map { it.toPath() },
tracesLimitsFileV1 = tracesLimitsFile?.toPath(),
tracesLimitsFileV2 = tracesLimitsV2File?.toPath(),
tracesLimitsFileV2 = tracesLimitsV2File.toPath(),
smartContractErrorsFile = smartContractErrorsFile.toPath(),
gasPriceCapTimeOfDayMultipliersFile = gasPriceCapTimeOfDayMultipliersFile.toPath(),
logger = logger

View File

@@ -35,8 +35,6 @@ import net.consensys.linea.ethereum.gaspricing.staticcap.FeeHistoryFetcherImpl
import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
import net.consensys.linea.metrics.LineaMetricsCategory
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.traces.TracesCounters
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.zkevm.LongRunningService
import net.consensys.zkevm.coordinator.app.config.CoordinatorConfig
@@ -44,13 +42,8 @@ import net.consensys.zkevm.coordinator.app.config.Type2StateProofProviderConfig
import net.consensys.zkevm.coordinator.blockcreation.BatchesRepoBasedLastProvenBlockNumberProvider
import net.consensys.zkevm.coordinator.blockcreation.BlockCreationMonitor
import net.consensys.zkevm.coordinator.blockcreation.GethCliqueSafeBlockProvider
import net.consensys.zkevm.coordinator.blockcreation.TracesConflationClientV2Adapter
import net.consensys.zkevm.coordinator.blockcreation.TracesCountersClientV2Adapter
import net.consensys.zkevm.coordinator.blockcreation.TracesCountersV1WatcherClient
import net.consensys.zkevm.coordinator.blockcreation.TracesFilesManager
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.ShomeiClient
import net.consensys.zkevm.coordinator.clients.TracesGeneratorJsonRpcClientV1
import net.consensys.zkevm.coordinator.clients.TracesGeneratorJsonRpcClientV2
import net.consensys.zkevm.coordinator.clients.prover.ProverClientFactory
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClient
@@ -350,11 +343,8 @@ class L1DependentApp(
val calculators: MutableList<ConflationCalculator> =
mutableListOf(
ConflationCalculatorByExecutionTraces(
tracesCountersLimit = when (configs.traces.switchToLineaBesu) {
true -> configs.conflation.tracesLimitsV2
false -> configs.conflation.tracesLimitsV1
},
emptyTracesCounters = getEmptyTracesCounters(configs.traces.switchToLineaBesu),
tracesCountersLimit = configs.conflation.tracesLimitsV2,
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT,
metricsFacade = metricsFacade,
log = logger
),
@@ -383,7 +373,7 @@ class L1DependentApp(
lastBlockNumber = lastProcessedBlockNumber,
syncCalculators = createCalculatorsForBlobsAndConflation(logger, compressedBlobCalculator),
deferredTriggerConflationCalculators = listOf(deadlineConflationCalculatorRunnerNew),
emptyTracesCounters = getEmptyTracesCounters(configs.traces.switchToLineaBesu),
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT,
log = logger
)
@@ -459,11 +449,6 @@ class L1DependentApp(
maxProvenBlobCache
)
)
val blobShnarfCalculatorVersion = if (configs.traces.switchToLineaBesu) {
ShnarfCalculatorVersion.V1_0_1
} else {
ShnarfCalculatorVersion.V0_1_0
}
val blobCompressionProofCoordinator = BlobCompressionProofCoordinator(
vertx = vertx,
@@ -471,7 +456,7 @@ class L1DependentApp(
blobCompressionProverClient = proverClientFactory.blobCompressionProverClient(),
rollingBlobShnarfCalculator = RollingBlobShnarfCalculator(
blobShnarfCalculator = GoBackedBlobShnarfCalculator(
version = blobShnarfCalculatorVersion,
version = ShnarfCalculatorVersion.V1_0_1,
metricsFacade = metricsFacade
),
blobsRepository = blobsRepository,
@@ -707,102 +692,41 @@ class L1DependentApp(
private val block2BatchCoordinator = run {
val tracesCountersLog = LogManager.getLogger("clients.TracesCounters")
val tracesCountersClient = when (configs.traces.switchToLineaBesu) {
true -> {
val tracesCounterV2Config = configs.traces.countersV2!!
val expectedTracesApiVersionV2 = configs.traces.expectedTracesApiVersionV2!!
val tracesCountersClientV2 = TracesGeneratorJsonRpcClientV2(
vertx = vertx,
rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
endpoints = tracesCounterV2Config.endpoints.toSet(),
maxInflightRequestsPerClient = tracesCounterV2Config.requestLimitPerEndpoint,
log = tracesCountersLog
),
config = TracesGeneratorJsonRpcClientV2.Config(
expectedTracesApiVersion = expectedTracesApiVersionV2
),
retryConfig = tracesCounterV2Config.requestRetryConfig,
val tracesCountersClient = run {
val tracesCounterV2Config = configs.traces.countersV2
val expectedTracesApiVersionV2 = configs.traces.expectedTracesApiVersionV2
TracesGeneratorJsonRpcClientV2(
vertx = vertx,
rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
endpoints = tracesCounterV2Config.endpoints.toSet(),
maxInflightRequestsPerClient = tracesCounterV2Config.requestLimitPerEndpoint,
log = tracesCountersLog
)
TracesCountersClientV2Adapter(tracesCountersClientV2 = tracesCountersClientV2)
}
false -> {
val tracesFilesManager = TracesFilesManager(
vertx,
TracesFilesManager.Config(
configs.traces.fileManager.rawTracesDirectory,
configs.traces.fileManager.nonCanonicalRawTracesDirectory,
configs.traces.fileManager.pollingInterval.toKotlinDuration(),
configs.traces.fileManager.tracesFileCreationWaitTimeout.toKotlinDuration(),
configs.traces.rawExecutionTracesVersion,
configs.traces.fileManager.tracesFileExtension,
configs.traces.fileManager.createNonCanonicalDirectory
)
)
val tracesCountersClientV1 = TracesGeneratorJsonRpcClientV1(
vertx = vertx,
rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
endpoints = configs.traces.counters.endpoints.toSet(),
maxInflightRequestsPerClient = configs.traces.counters.requestLimitPerEndpoint,
log = tracesCountersLog
),
config = TracesGeneratorJsonRpcClientV1.Config(
rawExecutionTracesVersion = configs.traces.rawExecutionTracesVersion,
expectedTracesApiVersion = configs.traces.expectedTracesApiVersion
),
retryConfig = configs.traces.counters.requestRetryConfig,
log = tracesCountersLog
)
TracesCountersV1WatcherClient(
tracesFilesManager = tracesFilesManager,
tracesCountersClientV1 = tracesCountersClientV1
)
}
),
config = TracesGeneratorJsonRpcClientV2.Config(
expectedTracesApiVersion = expectedTracesApiVersionV2
),
retryConfig = tracesCounterV2Config.requestRetryConfig,
log = tracesCountersLog
)
}
val tracesConflationLog = LogManager.getLogger("clients.TracesConflation")
val tracesConflationClient = when (configs.traces.switchToLineaBesu) {
true -> {
val tracesConflationConfigV2 = configs.traces.conflationV2!!
val expectedTracesApiVersionV2 = configs.traces.expectedTracesApiVersionV2!!
val tracesConflationClientV2 = TracesGeneratorJsonRpcClientV2(
vertx = vertx,
rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
endpoints = tracesConflationConfigV2.endpoints.toSet(),
maxInflightRequestsPerClient = tracesConflationConfigV2.requestLimitPerEndpoint,
log = tracesConflationLog
),
config = TracesGeneratorJsonRpcClientV2.Config(
expectedTracesApiVersion = expectedTracesApiVersionV2
),
retryConfig = configs.traces.conflation.requestRetryConfig,
val tracesConflationClient = run {
val tracesConflationConfigV2 = configs.traces.conflationV2
val expectedTracesApiVersionV2 = configs.traces.expectedTracesApiVersionV2
TracesGeneratorJsonRpcClientV2(
vertx = vertx,
rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
endpoints = tracesConflationConfigV2.endpoints.toSet(),
maxInflightRequestsPerClient = tracesConflationConfigV2.requestLimitPerEndpoint,
log = tracesConflationLog
)
TracesConflationClientV2Adapter(
tracesConflationClientV2 = tracesConflationClientV2
)
}
false -> {
TracesGeneratorJsonRpcClientV1(
vertx = vertx,
rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
endpoints = configs.traces.conflation.endpoints.toSet(),
maxInflightRequestsPerClient = configs.traces.conflation.requestLimitPerEndpoint,
log = tracesConflationLog
),
config = TracesGeneratorJsonRpcClientV1.Config(
rawExecutionTracesVersion = configs.traces.rawExecutionTracesVersion,
expectedTracesApiVersion = configs.traces.expectedTracesApiVersion
),
retryConfig = configs.traces.conflation.requestRetryConfig,
log = tracesConflationLog
)
}
),
config = TracesGeneratorJsonRpcClientV2.Config(
expectedTracesApiVersion = expectedTracesApiVersionV2
),
retryConfig = tracesConflationConfigV2.requestRetryConfig,
log = tracesConflationLog
)
}
val blobsConflationHandler: (BlocksConflation) -> SafeFuture<*> = run {
@@ -1142,13 +1066,6 @@ class L1DependentApp(
}
}
fun getEmptyTracesCounters(switchToLineaBesu: Boolean): TracesCounters {
return when (switchToLineaBesu) {
true -> TracesCountersV2.EMPTY_TRACES_COUNT
false -> TracesCountersV1.EMPTY_TRACES_COUNT
}
}
fun setupL1FinalizationMonitorForShomeiFrontend(
type2StateProofProviderConfig: Type2StateProofProviderConfig?,
httpJsonRpcClientFactory: VertxHttpJsonRpcClientFactory,

View File

@@ -15,15 +15,12 @@ import net.consensys.linea.ethereum.gaspricing.dynamiccap.TimeOfDayMultipliers
import net.consensys.linea.ethereum.gaspricing.dynamiccap.getAllTimeOfDayKeys
import net.consensys.linea.jsonrpc.client.RequestRetryConfig
import net.consensys.linea.traces.TracesCounters
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.TracingModuleV1
import net.consensys.linea.traces.TracingModuleV2
import net.consensys.zkevm.coordinator.app.L2NetworkGasPricingService
import net.consensys.zkevm.coordinator.clients.prover.ProversConfig
import java.math.BigInteger
import java.net.URL
import java.nio.file.Path
import java.time.Duration
import kotlin.time.Duration.Companion.milliseconds
import kotlin.time.Duration.Companion.minutes
@@ -41,7 +38,6 @@ data class ConflationConfig(
val conflationDeadlineCheckInterval: Duration,
val conflationDeadlineLastBlockConfirmationDelay: Duration,
val blocksLimit: Long? = null,
private var _tracesLimitsV1: TracesCountersV1?,
private var _tracesLimitsV2: TracesCountersV2?,
private var _smartContractErrors: SmartContractErrors?,
val fetchBlocksLimit: Int,
@@ -63,9 +59,6 @@ data class ConflationConfig(
?: emptyMap()
}
val tracesLimitsV1: TracesCounters
get() = _tracesLimitsV1 ?: throw IllegalStateException("Traces limits not defined!")
val tracesLimitsV2: TracesCounters
get() = _tracesLimitsV2 ?: throw IllegalStateException("Traces limits not defined!")
val smartContractErrors: SmartContractErrors = _smartContractErrors!!
@@ -181,26 +174,11 @@ data class AggregationConfig(
data class TracesConfig(
val rawExecutionTracesVersion: String,
val expectedTracesApiVersion: String,
val counters: FunctionalityEndpoint,
val conflation: FunctionalityEndpoint,
val fileManager: FileManager,
val switchToLineaBesu: Boolean = false,
val blobCompressorVersion: BlobCompressorVersion,
val expectedTracesApiVersionV2: String? = null,
val countersV2: FunctionalityEndpoint? = null,
val conflationV2: FunctionalityEndpoint? = null
val expectedTracesApiVersionV2: String,
val countersV2: FunctionalityEndpoint,
val conflationV2: FunctionalityEndpoint
) {
init {
if (switchToLineaBesu) {
require(expectedTracesApiVersionV2 != null) {
"expectedTracesApiVersionV2 is required when switching to linea besu for tracing"
}
require(countersV2 != null) { "countersV2 is required when switching to linea besu for tracing" }
require(conflationV2 != null) { "conflationV2 is required when switching to linea besu for tracing" }
}
}
data class FunctionalityEndpoint(
val endpoints: List<URL>,
val requestLimitPerEndpoint: UInt,
@@ -210,15 +188,6 @@ data class TracesConfig(
require(requestLimitPerEndpoint > 0u) { "requestLimitPerEndpoint must be greater than 0" }
}
}
data class FileManager(
val tracesFileExtension: String,
val rawTracesDirectory: Path,
val nonCanonicalRawTracesDirectory: Path,
val createNonCanonicalDirectory: Boolean,
val pollingInterval: Duration,
val tracesFileCreationWaitTimeout: Duration
)
}
data class StateManagerClientConfig(
@@ -515,7 +484,6 @@ data class Type2StateProofProviderConfig(
override val requestRetry: RequestRetryConfigTomlFriendly
) : RequestRetryConfigurable
data class TracesLimitsV1ConfigFile(val tracesLimits: Map<TracingModuleV1, UInt>)
data class TracesLimitsV2ConfigFile(val tracesLimits: Map<TracingModuleV2, UInt>)
//

View File

@@ -1,36 +0,0 @@
package net.consensys.zkevm.coordinator.blockcreation
import com.github.michaelbull.result.Result
import linea.domain.BlockNumberAndHash
import net.consensys.linea.errors.ErrorResponse
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
import net.consensys.zkevm.coordinator.clients.GetTracesCountersResponse
import net.consensys.zkevm.coordinator.clients.TracesConflationClientV1
import net.consensys.zkevm.coordinator.clients.TracesConflationClientV2
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV1
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV2
import net.consensys.zkevm.coordinator.clients.TracesServiceErrorType
import tech.pegasys.teku.infrastructure.async.SafeFuture
/**
 * Bridges the V2 traces-counters API onto the legacy [TracesCountersClientV1] contract,
 * letting V1 call sites talk to a V2 backend unchanged.
 *
 * The V2 endpoint identifies a block by its number alone, so the hash carried by the
 * V1-style [BlockNumberAndHash] argument is not forwarded.
 */
class TracesCountersClientV2Adapter(
    private val tracesCountersClientV2: TracesCountersClientV2
) : TracesCountersClientV1 {
    override fun rollupGetTracesCounters(
        block: BlockNumberAndHash
    ): SafeFuture<Result<GetTracesCountersResponse, ErrorResponse<TracesServiceErrorType>>> =
        // Only the block number is meaningful to the V2 API.
        tracesCountersClientV2.getTracesCounters(block.number)
}
/**
 * Bridges the V2 traces-conflation API onto the legacy [TracesConflationClientV1] contract.
 *
 * V1 callers supply an explicit list of blocks, whereas the V2 endpoint takes a
 * [startBlockNumber, endBlockNumber] range; only the smallest and largest block
 * numbers from the list are forwarded.
 */
class TracesConflationClientV2Adapter(
    private val tracesConflationClientV2: TracesConflationClientV2
) : TracesConflationClientV1 {
    override fun rollupGenerateConflatedTracesToFile(
        blocks: List<BlockNumberAndHash>
    ): SafeFuture<Result<GenerateTracesResponse, ErrorResponse<TracesServiceErrorType>>> {
        // NOTE(review): minOf/maxOf throw on an empty list — assumes callers always
        // pass at least one block; confirm against call sites.
        val rangeStart = blocks.minOf { it.number }
        val rangeEnd = blocks.maxOf { it.number }
        return tracesConflationClientV2.generateConflatedTracesToFile(
            startBlockNumber = rangeStart,
            endBlockNumber = rangeEnd
        )
    }
}

View File

@@ -1,26 +0,0 @@
package net.consensys.zkevm.coordinator.blockcreation
import com.github.michaelbull.result.Result
import linea.domain.BlockNumberAndHash
import net.consensys.linea.errors.ErrorResponse
import net.consensys.zkevm.coordinator.clients.GetTracesCountersResponse
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV1
import net.consensys.zkevm.coordinator.clients.TracesServiceErrorType
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.infrastructure.async.SafeFuture
/**
 * A [TracesCountersClientV1] decorator that waits for the raw traces file of the
 * requested block to be generated on disk before delegating the counters request
 * to the wrapped V1 client.
 */
class TracesCountersV1WatcherClient(
    private val tracesFilesManager: TracesFilesManager,
    private val tracesCountersClientV1: TracesCountersClientV1,
    private val log: Logger = LogManager.getLogger(TracesCountersV1WatcherClient::class.java)
) : TracesCountersClientV1 {
    override fun rollupGetTracesCounters(block: BlockNumberAndHash):
        SafeFuture<Result<GetTracesCountersResponse, ErrorResponse<TracesServiceErrorType>>> {
        // First block until the traces file keyed by (number, hash) exists.
        val tracesFileReady = tracesFilesManager.waitRawTracesGenerationOf(block.number, Bytes32.wrap(block.hash))
        return tracesFileReady.thenCompose {
            log.trace("Traces file generated: block={}", block.number)
            tracesCountersClientV1.rollupGetTracesCounters(block)
        }
    }
}

View File

@@ -1,109 +0,0 @@
package net.consensys.zkevm.coordinator.blockcreation
import io.vertx.core.Future
import io.vertx.core.Vertx
import net.consensys.linea.async.AsyncRetryer
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.traces.TracesFileNameSupplier
import net.consensys.linea.traces.TracesFiles
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.io.FileNotFoundException
import java.nio.file.Files
import java.nio.file.Path
import kotlin.time.Duration
class TracesFilesManager(
private val vertx: Vertx,
private val config: Config,
private val tracesFileNameSupplier: TracesFileNameSupplier = TracesFiles::rawTracesFileNameSupplierV1
) {
data class Config(
val tracesFolder: Path,
val nonCanonicalTracesDir: Path,
val pollingInterval: Duration,
val tracesGenerationTimeout: Duration,
val tracesEngineVersion: String,
val tracesFileExtension: String,
val createNonCanonicalTracesDirIfDoesNotExist: Boolean
)
private val log: Logger = LogManager.getLogger(this::class.java)
private val retries: Int = run {
config.tracesGenerationTimeout.inWholeMilliseconds /
config.pollingInterval.inWholeMilliseconds.coerceAtLeast(1L)
}.toInt()
init {
if (!Files.exists(config.nonCanonicalTracesDir)) {
if (config.createNonCanonicalTracesDirIfDoesNotExist) {
Files.createDirectories(config.nonCanonicalTracesDir)
} else {
throw FileNotFoundException("${config.nonCanonicalTracesDir} directory not found!")
}
}
}
fun waitRawTracesGenerationOf(
blockNumber: ULong,
blockHash: Bytes32
): SafeFuture<String> {
val fileName =
tracesFileNameSupplier(
blockNumber,
blockHash,
config.tracesEngineVersion,
config.tracesFileExtension
)
val targetFile = config.tracesFolder.resolve(fileName).toFile()
return AsyncRetryer.retry(
vertx = vertx,
maxRetries = retries,
backoffDelay = config.pollingInterval
) {
log.trace("Waiting for traces file: ${targetFile.absolutePath}")
if (targetFile.exists()) {
log.trace("Found for traces file: ${targetFile.absolutePath}")
SafeFuture.completedFuture(targetFile.absolutePath)
} else {
val errorMessage = "File matching '$fileName' not found after ${config.tracesGenerationTimeout}."
SafeFuture.failedFuture(FileNotFoundException(errorMessage))
}
}
}
internal fun cleanNonCanonicalSiblingsByHeight(
blockNumber: ULong,
canonicalBlockHashToKeep: Bytes32
): SafeFuture<List<String>> {
return vertx
.fileSystem()
.readDir(config.tracesFolder.toString())
.flatMap { listOfFiles ->
val filesToMove =
listOfFiles.filter { fileAbsolutePath ->
val fileName = Path.of(fileAbsolutePath).fileName.toString().lowercase()
fileName.startsWith("$blockNumber-") &&
fileName.endsWith(config.tracesFileExtension.lowercase()) &&
!fileName.contains(canonicalBlockHashToKeep.toHexString().lowercase())
}
Future.all(
filesToMove.map { fileAbsolutePath ->
val destination =
config.nonCanonicalTracesDir
.resolve(Path.of(fileAbsolutePath).fileName)
.toString()
log.info("Moving non-canonical traces file $fileAbsolutePath --> $destination")
vertx.fileSystem().move(fileAbsolutePath, destination)
}
)
.map { filesToMove }
}
.toSafeFuture()
.whenException { th -> log.error("Failed to move traces files: errorMessage={}", th.message, th) }
}
}

View File

@@ -23,7 +23,6 @@ import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertDoesNotThrow
import org.junit.jupiter.api.assertThrows
import org.mockito.kotlin.timeout
import java.math.BigInteger
import java.net.URI
import java.nio.file.Path
@@ -42,7 +41,6 @@ class CoordinatorConfigTest {
conflationDeadlineCheckInterval = Duration.parse("PT3S"),
conflationDeadlineLastBlockConfirmationDelay = Duration.parse("PT2S"),
blocksLimit = 2,
_tracesLimitsV1 = expectedTracesCountersV1,
_tracesLimitsV2 = expectedTracesLimitsV2,
_smartContractErrors = mapOf(
// L1 Linea Rollup
@@ -97,37 +95,24 @@ class CoordinatorConfigTest {
)
private val tracesConfig = TracesConfig(
switchToLineaBesu = false,
blobCompressorVersion = BlobCompressorVersion.V0_1_0,
rawExecutionTracesVersion = "0.2.0",
expectedTracesApiVersion = "0.2.0",
counters = TracesConfig.FunctionalityEndpoint(
listOf(
URI("http://traces-api:8080/").toURL()
),
requestLimitPerEndpoint = 2U,
expectedTracesApiVersionV2 = "v0.8.0-rc8",
conflationV2 = TracesConfig.FunctionalityEndpoint(
endpoints = listOf(URI("http://traces-node:8545/").toURL()),
requestLimitPerEndpoint = 1U,
requestRetry = RequestRetryConfigTomlFriendly(
backoffDelay = Duration.parse("PT1S"),
failuresWarningThreshold = 2
)
),
conflation = TracesConfig.FunctionalityEndpoint(
endpoints = listOf(
URI("http://traces-api:8080/").toURL()
),
requestLimitPerEndpoint = 2U,
countersV2 = TracesConfig.FunctionalityEndpoint(
endpoints = listOf(URI("http://traces-node:8545/").toURL()),
requestLimitPerEndpoint = 1U,
requestRetry = RequestRetryConfigTomlFriendly(
backoffDelay = Duration.parse("PT1S"),
failuresWarningThreshold = 2
)
),
fileManager = TracesConfig.FileManager(
tracesFileExtension = "json.gz",
rawTracesDirectory = Path.of("/data/traces/raw"),
nonCanonicalRawTracesDirectory = Path.of("/data/traces/raw-non-canonical"),
createNonCanonicalDirectory = true,
pollingInterval = Duration.parse("PT1S"),
tracesFileCreationWaitTimeout = Duration.parse("PT2M")
)
)
@@ -388,7 +373,6 @@ class CoordinatorConfigTest {
Path.of("../../config/coordinator/coordinator-local-dev.config.overrides.toml"),
Path.of("../../config/coordinator/coordinator-local-dev.config-traces-v2.overrides.toml")
),
tracesLimitsFileV1 = Path.of("../../config/common/traces-limits-v1.toml"),
tracesLimitsFileV2 = Path.of("../../config/common/traces-limits-v2.toml"),
gasPriceCapTimeOfDayMultipliersFile = Path.of("../../config/common/gas-price-cap-time-of-day-multipliers.toml"),
smartContractErrorsFile = Path.of("../../config/common/smart-contract-errors.toml")
@@ -406,7 +390,6 @@ class CoordinatorConfigTest {
fun `should parse and consolidate configs`() {
val configs = loadConfigs(
coordinatorConfigFiles = listOf(pathToResource("configs/coordinator.config.toml")),
tracesLimitsFileV1 = pathToResource("configs/traces-limits-v1.toml"),
tracesLimitsFileV2 = pathToResource("configs/traces-limits-v2.toml"),
gasPriceCapTimeOfDayMultipliersFile = pathToResource("configs/gas-price-cap-time-of-day-multipliers.toml"),
smartContractErrorsFile = pathToResource("configs/smart-contract-errors.toml")
@@ -423,7 +406,6 @@ class CoordinatorConfigTest {
pathToResource("configs/coordinator.config.toml"),
pathToResource("configs/coordinator-web3signer-override.config.toml")
),
tracesLimitsFileV1 = pathToResource("configs/traces-limits-v1.toml"),
tracesLimitsFileV2 = pathToResource("configs/traces-limits-v2.toml"),
gasPriceCapTimeOfDayMultipliersFile = pathToResource("configs/gas-price-cap-time-of-day-multipliers.toml"),
smartContractErrorsFile = pathToResource("configs/smart-contract-errors.toml")
@@ -446,7 +428,6 @@ class CoordinatorConfigTest {
pathToResource("configs/coordinator.config.toml"),
pathToResource("configs/coordinator-traces-v2-override.config.toml")
),
tracesLimitsFileV1 = pathToResource("configs/traces-limits-v1.toml"),
tracesLimitsFileV2 = pathToResource("configs/traces-limits-v2.toml"),
gasPriceCapTimeOfDayMultipliersFile = pathToResource("configs/gas-price-cap-time-of-day-multipliers.toml"),
smartContractErrorsFile = pathToResource("configs/smart-contract-errors.toml")
@@ -465,23 +446,10 @@ class CoordinatorConfigTest {
)
),
traces = tracesConfig.copy(
switchToLineaBesu = true,
blobCompressorVersion = BlobCompressorVersion.V1_0_1,
expectedTracesApiVersionV2 = "v0.8.0-rc8",
conflationV2 = tracesConfig.conflation.copy(
endpoints = listOf(URI("http://traces-node-v2:8545/").toURL()),
requestLimitPerEndpoint = 1U
),
countersV2 = TracesConfig.FunctionalityEndpoint(
listOf(
URI("http://traces-node-v2:8545/").toURL()
),
requestLimitPerEndpoint = 1U,
requestRetry = RequestRetryConfigTomlFriendly(
backoffDelay = Duration.parse("PT1S"),
failuresWarningThreshold = 2
)
)
conflationV2 = tracesConfig.conflationV2,
countersV2 = tracesConfig.countersV2
),
proversConfig = proversConfig.copy(
proverA = proversConfig.proverA.copy(

View File

@@ -1,48 +0,0 @@
package net.consensys.zkevm.coordinator.app.config
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracingModuleV1
val expectedTracesCountersV1 = TracesCountersV1(
mapOf(
// EVM Arithmetization Limits
TracingModuleV1.ADD to 1U,
TracingModuleV1.BIN to 2U,
TracingModuleV1.BIN_RT to 3U,
TracingModuleV1.EC_DATA to 4U,
TracingModuleV1.EXT to 5U,
TracingModuleV1.HUB to 6U,
TracingModuleV1.INSTRUCTION_DECODER to 7U,
TracingModuleV1.MMIO to 8U,
TracingModuleV1.MMU to 9U,
TracingModuleV1.MMU_ID to 10U,
TracingModuleV1.MOD to 11U,
TracingModuleV1.MUL to 12U,
TracingModuleV1.MXP to 13U,
TracingModuleV1.PHONEY_RLP to 14U,
TracingModuleV1.PUB_HASH to 15U,
TracingModuleV1.PUB_HASH_INFO to 16U,
TracingModuleV1.PUB_LOG to 17U,
TracingModuleV1.PUB_LOG_INFO to 18U,
TracingModuleV1.RLP to 19U,
TracingModuleV1.ROM to 20U,
TracingModuleV1.SHF to 21U,
TracingModuleV1.SHF_RT to 22U,
TracingModuleV1.TX_RLP to 23U,
TracingModuleV1.WCP to 24U,
// Block Limits
TracingModuleV1.BLOCK_TX to 25U,
TracingModuleV1.BLOCK_L2L1LOGS to 26U,
TracingModuleV1.BLOCK_KECCAK to 27U,
// Precompile Limits
TracingModuleV1.PRECOMPILE_ECRECOVER to 28U,
TracingModuleV1.PRECOMPILE_SHA2 to 29U,
TracingModuleV1.PRECOMPILE_RIPEMD to 30U,
TracingModuleV1.PRECOMPILE_IDENTITY to 31U,
TracingModuleV1.PRECOMPILE_MODEXP to 32U,
TracingModuleV1.PRECOMPILE_ECADD to 32U,
TracingModuleV1.PRECOMPILE_ECMUL to 34U,
TracingModuleV1.PRECOMPILE_ECPAIRING to 35U,
TracingModuleV1.PRECOMPILE_BLAKE2F to 36U
)
)

View File

@@ -1,180 +0,0 @@
package net.consensys.zkevm.coordinator.blockcreation
import io.vertx.core.Vertx
import net.consensys.linea.traces.TracesFiles
import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.RepeatedTest
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import org.junit.jupiter.api.io.TempDir
import java.io.FileNotFoundException
import java.nio.file.Files
import java.nio.file.Path
import java.util.concurrent.ExecutionException
import kotlin.io.path.createFile
import kotlin.time.Duration.Companion.milliseconds
class TracesFilesManagerTest {
private val tracesVersion = "0.0.1"
private val tracesFileExtension = "json.gz"
private lateinit var vertx: Vertx
private lateinit var tracesDir: Path
private lateinit var nonCanonicalBlocksTracesDir: Path
private lateinit var tracesFilesManager: TracesFilesManager
private val block1Hash =
Bytes32.fromHexString("0x0000000000000000000000000000000000000000000000000000000000000001")
private val block2Hash1 =
Bytes32.fromHexString("0x00000000000000000000000000000000000000000000000000000000000000a1")
private val block2Hash2 =
Bytes32.fromHexString("0x00000000000000000000000000000000000000000000000000000000000000a2")
private lateinit var block1TracesFile: Path
private lateinit var block2TracesFile1: Path
private lateinit var block2TracesFile2: Path
private lateinit var block20TracesFile: Path
private lateinit var config: TracesFilesManager.Config
@BeforeEach
fun beforeEach(@TempDir tmpTestDir: Path) {
tracesDir = tmpTestDir.resolve("raw-traces")
nonCanonicalBlocksTracesDir = tmpTestDir.resolve("non-canonical-raw-traces")
Files.createDirectories(tracesDir)
val block1TracesFileName = TracesFiles.rawTracesFileNameSupplierV1(
1UL,
block1Hash,
tracesVersion,
tracesFileExtension
)
val block2TracesFile1Name = TracesFiles.rawTracesFileNameSupplierV1(
2UL,
block2Hash1,
tracesVersion,
tracesFileExtension
)
val block2TracesFile2Name = TracesFiles.rawTracesFileNameSupplierV1(
2UL,
block2Hash2,
tracesVersion,
tracesFileExtension
)
val block20TracesFileName = TracesFiles.rawTracesFileNameSupplierV1(
20UL,
block2Hash1,
tracesVersion,
tracesFileExtension
)
block1TracesFile =
tracesDir.resolve(Path.of(block1TracesFileName))
block2TracesFile1 =
tracesDir.resolve(Path.of(block2TracesFile1Name))
block2TracesFile2 =
tracesDir.resolve(Path.of(block2TracesFile2Name))
block20TracesFile =
tracesDir.resolve(Path.of(block20TracesFileName))
vertx = Vertx.vertx()
config =
TracesFilesManager.Config(
tracesDir,
nonCanonicalBlocksTracesDir,
pollingInterval = 10.milliseconds,
tracesGenerationTimeout = 200.milliseconds,
tracesFileExtension = tracesFileExtension,
tracesEngineVersion = tracesVersion,
createNonCanonicalTracesDirIfDoesNotExist = true
)
tracesFilesManager = TracesFilesManager(vertx, config, TracesFiles::rawTracesFileNameSupplierV1)
}
@Test
fun `waitRawTracesGenerationOf waits until traces file is found`() {
val inprogressFile = tracesDir
.resolve(Path.of("1-${block1Hash.toHexString()}.inprogress"))
.createFile()
assertThat(inprogressFile).exists()
val future = tracesFilesManager.waitRawTracesGenerationOf(1uL, block1Hash)
vertx.setTimer(config.tracesGenerationTimeout.inWholeMilliseconds / 2) {
Files.createFile(block1TracesFile)
}
assertThat(future.get()).endsWith(block1TracesFile.toString())
}
@RepeatedTest(10)
fun `waitRawTracesGenerationOf returns error after timeout`() {
val future = tracesFilesManager.waitRawTracesGenerationOf(2uL, block2Hash1)
val exception = assertThrows<ExecutionException> { future.get() }
assertThat(exception.cause).isInstanceOf(FileNotFoundException::class.java)
assertThat(exception.message)
.matches(".* File matching '2-$block2Hash1.* not found .*")
}
@Test
fun `cleanNonCanonicalSiblingsByHeight returns error when file to keep is not found`() {
val future = tracesFilesManager.cleanNonCanonicalSiblingsByHeight(1uL, block1Hash)
assertThat(future.get()).isEmpty()
}
@Test
fun `cleanNonCanonicalSiblingsByHeight removes found siblings`() {
Files.createFile(block2TracesFile1)
Files.createFile(block2TracesFile2)
Files.createFile(block20TracesFile)
assertThat(Files.exists(block2TracesFile1)).isTrue()
assertThat(Files.exists(block2TracesFile2)).isTrue()
assertThat(Files.exists(block20TracesFile)).isTrue()
tracesFilesManager.cleanNonCanonicalSiblingsByHeight(2uL, block2Hash1).get()
assertThat(block2TracesFile1).exists()
assertThat(block2TracesFile2).doesNotExist()
assertThat(block20TracesFile).exists()
}
@Test
fun `initialization fails when nonCanonicalTracesDir doesn't exist and creation is disabled`() {
val configWithoutDirCreation = config.copy(
createNonCanonicalTracesDirIfDoesNotExist = false
)
Files.delete(nonCanonicalBlocksTracesDir)
assertThrows<FileNotFoundException> {
TracesFilesManager(vertx, configWithoutDirCreation)
}
}
@Test
fun `initialization creates nonCanonicalTracesDir when it doesn't exist and creation is enabled`() {
Files.delete(nonCanonicalBlocksTracesDir)
TracesFilesManager(vertx, config)
assertThat(nonCanonicalBlocksTracesDir).exists()
}
@Test
fun `cleanNonCanonicalSiblingsByHeight moves files to nonCanonicalTracesDir`() {
Files.createFile(block2TracesFile1)
Files.createFile(block2TracesFile2)
tracesFilesManager.cleanNonCanonicalSiblingsByHeight(2uL, block2Hash1).get()
val movedFile = nonCanonicalBlocksTracesDir.resolve(block2TracesFile2.fileName)
assertThat(movedFile).exists()
assertThat(block2TracesFile2).doesNotExist()
}
@Test
fun `waitRawTracesGenerationOf handles extremely short polling interval`() {
val configWithShortPolling = config.copy(pollingInterval = 1.milliseconds)
val manager = TracesFilesManager(vertx, configWithShortPolling)
val future = manager.waitRawTracesGenerationOf(1uL, block1Hash)
vertx.setTimer(50) { Files.createFile(block1TracesFile) }
assertThat(future.get()).endsWith(block1TracesFile.toString())
}
}

View File

@@ -10,16 +10,15 @@ fs-requests-directory = "/data/prover/v3/aggregation/requests"
fs-responses-directory = "/data/prover/v3/aggregation/responses"
[traces]
switch-to-linea-besu=true
blob-compressor-version="V1_0_1"
expected-traces-api-version-v2="v0.8.0-rc8"
[traces.counters-v2]
endpoints=["http://traces-node-v2:8545/"]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.conflation-v2]
endpoints=["http://traces-node-v2:8545/"]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2

View File

@@ -38,30 +38,20 @@ handler-polling-interval="PT1S"
batches-limit=1
[traces]
switch-to-linea-besu=false
blob-compressor-version="V0_1_0"
raw-execution-traces-version="0.2.0"
expected-traces-api-version="0.2.0"
[traces.counters]
endpoints=["http://traces-api:8080/"]
request-limit-per-endpoint=2
expected-traces-api-version-v2="v0.8.0-rc8"
[traces.counters-v2]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.conflation]
endpoints=["http://traces-api:8080/"]
request-limit-per-endpoint=2
[traces.conflation-v2]
endpoints=["http://traces-node:8545/"]
request-limit-per-endpoint=1
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.file-manager]
traces-file-extension="json.gz"
raw-traces-directory="/data/traces/raw"
non-canonical-raw-traces-directory="/data/traces/raw-non-canonical"
create-non-canonical-directory=true
polling-interval="PT1S"
traces-file-creation-wait-timeout="PT2M"
[state-manager]
version="2.3.0"
endpoints=["http://shomei:8888/"]

View File

@@ -1,40 +0,0 @@
[traces-limits]
# EVM Arithmetization Limits
ADD = 1
BIN = 2
BIN_RT = 3
EC_DATA = 4
EXT = 5
HUB = 6
INSTRUCTION_DECODER = 7
MMIO = 8
MMU = 9
MMU_ID = 10
MOD = 11
MUL = 12
MXP = 13
PHONEY_RLP = 14
PUB_HASH = 15
PUB_HASH_INFO = 16
PUB_LOG = 17
PUB_LOG_INFO = 18
RLP = 19
ROM = 20
SHF = 21
SHF_RT = 22
TX_RLP = 23
WCP = 24
# Block Limits
BLOCK_TX = 25
BLOCK_L2L1LOGS = 26
BLOCK_KECCAK = 27
# Precompile Limits
PRECOMPILE_ECRECOVER = 28
PRECOMPILE_SHA2 = 29
PRECOMPILE_RIPEMD = 30
PRECOMPILE_IDENTITY = 31
PRECOMPILE_MODEXP = 32
PRECOMPILE_ECADD = 32
PRECOMPILE_ECMUL = 34
PRECOMPILE_ECPAIRING = 35
PRECOMPILE_BLAKE2F = 36

View File

@@ -53,8 +53,9 @@ class BlobCompressionProofJsonResponseTest {
}
companion object {
private const val testdataPath1 = "../../../../testdata/prover/blob-compression/responses"
private const val testdataPath2 = "../../../../testdata/prover-v2/prover-compression/responses/"
private const val testdataPath1 =
"../../../../testdata/coordinator/prover/v3/submissionAndFinalization/compression/responses"
private const val testdataPath2 = "../../../../testdata/coordinator/prover/v3/stateRecovery/compression/responses"
private fun testFiles(): Array<File> {
val testFiles1 = File(testdataPath1).listFiles()!!

View File

@@ -8,9 +8,7 @@ import io.vertx.core.json.JsonObject
import net.consensys.linea.errors.ErrorResponse
import net.consensys.linea.jsonrpc.JsonRpcErrorResponse
import net.consensys.linea.jsonrpc.JsonRpcSuccessResponse
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.TracingModuleV1
import net.consensys.linea.traces.TracingModuleV2
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
@@ -18,16 +16,6 @@ import org.apache.logging.log4j.Logger
object TracesClientResponsesParser {
private val log: Logger = LogManager.getLogger(this::class.java)
internal fun mapErrorResponseV1(
jsonRpcErrorResponse: JsonRpcErrorResponse
): ErrorResponse<TracesServiceErrorType> {
val errorType: TracesServiceErrorType = runCatching {
TracesServiceErrorType.valueOf(jsonRpcErrorResponse.error.message.substringBefore(':'))
}.getOrElse { TracesServiceErrorType.UNKNOWN_ERROR }
return ErrorResponse(errorType, jsonRpcErrorResponse.error.message)
}
internal fun mapErrorResponseV2(
jsonRpcErrorResponse: JsonRpcErrorResponse
): ErrorResponse<TracesServiceErrorType> {
@@ -38,17 +26,6 @@ object TracesClientResponsesParser {
return ErrorResponse(errorType, jsonRpcErrorResponse.error.message)
}
internal fun parseTracesCounterResponseV1(
jsonRpcResponse: JsonRpcSuccessResponse
): GetTracesCountersResponse {
val result = jsonRpcResponse.result as JsonObject
return GetTracesCountersResponse(
result.getJsonObject("tracesCounters").let { parseTracesCountersV1(it) },
result.getString("tracesEngineVersion")
)
}
internal fun parseTracesCounterResponseV2(
jsonRpcResponse: JsonRpcSuccessResponse
): GetTracesCountersResponse {
@@ -60,38 +37,6 @@ object TracesClientResponsesParser {
)
}
internal fun parseTracesCountersV1(tracesCounters: JsonObject): TracesCountersV1 {
val expectedModules = TracingModuleV1.entries.map { it.name }.toSet()
val evmModulesInResponse = tracesCounters.map.keys.toSet()
val modulesMissing = expectedModules - evmModulesInResponse
val unExpectedModules = evmModulesInResponse - expectedModules
val error =
if (modulesMissing.isNotEmpty()) {
"Traces counters response is missing modules: ${modulesMissing.joinToString(",")}"
} else if (unExpectedModules.isNotEmpty()) {
"Traces counters has unsupported modules: ${unExpectedModules.joinToString(",")}"
} else {
null
}
if (error != null) {
log.error(error)
throw IllegalStateException(error)
}
val traces = TracingModuleV1.entries.associateWith { traceModule ->
val counterValue = tracesCounters.getString(traceModule.name)
runCatching { counterValue.toUInt() }
.onFailure {
log.error(
"Failed to parse Evm module ${traceModule.name}='$counterValue' to UInt. errorMessage={}",
it.message,
it
)
}
.getOrThrow()
}
return TracesCountersV1(traces)
}
internal fun parseTracesCountersV2(tracesCounters: JsonObject): TracesCountersV2 {
val expectedModules = TracingModuleV2.entries.map { it.name }.toSet()
val evmModulesInResponse = tracesCounters.map.keys.toSet()

View File

@@ -1,116 +0,0 @@
package net.consensys.zkevm.coordinator.clients
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.mapEither
import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject
import linea.domain.BlockNumberAndHash
import linea.kotlin.encodeHex
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.errors.ErrorResponse
import net.consensys.linea.jsonrpc.JsonRpcRequestMapParams
import net.consensys.linea.jsonrpc.client.JsonRpcClient
import net.consensys.linea.jsonrpc.client.JsonRpcRequestRetryer
import net.consensys.linea.jsonrpc.client.RequestRetryConfig
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.util.concurrent.atomic.AtomicInteger
class TracesGeneratorJsonRpcClientV1(
private val rpcClient: JsonRpcClient,
private val config: Config
) :
TracesCountersClientV1, TracesConflationClientV1 {
constructor(
vertx: Vertx,
rpcClient: JsonRpcClient,
config: Config,
retryConfig: RequestRetryConfig,
log: Logger = LogManager.getLogger(TracesGeneratorJsonRpcClientV1::class.java)
) : this(
JsonRpcRequestRetryer(
vertx,
rpcClient,
config = JsonRpcRequestRetryer.Config(
methodsToRetry = retryableMethods,
requestRetry = retryConfig
),
log = log
),
config
)
data class Config(
val rawExecutionTracesVersion: String,
val expectedTracesApiVersion: String
)
private var id = AtomicInteger(0)
override fun rollupGetTracesCounters(
block: BlockNumberAndHash
): SafeFuture<Result<GetTracesCountersResponse, ErrorResponse<TracesServiceErrorType>>> {
val jsonRequest =
JsonRpcRequestMapParams(
"2.0",
id.incrementAndGet(),
"rollup_getBlockTracesCountersV1",
mapOf(
"block" to mapOf(
"blockNumber" to block.number.toString(),
"blockHash" to block.hash.encodeHex()
),
"rawExecutionTracesVersion" to config.rawExecutionTracesVersion,
"expectedTracesApiVersion" to config.expectedTracesApiVersion
)
)
return rpcClient.makeRequest(jsonRequest).toSafeFuture()
.thenApply { responseResult ->
responseResult.mapEither(
TracesClientResponsesParser::parseTracesCounterResponseV1,
TracesClientResponsesParser::mapErrorResponseV1
)
}
}
override fun rollupGenerateConflatedTracesToFile(
blocks: List<BlockNumberAndHash>
): SafeFuture<Result<GenerateTracesResponse, ErrorResponse<TracesServiceErrorType>>> {
// TODO: validate list of blocks
// 1 - does not have repeated/duplicated pairs
// 2 - blocks numbers are consecutive
val jsonRequest =
JsonRpcRequestMapParams(
"2.0",
id.incrementAndGet(),
"rollup_generateConflatedTracesToFileV1",
mapOf(
"blocks" to blocks.map { block ->
JsonObject.of(
"blockNumber",
block.number.toString(),
"blockHash",
block.hash.encodeHex()
)
},
"rawExecutionTracesVersion" to config.rawExecutionTracesVersion,
"expectedTracesApiVersion" to config.expectedTracesApiVersion
)
)
return rpcClient.makeRequest(jsonRequest).toSafeFuture()
.thenApply { responseResult ->
responseResult.mapEither(
TracesClientResponsesParser::parseConflatedTracesToFileResponse,
TracesClientResponsesParser::mapErrorResponseV1
)
}
}
companion object {
internal val retryableMethods = setOf("rollup_getBlockTracesCountersV1", "rollup_generateConflatedTracesToFileV1")
}
}

View File

@@ -1,454 +0,0 @@
package net.consensys.zkevm.coordinator.clients
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock.aResponse
import com.github.tomakehurst.wiremock.client.WireMock.containing
import com.github.tomakehurst.wiremock.client.WireMock.equalTo
import com.github.tomakehurst.wiremock.client.WireMock.equalToJson
import com.github.tomakehurst.wiremock.client.WireMock.ok
import com.github.tomakehurst.wiremock.client.WireMock.post
import com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor
import com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo
import com.github.tomakehurst.wiremock.core.WireMockConfiguration.options
import com.github.tomakehurst.wiremock.stubbing.Scenario.STARTED
import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject
import io.vertx.junit5.VertxExtension
import linea.domain.BlockNumberAndHash
import linea.kotlin.ByteArrayExt
import linea.kotlin.encodeHex
import net.consensys.linea.async.get
import net.consensys.linea.errors.ErrorResponse
import net.consensys.linea.jsonrpc.client.JsonRpcClient
import net.consensys.linea.jsonrpc.client.RequestRetryConfig
import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracingModuleV1
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import org.junit.jupiter.api.extension.ExtendWith
import java.net.URI
import java.net.URL
import java.util.concurrent.ExecutionException
import kotlin.collections.set
import kotlin.random.Random
import kotlin.random.nextUInt
import kotlin.time.Duration.Companion.milliseconds
import kotlin.time.Duration.Companion.seconds
@ExtendWith(VertxExtension::class)
class TracesGeneratorJsonRpcClientV1Test {
private lateinit var wiremock: WireMockServer
private lateinit var tracesGeneratorClient: TracesGeneratorJsonRpcClientV1
private lateinit var meterRegistry: SimpleMeterRegistry
private val blockL1Size = 101U
private val tracesCountersValid: Map<String, Long> =
TracingModuleV1.entries
.fold(mutableMapOf()) { acc: MutableMap<String, Long>,
evmModule: TracingModuleV1 ->
acc[evmModule.name] = Random.nextUInt(0u, UInt.MAX_VALUE).toLong()
acc
}
.also {
// add edge case of max UInt
it[TracingModuleV1.EXT.name] = UInt.MAX_VALUE.toLong()
}
private lateinit var fakeTracesServerUri: URL
private lateinit var vertxHttpJsonRpcClient: JsonRpcClient
private val expectedTracesApiVersion = "2.3.4"
private val rawExecutionTracesVersion = "9.8.7"
@BeforeEach
fun setup(vertx: Vertx) {
wiremock = WireMockServer(options().dynamicPort())
wiremock.start()
fakeTracesServerUri = URI("http://127.0.0.1:" + wiremock.port()).toURL()
meterRegistry = SimpleMeterRegistry()
val metricsFacade: MetricsFacade = MicrometerMetricsFacade(registry = meterRegistry, "linea")
val rpcClientFactory = VertxHttpJsonRpcClientFactory(vertx, metricsFacade)
vertxHttpJsonRpcClient = rpcClientFactory.createWithRetries(
fakeTracesServerUri,
methodsToRetry = TracesGeneratorJsonRpcClientV1.retryableMethods,
retryConfig = RequestRetryConfig(
maxRetries = 2u,
timeout = 10.seconds,
backoffDelay = 10.milliseconds,
failuresWarningThreshold = 1u
)
)
tracesGeneratorClient = TracesGeneratorJsonRpcClientV1(
vertxHttpJsonRpcClient,
TracesGeneratorJsonRpcClientV1.Config(
rawExecutionTracesVersion = rawExecutionTracesVersion,
expectedTracesApiVersion = expectedTracesApiVersion
)
)
}
@AfterEach
fun tearDown(vertx: Vertx) {
val vertxStopFuture = vertx.close()
wiremock.stop()
vertxStopFuture.get()
}
private fun successTracesCountersResponse(tracesEngineVersion: String = "0.0.1"): JsonObject {
return JsonObject.of(
"jsonrpc",
"2.0",
"id",
"1",
"result",
mapOf(
"blockL1Size" to blockL1Size.toString(),
"tracesEngineVersion" to tracesEngineVersion,
"tracesCounters" to tracesCountersValid
)
)
}
private fun jsonRpcErrorResponse(errorMessage: String): JsonObject {
return JsonObject.of(
"jsonrpc",
"2.0",
"id",
"1",
"error",
mapOf("code" to "1", "message" to errorMessage)
)
}
@Test
fun getTracesCounters_allEvmModulesOk() {
val tracesEngineVersion = "0.0.1"
val response = successTracesCountersResponse(tracesEngineVersion)
wiremock.stubFor(
post("/")
.withHeader("Content-Type", containing("application/json"))
.willReturn(
ok().withHeader("Content-type", "application/json").withBody(response.toString())
)
)
val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
resultFuture.get()
assertThat(resultFuture)
.isCompletedWithValue(
Ok(
GetTracesCountersResponse(
TracesCountersV1(
tracesCountersValid
.mapKeys { TracingModuleV1.valueOf(it.key) }
.mapValues { it.value.toUInt() }
),
tracesEngineVersion
)
)
)
val expectedJsonRequest = JsonObject.of(
"jsonrpc",
"2.0",
"id",
1,
"method",
"rollup_getBlockTracesCountersV1",
"params",
mapOf(
"block" to mapOf(
"blockNumber" to "1",
"blockHash" to blockIdAndHash.hash.encodeHex()
),
"rawExecutionTracesVersion" to rawExecutionTracesVersion,
"expectedTracesApiVersion" to expectedTracesApiVersion
)
)
wiremock.verify(
postRequestedFor(urlEqualTo("/"))
.withHeader("Content-Type", equalTo("application/json"))
.withRequestBody(equalToJson(expectedJsonRequest.toString(), false, true))
)
}
@Test
fun `getTracesCounters when response misses EVM module returns error`() {
val tracesCountersMissingModule =
tracesCountersValid.toMutableMap().apply { this.remove(TracingModuleV1.WCP.name) }
val tracesEngineVersion = "0.0.1"
val response =
JsonObject.of(
"jsonrpc",
"2.0",
"id",
"1",
"result",
mapOf(
"blockL1Size" to blockL1Size.toString(),
"tracesEngineVersion" to tracesEngineVersion,
"tracesCounters" to tracesCountersMissingModule
)
)
wiremock.stubFor(
post("/")
.withHeader("Content-Type", containing("application/json"))
.willReturn(
ok().withHeader("Content-type", "application/json").withBody(response.toString())
)
)
val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
val exception = assertThrows<ExecutionException> { resultFuture.get() }
assertThat(exception.message).contains("missing modules: WCP")
}
@Test
fun `getTracesCounters when response has unrecognized evm module returns error`() {
val tracesCountersMissingModule =
tracesCountersValid.toMutableMap().apply { this["NEW_EVM_MODULE"] = 100 }
val tracesEngineVersion = "0.0.1"
val response =
JsonObject.of(
"jsonrpc",
"2.0",
"id",
"1",
"result",
mapOf(
"blockL1Size" to blockL1Size.toString(),
"tracesEngineVersion" to tracesEngineVersion,
"tracesCounters" to tracesCountersMissingModule
)
)
wiremock.stubFor(
post("/")
.withHeader("Content-Type", containing("application/json"))
.willReturn(
ok().withHeader("Content-type", "application/json").withBody(response.toString())
)
)
val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
val exception = assertThrows<ExecutionException> { resultFuture.get() }
assertThat(exception.message).contains("unsupported modules: NEW_EVM_MODULE")
}
@Test
fun generateConflatedTracesToFile() {
  val firstBlock = 50L
  val lastBlock = 100L
  val engineVersion = "0.0.1"
  // File name format the traces service reports back: "<start>-<end>.conflated.v<version>.json.gz"
  val expectedFileName =
    "$firstBlock-$lastBlock.conflated.v$engineVersion.json.gz"
  val rpcResponse = JsonObject.of(
    "jsonrpc",
    "2.0",
    "id",
    "1",
    "result",
    mapOf(
      "tracesEngineVersion" to engineVersion,
      "conflatedTracesFileName" to expectedFileName
    )
  )
  wiremock.stubFor(
    post("/")
      .withHeader("Content-Type", containing("application/json"))
      .willReturn(
        ok()
          .withHeader("Content-type", "application/json")
          .withBody(rpcResponse.toString().toByteArray())
      )
  )

  val requestedBlocks = listOf(
    BlockNumberAndHash(1U, ByteArrayExt.random32()),
    BlockNumberAndHash(2U, ByteArrayExt.random32()),
    BlockNumberAndHash(3U, ByteArrayExt.random32())
  )
  val future = tracesGeneratorClient.rollupGenerateConflatedTracesToFile(requestedBlocks)
  future.get()

  // Successful responses are unwrapped into an Ok(GenerateTracesResponse(...)).
  assertThat(future)
    .isCompletedWithValue(
      Ok(GenerateTracesResponse(expectedFileName, engineVersion))
    )

  // Verify the outgoing JSON-RPC request carries every block's number/hash pair
  // plus the traces version negotiation fields.
  val expectedRequest = JsonObject.of(
    "jsonrpc",
    "2.0",
    "id",
    1,
    "method",
    "rollup_generateConflatedTracesToFileV1",
    "params",
    JsonObject.of(
      "blocks",
      requestedBlocks.map { block ->
        JsonObject.of(
          "blockNumber",
          block.number.toString(),
          "blockHash",
          block.hash.encodeHex()
        )
      },
      "rawExecutionTracesVersion",
      rawExecutionTracesVersion,
      "expectedTracesApiVersion",
      expectedTracesApiVersion
    )
  )
  wiremock.verify(
    postRequestedFor(urlEqualTo("/"))
      .withHeader("Content-Type", equalTo("application/json"))
      .withRequestBody(equalToJson(expectedRequest.toString(), false, true))
  )
}
@Test
fun error_getTracesCounter() {
  // A well-formed JSON-RPC error must be mapped to a typed Err, not an exception.
  val errorMessage = "BLOCK_MISSING_IN_CHAIN: Block 1 doesn't exist in the chain"
  wiremock.stubFor(
    post("/")
      .withHeader("Content-Type", containing("application/json"))
      .willReturn(
        ok()
          .withHeader("Content-type", "application/json")
          .withBody(jsonRpcErrorResponse(errorMessage).toString().toByteArray())
      )
  )

  val requestedBlock = BlockNumberAndHash(1U, ByteArrayExt.random32())
  val future = tracesGeneratorClient.rollupGetTracesCounters(requestedBlock)
  future.get()
  assertThat(future)
    .isCompletedWithValue(
      Err(ErrorResponse(TracesServiceErrorType.BLOCK_MISSING_IN_CHAIN, errorMessage))
    )
}
@Test
fun error_generateConflatedTracesToFile() {
  // A well-formed JSON-RPC error must be mapped to a typed Err, not an exception.
  val errorMessage = "BLOCK_RANGE_TOO_LARGE: Block range between 50 and 100 is too large"
  wiremock.stubFor(
    post("/")
      .withHeader("Content-Type", containing("application/json"))
      .willReturn(
        ok()
          .withHeader("Content-type", "application/json")
          .withBody(jsonRpcErrorResponse(errorMessage).toString().toByteArray())
      )
  )

  val requestedBlocks = listOf(
    BlockNumberAndHash(1U, ByteArrayExt.random32()),
    BlockNumberAndHash(2U, ByteArrayExt.random32()),
    BlockNumberAndHash(3U, ByteArrayExt.random32())
  )
  val future = tracesGeneratorClient.rollupGenerateConflatedTracesToFile(requestedBlocks)
  future.get()
  assertThat(future)
    .isCompletedWithValue(
      Err(ErrorResponse(TracesServiceErrorType.BLOCK_RANGE_TOO_LARGE, errorMessage))
    )
}
@Test
fun error_generateConflatedTracesToFile_retriesRequest() {
// Exercises the client's retry behaviour: the server fails twice
// (HTTP 500, then a JSON-RPC error) before finally succeeding.
// Uses a WireMock stateful scenario so each request advances the state.
val tracesEngineVersion = "0.0.1"
val errorMessage = "BLOCK_MISSING_IN_CHAIN: Block 1 doesn't exist in the chain"
val jsonRpcErrorResponse = jsonRpcErrorResponse(errorMessage)
// Attempt 1: transport-level failure (HTTP 500) -> should be retried.
wiremock.stubFor(
post("/")
.inScenario("retry")
.whenScenarioStateIs(STARTED)
.willSetStateTo("first failure")
.withHeader("Content-Type", containing("application/json"))
.willReturn(
aResponse()
.withStatus(500)
.withBody("Internal Server Error")
)
)
// Attempt 2: HTTP 200 carrying a JSON-RPC error body -> should also be retried.
wiremock.stubFor(
post("/")
.inScenario("retry")
.whenScenarioStateIs("first failure")
.willSetStateTo("second failure")
.withHeader("Content-Type", containing("application/json"))
.willReturn(
aResponse()
.withStatus(200)
.withBody(jsonRpcErrorResponse.toString())
)
)
// Attempt 3: a valid success response terminating the retry loop.
wiremock.stubFor(
post("/")
.inScenario("retry")
.whenScenarioStateIs("second failure")
.willSetStateTo("success")
.withHeader("Content-Type", containing("application/json"))
.willReturn(
ok()
.withHeader("Content-type", "application/json")
.withBody(
JsonObject.of(
"jsonrpc",
"2.0",
"id",
"1",
"result",
mapOf(
"tracesEngineVersion" to tracesEngineVersion,
"conflatedTracesFileName" to "conflated-traces-1-3.json"
)
).toString()
)
)
)
// Re-create the client so the request under test starts with fresh retry state.
tracesGeneratorClient = TracesGeneratorJsonRpcClientV1(
vertxHttpJsonRpcClient,
TracesGeneratorJsonRpcClientV1.Config(
rawExecutionTracesVersion = rawExecutionTracesVersion,
expectedTracesApiVersion = expectedTracesApiVersion
)
)
val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGenerateConflatedTracesToFile(listOf(blockIdAndHash))
// After the two retries the third attempt succeeds, so the result is Ok.
assertThat(resultFuture.get()).isInstanceOf(Ok::class.java)
}
}

View File

@@ -1,7 +1,6 @@
package net.consensys.zkevm.coordinator.clients
import com.github.michaelbull.result.Result
import linea.domain.BlockNumberAndHash
import net.consensys.linea.errors.ErrorResponse
import net.consensys.linea.traces.TracesCounters
import tech.pegasys.teku.infrastructure.async.SafeFuture
@@ -17,24 +16,12 @@ data class GetTracesCountersResponse(val tracesCounters: TracesCounters, val tra
data class GenerateTracesResponse(val tracesFileName: String, val tracesEngineVersion: String)
interface TracesCountersClientV1 {
fun rollupGetTracesCounters(
block: BlockNumberAndHash
): SafeFuture<Result<GetTracesCountersResponse, ErrorResponse<TracesServiceErrorType>>>
}
interface TracesCountersClientV2 {
fun getTracesCounters(
blockNumber: ULong
): SafeFuture<Result<GetTracesCountersResponse, ErrorResponse<TracesServiceErrorType>>>
}
interface TracesConflationClientV1 {
fun rollupGenerateConflatedTracesToFile(
blocks: List<BlockNumberAndHash>
): SafeFuture<Result<GenerateTracesResponse, ErrorResponse<TracesServiceErrorType>>>
}
interface TracesConflationClientV2 {
fun generateConflatedTracesToFile(
startBlockNumber: ULong,

View File

@@ -8,7 +8,7 @@ import linea.domain.Block
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.errors.ErrorResponse
import net.consensys.zkevm.coordinator.clients.GetTracesCountersResponse
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV1
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV2
import net.consensys.zkevm.coordinator.clients.TracesServiceErrorType
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.encoding.BlockEncoder
@@ -21,7 +21,7 @@ import java.util.concurrent.Callable
class BlockToBatchSubmissionCoordinator(
private val conflationService: ConflationService,
private val tracesCountersClient: TracesCountersClientV1,
private val tracesCountersClient: TracesCountersClientV2,
private val vertx: Vertx,
private val encoder: BlockEncoder,
private val log: Logger = LogManager.getLogger(BlockToBatchSubmissionCoordinator::class.java)
@@ -30,7 +30,7 @@ class BlockToBatchSubmissionCoordinator(
block: Block
): SafeFuture<GetTracesCountersResponse> {
return tracesCountersClient
.rollupGetTracesCounters(block.numberAndHash)
.getTracesCounters(block.number)
.thenCompose { result ->
when (result) {
is Err<ErrorResponse<TracesServiceErrorType>> -> {

View File

@@ -1,8 +1,13 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.getOrElse
import com.github.michaelbull.result.getOrThrow
import com.github.michaelbull.result.runCatching
import io.vertx.core.Vertx
import linea.domain.Block
import net.consensys.linea.async.AsyncRetryer
import net.consensys.zkevm.domain.BlocksConflation
import net.consensys.zkevm.ethereum.coordination.proofcreation.BatchProofHandler
@@ -60,38 +65,62 @@ class ProofGeneratingConflationHandlerImpl(
}
private fun conflationToProofCreation(conflation: BlocksConflation): SafeFuture<*> {
val blockNumbersAndHash = conflation.blocks.map { it.numberAndHash }
val blockIntervalString = conflation.conflationResult.intervalString()
return tracesProductionCoordinator
.conflateExecutionTraces(blockNumbersAndHash)
.whenException { th ->
log.debug(
"traces conflation failed: batch={} errorMessage={}",
conflation.conflationResult.intervalString(),
th.message,
th
)
}
.thenCompose { blocksTracesConflated: BlocksTracesConflated ->
log.debug(
"requesting execution proof: batch={} tracesFile={}",
blockIntervalString,
blocksTracesConflated.tracesResponse.tracesFileName
)
zkProofProductionCoordinator
.createZkProof(conflation, blocksTracesConflated)
.thenPeek {
log.info("execution proof generated: batch={}", blockIntervalString)
}
return assertConsecutiveBlocksRange(conflation.blocks)
.getOrThrow().let { blocksRange ->
tracesProductionCoordinator
.conflateExecutionTraces(blocksRange)
.whenException { th ->
log.debug(
"execution proof failure: batch={} errorMessage={}",
blockIntervalString,
"traces conflation failed: batch={} errorMessage={}",
conflation.conflationResult.intervalString(),
th.message,
th
)
}
.thenCompose { blocksTracesConflated: BlocksTracesConflated ->
log.debug(
"requesting execution proof: batch={} tracesFile={}",
blockIntervalString,
blocksTracesConflated.tracesResponse.tracesFileName
)
zkProofProductionCoordinator
.createZkProof(conflation, blocksTracesConflated)
.thenPeek {
log.info("execution proof generated: batch={}", blockIntervalString)
}
.whenException { th ->
log.debug(
"execution proof failure: batch={} errorMessage={}",
blockIntervalString,
th.message,
th
)
}
}
.thenCompose { batchProofHandler.acceptNewBatch(it) }
}
.thenCompose { batchProofHandler.acceptNewBatch(it) }
}
}
/**
 * Validates that [blocks] covers a gap-free run of block numbers and converts it
 * to the equivalent [ULongRange].
 *
 * The input order is irrelevant: blocks are sorted by number before checking.
 * Returns an [Err] wrapping an [IllegalArgumentException] when the list is empty
 * or when any two neighbouring (sorted) blocks differ by more than one.
 */
internal fun assertConsecutiveBlocksRange(
blocks: List<Block>
): Result<ULongRange, IllegalArgumentException> {
if (blocks.isEmpty()) {
return Err(IllegalArgumentException("Empty list of blocks"))
}
// Single block: trivially consecutive, no sorting needed.
if (blocks.size == 1) {
return Ok(blocks.first().number..blocks.last().number)
}
val sortedByNumber = blocks.sortedBy { it.number }
// A gap exists if any adjacent pair of sorted block numbers differs by != 1.
val gapFound = sortedByNumber
.zipWithNext { a, b -> b.number - a.number }
.any { it != 1UL }
if (gapFound) {
return Err(IllegalArgumentException("Conflated blocks list has non consecutive blocks!"))
}
return Ok(sortedByNumber.first().number..sortedByNumber.last().number)
}

View File

@@ -1,7 +1,6 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import build.linea.clients.GetZkEVMStateMerkleProofResponse
import linea.domain.BlockNumberAndHash
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
import tech.pegasys.teku.infrastructure.async.SafeFuture
@@ -12,6 +11,6 @@ data class BlocksTracesConflated(
interface TracesConflationCoordinator {
fun conflateExecutionTraces(
blocks: List<BlockNumberAndHash>
blockRange: ULongRange
): SafeFuture<BlocksTracesConflated>
}

View File

@@ -3,32 +3,30 @@ package net.consensys.zkevm.ethereum.coordination.conflation
import build.linea.clients.GetStateMerkleProofRequest
import build.linea.clients.GetZkEVMStateMerkleProofResponse
import build.linea.clients.StateManagerClientV1
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.getOrElse
import com.github.michaelbull.result.map
import com.github.michaelbull.result.mapBoth
import linea.domain.BlockInterval
import linea.domain.BlockNumberAndHash
import net.consensys.linea.errors.ErrorResponse
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
import net.consensys.zkevm.coordinator.clients.TracesConflationClientV1
import net.consensys.zkevm.coordinator.clients.TracesConflationClientV2
import net.consensys.zkevm.coordinator.clients.TracesServiceErrorType
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
class TracesConflationCoordinatorImpl(
private val tracesConflationClient: TracesConflationClientV1,
private val tracesConflationClient: TracesConflationClientV2,
private val zkStateClient: StateManagerClientV1
) : TracesConflationCoordinator {
private val log: Logger = LogManager.getLogger(this::class.java)
private fun requestConflatedTraces(
blocks: List<BlockNumberAndHash>
blockRange: ULongRange
): SafeFuture<GenerateTracesResponse> {
return tracesConflationClient
.rollupGenerateConflatedTracesToFile(blocks)
.generateConflatedTracesToFile(
startBlockNumber = blockRange.first(),
endBlockNumber = blockRange.last()
)
.thenCompose { result: Result<GenerateTracesResponse, ErrorResponse<TracesServiceErrorType>>
->
result.mapBoth(
@@ -42,56 +40,23 @@ class TracesConflationCoordinatorImpl(
}
private fun requestStateMerkleProof(
startBlockNumber: ULong,
endBlockNumber: ULong
blockRange: ULongRange
): SafeFuture<GetZkEVMStateMerkleProofResponse> {
return zkStateClient.makeRequest(GetStateMerkleProofRequest(BlockInterval(startBlockNumber, endBlockNumber)))
return zkStateClient.makeRequest(
GetStateMerkleProofRequest(BlockInterval(blockRange.first(), blockRange.last()))
)
}
override fun conflateExecutionTraces(
blocks: List<BlockNumberAndHash>
blockRange: ULongRange
): SafeFuture<BlocksTracesConflated> {
return assertBlocksList(blocks).map { sortedByNumber ->
requestConflatedTraces(blocks).thenCompose { tracesConflationResult: GenerateTracesResponse ->
// these 2 requests can be done in parallel, but traces-api is much slower to respond so
// requesting stateManger after traces-API because
// and we want to avoid having stateManager heavy JSON responses in memory in the meantime
requestStateMerkleProof(
sortedByNumber.first().number,
sortedByNumber.last().number
).thenApply { zkStateUpdateResult: GetZkEVMStateMerkleProofResponse ->
BlocksTracesConflated(tracesConflationResult, zkStateUpdateResult)
}
return requestConflatedTraces(blockRange).thenCompose { tracesConflationResult: GenerateTracesResponse ->
// these 2 requests can be done in parallel, but traces-api is much slower to respond so
// requesting stateManger after traces-API because
// we want to avoid having stateManager heavy JSON responses in memory in the meantime
requestStateMerkleProof(blockRange).thenApply { zkStateUpdateResult: GetZkEVMStateMerkleProofResponse ->
BlocksTracesConflated(tracesConflationResult, zkStateUpdateResult)
}
}.getOrElse { SafeFuture.failedFuture(it) }
}
}
internal fun assertBlocksList(
blocks: List<BlockNumberAndHash>
): Result<List<BlockNumberAndHash>, IllegalArgumentException> {
if (blocks.isEmpty()) {
return Err(IllegalArgumentException("Empty list of blocs"))
}
if (blocks.size == 1) {
return Ok(blocks)
}
val sortedByNumber = blocks.sortedBy { it.number }
var prevBlockNumber = sortedByNumber.first().number
var gapFound = false
for (i in 1 until sortedByNumber.size) {
val block = sortedByNumber[i]
if (block.number != prevBlockNumber + 1u) {
gapFound = true
break
}
prevBlockNumber = block.number
}
if (gapFound) {
return Err(IllegalArgumentException("Conflated blocks list has non consecutive blocks!"))
}
return Ok(sortedByNumber)
}

View File

@@ -4,9 +4,9 @@ import com.github.michaelbull.result.Ok
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import linea.domain.createBlock
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.zkevm.coordinator.clients.GetTracesCountersResponse
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV1
import net.consensys.zkevm.coordinator.clients.TracesCountersClientV2
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockCreated
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
@@ -32,7 +32,7 @@ class BlockToBatchSubmissionCoordinatorTest {
private val randomBlock = createBlock(number = 100UL)
private val baseBlock = BlockCreated(randomBlock)
private val blockRlpEncoded = ByteArray(0)
private val tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
private val tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
}
private fun createBlockToBatchSubmissionCoordinator(
@@ -41,8 +41,8 @@ class BlockToBatchSubmissionCoordinatorTest {
log: Logger = LogManager.getLogger(this::class.java)
): BlockToBatchSubmissionCoordinator {
val tracesCountersClient =
mock<TracesCountersClientV1>().also {
whenever(it.rollupGetTracesCounters(randomBlock.numberAndHash))
mock<TracesCountersClientV2>().also {
whenever(it.getTracesCounters(randomBlock.number))
.thenReturn(SafeFuture.completedFuture(Ok(GetTracesCountersResponse(tracesCounters, ""))))
}
return BlockToBatchSubmissionCoordinator(

View File

@@ -1,8 +1,8 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationTrigger
import org.assertj.core.api.Assertions.assertThat
@@ -19,7 +19,7 @@ class ConflationCalculatorByBlockLimitTest {
@Test
fun `should accumulate blockCounting`() {
val counters = ConflationCounters.empty(TracesCountersV1.EMPTY_TRACES_COUNT)
val counters = ConflationCounters.empty(TracesCountersV2.EMPTY_TRACES_COUNT)
calculator.copyCountersTo(counters)
assertThat(counters.blockCount).isEqualTo(0u)
calculator.appendBlock(blockCounters(1))
@@ -48,7 +48,7 @@ class ConflationCalculatorByBlockLimitTest {
return BlockCounters(
blockNumber = blockNumber.toULong(),
blockTimestamp = Instant.parse("2021-01-01T00:00:00.000Z"),
tracesCounters = fakeTracesCountersV1(blockNumber.toUInt()),
tracesCounters = fakeTracesCountersV2(blockNumber.toUInt()),
blockRLPEncoded = ByteArray(0)
)
}

View File

@@ -1,7 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationTrigger
import net.consensys.zkevm.ethereum.coordination.blob.BlobCompressionException
@@ -196,7 +196,7 @@ class ConflationCalculatorByDataCompressedTest {
private fun blockCounters(rlpRawData: ByteArray = ByteArray(1)): BlockCounters = BlockCounters(
blockNumber = 0u,
blockTimestamp = Instant.parse("2021-01-01T00:00:00Z"),
tracesCounters = fakeTracesCountersV1(0u),
tracesCounters = fakeTracesCountersV2(0u),
blockRLPEncoded = rlpRawData
)
}

View File

@@ -4,9 +4,9 @@ import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import kotlinx.datetime.Instant
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
import net.consensys.linea.traces.TracesCounters
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracingModuleV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.TracingModuleV2
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationTrigger
import org.assertj.core.api.Assertions.assertThat
@@ -16,11 +16,11 @@ import org.junit.jupiter.api.Test
import org.mockito.kotlin.mock
class ConflationCalculatorByExecutionTracesTest {
private val tracesLimit = fakeTracesCountersV1(100u)
private val tracesLimit = fakeTracesCountersV2(100u)
private val testMeterRegistry = SimpleMeterRegistry()
private val calculator = ConflationCalculatorByExecutionTraces(
tracesLimit,
TracesCountersV1.EMPTY_TRACES_COUNT,
TracesCountersV2.EMPTY_TRACES_COUNT,
metricsFacade = MicrometerMetricsFacade(testMeterRegistry, "test")
)
private lateinit var conflationTriggerConsumer: ConflationTriggerConsumer
@@ -31,7 +31,7 @@ class ConflationCalculatorByExecutionTracesTest {
}
private fun assertCountersEqualTo(expectedTracesCounters: TracesCounters) {
val inflightCounters = ConflationCounters.empty(TracesCountersV1.EMPTY_TRACES_COUNT)
val inflightCounters = ConflationCounters.empty(TracesCountersV2.EMPTY_TRACES_COUNT)
calculator.copyCountersTo(inflightCounters)
assertThat(inflightCounters)
.isEqualTo(ConflationCounters(tracesCounters = expectedTracesCounters))
@@ -39,79 +39,79 @@ class ConflationCalculatorByExecutionTracesTest {
@Test
fun `appendBlock should accumulate counters`() {
calculator.appendBlock(blockCounters(fakeTracesCountersV1(10u)))
assertCountersEqualTo(fakeTracesCountersV1(10u))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(10u)))
assertCountersEqualTo(fakeTracesCountersV2(10u))
calculator.appendBlock(blockCounters(fakeTracesCountersV1(20u)))
assertCountersEqualTo(fakeTracesCountersV1(30u))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(20u)))
assertCountersEqualTo(fakeTracesCountersV2(30u))
calculator.appendBlock(blockCounters(fakeTracesCountersV1(40u)))
assertCountersEqualTo(fakeTracesCountersV1(70u))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(40u)))
assertCountersEqualTo(fakeTracesCountersV2(70u))
calculator.reset()
assertCountersEqualTo(fakeTracesCountersV1(0u))
assertCountersEqualTo(fakeTracesCountersV2(0u))
}
@Test
fun `appendBlock should throw if counter go over limit when accumulated`() {
calculator.appendBlock(blockCounters(fakeTracesCountersV1(10u)))
assertThatThrownBy { calculator.appendBlock(blockCounters(fakeTracesCountersV1(91u))) }
calculator.appendBlock(blockCounters(fakeTracesCountersV2(10u)))
assertThatThrownBy { calculator.appendBlock(blockCounters(fakeTracesCountersV2(91u))) }
.isInstanceOf(IllegalStateException::class.java)
// it should allow single oversized block
calculator.reset()
calculator.appendBlock(blockCounters(fakeTracesCountersV1(200u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(200u)))
}
@Test
fun `copyCountersTo`() {
val inflightConflationCounters = ConflationCounters.empty(TracesCountersV1.EMPTY_TRACES_COUNT)
calculator.appendBlock(blockCounters(fakeTracesCountersV1(10u)))
val inflightConflationCounters = ConflationCounters.empty(TracesCountersV2.EMPTY_TRACES_COUNT)
calculator.appendBlock(blockCounters(fakeTracesCountersV2(10u)))
calculator.copyCountersTo(inflightConflationCounters)
assertThat(inflightConflationCounters)
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV1(10u)))
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV2(10u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV1(20u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(20u)))
calculator.copyCountersTo(inflightConflationCounters)
assertThat(inflightConflationCounters)
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV1(30u)))
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV2(30u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV1(30u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(30u)))
calculator.copyCountersTo(inflightConflationCounters)
assertThat(inflightConflationCounters)
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV1(60u)))
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV2(60u)))
calculator.reset()
calculator.copyCountersTo(inflightConflationCounters)
assertThat(inflightConflationCounters)
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV1(0u)))
.isEqualTo(ConflationCounters(tracesCounters = fakeTracesCountersV2(0u)))
}
@Test
fun `checkOverflow should return trigger when block is oversized`() {
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV1(100u)))).isNull()
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV1(101u))))
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV2(100u)))).isNull()
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV2(101u))))
.isEqualTo(ConflationCalculator.OverflowTrigger(ConflationTrigger.TRACES_LIMIT, true))
}
@Test
fun `checkOverflow should return trigger accumulated traces overflow`() {
calculator.appendBlock(blockCounters(fakeTracesCountersV1(10u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV1(89u)))
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV1(1u)))).isNull()
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV1(2u))))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(10u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(89u)))
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV2(1u)))).isNull()
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV2(2u))))
.isEqualTo(ConflationCalculator.OverflowTrigger(ConflationTrigger.TRACES_LIMIT, false))
}
@Test
fun `module counters incremented when traces overflow`() {
val overflowingTraces = listOf(
TracingModuleV1.MMU,
TracingModuleV1.ADD,
TracingModuleV1.RLP
TracingModuleV2.MMU,
TracingModuleV2.ADD,
TracingModuleV2.RLP_TXN
)
val oversizedTraceCounters = TracesCountersV1(
TracingModuleV1.entries.associate {
val oversizedTraceCounters = TracesCountersV2(
TracingModuleV2.entries.associate {
if (overflowingTraces.contains(it)) {
it to 101u
} else {
@@ -120,16 +120,16 @@ class ConflationCalculatorByExecutionTracesTest {
}
)
TracingModuleV1.entries.forEach { module ->
TracingModuleV2.entries.forEach { module ->
val moduleOverflowCounter = testMeterRegistry.get("test.conflation.overflow.evm")
.tag("module", module.name).counter()
assertThat(moduleOverflowCounter.count()).isEqualTo(0.0)
}
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV1(100u)))).isNull()
assertThat(calculator.checkOverflow(blockCounters(fakeTracesCountersV2(100u)))).isNull()
assertThat(calculator.checkOverflow(blockCounters(oversizedTraceCounters)))
.isEqualTo(ConflationCalculator.OverflowTrigger(ConflationTrigger.TRACES_LIMIT, true))
TracingModuleV1.entries.forEach { module ->
TracingModuleV2.entries.forEach { module ->
val moduleOverflowCounter = testMeterRegistry.get("test.conflation.overflow.evm")
.tag("module", module.name).counter()
@@ -140,8 +140,8 @@ class ConflationCalculatorByExecutionTracesTest {
}
}
val overflowCounters = TracesCountersV1(
TracingModuleV1.entries.associate {
val overflowCounters = TracesCountersV2(
TracingModuleV2.entries.associate {
if (overflowingTraces.contains(it)) {
it to 99u
} else {
@@ -150,11 +150,11 @@ class ConflationCalculatorByExecutionTracesTest {
}
)
calculator.appendBlock(blockCounters(fakeTracesCountersV1(10u)))
calculator.appendBlock(blockCounters(fakeTracesCountersV2(10u)))
assertThat(calculator.checkOverflow(blockCounters(overflowCounters)))
.isEqualTo(ConflationCalculator.OverflowTrigger(ConflationTrigger.TRACES_LIMIT, false))
TracingModuleV1.entries.forEach { module ->
TracingModuleV2.entries.forEach { module ->
val moduleOverflowCounter = testMeterRegistry.get("test.conflation.overflow.evm")
.tag("module", module.name).counter()

View File

@@ -1,7 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationTrigger
import org.assertj.core.api.Assertions.assertThat
@@ -36,7 +36,7 @@ class ConflationCalculatorByTargetBlockNumbersTest {
return BlockCounters(
blockNumber = blockNumber,
blockTimestamp = Instant.parse("2021-01-01T00:00:00.000Z"),
tracesCounters = fakeTracesCountersV1(blockNumber.toUInt()),
tracesCounters = fakeTracesCountersV2(blockNumber.toUInt()),
blockRLPEncoded = ByteArray(0)
)
}

View File

@@ -4,7 +4,7 @@ import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
import linea.domain.BlockHeaderSummary
import linea.kotlin.ByteArrayExt
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
import org.apache.logging.log4j.Logger
@@ -191,7 +191,7 @@ class ConflationCalculatorByTimeDeadlineTest {
return BlockCounters(
blockNumber = blockNumber,
blockTimestamp = timestamp,
tracesCounters = fakeTracesCountersV1(1u),
tracesCounters = fakeTracesCountersV2(1u),
blockRLPEncoded = ByteArray(0)
)
}

View File

@@ -2,8 +2,8 @@ package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
import linea.domain.createBlock
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.BlocksConflation
import net.consensys.zkevm.domain.ConflationCalculationResult
@@ -35,7 +35,7 @@ class ConflationServiceImplTest {
ConflationCalculatorByBlockLimit(conflationBlockLimit)
),
deferredTriggerConflationCalculators = emptyList(),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
conflationService = ConflationServiceImpl(conflationCalculator, mock(defaultAnswer = RETURNS_DEEP_STUBS))
}
@@ -49,19 +49,19 @@ class ConflationServiceImplTest {
val payloadCounters1 = BlockCounters(
blockNumber = 1UL,
payload1Time.plus(0.seconds),
tracesCounters = fakeTracesCountersV1(40u),
tracesCounters = fakeTracesCountersV2(40u),
blockRLPEncoded = ByteArray(0)
)
val payloadCounters2 = BlockCounters(
blockNumber = 2UL,
payload1Time.plus(2.seconds),
tracesCounters = fakeTracesCountersV1(40u),
tracesCounters = fakeTracesCountersV2(40u),
blockRLPEncoded = ByteArray(0)
)
val payloadCounters3 = BlockCounters(
blockNumber = 3UL,
payload1Time.plus(4.seconds),
tracesCounters = fakeTracesCountersV1(100u),
tracesCounters = fakeTracesCountersV2(100u),
blockRLPEncoded = ByteArray(0)
)
@@ -85,7 +85,7 @@ class ConflationServiceImplTest {
endBlockNumber = 2u,
conflationTrigger = ConflationTrigger.BLOCKS_LIMIT,
// these are not counted in conflation, so will be 0
tracesCounters = fakeTracesCountersV1(0u)
tracesCounters = fakeTracesCountersV2(0u)
)
)
)
@@ -100,7 +100,7 @@ class ConflationServiceImplTest {
assertThat(numberOfBlocks % numberOfThreads).isEqualTo(0)
val expectedConflations = numberOfBlocks / conflationBlockLimit.toInt() - 1
val blocks = (1UL..numberOfBlocks.toULong()).map { createBlock(number = it, gasLimit = 20_000_000UL) }
val fixedTracesCounters = fakeTracesCountersV1(moduleTracesCounter)
val fixedTracesCounters = fakeTracesCountersV2(moduleTracesCounter)
val blockTime = Instant.parse("2021-01-01T00:00:00Z")
val conflationEvents = mutableListOf<BlocksConflation>()
conflationService.onConflatedBatch { conflationEvent: BlocksConflation ->
@@ -144,7 +144,7 @@ class ConflationServiceImplTest {
@Test
fun `if calculator fails, error is propagated`() {
val moduleTracesCounter = 10u
val fixedTracesCounters = fakeTracesCountersV1(moduleTracesCounter)
val fixedTracesCounters = fakeTracesCountersV2(moduleTracesCounter)
val blockTime = Instant.parse("2021-01-01T00:00:00Z")
val expectedException = RuntimeException("Calculator failed!")

View File

@@ -9,8 +9,8 @@ import net.consensys.linea.metrics.FakeHistogram
import net.consensys.linea.metrics.LineaMetricsCategory
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.Blob
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationCalculationResult
@@ -174,8 +174,8 @@ class GlobalBlobAwareConflationCalculatorTest {
blobCompressor = spy<BlobCompressor>(FakeBlobCompressor(dataLimit = 100, fakeCompressionRatio = 1.0))
calculatorByDataCompressed = ConflationCalculatorByDataCompressed(blobCompressor = blobCompressor)
calculatorByTraces = ConflationCalculatorByExecutionTraces(
tracesCountersLimit = fakeTracesCountersV1(100u),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT,
tracesCountersLimit = fakeTracesCountersV2(100u),
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT,
metricsFacade = mock(defaultAnswer = Mockito.RETURNS_DEEP_STUBS)
)
conflationTargetEndBlockNumbers.clear()
@@ -187,7 +187,7 @@ class GlobalBlobAwareConflationCalculatorTest {
lastBlockNumber = lastBlockNumber,
syncCalculators = listOf(calculatorByTraces, calculatorByDataCompressed, calculatorByTargetBlockNumber),
deferredTriggerConflationCalculators = listOf(calculatorByDealine),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
calculator = GlobalBlobAwareConflationCalculator(
conflationCalculator = globalCalculator,
@@ -216,7 +216,7 @@ class GlobalBlobAwareConflationCalculatorTest {
BlockCounters(
blockNumber = it,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(1u),
tracesCounters = fakeTracesCountersV2(1u),
blockRLPEncoded = ByteArray(11),
numOfTransactions = 1u,
gasUsed = 10uL
@@ -231,13 +231,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.TARGET_BLOCK_NUMBER,
tracesCounters = fakeTracesCountersV1(5u)
tracesCounters = fakeTracesCountersV2(5u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 10uL,
conflationTrigger = ConflationTrigger.TARGET_BLOCK_NUMBER,
tracesCounters = fakeTracesCountersV1(5u)
tracesCounters = fakeTracesCountersV2(5u)
)
)
)
@@ -265,7 +265,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11),
numOfTransactions = 1u,
gasUsed = 10uL
@@ -273,7 +273,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12),
numOfTransactions = 1u,
gasUsed = 10uL
@@ -281,7 +281,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(83),
numOfTransactions = 1u,
gasUsed = 10uL
@@ -289,7 +289,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(44),
numOfTransactions = 1u,
gasUsed = 10uL
@@ -321,13 +321,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 2uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(20u)
tracesCounters = fakeTracesCountersV2(20u)
),
ConflationCalculationResult(
startBlockNumber = 3uL,
endBlockNumber = 3uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(10u)
tracesCounters = fakeTracesCountersV2(10u)
)
)
)
@@ -356,47 +356,47 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(90u),
tracesCounters = fakeTracesCountersV2(90u),
blockRLPEncoded = ByteArray(13)
)
// over sized block
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(200u),
tracesCounters = fakeTracesCountersV2(200u),
blockRLPEncoded = ByteArray(14)
)
// blob size is 0 bytes up to this point (fake compression, limit 100)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// blob size is 15 bytes up to this point (fake compression, limit 100)
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(61)
)
// block 7 does not fit on top of 6, so it should emit conflation and blob events
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -414,25 +414,25 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 2uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(20u)
tracesCounters = fakeTracesCountersV2(20u)
),
ConflationCalculationResult(
startBlockNumber = 3uL,
endBlockNumber = 3uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(90u)
tracesCounters = fakeTracesCountersV2(90u)
),
ConflationCalculationResult(
startBlockNumber = 4uL,
endBlockNumber = 4uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(200u)
tracesCounters = fakeTracesCountersV2(200u)
),
ConflationCalculationResult(
startBlockNumber = 5uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(20u)
tracesCounters = fakeTracesCountersV2(20u)
)
)
)
@@ -451,31 +451,31 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(13)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(14)
)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// blob size is 65 bytes up to this point (fake compression, limit 100)
@@ -483,14 +483,14 @@ class GlobalBlobAwareConflationCalculatorTest {
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(30u),
tracesCounters = fakeTracesCountersV2(30u),
blockRLPEncoded = ByteArray(61)
)
// block 7 does not fit on top of 6, so it should emit conflation and blob events
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -524,13 +524,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.TIME_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(30u)
tracesCounters = fakeTracesCountersV2(30u)
)
)
)
@@ -551,38 +551,38 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(13)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(14)
)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// traces limit will be triggered
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(60u),
tracesCounters = fakeTracesCountersV2(60u),
blockRLPEncoded = ByteArray(16)
)
// blob size is 71 bytes up to this point (fake compression, limit 100)
@@ -590,7 +590,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -610,13 +610,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(60u)
tracesCounters = fakeTracesCountersV2(60u)
)
)
)
@@ -633,35 +633,35 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(50u),
tracesCounters = fakeTracesCountersV2(50u),
blockRLPEncoded = ByteArray(11)
)
// traces limit will be triggered
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(100u),
tracesCounters = fakeTracesCountersV2(100u),
blockRLPEncoded = ByteArray(12)
)
// traces limit will be triggered
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(90u),
tracesCounters = fakeTracesCountersV2(90u),
blockRLPEncoded = ByteArray(13)
)
// traces limit will be triggered
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(100u),
tracesCounters = fakeTracesCountersV2(100u),
blockRLPEncoded = ByteArray(14)
)
// traces limit will be triggered
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(50u),
tracesCounters = fakeTracesCountersV2(50u),
blockRLPEncoded = ByteArray(15)
)
// traces limit will be triggered and blob batch limit will be triggered
@@ -670,7 +670,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(60u),
tracesCounters = fakeTracesCountersV2(60u),
blockRLPEncoded = ByteArray(16)
)
@@ -692,31 +692,31 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 1uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 2uL,
endBlockNumber = 2uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(100u)
tracesCounters = fakeTracesCountersV2(100u)
),
ConflationCalculationResult(
startBlockNumber = 3uL,
endBlockNumber = 3uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(90u)
tracesCounters = fakeTracesCountersV2(90u)
),
ConflationCalculationResult(
startBlockNumber = 4uL,
endBlockNumber = 4uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(100u)
tracesCounters = fakeTracesCountersV2(100u)
),
ConflationCalculationResult(
startBlockNumber = 5uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
)
)
)
@@ -737,38 +737,38 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(13)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(14)
)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// over-sized block traces limit will be triggered
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(200u),
tracesCounters = fakeTracesCountersV2(200u),
blockRLPEncoded = ByteArray(16)
)
// blob size is 71 bytes up to this point (fake compression, limit 100)
@@ -776,7 +776,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -796,13 +796,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(200u)
tracesCounters = fakeTracesCountersV2(200u)
)
)
)
@@ -819,31 +819,31 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(13)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(14)
)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// traces limit and data limit will be triggered
@@ -852,7 +852,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(60u),
tracesCounters = fakeTracesCountersV2(60u),
blockRLPEncoded = ByteArray(61)
)
// blob size is 61 bytes up to this point (fake compression, limit 100)
@@ -860,7 +860,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -880,13 +880,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(60u)
tracesCounters = fakeTracesCountersV2(60u)
)
)
)
@@ -907,31 +907,31 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(13)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(14)
)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// over-sized traces limit and data limit will be triggered
@@ -940,7 +940,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(200u),
tracesCounters = fakeTracesCountersV2(200u),
blockRLPEncoded = ByteArray(61)
)
// blob size is 61 bytes up to this point (fake compression, limit 100)
@@ -948,7 +948,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -968,13 +968,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(200u)
tracesCounters = fakeTracesCountersV2(200u)
)
)
)
@@ -995,31 +995,31 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(13)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(14)
)
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = block4Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(15)
)
// over-sized block traces limit and data limit will be triggered
@@ -1028,7 +1028,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = block5Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(200u),
tracesCounters = fakeTracesCountersV2(200u),
blockRLPEncoded = ByteArray(61)
)
// blob size is 61 bytes up to this point (fake compression, limit 100)
@@ -1036,7 +1036,7 @@ class GlobalBlobAwareConflationCalculatorTest {
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = block6Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(71)
)
@@ -1070,13 +1070,13 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 5uL,
conflationTrigger = ConflationTrigger.TIME_LIMIT,
tracesCounters = fakeTracesCountersV1(50u)
tracesCounters = fakeTracesCountersV2(50u)
),
ConflationCalculationResult(
startBlockNumber = 6uL,
endBlockNumber = 6uL,
conflationTrigger = ConflationTrigger.DATA_LIMIT,
tracesCounters = fakeTracesCountersV1(200u)
tracesCounters = fakeTracesCountersV2(200u)
)
)
)
@@ -1097,19 +1097,19 @@ class GlobalBlobAwareConflationCalculatorTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClockTime,
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(11)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(12)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
tracesCounters = fakeTracesCountersV1(90u),
tracesCounters = fakeTracesCountersV2(90u),
blockRLPEncoded = ByteArray(13)
)
@@ -1127,7 +1127,7 @@ class GlobalBlobAwareConflationCalculatorTest {
startBlockNumber = 1uL,
endBlockNumber = 2uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(20u)
tracesCounters = fakeTracesCountersV2(20u)
)
)
)

View File

@@ -5,8 +5,8 @@ import linea.domain.BlockHeaderSummary
import linea.kotlin.ByteArrayExt
import net.consensys.FakeFixedClock
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.domain.ConflationTrigger
@@ -68,15 +68,15 @@ class GlobalBlockConflationCalculatorIntTest {
}
calculatorByTraces = ConflationCalculatorByExecutionTraces(
tracesCountersLimit = fakeTracesCountersV1(100u),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT,
tracesCountersLimit = fakeTracesCountersV2(100u),
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT,
metricsFacade = mock<MetricsFacade>(defaultAnswer = Mockito.RETURNS_DEEP_STUBS)
)
globalCalculator = GlobalBlockConflationCalculator(
lastBlockNumber = lastBlockNumber,
syncCalculators = listOf(calculatorByTraces, calculatorByData),
deferredTriggerConflationCalculators = listOf(calculatorByDealine),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
globalCalculator.onConflatedBatch { trigger ->
conflations.add(trigger)
@@ -90,13 +90,13 @@ class GlobalBlockConflationCalculatorIntTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(101u),
tracesCounters = fakeTracesCountersV2(101u),
blockRLPEncoded = ByteArray(10)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(10)
)
globalCalculator.newBlock(block1Counters)
@@ -119,13 +119,13 @@ class GlobalBlockConflationCalculatorIntTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(60u),
tracesCounters = fakeTracesCountersV2(60u),
blockRLPEncoded = ByteArray(10)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(50u),
tracesCounters = fakeTracesCountersV2(50u),
blockRLPEncoded = ByteArray(20)
)
globalCalculator.newBlock(block1Counters)
@@ -148,19 +148,19 @@ class GlobalBlockConflationCalculatorIntTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(50u),
tracesCounters = fakeTracesCountersV2(50u),
blockRLPEncoded = ByteArray(10)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(50u),
tracesCounters = fakeTracesCountersV2(50u),
blockRLPEncoded = ByteArray(20)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(20)
)
globalCalculator.newBlock(block1Counters)
@@ -184,19 +184,19 @@ class GlobalBlockConflationCalculatorIntTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(500)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(20u),
tracesCounters = fakeTracesCountersV2(20u),
blockRLPEncoded = ByteArray(480)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(20u),
tracesCounters = fakeTracesCountersV2(20u),
blockRLPEncoded = ByteArray(21)
)
globalCalculator.newBlock(block1Counters)
@@ -220,46 +220,46 @@ class GlobalBlockConflationCalculatorIntTest {
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(101u),
tracesCounters = fakeTracesCountersV2(101u),
blockRLPEncoded = ByteArray(100)
)
// block with data in size limit
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(20u),
tracesCounters = fakeTracesCountersV2(20u),
blockRLPEncoded = ByteArray(1_000)
)
val block3Counters = BlockCounters(
blockNumber = 3uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(30u),
tracesCounters = fakeTracesCountersV2(30u),
blockRLPEncoded = ByteArray(300)
)
val block4Counters = BlockCounters(
blockNumber = 4uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(70u),
tracesCounters = fakeTracesCountersV2(70u),
blockRLPEncoded = ByteArray(400)
)
// will trigger traces overflow
val block5Counters = BlockCounters(
blockNumber = 5uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(100)
)
val block6Counters = BlockCounters(
blockNumber = 6uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(100)
)
val block7Counters = BlockCounters(
blockNumber = 7uL,
blockTimestamp = fakeClock.now(),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(100)
)

View File

@@ -1,8 +1,8 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.domain.ConflationTrigger
@@ -24,18 +24,18 @@ class GlobalBlockConflationCalculatorTest {
private lateinit var calculatorByTraces: ConflationCalculator
private lateinit var globalCalculator: GlobalBlockConflationCalculator
private val lastBlockNumber: ULong = 0uL
private val fakeCountersAfterConflation = fakeTracesCountersV1(123u)
private val fakeCountersAfterConflation = fakeTracesCountersV2(123u)
private val fakeDataSizeAfterConflation = 123u
val block1Counters = BlockCounters(
blockNumber = 1uL,
blockTimestamp = Instant.parse("2023-12-11T00:00:00.000Z"),
tracesCounters = fakeTracesCountersV1(10u),
tracesCounters = fakeTracesCountersV2(10u),
blockRLPEncoded = ByteArray(0)
)
val block2Counters = BlockCounters(
blockNumber = 2uL,
blockTimestamp = Instant.parse("2023-12-11T00:00:02.000Z"),
tracesCounters = fakeTracesCountersV1(20u),
tracesCounters = fakeTracesCountersV2(20u),
blockRLPEncoded = ByteArray(0)
)
@@ -70,7 +70,7 @@ class GlobalBlockConflationCalculatorTest {
lastBlockNumber = lastBlockNumber,
syncCalculators = listOf(calculatorByTraces, calculatorByData),
deferredTriggerConflationCalculators = listOf(calculatorByDealine),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
conflations = mutableListOf<ConflationCalculationResult>()
globalCalculator.onConflatedBatch { trigger ->
@@ -86,7 +86,7 @@ class GlobalBlockConflationCalculatorTest {
lastBlockNumber = lastBlockNumber,
syncCalculators = listOf(calculatorByTraces, calculatorByData, calculatorByDealine),
deferredTriggerConflationCalculators = listOf(calculatorByDealine),
emptyTracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
emptyTracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
}.isInstanceOf(IllegalArgumentException::class.java)
.hasMessageContaining("calculators must not contain duplicates")

View File

@@ -0,0 +1,47 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.getError
import linea.domain.createBlock
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
class ProofGeneratingConflationHandlerImplTest {
@Test fun `assertConsecutiveBlocksRange return error when empty`() {
assertConsecutiveBlocksRange(emptyList()).let { result ->
assertThat(result).isInstanceOf(Err::class.java)
assertThat(result.getError()).isInstanceOf(IllegalArgumentException::class.java)
assertThat(result.getError()!!.message).isEqualTo("Empty list of blocks")
}
}
@Test fun `assertConsecutiveBlocksRange return error when there is gap in block numbers`() {
val blocks = listOf(
createBlock(15UL),
createBlock(14UL),
// // gap on 13
createBlock(12UL),
createBlock(11UL),
createBlock(10UL)
)
assertConsecutiveBlocksRange(blocks).let { result ->
assertThat(result).isInstanceOf(Err::class.java)
assertThat(result.getError()).isInstanceOf(IllegalArgumentException::class.java)
assertThat(result.getError()!!.message).isEqualTo("Conflated blocks list has non consecutive blocks!")
}
}
@Test fun `assertConsecutiveBlocksRange returns sorted list when all blocks are consecutive`() {
val blocks = listOf(
createBlock(13UL),
createBlock(12UL),
createBlock(11UL),
createBlock(10UL)
)
assertThat(assertConsecutiveBlocksRange(blocks)).isEqualTo(Ok(10UL..13UL))
assertThat(assertConsecutiveBlocksRange(listOf(blocks[0]))).isEqualTo(Ok(13UL..13UL))
}
}

View File

@@ -1,48 +0,0 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.getError
import linea.domain.BlockNumberAndHash
import linea.kotlin.ByteArrayExt
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
class TracesConflationCoordinatorImplTest {
@Test fun `assertBlocksList return error when empty`() {
assertBlocksList(emptyList()).let { result ->
assertThat(result).isInstanceOf(Err::class.java)
assertThat(result.getError()).isInstanceOf(IllegalArgumentException::class.java)
assertThat(result.getError()!!.message).isEqualTo("Empty list of blocs")
}
}
@Test fun `assertBlocksList return error when there is gap in block numbers`() {
val blocks = listOf(
BlockNumberAndHash(15u, ByteArrayExt.random32()),
BlockNumberAndHash(14u, ByteArrayExt.random32()),
// gap on 13
BlockNumberAndHash(12u, ByteArrayExt.random32()),
BlockNumberAndHash(11u, ByteArrayExt.random32()),
BlockNumberAndHash(10u, ByteArrayExt.random32())
)
assertBlocksList(blocks).let { result ->
assertThat(result).isInstanceOf(Err::class.java)
assertThat(result.getError()).isInstanceOf(IllegalArgumentException::class.java)
assertThat(result.getError()!!.message).isEqualTo("Conflated blocks list has non consecutive blocks!")
}
}
@Test fun `assertBlocksList returns sorted list when all blocks are consecutive`() {
val blocks = listOf(
BlockNumberAndHash(13u, ByteArrayExt.random32()),
BlockNumberAndHash(12u, ByteArrayExt.random32()),
BlockNumberAndHash(11u, ByteArrayExt.random32()),
BlockNumberAndHash(10u, ByteArrayExt.random32())
)
assertThat(assertBlocksList(blocks)).isEqualTo(Ok(blocks.sortedBy { it.number }))
assertThat(assertBlocksList(listOf(blocks[0]))).isEqualTo(Ok(listOf(blocks[0])))
}
}

View File

@@ -9,7 +9,7 @@ import linea.ethapi.FakeEthApiClient
import linea.kotlin.ByteArrayExt
import linea.kotlin.encodeHex
import linea.log4j.configureLoggers
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV2
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofRequestV1
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofResponse
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
@@ -95,7 +95,7 @@ class ZkProofCreationCoordinatorImplTest {
startBlockNumber = 123UL,
endBlockNumber = 124UL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = fakeTracesCountersV1(0u)
tracesCounters = fakeTracesCountersV2(0u)
)
),
traces = BlocksTracesConflated(

View File

@@ -12,7 +12,7 @@ import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
import linea.domain.BlockIntervals
import linea.kotlin.ByteArrayExt
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.coordinator.clients.BlobCompressionProofRequest
import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
@@ -191,7 +191,7 @@ class BlobCompressionProofCoordinatorIntTest : CleanDbTestSuiteParallel() {
startBlockNumber = currentBlockNumber,
endBlockNumber = endBlockNumber,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
),
compressedData = Random.nextBytes(128),
@@ -224,19 +224,19 @@ class BlobCompressionProofCoordinatorIntTest : CleanDbTestSuiteParallel() {
startBlockNumber = blobEventStartBlock,
endBlockNumber = blobEventEndBlock,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
),
ConflationCalculationResult(
startBlockNumber = blobEventEndBlock + 1UL,
endBlockNumber = blobEventEndBlock + 200UL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
),
ConflationCalculationResult(
startBlockNumber = blobEventEndBlock + 201UL,
endBlockNumber = blobEventEndBlock + 300UL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
),
compressedData = Random.nextBytes(128),
@@ -284,7 +284,7 @@ class BlobCompressionProofCoordinatorIntTest : CleanDbTestSuiteParallel() {
startBlockNumber = blobEventStartBlock,
endBlockNumber = blobEventEndBlock,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
),
compressedData = Random.nextBytes(128),

View File

@@ -4,7 +4,7 @@ import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import linea.domain.BlockIntervals
import net.consensys.FakeFixedClock
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracesCountersV2
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.coordinator.clients.BlobCompressionProofRequest
import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
@@ -30,7 +30,6 @@ import org.mockito.kotlin.times
import org.mockito.kotlin.verify
import org.mockito.kotlin.whenever
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.lang.RuntimeException
import kotlin.random.Random
import kotlin.time.Duration.Companion.milliseconds
import kotlin.time.Duration.Companion.seconds
@@ -109,7 +108,7 @@ class BlobCompressionProofCoordinatorTest {
startBlockNumber = expectedStartBlock,
endBlockNumber = expectedEndBlock,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
),
compressedData = Random.nextBytes(128),
@@ -185,7 +184,7 @@ class BlobCompressionProofCoordinatorTest {
startBlockNumber = 1uL,
endBlockNumber = 10uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
),
compressedData = Random.nextBytes(128),
@@ -199,7 +198,7 @@ class BlobCompressionProofCoordinatorTest {
startBlockNumber = 11uL,
endBlockNumber = 20uL,
conflationTrigger = ConflationTrigger.TRACES_LIMIT,
tracesCounters = TracesCountersV1.EMPTY_TRACES_COUNT
tracesCounters = TracesCountersV2.EMPTY_TRACES_COUNT
)
),
compressedData = Random.nextBytes(128),

View File

@@ -126,9 +126,9 @@ services:
linea:
ipv4_address: 11.11.11.119
traces-node-v2:
hostname: traces-node-v2
container_name: traces-node-v2
traces-node:
hostname: traces-node
container_name: traces-node
image: consensys/linea-besu-package:${BESU_PACKAGE_TAG:-sepolia-3e31c8f}
profiles: [ "l2", "l2-bc", "debug", "external-to-monorepo" ]
depends_on:
@@ -154,12 +154,12 @@ services:
- -c
- |
/opt/besu/bin/besu \
--config-file=/var/lib/besu/traces-node-v2.config.toml \
--config-file=/var/lib/besu/traces-node.config.toml \
--genesis-file=/var/lib/besu/genesis.json \
--bootnodes=enode://14408801a444dafc44afbccce2eb755f902aed3b5743fed787b3c790e021fef28b8c827ed896aa4e8fb46e22bd67c39f994a73768b4b382f8597b0d44370e15d@11.11.11.101:30303
volumes:
- ./config/traces-node-v2/traces-node-v2-config.toml:/var/lib/besu/traces-node-v2.config.toml:ro
- ./config/traces-node-v2/log4j.xml:/var/lib/besu/log4j.xml:ro
- ./config/traces-node/traces-node-config.toml:/var/lib/besu/traces-node.config.toml:ro
- ./config/traces-node/log4j.xml:/var/lib/besu/log4j.xml:ro
- ./config/linea-local-dev-genesis-PoA-besu.json/:/var/lib/besu/genesis.json:ro
- ../tmp/local/:/data/:rw
networks:
@@ -228,11 +228,15 @@ services:
ports:
- "9545:9545"
restart: on-failure
command: [ 'java', '-Dvertx.configurationFile=/var/lib/coordinator/vertx-options.json', '-Dlog4j2.configurationFile=/var/lib/coordinator/log4j2-dev.xml', '-jar', 'libs/coordinator.jar', '--traces-limits', 'config/traces-limits-v1.toml', '--smart-contract-errors', 'config/smart-contract-errors.toml', '--gas-price-cap-time-of-day-multipliers', 'config/gas-price-cap-time-of-day-multipliers.toml', 'config/coordinator-docker.config.toml', 'config/coordinator-docker-web3signer-override.config.toml' ]
environment:
config__override__l2-network-gas-pricing__json-rpc-pricing-propagation__disabled: ${DISABLE_JSON_RPC_PRICING_PROPAGATION:-true}
config__override__type2-state-proof-provider__endpoints: # leave empty to disable
command: [ 'java', '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005','-Dvertx.configurationFile=/var/lib/coordinator/vertx-options.json', '-Dlog4j2.configurationFile=/var/lib/coordinator/log4j2-dev.xml', '-jar', 'libs/coordinator.jar', '--traces-limits-v2', 'config/traces-limits-v2.toml', '--smart-contract-errors', 'config/smart-contract-errors.toml', '--gas-price-cap-time-of-day-multipliers', 'config/gas-price-cap-time-of-day-multipliers.toml', 'config/coordinator-docker.config.toml', 'config/coordinator-docker-traces-v2-override.config.toml' ]
volumes:
- ../config/coordinator/coordinator-docker.config.toml:/opt/consensys/linea/coordinator/config/coordinator-docker.config.toml:ro
- ../config/coordinator/coordinator-docker-web3signer-override.config.toml:/opt/consensys/linea/coordinator/config/coordinator-docker-web3signer-override.config.toml:ro
- ../config/common/traces-limits-v1.toml:/opt/consensys/linea/coordinator/config/traces-limits-v1.toml:ro
- ../config/coordinator/coordinator-docker-traces-v2-override.config.toml:/opt/consensys/linea/coordinator/config/coordinator-docker-traces-v2-override.config.toml:ro
- ../config/common/traces-limits-v2.toml:/opt/consensys/linea/coordinator/config/traces-limits-v2.toml:ro
- ../config/common/smart-contract-errors.toml:/opt/consensys/linea/coordinator/config/smart-contract-errors.toml:ro
- ../config/common/gas-price-cap-time-of-day-multipliers.toml:/opt/consensys/linea/coordinator/config/gas-price-cap-time-of-day-multipliers.toml:ro
- ../config/coordinator/vertx-options.json:/var/lib/coordinator/vertx-options.json:ro

View File

@@ -11,12 +11,6 @@ services:
extends:
file: compose-spec-l2-services.yml
service: l2-node-besu
environment:
BESU_PLUGIN_LINEA_BUNDLES_FORWARD_URLS: "http://sequencer:8545"
BESU_PLUGIN_LINEA_BUNDLES_FORWARD_RETRY_DELAY: 1000
BESU_PLUGIN_LINEA_BUNDLES_FORWARD_TIMEOUT: 5000
volumes:
- ../config/common/traces-limits-besu-v2.toml:/var/lib/besu/traces-limits.toml:ro
shomei-frontend:
extends:

View File

@@ -41,10 +41,10 @@ services:
file: compose-spec-l2-services.yml
service: sequencer
traces-node-v2:
traces-node:
extends:
file: compose-spec-l2-services.yml
service: traces-node-v2
service: traces-node
prover-v3:
extends:
@@ -60,14 +60,6 @@ services:
extends:
file: compose-spec-l2-services.yml
service: coordinator
environment:
config__override__l2-network-gas-pricing__json-rpc-pricing-propagation__disabled: ${DISABLE_JSON_RPC_PRICING_PROPAGATION:-true}
config__override__type2-state-proof-provider__endpoints: # leave empty to disable
command: [ 'java', '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005','-Dvertx.configurationFile=/var/lib/coordinator/vertx-options.json', '-Dlog4j2.configurationFile=/var/lib/coordinator/log4j2-dev.xml', '-jar', 'libs/coordinator.jar', '--traces-limits-v2', 'config/traces-limits-v2.toml', '--smart-contract-errors', 'config/smart-contract-errors.toml', '--gas-price-cap-time-of-day-multipliers', 'config/gas-price-cap-time-of-day-multipliers.toml', 'config/coordinator-docker.config.toml', 'config/coordinator-docker-traces-v2-override.config.toml' ]
volumes:
- ../config/common/traces-limits-v2.toml:/opt/consensys/linea/coordinator/config/traces-limits-v2.toml:ro
- ../config/coordinator/coordinator-docker-traces-v2-override.config.toml:/opt/consensys/linea/coordinator/config/coordinator-docker-traces-v2-override.config.toml:ro
- ../tmp/local/:/data/
web3signer:
extends:

View File

@@ -53,6 +53,9 @@ plugin-linea-variable-gas-cost-wei=1000000000
plugin-linea-extra-data-set-min-gas-price-enabled=true
plugin-linea-estimate-gas-compatibility-mode-enabled=false
plugin-linea-estimate-gas-min-margin="1.2"
plugin-linea-bundles-forward-urls=["http://sequencer:8545"]
plugin-linea-bundles-forward-retry-delay=1000
plugin-linea-bundles-forward-timeout=5000
bonsai-limit-trie-logs-enabled=false
bonsai-historical-block-limit=1024

View File

@@ -41,7 +41,6 @@ data class BlockData<TxData>(
private val isTransactionHashOnly: Boolean
get() = transactions.isNotEmpty() && transactions.first() is ByteArray
val numberAndHash = BlockNumberAndHash(this.number, this.hash)
val headerSummary = BlockHeaderSummary(this.number, this.hash, Instant.fromEpochSeconds(this.timestamp.toLong()))
override fun equals(other: Any?): Boolean {
@@ -78,7 +77,6 @@ data class BlockData<TxData>(
return false
}
if (ommers != other.ommers) return false
if (numberAndHash != other.numberAndHash) return false
if (headerSummary != other.headerSummary) return false
return true
@@ -110,7 +108,6 @@ data class BlockData<TxData>(
}
}
result = 31 * result + ommers.hashCode()
result = 31 * result + numberAndHash.hashCode()
result = 31 * result + headerSummary.hashCode()
return result
}

View File

@@ -77,20 +77,6 @@ private fun add(tc1: TracesCounters, tc2: TracesCounters): Map<TracingModule, UI
return sum
}
data class TracesCountersV1(
private val countersMap: Map<TracingModuleV1, UInt>
) : TracesCountersImpl(countersMap, TracingModuleV1.entries) {
companion object {
val EMPTY_TRACES_COUNT = TracesCountersV1(TracingModuleV1.entries.associateWith { 0u })
}
override fun add(o: TracesCounters): TracesCountersV1 {
val sum = add(this, o)
@Suppress("UNCHECKED_CAST")
return TracesCountersV1(sum as Map<TracingModuleV1, UInt>)
}
}
data class TracesCountersV2(private val countersMap: Map<TracingModuleV2, UInt>) :
TracesCountersImpl(countersMap, TracingModuleV2.entries) {
companion object {

View File

@@ -4,78 +4,6 @@ sealed interface TracingModule {
val name: String
}
enum class TracingModuleV1 : TracingModule {
// EMV Module limits
ADD,
BIN,
BIN_RT,
EC_DATA,
EXT,
HUB,
INSTRUCTION_DECODER,
MMIO,
MMU,
MMU_ID,
MOD,
MUL,
MXP,
PHONEY_RLP,
PUB_HASH,
PUB_HASH_INFO,
PUB_LOG,
PUB_LOG_INFO,
RLP,
ROM,
SHF,
SHF_RT,
TX_RLP,
WCP,
// Block-specific limits
BLOCK_TX,
BLOCK_L2L1LOGS,
BLOCK_KECCAK,
// Precompiles call limits
PRECOMPILE_ECRECOVER,
PRECOMPILE_SHA2,
PRECOMPILE_RIPEMD,
PRECOMPILE_IDENTITY,
PRECOMPILE_MODEXP,
PRECOMPILE_ECADD,
PRECOMPILE_ECMUL,
PRECOMPILE_ECPAIRING,
PRECOMPILE_BLAKE2F;
companion object {
val evmModules: Set<TracingModuleV1> = setOf(
ADD,
BIN,
BIN_RT,
EC_DATA,
EXT,
HUB,
INSTRUCTION_DECODER,
MMIO,
MMU,
MMU_ID,
MOD,
MUL,
MXP,
PHONEY_RLP,
PUB_HASH,
PUB_HASH_INFO,
PUB_LOG,
PUB_LOG_INFO,
RLP,
ROM,
SHF,
SHF_RT,
WCP
)
}
}
enum class TracingModuleV2 : TracingModule {
// EMV Module limits
ADD,

View File

@@ -7,34 +7,10 @@ import org.assertj.core.api.Assertions.assertThat
import org.assertj.core.api.Assertions.assertThatThrownBy
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import java.lang.IllegalArgumentException
class TracesCountersTest {
data class TracesConfigV1(val tracesLimits: Map<TracingModuleV1, UInt>)
data class TracesConfigV2(val tracesLimits: Map<TracingModuleV2, UInt>)
@Test
fun `configs v1 match specifiedModules`() {
val path = findPathTo("config/common/traces-limits-v1.toml")
val tracesConfigV1 = ConfigLoaderBuilder.default()
.addFileSource(path.toString())
.build()
.loadConfigOrThrow<TracesConfigV1>()
val tracesCountersLimit = TracesCountersV1(tracesConfigV1.tracesLimits)
tracesCountersLimit.entries().forEach { moduleLimit ->
// PoW 2 requirement only apply to traces passed to the prover
if (TracingModuleV1.evmModules.contains(moduleLimit.first)) {
val isPowerOf2 = (moduleLimit.second and (moduleLimit.second - 1u)) == 0u
assertThat(isPowerOf2)
.withFailMessage("Trace limit ${moduleLimit.first}=${moduleLimit.second} is not a power of 2!")
.isTrue()
}
}
}
@Test
fun `configs v2 match specifiedModules`() {
val path = findPathTo("config/common/traces-limits-v2.toml")
@@ -59,34 +35,26 @@ class TracesCountersTest {
@Test
fun add_notOverflow() {
val counters1 = fakeTracesCountersV1(10u)
val counters2 = fakeTracesCountersV1(20u)
val counters3 = fakeTracesCountersV1(20u)
val counters1 = fakeTracesCountersV2(10u)
val counters2 = fakeTracesCountersV2(20u)
val counters3 = fakeTracesCountersV2(20u)
assertThat(counters1.add(counters2).add(counters3))
.isEqualTo(fakeTracesCountersV1(50u))
.isEqualTo(fakeTracesCountersV2(50u))
}
@Test
fun add_Overflow_throwsError() {
val counters1 = fakeTracesCountersV1(10u)
val counters2 = fakeTracesCountersV1(UInt.MAX_VALUE)
val counters1 = fakeTracesCountersV2(10u)
val counters2 = fakeTracesCountersV2(UInt.MAX_VALUE)
assertThatThrownBy { counters1.add(counters2) }.isInstanceOf(ArithmeticException::class.java)
.withFailMessage("integer overflow")
}
@Test
fun add_multipleVersion_throwsError() {
val counters1 = fakeTracesCountersV1(10u)
val counters2 = fakeTracesCountersV2(10u)
assertThatThrownBy { counters1.add(counters2) }.isInstanceOf(IllegalArgumentException::class.java)
.hasMessageContaining("Cannot add different traces counters")
}
@Test
fun allTracesWithinLimits() {
val limits = fakeTracesCountersV1(20u, mapOf(Pair(TracingModuleV1.ADD, 10u)))
val countersWithinLimits = fakeTracesCountersV1(3u)
val countersOvertLimits = fakeTracesCountersV1(5u, mapOf(Pair(TracingModuleV1.ADD, 11u)))
val limits = fakeTracesCountersV2(20u, mapOf(Pair(TracingModuleV2.ADD, 10u)))
val countersWithinLimits = fakeTracesCountersV2(3u)
val countersOvertLimits = fakeTracesCountersV2(5u, mapOf(Pair(TracingModuleV2.ADD, 11u)))
assertThat(countersWithinLimits.allTracesWithinLimits(limits)).isTrue()
assertThat(countersOvertLimits.allTracesWithinLimits(limits)).isFalse()
@@ -94,11 +62,6 @@ class TracesCountersTest {
@Test
fun empty_counters() {
val tracesCountersV1 = TracesCountersV1(
TracingModuleV1.entries.associateWith { 0u }
)
assertThat(tracesCountersV1).isEqualTo(TracesCountersV1.EMPTY_TRACES_COUNT)
val tracesCountersV2 = TracesCountersV2(
TracingModuleV2.entries.associateWith { 0u }
)
@@ -107,12 +70,6 @@ class TracesCountersTest {
@Test
fun incomplete_counters_throwsError() {
assertThrows<IllegalArgumentException> {
TracesCountersV1(emptyMap())
}
assertThrows<IllegalArgumentException> {
TracesCountersV1(mapOf(Pair(TracingModuleV1.ADD, 10u)))
}
assertThrows<IllegalArgumentException> {
TracesCountersV2(emptyMap())
}
@@ -123,13 +80,13 @@ class TracesCountersTest {
@Test
fun oversizedTraces() {
val limits = fakeTracesCountersV1(20u, mapOf(Pair(TracingModuleV1.ADD, 10u)))
val countersWithinLimits = fakeTracesCountersV1(3u)
val countersOvertLimits = fakeTracesCountersV1(5u, mapOf(Pair(TracingModuleV1.ADD, 11u)))
val limits = fakeTracesCountersV2(20u, mapOf(Pair(TracingModuleV2.ADD, 10u)))
val countersWithinLimits = fakeTracesCountersV2(3u)
val countersOvertLimits = fakeTracesCountersV2(5u, mapOf(Pair(TracingModuleV2.ADD, 11u)))
assertThat(countersWithinLimits.oversizedTraces(limits)).isEmpty()
val oversizedTraces = countersOvertLimits.oversizedTraces(limits)
assertThat(oversizedTraces).hasSize(1)
assertThat(oversizedTraces.first()).isEqualTo(Triple(TracingModuleV1.ADD, 11u, 10u))
assertThat(oversizedTraces.first()).isEqualTo(Triple(TracingModuleV2.ADD, 11u, 10u))
}
}

View File

@@ -3,17 +3,6 @@ package net.consensys.linea.traces
import kotlin.random.Random
import kotlin.random.nextUInt
fun fakeTracesCountersV1(
defaultValue: UInt?,
moduleValue: Map<TracingModuleV1, UInt> = emptyMap()
): TracesCountersV1 {
return TracesCountersV1(
TracingModuleV1.entries.associateWith {
moduleValue[it] ?: defaultValue ?: Random.nextUInt(0u, UInt.MAX_VALUE)
}
)
}
fun fakeTracesCountersV2(
defaultValue: UInt?,
moduleValue: Map<TracingModuleV2, UInt> = emptyMap()

View File

@@ -57,9 +57,6 @@ include 'coordinator:ethereum:gas-pricing:static-cap'
include 'coordinator:ethereum:gas-pricing:dynamic-cap'
include 'testing-tools'
include 'testing-tools:app'
include 'traces-api-facade:app'
include 'traces-api-facade:core'
include 'traces-api-facade:conflation'
include 'transaction-decoder-tool'
include 'transaction-exclusion-api:app'
include 'transaction-exclusion-api:core'

View File

@@ -1,31 +0,0 @@
FROM openjdk:21-slim-bullseye
RUN apt-get update \
&& apt-get install curl -y \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /opt/consensys/linea/traces-api-facade
# copy application
COPY --from=jar ./app-all.jar /opt/consensys/linea/traces-api-facade/libs/
RUN mkdir -p /opt/consensys/linea/traces-api-facade/logs
# Build-time metadata as defined at http://label-schema.org
ARG BUILD_DATE
ARG VCS_REF
ARG VERSION
LABEL org.label-schema.build-date=$BUILD_DATE \
org.label-schema.name="traces-api-facade" \
org.label-schema.description="Linea Traces API" \
org.label-schema.url="https://consensys.io/" \
org.label-schema.vcs-ref=$VCS_REF \
org.label-schema.vcs-url="https://github.com/ConsenSys/linea-monorepo" \
org.label-schema.vendor="ConsenSys" \
org.label-schema.version=$VERSION \
org.label-schema.schema-version="1.0"
WORKDIR /opt/consensys/linea/traces-api-facade/

View File

@@ -1,103 +0,0 @@
# Traces API Facade
This micro-service implements an API on top of the traces generated by zkGeth in json files to the file system.
## V1 API Methods
### rollup_getBlockTracesCountersV1
```bash
curl -H 'content-type:application/json' --data '{
"id": "1",
"jsonrpc": "2.0",
"method": "rollup_getBlockTracesCountersV1",
"params": {
"block": {
"blockNumber": "1",
"blockHash": "0xab538e7ab831af9442aab00443ee9803907654359dfcdfe1755f1a98fb87eafd"
},
"rawExecutionTracesVersion": "0.0.1",
"expectedTracesApiVersion": "0.0.2"
}
}' http://127.0.0.1:8080
```
### rollup_generateConflatedTracesToFileV1
```bash
curl -H 'content-type:application/json' --data '{
"jsonrpc": "2.0",
"id": "53",
"method": "rollup_generateConflatedTracesToFileV1",
"params": [
{
"blockNumber": "0x1",
"blockHash": "0xa64ab6ad3196000bc28973b88d285bc789de4afb6d4541b3d894b3293e745035"
},
{
"blockNumber": "0x2",
"blockHash": "0x71b744b0bc0028c635fb096cace4fcabeb5ca311a42915e4707d9d549b132182"
},
{
"blockNumber": "0x3",
"blockHash": "0x0bbeddf6cc52f77935c5e4ba2e9c310a317cbdcfc4965e5defc6310912e0e8d2"
}
]
}' http://127.0.0.1:8080
```
### Traces Counters Response Example
```json
{
"jsonrpc": "2.0",
"id": "53",
"result": {
"tracesEngineVersion": "0.0.1",
"tracesCounters": {
"ADD": 41,
"BIN": 896,
"BIN_RT": 879,
"DISPATCHER": 119,
"EXT": 795,
"HUB": 349,
"INSTRUCTION_DECODER": 123,
"MMU_ID": 857,
"MOD": 854,
"MUL": 329,
"MXP": 901,
"PUB": 104,
"ROM": 981,
"SHF": 839,
"SHF_RT": 235
}
}
}
```
### Conflation Response Example
```json
{
"jsonrpc": "2.0",
"id": "53",
"result": {
"tracesEngineVersion": "0.0.1",
"conflatedTracesFileName": "13673-13675.conflated.v0.0.1.json.gz"
}
}
```
## V0 Api Methods - Deprecated
Same responses as V1
### rollup_getTracesCountersByBlockNumberV0
```bash
curl -H 'content-type:application/json' --data '{"jsonrpc":"2.0","id":"53","method":"rollup_getTracesCountersByBlockNumberV0","params":["0xDF1"]}' http://127.0.0.1:8080
```
### rollup_generateConflatedTracesToFileV0
```bash
curl -H 'content-type:application/json' --data '{"jsonrpc":"2.0","id":"53","method":"rollup_generateConflatedTracesToFileV0","params":["0xDF1", "0xDF3"]}' http://127.0.0.1:8080/
```
### rollup_getConflatedTracesV0
```bash
curl -H 'content-type:application/json' --data '{"jsonrpc":"2.0","id":"53","method":"rollup_generateConflatedTracesToFileV0","params":["3569", "356B"]}' http://127.0.0.1:8080/
```

View File

@@ -1,75 +0,0 @@
plugins {
id 'net.consensys.zkevm.kotlin-application-conventions'
id 'com.github.johnrengelman.shadow' version '7.1.2'
}
dependencies {
implementation project(':traces-api-facade:conflation')
implementation project(':traces-api-facade:core')
implementation project(':jvm-libs:generic:json-rpc')
implementation project(':jvm-libs:linea:core:metrics')
implementation project(':jvm-libs:linea:metrics:micrometer')
implementation project(':jvm-libs:generic:extensions:kotlin')
implementation project(':jvm-libs:generic:extensions:futures')
implementation project(':jvm-libs:generic:vertx-helper')
implementation "com.github.ben-manes.caffeine:caffeine:${libs.versions.caffeine.get()}"
implementation "io.vertx:vertx-core"
implementation "io.vertx:vertx-web"
implementation "io.vertx:vertx-health-check"
implementation "io.vertx:vertx-lang-kotlin"
implementation "io.vertx:vertx-config"
implementation "io.vertx:vertx-micrometer-metrics"
implementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
implementation "info.picocli:picocli:${libs.versions.picoli.get()}"
implementation "com.sksamuel.hoplite:hoplite-core:${libs.versions.hoplite.get()}"
implementation "com.sksamuel.hoplite:hoplite-toml:${libs.versions.hoplite.get()}"
implementation "io.micrometer:micrometer-registry-prometheus:${libs.versions.micrometer.get()}"
implementation "com.fasterxml.jackson.core:jackson-annotations:${libs.versions.jackson.get()}"
implementation "com.fasterxml.jackson.core:jackson-databind:${libs.versions.jackson.get()}"
implementation "com.fasterxml.jackson.module:jackson-module-kotlin:${libs.versions.jackson.get()}"
api("io.netty:netty-transport-native-epoll:${libs.versions.netty.get()}:linux-x86_64") {
because "It enables native transport for Linux."
// Note that its version should match netty version used in Vertx
}
api("io.netty:netty-transport-native-kqueue:${libs.versions.netty.get()}:osx-x86_64") {
because "It enables native transport for Mac OSX."
// Note that its version should match netty version used in Vertx
}
testImplementation "io.vertx:vertx-junit5"
testImplementation "io.rest-assured:rest-assured:${libs.versions.restassured.get()}"
testImplementation "io.rest-assured:json-schema-validator:${libs.versions.restassured.get()}"
}
application {
mainClass = 'net.consensys.linea.traces.app.TracesAppMain'
}
jar {
manifest {
attributes(
'Class-Path': configurations.runtimeClasspath.collect { it.getName() }.findAll {it.endsWith('jar') }.join(' '),
'Main-Class': 'net.consensys.linea.traces.app.TracesAppMain',
'Multi-Release': 'true'
)
}
}
run {
workingDir = rootProject.projectDir
jvmArgs = [
"-Dvertx.configurationFile=config/traces-api/vertx.json",
"-Dlog4j2.configurationFile=config/traces-api/log4j2-dev.xml"
] + System.properties.entrySet()
.findAll { it.key.startsWith("config") }
.collect { "-D${it.key}=${it.value}" }
args = [
"config/traces-api/traces-app-docker.config.toml",
"config/traces-api/traces-app-local-dev.config.overrides.toml"
]
}
test {
systemProperty "vertx.configurationFile", "vertx-options.json"
}

View File

@@ -1,122 +0,0 @@
package net.consensys.linea.traces.app
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import io.micrometer.core.instrument.MeterRegistry
import io.vertx.core.Future
import io.vertx.core.Vertx
import io.vertx.core.json.jackson.VertxModule
import io.vertx.micrometer.backends.BackendRegistries
import net.consensys.linea.TracesConflationServiceV1Impl
import net.consensys.linea.TracesCountingServiceWithRetry
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
import net.consensys.linea.traces.RawJsonTracesConflator
import net.consensys.linea.traces.RawJsonTracesCounter
import net.consensys.linea.traces.RawJsonTracesCounterV0
import net.consensys.linea.traces.app.api.Api
import net.consensys.linea.traces.app.api.ApiConfig
import net.consensys.linea.traces.app.api.TracesSemanticVersionValidator
import net.consensys.linea.traces.repository.FilesystemConflatedTracesRepository
import net.consensys.linea.traces.repository.FilesystemTracesRepositoryV1
import net.consensys.linea.vertx.loadVertxConfig
import org.apache.logging.log4j.LogManager
import java.nio.file.Files
import java.nio.file.Path
data class AppConfig(
val inputTracesDirectory: String,
val outputTracesDirectory: String,
val tracesApiVersion: String,
val api: ApiConfig,
val tracesFileExtension: String,
// This is meant fo be false for local Debug only. Not in prod
// Override in CLI with --Dconfig.override.conflated_trace_compression=false
val conflatedTracesCompression: Boolean = true
)
/**
 * Traces API facade application.
 *
 * Wires filesystem-backed repositories into the traces counting/conflation
 * services and exposes them through the JSON-RPC [Api] server, with
 * Micrometer-backed metrics. Input/output trace directories are validated
 * (and the output directory created if missing) before Vert.x starts.
 */
class TracesApiFacadeApp(config: AppConfig) {
  private val log = LogManager.getLogger(TracesApiFacadeApp::class.java)
  private val meterRegistry: MeterRegistry
  private val vertx: Vertx
  private var api: Api

  init {
    log.debug("System properties: {}", System.getProperties())
    val vertxConfig = loadVertxConfig()
    log.debug("Vertx full configs: {}", vertxConfig)
    log.info("App configs: {}", config)
    // Fail fast if the configured traces directories are unusable.
    validateConfig(config)
    this.vertx = Vertx.vertx(vertxConfig)
    this.meterRegistry = BackendRegistries.getDefaultNow()
    val tracesRepository =
      FilesystemTracesRepositoryV1(
        FilesystemTracesRepositoryV1.Config(
          Path.of(config.inputTracesDirectory),
          config.tracesFileExtension
        )
      )
    // VertxModule lets Jackson serialize Vert.x JsonObject values directly.
    val jsonSerializerObjectMapper = jacksonObjectMapper().apply {
      registerModule(VertxModule())
    }
    val conflatedTracesRepository =
      FilesystemConflatedTracesRepository(
        vertx,
        Path.of(config.outputTracesDirectory),
        gzipCompressionEnabled = config.conflatedTracesCompression,
        jsonSerializerObjectMapper
      )
    val tracesCounterService =
      TracesCountingServiceWithRetry(
        repository = tracesRepository,
        tracesCounter = RawJsonTracesCounter(config.tracesApiVersion),
        tracesCounterV0 = RawJsonTracesCounterV0(config.tracesApiVersion)
      )
    val tracesConflationService =
      TracesConflationServiceV1Impl(
        tracesRepository,
        RawJsonTracesConflator(config.tracesApiVersion),
        conflatedTracesRepository,
        config.tracesApiVersion
      )
    val semVerValidator = TracesSemanticVersionValidator(
      TracesSemanticVersionValidator.SemanticVersion.fromString(config.tracesApiVersion)
    )
    this.api =
      Api(
        configs = config.api,
        vertx = vertx,
        metricsFacade = MicrometerMetricsFacade(meterRegistry),
        semVerValidator = semVerValidator,
        tracesCountingService = tracesCounterService,
        tracesConflationService = tracesConflationService
      )
  }

  /** Starts the JSON-RPC API server; completes when it is listening. */
  fun start(): Future<*> {
    return api.start().onComplete { log.info("App successfully started") }
  }

  /** Stops the JSON-RPC API server. */
  fun stop(): Future<*> {
    // Fixed log-message typo: "Shooting down" -> "Shutting down".
    log.info("Shutting down app..")
    return api.stop().onComplete { log.info("App successfully closed") }
  }

  /**
   * Validates the configured directories.
   * @throws Exception if the input directory is missing or either directory
   *   is unreadable; the output directory is created when absent.
   */
  private fun validateConfig(config: AppConfig): Boolean {
    assertDirectory(Path.of(config.inputTracesDirectory).toAbsolutePath())
    assertDirectory(
      Path.of(config.outputTracesDirectory).toAbsolutePath(),
      createIfDoesNotExist = true
    )
    return true
  }

  // Ensures [directory] exists (optionally creating it) and is readable.
  private fun assertDirectory(directory: Path, createIfDoesNotExist: Boolean = false) {
    if (!Files.exists(directory)) {
      if (createIfDoesNotExist) {
        Files.createDirectories(directory)
      } else {
        throw Exception("Directory not found: $directory")
      }
    }
    if (!Files.isReadable(directory)) throw Exception("Cannot read directory: $directory")
  }
}

View File

@@ -1,114 +0,0 @@
package net.consensys.linea.traces.app
import com.sksamuel.hoplite.ConfigFailure
import com.sksamuel.hoplite.ConfigLoaderBuilder
import com.sksamuel.hoplite.addFileSource
import com.sksamuel.hoplite.fp.Validated
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import picocli.CommandLine.Command
import picocli.CommandLine.Parameters
import java.io.File
import java.io.PrintWriter
import java.nio.charset.Charset
import java.util.concurrent.Callable
@Command(
name = TracesAppCli.COMMAND_NAME,
showDefaultValues = true,
abbreviateSynopsis = true,
description = ["Runs Traces API Facade"],
version = ["0.0.1"],
synopsisHeading = "%n",
descriptionHeading = "%nDescription:%n%n",
optionListHeading = "%nOptions:%n",
footerHeading = "%n"
)
class TracesAppCli
internal constructor(private val errorWriter: PrintWriter, private val startAction: StartAction) :
  Callable<Int> {

  @Parameters(paramLabel = "CONFIG.toml", description = ["Configuration files"])
  private val configFiles: List<File>? = null

  /**
   * Entry point invoked by picocli: loads the configuration files and hands
   * the merged [AppConfig] to [startAction]. Returns 0 on success, 1 on any
   * usage or configuration error.
   */
  override fun call(): Int {
    return try {
      val files = configFiles
      if (files == null) {
        errorWriter.println("Please provide a configuration file!")
        printUsage(errorWriter)
        return 1
      }
      // Stop at the first unreadable file; canReadFile reports the error.
      if (files.any { !canReadFile(it) }) {
        return 1
      }
      val loaded: Validated<ConfigFailure, AppConfig> = configs(files)
      if (loaded.isInvalid()) {
        errorWriter.println(loaded.getInvalidUnsafe().description())
        return 1
      }
      startAction.start(loaded.getUnsafe())
      0
    } catch (e: Exception) {
      reportUserError(e)
      1
    }
  }

  // Reports and rejects configuration files this process cannot read.
  private fun canReadFile(file: File): Boolean {
    if (!file.canRead()) {
      errorWriter.println("Cannot read configuration file '${file.absolutePath}'")
      return false
    }
    return true
  }

  /**
   * Loads and merges [configFiles] into an [AppConfig]. Hoplite gives
   * precedence to sources added first, so files are added in reverse order
   * to let later files override earlier ones.
   */
  fun configs(configFiles: List<File>): Validated<ConfigFailure, AppConfig> {
    val confBuilder: ConfigLoaderBuilder = ConfigLoaderBuilder.empty().addDefaults()
    configFiles.asReversed().forEach { file ->
      confBuilder.addFileSource(file, false)
    }
    return confBuilder.build().loadConfig<AppConfig>(emptyList())
  }

  // Logs a fatal startup error and echoes it (plus usage) to the error writer.
  fun reportUserError(ex: Throwable) {
    logger.fatal(ex.message, ex)
    errorWriter.println(ex.message)
    printUsage(errorWriter)
  }

  private fun printUsage(outputWriter: PrintWriter) {
    outputWriter.println()
    outputWriter.println("To display full help:")
    outputWriter.println(COMMAND_NAME + " --help")
  }

  /**
   * Not using a static field for this log instance because some code in this class executes prior
   * to the logging configuration being applied so it's not always safe to use the logger.
   *
   * Where this is used we also ensure the messages are printed to the error writer so they will be
   * printed even if logging is not yet configured.
   *
   * @return the logger for this class
   */
  private val logger: Logger = LogManager.getLogger()

  /** Callback receiving the fully loaded configuration. */
  fun interface StartAction {
    fun start(configs: AppConfig)
  }

  companion object {
    const val COMMAND_NAME = "traces"

    /** Builds a CLI wired to [startAction], writing errors to stderr. */
    fun withAction(startAction: StartAction): TracesAppCli {
      val errorWriter = PrintWriter(System.err, true, Charset.defaultCharset())
      return TracesAppCli(errorWriter, startAction)
    }
  }
}

View File

@@ -1,40 +0,0 @@
package net.consensys.linea.traces.app
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.core.LoggerContext
import org.apache.logging.log4j.core.config.Configurator
import picocli.CommandLine
import kotlin.system.exitProcess
/** Process entry point: parses CLI args and boots [TracesApiFacadeApp]. */
class TracesAppMain {
  companion object {
    private val log = LogManager.getLogger(TracesAppMain::class)

    @JvmStatic
    fun main(args: Array<String>) {
      CommandLine(TracesAppCli.withAction(::startApp)).execute(*args)
    }

    // Starts the app and registers a JVM shutdown hook that stops it cleanly.
    private fun startApp(configs: AppConfig) {
      try {
        val app = TracesApiFacadeApp(configs)
        val shutdownHook = Thread {
          app.stop()
          val loggingContext = LogManager.getContext()
          if (loggingContext is LoggerContext) {
            // Disable log4j auto shutdown hook is not used otherwise
            // Messages in App.stop won't appear in the logs
            Configurator.shutdown(loggingContext)
          }
        }
        Runtime.getRuntime().addShutdownHook(shutdownHook)
        app.start()
      } catch (t: Throwable) {
        log.error("Startup failure: ", t)
        exitProcess(1)
      }
    }
  }
}

View File

@@ -1,76 +0,0 @@
package net.consensys.linea.traces.app.api
import io.vertx.core.DeploymentOptions
import io.vertx.core.Future
import io.vertx.core.Vertx
import net.consensys.linea.TracesConflationServiceV1
import net.consensys.linea.TracesCountingServiceV1
import net.consensys.linea.jsonrpc.HttpRequestHandler
import net.consensys.linea.jsonrpc.JsonRpcMessageHandler
import net.consensys.linea.jsonrpc.JsonRpcMessageProcessor
import net.consensys.linea.jsonrpc.JsonRpcRequestRouter
import net.consensys.linea.jsonrpc.httpserver.HttpJsonRpcServer
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.vertx.ObservabilityServer
/**
 * Configuration for the traces JSON-RPC [Api] server.
 *
 * @property port HTTP port of the JSON-RPC server
 * @property observabilityPort port of the separate observability server
 * @property path HTTP path the JSON-RPC server is mounted on
 * @property numberOfVerticles number of server verticle instances to deploy;
 *   values <= 0 fall back to the CPU count (see [Api.start])
 */
data class ApiConfig(
val port: UInt,
val observabilityPort: UInt,
val path: String = "/",
val numberOfVerticles: UInt
)
/**
 * JSON-RPC API server for the traces facade.
 *
 * [start] deploys an [HttpJsonRpcServer] (one per verticle instance) routing
 * the V1 methods, plus an [ObservabilityServer]; [stop] undeploys both.
 */
class Api(
  private val configs: ApiConfig,
  private val vertx: Vertx,
  private val metricsFacade: MetricsFacade,
  private val semVerValidator: TracesSemanticVersionValidator,
  private val tracesCountingService: TracesCountingServiceV1,
  private val tracesConflationService: TracesConflationServiceV1
) {
  private var jsonRpcServerId: String? = null
  private var observabilityServerId: String? = null

  fun start(): Future<*> {
    val requestHandlersV1 =
      mapOf(
        ApiMethod.ROLLUP_GET_BLOCK_TRACES_COUNTERS_V1.method to
          TracesCounterRequestHandlerV1(tracesCountingService, semVerValidator),
        ApiMethod.ROLLUP_GENERATE_CONFLATED_TRACES_TO_FILE_V1.method to
          GenerateConflatedTracesToFileRequestHandlerV1(tracesConflationService, semVerValidator),
        // Just for Debug/Dev Purposes
        ApiMethod.ROLLUP_GET_CONFLATED_TRACES_V1.method to
          GetConflatedTracesRequestHandlerV1(tracesConflationService, semVerValidator)
      )
    val messageHandler: JsonRpcMessageHandler =
      JsonRpcMessageProcessor(JsonRpcRequestRouter(requestHandlersV1), metricsFacade)
    // Non-positive configured counts fall back to one verticle per CPU.
    val instanceCount: Int = configs.numberOfVerticles.toInt()
      .takeIf { it > 0 }
      ?: Runtime.getRuntime().availableProcessors()
    val observabilityServer =
      ObservabilityServer(ObservabilityServer.Config("traces-api", configs.observabilityPort.toInt()))
    return vertx
      .deployVerticle(
        { HttpJsonRpcServer(configs.port, configs.path, HttpRequestHandler(messageHandler)) },
        DeploymentOptions().setInstances(instanceCount)
      )
      .compose { deployedId: String ->
        jsonRpcServerId = deployedId
        vertx.deployVerticle(observabilityServer).onSuccess { monitorId ->
          this.observabilityServerId = monitorId
        }
      }
  }

  fun stop(): Future<*> {
    val undeployRpc = this.jsonRpcServerId?.let { vertx.undeploy(it) } ?: Future.succeededFuture(null)
    val undeployObservability =
      this.observabilityServerId?.let { vertx.undeploy(it) } ?: Future.succeededFuture(null)
    return Future.all(undeployRpc, undeployObservability)
  }
}

View File

@@ -1,11 +0,0 @@
package net.consensys.linea.traces.app.api
/**
 * JSON-RPC method names of the traces API.
 *
 * [method] is the wire-level method string used as the routing key.
 * Note: only the V1 entries are registered as handlers in [Api.start];
 * the V0 entries are presumably legacy names — verify before removing.
 */
enum class ApiMethod(val method: String) {
ROLLUP_GET_TRACES_COUNTERS_BY_BLOCK_NUMBER_V0("rollup_getTracesCountersByBlockNumberV0"),
ROLLUP_GENERATE_CONFLATED_TRACES_TO_FILE_V0("rollup_generateConflatedTracesToFileV0"),
ROLLUP_GET_CONFLATED_TRACES_V0("rollup_getConflatedTracesV0"),
ROLLUP_GET_BLOCK_TRACES_COUNTERS_V1("rollup_getBlockTracesCountersV1"),
ROLLUP_GENERATE_CONFLATED_TRACES_TO_FILE_V1("rollup_generateConflatedTracesToFileV1"),
ROLLUP_GET_CONFLATED_TRACES_V1("rollup_getConflatedTracesV1")
}

View File

@@ -1,32 +0,0 @@
package net.consensys.linea.traces.app.api
import linea.kotlin.fromHexString
import net.consensys.linea.jsonrpc.argument.getArgument
import org.apache.tuweni.bytes.Bytes32
/** Helpers for extracting and parsing positional JSON-RPC arguments. */
object ArgumentParser {
  /** Returns the argument at [index] as a string, requiring a "0x" prefix. */
  fun getHexString(arguments: List<*>, index: Int, argumentName: String): String =
    getHexStringParsed(arguments, index, argumentName) { it }

  /**
   * Returns the "0x"-prefixed string argument at [index] transformed by [parser].
   * @throws IllegalArgumentException if the argument lacks the "0x" prefix
   */
  fun <T> getHexStringParsed(arguments: List<*>, index: Int, argumentName: String, parser: (String) -> T): T {
    val hexValue = getArgument(String::class, arguments, index, argumentName)
    require(hexValue.startsWith("0x")) { "$argumentName must have '0x' hexadecimal prefix." }
    return parser(hexValue)
  }

  /**
   * Parses the argument at [index] as a hexadecimal block number, rewrapping
   * parse failures with the argument name for a clearer error message.
   */
  fun getBlockNumber(arguments: List<*>, index: Int, argumentName: String): ULong =
    getHexStringParsed(arguments, index, argumentName) { hex ->
      try {
        ULong.fromHexString(hex)
      } catch (ex: NumberFormatException) {
        throw NumberFormatException("${ex.message} on argument $argumentName")
      }
    }

  /** Parses the argument at [index] as a 32-byte hexadecimal value. */
  fun getBytes32(arguments: List<*>, index: Int, argumentName: String): Bytes32 =
    getHexStringParsed(arguments, index, argumentName, Bytes32::fromHexString)
}

View File

@@ -1,229 +0,0 @@
package net.consensys.linea.traces.app.api
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.flatMap
import com.github.michaelbull.result.get
import com.github.michaelbull.result.map
import com.github.michaelbull.result.mapError
import io.vertx.core.Future
import io.vertx.core.json.JsonObject
import io.vertx.ext.auth.User
import linea.domain.BlockNumberAndHash
import linea.kotlin.decodeHex
import net.consensys.linea.TracesConflationServiceV1
import net.consensys.linea.TracesCountingServiceV1
import net.consensys.linea.TracesError
import net.consensys.linea.VersionedResult
import net.consensys.linea.async.toVertxFuture
import net.consensys.linea.jsonrpc.JsonRpcErrorResponse
import net.consensys.linea.jsonrpc.JsonRpcRequest
import net.consensys.linea.jsonrpc.JsonRpcRequestHandler
import net.consensys.linea.jsonrpc.JsonRpcRequestMapParams
import net.consensys.linea.jsonrpc.JsonRpcSuccessResponse
import tech.pegasys.teku.infrastructure.async.SafeFuture
// Builds a BlockNumberAndHash from a JSON object with decimal "blockNumber"
// and hex "blockHash" string fields.
private fun parseBlockNumberAndHash(json: JsonObject): BlockNumberAndHash {
  val number = json.getString("blockNumber").toULong()
  val hash = json.getString("blockHash").decodeHex()
  return BlockNumberAndHash(number, hash)
}
/**
 * Ensures the request carries a non-empty params map, returning it typed as
 * [JsonRpcRequestMapParams] or the appropriate JSON-RPC error response.
 */
internal fun validateParams(request: JsonRpcRequest): Result<JsonRpcRequestMapParams, JsonRpcErrorResponse> {
  if (request.params !is Map<*, *>) {
    return Err(
      JsonRpcErrorResponse.invalidParams(
        request.id,
        "params should be an object"
      )
    )
  }
  return try {
    // Smart-casts `request` to JsonRpcRequestMapParams for the Ok branch;
    // a failed cast falls through to the catch below.
    val mapParamsRequest = request as JsonRpcRequestMapParams
    when {
      mapParamsRequest.params.isEmpty() ->
        Err(
          JsonRpcErrorResponse.invalidParams(
            request.id,
            "Parameters map is empty!"
          )
        )
      else -> Ok(request)
    }
  } catch (e: Exception) {
    Err(JsonRpcErrorResponse.invalidRequest())
  }
}
/**
 * Handler for `rollup_getBlockTracesCountersV1` (see registration in [Api]):
 * returns the traces counters for a single block.
 *
 * Expected params: "expectedTracesApiVersion" (client's required API version),
 * "rawExecutionTracesVersion", and "block" ({blockNumber, blockHash}).
 */
class TracesCounterRequestHandlerV1(
private val tracesCountingService: TracesCountingServiceV1,
private val validator: TracesSemanticVersionValidator
) :
JsonRpcRequestHandler {
override fun invoke(
user: User?,
request: JsonRpcRequest,
requestJson: JsonObject
): Future<Result<JsonRpcSuccessResponse, JsonRpcErrorResponse>> {
// Parse + validate params; note this `try` returns early (completed future
// with an error response) on either a validation Err or a thrown exception.
val (block, version) = try {
val parsingResult = validateParams(request).flatMap { validatedRequest ->
validator.validateExpectedVersion(
validatedRequest.id,
validatedRequest.params["expectedTracesApiVersion"].toString()
).map {
val version =
validatedRequest.params["rawExecutionTracesVersion"].toString()
Pair(
parseBlockNumberAndHash(JsonObject.mapFrom(validatedRequest.params["block"])),
version
)
}
}
if (parsingResult is Err) {
return Future.succeededFuture(parsingResult)
} else {
// Ok is guaranteed here, hence the non-null assertion.
parsingResult.get()!!
}
} catch (e: Exception) {
// Any parsing failure (bad hex, missing key, malformed version) becomes
// an invalid-params error carrying the exception message.
return Future.succeededFuture(
Err(
JsonRpcErrorResponse.invalidParams(
request.id,
e.message
)
)
)
}
return tracesCountingService
.getBlockTracesCounters(block, version)
.thenApply { result ->
result
.map {
val rpcResult =
JsonObject()
.put("tracesEngineVersion", it.version)
.put("blockNumber", block.number.toString())
.put("blockL1Size", it.result.blockL1Size.toString())
.put(
"tracesCounters",
// NOTE(review): the inner lambda's `it` (a counter entry pair)
// shadows the outer `it` (the versioned result).
it.result.tracesCounters.entries().associate { it.first to it.second.toLong() }
)
JsonRpcSuccessResponse(request.id, rpcResult)
}
.mapError { error -> JsonRpcErrorResponse(request.id, jsonRpcError(error)) }
}
.toVertxFuture()
}
}
/**
 * Base handler for conflation-style requests taking a list of blocks.
 *
 * Subclasses implement [tracesContent] to produce the response payload;
 * this class handles param validation ("expectedTracesApiVersion",
 * "rawExecutionTracesVersion", "blocks") and error mapping.
 */
abstract class AbstractTracesConflationRequestHandlerV1<T>(private val validator: TracesSemanticVersionValidator) :
JsonRpcRequestHandler {
/** Produces the result payload for the validated [blocks] and traces [version]. */
abstract fun tracesContent(
blocks: List<BlockNumberAndHash>,
version: String
): SafeFuture<Result<T, TracesError>>
override fun invoke(
user: User?,
request: JsonRpcRequest,
requestJson: JsonObject
): Future<Result<JsonRpcSuccessResponse, JsonRpcErrorResponse>> {
// Parse + validate params; this `try` returns early (completed future with
// an error response) on either a validation Err or a thrown exception.
val (blocks: List<BlockNumberAndHash>, version: String) = try {
val parsingResult = validateParams(request).flatMap { validatedRequest ->
validator.validateExpectedVersion(
validatedRequest.id,
validatedRequest.params["expectedTracesApiVersion"].toString()
).map {
val version = validatedRequest.params["rawExecutionTracesVersion"].toString()
// Unchecked cast: a non-list "blocks" value throws and is caught below.
val blocks = validatedRequest.params["blocks"] as List<Any?>
Pair(
blocks.map { blockJson ->
parseBlockNumberAndHash(
JsonObject.mapFrom(blockJson)
)
},
version
)
}
}
if (parsingResult is Err) {
return Future.succeededFuture(parsingResult)
} else {
// Ok is guaranteed here, hence the non-null assertion.
parsingResult.get()!!
}
} catch (e: Exception) {
return Future.succeededFuture(
Err(
JsonRpcErrorResponse.invalidParams(
request.id,
e.message
)
)
)
}
// An empty block list is rejected before reaching the service.
if (blocks.isEmpty()) {
return Future.succeededFuture(
Err(
JsonRpcErrorResponse.invalidParams(
request.id,
"Empty list of blocks!"
)
)
)
}
return tracesContent(blocks, version)
.thenApply { result ->
result
.map { JsonRpcSuccessResponse(request.id, it) }
.mapError { error -> JsonRpcErrorResponse(request.id, jsonRpcError(error)) }
}
.toVertxFuture()
}
}
/**
 * Handler for `rollup_generateConflatedTracesToFileV1`: conflates the traces
 * of the given blocks to a file and returns the resulting file name.
 */
class GenerateConflatedTracesToFileRequestHandlerV1(
  private val service: TracesConflationServiceV1,
  validator: TracesSemanticVersionValidator
) :
  AbstractTracesConflationRequestHandlerV1<JsonObject>(validator) {
  override fun tracesContent(
    blocks: List<BlockNumberAndHash>,
    version: String
  ): SafeFuture<Result<JsonObject, TracesError>> {
    // Sort defensively so start/end block numbers in the response are correct
    // regardless of the order the client sent the blocks in.
    val orderedBlocks = blocks.sortedBy { it.number }
    return service.generateConflatedTracesToFile(orderedBlocks, version)
      .thenApply { outcome: Result<VersionedResult<String>, TracesError> ->
        outcome.map { versioned ->
          JsonObject()
            .put("tracesEngineVersion", versioned.version)
            .put("startBlockNumber", orderedBlocks.first().number.toString())
            .put("endBlockNumber", orderedBlocks.last().number.toString())
            .put("conflatedTracesFileName", versioned.result)
        }
      }
  }
}
/**
 * Handler for `rollup_getConflatedTracesV1`: returns the conflated traces
 * content inline. Registered for debug/dev purposes (see [Api]).
 */
class GetConflatedTracesRequestHandlerV1(
  private val service: TracesConflationServiceV1,
  validator: TracesSemanticVersionValidator
) :
  AbstractTracesConflationRequestHandlerV1<JsonObject>(validator) {
  override fun tracesContent(
    blocks: List<BlockNumberAndHash>,
    version: String
  ): SafeFuture<Result<JsonObject, TracesError>> {
    return service.getConflatedTraces(blocks, version)
      .thenApply { outcome: Result<VersionedResult<JsonObject>, TracesError> ->
        outcome.map { versioned ->
          JsonObject()
            .put("tracesEngineVersion", versioned.version)
            .put("conflatedTraces", versioned.result)
        }
      }
  }
}

View File

@@ -1,34 +0,0 @@
package net.consensys.linea.traces.app.api
import net.consensys.linea.ErrorType
import net.consensys.linea.TracesError
import net.consensys.linea.jsonrpc.JsonRpcError
/**
 * JSON-RPC error codes returned by the traces API.
 *
 * Codes in the -4000 range are caused by the caller's input; the -5000 range
 * indicates server-side/system problems.
 */
enum class TracesErrorCodes(val code: Int, val message: String) {
  // User' error codes
  INVALID_BLOCK_NUMBERS(-4000, "Invalid block numbers"),
  TRACES_UNAVAILABLE(-4001, "Traces not available"),

  // App/System' error codes
  // Fixed message typo: "Trances" -> "Traces".
  TRACES_AMBIGUITY(-5001, "Traces Ambiguity: multiple traces found for the same block"),
  TRACES_INVALID_JSON_FORMAT(-5002, "Traces file has invalid json format."),
  TRACES_INVALID_CONTENT(-5003, "Traces file has invalid content.");

  /** Wraps this code/message (plus optional [data]) into a [JsonRpcError]. */
  fun toErrorObject(data: Any? = null): JsonRpcError {
    return JsonRpcError(this.code, this.message, data)
  }
}
/**
 * Maps an internal [TracesError] to the corresponding JSON-RPC error object,
 * carrying the error detail as the error's data field.
 */
fun jsonRpcError(appError: TracesError): JsonRpcError {
  // Exhaustive over ErrorType: the compiler flags any newly added variant.
  val errorCode = when (appError.errorType) {
    ErrorType.INVALID_BLOCK_NUMBERS_RANGE -> TracesErrorCodes.INVALID_BLOCK_NUMBERS
    ErrorType.TRACES_UNAVAILABLE -> TracesErrorCodes.TRACES_UNAVAILABLE
    ErrorType.TRACES_AMBIGUITY -> TracesErrorCodes.TRACES_AMBIGUITY
    ErrorType.WRONG_JSON_FORMAT -> TracesErrorCodes.TRACES_INVALID_JSON_FORMAT
    ErrorType.WRONG_JSON_CONTENT -> TracesErrorCodes.TRACES_INVALID_CONTENT
  }
  return errorCode.toErrorObject(appError.errorDetail)
}

View File

@@ -1,52 +0,0 @@
package net.consensys.linea.traces.app.api
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import net.consensys.linea.jsonrpc.JsonRpcErrorResponse
/**
 * Validates that the traces API version requested by a client is compatible
 * with the version this server was configured with ([apiVersion]).
 */
class TracesSemanticVersionValidator(private val apiVersion: SemanticVersion) {
  // Non-core portion is not supported
  data class SemanticVersion(val major: UInt, val minor: UInt, val patch: UInt) {
    companion object {
      private val simplifiedSemverRegex = """(\d+)\.(\d+)\.(\d+)""".toRegex()

      /**
       * Parses the first "major.minor.patch" occurrence in [s].
       * @throws IllegalArgumentException if [s] contains no such pattern
       *   (previously a bare [NoSuchElementException] escaped from
       *   `findAll(...).first()` with no context about the bad input).
       */
      fun fromString(s: String): SemanticVersion {
        val match = requireNotNull(simplifiedSemverRegex.find(s)) {
          "Invalid semantic version: '$s' (expected <major>.<minor>.<patch>)"
        }
        val (major, minor, patch) = match.destructured
        return SemanticVersion(
          major.toUInt(),
          minor.toUInt(),
          patch.toUInt()
        )
      }
    }

    override fun toString(): String = "$major.$minor.$patch"

    // `this` considered to be server version and the argument to be client version
    fun isCompatible(clientRequestedVersion: SemanticVersion): Boolean {
      return when {
        (clientRequestedVersion.major != major) -> false
        (clientRequestedVersion.minor > minor) -> false
        (clientRequestedVersion.minor == minor && clientRequestedVersion.patch > patch) -> false
        else -> true
      }
    }
  }

  /**
   * Returns Ok when [expectedVersion] is compatible with the server's version,
   * otherwise an invalid-params error response for [requestId].
   */
  fun validateExpectedVersion(
    requestId: Any,
    expectedVersion: String
  ): Result<Unit, JsonRpcErrorResponse> {
    val clientRequestedVersion = SemanticVersion.fromString(expectedVersion)
    return when (apiVersion.isCompatible(clientRequestedVersion)) {
      true -> Ok(Unit)
      false -> Err(
        JsonRpcErrorResponse.invalidParams(
          requestId,
          "Client requested version $clientRequestedVersion is not compatible to server version $apiVersion"
        )
      )
    }
  }
}

View File

@@ -1,142 +0,0 @@
package net.consensys.linea.traces.repository
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject
import net.consensys.linea.ConflatedTracesRepository
import net.consensys.linea.TracesConflation
import net.consensys.linea.async.toSafeFuture
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.io.FileOutputStream
import java.io.OutputStream
import java.nio.file.Path
import java.util.concurrent.Callable
import java.util.zip.GZIPOutputStream
import kotlin.system.measureTimeMillis
/**
 * Stores conflated traces as JSON files (optionally gzip-compressed) on the
 * local filesystem.
 *
 * Files are first written under a ".inprogress" suffix and only renamed to
 * their final name once fully written, so concurrent readers do not pick up
 * partially written files.
 */
class FilesystemConflatedTracesRepository(
  private val vertx: Vertx,
  private val tracesDirectory: Path,
  private val gzipCompressionEnabled: Boolean = true,
  private val objectMapper: ObjectMapper = jacksonObjectMapper()
) : ConflatedTracesRepository {
  private val log: Logger = LogManager.getLogger(this.javaClass)

  /** Final file name, e.g. "10-20.conflated.v1.2.3.json.gz". */
  private fun destinationFileName(
    startBlockNumber: ULong,
    endBlockNumber: ULong,
    tracesVersion: String
  ): String {
    val extension = if (gzipCompressionEnabled) "json.gz" else "json"
    return "$startBlockNumber-$endBlockNumber.conflated.v$tracesVersion.$extension"
  }

  /** Temporary name used while the file is still being written. */
  private fun inProgressDestinationFileName(
    startBlockNumber: ULong,
    endBlockNumber: ULong,
    tracesVersion: String
  ): String {
    return "${destinationFileName(startBlockNumber, endBlockNumber, tracesVersion)}.inprogress"
  }

  /** Returns the conflated traces file name if it already exists, else null. */
  override fun findConflatedTraces(
    startBlockNumber: ULong,
    endBlockNumber: ULong,
    tracesVersion: String
  ): SafeFuture<String?> {
    val fileName = destinationFileName(
      startBlockNumber,
      endBlockNumber,
      tracesVersion
    )
    val file = tracesDirectory.resolve(fileName).toFile()
    return if (file.exists()) {
      SafeFuture.completedFuture(file.name)
    } else {
      SafeFuture.completedFuture(null)
    }
  }

  /**
   * Serializes [conflation] to disk on a Vert.x worker thread and returns the
   * final file name.
   *
   * @throws IllegalStateException (via the returned future) if the finished
   *   file cannot be moved to its final name — previously File.renameTo's
   *   boolean result was dropped, silently returning the name of a file that
   *   was never created.
   */
  override fun saveConflatedTraces(conflation: TracesConflation): SafeFuture<String> {
    val inProgressFileName =
      inProgressDestinationFileName(
        conflation.startBlockNumber,
        conflation.endBlockNumber,
        conflation.traces.version
      )
    val inProgressFilePath = tracesDirectory.resolve(inProgressFileName)
    return vertx
      .executeBlocking(
        Callable {
          if (gzipCompressionEnabled) {
            saveConflatedTracesGzipCompressed(inProgressFilePath, conflation.traces.result)
          } else {
            saveConflatedTracesRawJson(inProgressFilePath, conflation.traces.result)
          }
        },
        false
      )
      .toSafeFuture()
      .thenApply {
        val finalFileName = destinationFileName(
          conflation.startBlockNumber,
          conflation.endBlockNumber,
          conflation.traces.version
        )
        val finalPath = tracesDirectory.resolve(finalFileName)
        // File.renameTo reports failure only via its return value; surface it
        // instead of handing back a file name that does not exist.
        check(inProgressFilePath.toFile().renameTo(finalPath.toFile())) {
          "Failed to rename $inProgressFilePath to $finalPath"
        }
        finalFileName
      }
  }

  // Writes traces as gzip-compressed JSON; logs timing breakdown at debug.
  private fun saveConflatedTracesGzipCompressed(filePath: Path, traces: JsonObject) {
    var serializationTime: Long
    log.info("saving conflation to {}", filePath.fileName)
    val time = measureTimeMillis {
      FileOutputStream(filePath.toString()).use { outputStream: OutputStream ->
        GZIPOutputStream(outputStream).use { gzipOutputStream ->
          serializationTime = measureTimeMillis { objectMapper.writeValue(gzipOutputStream, traces) }
        }
      }
    }
    log.debug(
      "total_time={}ms (json_encode + gzip + fs_write={}) in {}",
      time,
      serializationTime,
      filePath.fileName
    )
  }

  // Writes traces as plain (uncompressed) JSON text.
  private fun saveConflatedTracesRawJson(filePath: Path, traces: JsonObject) {
    FileOutputStream(filePath.toString()).use { outputStream: OutputStream ->
      outputStream.write(traces.toString().toByteArray())
    }
  }
}
// Keeping for quick debugging in the future and fast iteration
// fun main() {
// val vertx = Vertx.vertx()
// val filePath = Path.of("tmp/")
// val repository = FilesystemConflatedTracesRepository(
// vertx,
// filePath,
// true
// )
//
// val jsonObject = JsonObject.of(
// "key", "value",
// "key2", "value2",
// "key3", "value3"
// )
// repository.saveConflatedTraces(TracesConflation(1u, 4u, VersionedResult("", jsonObject)))
// .get()
// vertx.close()
// }

View File

@@ -1,65 +0,0 @@
package net.consensys.linea.traces.repository
import com.fasterxml.jackson.databind.ObjectMapper
import io.vertx.core.Future
import io.vertx.core.json.JsonObject
import net.consensys.linea.metrics.micrometer.elapsedTimeInMillisSince
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import java.nio.file.Path
import java.util.zip.GZIPInputStream
/**
 * Helper for reading gzip-compressed JSON files from the local filesystem.
 *
 * Reads are performed synchronously on the calling thread; the returned
 * [Future] is already completed. Both readers log a timing breakdown at
 * debug level.
 */
class FilesystemHelper(
val objectMapper: ObjectMapper = ObjectMapper(),
val log: Logger = LogManager.getLogger(FilesystemHelper::class.java)
) {
/** Reads a gzipped JSON file and parses it into a [JsonObject]. */
fun readGzipedJsonFile(filePath: Path): Future<JsonObject> {
val startTime = System.nanoTime()
return java.io.FileInputStream(filePath.toFile()).use { fileIs ->
// NOTE(review): measured before any bytes are pulled through the GZIP
// stream below, so "file_load" is only the stream-open time.
val filesystemLoadTime = elapsedTimeInMillisSince(startTime)
val startUngzipTime = System.nanoTime()
GZIPInputStream(fileIs).use { gzipInputStream ->
@Suppress("UNCHECKED_CAST")
val jsonm = objectMapper.readValue(gzipInputStream, Map::class.java) as Map<String, Any>
val json = JsonObject(jsonm)
val jsonTime = elapsedTimeInMillisSince(startUngzipTime)
val totalTime = elapsedTimeInMillisSince(startTime)
log.debug(
"total_time={}ms (file_load={} unzip+json_parse={}) in {}",
totalTime,
filesystemLoadTime,
jsonTime,
filePath.fileName
)
Future.succeededFuture(json)
}
}
}
/**
 * Reads a gzipped JSON file and returns its decompressed content as a raw
 * String (no JSON parsing). Fails the future instead of crashing when the
 * decompressed content is too large to hold in a String.
 */
fun readGzippedJsonFileAsString(filePath: Path): Future<String> {
val startTime = System.nanoTime()
return java.io.FileInputStream(filePath.toFile()).use { fileIs ->
val filesystemLoadTime = elapsedTimeInMillisSince(startTime)
GZIPInputStream(fileIs).use { gzipInputStream ->
val unzipTime = elapsedTimeInMillisSince(startTime)
try {
val allBytes = gzipInputStream.readAllBytes()
val result = String(allBytes)
val toStringTime = elapsedTimeInMillisSince(startTime)
log.debug(
"total_time={}ms (file_load={} unzip={} toString={}) in {}",
elapsedTimeInMillisSince(startTime),
filesystemLoadTime,
unzipTime - filesystemLoadTime,
toStringTime - unzipTime,
filePath.fileName
)
Future.succeededFuture(result)
} catch (e: OutOfMemoryError) {
log.warn("File is too large to read into String: ${filePath.fileName} error={}", e.message)
Future.failedFuture(e)
}
}
}
}
}

View File

@@ -1,113 +0,0 @@
package net.consensys.linea.traces.repository
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.get
import com.github.michaelbull.result.getError
import net.consensys.linea.BlockTraces
import net.consensys.linea.ErrorType
import net.consensys.linea.TracesError
import net.consensys.linea.TracesFileIndex
import net.consensys.linea.TracesRepositoryV1
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.metrics.micrometer.elapsedTimeInMillisSince
import net.consensys.linea.traces.TracesFileNameSupplier
import net.consensys.linea.traces.TracesFiles
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.nio.file.Path
// TODO: filter out from the file objects that are not traces
// Currently a pass-through: returns the raw file content unchanged.
internal fun tracesOnlyFromContent(content: String): String = content
/**
 * Reads raw per-block traces from gzipped JSON files on the local filesystem.
 *
 * File names are derived from block number/hash/version via [fileNameSupplier];
 * content fetched as a string is passed through [tracesOnlyFilter]
 * (currently a pass-through, see [tracesOnlyFromContent]).
 */
class FilesystemTracesRepositoryV1(
private val config: Config,
private val fileNameSupplier: TracesFileNameSupplier = TracesFiles::rawTracesFileNameSupplierV1,
private val tracesOnlyFilter: (content: String) -> String = ::tracesOnlyFromContent
) : TracesRepositoryV1 {
/** Location and extension of the raw traces files. */
data class Config(
val tracesDirectory: Path,
val tracesFileExtension: String
)
private val log: Logger = LogManager.getLogger(this::class.java)
private val fsHelper = FilesystemHelper(log = log)
// Resolves the traces file for [block]; Err(TRACES_UNAVAILABLE) if absent.
private fun findTracesFile(block: TracesFileIndex): Result<String, TracesError> {
val tracesFileName = fileNameSupplier(
block.number,
Bytes32.wrap(block.hash),
block.version,
config.tracesFileExtension
)
val tracesFile = config.tracesDirectory.resolve(tracesFileName).toFile()
return if (tracesFile.exists()) {
Ok(tracesFile.absolutePath)
} else {
Err(
TracesError(
ErrorType.TRACES_UNAVAILABLE,
"Traces not available for block ${block.number}. Target file: ${tracesFile.absolutePath}"
)
)
}
}
// Loads and JSON-parses the file at [filePath] into BlockTraces.
private fun loadTracesFileContent(
filePath: String,
block: TracesFileIndex
): SafeFuture<BlockTraces> {
val startTime = System.nanoTime()
return fsHelper.readGzipedJsonFile(Path.of(filePath))
.map { json -> BlockTraces(block.number, json) }
.toSafeFuture()
.whenComplete { _, _ ->
log.debug(
"load time=${elapsedTimeInMillisSince(startTime)}ms blockNumber=${block.number}"
)
}
}
// Loads the file as a raw string and applies [tracesOnlyFilter].
private fun loadTracesFileContentAsString(
filePath: String,
block: TracesFileIndex
): SafeFuture<String> {
val startTime = System.nanoTime()
return fsHelper.readGzippedJsonFileAsString(Path.of(filePath))
.map { json -> tracesOnlyFilter(json) }
.toSafeFuture()
.whenComplete { _, _ ->
log.debug(
"load time=${elapsedTimeInMillisSince(startTime)}ms blockNumber=${block.number}"
)
}
}
/** Returns the raw traces content of a single block as a string. */
override fun getTracesAsString(block: TracesFileIndex): SafeFuture<Result<String, TracesError>> {
return when (val result = findTracesFile(block)) {
is Ok<String> -> loadTracesFileContentAsString(result.value, block).thenApply { Ok(it) }
is Err<TracesError> -> SafeFuture.completedFuture(result)
}
}
/**
 * Loads traces for all [blocks]. If any block's file is missing, fails fast
 * with that block's error before loading anything.
 */
override fun getTraces(blocks: List<TracesFileIndex>): SafeFuture<Result<List<BlockTraces>, TracesError>> {
val blocksFiles: List<Pair<TracesFileIndex, Result<String, TracesError>>> =
blocks.map { it to findTracesFile(it) }
val fileMissingError: TracesError? =
blocksFiles.find { it.second is Err }?.second?.getError()
if (fileMissingError != null) {
return SafeFuture.completedFuture(Err(fileMissingError))
}
// All lookups are Ok at this point, so the non-null assertion is safe.
return SafeFuture.collectAll(
blocksFiles.map { loadTracesFileContent(it.second.get()!!, it.first) }.stream()
)
.thenApply { listOfTraces: List<BlockTraces> -> Ok(listOfTraces) }
}
}

View File

@@ -1,178 +0,0 @@
package net.consensys.linea.traces
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
class JsonParserHelperTest {
// findKey: returns the name of the module owning the "Trace" object at the
// given position.
@Test
fun find_simple_key() {
// {"add":{"Trace":{"ACC_1"...
assertEquals("add", JsonParserHelper.findKey("""{"add":{"Trace":{"ACC_1""", 9))
assertThat(JsonParserHelper.findKey("""{"mmu":{"mmio":{"Trace":{"ACC_1":", 17}}""", 18)).isEqualTo("mmio")
}
// findKey: malformed / keyless surroundings yield null rather than throwing.
@Test
fun find_simple_key_ill_formed() {
// {"add":{"Trace":{"ACC_1"...
assertThat(JsonParserHelper.findKey("{\"Trace\":{\"ACC_1\"", 2)).isNull()
assertThat(JsonParserHelper.findKey("[{\"field\":{\"a\":0}, \"Trace\":{\"ACC_1\":1}}]", 17)).isNull()
}
// getPrecomputedLimit must find the real "LongName" key even when the same
// text appears inside the base64 BlockRlp payload.
@Test
fun long_name_is_in_RLP() {
val json = """
{"BlockRlp":"+ZIc+QJeoBDQoow7Otewm1p4nUNM2EznGaI6Jxg/2WxGgvNusziKoB3LongNameMTejex116q4W1Z7bM1BrTEkUb",
"hub": {"mmu":{"mmio":{"Trace":{"ACC_1":["0","0","0","0","0"]}}},
"LongName": 0} }
""".trimIndent()
val value = JsonParserHelper.getPrecomputedLimit(json, "LongName")
assertEquals(0, value)
}
// Same as above but with the real key appearing before the RLP payload.
@Test
fun long_name_is_in_RLP2() {
val json = """
{"LongName": 0, "BlockRlp":"+ZIc+QJeoBDQoow7Otewm1p4nUNM2EznGaI6Jxg/2WxGgvNusziKoB3LongNameMTejex116Z7bM1BrTEkUb",
"hub": {"mmu":{"mmio":{"Trace":{"ACC_1":["0","0","0","0","0"]}}}}}
""".trimIndent()
val value = JsonParserHelper.getPrecomputedLimit(json, "LongName")
assertEquals(0, value)
}
// getTracesPosition must not match the word "Trace" occurring inside the
// BlockRlp payload — only the real "Trace" object key.
@Test
fun trace_is_in_RLP() {
val json = """
{"hub":
{"mmu":{"mmio":{"Trace":{"ACC_1":["0","0","0","0","0"]}}}},
"BlockRlp":"+ZIc+QJeoBDQoow7Otewm1p4nUNM2EznGaI6Jxg/2WxGgvNusziKoB3MTejex116q4W1Z7bM1BrTEkUblIp0E/a1x5Ql
Traceuan0BAAAAAAAAAAA"}
""".trimIndent()
val positions = JsonParserHelper.getTracesPosition(json)
assertEquals(listOf(24), positions)
assertEquals("mmio", JsonParserHelper.findKey(json, 24))
}
@Test
fun find_nested_key() {
val json = """
{"hub":
{"mmu":{"mmio":{"Trace":{"ACC_1":["0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"]}},
"RamStamp":38,"MicroStamp":74,"Trace":{"ACC_1":["0","0","4","0","0","0","196","0","0","0","0","0","0","0","0","0"
,"0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","4","0","0","0","196","0","0","0","192","0","0","0"
,"160","0","0","0","128","0","0","0","96","0","0","0","64","0","0","0","32","0","0","0","0","0","0","0","4","0",
"0","0","4","0","0","0","8","0","0","0","10","0","0","0","0","0","0","0","2","0","0","0","0","0","0","0","0","0",
"0","0","0","0","0","2","0","0","0","0","0","0","0","0","0","0","4","0","0","0","8","0","0","0","10","0","0","0",
"4","0","0","0","4","0","0","0","10","0","0","0","12","0","0","0","14","0","0","0","16","0","0","0","18","0","0",
"0","20","0","0","0","8","0","0","0","22","0","0","0","24","0","0","0","4","0","0","0","10","0","0","0","0","0",
"0","0","0","0","0","0","0","0","0"]}},"Trace":
""".trimIndent()
val positions = JsonParserHelper.getTracesPosition(json)
assertEquals(listOf(24, 919, 1723), positions)
assertEquals("mmio", JsonParserHelper.findKey(json, 24))
assertEquals("mmu", JsonParserHelper.findKey(json, 919))
assertEquals("hub", JsonParserHelper.findKey(json, 1723))
}
@Test
fun count_ill_formed() {
    // A truncated column (opening '[' with no closing ']') cannot be counted.
    val truncatedColumn = "{\"Trace\": {\"ACC_DELTA\": [ "
    assertEquals(-1, JsonParserHelper.countRow(truncatedColumn, 7))
}
@Test
fun count() {
    // Empty column -> zero rows.
    assertEquals(0, JsonParserHelper.countRow("{\"Trace\" :{ \"ACC_DELTA\": []", 7))
    // One element -> one row.
    assertEquals(1, JsonParserHelper.countRow("{\"Trace\":{\"ACC_DELTA\":[\"1\"]", 7))
    // A nested '[' inside the column is rejected.
    assertEquals(-1, JsonParserHelper.countRow("{\"Trace\":{\"ACC_DELTA\":[\"1,[0]\"]", 7))
    // Stray whitespace around elements does not change the row count.
    val traceWithWhitespace =
        """
        "mod":{"Trace":{"ACC_1_2":["0" ," 0","0","0","0","0","0","0"],
        "ACC_1_3":["0","0","0","0","0","0","0","0"],
        "ACC_2_2":["0","0","0","0","0", "0","0","0"],
        "ACC_2_3":["0","0","0","0","0","0","0","0"] ,
        " ACC_B_0":["0","0","0","0","0","0","0","1"],
        "ACC_B_1":["0","0","0","0","0","0","0","0"],"AC
        """.trimIndent()
    assertEquals(8, JsonParserHelper.countRow(traceWithWhitespace, 13))
}
@Test
fun get_long() {
    // "TxCount" is present in the blob (with awkward whitespace); its value is returned.
    val trace =
        """
        "KeccakCount":11,"L2L1logsCount":0,"TxCount":1 ,
        "PrecompileCalls":{"EcRecover":0,"Sha2":0,"RipeMD":0,"Identity":0,"ModExp": 0,
        "EcAdd":0,"EcMul":0,"EcPairing":0,"Blake2f":0}
        """.trimIndent()
    assertEquals(1L, JsonParserHelper.getPrecomputedLimit(trace, "TxCount"))
}
@Test
fun get_long_missing() {
    // A key that does not appear in the blob yields the -1 sentinel.
    val trace =
        """
        "KeccakCount":11,"L2L1logsCount":0,"TxCount":1,
        "PrecompileCalls":{"EcRecover":0,"Sha2":0,"RipeMD":0,"Identity":0,"ModExp":0,
        "EcAdd":0,"EcMul":0,"EcPairing":0,"Blake2f":0}
        """.trimIndent()
    assertEquals(-1L, JsonParserHelper.getPrecomputedLimit(trace, "missing"))
}
@Test
fun precompiles() {
    // A well-formed "PrecompileCalls" object parses into a plain map.
    val parsed = JsonParserHelper.getPrecompiles(""""PrecompileCalls": { "b":1}""")
    assertThat(parsed).isEqualTo(mapOf("b" to 1))
}
@Test
fun precompiles_invalid() {
    // A missing closing brace is signalled with null rather than an exception.
    val parsed = JsonParserHelper.getPrecompiles(""""PrecompileCalls": { "b":1""")
    assertThat(parsed).isNull()
}
@Test
fun precompiles_nested() {
    // Nested objects inside PrecompileCalls are preserved as nested maps.
    val parsed = JsonParserHelper.getPrecompiles(
        """"PrecompileCalls": { "b":1, "nested":{"c":"text"}}"""
    )
    assertThat(parsed).isEqualTo(mapOf("b" to 1, "nested" to mapOf("c" to "text")))
}
@Test
fun validateSimpleMatrix() {
    // Flat, comma-separated content is "simple".
    val flat = "1,1,1"
    val flatColumn = """"ACC_1_3":["0","0","0","0","0","0","0","0"]"""
    assertThat(JsonParserHelper.simpleMatrix(flat, 0, 4)).isTrue
    assertThat(JsonParserHelper.simpleMatrix(flatColumn, 11, 41)).isTrue
    // Any '[' or '{' inside the scanned range disqualifies the span.
    val nestedArray = "1,[0],1"
    val nestedObject = """[1,"field":{"subField":1},1,0,5]"""
    assertThat(JsonParserHelper.simpleMatrix(nestedArray, 0, 6)).isFalse()
    assertThat(JsonParserHelper.simpleMatrix(nestedObject, 1, 32)).isFalse()
}
}

View File

@@ -1,119 +0,0 @@
package net.consensys.linea.traces
import net.consensys.linea.async.get
import net.consensys.linea.traces.repository.FilesystemHelper
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.mockito.Mockito
import java.nio.file.Path
/**
 * Tests for RawJsonTracesCounter: counts per-module trace rows from a gzipped
 * raw-trace fixture file and from an inline precomputed-limits JSON string.
 */
class RawJsonTracesCounterTest {
private val log: Logger = LogManager.getLogger(this::class.java)
var counter = RawJsonTracesCounter("test")
@Test
fun parse_file() {
// Read the gzipped fixture, count it, and flatten the counters to a name->value map.
val fsHelper = FilesystemHelper(Mockito.mock(), log = log)
val parsed = fsHelper.readGzippedJsonFileAsString(Path.of("../../testdata/traces/raw/small.json.gz"))
val counted = counter.concreteCountTraces(parsed.get()).component1()
?.tracesCounters
?.entries()
?.associate { e -> e.first.name to e.second }
// Expected per-module counts for the small.json.gz fixture (row counts plus
// each module's fixed offset as applied by the counter).
assertThat(counted).isEqualTo(
mapOf(
"ADD" to 514U,
"BIN" to 256U,
"BIN_RT" to 0U,
"EC_DATA" to 12U,
"EXT" to 8U,
"HUB" to 1361U,
"INSTRUCTION_DECODER" to 0U,
"MMIO" to 209U,
"MMU" to 188U,
"MMU_ID" to 0U,
"MOD" to 16U,
"MUL" to 14U,
"MXP" to 128U,
"PHONEY_RLP" to 0U,
"PUB_HASH" to 8U,
"PUB_HASH_INFO" to 2U,
"PUB_LOG" to 14U,
"PUB_LOG_INFO" to 1U,
"RLP" to 8U,
"ROM" to 11266U,
"SHF" to 48U,
"SHF_RT" to 0U,
"TX_RLP" to 365U,
"WCP" to 226U,
"BLOCK_TX" to 1U,
"BLOCK_L2L1LOGS" to 0U,
"BLOCK_KECCAK" to 11U,
"PRECOMPILE_ECRECOVER" to 0U,
"PRECOMPILE_SHA2" to 0U,
"PRECOMPILE_RIPEMD" to 0U,
"PRECOMPILE_IDENTITY" to 0U,
"PRECOMPILE_MODEXP" to 0U,
"PRECOMPILE_ECADD" to 0U,
"PRECOMPILE_ECMUL" to 0U,
"PRECOMPILE_ECPAIRING" to 0U,
"PRECOMPILE_BLAKE2F" to 0U
)
)
}
@Test
fun parse_string() {
// An inline blob with only the precomputed limits: module row counts stay 0,
// while block-level and precompile counters are taken from the blob.
val parsed = """
{"blockL1Size":1000,"KeccakCount":11,"L2L1logsCount":0,"TxCount":1,"PrecompileCalls":{"EcRecover":1,"Sha2":0,"RipeMD":0,"Identity":0,"ModExp":0,
"EcAdd":0,"EcMul":0,"EcPairing":0,"Blake2f":0}}
""".trimIndent()
val counted = counter.concreteCountTraces(parsed)
assertThat(counted.component1()?.blockL1Size).isEqualTo(1000U)
assertThat(counted.component1()?.tracesCounters?.entries()?.associate { e -> e.first.name to e.second }).isEqualTo(
mapOf(
"ADD" to 0U,
"BIN" to 0U,
"BIN_RT" to 0U,
"BLOCK_KECCAK" to 11U,
"BLOCK_L2L1LOGS" to 0U,
"BLOCK_TX" to 1U,
"EC_DATA" to 0U,
"EXT" to 0U,
"HUB" to 0U,
"INSTRUCTION_DECODER" to 0U,
"MMIO" to 0U,
"MMU" to 0U,
"MMU_ID" to 0U,
"MOD" to 0U,
"MUL" to 0U,
"MXP" to 0U,
"PHONEY_RLP" to 0U,
"PRECOMPILE_BLAKE2F" to 0U,
"PRECOMPILE_ECADD" to 0U,
"PRECOMPILE_ECMUL" to 0U,
"PRECOMPILE_ECPAIRING" to 0U,
"PRECOMPILE_ECRECOVER" to 1U,
"PRECOMPILE_IDENTITY" to 0U,
"PRECOMPILE_MODEXP" to 0U,
"PRECOMPILE_RIPEMD" to 0U,
"PRECOMPILE_SHA2" to 0U,
"PUB_HASH" to 0U,
"PUB_HASH_INFO" to 0U,
"PUB_LOG" to 0U,
"PUB_LOG_INFO" to 0U,
"RLP" to 0U,
"ROM" to 0U,
"SHF" to 0U,
"SHF_RT" to 0U,
"TX_RLP" to 0U,
"WCP" to 0U
)
)
}
}

View File

@@ -1,282 +0,0 @@
package net.consensys.linea.traces.app
import io.restassured.RestAssured
import io.restassured.builder.RequestSpecBuilder
import io.restassured.http.ContentType
import io.restassured.response.Response
import io.restassured.specification.RequestSpecification
import io.vertx.core.json.JsonObject
import net.consensys.linea.async.get
import net.consensys.linea.traces.TracingModuleV1
import net.consensys.linea.traces.app.api.ApiConfig
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import org.junit.jupiter.api.io.TempDir
import java.nio.file.Path
/**
 * End-to-end tests for the traces API facade: each test starts the app against
 * the raw-trace fixtures and exercises the counters/conflation JSON-RPC
 * endpoints over HTTP via RestAssured.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class TracesApiFacadeAppTest {
private val apiConfig =
ApiConfig(port = 9394u, observabilityPort = 9395u, path = "/", numberOfVerticles = 1u)
private val rawTracesVersion = "0.0.1"
private val tracesApiVersion = "0.0.2"
// blockL1Size value the counters endpoint is expected to report for block 1 of the fixtures.
private val blockL1Size = 1000001U
private lateinit var appConfig: AppConfig
private val requestSpecification: RequestSpecification =
RequestSpecBuilder()
// enable for debug only
// .addFilters(listOf(ResponseLoggingFilter(), RequestLoggingFilter()))
.setBaseUri("http://localhost:${apiConfig.port}/")
.build()
private lateinit var app: TracesApiFacadeApp
@BeforeEach
fun beforeEach(
@TempDir conflatedTracesDirectory: Path
) {
// Fresh app per test; conflated traces are written into a per-test temp directory.
appConfig =
AppConfig(
inputTracesDirectory = "../../testdata/traces/raw/",
outputTracesDirectory = conflatedTracesDirectory.toString(),
tracesApiVersion = tracesApiVersion,
api = apiConfig,
tracesFileExtension = "json.gz"
)
app = TracesApiFacadeApp(appConfig)
app.start().get()
}
@AfterEach
fun afterEach() {
app.stop().get()
}
// Asserts a successful counters response: echoes the request id and block
// number, reports the server traces version, has a non-negative counter for
// every tracing module, and carries the expected blockL1Size.
private fun assertTracesCountersResponse(
jsonRpcResponse: JsonObject,
jsonRpcRequestObject: JsonObject
) {
val jsonRpcRequest = JsonObject(jsonRpcRequestObject.toString())
assertThat(jsonRpcResponse.getValue("id")).isEqualTo(jsonRpcRequest.getValue("id"))
assertThat(jsonRpcResponse.getValue("error")).isNull()
jsonRpcResponse.getValue("result").let { result ->
assertThat(result).isNotNull
result as JsonObject
assertThat(result.getValue("tracesEngineVersion")).isEqualTo(tracesApiVersion)
assertThat(result.getString("blockNumber")).isEqualTo(
(
jsonRpcRequest.getJsonObject("params")
.getJsonObject("block")
).getString("blockNumber")
)
result.getValue("tracesCounters").let { tracesCounters ->
assertThat(tracesCounters).isNotNull
tracesCounters as JsonObject
for (traceModule in TracingModuleV1.entries) {
assertThat(tracesCounters.getValue(traceModule.name) as Int).isGreaterThanOrEqualTo(
0
)
}
}
assertThat(result.getString("blockL1Size")).isEqualTo(blockL1Size.toString())
}
}
// Asserts a successful conflation response: echoes the request id, reports the
// requested start/end block numbers, and points at a conflated .json.gz file
// that actually exists in the output directory.
private fun assertTracesConflationResponse(
jsonRpcResponse: JsonObject,
jsonRpcRequestObject: JsonObject
) {
val jsonRpcRequest = JsonObject(jsonRpcRequestObject.toString())
assertThat(jsonRpcResponse.getValue("id")).isEqualTo(jsonRpcRequest.getValue("id"))
assertThat(jsonRpcResponse.getValue("error")).isNull()
jsonRpcResponse.getValue("result").let { result ->
assertThat(result).isNotNull
result as JsonObject
assertThat(result.getValue("tracesEngineVersion")).isEqualTo(tracesApiVersion)
val blocks = jsonRpcRequest.getJsonObject("params").getJsonArray("blocks")
assertThat(result.getString("startBlockNumber")).isEqualTo(
(blocks.first() as JsonObject).getString("blockNumber")
)
assertThat(result.getString("endBlockNumber")).isEqualTo(
(blocks.last() as JsonObject).getString("blockNumber")
)
result.getValue("conflatedTracesFileName").let { fileName ->
assertThat(fileName).isNotNull
assertThat(fileName).isInstanceOf(String::class.java)
fileName as String
assertThat(fileName).matches(".*.json.gz")
assertThat(Path.of(appConfig.outputTracesDirectory, fileName)).isRegularFile()
}
}
}
@Test
fun tracesCountersV1_tracesExist() {
// Happy path: the fixture for block 1 exists, so counters are returned.
val jsonRpcRequest = buildRpcJsonWithNamedParams(
"rollup_getBlockTracesCountersV1",
mapOf(
"block" to mapOf(
"blockNumber" to "1",
"blockHash" to "0xab538e7ab831af9442aab00443ee9803907654359dfcdfe1755f1a98fb87eafd"
),
"rawExecutionTracesVersion" to rawTracesVersion,
"expectedTracesApiVersion" to tracesApiVersion
)
)
val response = makeJsonRpcRequest(jsonRpcRequest)
response.then().statusCode(200).contentType("application/json")
assertTracesCountersResponse(JsonObject(response.body.asString()), jsonRpcRequest)
}
@Test
fun tracesCountersV1_incompatibleVersionIsRejected() {
// Requesting a traces API version the server cannot serve yields an
// invalid-params (-32602) JSON-RPC error.
val incompatibleVersion = "1.0.2"
val jsonRpcRequest = buildRpcJsonWithNamedParams(
"rollup_getBlockTracesCountersV1",
mapOf(
"block" to mapOf(
"blockNumber" to "1",
"blockHash" to "0xab538e7ab831af9442aab00443ee9803907654359dfcdfe1755f1a98fb87eafd"
),
"rawExecutionTracesVersion" to rawTracesVersion,
"expectedTracesApiVersion" to incompatibleVersion
)
)
val response = makeJsonRpcRequest(jsonRpcRequest)
response.then().statusCode(200).contentType("application/json")
val jsonRpcResponse = JsonObject(response.body.asString())
assertThat(jsonRpcResponse.getValue("id")).isEqualTo(jsonRpcRequest.getValue("id"))
assertThat(jsonRpcResponse.getValue("result")).isNull()
jsonRpcResponse.getValue("error").let { error ->
assertThat(error).isNotNull
error as JsonObject
assertThat(error.getValue("code")).isEqualTo(-32602)
assertThat(error.getValue("message")).isEqualTo(
"Client requested version $incompatibleVersion is not compatible to server version $tracesApiVersion"
)
assertThat(error.getString("data")).isNull()
}
}
@Test
fun tracesCountersV1_FileNotFound() {
// An unknown block hash means no fixture file: error code -4001 with details.
val jsonRpcRequest = buildRpcJsonWithNamedParams(
"rollup_getBlockTracesCountersV1",
mapOf(
"block" to mapOf(
"blockNumber" to "1",
"blockHash" to "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
),
"rawExecutionTracesVersion" to rawTracesVersion,
"expectedTracesApiVersion" to tracesApiVersion
)
)
val response = makeJsonRpcRequest(jsonRpcRequest)
response.then().statusCode(200).contentType("application/json")
val jsonRpcResponse = JsonObject(response.body.asString())
assertThat(jsonRpcResponse.getValue("id")).isEqualTo(jsonRpcRequest.getValue("id"))
assertThat(jsonRpcResponse.getValue("result")).isNull()
jsonRpcResponse.getValue("error").let { error ->
assertThat(error).isNotNull
error as JsonObject
assertThat(error.getValue("code")).isEqualTo(-4001)
assertThat(error.getValue("message")).isEqualTo("Traces not available")
assertThat(error.getString("data")).startsWith("Traces not available for block 1.")
}
}
@Test
fun generateConflatedTracesToFileV1_success() {
// Happy path: blocks 1..3 exist, so a conflated trace file is produced.
val jsonRpcRequest = buildRpcJsonWithNamedParams(
"rollup_generateConflatedTracesToFileV1",
mapOf(
"blocks" to listOf(
mapOf(
"blockNumber" to "1",
"blockHash" to "0xab538e7ab831af9442aab00443ee9803907654359dfcdfe1755f1a98fb87eafd"
),
mapOf(
"blockNumber" to "2",
"blockHash" to "0x0b68ebab401c813394f4c6139c743b6e5c72fe2da68c660c7a7616e2519a66a7"
),
mapOf(
"blockNumber" to "3",
"blockHash" to "0x833d27bb3b09544c2de8ddf7e4e1c95557ebafdba0a308d59ba016e793eac568"
)
),
"rawExecutionTracesVersion" to rawTracesVersion,
"expectedTracesApiVersion" to tracesApiVersion
)
)
val response = makeJsonRpcRequest(jsonRpcRequest)
response.then().statusCode(200).contentType("application/json")
val jsonRpcResponse = JsonObject(response.body.asString())
assertTracesConflationResponse(jsonRpcResponse, jsonRpcRequest)
}
@Test
fun generateConflatedTracesV1_FileNotFound() {
// One of the requested blocks (block 2, unknown hash) has no fixture:
// the whole conflation fails with -4001 naming the missing block.
val jsonRpcRequest = buildRpcJsonWithNamedParams(
"rollup_generateConflatedTracesToFileV1",
mapOf(
"blocks" to listOf(
mapOf(
"blockNumber" to "1",
"blockHash" to "0xab538e7ab831af9442aab00443ee9803907654359dfcdfe1755f1a98fb87eafd"
),
mapOf(
"blockNumber" to "2",
"blockHash" to "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
),
mapOf(
"blockNumber" to "3",
"blockHash" to "0x833d27bb3b09544c2de8ddf7e4e1c95557ebafdba0a308d59ba016e793eac568"
)
),
"rawExecutionTracesVersion" to rawTracesVersion,
"expectedTracesApiVersion" to tracesApiVersion
)
)
val response = makeJsonRpcRequest(jsonRpcRequest)
response.then().statusCode(200).contentType("application/json")
val jsonRpcResponse = JsonObject(response.body.asString())
assertThat(jsonRpcResponse.getValue("id")).isEqualTo(jsonRpcRequest.getValue("id"))
assertThat(jsonRpcResponse.getValue("result")).isNull()
jsonRpcResponse.getValue("error").let { error ->
assertThat(error).isNotNull
error as JsonObject
assertThat(error.getValue("code")).isEqualTo(-4001)
assertThat(error.getValue("message")).isEqualTo("Traces not available")
assertThat(error.getString("data")).startsWith("Traces not available for block 2.")
}
}
// POSTs a JSON-RPC request body to the app's root path.
private fun makeJsonRpcRequest(request: JsonObject): Response {
return RestAssured.given()
.spec(requestSpecification)
.accept(ContentType.JSON)
.body(request.toString())
.`when`()
.post("/")
}
// Builds a JSON-RPC 2.0 envelope with named (map) params and a fixed id.
private fun buildRpcJsonWithNamedParams(method: String, params: Map<String, Any?>): JsonObject {
return JsonObject()
.put("id", "1")
.put("jsonrpc", "2.0")
.put("method", method)
.put("params", params)
}
}

View File

@@ -1,30 +0,0 @@
package net.consensys.linea.traces.app.api
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import net.consensys.linea.jsonrpc.JsonRpcErrorResponse
import net.consensys.linea.jsonrpc.JsonRpcRequestMapParams
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
/** Tests for the request-parameter validation used by the API handlers. */
class RequestHandlersTest {
    @Test
    fun validateParams_rejectsEmptyMap() {
        // An empty parameter map must be rejected with an invalid-params error.
        val emptyRequest = JsonRpcRequestMapParams("", "", "", emptyMap<String, Any>())
        val expectedError = Err(
            JsonRpcErrorResponse.invalidParams(
                emptyRequest.id,
                "Parameters map is empty!"
            )
        )
        Assertions.assertEquals(expectedError, validateParams(emptyRequest))
    }

    @Test
    fun validateParams_acceptsNonEmptyMap() {
        // A populated map passes through unchanged, wrapped in Ok.
        val populatedRequest = JsonRpcRequestMapParams("", "", "", mapOf("key" to "value"))
        Assertions.assertEquals(Ok(populatedRequest), validateParams(populatedRequest))
    }
}

View File

@@ -1,90 +0,0 @@
package net.consensys.linea.traces.app.api
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import net.consensys.linea.jsonrpc.JsonRpcErrorResponse
import net.consensys.linea.jsonrpc.JsonRpcRequest
import net.consensys.linea.jsonrpc.JsonRpcRequestMapParams
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.Arguments
import org.junit.jupiter.params.provider.MethodSource
import java.util.stream.Stream
/**
 * Tests the semantic-version compatibility check between the client-requested
 * traces API version and the server's version (fixed here at 2.3.4).
 */
class TracesSemanticVersionValidatorTest {
private val validator = TracesSemanticVersionValidator(serverSemanticVersion)
// Builds a request carrying the given expectedTracesApiVersion; other params are irrelevant here.
private fun buildRequestWithVersion(version: TracesSemanticVersionValidator.SemanticVersion): JsonRpcRequest {
return JsonRpcRequestMapParams(
"",
1,
"",
mapOf(
"block" to null,
"rawExecutionTracesVersion" to null,
"expectedTracesApiVersion" to version
)
)
}
@Test
fun semanticVersion_isCreatedFromString_correctly() {
val parsedSemanticVersion =
TracesSemanticVersionValidator.SemanticVersion.fromString("1.2.3")
assertEquals(
TracesSemanticVersionValidator.SemanticVersion(1u, 2u, 3u),
parsedSemanticVersion
)
}
// Incompatible client versions must produce an invalid-params error naming both versions.
@ParameterizedTest
@MethodSource("negativeTests")
fun negativeTests(clientVersion: TracesSemanticVersionValidator.SemanticVersion) {
val request = buildRequestWithVersion(clientVersion)
assertEquals(
Err(
JsonRpcErrorResponse.invalidParams(
request.id,
"Client requested version $clientVersion is not compatible to server version $serverSemanticVersion"
)
),
validator.validateExpectedVersion(request.id, clientVersion.toString())
)
}
// Compatible client versions validate cleanly.
@ParameterizedTest
@MethodSource("positiveTests")
fun positiveTests(clientVersion: TracesSemanticVersionValidator.SemanticVersion) {
val request = buildRequestWithVersion(clientVersion)
assertEquals(
Ok(Unit),
validator.validateExpectedVersion(request.id, clientVersion.toString())
)
}
companion object {
private val serverSemanticVersion = TracesSemanticVersionValidator.SemanticVersion(2u, 3u, 4u)
// NOTE: @MethodSource resolves these providers by name; they must match the test method names.
@JvmStatic
private fun negativeTests(): Stream<Arguments> {
return Stream.of(
Arguments.of(serverSemanticVersion.copy(major = 1u)),
Arguments.of(serverSemanticVersion.copy(patch = 5u)),
Arguments.of(serverSemanticVersion.copy(minor = 4u)),
Arguments.of(serverSemanticVersion.copy(minor = 4u, patch = 7u)),
Arguments.of(serverSemanticVersion.copy(major = 3u))
)
}
@JvmStatic
private fun positiveTests(): Stream<Arguments> {
return Stream.of(
Arguments.of(serverSemanticVersion),
Arguments.of(serverSemanticVersion.copy(minor = 1u, patch = 0u)),
Arguments.of(serverSemanticVersion.copy(minor = 1u)),
Arguments.of(serverSemanticVersion.copy(minor = 2u, patch = 4u))
)
}
}
}

View File

@@ -1,33 +0,0 @@
package net.consensys.linea.traces.repository
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import kotlin.io.path.Path
/** Tests reading gzipped raw-trace JSON fixtures from disk. */
class FilesystemHelperTest {
    private val rawTracesDir = Path("../../testdata/traces/raw")
    lateinit var filesystemHelper: FilesystemHelper

    @BeforeEach
    fun beforeEach() {
        // Fresh helper per test; no state shared between cases.
        filesystemHelper = FilesystemHelper()
    }

    @Test
    fun readGzipedJsonFileAsString() {
        val fixture =
            "1-0xab538e7ab831af9442aab00443ee9803907654359dfcdfe1755f1a98fb87eafd.v0.0.1.json.gz"
        val traces = filesystemHelper.readGzippedJsonFileAsString(rawTracesDir.resolve(fixture))
        assertThat(traces).isNotNull()
    }

    // @Test
    // disabled: meant for local prototyping only with production files
    fun readGzipedJsonFileAsString_canReadVeryLargeFile() {
        val largeFixture = Path("../../tmp/local/traces/raw")
            .resolve("2480859-0xeed3fd5ffcf442e9e7906d1d078ef6e607c1fc1aa015ebdb6cf2fff938d837a3.v0.2.0.json.gz")
        val traces = filesystemHelper.readGzippedJsonFileAsString(largeFixture)
        assertThat(traces).isNotNull()
    }
}

View File

@@ -1,10 +0,0 @@
{
"metricsOptions": {
"enabled": true,
"jvmMetricsEnabled": true,
"prometheusOptions": {
"publishQuantiles": true,
"enabled": true
}
}
}

View File

@@ -1,16 +0,0 @@
// Gradle build for the traces-api-facade application module.
plugins {
id 'net.consensys.zkevm.kotlin-library-conventions'
id 'idea'
id 'application'
}
dependencies {
implementation project(':traces-api-facade:core')
implementation "org.jetbrains.kotlin:kotlin-reflect:1.7.20"
// Jackson is used for JSON (de)serialization (e.g. ObjectMapper in the traces helpers).
implementation "com.fasterxml.jackson.core:jackson-annotations:${libs.versions.jackson.get()}"
implementation "com.fasterxml.jackson.core:jackson-databind:${libs.versions.jackson.get()}"
}
application {
// CLI entry point (top-level main in Main.kt: `count` / `conflate` commands).
mainClass = 'net.consensys.linea.traces.MainKt'
}

View File

@@ -1,202 +0,0 @@
package net.consensys.linea.traces
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
/**
 * String-level helpers for scanning raw-trace JSON blobs without fully
 * parsing them: locating "Trace" entries, counting matrix rows, and
 * extracting the precomputed limits and "PrecompileCalls" sub-object.
 */
class JsonParserHelper {
    companion object {
        private val log: Logger = LogManager.getLogger(this::class.java)
        val objectMapper: ObjectMapper = ObjectMapper()

        // Maps the module keys used in the raw-trace JSON to their tracing module.
        var mapOfValues: Map<String, TracingModuleV1> = mapOf(
            "add" to TracingModuleV1.ADD,
            "bin" to TracingModuleV1.BIN,
            "binRT" to TracingModuleV1.BIN_RT,
            "ext" to TracingModuleV1.EXT,
            "ec_data" to TracingModuleV1.EC_DATA,
            "hash_data" to TracingModuleV1.PUB_HASH,
            "hash_info" to TracingModuleV1.PUB_HASH_INFO,
            "hub" to TracingModuleV1.HUB,
            "log_data" to TracingModuleV1.PUB_LOG,
            "log_info" to TracingModuleV1.PUB_LOG_INFO,
            "mmio" to TracingModuleV1.MMIO,
            "mmu" to TracingModuleV1.MMU,
            "mmuID" to TracingModuleV1.MMU_ID,
            "mod" to TracingModuleV1.MOD,
            "mul" to TracingModuleV1.MUL,
            "mxp" to TracingModuleV1.MXP,
            "phoneyRLP" to TracingModuleV1.PHONEY_RLP,
            "rlp" to TracingModuleV1.RLP,
            "rom" to TracingModuleV1.ROM,
            "shf" to TracingModuleV1.SHF,
            "shfRT" to TracingModuleV1.SHF_RT,
            "txRlp" to TracingModuleV1.TX_RLP,
            "wcp" to TracingModuleV1.WCP
        )

        /** Returns the tracing module for a raw-trace JSON [key], or null if unknown. */
        fun from(key: String): TracingModuleV1? = mapOfValues[key]

        /**
         * Finds the name of the JSON object containing the "Trace" entry at [pos]:
         * for `"name":{ ..., "Trace":... }` this returns `name`.
         * Returns null when the enclosing structure cannot be located.
         */
        fun findKey(json: String, pos: Int): String? {
            val curlyPosition = openingCurlyBracketPosition(pos, json)
            if (curlyPosition == -1) {
                log.warn("Opening curly bracket not found at $pos")
                return null
            }
            // The object's name is the quoted token immediately before the '{'.
            return extractName(json, curlyPosition)
        }

        // Extracts the quoted name that precedes the '{' at [curlyPosition], or null.
        private fun extractName(json: String, curlyPosition: Int): String? {
            val nameEnd = json.lastIndexOf('"', curlyPosition)
            if (nameEnd == -1) {
                log.warn("Name end not found at pos $curlyPosition")
                return null
            }
            val nameStart = json.lastIndexOf('"', nameEnd - 1)
            if (nameStart == -1) {
                log.warn("Name start not found at pos $nameEnd")
                return null
            }
            return json.substring(nameStart + 1, nameEnd).trim()
        }

        // Scans backwards from [pos] for the unmatched '{' that opens the enclosing object.
        private fun openingCurlyBracketPosition(pos: Int, json: String): Int {
            var depth = 0
            for (i in pos downTo 0) {
                when (json[i]) {
                    '{' -> {
                        depth += 1
                        if (depth == 1) {
                            return i
                        }
                    }
                    '}' -> depth -= 1
                }
            }
            return -1
        }

        /** True when json[start..end] contains no nested '[' or '{'. */
        fun simpleMatrix(json: String, start: Int, end: Int): Boolean {
            for (i in start..end) {
                if (json[i] == '[' || json[i] == '{') {
                    return false
                }
            }
            return true
        }

        /**
         * Counts the rows of the first column after [pos]. Traces represent a
         * matrix by columns, e.g. `{"Trace":{"ACC_1":["0","0",...`: the count
         * of the first `[...]` is the row count. Returns -1 on malformed input
         * (missing brackets or nested structures inside the column).
         */
        fun countRow(json: String, pos: Int): Int {
            val startArray = json.indexOf('[', pos + 1)
            if (startArray < 0) {
                log.warn("Start array not found at $pos")
                return -1
            }
            val endArray = json.indexOf(']', startArray + 1)
            if (endArray < 0) {
                log.warn("End array not found at $pos")
                return -1
            }
            if (!simpleMatrix(json, startArray + 1, endArray)) {
                // Typos fixed in this log message ("colum" -> "columns", "ar" -> "at").
                log.warn(
                    "We assume that traces are made of columns without nested objects," +
                        " but it was not the case for the trace at $pos."
                )
                return -1
            }
            val column = json.substring(startArray + 1, endArray)
            val commas = column.count { it == ',' }
            // No commas and only whitespace means an empty column: {"Trace":{"ACC_1":[]
            return if (commas == 0 && column.trim().isEmpty()) 0 else commas + 1
        }

        /**
         * Returns the numeric value of the top-level precomputed limit [name]
         * (e.g. "TxCount"), or -1 when the key is missing or the value is not
         * a valid number.
         */
        fun getPrecomputedLimit(trace: String, name: String): Long {
            // Quote the key so substrings of other keys cannot match.
            val position = trace.indexOf("\"$name\"")
            if (position == -1) {
                log.warn("Name $name not found.")
                return -1L
            }
            val start = trace.indexOf(':', position + name.length)
            if (start == -1) {
                // Fix: previously an absent ':' led to scanning from index 0.
                return -1L
            }
            var end = -1
            // Fix: iterate strictly below trace.length; the original upper bound of
            // `..trace.length` read one index past the end of the string.
            for (i in (start + 1) until trace.length) {
                if (trace[i] == ',' || trace[i] == '}') {
                    end = i
                    break
                }
            }
            if (end == -1) {
                return -1L
            }
            return trace.substring(start + 1, end).trim().toLongOrNull() ?: -1L
        }

        /** Positions of every `"Trace"` key in [trace], in order of occurrence. */
        fun getTracesPosition(trace: String): ArrayList<Int> {
            val positions = ArrayList<Int>()
            // Fix: search from index 0 onwards; the original started at index 1
            // and would miss a "Trace" key at the very beginning of the string.
            var pos = trace.indexOf("\"Trace\"")
            while (pos >= 0) {
                positions.add(pos)
                pos = trace.indexOf("\"Trace\"", pos + 1)
            }
            return positions
        }

        /**
         * Extracts the "PrecompileCalls" object as a map, or null when the key
         * is absent or its closing brace cannot be found.
         */
        fun getPrecompiles(trace: String): Map<String, Any>? {
            val preCompilePos = trace.indexOf("PrecompileCalls")
            if (preCompilePos < 0) {
                log.warn("precompile block not found.")
                return null
            }
            val startPosition = preCompilePos - 1 // back up to the opening quote
            val endPosition = getPositionOfMatchingClosingBracket(startPosition, trace)
            if (endPosition == -1) {
                log.warn("precompile closing block in json not found at $startPosition")
                return null
            }
            // Wrap in braces so the fragment parses as a standalone JSON object.
            val subString = "{" + trace.substring(startPosition, endPosition + 1) + "}"
            @Suppress("UNCHECKED_CAST")
            val jsonObject = objectMapper.readValue(subString, Map::class.java) as Map<String, Map<String, Any>>
            return jsonObject["PrecompileCalls"]
        }

        // Returns the index of the '}' that balances the braces opened after
        // [startPosition], or -1 if the input is unbalanced/truncated.
        private fun getPositionOfMatchingClosingBracket(startPosition: Int, trace: String): Int {
            var depth = 0
            for (i in startPosition until trace.length) {
                when (trace[i]) {
                    '}' -> {
                        depth += 1
                        if (depth == 0) {
                            return i
                        }
                    }
                    '{' -> depth -= 1
                }
            }
            return -1
        }
    }
}

View File

@@ -1,49 +0,0 @@
package net.consensys.linea.traces
import com.github.michaelbull.result.expect
import io.vertx.core.json.JsonObject
import java.io.File
import java.io.FileWriter
import java.io.PrintWriter
import java.nio.charset.Charset
/** Counts traces for each file path given in [args] and prints each result. */
fun testCount(args: List<String>) {
    val counter = RawJsonTracesCounter("0.1")
    args.forEach { path ->
        println("reading $path")
        val outcome = counter.concreteCountTraces(File(path).bufferedReader().use { it.readText() })
        println(outcome)
    }
}
/** Conflates the trace files given in [args] and writes the result to conflated.json. */
fun testConflate(args: List<String>) {
    val conflator = RawJsonTracesConflator("0.1")
    // Load every input file as a JsonObject (named lambda params avoid the
    // shadowed `it` the original had in the nested reader lambda).
    val inputs = args.map { path ->
        println("reading $path")
        JsonObject(File(path).bufferedReader().use { reader -> reader.readText() })
    }
    val conflated = conflator
        .conflateTraces(inputs)
        .expect { "Conflation failed" }
        .result
    try {
        PrintWriter(FileWriter("conflated.json", Charset.defaultCharset())).use { writer ->
            writer.write(conflated.toString())
        }
    } catch (e: Exception) {
        e.printStackTrace()
    }
}
/**
 * CLI entry point: `count <files...>` or `conflate <files...>`.
 * Prints a usage hint for an unknown or missing command.
 */
fun main(args: Array<String>) {
    // Guard: previously an empty argv crashed with ArrayIndexOutOfBoundsException.
    if (args.isEmpty()) {
        println("missing command; expected `count` or `conflate`")
        return
    }
    when (args[0]) {
        "count" -> testCount(args.drop(1))
        "conflate" -> testConflate(args.drop(1))
        // Fix: `$args[0]` interpolated the whole array's toString followed by a
        // literal "[0]"; element access in a template needs `${args[0]}`.
        else -> println("unknown command ${args[0]}; expected `count` or `conflate`")
    }
}

View File

@@ -1,141 +0,0 @@
package net.consensys.linea.traces
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.map
import net.consensys.linea.BlockCounters
import net.consensys.linea.ErrorType
import net.consensys.linea.TracesCounter
import net.consensys.linea.TracesError
import net.consensys.linea.VersionedResult
import net.consensys.linea.traces.JsonParserHelper.Companion.countRow
import net.consensys.linea.traces.JsonParserHelper.Companion.findKey
import net.consensys.linea.traces.JsonParserHelper.Companion.getPrecomputedLimit
import net.consensys.linea.traces.JsonParserHelper.Companion.getTracesPosition
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
/**
 * Counts per-module trace rows and precomputed block/precompile limits from a
 * raw-trace JSON string, without fully parsing the document. Failures to find
 * an expected section surface as WRONG_JSON_CONTENT errors.
 */
class RawJsonTracesCounter(private val tracesVersion: String) :
TracesCounter {
private val log: Logger = LogManager.getLogger(this::class.java)
// Baseline: every module starts at zero so absent modules still get a counter.
private val emptyCounters = TracingModuleV1.values().associateWith { 0.toUInt() }
// Counts every "Trace" column in [trace] plus block-level and precompile
// limits; returns an error on any malformed or missing section.
fun concreteCountTraces(trace: String): Result<BlockCounters, TracesError> {
val counters = emptyCounters.toMutableMap()
val traceInfoPosition = getTracesPosition(trace)
for (pos: Int in traceInfoPosition) {
val key = findKey(trace, pos)
if (key == null) {
log.info("Key of Trace at $pos not found.")
continue
}
val count = countRow(trace, pos)
if (count < 0) {
return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Invalid count at $pos."))
}
val module = JsonParserHelper.from(key)
if (module == null) {
log.warn("Unrecognized module {}", key)
} else {
// Each module's row count is adjusted by its fixed offset (see getOffset).
counters[module] = count.toUInt() + getOffset(module)
}
}
// some constant limits
counters[TracingModuleV1.MMU_ID] = 0.toUInt()
counters[TracingModuleV1.SHF_RT] = 0.toUInt()
counters[TracingModuleV1.INSTRUCTION_DECODER] = 0.toUInt()
counters[TracingModuleV1.BIN_RT] = 0.toUInt()
// Block limits
val blockTx = getPrecomputedLimit(trace, "TxCount")
if (blockTx < 0) {
return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Error while looking for blockTx: TxCount."))
} else {
counters[TracingModuleV1.BLOCK_TX] = blockTx.toUInt()
}
val l2L1logsCount = getPrecomputedLimit(trace, "L2L1logsCount")
if (l2L1logsCount < 0) {
return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Error while looking for L2L1logsCount."))
} else {
counters[TracingModuleV1.BLOCK_L2L1LOGS] = l2L1logsCount.toUInt()
}
val keccakCount = getPrecomputedLimit(trace, "KeccakCount")
if (keccakCount < 0) {
return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Error while looking for KeccakCount."))
} else {
counters[TracingModuleV1.BLOCK_KECCAK] = keccakCount.toUInt()
}
// Precompile limits
val precompiles = JsonParserHelper.getPrecompiles(trace)
if (precompiles == null) {
return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Missing precompiles."))
} else {
// Absent individual precompile entries default to 0 rather than failing.
counters[TracingModuleV1.PRECOMPILE_ECRECOVER] = ((precompiles.get("EcRecover") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_SHA2] = ((precompiles.get("Sha2") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_RIPEMD] = ((precompiles.get("RipeMD") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_IDENTITY] = ((precompiles.get("Identity") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_MODEXP] = ((precompiles.get("ModExp") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_ECADD] = ((precompiles.get("EcAdd") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_ECMUL] = ((precompiles.get("EcMul") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_ECPAIRING] = ((precompiles.get("EcPairing") ?: 0) as Int).toUInt()
counters[TracingModuleV1.PRECOMPILE_BLAKE2F] = ((precompiles.get("Blake2f") ?: 0) as Int).toUInt()
}
val blockL1Size = getPrecomputedLimit(trace, "blockL1Size")
if (blockL1Size < 0) {
return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Invalid blockL1Size $blockL1Size"))
}
return Ok(BlockCounters(TracesCountersV1(counters), blockL1Size.toUInt()))
}
// Fixed per-module offset added on top of the counted rows.
// NOTE(review): these constants mirror the offsets used elsewhere for these
// modules (e.g. the V0 counter) — presumably spilling/padding rows; confirm
// against the arithmetization spec before changing.
private fun getOffset(module: TracingModuleV1): UInt {
return when (module) {
TracingModuleV1.ADD -> 2U
TracingModuleV1.BIN -> 16U
TracingModuleV1.BIN_RT -> 0U
TracingModuleV1.EC_DATA -> 12U
TracingModuleV1.EXT -> 8U
TracingModuleV1.HUB -> 2U
TracingModuleV1.INSTRUCTION_DECODER -> 0U
TracingModuleV1.MMIO -> 0U
TracingModuleV1.MMU -> 0U
TracingModuleV1.MMU_ID -> 0U
TracingModuleV1.MOD -> 8U
TracingModuleV1.MUL -> 9U
TracingModuleV1.MXP -> 4U
TracingModuleV1.PHONEY_RLP -> 0U
TracingModuleV1.PUB_HASH -> 0U
TracingModuleV1.PUB_HASH_INFO -> 0U
TracingModuleV1.PUB_LOG -> 0U
TracingModuleV1.PUB_LOG_INFO -> 0U
TracingModuleV1.RLP -> 8U
TracingModuleV1.ROM -> 2U
TracingModuleV1.SHF -> 16U
TracingModuleV1.SHF_RT -> 0U
TracingModuleV1.TX_RLP -> 0U
TracingModuleV1.WCP -> 16U
TracingModuleV1.BLOCK_TX -> 0U
TracingModuleV1.BLOCK_L2L1LOGS -> 0U
TracingModuleV1.BLOCK_KECCAK -> 0U
TracingModuleV1.PRECOMPILE_ECRECOVER -> 0U
TracingModuleV1.PRECOMPILE_SHA2 -> 0U
TracingModuleV1.PRECOMPILE_RIPEMD -> 0U
TracingModuleV1.PRECOMPILE_IDENTITY -> 0U
TracingModuleV1.PRECOMPILE_MODEXP -> 0U
TracingModuleV1.PRECOMPILE_ECADD -> 0U
TracingModuleV1.PRECOMPILE_ECMUL -> 0U
TracingModuleV1.PRECOMPILE_ECPAIRING -> 0U
TracingModuleV1.PRECOMPILE_BLAKE2F -> 0U
}
}
// TracesCounter entry point: wraps the concrete count with the configured version tag.
override fun countTraces(traces: String): Result<VersionedResult<BlockCounters>, TracesError> {
return concreteCountTraces(traces).map { VersionedResult(tracesVersion, it) }
}
}

View File

@@ -1,99 +0,0 @@
package net.consensys.linea.traces
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.map
import io.vertx.core.json.JsonObject
import net.consensys.linea.BlockCounters
import net.consensys.linea.ErrorType
import net.consensys.linea.TracesCounterV0
import net.consensys.linea.TracesError
import net.consensys.linea.VersionedResult
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
/**
 * Counts per-module trace-line usage for a single block from the original (v0)
 * raw JSON traces payload.
 *
 * For each module found in the payload the counter records the number of trace
 * lines plus that module's fixed overhead constant; block-level and precompile
 * counters are read directly from the top-level JSON fields.
 */
class RawJsonTracesCounterV0(private val tracesVersion: String) : TracesCounterV0 {
  private val log: Logger = LogManager.getLogger(this::class.java)

  // Start every module at zero so modules absent from the payload still appear in the result.
  private val emptyCounters = TracingModuleV1.values().associateWith { 0.toUInt() }

  /**
   * Counts all module, block and precompile limits for [trace].
   *
   * Returns `Err(WRONG_JSON_CONTENT)` when `blockL1Size` is missing or negative;
   * otherwise `Ok` with the per-module counters and the block's L1 size.
   */
  fun concreteCountTraces(trace: JsonObject): Result<BlockCounters, TracesError> {
    val counters = emptyCounters.toMutableMap()

    // Module limits: map each module's JSON sub-object onto its counter.
    // The added constants (e.g. +2U, +16U) are the modules' fixed overheads.
    MODULES.forEach { moduleInfo ->
      val (jsonPath, klass) = moduleInfo
      trace.getTrace(jsonPath)?.let { traceJsonObject ->
        if (!traceJsonObject.isEmpty) {
          traceJsonObject.mapTo(klass)?.also {
            when (it) {
              is Add -> counters[TracingModuleV1.ADD] = it.ACC_1.size.toUInt() + 2U
              is Bin -> counters[TracingModuleV1.BIN] = it.ACC_1.size.toUInt() + 16U
              is Ext -> counters[TracingModuleV1.EXT] = it.ACC_A_0.size.toUInt() + 8U
              is EcData -> counters[TracingModuleV1.EC_DATA] = it.ACC_DELTA.size.toUInt() + 12U
              is HashData -> counters[TracingModuleV1.PUB_HASH] = it.INDEX.size.toUInt()
              is HashInfo -> counters[TracingModuleV1.PUB_HASH_INFO] = it.HASH_HI.size.toUInt()
              is Hub -> counters[TracingModuleV1.HUB] = it.ALPHA.size.toUInt() + 2U
              is LogData -> counters[TracingModuleV1.PUB_LOG] = it.INDEX.size.toUInt()
              is LogInfo -> counters[TracingModuleV1.PUB_LOG_INFO] = it.ADDR_HI.size.toUInt()
              is Mmio -> counters[TracingModuleV1.MMIO] = it.ACC_1.size.toUInt() // TODO: get spilling
              is Mmu -> counters[TracingModuleV1.MMU] = it.ACC_1.size.toUInt() // TODO: get spilling
              is Mod -> counters[TracingModuleV1.MOD] = it.ACC_1_2.size.toUInt() + 8U
              is Mul -> counters[TracingModuleV1.MUL] = it.ACC_A_0.size.toUInt() + 9U
              is Mxp -> counters[TracingModuleV1.MXP] = it.ACC_1.size.toUInt() + 4U
              is PhoneyRlp -> counters[TracingModuleV1.PHONEY_RLP] = it.INDEX.size.toUInt()
              is Rlp -> counters[TracingModuleV1.RLP] = it.ADDR_HI.size.toUInt() + 8U
              is Rom -> counters[TracingModuleV1.ROM] = it.PC.size.toUInt() + 2U
              is Shf -> counters[TracingModuleV1.SHF] = it.ACC_1.size.toUInt() + 16U
              is TxRlp -> counters[TracingModuleV1.TX_RLP] = it.ABS_TX_NUM.size.toUInt()
              is Wcp -> counters[TracingModuleV1.WCP] = it.ACC_1.size.toUInt() + 16U
              //
              // These modules are constant-sized, so they do not matter to the blocks
              // conflation counters
              //
              is MmuId -> counters[TracingModuleV1.MMU_ID] = 0.toUInt()
              is ShfRt -> counters[TracingModuleV1.SHF_RT] = 0.toUInt()
              is InstructionDecoder -> counters[TracingModuleV1.INSTRUCTION_DECODER] = 0.toUInt()
              is BinRt -> counters[TracingModuleV1.BIN_RT] = 0.toUInt()
              else -> log.warn("Unrecognized evm module {}", it::class)
            }
          }
        }
      }
        ?: run {
          log.warn("Traces do not contain object with path: '{}'", jsonPath.joinToString("."))
        }
    }

    // Block limits
    counters[TracingModuleV1.BLOCK_TX] = trace.getLong("TxCount").toUInt()
    counters[TracingModuleV1.BLOCK_L2L1LOGS] = trace.getLong("L2L1logsCount").toUInt()
    counters[TracingModuleV1.BLOCK_KECCAK] = trace.getLong("KeccakCount").toUInt()

    // Precompile limits
    val precompiles = trace.getJsonObject("PrecompileCalls")
    counters[TracingModuleV1.PRECOMPILE_ECRECOVER] = precompiles.getLong("EcRecover").toUInt()
    counters[TracingModuleV1.PRECOMPILE_SHA2] = precompiles.getLong("Sha2").toUInt()
    counters[TracingModuleV1.PRECOMPILE_RIPEMD] = precompiles.getLong("RipeMD").toUInt()
    counters[TracingModuleV1.PRECOMPILE_IDENTITY] = precompiles.getLong("Identity").toUInt()
    counters[TracingModuleV1.PRECOMPILE_MODEXP] = precompiles.getLong("ModExp").toUInt()
    counters[TracingModuleV1.PRECOMPILE_ECADD] = precompiles.getLong("EcAdd").toUInt()
    counters[TracingModuleV1.PRECOMPILE_ECMUL] = precompiles.getLong("EcMul").toUInt()
    counters[TracingModuleV1.PRECOMPILE_ECPAIRING] = precompiles.getLong("EcPairing").toUInt()
    counters[TracingModuleV1.PRECOMPILE_BLAKE2F] = precompiles.getLong("Blake2f").toUInt()

    val blockL1Size = trace.getLong("blockL1Size")
    if (blockL1Size == null || blockL1Size < 0) {
      // BUGFIX: previously the Err was constructed but never returned, so an invalid or
      // absent blockL1Size fell through to blockL1Size.toUInt() (NPE when the field is null).
      return Err(TracesError(ErrorType.WRONG_JSON_CONTENT, "Invalid blockL1Size $blockL1Size"))
    }
    // blockL1Size is smart-cast to a non-null, non-negative Long here.
    return Ok(BlockCounters(TracesCountersV1(counters), blockL1Size.toUInt()))
  }

  override fun countTraces(
    traces: JsonObject
  ): Result<VersionedResult<BlockCounters>, TracesError> {
    return concreteCountTraces(traces).map { VersionedResult(tracesVersion, it) }
  }
}

View File

@@ -1,9 +0,0 @@
// Build script for this traces library: shared Kotlin library conventions plus
// API-scoped dependencies.
plugins {
id 'net.consensys.zkevm.kotlin-library-conventions'
}
// 'api' scope: these dependencies are part of this module's public API surface
// (e.g. Vert.x JsonObject appears in exported interfaces).
dependencies {
api project(':jvm-libs:linea:core:domain-models')
api project(':jvm-libs:linea:core:traces')
api "io.vertx:vertx-core"
}

View File

@@ -1,12 +0,0 @@
package net.consensys.linea
/**
 * All error codes used by the traces services, kept in a single enum for simplicity.
 * NOTE: do not reorder constants — ordinal values may be relied upon by serialization.
 */
enum class ErrorType {
INVALID_BLOCK_NUMBERS_RANGE,
TRACES_UNAVAILABLE,
TRACES_AMBIGUITY,
WRONG_JSON_FORMAT,
WRONG_JSON_CONTENT
}
data class TracesError(val errorType: ErrorType, val errorDetail: String)

View File

@@ -1,71 +0,0 @@
package net.consensys.linea
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.flatMap
import io.vertx.core.json.JsonObject
import linea.domain.BlockNumberAndHash
import linea.domain.CommonDomainFunctions.blockIntervalString
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
/**
 * Conflates (merges) the traces of a set of blocks into a single trace and
 * persists the result as a file, reusing a previously saved conflation for the
 * same block range instead of recomputing it.
 */
class TracesConflationServiceV1Impl(
  private val repository: TracesRepositoryV1,
  private val tracesConflator: TracesConflator,
  private val conflatedTracesRepository: ConflatedTracesRepository,
  private val tracesVersion: String
) : TracesConflationServiceV1 {
  val log: Logger = LogManager.getLogger(this::class.java)

  /**
   * Loads the traces of [blocks] from the repository and conflates them.
   * Traces are sorted by block number before being handed to the conflator.
   */
  override fun getConflatedTraces(
    blocks: List<BlockNumberAndHash>,
    version: String
  ): SafeFuture<Result<VersionedResult<JsonObject>, TracesError>> {
    val tracesIndexes = blocks.map {
      TracesFileIndex(it, version)
    }
    return repository.getTraces(tracesIndexes)
      .thenApply { result ->
        result.flatMap { blocksTraces ->
          tracesConflator.conflateTraces(blocksTraces.sortedBy { it.blockNumber }.map { it.traces })
        }
      }
  }

  /**
   * Returns the file name holding the conflated traces for [blocks], either by
   * reusing an existing file for the same range or by conflating and saving now.
   */
  override fun generateConflatedTracesToFile(
    blocks: List<BlockNumberAndHash>,
    version: String
  ): SafeFuture<Result<VersionedResult<String>, TracesError>> {
    val blocksSorted = blocks.sortedBy { it.number }
    // we check if we already have the conflation for the given blocks
    return conflatedTracesRepository.findConflatedTraces(
      blocksSorted.first().number,
      blocksSorted.last().number,
      tracesVersion
    ).thenCompose { conflatedTracesFileName: String? ->
      if (conflatedTracesFileName != null) {
        // Cache hit: reuse the saved file. Note the reused result is tagged with this
        // service's tracesVersion, not the request's `version` argument.
        log.info(
          "Reusing conflated traces for batch={} file={}",
          blockIntervalString(blocksSorted.first().number, blocksSorted.last().number),
          conflatedTracesFileName
        )
        SafeFuture.completedFuture(Ok(VersionedResult(tracesVersion, conflatedTracesFileName)))
      } else {
        // Cache miss: conflate now and persist the result before answering.
        getConflatedTraces(blocks, version).thenCompose { result ->
          when (result) {
            is Ok -> {
              conflatedTracesRepository
                .saveConflatedTraces(
                  TracesConflation(blocksSorted.first().number, blocksSorted.last().number, result.value)
                )
                .thenApply { Ok(VersionedResult(result.value.version, it)) }
            }
            is Err -> SafeFuture.completedFuture(Err(result.error))
          }
        }
      }
    }
  }
}

View File

@@ -1,16 +0,0 @@
package net.consensys.linea
import com.github.michaelbull.result.Result
import io.vertx.core.json.JsonObject
/** Counts per-module trace usage for one block given its traces as a parsed JSON object (v0 path). */
fun interface TracesCounterV0 {
  fun countTraces(traces: JsonObject): Result<VersionedResult<BlockCounters>, TracesError>
}
/** Counts per-module trace usage for one block given its traces as a raw JSON string. */
fun interface TracesCounter {
  fun countTraces(traces: String): Result<VersionedResult<BlockCounters>, TracesError>
}
/** Merges the traces of several blocks into a single conflated trace. */
fun interface TracesConflator {
  fun conflateTraces(traces: List<JsonObject>): Result<VersionedResult<JsonObject>, TracesError>
}

View File

@@ -1,77 +0,0 @@
package net.consensys.linea
import com.github.michaelbull.result.Result
import com.github.michaelbull.result.flatMap
import linea.domain.BlockNumberAndHash
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
/**
 * Counts a block's traces by loading them as a parsed JsonObject and delegating
 * to a [TracesCounterV0].
 */
class TracesCountingServiceWithOriginalJsonCounter(
  private val repository: TracesRepositoryV1,
  private val tracesCounterV0: TracesCounterV0
) : TracesCountingServiceV1 {
  val log: Logger = LogManager.getLogger(this::class.java)

  override fun getBlockTracesCounters(
    block: BlockNumberAndHash,
    version: String
  ): SafeFuture<Result<VersionedResult<BlockCounters>, TracesError>> {
    // Fetch the single block's traces, then count them; repository errors
    // short-circuit through flatMap.
    val fileIndex = TracesFileIndex(block, version)
    return repository.getTraces(listOf(fileIndex))
      .thenApply { fetched ->
        fetched.flatMap { tracesCounterV0.countTraces(it.first().traces) }
      }
  }
}
/**
 * Counts a block's traces by reading them as a single String and delegating to
 * a [TracesCounter], skipping the full JsonObject parse.
 */
class TracesCountingServiceWithEfficientStringParserCounter(
  private val repository: TracesRepositoryV1,
  private val tracesCounter: TracesCounter
) : TracesCountingServiceV1 {
  val log: Logger = LogManager.getLogger(this::class.java)

  override fun getBlockTracesCounters(
    block: BlockNumberAndHash,
    version: String
  ): SafeFuture<Result<VersionedResult<BlockCounters>, TracesError>> {
    // Repository errors short-circuit through flatMap.
    return repository
      .getTracesAsString(TracesFileIndex(block, version))
      .thenApply { fetched -> fetched.flatMap(tracesCounter::countTraces) }
  }
}
/**
 * Counts traces via the efficient String-based counter first and falls back to
 * the original JsonObject-based counter when the String path fails with an
 * [OutOfMemoryError] (a single String is limited to 2^31-1 bytes).
 */
class TracesCountingServiceWithRetry(
  private val efficientCounter: TracesCountingServiceV1,
  private val jsonOriginalCounter: TracesCountingServiceV1
) : TracesCountingServiceV1 {
  val log: Logger = LogManager.getLogger(this::class.java)

  /** Convenience constructor wiring both strategies over the same repository. */
  constructor(
    repository: TracesRepositoryV1,
    tracesCounter: TracesCounter,
    tracesCounterV0: TracesCounterV0
  ) : this(
    efficientCounter = TracesCountingServiceWithEfficientStringParserCounter(repository, tracesCounter),
    jsonOriginalCounter = TracesCountingServiceWithOriginalJsonCounter(repository, tracesCounterV0)
  )

  override fun getBlockTracesCounters(
    block: BlockNumberAndHash,
    version: String
  ): SafeFuture<Result<VersionedResult<BlockCounters>, TracesError>> {
    return efficientCounter.getBlockTracesCounters(block, version)
      .exceptionallyCompose { e ->
        // BUGFIX: also match an unwrapped OutOfMemoryError — previously only e.cause
        // was inspected, so a directly-thrown OOM bypassed the fallback path.
        if (e is OutOfMemoryError || e.cause is OutOfMemoryError) {
          // Parsing the whole file as a string failed; try to parse it as a json object.
          // Less performant but does not try to load the whole file into a single
          // String, which is limited to 2^31-1 bytes.
          jsonOriginalCounter.getBlockTracesCounters(block, version)
        } else {
          log.error("Error getting traces for block={} error={}", block.number, e.message, e)
          SafeFuture.failedFuture(e)
        }
      }
  }
}

View File

@@ -1,33 +0,0 @@
package net.consensys.linea
import com.github.michaelbull.result.Result
import io.vertx.core.json.JsonObject
import linea.domain.BlockNumberAndHash
import tech.pegasys.teku.infrastructure.async.SafeFuture
/** Traces of a single block, paired with the block's number. */
class BlockTraces(
  val blockNumber: ULong,
  // val blockHash: Bytes32,
  val traces: JsonObject
)
/** Conflated traces for the inclusive block range [startBlockNumber]..[endBlockNumber]. */
data class TracesConflation(
  val startBlockNumber: ULong,
  val endBlockNumber: ULong,
  val traces: VersionedResult<JsonObject>
)
/** Identifies one block's traces file: the block's number/hash plus the traces-format version. */
data class TracesFileIndex(val blockIndex: BlockNumberAndHash, val version: String) {
  val number get() = blockIndex.number
  val hash get() = blockIndex.hash
}
/** Read access to per-block traces, either as one raw String or as parsed JSON objects. */
interface TracesRepositoryV1 {
  // Whole traces file as a single String; bounded by the JVM's max String size (2^31-1 chars).
  fun getTracesAsString(block: TracesFileIndex): SafeFuture<Result<String, TracesError>>
  fun getTraces(blocks: List<TracesFileIndex>): SafeFuture<Result<List<BlockTraces>, TracesError>>
}
/** Persistence for conflated traces files. */
interface ConflatedTracesRepository {
  // Returns the existing conflation file name for the range+version, or null when none is saved.
  fun findConflatedTraces(startBlockNumber: ULong, endBlockNumber: ULong, tracesVersion: String): SafeFuture<String?>
  // Saves the conflation and completes with the file name it was stored under.
  fun saveConflatedTraces(conflation: TracesConflation): SafeFuture<String>
}

View File

@@ -1,29 +0,0 @@
package net.consensys.linea
import com.github.michaelbull.result.Result
import io.vertx.core.json.JsonObject
import linea.domain.BlockNumberAndHash
import net.consensys.linea.traces.TracesCounters
import tech.pegasys.teku.infrastructure.async.SafeFuture
data class VersionedResult<T>(val version: String, val result: T)
data class BlockCounters(val tracesCounters: TracesCounters, val blockL1Size: UInt)
/** Computes per-module trace counters for a single block. */
interface TracesCountingServiceV1 {
  fun getBlockTracesCounters(
    block: BlockNumberAndHash,
    version: String
  ): SafeFuture<Result<VersionedResult<BlockCounters>, TracesError>>
}
interface TracesConflationServiceV1 {
fun getConflatedTraces(
blocks: List<BlockNumberAndHash>,
version: String
): SafeFuture<Result<VersionedResult<JsonObject>, TracesError>>
fun generateConflatedTracesToFile(
blocks: List<BlockNumberAndHash>,
version: String
): SafeFuture<Result<VersionedResult<String>, TracesError>>
}