Mirror of https://github.com/vacp2p/linea-monorepo.git (synced 2026-01-09 20:27:58 -05:00)
feat: add metrics in GlobalBlobAwareConflationCalculator and move bri… (#654)
* feat: add metrics in GlobalBlobAwareConflationCalculator and move bridge logs and state root hash retrieval from execution prover client to coordinator
* fix: unit test
* feat: testing for docker cache pipeline
* feat: disabled docker cache
* feat: move encoder back to file-based prover client
* fix: compare with hex string instead of ulong for bridgelog block number
* feat: add test case for ExecutionProofRequestDtoMapper
* feat: revise ExecutionProofRequestDtoMapper test case
* feat: revise metric names and add testing for the added metrics
* feat: added metricscheck
* feat: handle zero transactions in empty blocks for avg-tx-data-size histogram metrics
* feat: resolve conflicts from latest main and fix warning on missing env vars in docker compose
* fix: unit tests
* feat: revise unit tests with fake implementation of histogram to reduce the use of mock
* feat: reuse filtered block counters
.github/workflows/cache-docker-images.yml (vendored, 7 changes)
@@ -39,9 +39,7 @@ jobs:
          list-files: "json"
          filters: |
            cache_images:
              - 'docker/compose.yml'
              - 'docker/compose-local-dev.overrides.yml'
              - 'docker/compose-local-dev-traces-v2.overrides.yml'
              - 'docker/compose-*.yml'

  pull-and-cache-images:
    needs: [ check-dockerhub-secrets-present, changes ]
@@ -73,7 +71,8 @@ jobs:
      - name: Pull docker images
        if: steps.cache-common-docker-images.outputs.cache-hit != 'true'
        run: |
          docker compose -f docker/compose.yml -f docker/compose-local-dev-traces-v2.overrides.yml --profile l1 --profile l2 pull
          COMPOSE_PROFILES=l1,l2 docker compose -f docker/compose-tracing-v1-ci-extension.yml pull && \
          COMPOSE_PROFILES=l1,l2 docker compose -f docker/compose-tracing-v2-ci-extension.yml pull
      - name: Cache docker images
        if: steps.cache-common-docker-images.outputs.cache-hit != 'true'
        run: |

@@ -399,6 +399,7 @@ class L1DependentApp(
        GlobalBlobAwareConflationCalculator(
            conflationCalculator = globalCalculator,
            blobCalculator = compressedBlobCalculator,
            metricsFacade = metricsFacade,
            batchesLimit = batchesLimit
        )
    }
@@ -822,15 +823,15 @@ class L1DependentApp(
    )
    val executionProverClient: ExecutionProverClientV2 = proverClientFactory.executionProverClient(
        tracesVersion = configs.traces.rawExecutionTracesVersion,
        stateManagerVersion = configs.stateManager.version,
        l2MessageServiceLogsClient = l2MessageServiceLogsClient,
        l2Web3jClient = l2Web3jClient
        stateManagerVersion = configs.stateManager.version
    )

    val proofGeneratingConflationHandlerImpl = ProofGeneratingConflationHandlerImpl(
        tracesProductionCoordinator = TracesConflationCoordinatorImpl(tracesConflationClient, zkStateClient),
        zkProofProductionCoordinator = ZkProofCreationCoordinatorImpl(
            executionProverClient = executionProverClient
            executionProverClient = executionProverClient,
            l2MessageServiceLogsClient = l2MessageServiceLogsClient,
            l2Web3jClient = l2Web3jClient
        ),
        batchProofHandler = batchProofHandler,
        vertx = vertx,

@@ -23,6 +23,7 @@ dependencies {
    implementation "com.fasterxml.jackson.module:jackson-module-kotlin:${libs.versions.jackson.get()}"
    implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${libs.versions.jackson.get()}")

    testImplementation testFixtures(project(':coordinator:core'))
    testImplementation testFixtures(project(':jvm-libs:linea:core:domain-models'))
    testImplementation "io.vertx:vertx-junit5"
}

@@ -5,12 +5,10 @@ import com.fasterxml.jackson.databind.node.ArrayNode
import io.vertx.core.Vertx
import linea.encoding.BlockRLPEncoder
import linea.kotlin.encodeHex
import linea.kotlin.toBigInteger
import net.consensys.linea.async.toSafeFuture
import linea.kotlin.toHexString
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofRequestV1
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofResponse
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.L2MessageServiceLogsClient
import net.consensys.zkevm.coordinator.clients.prover.serialization.JsonSerialization
import net.consensys.zkevm.domain.ProofIndex
import net.consensys.zkevm.domain.RlpBridgeLogsData
@@ -18,8 +16,6 @@ import net.consensys.zkevm.encoding.BlockEncoder
import net.consensys.zkevm.fileio.FileReader
import net.consensys.zkevm.fileio.FileWriter
import org.apache.logging.log4j.LogManager
import org.web3j.protocol.Web3j
import org.web3j.protocol.core.DefaultBlockParameter
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.nio.file.Path

@@ -33,45 +29,29 @@ data class BatchExecutionProofRequestDto(
    val blocksData: List<RlpBridgeLogsData>
)

internal class ExecutionProofRequestDataDecorator(
    private val l2MessageServiceLogsClient: L2MessageServiceLogsClient,
    private val l2Web3jClient: Web3j,
internal class ExecutionProofRequestDtoMapper(
    private val encoder: BlockEncoder = BlockRLPEncoder
) : (BatchExecutionProofRequestV1) -> SafeFuture<BatchExecutionProofRequestDto> {
    private fun getBlockStateRootHash(blockNumber: ULong): SafeFuture<String> {
        return l2Web3jClient
            .ethGetBlockByNumber(
                DefaultBlockParameter.valueOf(blockNumber.toBigInteger()),
                false
            )
            .sendAsync()
            .thenApply { block -> block.block.stateRoot }
            .toSafeFuture()
    }

    override fun invoke(request: BatchExecutionProofRequestV1): SafeFuture<BatchExecutionProofRequestDto> {
        val bridgeLogsSfList = request.blocks.map { block ->
            l2MessageServiceLogsClient.getBridgeLogs(blockNumber = block.number.toLong())
                .thenApply { block to it }
        val blocksData = request.blocks.map { block ->
            val rlp = encoder.encode(block).encodeHex()
            val bridgeLogs = request.bridgeLogs.filter {
                it.blockNumber == block.number.toHexString()
            }
            RlpBridgeLogsData(rlp, bridgeLogs)
        }

        return SafeFuture.collectAll(bridgeLogsSfList.stream())
            .thenCombine(
                getBlockStateRootHash(request.blocks.first().number - 1UL)
            ) { blocksAndBridgeLogs, previousKeccakStateRootHash ->
                BatchExecutionProofRequestDto(
                    zkParentStateRootHash = request.type2StateData.zkParentStateRootHash.encodeHex(),
                    keccakParentStateRootHash = previousKeccakStateRootHash,
                    conflatedExecutionTracesFile = request.tracesResponse.tracesFileName,
                    tracesEngineVersion = request.tracesResponse.tracesEngineVersion,
                    type2StateManagerVersion = request.type2StateData.zkStateManagerVersion,
                    zkStateMerkleProof = request.type2StateData.zkStateMerkleProof,
                    blocksData = blocksAndBridgeLogs.map { (block, bridgeLogs) ->
                        val rlp = encoder.encode(block).encodeHex()
                        RlpBridgeLogsData(rlp, bridgeLogs)
                    }
                )
            }
        return SafeFuture.completedFuture(
            BatchExecutionProofRequestDto(
                zkParentStateRootHash = request.type2StateData.zkParentStateRootHash.encodeHex(),
                keccakParentStateRootHash = request.keccakParentStateRootHash.encodeHex(),
                conflatedExecutionTracesFile = request.tracesResponse.tracesFileName,
                tracesEngineVersion = request.tracesResponse.tracesEngineVersion,
                type2StateManagerVersion = request.type2StateData.zkStateManagerVersion,
                zkStateMerkleProof = request.type2StateData.zkStateMerkleProof,
                blocksData = blocksData
            )
        )
    }
}

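Editor's note on the mapper above: the `fix: compare with hex string instead of ulong for bridgelog block number` bullet refers to the `request.bridgeLogs.filter { it.blockNumber == block.number.toHexString() }` line. Bridge logs carry their block number as a 0x-prefixed hex string, so the block's ULong number has to be rendered as hex before comparing. A minimal standalone sketch of the same comparison (hypothetical snippet; it assumes toHexString() yields a lowercase 0x-prefixed string, which the test data in this commit implies):

    // Hypothetical illustration: matching a bridge log to block 747066.
    val logBlockNumber = "0xb663a"      // as carried on BridgeLogsData.blockNumber
    val blockNumber: ULong = 747066uL
    // Comparing as numbers requires parsing the hex string first:
    check(logBlockNumber.removePrefix("0x").toULong(16) == blockNumber)
    // The mapper instead compares as hex strings (assumes toHexString() == "0xb663a"):
    check(logBlockNumber == blockNumber.toHexString())
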
@@ -90,9 +70,7 @@ class FileBasedExecutionProverClientV2(
    config: FileBasedProverConfig,
    private val tracesVersion: String,
    private val stateManagerVersion: String,
    l2MessageServiceLogsClient: L2MessageServiceLogsClient,
    vertx: Vertx,
    l2Web3jClient: Web3j,
    jsonObjectMapper: ObjectMapper = JsonSerialization.proofResponseMapperV1,
    executionProofRequestFileNameProvider: ProverFileNameProvider =
        ExecutionProofRequestFileNameProvider(
@@ -114,7 +92,7 @@ class FileBasedExecutionProverClientV2(
            fileReader = FileReader(vertx, jsonObjectMapper, Any::class.java),
            requestFileNameProvider = executionProofRequestFileNameProvider,
            responseFileNameProvider = executionProofResponseFileNameProvider,
            requestMapper = ExecutionProofRequestDataDecorator(l2MessageServiceLogsClient, l2Web3jClient),
            requestMapper = ExecutionProofRequestDtoMapper(),
            responseMapper = { throw UnsupportedOperationException("Batch execution proof response shall not be parsed!") },
            proofTypeLabel = "batch",
            log = LogManager.getLogger(FileBasedExecutionProverClientV2::class.java)

@@ -2,7 +2,6 @@ package net.consensys.zkevm.coordinator.clients.prover

import io.vertx.core.Vertx
import linea.domain.BlockInterval
import net.consensys.linea.contract.Web3JL2MessageServiceLogsClient
import net.consensys.linea.metrics.LineaMetricsCategory
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.GaugeAggregator
@@ -10,7 +9,6 @@ import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.ProofAggregationProverClientV2
import net.consensys.zkevm.coordinator.clients.ProverClient
import org.web3j.protocol.Web3j

class ProverClientFactory(
    private val vertx: Vertx,
@@ -45,9 +43,7 @@ class ProverClientFactory(

    fun executionProverClient(
        tracesVersion: String,
        stateManagerVersion: String,
        l2MessageServiceLogsClient: Web3JL2MessageServiceLogsClient,
        l2Web3jClient: Web3j
        stateManagerVersion: String
    ): ExecutionProverClientV2 {
        return createClient(
            proverAConfig = config.proverA.execution,
@@ -58,9 +54,7 @@ class ProverClientFactory(
            config = proverConfig,
            vertx = vertx,
            tracesVersion = tracesVersion,
            stateManagerVersion = stateManagerVersion,
            l2MessageServiceLogsClient = l2MessageServiceLogsClient,
            l2Web3jClient = l2Web3jClient
            stateManagerVersion = stateManagerVersion
        ).also { executionWaitingResponsesMetric.addReporter(it) }
    }
}

@@ -1,45 +0,0 @@
package net.consensys.zkevm.coordinator.clients.prover

import net.consensys.linea.contract.parseBridgeLogsData
import org.web3j.protocol.core.methods.response.EthLog

object CommonTestData {
    val validTransactionRlp = this::class.java.getResource("/valid-transaction.rlp")!!.readText().trim()
    val ethLogs: List<EthLog.LogObject> =
        listOf(
            EthLog.LogObject(
                false,
                "0x64",
                "0x3c",
                "0xc2375a8e64bf9c3f4c2b1700735f6042a37e44d0c3df8c1735daeaa684197874",
                "0xb042425dedd752d905e97706ff4778d92fa6e3971017392380d8bef0153d0603",
                "0xb663a",
                "0x91ba8a14d2cc851abb69212c09f59e06e1e7f0a5",
                "0x0000000000000000000000000000000000000000000000008ac7230489" +
                    "e800000000000000000000000000007ef911f8ef130f73d166468c0068753932357b17",
                null,
                listOf(
                    "0x1cedfb451d4da3a63f72a945ba92f51b8fd558a5be4652404d037bb1578ff582",
                    "0x0000000000000000000000000000000000000000000000000000000000040648"
                )
            ),
            EthLog.LogObject(
                false,
                "0x177",
                "0x8d",
                "0x4796d8934c88a1faa97aa6cf45a22aff130a51d0b8aeb0fbf5d7adb9985ea298",
                "0xb042425dedd752d905e97706ff4778d92fa6e3971017392380d8bef0153d0603",
                "0xb663a",
                "0x91ba8a14d2cc851abb69212c09f59e06e1e7f0a5",
                "0x",
                null,
                listOf(
                    "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
                    "0x0000000000000000000000000000000000000000000000000000000000000000",
                    "0x000000000000000000000000f0bed1cff809d9314809a6905ad3d6efd7a062e7",
                    "0x0000000000000000000000000000000000000000000000000000000000070341"
                )
            )
        )
    val bridgeLogs = ethLogs.map { ethLog -> parseBridgeLogsData(ethLog) }
}

@@ -0,0 +1,88 @@
package net.consensys.zkevm.coordinator.clients.prover

import build.linea.clients.GetZkEVMStateMerkleProofResponse
import com.fasterxml.jackson.databind.node.ArrayNode
import linea.domain.Block
import linea.domain.createBlock
import linea.kotlin.ByteArrayExt
import linea.kotlin.encodeHex
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofRequestV1
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
import net.consensys.zkevm.domain.CommonTestData
import net.consensys.zkevm.domain.RlpBridgeLogsData
import net.consensys.zkevm.encoding.BlockEncoder
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.mockito.kotlin.spy
import kotlin.random.Random

class ExecutionProofRequestDtoMapperTest {

    private lateinit var encoder: BlockEncoder
    private lateinit var requestDtoMapper: ExecutionProofRequestDtoMapper
    private val fakeEncoder: BlockEncoder = object : BlockEncoder {
        override fun encode(block: Block): ByteArray {
            return block.number.toString().toByteArray()
        }
    }

    @BeforeEach
    fun beforeEach() {
        encoder = spy(fakeEncoder)
        requestDtoMapper = ExecutionProofRequestDtoMapper(encoder)
    }

    @Test
    fun `should return request dto with correct rlp and bridge logs`() {
        val block1 = createBlock(number = 747066UL)
        val block2 = createBlock(number = 747067UL)
        val block3 = createBlock(number = 747068UL)
        val type2StateResponse = GetZkEVMStateMerkleProofResponse(
            zkStateMerkleProof = ArrayNode(null),
            zkParentStateRootHash = ByteArrayExt.random32(),
            zkEndStateRootHash = ByteArrayExt.random32(),
            zkStateManagerVersion = "2.0.0"
        )
        val generateTracesResponse = GenerateTracesResponse(
            tracesFileName = "747066-747068-conflated-traces.json",
            tracesEngineVersion = "1.0.0"
        )
        val stateRoot = Random.nextBytes(32)
        val request = BatchExecutionProofRequestV1(
            blocks = listOf(block1, block2, block3),
            bridgeLogs = CommonTestData.bridgeLogs,
            tracesResponse = generateTracesResponse,
            type2StateData = type2StateResponse,
            keccakParentStateRootHash = stateRoot
        )

        val requestDto = requestDtoMapper.invoke(request).get()

        assertThat(requestDto.keccakParentStateRootHash).isEqualTo(stateRoot.encodeHex())
        assertThat(requestDto.zkParentStateRootHash).isEqualTo(type2StateResponse.zkParentStateRootHash.encodeHex())
        assertThat(requestDto.conflatedExecutionTracesFile).isEqualTo("747066-747068-conflated-traces.json")
        assertThat(requestDto.tracesEngineVersion).isEqualTo("1.0.0")
        assertThat(requestDto.type2StateManagerVersion).isEqualTo("2.0.0")
        assertThat(requestDto.zkStateMerkleProof).isEqualTo(type2StateResponse.zkStateMerkleProof)
        assertThat(requestDto.blocksData).hasSize(3)
        assertThat(requestDto.blocksData[0]).isEqualTo(
            RlpBridgeLogsData(
                rlp = "747066".toByteArray().encodeHex(),
                bridgeLogs = listOf(CommonTestData.bridgeLogs[0], CommonTestData.bridgeLogs[1])
            )
        )
        assertThat(requestDto.blocksData[1]).isEqualTo(
            RlpBridgeLogsData(
                rlp = "747067".toByteArray().encodeHex(),
                bridgeLogs = emptyList()
            )
        )
        assertThat(requestDto.blocksData[2]).isEqualTo(
            RlpBridgeLogsData(
                rlp = "747068".toByteArray().encodeHex(),
                bridgeLogs = listOf(CommonTestData.bridgeLogs[2])
            )
        )
    }
}

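A note on the fixture above: the fake BlockEncoder encodes a block as the ASCII bytes of its number, which makes the expected rlp values deterministic without real RLP encoding. Assuming encodeHex produces a 0x-prefixed hex string, the expectation for block 747066 works out as:

    // '7' = 0x37, '4' = 0x34, '0' = 0x30, '6' = 0x36
    "747066".toByteArray().encodeHex() // == "0x373437303636" (assuming a 0x prefix)
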
@@ -12,12 +12,12 @@ dependencies {
    api project(':jvm-libs:linea:clients:interfaces')
    api project(':jvm-libs:linea:clients:linea-state-manager')
    api project(':jvm-libs:linea:core:traces')
    api project(':jvm-libs:linea:web3j-extensions')
    api project(':jvm-libs:generic:errors')
    api project(':jvm-libs:generic:extensions:kotlin')
    api project(':jvm-libs:generic:extensions:futures')
    api "tech.pegasys.teku.internal:unsigned:${libs.versions.teku.get()}"
    api "org.jetbrains.kotlinx:kotlinx-datetime:${libs.versions.kotlinxDatetime.get()}"
    implementation project(":jvm-libs:linea:metrics:micrometer")
    implementation "io.vertx:vertx-core"
    // jackson shall never be used in the core module
    // however, it is used already :( but was as transitive through Teku Execution Client
@@ -34,7 +34,7 @@ dependencies {
    testFixturesImplementation("org.web3j:core:${libs.versions.web3j.get()}") {
        exclude group: 'org.slf4j', module: 'slf4j-nop'
    }
    testImplementation project(':jvm-libs:linea:metrics:micrometer')
    testFixturesImplementation project(':jvm-libs:linea:metrics:micrometer')
    testImplementation(testFixtures(project(':jvm-libs:linea:core:traces')))
    testImplementation(testFixtures(project(':jvm-libs:generic:extensions:kotlin')))
    testImplementation("io.vertx:vertx-junit5")

@@ -3,11 +3,14 @@ package net.consensys.zkevm.coordinator.clients
import build.linea.clients.GetZkEVMStateMerkleProofResponse
import linea.domain.Block
import linea.domain.BlockInterval
import net.consensys.zkevm.domain.BridgeLogsData

data class BatchExecutionProofRequestV1(
    val blocks: List<Block>,
    val bridgeLogs: List<BridgeLogsData>,
    val tracesResponse: GenerateTracesResponse,
    val type2StateData: GetZkEVMStateMerkleProofResponse
    val type2StateData: GetZkEVMStateMerkleProofResponse,
    val keccakParentStateRootHash: ByteArray
) : BlockInterval {
    override val startBlockNumber: ULong
        get() = blocks.first().number

@@ -72,8 +72,19 @@ data class BlockCounters(
    val blockNumber: ULong,
    val blockTimestamp: Instant,
    val tracesCounters: TracesCounters,
    val blockRLPEncoded: ByteArray
    val blockRLPEncoded: ByteArray,
    val numOfTransactions: UInt = 0u,
    val gasUsed: ULong = 0uL
) {

    override fun toString(): String {
        return "BlockCounters(blockNumber=$blockNumber, " +
            "blockTimestamp=$blockTimestamp, " +
            "tracesCounters=$tracesCounters, " +
            "blockRLPEncoded=${blockRLPEncoded.size}bytes), " +
            "numOfTransactions=$numOfTransactions"
    }

    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (javaClass != other?.javaClass) return false
@@ -84,8 +95,8 @@ data class BlockCounters(
        if (blockTimestamp != other.blockTimestamp) return false
        if (tracesCounters != other.tracesCounters) return false
        if (!blockRLPEncoded.contentEquals(other.blockRLPEncoded)) return false

        return true
        if (numOfTransactions != other.numOfTransactions) return false
        return gasUsed == other.gasUsed
    }

    override fun hashCode(): Int {
@@ -93,13 +104,8 @@ data class BlockCounters(
        result = 31 * result + blockTimestamp.hashCode()
        result = 31 * result + tracesCounters.hashCode()
        result = 31 * result + blockRLPEncoded.contentHashCode()
        result = 31 * result + numOfTransactions.hashCode()
        result = 31 * result + gasUsed.hashCode()
        return result
    }

    override fun toString(): String {
        return "BlockCounters(blockNumber=$blockNumber, " +
            "blockTimestamp=$blockTimestamp, " +
            "tracesCounters=$tracesCounters, " +
            "blockRLPEncoded=${blockRLPEncoded.size}bytes)"
    }
}

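The hand-written equals/hashCode above exist because BlockCounters holds a ByteArray, and Kotlin's generated data-class implementations would compare the array by reference; the diff extends both methods to cover the two new fields. A minimal sketch of the underlying pitfall:

    // Why the manual equals: ByteArray uses reference equality by default.
    val a = byteArrayOf(1, 2)
    val b = byteArrayOf(1, 2)
    check(a != b)              // structural equals is NOT generated for arrays
    check(a.contentEquals(b))  // contentEquals compares the bytes
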
@@ -54,7 +54,9 @@ class BlockToBatchSubmissionCoordinator(
                blockNumber = blockEvent.block.number,
                blockTimestamp = Instant.fromEpochSeconds(blockEvent.block.timestamp.toLong()),
                tracesCounters = traces.tracesCounters,
                blockRLPEncoded = blockRLPEncoded
                blockRLPEncoded = blockRLPEncoded,
                numOfTransactions = blockEvent.block.transactions.size.toUInt(),
                gasUsed = blockEvent.block.gasUsed
            )
        )
    }.whenException { th ->

@@ -42,10 +42,8 @@ class ConflationCalculatorByDataCompressed(
            throw IllegalStateException("Trying to append unvalidated block. Please call checkOverflow first.")
        }
        val appendResult = blobCompressor.appendBlock(blockCounters.blockRLPEncoded)
        val compressionRatio = 1.0 - (
            (appendResult.compressedSizeAfter - appendResult.compressedSizeBefore).toDouble() /
                blockCounters.blockRLPEncoded.size
            )
        val compressedDataSize = appendResult.compressedSizeAfter - appendResult.compressedSizeBefore
        val compressionRatio = 1.0 - compressedDataSize.toDouble().div(blockCounters.blockRLPEncoded.size)
        log.debug(
            "compression result: blockNumber={} blockRlpSize={} blobSizeBefore={} " +
                "blobSizeAfter={} blockCompressionRatio={}",
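A quick worked example of the refactored formula: if a block's RLP payload is 1000 bytes and appending it grows the blob from 4000 to 4300 bytes, then compressedDataSize = 300 and compressionRatio = 1.0 - 300.0 / 1000 = 0.7, i.e. the block compressed to 30% of its raw size.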
@@ -82,6 +80,10 @@ class ConflationCalculatorByDataCompressed(
        blobCompressor.startNewBatch()
    }

    fun getCompressedDataSizeInCurrentBatch(): UInt {
        return dataSize - dataSizeUpToLastBatch
    }

    fun getCompressedData(): ByteArray {
        val data = blobCompressor.getCompressedData()
        dataDrained = true

@@ -1,7 +1,9 @@
package net.consensys.zkevm.ethereum.coordination.conflation

import linea.domain.CommonDomainFunctions.blockIntervalString
import linea.domain.BlockInterval
import linea.domain.toBlockIntervalsString
import net.consensys.linea.metrics.LineaMetricsCategory
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.zkevm.domain.Blob
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationCalculationResult
@@ -22,6 +24,7 @@ class GlobalBlobAwareConflationCalculator(
    private val conflationCalculator: GlobalBlockConflationCalculator,
    private val blobCalculator: ConflationCalculatorByDataCompressed,
    private val batchesLimit: UInt,
    private val metricsFacade: MetricsFacade,
    private val log: Logger = LogManager.getLogger(GlobalBlobAwareConflationCalculator::class.java)
) : TracesConflationCalculator {
    private var conflationHandler: (ConflationCalculationResult) -> SafeFuture<*> = NOOP_CONSUMER
@@ -32,10 +35,97 @@ class GlobalBlobAwareConflationCalculator(
    override val lastBlockNumber: ULong
        get() = conflationCalculator.lastBlockNumber

    private val gasUsedInBlobHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BLOB,
        name = "gas",
        description = "Total gas in each blob"
    )
    private val compressedDataSizeInBlobHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BLOB,
        name = "compressed.data.size",
        description = "Compressed L2 data size in bytes of each blob"
    )
    private val uncompressedDataSizeInBlobHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BLOB,
        name = "uncompressed.data.size",
        description = "Uncompressed L2 data size in bytes of each blob"
    )
    private val gasUsedInBatchHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BATCH,
        name = "gas",
        description = "Total gas in each batch"
    )
    private val compressedDataSizeInBatchHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BATCH,
        name = "compressed.data.size",
        description = "Compressed L2 data size in bytes of each batch"
    )
    private val uncompressedDataSizeInBatchHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BATCH,
        name = "uncompressed.data.size",
        description = "Uncompressed L2 data size in bytes of each batch"
    )
    private val avgCompressedTxDataSizeInBatchHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BATCH,
        name = "avg.compressed.tx.data.size",
        description = "Average compressed transaction data size in bytes of each batch"
    )
    private val avgUncompressedTxDataSizeInBatchHistogram = metricsFacade.createHistogram(
        category = LineaMetricsCategory.BATCH,
        name = "avg.uncompressed.tx.data.size",
        description = "Average uncompressed transaction data size in bytes of each batch"
    )

    init {
        conflationCalculator.onConflatedBatch(this::handleBatchTrigger)
    }

    private fun recordBatchMetrics(conflation: ConflationCalculationResult) {
        runCatching {
            val filteredBlockCounters = blobBlockCounters
                .filter { conflation.blocksRange.contains(it.blockNumber) }
            val gasUsedInBatch = filteredBlockCounters.sumOf { it.gasUsed }
            val uncompressedDataSizeInBatch = filteredBlockCounters.sumOf { it.blockRLPEncoded.size }
            val numOfTransactionsInBatch = filteredBlockCounters.sumOf { it.numOfTransactions }
            val compressedDataSizeInBatch = blobCalculator.getCompressedDataSizeInCurrentBatch()
            gasUsedInBatchHistogram.record(gasUsedInBatch.toDouble())
            uncompressedDataSizeInBatchHistogram.record(uncompressedDataSizeInBatch.toDouble())
            compressedDataSizeInBatchHistogram.record(compressedDataSizeInBatch.toDouble())
            avgUncompressedTxDataSizeInBatchHistogram.record(
                if (numOfTransactionsInBatch > 0U) {
                    uncompressedDataSizeInBatch.div(numOfTransactionsInBatch.toInt()).toDouble()
                } else {
                    0.0
                }
            )
            avgCompressedTxDataSizeInBatchHistogram.record(
                if (numOfTransactionsInBatch > 0U) {
                    compressedDataSizeInBatch.toInt().div(numOfTransactionsInBatch.toInt()).toDouble()
                } else {
                    0.0
                }
            )
        }.onFailure {
            log.error("Error when recording batch metrics: errorMessage={}", it.message)
        }
    }
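The `numOfTransactionsInBatch > 0U` guards above are the `handle zero transactions in empty blocks` fix from the commit message: a batch consisting only of empty blocks would otherwise divide by zero when computing the per-transaction averages. The same guard as a standalone helper (hypothetical name, mirroring the integer division used above):

    // Hypothetical helper: average bytes per transaction, 0.0 for empty batches.
    fun avgTxDataSize(totalDataSize: Int, txCount: UInt): Double =
        if (txCount > 0U) (totalDataSize / txCount.toInt()).toDouble() else 0.0
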

    private fun recordBlobMetrics(blobInterval: BlockInterval, blobCompressedDataSize: Int) {
        runCatching {
            val filteredBlockCounters = blobBlockCounters
                .filter { blobInterval.blocksRange.contains(it.blockNumber) }
            gasUsedInBlobHistogram.record(
                filteredBlockCounters.sumOf { it.gasUsed }.toDouble()
            )
            uncompressedDataSizeInBlobHistogram.record(
                filteredBlockCounters.sumOf { it.blockRLPEncoded.size }.toDouble()
            )
            compressedDataSizeInBlobHistogram.record(blobCompressedDataSize.toDouble())
        }.onFailure {
            log.error("Error when recording blob metrics: errorMessage={}", it.message)
        }
    }

    @Synchronized
    override fun newBlock(blockCounters: BlockCounters) {
        blobBlockCounters.add(blockCounters)
@@ -47,6 +137,10 @@ class GlobalBlobAwareConflationCalculator(
        log.trace("handleBatchTrigger: numberOfBatches={} conflation={}", numberOfBatches, conflation)
        blobBatches.add(conflation)
        numberOfBatches += 1U

        // Record the batch metrics
        recordBatchMetrics(conflation)

        val future = conflationHandler.invoke(conflation)
        if (conflation.conflationTrigger == ConflationTrigger.DATA_LIMIT ||
            conflation.conflationTrigger == ConflationTrigger.TIME_LIMIT ||
@@ -66,17 +160,21 @@ class GlobalBlobAwareConflationCalculator(

    private fun fireBlobTriggerAndResetState(trigger: ConflationTrigger) {
        val compressedData = blobCalculator.getCompressedData()
        val blobInterval = BlockInterval(
            blobBatches.first().startBlockNumber,
            blobBatches.last().endBlockNumber
        )
        val blob = Blob(
            conflations = blobBatches,
            compressedData = compressedData,
            startBlockTime = blobBlockCounters
                .find { it.blockNumber == blobBatches.first().startBlockNumber }!!.blockTimestamp,
                .find { it.blockNumber == blobInterval.startBlockNumber }!!.blockTimestamp,
            endBlockTime = blobBlockCounters
                .find { it.blockNumber == blobBatches.last().endBlockNumber }!!.blockTimestamp
                .find { it.blockNumber == blobInterval.endBlockNumber }!!.blockTimestamp
        )
        log.info(
            "new blob: blob={} trigger={} blobSizeBytes={} blobBatchesCount={} blobBatchesLimit={} blobBatchesList={}",
            blockIntervalString(blobBatches.first().startBlockNumber, blobBatches.last().endBlockNumber),
            blobInterval.intervalString(),
            trigger,
            compressedData.size,
            blobBatches.size,
@@ -84,6 +182,10 @@ class GlobalBlobAwareConflationCalculator(
            blobBatches.toBlockIntervalsString()
        )
        blobHandler.handleBlob(blob)

        // Record the blob metrics
        recordBlobMetrics(blobInterval, compressedData.size)

        blobBatches = run {
            blobBatches.forEach { conflation ->
                blobBlockCounters.removeIf { conflation.blocksRange.contains(it.blockNumber) }

@@ -1,37 +1,69 @@
package net.consensys.zkevm.ethereum.coordination.proofcreation

import build.linea.web3j.domain.toWeb3j
import linea.domain.BlockParameter.Companion.toBlockParameter
import net.consensys.linea.async.toSafeFuture
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofRequestV1
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.L2MessageServiceLogsClient
import net.consensys.zkevm.domain.Batch
import net.consensys.zkevm.domain.BlocksConflation
import net.consensys.zkevm.ethereum.coordination.conflation.BlocksTracesConflated
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import org.web3j.protocol.Web3j
import tech.pegasys.teku.infrastructure.async.SafeFuture

class ZkProofCreationCoordinatorImpl(
    private val executionProverClient: ExecutionProverClientV2
    private val executionProverClient: ExecutionProverClientV2,
    private val l2MessageServiceLogsClient: L2MessageServiceLogsClient,
    private val l2Web3jClient: Web3j
) : ZkProofCreationCoordinator {
    private val log: Logger = LogManager.getLogger(this::class.java)

    private fun getBlockStateRootHash(blockNumber: ULong): SafeFuture<String> {
        return l2Web3jClient
            .ethGetBlockByNumber(
                blockNumber.toBlockParameter().toWeb3j(),
                false
            )
            .sendAsync()
            .thenApply { block -> block.block.stateRoot }
            .toSafeFuture()
    }

    override fun createZkProof(
        blocksConflation: BlocksConflation,
        traces: BlocksTracesConflated
    ): SafeFuture<Batch> {
        val startBlockNumber = blocksConflation.blocks.first().number.toULong()
        val endBlockNumber = blocksConflation.blocks.last().number.toULong()
        val startBlockNumber = blocksConflation.blocks.first().number
        val endBlockNumber = blocksConflation.blocks.last().number
        val blocksConflationInterval = blocksConflation.intervalString()
        val bridgeLogsListFutures = blocksConflation.blocks.map { block ->
            l2MessageServiceLogsClient.getBridgeLogs(blockNumber = block.number.toLong())
        }

        return executionProverClient
            .requestProof(BatchExecutionProofRequestV1(blocksConflation.blocks, traces.tracesResponse, traces.zkStateTraces))
            .thenApply {
                Batch(
                    startBlockNumber = startBlockNumber,
                    endBlockNumber = endBlockNumber
                )
            }
            .whenException {
                log.error("Prover returned for batch={} errorMessage={}", blocksConflationInterval, it.message, it)
        return getBlockStateRootHash(blocksConflation.startBlockNumber - 1UL)
            .thenCompose { previousKeccakStateRootHash ->
                SafeFuture.collectAll(bridgeLogsListFutures.stream())
                    .thenCompose { bridgeLogsList ->
                        executionProverClient.requestProof(
                            BatchExecutionProofRequestV1(
                                blocks = blocksConflation.blocks,
                                bridgeLogs = bridgeLogsList.flatten(),
                                tracesResponse = traces.tracesResponse,
                                type2StateData = traces.zkStateTraces,
                                keccakParentStateRootHash = previousKeccakStateRootHash.encodeToByteArray()
                            )
                        ).thenApply {
                            Batch(
                                startBlockNumber = startBlockNumber,
                                endBlockNumber = endBlockNumber
                            )
                        }.whenException {
                            log.error("Prover returned for batch={} errorMessage={}", blocksConflationInterval, it.message, it)
                        }
                    }
            }
    }
}

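Net effect of the rewrite above: the coordinator now assembles everything the prover request needs before calling the client. It first fetches the keccak state root of block startBlockNumber - 1 (the parent state the execution proof starts from), then collects the bridge logs of every block in the conflation, and only then invokes requestProof. A condensed sketch of the chaining shape (hypothetical simplification; buildRequest stands in for the BatchExecutionProofRequestV1 construction shown above):

    getBlockStateRootHash(startBlockNumber - 1UL)              // 1. parent keccak state root
        .thenCompose { parentRoot ->
            SafeFuture.collectAll(bridgeLogsFutures.stream())  // 2. bridge logs per block
                .thenCompose { logs ->
                    executionProverClient.requestProof(buildRequest(parentRoot, logs.flatten()))
                }
        }
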
@@ -6,6 +6,7 @@ import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
import linea.domain.BlockIntervals
import linea.kotlin.trimToSecondPrecision
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
import net.consensys.zkevm.coordinator.clients.ProofAggregationProverClientV2
import net.consensys.zkevm.domain.Aggregation
@@ -19,9 +20,9 @@ import net.consensys.zkevm.persistence.AggregationsRepository
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.mockito.Mockito.anyLong
import org.mockito.Mockito.mock
import org.mockito.kotlin.any
import org.mockito.kotlin.argThat
import org.mockito.kotlin.mock
import org.mockito.kotlin.verify
import org.mockito.kotlin.whenever
import tech.pegasys.teku.infrastructure.async.SafeFuture
@@ -74,7 +75,9 @@ class ProofAggregationCoordinatorServiceTest {
    val mockAggregationCalculator = mock<AggregationCalculator>()
    val mockAggregationsRepository = mock<AggregationsRepository>()
    val mockProofAggregationClient = mock<ProofAggregationProverClientV2>()
    val aggregationL2StateProvider = mock<AggregationL2StateProvider>()
    val mockAggregationL2StateProvider = mock<AggregationL2StateProvider>()
    val meterRegistry = SimpleMeterRegistry()
    val metricsFacade: MetricsFacade = MicrometerMetricsFacade(registry = meterRegistry)

    val config = ProofAggregationCoordinatorService.Config(
        pollingInterval = 10.milliseconds,
@@ -92,8 +95,8 @@ class ProofAggregationCoordinatorServiceTest {
        aggregationsRepository = mockAggregationsRepository,
        consecutiveProvenBlobsProvider = mockAggregationsRepository::findConsecutiveProvenBlobs,
        proofAggregationClient = mockProofAggregationClient,
        aggregationL2StateProvider = aggregationL2StateProvider,
        metricsFacade = MicrometerMetricsFacade(registry = SimpleMeterRegistry()),
        aggregationL2StateProvider = mockAggregationL2StateProvider,
        metricsFacade = metricsFacade,
        provenAggregationEndBlockNumberConsumer = provenAggregationEndBlockNumberConsumer
    )
    verify(mockAggregationCalculator).onAggregation(proofAggregationCoordinatorService)
@@ -148,7 +151,7 @@ class ProofAggregationCoordinatorServiceTest {
        parentAggregationLastL1RollingHash = ByteArray(32)
    )

    whenever(aggregationL2StateProvider.getAggregationL2State(anyLong()))
    whenever(mockAggregationL2StateProvider.getAggregationL2State(anyLong()))
        .thenAnswer { SafeFuture.completedFuture(rollingInfo1) }
        .thenAnswer { SafeFuture.completedFuture(rollingInfo2) }

@@ -220,6 +223,12 @@ class ProofAggregationCoordinatorServiceTest {
    // First aggregation should Trigger
    proofAggregationCoordinatorService.action().get()

    assertThat(meterRegistry.summary("aggregation.blocks.size").count()).isEqualTo(1)
    assertThat(meterRegistry.summary("aggregation.batches.size").count()).isEqualTo(1)
    assertThat(meterRegistry.summary("aggregation.blobs.size").count()).isEqualTo(1)
    assertThat(meterRegistry.summary("aggregation.blocks.size").max()).isEqualTo(23.0)
    assertThat(meterRegistry.summary("aggregation.batches.size").max()).isEqualTo(6.0)
    assertThat(meterRegistry.summary("aggregation.blobs.size").max()).isEqualTo(2.0)
    verify(mockProofAggregationClient).requestProof(proofsToAggregate1)
    verify(mockAggregationsRepository).saveNewAggregation(aggregation1)
    assertThat(provenAggregation).isEqualTo(aggregation1.endBlockNumber)
@@ -227,9 +236,14 @@ class ProofAggregationCoordinatorServiceTest {
    // Second aggregation should Trigger
    proofAggregationCoordinatorService.action().get()

    assertThat(meterRegistry.summary("aggregation.blocks.size").count()).isEqualTo(2)
    assertThat(meterRegistry.summary("aggregation.batches.size").count()).isEqualTo(2)
    assertThat(meterRegistry.summary("aggregation.blobs.size").count()).isEqualTo(2)
    assertThat(meterRegistry.summary("aggregation.blocks.size").max()).isEqualTo(27.0)
    assertThat(meterRegistry.summary("aggregation.batches.size").max()).isEqualTo(6.0)
    assertThat(meterRegistry.summary("aggregation.blobs.size").max()).isEqualTo(2.0)
    verify(mockProofAggregationClient).requestProof(proofsToAggregate2)
    verify(mockAggregationsRepository).saveNewAggregation(aggregation2)

    assertThat(provenAggregation).isEqualTo(aggregation2.endBlockNumber)
}
}

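The summary names asserted here (aggregation.blocks.size and friends) suggest how MicrometerMetricsFacade composes registry names: the LineaMetricsCategory becomes the prefix and the metric name the suffix. Presumably the histograms added in this commit therefore surface as blob.gas, batch.gas, batch.avg.compressed.tx.data.size, and so on.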
@@ -1,9 +1,14 @@
package net.consensys.zkevm.ethereum.coordination.conflation

import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import kotlinx.datetime.Instant
import linea.domain.BlockHeaderSummary
import linea.kotlin.ByteArrayExt
import net.consensys.FakeFixedClock
import net.consensys.linea.metrics.FakeHistogram
import net.consensys.linea.metrics.LineaMetricsCategory
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.zkevm.domain.Blob
@@ -17,6 +22,10 @@ import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.mockito.Mockito
import org.mockito.Mockito.doReturn
import org.mockito.kotlin.any
import org.mockito.kotlin.anyOrNull
import org.mockito.kotlin.eq
import org.mockito.kotlin.mock
import org.mockito.kotlin.spy
import org.mockito.kotlin.whenever
@@ -43,6 +52,17 @@ class GlobalBlobAwareConflationCalculatorTest {
    private lateinit var blobs: MutableList<Blob>
    private val defaultBatchesLimit = 2U
    private val conflationTargetEndBlockNumbers: MutableSet<ULong> = mutableSetOf()
    private lateinit var metricsFacade: MetricsFacade

    // histogram metrics mocks
    private lateinit var fakeGasUsedInBlobHistogram: FakeHistogram
    private lateinit var fakeCompressedDataSizeInBlobHistogram: FakeHistogram
    private lateinit var fakeUncompressedDataSizeInBlobHistogram: FakeHistogram
    private lateinit var fakeGasUsedInBatchHistogram: FakeHistogram
    private lateinit var fakeCompressedDataSizeInBatchHistogram: FakeHistogram
    private lateinit var fakeUncompressedDataSizeInBatchHistogram: FakeHistogram
    private lateinit var fakeAvgCompressedTxDataSizeInBatchHistogram: FakeHistogram
    private lateinit var fakeAvgUncompressedTxDataSizeInBatchHistogram: FakeHistogram

    @BeforeEach
    fun beforeEach() {
@@ -55,6 +75,92 @@
                SafeFuture.failedFuture(RuntimeException("getLatestSafeBlockHeader not mocked yet"))
            )
        }
        metricsFacade = spy(MicrometerMetricsFacade(registry = SimpleMeterRegistry()))
        doReturn(FakeHistogram().also { fakeGasUsedInBlobHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BLOB), eq("gas"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeCompressedDataSizeInBlobHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BLOB), eq("compressed.data.size"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeUncompressedDataSizeInBlobHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BLOB), eq("uncompressed.data.size"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeGasUsedInBatchHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BATCH), eq("gas"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeCompressedDataSizeInBatchHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BATCH), eq("compressed.data.size"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeUncompressedDataSizeInBatchHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BATCH), eq("uncompressed.data.size"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeAvgCompressedTxDataSizeInBatchHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BATCH), eq("avg.compressed.tx.data.size"),
                any(), any(), any(), anyOrNull()
            )
        doReturn(FakeHistogram().also { fakeAvgUncompressedTxDataSizeInBatchHistogram = it })
            .whenever(metricsFacade).createHistogram(
                eq(LineaMetricsCategory.BATCH), eq("avg.uncompressed.tx.data.size"),
                any(), any(), any(), anyOrNull()
            )

        calculatorByDealine = spy(
            ConflationCalculatorByTimeDeadline(
                config = ConflationCalculatorByTimeDeadline.Config(
@@ -86,7 +192,8 @@
        calculator = GlobalBlobAwareConflationCalculator(
            conflationCalculator = globalCalculator,
            blobCalculator = calculatorByDataCompressed,
            batchesLimit = defaultBatchesLimit
            batchesLimit = defaultBatchesLimit,
            metricsFacade = metricsFacade
        )
        conflations = mutableListOf()
        blobs = mutableListOf()
@@ -110,7 +217,9 @@
                blockNumber = it,
                blockTimestamp = fakeClockTime,
                tracesCounters = fakeTracesCountersV1(1u),
                blockRLPEncoded = ByteArray(11)
                blockRLPEncoded = ByteArray(11),
                numOfTransactions = 1u,
                gasUsed = 10uL
            )
        }
        blockCounters.forEach {
@@ -139,6 +248,16 @@
        assertThat(blobs[1].conflations).isEqualTo(conflations.subList(1, 2))
        assertThat(blobs[1].startBlockTime).isEqualTo(blockCounters[5].blockTimestamp)
        assertThat(blobs[1].endBlockTime).isEqualTo(blockCounters[9].blockTimestamp)

        // verify histogram metrics
        assertThat(fakeGasUsedInBlobHistogram.records).isEqualTo(listOf(50.0, 50.0))
        assertThat(fakeCompressedDataSizeInBlobHistogram.records).isEqualTo(listOf(55.0, 55.0))
        assertThat(fakeUncompressedDataSizeInBlobHistogram.records).isEqualTo(listOf(55.0, 55.0))
        assertThat(fakeGasUsedInBatchHistogram.records).isEqualTo(listOf(50.0, 50.0))
        assertThat(fakeCompressedDataSizeInBatchHistogram.records).isEqualTo(listOf(55.0, 55.0))
        assertThat(fakeUncompressedDataSizeInBatchHistogram.records).isEqualTo(listOf(55.0, 55.0))
        assertThat(fakeAvgCompressedTxDataSizeInBatchHistogram.records).isEqualTo(listOf(11.0, 11.0))
        assertThat(fakeAvgUncompressedTxDataSizeInBatchHistogram.records).isEqualTo(listOf(11.0, 11.0))
    }

    @Test
@@ -147,31 +266,51 @@
            blockNumber = 1uL,
            blockTimestamp = fakeClockTime,
            tracesCounters = fakeTracesCountersV1(10u),
            blockRLPEncoded = ByteArray(11)
            blockRLPEncoded = ByteArray(11),
            numOfTransactions = 1u,
            gasUsed = 10uL
        )
        val block2Counters = BlockCounters(
            blockNumber = 2uL,
            blockTimestamp = block1Counters.blockTimestamp.plus(blockTime),
            tracesCounters = fakeTracesCountersV1(10u),
            blockRLPEncoded = ByteArray(12)
            blockRLPEncoded = ByteArray(12),
            numOfTransactions = 1u,
            gasUsed = 10uL
        )
        val block3Counters = BlockCounters(
            blockNumber = 3uL,
            blockTimestamp = block2Counters.blockTimestamp.plus(blockTime),
            tracesCounters = fakeTracesCountersV1(10u),
            blockRLPEncoded = ByteArray(83)
            blockRLPEncoded = ByteArray(83),
            numOfTransactions = 1u,
            gasUsed = 10uL
        )
        val block4Counters = BlockCounters(
            blockNumber = 4uL,
            blockTimestamp = block3Counters.blockTimestamp.plus(blockTime),
            tracesCounters = fakeTracesCountersV1(10u),
            blockRLPEncoded = ByteArray(44)
            blockRLPEncoded = ByteArray(44),
            numOfTransactions = 1u,
            gasUsed = 10uL
        )

        calculator.newBlock(block1Counters)
        calculator.newBlock(block2Counters)

        // up till now no batch and blob histogram metrics should be recorded
        assertThat(fakeGasUsedInBlobHistogram.records).isEmpty()
        assertThat(fakeCompressedDataSizeInBlobHistogram.records).isEmpty()
        assertThat(fakeUncompressedDataSizeInBlobHistogram.records).isEmpty()
        assertThat(fakeGasUsedInBatchHistogram.records).isEmpty()
        assertThat(fakeCompressedDataSizeInBatchHistogram.records).isEmpty()
        assertThat(fakeUncompressedDataSizeInBatchHistogram.records).isEmpty()
        assertThat(fakeAvgCompressedTxDataSizeInBatchHistogram.records).isEmpty()
        assertThat(fakeAvgUncompressedTxDataSizeInBatchHistogram.records).isEmpty()

        // block 3 goes over data limit, so it should emit conflation and blob events
        calculator.newBlock(block3Counters)

        // block 4 goes over data limit, so it should emit conflation and blob events
        calculator.newBlock(block4Counters)
        assertThat(calculator.lastBlockNumber).isEqualTo(4uL)
@@ -200,6 +339,16 @@
        assertThat(blobs[1].conflations).isEqualTo(conflations.subList(1, 2))
        assertThat(blobs[1].startBlockTime).isEqualTo(block3Counters.blockTimestamp)
        assertThat(blobs[1].endBlockTime).isEqualTo(block3Counters.blockTimestamp)

        // verify batch and blob histogram metrics
        assertThat(fakeGasUsedInBlobHistogram.records).isEqualTo(listOf(20.0, 10.0))
        assertThat(fakeCompressedDataSizeInBlobHistogram.records).isEqualTo(listOf(23.0, 83.0))
        assertThat(fakeUncompressedDataSizeInBlobHistogram.records).isEqualTo(listOf(23.0, 83.0))
        assertThat(fakeGasUsedInBatchHistogram.records).isEqualTo(listOf(20.0, 10.0))
        assertThat(fakeCompressedDataSizeInBatchHistogram.records).isEqualTo(listOf(23.0, 83.0))
        assertThat(fakeUncompressedDataSizeInBatchHistogram.records).isEqualTo(listOf(23.0, 83.0))
        assertThat(fakeAvgCompressedTxDataSizeInBatchHistogram.records).isEqualTo(listOf(11.0, 83.0))
        assertThat(fakeAvgUncompressedTxDataSizeInBatchHistogram.records).isEqualTo(listOf(11.0, 83.0))
    }

    @Test

@@ -1,6 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.conflation.upgrade

import kotlinx.datetime.Instant
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.zkevm.domain.BlockCounters
@@ -12,6 +13,7 @@ import net.consensys.zkevm.ethereum.coordination.conflation.GlobalBlockConflatio
import net.consensys.zkevm.ethereum.coordination.conflation.TracesConflationCalculator
import org.assertj.core.api.Assertions
import org.junit.jupiter.api.Test
import org.mockito.Mockito
import org.mockito.Mockito.clearInvocations
import org.mockito.Mockito.mock
import org.mockito.Mockito.spy
@@ -49,7 +51,8 @@ class SwitchAwareCalculatorTest {
                TracesCountersV1.EMPTY_TRACES_COUNT
            ),
            blobCalculator = mockBlobCalculator,
            batchesLimit = 10U
            batchesLimit = 10U,
            metricsFacade = org.mockito.kotlin.mock<MetricsFacade>(defaultAnswer = Mockito.RETURNS_DEEP_STUBS)
        )
        return newCalculator
    }

@@ -1,16 +1,20 @@
package net.consensys.zkevm.coordinator.clients.prover
package net.consensys.zkevm.ethereum.coordination.proofcreation

import build.linea.clients.GetZkEVMStateMerkleProofResponse
import com.fasterxml.jackson.databind.node.ArrayNode
import linea.domain.Block
import linea.domain.createBlock
import linea.kotlin.ByteArrayExt
import linea.kotlin.encodeHex
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofRequestV1
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofResponse
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
import net.consensys.zkevm.coordinator.clients.L2MessageServiceLogsClient
import net.consensys.zkevm.domain.RlpBridgeLogsData
import net.consensys.zkevm.encoding.BlockEncoder
import net.consensys.zkevm.domain.BlocksConflation
import net.consensys.zkevm.domain.CommonTestData
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.domain.ConflationTrigger
import net.consensys.zkevm.ethereum.coordination.conflation.BlocksTracesConflated
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
@@ -19,35 +23,33 @@ import org.mockito.kotlin.any
import org.mockito.kotlin.doReturn
import org.mockito.kotlin.eq
import org.mockito.kotlin.mock
import org.mockito.kotlin.spy
import org.mockito.kotlin.whenever
import org.web3j.protocol.Web3j
import org.web3j.protocol.core.methods.response.EthBlock
import tech.pegasys.teku.infrastructure.async.SafeFuture
import kotlin.random.Random

class ExecutionProofRequestDataDecoratorTest {
class ZkProofCreationCoordinatorImplTest {

    private lateinit var l2MessageServiceLogsClient: L2MessageServiceLogsClient
    private lateinit var l2Web3jClient: Web3j
    private lateinit var encoder: BlockEncoder
    private lateinit var requestDatDecorator: ExecutionProofRequestDataDecorator
    private val fakeEncoder: BlockEncoder = object : BlockEncoder {
        override fun encode(block: Block): ByteArray {
            return block.number.toString().toByteArray()
        }
    }
    private lateinit var executionProverClient: ExecutionProverClientV2
    private lateinit var zkProofCreationCoordinator: ZkProofCreationCoordinator

    @BeforeEach
    fun beforeEach() {
        l2MessageServiceLogsClient = mock(defaultAnswer = Mockito.RETURNS_DEEP_STUBS)
        l2Web3jClient = mock<Web3j>(defaultAnswer = Mockito.RETURNS_DEEP_STUBS)
        encoder = spy(fakeEncoder)
        requestDatDecorator = ExecutionProofRequestDataDecorator(l2MessageServiceLogsClient, l2Web3jClient, encoder)
        executionProverClient = mock<ExecutionProverClientV2>(defaultAnswer = Mockito.RETURNS_DEEP_STUBS)
        zkProofCreationCoordinator = ZkProofCreationCoordinatorImpl(
            executionProverClient = executionProverClient,
            l2MessageServiceLogsClient = l2MessageServiceLogsClient,
            l2Web3jClient = l2Web3jClient
        )
    }

    @Test
    fun `should decorate data with bridge logs and parent stateRootHash`() {
    fun `should return batch with correct fields`() {
        val block1 = createBlock(number = 123UL)
        val block2 = createBlock(number = 124UL)
        val type2StateResponse = GetZkEVMStateMerkleProofResponse(
@@ -57,14 +59,10 @@ class ExecutionProofRequestDataDecoratorTest {
            zkStateManagerVersion = "2.0.0"
        )
        val generateTracesResponse = GenerateTracesResponse(
            tracesFileName = "123-114-conflated-traces.json",
            tracesFileName = "123-124-conflated-traces.json",
            tracesEngineVersion = "1.0.0"
        )
        val request = BatchExecutionProofRequestV1(
            blocks = listOf(block1, block2),
            tracesResponse = generateTracesResponse,
            type2StateData = type2StateResponse
        )

        val stateRoot = Random.nextBytes(32).encodeHex()
        whenever(l2Web3jClient.ethGetBlockByNumber(any(), any()).sendAsync())
            .thenAnswer {
@@ -79,26 +77,33 @@
        whenever(l2MessageServiceLogsClient.getBridgeLogs(eq(block2.number.toLong())))
            .thenReturn(SafeFuture.completedFuture(listOf(CommonTestData.bridgeLogs[1])))

        val requestDto = requestDatDecorator.invoke(request).get()
        whenever(executionProverClient.requestProof(any()))
            .thenReturn(
                SafeFuture.completedFuture(
                    BatchExecutionProofResponse(
                        startBlockNumber = 123UL,
                        endBlockNumber = 124UL
                    )
                )
            )

        assertThat(requestDto.keccakParentStateRootHash).isEqualTo(stateRoot)
        assertThat(requestDto.zkParentStateRootHash).isEqualTo(type2StateResponse.zkParentStateRootHash.encodeHex())
        assertThat(requestDto.conflatedExecutionTracesFile).isEqualTo("123-114-conflated-traces.json")
        assertThat(requestDto.tracesEngineVersion).isEqualTo("1.0.0")
        assertThat(requestDto.type2StateManagerVersion).isEqualTo("2.0.0")
        assertThat(requestDto.zkStateMerkleProof).isEqualTo(type2StateResponse.zkStateMerkleProof)
        assertThat(requestDto.blocksData).hasSize(2)
        assertThat(requestDto.blocksData[0]).isEqualTo(
            RlpBridgeLogsData(
                rlp = "123".toByteArray().encodeHex(),
                bridgeLogs = listOf(CommonTestData.bridgeLogs[0])
        val batch = zkProofCreationCoordinator.createZkProof(
            blocksConflation = BlocksConflation(
                blocks = listOf(block1, block2),
                conflationResult = ConflationCalculationResult(
                    startBlockNumber = 123UL,
                    endBlockNumber = 124UL,
                    conflationTrigger = ConflationTrigger.TRACES_LIMIT,
                    tracesCounters = fakeTracesCountersV1(0u)
                )
            ),
            traces = BlocksTracesConflated(
                tracesResponse = generateTracesResponse,
                zkStateTraces = type2StateResponse
            )
        )
        assertThat(requestDto.blocksData[1]).isEqualTo(
            RlpBridgeLogsData(
                rlp = "124".toByteArray().encodeHex(),
                bridgeLogs = listOf(CommonTestData.bridgeLogs[1])
            )
        )
        ).get()

        assertThat(batch.startBlockNumber).isEqualTo(123UL)
        assertThat(batch.endBlockNumber).isEqualTo(124UL)
    }
}
@@ -0,0 +1,52 @@
package net.consensys.zkevm.domain

object CommonTestData {
    val bridgeLogs = listOf(
        BridgeLogsData(
            false,
            "0x64",
            "0x3c",
            "0xc2375a8e64bf9c3f4c2b1700735f6042a37e44d0c3df8c1735daeaa684197874",
            "0xb042425dedd752d905e97706ff4778d92fa6e3971017392380d8bef0153d0603",
            "0xb663a",
            "0x91ba8a14d2cc851abb69212c09f59e06e1e7f0a5",
            "0x0000000000000000000000000000000000000000000000008ac7230489" +
                "e800000000000000000000000000007ef911f8ef130f73d166468c0068753932357b17",
            listOf(
                "0x1cedfb451d4da3a63f72a945ba92f51b8fd558a5be4652404d037bb1578ff582",
                "0x0000000000000000000000000000000000000000000000000000000000040648"
            )
        ),
        BridgeLogsData(
            false,
            "0x177",
            "0x8d",
            "0x4796d8934c88a1faa97aa6cf45a22aff130a51d0b8aeb0fbf5d7adb9985ea298",
            "0xb042425dedd752d905e97706ff4778d92fa6e3971017392380d8bef0153d0603",
            "0xb663a",
            "0x91ba8a14d2cc851abb69212c09f59e06e1e7f0a5",
            "0x",
            listOf(
                "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
                "0x0000000000000000000000000000000000000000000000000000000000000000",
                "0x000000000000000000000000f0bed1cff809d9314809a6905ad3d6efd7a062e7",
                "0x0000000000000000000000000000000000000000000000000000000000070341"
            )
        ),
        BridgeLogsData(
            false,
            "0x1",
            "0x0",
            "0x204ff9d95cecc20de8950bd2be922d8f255281858add0245b1d1e3ab129424be",
            "0x4b74e51c999babd9214f2dc347f588d7c73705f6d23d66130d8622d2f9b35525",
            "0xb663c",
            "0x508ca82df566dcd1b0de8296e70a96332cd644ec",
            "0x",
            listOf(
                "0x99b65a4301b38c09fb6a5f27052d73e8372bbe8f6779d678bfe8a41b66cce7ac",
                "0x00000000000000000000000000000000000000000000000000000000000b18f7",
                "0x48bd01fc5d686725f7c618c2a2e798da08b557c681dce782ef15c3e81c2da737"
            )
        )
    )
}
@@ -124,4 +124,3 @@ services:
      - loki
    networks:
      - linea

@@ -78,9 +78,9 @@ services:
      - /bin/bash
      - -c
      - |
        PRAGUE=${PRAGUE} bash /generate-genesis.sh
        PRAGUE=${PRAGUE:-} bash /generate-genesis.sh
        /usr/local/bin/eth2-testnet-genesis deneb \
        --config /config/network-config.yml \${L1_GENESIS_TIME:+--timestamp ${L1_GENESIS_TIME} \}
        --config /config/network-config.yml \${L1_GENESIS_TIME:+--timestamp ${L1_GENESIS_TIME:-} \}
        --mnemonics /config/mnemonics.yaml \
        --tranches-dir /data/l1-node-config/tranches \
        --state-output /data/l1-node-config/genesis.ssz \
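This hunk is the `fix warning on missing env vars in docker compose` bullet: `${PRAGUE:-}` and `${L1_GENESIS_TIME:-}` use the shell's default-value expansion, which substitutes an empty string when the variable is unset, so docker compose no longer warns (and `set -u` shells no longer abort) when those variables are not exported.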
@@ -76,3 +76,10 @@ interface MetricsFacade {
        tagValueExtractor: Function<T, String>
    ): TimerCapture<T>
}

class FakeHistogram : Histogram {
    val records = mutableListOf<Double>()
    override fun record(data: Double) {
        records.add(data)
    }
}
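FakeHistogram is the `fake implementation of histogram to reduce the use of mock` from the commit message: instead of stubbing record() with Mockito, tests capture every recorded value and assert on the whole list. A minimal usage sketch:

    // Record a couple of values and assert on exactly what was captured.
    val histogram = FakeHistogram()
    histogram.record(55.0)
    histogram.record(23.0)
    check(histogram.records == listOf(55.0, 23.0))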