Mirror of https://github.com/scroll-tech/scroll.git

Compare commits: v4.1.16...chunk-info (14 commits)
| SHA1 |
|---|
| 628df733ee |
| 938f05064c |
| 28fa44d3b8 |
| 6b7ca50599 |
| c5b80937ce |
| ea3e08ab2a |
| 0360f44ff6 |
| 1b57982368 |
| b09c2bbecb |
| 7d2a516be1 |
| ee55fe3d51 |
| 09d7764dcb |
| 4cd199b3b3 |
| ced64e8563 |
@@ -374,15 +374,6 @@ func (r *Layer2Relayer) ProcessPendingBatches() {

// send transaction
txID := batch.Hash + "-commit"
log.Info(
"Sending commitBatch",
"batch.Index", batch.Index,
"batch.Hash", batch.Hash,
"RollupContractAddress", r.cfg.RollupContractAddress,
"calldata", common.Bytes2Hex(calldata),
"parentBatch.BatchHeader", common.Bytes2Hex(parentBatch.BatchHeader),
"batch.BatchHeader", common.Bytes2Hex(batch.BatchHeader),
)
txHash, err := r.rollupSender.SendTransaction(txID, &r.cfg.RollupContractAddress, big.NewInt(0), calldata, 0)
if err != nil {
if !errors.Is(err, sender.ErrNoAvailableAccount) && !errors.Is(err, sender.ErrFullPending) {
@@ -433,7 +424,6 @@ func (r *Layer2Relayer) ProcessCommittedBatches() {
return
case types.ProvingTaskVerified:
log.Info("Start to roll up zk proof", "hash", hash)
success := false

var parentBatchStateRoot string
if batch.Index > 0 {
@@ -447,24 +437,14 @@ func (r *Layer2Relayer) ProcessCommittedBatches() {
parentBatchStateRoot = parentBatch.StateRoot
}

defer func() {
// TODO: need to revisit this and have a more fine-grained error handling
if !success {
log.Info("Failed to upload the proof, change rollup status to RollupFinalizeFailed", "hash", hash)
if err = r.batchOrm.UpdateRollupStatus(r.ctx, hash, types.RollupFinalizeFailed); err != nil {
log.Warn("UpdateRollupStatus failed", "hash", hash, "err", err)
}
}
}()

aggProof, err := r.batchOrm.GetVerifiedProofByHash(r.ctx, hash)
if err != nil {
log.Warn("get verified proof by hash failed", "hash", hash, "err", err)
log.Error("get verified proof by hash failed", "hash", hash, "err", err)
return
}

if err = aggProof.SanityCheck(); err != nil {
log.Warn("agg_proof sanity check fails", "hash", hash, "error", err)
log.Error("agg_proof sanity check fails", "hash", hash, "error", err)
return
}

@@ -487,8 +467,18 @@ func (r *Layer2Relayer) ProcessCommittedBatches() {
finalizeTxHash := &txHash
if err != nil {
if !errors.Is(err, sender.ErrNoAvailableAccount) && !errors.Is(err, sender.ErrFullPending) {
log.Error("finalizeBatchWithProof in layer1 failed",
"index", batch.Index, "hash", batch.Hash, "err", err)
// This can happen normally if we try to finalize 2 or more
// batches around the same time. The 2nd tx might fail since
// the client does not see the 1st tx's updates at this point.
// TODO: add more fine-grained error handling
log.Error(
"finalizeBatchWithProof in layer1 failed",
"index", batch.Index,
"hash", batch.Hash,
"RollupContractAddress", r.cfg.RollupContractAddress,
"calldata", common.Bytes2Hex(data),
"err", err,
)
}
return
}
@@ -498,11 +488,10 @@ func (r *Layer2Relayer) ProcessCommittedBatches() {
// record and sync with db, @todo handle db error
err = r.batchOrm.UpdateFinalizeTxHashAndRollupStatus(r.ctx, hash, finalizeTxHash.String(), types.RollupFinalizing)
if err != nil {
log.Warn("UpdateFinalizeTxHashAndRollupStatus failed",
log.Error("UpdateFinalizeTxHashAndRollupStatus failed",
"index", batch.Index, "batch hash", batch.Hash,
"tx hash", finalizeTxHash.String(), "err", err)
}
success = true
r.processingFinalization.Store(txID, hash)

case types.ProvingTaskFailed:

@@ -90,10 +90,9 @@ func testL2RelayerProcessCommittedBatches(t *testing.T) {
statuses, err := batchOrm.GetRollupStatusByHashList(context.Background(), []string{batch.Hash})
assert.NoError(t, err)
assert.Equal(t, 1, len(statuses))
assert.Equal(t, types.RollupFinalizeFailed, statuses[0])
// no valid proof, rollup status remains the same
assert.Equal(t, types.RollupCommitted, statuses[0])

err = batchOrm.UpdateRollupStatus(context.Background(), batch.Hash, types.RollupCommitted)
assert.NoError(t, err)
proof := &message.BatchProof{
Proof: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
}
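The relayer hunks above revolve around a flag-and-defer pattern: `success` starts false, error paths return early, and a deferred closure marks the batch `RollupFinalizeFailed` only if `success` was never set. A minimal, self-contained Go sketch of that pattern (`markFinalizeFailed` is a hypothetical stand-in for `batchOrm.UpdateRollupStatus`):

```go
package main

import "log"

// markFinalizeFailed stands in for batchOrm.UpdateRollupStatus(ctx, hash, RollupFinalizeFailed).
func markFinalizeFailed(hash string) {
	log.Printf("rollup status -> FinalizeFailed for batch %s", hash)
}

// finalize sketches the success-flag pattern used in ProcessCommittedBatches:
// any early return before success is set triggers the deferred rollback.
func finalize(hash string) {
	success := false
	defer func() {
		if !success {
			markFinalizeFailed(hash)
		}
	}()

	// ... load the verified proof, sanity-check it, send the finalize tx ...
	// Returning on any of those errors leaves success == false.

	success = true // set only once the tx hash has been persisted
}

func main() {
	finalize("0xabc")
}
```

Any panic or early return between the deferred closure and the final assignment leaves `success` false, which is what triggers the rollback in this sketch.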
common/libzkp/impl/Cargo.lock (generated, 24 changed lines)
@@ -32,7 +32,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"ark-std",
"env_logger 0.10.0",
@@ -432,7 +432,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"eth-types",
"ethers-core",
@@ -1048,7 +1048,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"ethers-core",
"ethers-signers",
@@ -1225,7 +1225,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"eth-types",
"geth-utils",
@@ -1438,7 +1438,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"digest 0.7.6",
"eth-types",
@@ -1478,7 +1478,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"env_logger 0.9.3",
"gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)",
@@ -2076,7 +2076,7 @@ dependencies = [
[[package]]
name = "keccak256"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"env_logger 0.9.3",
"eth-types",
@@ -2263,7 +2263,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"eth-types",
"ethers-core",
@@ -2278,7 +2278,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"bus-mapping",
"eth-types",
@@ -2754,7 +2754,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.3#337089ac40bac756d88b9ae30a3be1f82538b216"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.8#785bef96944a27bc2d0ddb41623fcb77de527824"
dependencies = [
"aggregator",
"anyhow",
@@ -4039,7 +4039,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "types"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.3#337089ac40bac756d88b9ae30a3be1f82538b216"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.8#785bef96944a27bc2d0ddb41623fcb77de527824"
dependencies = [
"base64 0.13.1",
"blake2",
@@ -4490,7 +4490,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
[[package]]
name = "zkevm-circuits"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
dependencies = [
"array-init",
"bus-mapping",
@@ -20,8 +20,8 @@ maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-
halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }

[dependencies]
prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.3" }
types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.3" }
prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.8" }
types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.8" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }

log = "0.4"
@@ -48,6 +48,9 @@ pub unsafe extern "C" fn gen_batch_proof(
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs).unwrap();
assert_eq!(chunk_hashes.len(), chunk_proofs.len());

log::error!("gupeng - rust - 1 - chunk_hashes = {chunk_hashes:#?}");
log::error!("gupeng - rust - 1 - chunk_proofs = {chunk_proofs:?}");

let chunk_hashes_proofs = chunk_hashes
.into_iter()
.zip(chunk_proofs.into_iter())
@@ -5,7 +5,7 @@ import (
"runtime/debug"
)

var tag = "v4.1.16"
var tag = "v4.1.22"

var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {
@@ -22,6 +22,7 @@ contract InitializeL1BridgeContracts is Script {

uint256 CHAIN_ID_L2 = vm.envUint("CHAIN_ID_L2");
uint256 MAX_L2_TX_IN_CHUNK = vm.envUint("MAX_L2_TX_IN_CHUNK");
uint256 MAX_L1_MESSAGE_GAS_LIMIT = vm.envUint("MAX_L1_MESSAGE_GAS_LIMIT");
address L1_ROLLUP_OPERATOR_ADDR = vm.envAddress("L1_ROLLUP_OPERATOR_ADDR");
address L1_FEE_VAULT_ADDR = vm.envAddress("L1_FEE_VAULT_ADDR");
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
@@ -84,7 +85,7 @@ contract InitializeL1BridgeContracts is Script {
L1_SCROLL_CHAIN_PROXY_ADDR,
L1_ENFORCED_TX_GATEWAY_PROXY_ADDR,
L2_GAS_PRICE_ORACLE_PROXY_ADDR,
10000000
MAX_L1_MESSAGE_GAS_LIMIT
);

// initialize L1ScrollMessenger
@@ -106,6 +106,17 @@ contract ScrollChainTest is DSTestPlus {
hevm.expectRevert("invalid chunk length");
rollup.commitBatch(0, batchHeader0, chunks, new bytes(0));

// num txs less than num L1 msgs, revert
chunk0 = new bytes(1 + 60);
bytes memory bitmap = new bytes(32);
chunk0[0] = bytes1(uint8(1)); // one block in this chunk
chunk0[58] = bytes1(uint8(1)); // numTransactions = 1
chunk0[60] = bytes1(uint8(3)); // numL1Messages = 3
bitmap[31] = bytes1(uint8(7));
chunks[0] = chunk0;
hevm.expectRevert("num txs less than num L1 msgs");
rollup.commitBatch(0, batchHeader0, chunks, bitmap);

// incomplete l2 transaction data, revert
chunk0 = new bytes(1 + 60 + 1);
chunk0[0] = bytes1(uint8(1)); // one block in this chunk
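The new revert case above encodes a chunk by hand: one byte for the block count, a 60-byte block context whose trailing bytes carry numTransactions and numL1Messages, and a separate 32-byte bitmap for the L1 messages passed to commitBatch. As an illustration only (offsets copied from the test, helper name hypothetical, not the canonical chunk codec), the same bytes could be assembled in Go like this:

```go
package chunkcodec

// buildTestChunk mirrors the byte offsets poked in the ScrollChainTest case
// above. It is a sketch for illustration; the real encoding lives in the
// rollup contracts and the Go chunk types.
func buildTestChunk(numTxs, numL1Msgs uint8) (chunk, l1MsgBitmap []byte) {
	chunk = make([]byte, 1+60) // 1 length byte + one 60-byte block context
	chunk[0] = 1               // one block in this chunk
	chunk[58] = numTxs         // numTransactions of the block
	chunk[60] = numL1Msgs      // numL1Messages of the block

	l1MsgBitmap = make([]byte, 32)
	l1MsgBitmap[31] = (1 << numL1Msgs) - 1 // e.g. 3 L1 messages -> 0b111, as in the test
	return chunk, l1MsgBitmap
}
```

Calling buildTestChunk(1, 3) reproduces the chunk0/bitmap pair used in the "num txs less than num L1 msgs" case.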
@@ -64,7 +64,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
}

batchTask := batchTasks[0]
log.Info("start batch proof generation session", "id", batchTask.Hash)
log.Info("start batch proof generation session", "id", batchTask.Hash, "public key", publicKey, "prover name", proverName)

if !bp.checkAttemptsExceeded(batchTask.Hash, message.ProofTypeBatch) {
return nil, fmt.Errorf("the batch task id:%s check attempts have reach the maximum", batchTask.Hash)
@@ -66,7 +66,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato

chunkTask := chunkTasks[0]

log.Info("start chunk generation session", "id", chunkTask.Hash)
log.Info("start chunk generation session", "id", chunkTask.Hash, "public key", publicKey, "prover name", proverName)

if !cp.checkAttemptsExceeded(chunkTask.Hash, message.ProofTypeChunk) {
return nil, fmt.Errorf("chunk proof hash id:%s check attempts have reach the maximum", chunkTask.Hash)
@@ -50,7 +50,7 @@ func (b *BaseProverTask) checkAttemptsExceeded(hash string, taskType message.Pro
if len(proverTasks) >= int(b.cfg.ProverManager.SessionAttempts) {
coordinatorSessionsTimeoutTotalCounter.Inc(1)

log.Warn("proof generation prover task %s ended because reach the max attempts", hash)
log.Warn("proof generation prover task reach the max attempts", "hash", hash)

transErr := b.db.Transaction(func(tx *gorm.DB) error {
switch message.ProofType(proverTasks[0].TaskType) {
@@ -294,6 +294,7 @@ func (o *Batch) UpdateUnassignedBatchReturning(ctx context.Context, limit int) (
var batches []*Batch
db = db.Model(&batches).Clauses(clause.Returning{})
db = db.Where("index = (?)", subQueryDB)
db = db.Where("proving_status = ?", types.ProvingTaskUnassigned)
if err := db.Update("proving_status", types.ProvingTaskAssigned).Error; err != nil {
return nil, fmt.Errorf("Batch.UpdateUnassignedBatchReturning error: %w", err)
}

@@ -364,6 +364,7 @@ func (o *Chunk) UpdateUnassignedChunkReturning(ctx context.Context, height, limi
var chunks []*Chunk
db = db.Model(&chunks).Clauses(clause.Returning{})
db = db.Where("index = (?)", subQueryDB)
db = db.Where("proving_status = ?", types.ProvingTaskUnassigned)
if err := db.Update("proving_status", types.ProvingTaskAssigned).Error; err != nil {
return nil, fmt.Errorf("Chunk.UpdateUnassignedBatchReturning error: %w", err)
}
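Both ORM hunks add the same guard: the UPDATE ... RETURNING that claims a task now also filters on proving_status, so a row that is no longer unassigned cannot be claimed a second time. A rough GORM sketch of that query shape, with a stand-in model and status constants (the real ones live in the scroll-tech packages):

```go
package ormsketch

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

// Batch is a stand-in for the real ORM model; only the columns this query
// touches are declared.
type Batch struct {
	Index         uint64
	ProvingStatus int16
}

const (
	provingTaskUnassigned int16 = iota + 1
	provingTaskAssigned
)

// claimBatches sketches the guarded claim. With the extra proving_status
// predicate the generated SQL is roughly:
//
//	UPDATE batches SET proving_status = <assigned>
//	WHERE index = (<sub-query>) AND proving_status = <unassigned>
//	RETURNING *;
func claimBatches(db, subQuery *gorm.DB) ([]*Batch, error) {
	var batches []*Batch
	err := db.Model(&batches).
		Clauses(clause.Returning{}).
		Where("index = (?)", subQuery).
		Where("proving_status = ?", provingTaskUnassigned).
		Update("proving_status", provingTaskAssigned).Error
	return batches, err
}
```

Under concurrent coordinators, a second claim of the same row simply matches zero rows instead of re-assigning an already claimed task.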
@@ -1,21 +1,21 @@
.PHONY: lint docker clean prover mock-prover

ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
ZKEVM_VERSION=$(shell grep -m 1 "scroll-prover" ../common/libzkp/impl/Cargo.lock | cut -d "#" -f2 | cut -c-7)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ../common/libzkp/impl/Cargo.lock | cut -d "#" -f2 | cut -c-7)
ZKEVM_VERSION=$(shell grep -m 1 "scroll-prover" ../common/libzkp/impl/Cargo.lock | cut -d "#" -f2 | cut -c-7)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ../common/libzkp/impl/Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
ZKEVM_VERSION=$(shell grep -m 1 "scroll-prover" ../common/libzkp/impl/Cargo.lock | cut -d "\#" -f2 | cut -c-7)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ../common/libzkp/impl/Cargo.lock | cut -d "\#" -f2 | cut -c-7)
ZKEVM_VERSION=$(shell grep -m 1 "scroll-prover" ../common/libzkp/impl/Cargo.lock | cut -d "\#" -f2 | cut -c-7)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ../common/libzkp/impl/Cargo.lock | cut -d "\#" -f2 | cut -c-7)
endif

HALO2_GPU_VERSION=$(shell ./print_halo2gpu_version.sh | sed -n '2p')

ifeq (${HALO2_GPU_VERSION},)
# use halo2_proofs with CPU
ZK_VERSION=${ZKEVM_VERSION}-${HALO2_VERSION}
ZK_VERSION=${ZKEVM_VERSION}-${HALO2_VERSION}
else
# use halo2_gpu
ZK_VERSION=${ZKEVM_VERSION}-${HALO2_GPU_VERSION}
ZK_VERSION=${ZKEVM_VERSION}-${HALO2_GPU_VERSION}
endif

libzkp:
@@ -13,7 +13,6 @@ import (
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/stretchr/testify/assert"

scrollTypes "scroll-tech/common/types"
"scroll-tech/common/types/message"

"scroll-tech/prover/config"
@@ -21,68 +20,16 @@ import (
)

var (
paramsPath = flag.String("params", "/assets/test_params", "params dir")
proofDumpPath = flag.String("dump", "/assets/proof_data", "the path proofs dump to")
tracePath1 = flag.String("trace1", "/assets/traces/1_transfer.json", "chunk trace 1")
tracePath2 = flag.String("trace2", "/assets/traces/10_transfer.json", "chunk trace 2")
paramsPath = flag.String("params", "/assets/test_params", "params dir")
proofDumpPath = flag.String("dump", "/assets/proof_data", "the path proofs dump to")
tracePath1 = flag.String("trace1", "/assets/traces/1_transfer.json", "chunk trace 1")
tracePath2 = flag.String("trace2", "/assets/traces/10_transfer.json", "chunk trace 2")
batchDetailPath = flag.String("batch_detail", "/assets/traces/full_proof_3.json", "batch detail")
)

func TestFFI(t *testing.T) {
as := assert.New(t)

chunkProverConfig := &config.ProverCoreConfig{
DumpDir: *proofDumpPath,
ParamsPath: *paramsPath,
ProofType: message.ProofTypeChunk,
}
chunkProverCore, err := core.NewProverCore(chunkProverConfig)
as.NoError(err)
t.Log("Constructed chunk prover")

chunkTrace1 := readChunkTrace(*tracePath1, as)
chunkTrace2 := readChunkTrace(*tracePath2, as)
t.Log("Loaded chunk traces")

chunkInfo1, err := chunkProverCore.TracesToChunkInfo(chunkTrace1)
as.NoError(err)
chunkInfo2, err := chunkProverCore.TracesToChunkInfo(chunkTrace2)
as.NoError(err)
t.Log("Converted to chunk infos")

wrappedBlock1 := &scrollTypes.WrappedBlock{
Header: chunkTrace1[0].Header,
Transactions: chunkTrace1[0].Transactions,
WithdrawRoot: chunkTrace1[0].WithdrawTrieRoot,
}
chunk1 := &scrollTypes.Chunk{Blocks: []*scrollTypes.WrappedBlock{wrappedBlock1}}
chunkHash1, err := chunk1.Hash(0)
as.NoError(err)
as.Equal(chunkInfo1.PostStateRoot, wrappedBlock1.Header.Root)
as.Equal(chunkInfo1.WithdrawRoot, wrappedBlock1.WithdrawRoot)
as.Equal(chunkInfo1.DataHash, chunkHash1)
t.Log("Successful to check chunk info 1")

wrappedBlock2 := &scrollTypes.WrappedBlock{
Header: chunkTrace2[0].Header,
Transactions: chunkTrace2[0].Transactions,
WithdrawRoot: chunkTrace2[0].WithdrawTrieRoot,
}
chunk2 := &scrollTypes.Chunk{Blocks: []*scrollTypes.WrappedBlock{wrappedBlock2}}
chunkHash2, err := chunk2.Hash(chunk1.NumL1Messages(0))
as.NoError(err)
as.Equal(chunkInfo2.PostStateRoot, wrappedBlock2.Header.Root)
as.Equal(chunkInfo2.WithdrawRoot, wrappedBlock2.WithdrawRoot)
as.Equal(chunkInfo2.DataHash, chunkHash2)
t.Log("Successful to check chunk info 2")

chunkProof1, err := chunkProverCore.ProveChunk("chunk_proof1", chunkTrace1)
as.NoError(err)
t.Log("Generated and dumped chunk proof 1")

chunkProof2, err := chunkProverCore.ProveChunk("chunk_proof2", chunkTrace2)
as.NoError(err)
t.Log("Generated and dumped chunk proof 2")

batchProverConfig := &config.ProverCoreConfig{
DumpDir: *proofDumpPath,
ParamsPath: *paramsPath,
@@ -91,13 +38,27 @@ func TestFFI(t *testing.T) {
batchProverCore, err := core.NewProverCore(batchProverConfig)
as.NoError(err)

chunkInfos := []*message.ChunkInfo{chunkInfo1, chunkInfo2}
chunkProofs := []*message.ChunkProof{chunkProof1, chunkProof2}
// gupeng
batchDetail := readBatchDetail(*batchDetailPath, as)
chunkInfos := batchDetail.ChunkInfos
chunkProofs := batchDetail.ChunkProofs

_, err = batchProverCore.ProveBatch("batch_proof", chunkInfos, chunkProofs)
as.NoError(err)
t.Log("Generated and dumped batch proof")
}

func readBatchDetail(filePat string, as *assert.Assertions) *message.BatchTaskDetail {
f, err := os.Open(filePat)
as.NoError(err)
byt, err := io.ReadAll(f)
as.NoError(err)

batchDetail := &message.BatchTaskDetail{}
as.NoError(json.Unmarshal(byt, batchDetail))

return batchDetail
}
func readChunkTrace(filePat string, as *assert.Assertions) []*types.BlockTrace {
f, err := os.Open(filePat)
as.NoError(err)