Mirror of https://github.com/scroll-tech/scroll.git, synced 2026-01-12 15:38:18 -05:00

Compare commits: v4.1.21...chunk-info (6 commits)

Commits (SHA1):
- 628df733ee
- 938f05064c
- 28fa44d3b8
- 6b7ca50599
- c5b80937ce
- ea3e08ab2a
@@ -48,6 +48,9 @@ pub unsafe extern "C" fn gen_batch_proof(
     let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs).unwrap();
     assert_eq!(chunk_hashes.len(), chunk_proofs.len());
 
+    log::error!("gupeng - rust - 1 - chunk_hashes = {chunk_hashes:#?}");
+    log::error!("gupeng - rust - 1 - chunk_proofs = {chunk_proofs:?}");
+
     let chunk_hashes_proofs = chunk_hashes
         .into_iter()
         .zip(chunk_proofs.into_iter())
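A minimal Go-side sketch of the caller half of this FFI boundary: serialize the chunk hashes and chunk proofs to JSON byte blobs and enforce the same length invariant that the assert_eq! above checks on the Rust side. The struct fields and helper name below are illustrative assumptions, not the repository's actual bindings.

```go
package ffiinput

import (
	"encoding/json"
	"fmt"
)

// ChunkHash and ChunkProof are illustrative stand-ins; the real Scroll
// types carry more fields than shown here.
type ChunkHash struct {
	DataHash string `json:"data_hash"`
}

type ChunkProof struct {
	Proof []byte `json:"proof"`
}

// MarshalBatchInput enforces the same length invariant the Rust side
// asserts, then JSON-encodes both slices so they can be handed across
// the FFI boundary as opaque byte blobs.
func MarshalBatchInput(hashes []ChunkHash, proofs []ChunkProof) ([]byte, []byte, error) {
	if len(hashes) != len(proofs) {
		return nil, nil, fmt.Errorf("chunk hashes (%d) and chunk proofs (%d) must match", len(hashes), len(proofs))
	}
	hashesJSON, err := json.Marshal(hashes)
	if err != nil {
		return nil, nil, err
	}
	proofsJSON, err := json.Marshal(proofs)
	if err != nil {
		return nil, nil, err
	}
	return hashesJSON, proofsJSON, nil
}
```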
@@ -5,7 +5,7 @@ import (
     "runtime/debug"
 )
 
-var tag = "v4.1.21"
+var tag = "v4.1.22"
 
 var commit = func() string {
     if info, ok := debug.ReadBuildInfo(); ok {
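For context on the surrounding lines: debug.ReadBuildInfo is how such a commit helper can resolve a revision at runtime. A self-contained sketch of that pattern, assuming a seven-character short hash and an "unknown" fallback (details this hunk does not show):

```go
package version

import "runtime/debug"

var tag = "v4.1.22"

// commit resolves the VCS revision embedded by the Go toolchain at
// build time; it falls back to "unknown" when build info is absent,
// e.g. when the binary was built outside a VCS checkout.
var commit = func() string {
	if info, ok := debug.ReadBuildInfo(); ok {
		for _, setting := range info.Settings {
			if setting.Key == "vcs.revision" {
				value := setting.Value
				if len(value) >= 7 {
					return value[:7] // short commit hash
				}
				return value
			}
		}
	}
	return "unknown"
}()
```

Because the value comes from build metadata rather than a hand-edited constant, only the human-readable tag needs bumping per release, as the hunk above does.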
@@ -22,6 +22,7 @@ contract InitializeL1BridgeContracts is Script {
 
     uint256 CHAIN_ID_L2 = vm.envUint("CHAIN_ID_L2");
     uint256 MAX_L2_TX_IN_CHUNK = vm.envUint("MAX_L2_TX_IN_CHUNK");
+    uint256 MAX_L1_MESSAGE_GAS_LIMIT = vm.envUint("MAX_L1_MESSAGE_GAS_LIMIT");
     address L1_ROLLUP_OPERATOR_ADDR = vm.envAddress("L1_ROLLUP_OPERATOR_ADDR");
     address L1_FEE_VAULT_ADDR = vm.envAddress("L1_FEE_VAULT_ADDR");
     address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
@@ -84,7 +85,7 @@ contract InitializeL1BridgeContracts is Script {
             L1_SCROLL_CHAIN_PROXY_ADDR,
             L1_ENFORCED_TX_GATEWAY_PROXY_ADDR,
             L2_GAS_PRICE_ORACLE_PROXY_ADDR,
-            10000000
+            MAX_L1_MESSAGE_GAS_LIMIT
         );
 
         // initialize L1ScrollMessenger
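This pair of hunks replaces the hardcoded 10000000 gas limit with MAX_L1_MESSAGE_GAS_LIMIT, read from the environment via Foundry's vm.envUint, which aborts when the variable is unset. The same fail-fast, environment-driven configuration pattern expressed in Go might look like the sketch below; only the variable name comes from the script, the rest is an assumption.

```go
package config

import (
	"fmt"
	"os"
	"strconv"
)

// maxL1MessageGasLimit reads MAX_L1_MESSAGE_GAS_LIMIT from the
// environment, mirroring vm.envUint in the Foundry script: there is
// deliberately no silent default, so a missing or malformed value
// fails loudly instead of deploying with a hardcoded gas limit.
func maxL1MessageGasLimit() (uint64, error) {
	raw, ok := os.LookupEnv("MAX_L1_MESSAGE_GAS_LIMIT")
	if !ok {
		return 0, fmt.Errorf("MAX_L1_MESSAGE_GAS_LIMIT is not set")
	}
	value, err := strconv.ParseUint(raw, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("MAX_L1_MESSAGE_GAS_LIMIT is not a valid uint: %w", err)
	}
	return value, nil
}
```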
@@ -64,7 +64,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
     }
 
     batchTask := batchTasks[0]
-    log.Info("start batch proof generation session", "id", batchTask.Hash)
+    log.Info("start batch proof generation session", "id", batchTask.Hash, "public key", publicKey, "prover name", proverName)
 
     if !bp.checkAttemptsExceeded(batchTask.Hash, message.ProofTypeBatch) {
         return nil, fmt.Errorf("the batch task id:%s check attempts have reach the maximum", batchTask.Hash)
@@ -66,7 +66,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
 
     chunkTask := chunkTasks[0]
 
-    log.Info("start chunk generation session", "id", chunkTask.Hash)
+    log.Info("start chunk generation session", "id", chunkTask.Hash, "public key", publicKey, "prover name", proverName)
 
     if !cp.checkAttemptsExceeded(chunkTask.Hash, message.ProofTypeChunk) {
         return nil, fmt.Errorf("chunk proof hash id:%s check attempts have reach the maximum", chunkTask.Hash)
@@ -50,7 +50,7 @@ func (b *BaseProverTask) checkAttemptsExceeded(hash string, taskType message.Pro
     if len(proverTasks) >= int(b.cfg.ProverManager.SessionAttempts) {
         coordinatorSessionsTimeoutTotalCounter.Inc(1)
 
-        log.Warn("proof generation prover task %s ended because reach the max attempts", hash)
+        log.Warn("proof generation prover task reach the max attempts", "hash", hash)
 
         transErr := b.db.Transaction(func(tx *gorm.DB) error {
             switch message.ProofType(proverTasks[0].TaskType) {
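This log.Warn fix, like the two Assign hunks above, matters because go-ethereum's log package is a structured key-value logger, not a Printf-style one: the old call leaves %s uninterpolated and passes hash as a dangling context argument. A minimal sketch of the difference, assuming the standard github.com/scroll-tech/go-ethereum/log API:

```go
package main

import "github.com/scroll-tech/go-ethereum/log"

func main() {
	hash := "0xabc123"

	// Printf-style misuse: log.Warn does not format its message, so "%s"
	// is emitted verbatim and hash ends up as an unpaired context value.
	log.Warn("proof generation prover task %s ended because reach the max attempts", hash)

	// Structured style: the message stays constant and hash is attached
	// as a key-value pair that downstream log tooling can filter on.
	log.Warn("proof generation prover task reach the max attempts", "hash", hash)
}
```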
@@ -294,6 +294,7 @@ func (o *Batch) UpdateUnassignedBatchReturning(ctx context.Context, limit int) (
     var batches []*Batch
     db = db.Model(&batches).Clauses(clause.Returning{})
     db = db.Where("index = (?)", subQueryDB)
+    db = db.Where("proving_status = ?", types.ProvingTaskUnassigned)
     if err := db.Update("proving_status", types.ProvingTaskAssigned).Error; err != nil {
         return nil, fmt.Errorf("Batch.UpdateUnassignedBatchReturning error: %w", err)
     }
@@ -364,6 +364,7 @@ func (o *Chunk) UpdateUnassignedChunkReturning(ctx context.Context, height, limi
     var chunks []*Chunk
     db = db.Model(&chunks).Clauses(clause.Returning{})
     db = db.Where("index = (?)", subQueryDB)
+    db = db.Where("proving_status = ?", types.ProvingTaskUnassigned)
     if err := db.Update("proving_status", types.ProvingTaskAssigned).Error; err != nil {
         return nil, fmt.Errorf("Chunk.UpdateUnassignedBatchReturning error: %w", err)
     }
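Both UpdateUnassignedBatchReturning and UpdateUnassignedChunkReturning gain the same guard: only rows still in ProvingTaskUnassigned may be flipped to ProvingTaskAssigned, so concurrent coordinators cannot both claim the task selected by the subquery. A stand-alone sketch of that guarded update-with-RETURNING pattern in GORM (model, column, and status values are simplified assumptions):

```go
package orm

import (
	"context"

	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

// Task is a simplified stand-in for the Batch/Chunk models.
type Task struct {
	Index         int64
	ProvingStatus int16
}

const (
	provingTaskUnassigned int16 = 1
	provingTaskAssigned   int16 = 2
)

// assignTask flips a task to "assigned" only if it is still unassigned,
// and relies on the RETURNING clause (PostgreSQL) to hand back exactly
// the rows this caller managed to claim.
func assignTask(ctx context.Context, db *gorm.DB, index int64) ([]*Task, error) {
	var tasks []*Task
	err := db.WithContext(ctx).
		Model(&tasks).
		Clauses(clause.Returning{}).
		Where("index = ?", index).
		Where("proving_status = ?", provingTaskUnassigned).
		Update("proving_status", provingTaskAssigned).Error
	if err != nil {
		return nil, err
	}
	return tasks, nil
}
```

If a competing worker has already assigned the row, the WHERE clause matches nothing, the RETURNING set is empty, and the caller simply moves on instead of double-assigning.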
@@ -13,7 +13,6 @@ import (
     "github.com/scroll-tech/go-ethereum/core/types"
     "github.com/stretchr/testify/assert"
 
-    scrollTypes "scroll-tech/common/types"
     "scroll-tech/common/types/message"
 
     "scroll-tech/prover/config"
@@ -21,68 +20,16 @@ import (
 )
 
 var (
-    paramsPath    = flag.String("params", "/assets/test_params", "params dir")
-    proofDumpPath = flag.String("dump", "/assets/proof_data", "the path proofs dump to")
-    tracePath1    = flag.String("trace1", "/assets/traces/1_transfer.json", "chunk trace 1")
-    tracePath2    = flag.String("trace2", "/assets/traces/10_transfer.json", "chunk trace 2")
+    paramsPath      = flag.String("params", "/assets/test_params", "params dir")
+    proofDumpPath   = flag.String("dump", "/assets/proof_data", "the path proofs dump to")
+    tracePath1      = flag.String("trace1", "/assets/traces/1_transfer.json", "chunk trace 1")
+    tracePath2      = flag.String("trace2", "/assets/traces/10_transfer.json", "chunk trace 2")
+    batchDetailPath = flag.String("batch_detail", "/assets/traces/full_proof_3.json", "batch detail")
 )
 
 func TestFFI(t *testing.T) {
     as := assert.New(t)
 
-    chunkProverConfig := &config.ProverCoreConfig{
-        DumpDir:    *proofDumpPath,
-        ParamsPath: *paramsPath,
-        ProofType:  message.ProofTypeChunk,
-    }
-    chunkProverCore, err := core.NewProverCore(chunkProverConfig)
-    as.NoError(err)
-    t.Log("Constructed chunk prover")
-
-    chunkTrace1 := readChunkTrace(*tracePath1, as)
-    chunkTrace2 := readChunkTrace(*tracePath2, as)
-    t.Log("Loaded chunk traces")
-
-    chunkInfo1, err := chunkProverCore.TracesToChunkInfo(chunkTrace1)
-    as.NoError(err)
-    chunkInfo2, err := chunkProverCore.TracesToChunkInfo(chunkTrace2)
-    as.NoError(err)
-    t.Log("Converted to chunk infos")
-
-    wrappedBlock1 := &scrollTypes.WrappedBlock{
-        Header:       chunkTrace1[0].Header,
-        Transactions: chunkTrace1[0].Transactions,
-        WithdrawRoot: chunkTrace1[0].WithdrawTrieRoot,
-    }
-    chunk1 := &scrollTypes.Chunk{Blocks: []*scrollTypes.WrappedBlock{wrappedBlock1}}
-    chunkHash1, err := chunk1.Hash(0)
-    as.NoError(err)
-    as.Equal(chunkInfo1.PostStateRoot, wrappedBlock1.Header.Root)
-    as.Equal(chunkInfo1.WithdrawRoot, wrappedBlock1.WithdrawRoot)
-    as.Equal(chunkInfo1.DataHash, chunkHash1)
-    t.Log("Successful to check chunk info 1")
-
-    wrappedBlock2 := &scrollTypes.WrappedBlock{
-        Header:       chunkTrace2[0].Header,
-        Transactions: chunkTrace2[0].Transactions,
-        WithdrawRoot: chunkTrace2[0].WithdrawTrieRoot,
-    }
-    chunk2 := &scrollTypes.Chunk{Blocks: []*scrollTypes.WrappedBlock{wrappedBlock2}}
-    chunkHash2, err := chunk2.Hash(chunk1.NumL1Messages(0))
-    as.NoError(err)
-    as.Equal(chunkInfo2.PostStateRoot, wrappedBlock2.Header.Root)
-    as.Equal(chunkInfo2.WithdrawRoot, wrappedBlock2.WithdrawRoot)
-    as.Equal(chunkInfo2.DataHash, chunkHash2)
-    t.Log("Successful to check chunk info 2")
-
-    chunkProof1, err := chunkProverCore.ProveChunk("chunk_proof1", chunkTrace1)
-    as.NoError(err)
-    t.Log("Generated and dumped chunk proof 1")
-
-    chunkProof2, err := chunkProverCore.ProveChunk("chunk_proof2", chunkTrace2)
-    as.NoError(err)
-    t.Log("Generated and dumped chunk proof 2")
-
     batchProverConfig := &config.ProverCoreConfig{
         DumpDir:    *proofDumpPath,
         ParamsPath: *paramsPath,
@@ -91,13 +38,27 @@ func TestFFI(t *testing.T) {
     batchProverCore, err := core.NewProverCore(batchProverConfig)
     as.NoError(err)
 
-    chunkInfos := []*message.ChunkInfo{chunkInfo1, chunkInfo2}
-    chunkProofs := []*message.ChunkProof{chunkProof1, chunkProof2}
+    // gupeng
+    batchDetail := readBatchDetail(*batchDetailPath, as)
+    chunkInfos := batchDetail.ChunkInfos
+    chunkProofs := batchDetail.ChunkProofs
 
     _, err = batchProverCore.ProveBatch("batch_proof", chunkInfos, chunkProofs)
     as.NoError(err)
     t.Log("Generated and dumped batch proof")
 }
+
+func readBatchDetail(filePat string, as *assert.Assertions) *message.BatchTaskDetail {
+    f, err := os.Open(filePat)
+    as.NoError(err)
+    byt, err := io.ReadAll(f)
+    as.NoError(err)
+
+    batchDetail := &message.BatchTaskDetail{}
+    as.NoError(json.Unmarshal(byt, batchDetail))
+
+    return batchDetail
+}
 func readChunkTrace(filePat string, as *assert.Assertions) []*types.BlockTrace {
     f, err := os.Open(filePat)
     as.NoError(err)
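With these hunks, TestFFI no longer derives chunk infos and proofs in-process; it loads them from the pre-generated full_proof_3.json fixture via readBatchDetail. A hedged sketch of how such a fixture could be written out, assuming only the two fields the diff touches (ChunkInfos, ChunkProofs) plus the chunk-info fields evidenced by the removed assertions; the JSON tags and remaining types are illustrative:

```go
package fixture

import (
	"encoding/json"
	"os"
)

// ChunkInfo and ChunkProof are minimal stand-ins for the message types;
// the real structs in scroll-tech/common carry more fields.
type ChunkInfo struct {
	PostStateRoot string `json:"post_state_root"`
	WithdrawRoot  string `json:"withdraw_root"`
	DataHash      string `json:"data_hash"`
}

type ChunkProof struct {
	Proof []byte `json:"proof"`
}

// BatchTaskDetail mirrors the two fields readBatchDetail relies on.
type BatchTaskDetail struct {
	ChunkInfos  []*ChunkInfo  `json:"chunk_infos"`
	ChunkProofs []*ChunkProof `json:"chunk_proofs"`
}

// WriteBatchDetail dumps a batch detail fixture to disk so a test like
// TestFFI can consume it instead of regenerating chunk proofs each run.
func WriteBatchDetail(path string, detail *BatchTaskDetail) error {
	byt, err := json.MarshalIndent(detail, "", "  ")
	if err != nil {
		return err
	}
	return os.WriteFile(path, byt, 0o644)
}
```

The trade-off is the usual one for golden-file tests: batch proving no longer depends on regenerating chunk proofs, at the cost of keeping the fixture in sync with the proof format.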