Compare commits


1 Commit

Author      SHA1        Message                                     Date
colinlyguo  f0fc344303  update-rollup-verifier-deployment-script   2024-04-12 23:30:42 +08:00
6 changed files with 22 additions and 80 deletions

View File

@@ -92,10 +92,12 @@ contract DeployL1BridgeContracts is Script {
     }

     function deployMultipleVersionRollupVerifier() internal {
-        uint256[] memory _versions = new uint256[](1);
-        address[] memory _verifiers = new address[](1);
+        uint256[] memory _versions = new uint256[](2);
+        address[] memory _verifiers = new address[](2);
         _versions[0] = 0;
         _verifiers[0] = address(zkEvmVerifierV1);
+        _versions[1] = 1;
+        _verifiers[1] = address(zkEvmVerifierV1);
         rollupVerifier = new MultipleVersionRollupVerifier(L1_SCROLL_CHAIN_PROXY_ADDR, _versions, _verifiers);

         logAddress("L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR", address(rollupVerifier));

View File

@@ -42,7 +42,4 @@ type BatchProposerConfig struct
     MaxL1CommitCalldataSizePerBatch uint64  `json:"max_l1_commit_calldata_size_per_batch"`
     BatchTimeoutSec                 uint64  `json:"batch_timeout_sec"`
     GasCostIncreaseMultiplier       float64 `json:"gas_cost_increase_multiplier"`
-    EnableTestEnvSamplingFeature    bool    `json:"enable_test_env_sampling_feature,omitempty"`
-    SamplingPercentage              uint64  `json:"sampling_percentage,omitempty"`
 }

View File

@@ -64,9 +64,6 @@ type RelayerConfig struct
     EnableTestEnvBypassFeatures bool `json:"enable_test_env_bypass_features"`
     // The timeout in seconds for finalizing a batch without proof, only used when EnableTestEnvBypassFeatures is true.
     FinalizeBatchWithoutProofTimeoutSec uint64 `json:"finalize_batch_without_proof_timeout_sec"`
-    EnableTestEnvSamplingFeature bool `json:"enable_test_env_sampling_feature,omitempty"`
-    SamplingPercentage uint64 `json:"sampling_percentage,omitempty"`
 }

 // GasOracleConfig The config for updating gas price oracle.
@@ -131,10 +128,6 @@ func (r *RelayerConfig) UnmarshalJSON(input []byte) error {
         return fmt.Errorf("error converting and checking finalize sender private key: %w", err)
     }

-    if r.SamplingPercentage == 0 {
-        r.SamplingPercentage = 100
-    }

     return nil
 }
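Side note on the fields that remain: per the RelayerConfig comment above, a batch may be finalized without a proof only when EnableTestEnvBypassFeatures is set and the batch has waited longer than FinalizeBatchWithoutProofTimeoutSec. Below is a minimal sketch of that check; the helper name, the trimmed-down struct, and the batchCreatedAt parameter are assumptions for illustration, not part of this change.

    package relayer

    import "time"

    // Trimmed-down copy of the two bypass fields shown in the diff above.
    type RelayerConfig struct {
        EnableTestEnvBypassFeatures         bool
        FinalizeBatchWithoutProofTimeoutSec uint64
    }

    // shouldFinalizeWithoutProof is a hypothetical helper: bypass is only
    // considered when the test-env flag is on and the batch has outlived
    // the configured timeout (in seconds).
    func shouldFinalizeWithoutProof(cfg *RelayerConfig, batchCreatedAt time.Time) bool {
        if !cfg.EnableTestEnvBypassFeatures {
            return false
        }
        timeout := time.Duration(cfg.FinalizeBatchWithoutProofTimeoutSec) * time.Second
        return time.Since(batchCreatedAt) > timeout
    }

In the hunks below, the already-verified path always calls finalizeBatch with withProof set to true; the timeout-based bypass presumably applies only to batches still waiting for a proof.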

View File

@@ -464,9 +464,8 @@ func (r *Layer2Relayer) ProcessCommittedBatches() {
         case types.ProvingTaskVerified:
             log.Info("Start to roll up zk proof", "hash", batch.Hash)
             r.metrics.rollupL2RelayerProcessCommittedBatchesFinalizedTotal.Inc()
-            skipProof := r.cfg.EnableTestEnvSamplingFeature && ((batch.Index % 100) >= r.cfg.SamplingPercentage)
-            if err := r.finalizeBatch(batch, !skipProof); err != nil {
-                log.Error("Failed to finalize batch", "index", batch.Index, "hash", batch.Hash, "withProof", !skipProof, "err", err)
+            if err := r.finalizeBatch(batch, true); err != nil {
+                log.Error("Failed to finalize batch with proof", "index", batch.Index, "hash", batch.Hash, "err", err)
             }
         case types.ProvingTaskFailed:
@@ -587,22 +586,22 @@ func (r *Layer2Relayer) finalizeBatch(dbBatch *orm.Batch, withProof bool) error
         return err
     }

-    // // Updating the proving status when finalizing without proof, thus the coordinator could omit this task.
-    // // it isn't a necessary step, so don't put in a transaction with UpdateFinalizeTxHashAndRollupStatus
-    // if !withProof {
-    //     txErr := r.db.Transaction(func(tx *gorm.DB) error {
-    //         if updateErr := r.batchOrm.UpdateProvingStatus(r.ctx, dbBatch.Hash, types.ProvingTaskVerified); updateErr != nil {
-    //             return updateErr
-    //         }
-    //         if updateErr := r.chunkOrm.UpdateProvingStatusByBatchHash(r.ctx, dbBatch.Hash, types.ProvingTaskVerified); updateErr != nil {
-    //             return updateErr
-    //         }
-    //         return nil
-    //     })
-    //     if txErr != nil {
-    //         log.Error("Updating chunk and batch proving status when finalizing without proof failure", "batchHash", dbBatch.Hash, "err", txErr)
-    //     }
-    // }
+    // Updating the proving status when finalizing without proof, thus the coordinator could omit this task.
+    // it isn't a necessary step, so don't put in a transaction with UpdateFinalizeTxHashAndRollupStatus
+    if !withProof {
+        txErr := r.db.Transaction(func(tx *gorm.DB) error {
+            if updateErr := r.batchOrm.UpdateProvingStatus(r.ctx, dbBatch.Hash, types.ProvingTaskVerified); updateErr != nil {
+                return updateErr
+            }
+            if updateErr := r.chunkOrm.UpdateProvingStatusByBatchHash(r.ctx, dbBatch.Hash, types.ProvingTaskVerified); updateErr != nil {
+                return updateErr
+            }
+            return nil
+        })
+        if txErr != nil {
+            log.Error("Updating chunk and batch proving status when finalizing without proof failure", "batchHash", dbBatch.Hash, "err", txErr)
+        }
+    }

     r.metrics.rollupL2RelayerProcessCommittedBatchesFinalizedSuccessTotal.Inc()
     return nil

View File

@@ -14,7 +14,6 @@ import (
     "gorm.io/gorm"

     "scroll-tech/common/forks"
-    "scroll-tech/common/types"
     "scroll-tech/common/types/encoding"

     "scroll-tech/rollup/internal/config"
@@ -38,7 +37,6 @@ type BatchProposer struct
     gasCostIncreaseMultiplier float64
     forkMap map[uint64]bool
-    cfg *config.BatchProposerConfig
     chainCfg *params.ChainConfig

     batchProposerCircleTotal prometheus.Counter
@@ -76,7 +74,6 @@ func NewBatchProposer(ctx context.Context, cfg *config.BatchProposerConfig, chai
         batchTimeoutSec:           cfg.BatchTimeoutSec,
         gasCostIncreaseMultiplier: cfg.GasCostIncreaseMultiplier,
         forkMap:                   forkMap,
-        cfg:                       cfg,
         chainCfg:                  chainCfg,

         batchProposerCircleTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
@@ -147,27 +144,6 @@ func (p *BatchProposer) updateDBBatchInfo(batch *encoding.Batch, codecVersion en
             log.Warn("BatchProposer.UpdateBatchHashInRange update the chunk's batch hash failure", "hash", batch.Hash, "error", dbErr)
             return dbErr
         }
-
-        skipProof := false
-        if p.cfg.EnableTestEnvSamplingFeature && ((batch.Index % 100) >= p.cfg.SamplingPercentage) {
-            skipProof = true
-        }
-        if skipProof {
-            dbErr = p.batchOrm.UpdateProvingStatus(p.ctx, batch.Hash, types.ProvingTaskVerified, dbTX)
-            if dbErr != nil {
-                log.Warn("BatchProposer.updateBatchInfoInDB update batch proving_status failure",
-                    "batch hash", batch.Hash, "error", dbErr)
-                return dbErr
-            }
-            dbErr = p.chunkOrm.UpdateProvingStatusInRange(p.ctx, batch.StartChunkIndex, batch.EndChunkIndex, types.ProvingTaskVerified, dbTX)
-            if dbErr != nil {
-                log.Warn("BatchProposer.updateBatchInfoInDB update chunk proving_status failure",
-                    "start chunk index", batch.StartChunkIndex, "end chunk index", batch.EndChunkIndex,
-                    "batch hash", batch.Hash, "error", dbErr)
-                return dbErr
-            }
-        }
         return nil
     })
     if err != nil {

View File

@@ -306,28 +306,3 @@ func (o *Chunk) UpdateBatchHashInRange(ctx context.Context, startIndex uint64, e
     }
     return nil
 }
-
-func (o *Chunk) UpdateProvingStatusInRange(ctx context.Context, startIndex uint64, endIndex uint64, status types.ProvingStatus, dbTX ...*gorm.DB) error {
-    db := o.db
-    if len(dbTX) > 0 && dbTX[0] != nil {
-        db = dbTX[0]
-    }
-    db = db.WithContext(ctx)
-    db = db.Model(&Chunk{})
-    db = db.Where("index >= ? AND index <= ?", startIndex, endIndex)
-    updateFields := make(map[string]interface{})
-    updateFields["proving_status"] = int(status)
-    switch status {
-    case types.ProvingTaskAssigned:
-        updateFields["prover_assigned_at"] = time.Now()
-    case types.ProvingTaskUnassigned:
-        updateFields["prover_assigned_at"] = nil
-    case types.ProvingTaskVerified:
-        updateFields["proved_at"] = time.Now()
-    }
-
-    if err := db.Updates(updateFields).Error; err != nil {
-        return fmt.Errorf("Chunk.UpdateProvingStatusInRange error: %w, start index: %v, end index: %v, status: %v", err, startIndex, endIndex, status.String())
-    }
-    return nil
-}