Compare commits

..

13 Commits

Author SHA1 Message Date
Max Wolff
487f4f2af4 wip cli 2023-06-27 21:42:24 -07:00
Max Wolff
0a57747085 add wip cli 2023-06-27 21:36:44 -07:00
Max Wolff
e59a1d4fba demo 2023-06-19 00:40:26 -07:00
Max Wolff
17bbb929b7 debug v param. sigs now work 2023-06-16 02:04:58 -07:00
Max Wolff
c95e0c1782 wip 2023-06-13 10:13:55 -07:00
Max Wolff
091da32936 add deployment script 2023-06-02 16:11:27 -07:00
Max Wolff
6155612eec wip 2023-05-31 00:33:49 -07:00
Xi Lin
d743f2ce96 feat(contracts): add fallback contract to deployed in L2 (#522)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-05-30 08:31:12 +08:00
maskpp
798179ee6d fix(mock): fix mocked proof to pass SanityCheck (#524) 2023-05-30 08:23:57 +08:00
maskpp
b706cb69d3 fix(db): return same amount of status as hashes for GetRollupStatusByHashList (#523) 2023-05-29 15:46:01 +08:00
Xi Lin
aa24cdd1db feat(contracts): add chain id in public input hash (#517)
Co-authored-by: Haichen Shen <shenhaichen@gmail.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2023-05-28 09:41:38 +08:00
HAOYUatHZ
4398a36ee2 feat(db): update block_batch's proof (#520) 2023-05-27 15:53:38 +08:00
HAOYUatHZ
9a27499c03 feat(db): rename created_time to created_at & updated_time to `… (#502)
Co-authored-by: maskpp <maskpp266@gmail.com>
2023-05-27 06:25:19 +08:00
49 changed files with 1318 additions and 190 deletions

3
.gitmodules vendored
View File

@@ -13,3 +13,6 @@
[submodule "contracts/lib/solmate"]
path = contracts/lib/solmate
url = https://github.com/rari-capital/solmate
[submodule "contracts/lib/safe-contracts"]
path = contracts/lib/safe-contracts
url = https://github.com/safe-global/safe-contracts

View File

@@ -457,27 +457,20 @@ func (r *Layer2Relayer) ProcessCommittedBatches() {
}
}()
proofBuffer, icBuffer, err := r.blockBatchOrm.GetVerifiedProofAndInstanceCommitmentsByHash(hash)
aggProof, err := r.blockBatchOrm.GetVerifiedProofByHash(hash)
if err != nil {
log.Warn("fetch get proof by hash failed", "hash", hash, "err", err)
return
}
if proofBuffer == nil || icBuffer == nil {
log.Warn("proof or instance not ready", "hash", hash)
return
}
if len(proofBuffer)%32 != 0 {
log.Error("proof buffer has wrong length", "hash", hash, "length", len(proofBuffer))
return
}
if len(icBuffer)%32 != 0 {
log.Warn("instance buffer has wrong length", "hash", hash, "length", len(icBuffer))
log.Warn("get verified proof by hash failed", "hash", hash, "err", err)
return
}
proof := utils.BufferToUint256Le(proofBuffer)
instance := utils.BufferToUint256Le(icBuffer)
data, err := r.l1RollupABI.Pack("finalizeBatchWithProof", common.HexToHash(hash), proof, instance)
if err = aggProof.SanityCheck(); err != nil {
log.Warn("agg_proof sanity check fails", "hash", hash, "error", err)
return
}
proof := utils.BufferToUint256Le(aggProof.Proof)
finalPair := utils.BufferToUint256Le(aggProof.FinalPair)
data, err := r.l1RollupABI.Pack("finalizeBatchWithProof", common.HexToHash(hash), proof, finalPair)
if err != nil {
log.Error("Pack finalizeBatchWithProof failed", "err", err)
return

View File

@@ -18,6 +18,7 @@ import (
"gorm.io/gorm"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
"scroll-tech/common/utils"
"scroll-tech/bridge/internal/controller/sender"
@@ -156,9 +157,11 @@ func testL2RelayerProcessCommittedBatches(t *testing.T) {
err = blockBatchOrm.UpdateRollupStatus(context.Background(), batchHash, types.RollupCommitted)
assert.NoError(t, err)
tProof := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
tInstanceCommitments := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, tProof, tInstanceCommitments, 100)
proof := &message.AggProof{
Proof: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
FinalPair: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, proof, 100)
assert.NoError(t, err)
err = blockBatchOrm.UpdateProvingStatus(batchHash, types.ProvingTaskVerified)
assert.NoError(t, err)
@@ -199,9 +202,11 @@ func testL2RelayerSkipBatches(t *testing.T) {
err = blockBatchOrm.UpdateRollupStatus(context.Background(), batchHash, rollupStatus)
assert.NoError(t, err)
tProof := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
tInstanceCommitments := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, tProof, tInstanceCommitments, 100)
proof := &message.AggProof{
Proof: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
FinalPair: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, proof, 100)
assert.NoError(t, err)
err = blockBatchOrm.UpdateProvingStatus(batchHash, provingStatus)
assert.NoError(t, err)

View File

@@ -2,6 +2,7 @@ package orm
import (
"context"
"encoding/json"
"errors"
"time"
@@ -9,6 +10,7 @@ import (
"gorm.io/gorm"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
bridgeTypes "scroll-tech/bridge/internal/types"
)
@@ -17,31 +19,30 @@ import (
type BlockBatch struct {
db *gorm.DB `gorm:"column:-"`
Hash string `json:"hash" gorm:"column:hash"`
Index uint64 `json:"index" gorm:"column:index"`
StartBlockNumber uint64 `json:"start_block_number" gorm:"column:start_block_number"`
StartBlockHash string `json:"start_block_hash" gorm:"column:start_block_hash"`
EndBlockNumber uint64 `json:"end_block_number" gorm:"column:end_block_number"`
EndBlockHash string `json:"end_block_hash" gorm:"column:end_block_hash"`
ParentHash string `json:"parent_hash" gorm:"column:parent_hash"`
StateRoot string `json:"state_root" gorm:"column:state_root"`
TotalTxNum uint64 `json:"total_tx_num" gorm:"column:total_tx_num"`
TotalL1TxNum uint64 `json:"total_l1_tx_num" gorm:"column:total_l1_tx_num"`
TotalL2Gas uint64 `json:"total_l2_gas" gorm:"column:total_l2_gas"`
ProvingStatus int `json:"proving_status" gorm:"column:proving_status;default:1"`
Proof []byte `json:"proof" gorm:"column:proof"`
InstanceCommitments []byte `json:"instance_commitments" gorm:"column:instance_commitments"`
ProofTimeSec uint64 `json:"proof_time_sec" gorm:"column:proof_time_sec;default:0"`
RollupStatus int `json:"rollup_status" gorm:"column:rollup_status;default:1"`
CommitTxHash string `json:"commit_tx_hash" gorm:"column:commit_tx_hash;default:NULL"`
OracleStatus int `json:"oracle_status" gorm:"column:oracle_status;default:1"`
OracleTxHash string `json:"oracle_tx_hash" gorm:"column:oracle_tx_hash;default:NULL"`
FinalizeTxHash string `json:"finalize_tx_hash" gorm:"column:finalize_tx_hash;default:NULL"`
CreatedAt time.Time `json:"created_at" gorm:"column:created_at;default:CURRENT_TIMESTAMP()"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" gorm:"column:prover_assigned_at;default:NULL"`
ProvedAt *time.Time `json:"proved_at" gorm:"column:proved_at;default:NULL"`
CommittedAt *time.Time `json:"committed_at" gorm:"column:committed_at;default:NULL"`
FinalizedAt *time.Time `json:"finalized_at" gorm:"column:finalized_at;default:NULL"`
Hash string `json:"hash" gorm:"column:hash"`
Index uint64 `json:"index" gorm:"column:index"`
StartBlockNumber uint64 `json:"start_block_number" gorm:"column:start_block_number"`
StartBlockHash string `json:"start_block_hash" gorm:"column:start_block_hash"`
EndBlockNumber uint64 `json:"end_block_number" gorm:"column:end_block_number"`
EndBlockHash string `json:"end_block_hash" gorm:"column:end_block_hash"`
ParentHash string `json:"parent_hash" gorm:"column:parent_hash"`
StateRoot string `json:"state_root" gorm:"column:state_root"`
TotalTxNum uint64 `json:"total_tx_num" gorm:"column:total_tx_num"`
TotalL1TxNum uint64 `json:"total_l1_tx_num" gorm:"column:total_l1_tx_num"`
TotalL2Gas uint64 `json:"total_l2_gas" gorm:"column:total_l2_gas"`
ProvingStatus int `json:"proving_status" gorm:"column:proving_status;default:1"`
Proof []byte `json:"proof" gorm:"column:proof"`
ProofTimeSec uint64 `json:"proof_time_sec" gorm:"column:proof_time_sec;default:0"`
RollupStatus int `json:"rollup_status" gorm:"column:rollup_status;default:1"`
CommitTxHash string `json:"commit_tx_hash" gorm:"column:commit_tx_hash;default:NULL"`
OracleStatus int `json:"oracle_status" gorm:"column:oracle_status;default:1"`
OracleTxHash string `json:"oracle_tx_hash" gorm:"column:oracle_tx_hash;default:NULL"`
FinalizeTxHash string `json:"finalize_tx_hash" gorm:"column:finalize_tx_hash;default:NULL"`
CreatedAt time.Time `json:"created_at" gorm:"column:created_at;default:CURRENT_TIMESTAMP()"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" gorm:"column:prover_assigned_at;default:NULL"`
ProvedAt *time.Time `json:"proved_at" gorm:"column:proved_at;default:NULL"`
CommittedAt *time.Time `json:"committed_at" gorm:"column:committed_at;default:NULL"`
FinalizedAt *time.Time `json:"finalized_at" gorm:"column:finalized_at;default:NULL"`
}
// NewBlockBatch create an blockBatchOrm instance
@@ -100,14 +101,24 @@ func (o *BlockBatch) GetBlockBatchesHashByRollupStatus(status types.RollupStatus
return hashes, nil
}
// GetVerifiedProofAndInstanceCommitmentsByHash get verified proof and instance comments by hash
func (o *BlockBatch) GetVerifiedProofAndInstanceCommitmentsByHash(hash string) ([]byte, []byte, error) {
var blockBatch BlockBatch
err := o.db.Select("proof, instance_commitments").Where("hash", hash).Where("proving_status", int(types.ProvingTaskVerified)).Find(&blockBatch).Error
if err != nil {
return nil, nil, err
// GetVerifiedProofByHash gets the verified aggregate proof by batch hash
func (o *BlockBatch) GetVerifiedProofByHash(hash string) (*message.AggProof, error) {
result := o.db.Model(&BlockBatch{}).Select("proof").Where("hash", hash).Where("proving_status", int(types.ProvingTaskVerified)).Row()
if result.Err() != nil {
return nil, result.Err()
}
return blockBatch.Proof, blockBatch.InstanceCommitments, nil
var proofBytes []byte
if err := result.Scan(&proofBytes); err != nil {
return nil, err
}
var proof message.AggProof
if err := json.Unmarshal(proofBytes, &proof); err != nil {
return nil, err
}
return &proof, nil
}
// GetLatestBatch get the latest batch
@@ -147,10 +158,17 @@ func (o *BlockBatch) GetRollupStatusByHashList(hashes []string) ([]types.RollupS
return nil, err
}
var statuses []types.RollupStatus
for _, v := range blockBatches {
statuses = append(statuses, types.RollupStatus(v.RollupStatus))
var (
statuses []types.RollupStatus
_statusMap = make(map[string]types.RollupStatus, len(hashes))
)
for _, _batch := range blockBatches {
_statusMap[_batch.Hash] = types.RollupStatus(_batch.RollupStatus)
}
for _, _hash := range hashes {
statuses = append(statuses, _statusMap[_hash])
}
return statuses, nil
}
@@ -279,12 +297,16 @@ func (o *BlockBatch) UpdateL2GasOracleStatusAndOracleTxHash(ctx context.Context,
// UpdateProofByHash update the block batch proof by hash
// for unit test
func (o *BlockBatch) UpdateProofByHash(ctx context.Context, hash string, proof, instanceCommitments []byte, proofTimeSec uint64) error {
func (o *BlockBatch) UpdateProofByHash(ctx context.Context, hash string, proof *message.AggProof, proofTimeSec uint64) error {
proofBytes, err := json.Marshal(proof)
if err != nil {
return err
}
updateFields := make(map[string]interface{})
updateFields["proof"] = proof
updateFields["instance_commitments"] = instanceCommitments
updateFields["proof"] = proofBytes
updateFields["proof_time_sec"] = proofTimeSec
err := o.db.WithContext(ctx).Model(&BlockBatch{}).Where("hash", hash).Updates(updateFields).Error
err = o.db.WithContext(ctx).Model(&BlockBatch{}).Where("hash", hash).Updates(updateFields).Error
if err != nil {
log.Error("failed to update proof", "err", err)
}

View File

@@ -13,8 +13,8 @@ create table l1_message
layer1_hash VARCHAR NOT NULL,
layer2_hash VARCHAR DEFAULT NULL,
status INTEGER DEFAULT 1,
created_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
);
comment
@@ -32,7 +32,7 @@ create index l1_message_height_index
CREATE OR REPLACE FUNCTION update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_time = CURRENT_TIMESTAMP;
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

View File

@@ -13,8 +13,8 @@ create table l2_message
layer1_hash VARCHAR DEFAULT NULL,
proof TEXT DEFAULT NULL,
status INTEGER DEFAULT 1,
created_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
);
comment
@@ -32,7 +32,7 @@ create index l2_message_height_index
CREATE OR REPLACE FUNCTION update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_time = CURRENT_TIMESTAMP;
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

View File

@@ -16,7 +16,6 @@ create table block_batch
total_l2_gas BIGINT NOT NULL,
proving_status INTEGER DEFAULT 1,
proof BYTEA DEFAULT NULL,
instance_commitments BYTEA DEFAULT NULL,
proof_time_sec INTEGER DEFAULT 0,
rollup_status INTEGER DEFAULT 1,
commit_tx_hash VARCHAR DEFAULT NULL,

View File

@@ -14,6 +14,7 @@ import (
"gorm.io/gorm"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
"scroll-tech/bridge/internal/controller/relayer"
"scroll-tech/bridge/internal/controller/watcher"
@@ -110,9 +111,11 @@ func testRelayL2MessageSucceed(t *testing.T) {
assert.NoError(t, err)
// add dummy proof
tProof := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
tInstanceCommitments := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, tProof, tInstanceCommitments, 100)
proof := &message.AggProof{
Proof: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
FinalPair: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, proof, 100)
assert.NoError(t, err)
err = blockBatchOrm.UpdateProvingStatus(batchHash, types.ProvingTaskVerified)
assert.NoError(t, err)

View File

@@ -13,6 +13,7 @@ import (
"gorm.io/gorm"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
"scroll-tech/bridge/internal/controller/relayer"
"scroll-tech/bridge/internal/controller/watcher"
@@ -117,9 +118,11 @@ func testCommitBatchAndFinalizeBatch(t *testing.T) {
assert.Equal(t, types.RollupCommitted, statuses[0])
// add dummy proof
tProof := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
tInstanceCommitments := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, tProof, tInstanceCommitments, 100)
proof := &message.AggProof{
Proof: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
FinalPair: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
}
err = blockBatchOrm.UpdateProofByHash(context.Background(), batchHash, proof, 100)
assert.NoError(t, err)
err = blockBatchOrm.UpdateProvingStatus(batchHash, types.ProvingTaskVerified)
assert.NoError(t, err)

View File

@@ -233,31 +233,30 @@ const (
// BlockBatch is structure of stored block_batch
type BlockBatch struct {
Hash string `json:"hash" db:"hash"`
Index uint64 `json:"index" db:"index"`
ParentHash string `json:"parent_hash" db:"parent_hash"`
StartBlockNumber uint64 `json:"start_block_number" db:"start_block_number"`
StartBlockHash string `json:"start_block_hash" db:"start_block_hash"`
EndBlockNumber uint64 `json:"end_block_number" db:"end_block_number"`
EndBlockHash string `json:"end_block_hash" db:"end_block_hash"`
StateRoot string `json:"state_root" db:"state_root"`
TotalTxNum uint64 `json:"total_tx_num" db:"total_tx_num"`
TotalL1TxNum uint64 `json:"total_l1_tx_num" db:"total_l1_tx_num"`
TotalL2Gas uint64 `json:"total_l2_gas" db:"total_l2_gas"`
ProvingStatus ProvingStatus `json:"proving_status" db:"proving_status"`
Proof []byte `json:"proof" db:"proof"`
InstanceCommitments []byte `json:"instance_commitments" db:"instance_commitments"`
ProofTimeSec uint64 `json:"proof_time_sec" db:"proof_time_sec"`
RollupStatus RollupStatus `json:"rollup_status" db:"rollup_status"`
OracleStatus GasOracleStatus `json:"oracle_status" db:"oracle_status"`
CommitTxHash sql.NullString `json:"commit_tx_hash" db:"commit_tx_hash"`
FinalizeTxHash sql.NullString `json:"finalize_tx_hash" db:"finalize_tx_hash"`
OracleTxHash sql.NullString `json:"oracle_tx_hash" db:"oracle_tx_hash"`
CreatedAt *time.Time `json:"created_at" db:"created_at"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" db:"prover_assigned_at"`
ProvedAt *time.Time `json:"proved_at" db:"proved_at"`
CommittedAt *time.Time `json:"committed_at" db:"committed_at"`
FinalizedAt *time.Time `json:"finalized_at" db:"finalized_at"`
Hash string `json:"hash" db:"hash"`
Index uint64 `json:"index" db:"index"`
ParentHash string `json:"parent_hash" db:"parent_hash"`
StartBlockNumber uint64 `json:"start_block_number" db:"start_block_number"`
StartBlockHash string `json:"start_block_hash" db:"start_block_hash"`
EndBlockNumber uint64 `json:"end_block_number" db:"end_block_number"`
EndBlockHash string `json:"end_block_hash" db:"end_block_hash"`
StateRoot string `json:"state_root" db:"state_root"`
TotalTxNum uint64 `json:"total_tx_num" db:"total_tx_num"`
TotalL1TxNum uint64 `json:"total_l1_tx_num" db:"total_l1_tx_num"`
TotalL2Gas uint64 `json:"total_l2_gas" db:"total_l2_gas"`
ProvingStatus ProvingStatus `json:"proving_status" db:"proving_status"`
Proof []byte `json:"proof" db:"proof"`
ProofTimeSec uint64 `json:"proof_time_sec" db:"proof_time_sec"`
RollupStatus RollupStatus `json:"rollup_status" db:"rollup_status"`
OracleStatus GasOracleStatus `json:"oracle_status" db:"oracle_status"`
CommitTxHash sql.NullString `json:"commit_tx_hash" db:"commit_tx_hash"`
FinalizeTxHash sql.NullString `json:"finalize_tx_hash" db:"finalize_tx_hash"`
OracleTxHash sql.NullString `json:"oracle_tx_hash" db:"oracle_tx_hash"`
CreatedAt *time.Time `json:"created_at" db:"created_at"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" db:"prover_assigned_at"`
ProvedAt *time.Time `json:"proved_at" db:"proved_at"`
CommittedAt *time.Time `json:"committed_at" db:"committed_at"`
FinalizedAt *time.Time `json:"finalized_at" db:"finalized_at"`
}
// AggTask is a wrapper type around db AggProveTask type.
@@ -269,6 +268,6 @@ type AggTask struct {
EndBatchHash string `json:"end_batch_hash" db:"end_batch_hash"`
ProvingStatus ProvingStatus `json:"proving_status" db:"proving_status"`
Proof []byte `json:"proof" db:"proof"`
CreatedTime *time.Time `json:"created_time" db:"created_time"`
UpdatedTime *time.Time `json:"updated_time" db:"updated_time"`
CreatedAt *time.Time `json:"created_at" db:"created_at"`
UpdatedAt *time.Time `json:"updated_at" db:"updated_at"`
}

View File

@@ -4,6 +4,8 @@ import (
"crypto/ecdsa"
"crypto/rand"
"encoding/hex"
"errors"
"fmt"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
@@ -205,7 +207,7 @@ type TaskMsg struct {
// For decentralization, basic rollers will get block hashes from the coordinator. So that they can refer to the block hashes and fetch traces locally. Only applicable for basic rollers.
BlockHashes []common.Hash `json:"block_hashes,omitempty"`
// Only applicable for aggregator rollers.
SubProofs [][]byte `json:"sub_proofs,omitempty"`
SubProofs []*AggProof `json:"sub_proofs,omitempty"`
}
// ProofDetail is the message received from rollers that contains zk proof, the status of
@@ -237,3 +239,26 @@ type AggProof struct {
Vk []byte `json:"vk"`
BlockCount uint `json:"block_count"`
}
// SanityCheck checks whether an AggProof is in a legal format
// TODO: change to check Proof&Instance when upgrading to snark verifier v0.4
func (ap *AggProof) SanityCheck() error {
	if ap == nil {
		return errors.New("agg_proof is nil")
	}
	if len(ap.Proof) == 0 {
		return errors.New("proof not ready")
	}
	if len(ap.FinalPair) == 0 {
		return errors.New("final_pair not ready")
	}
	// Both buffers are later split into 32-byte (uint256) words, so each
	// length must be an exact multiple of 32. The previous messages claimed
	// "expected: 32", which misstated the multiple-of-32 requirement.
	if len(ap.Proof)%32 != 0 {
		return fmt.Errorf("proof buffer has wrong length, expected a multiple of 32, got: %d", len(ap.Proof))
	}
	if len(ap.FinalPair)%32 != 0 {
		return fmt.Errorf("final_pair buffer has wrong length, expected a multiple of 32, got: %d", len(ap.FinalPair))
	}
	return nil
}

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v3.1.2"
var tag = "v3.3.1"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

3
contracts/admin/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
dist

23
contracts/admin/README.md Normal file
View File

@@ -0,0 +1,23 @@
# admin cli
WIP
provides commands to generate calldata to then paste into `cast sign` or similar tools. No raw-transaction signing command exists in this CLI because we
want to give users the ability to choose what method they sign with, so we prefer not to sign the tx in this CLI tool.
example (hypothetical) usage:
- npm link
- admin-cli approveHash --network testnet --domain L1 --targetAddress 0x0 --targetCalldata 0x0
{
to: 0x1234,
data: 0x1234,
functionSig: "approveHash(bytes32)"
}
Flow:
- first, approve desired transaction (schedules transaction in Timelock) in SAFE with approveHash()
- second, someone collects all the signers and sends executeTransaction()
- third, someone calls execute() on the Timelock. this actually sends the transaction through the forwarder and executes the call

11
contracts/admin/abis.sh Executable file
View File

@@ -0,0 +1,11 @@
#!/bin/bash
set -ue

# This script is used to generate the typechain artifacts for the contracts

mkdir -p abis types

# Use `>` (truncate) rather than `>>` (append) so re-running the script does
# not leave duplicate ABI documents in the same file, and pass the file to jq
# directly instead of piping through cat.
jq .abi ../artifacts/src/Safe.sol/Safe.json > abis/safe.json
jq .abi ../artifacts/src/TimelockController.sol/TimelockController.json > abis/timelock.json
jq .abi ../artifacts/src/Forwarder.sol/Forwarder.json > abis/forwarder.json

npx typechain --target=ethers-v6 "abis/*.json"

2
contracts/admin/bin/index.js Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env node
require("../dist/cli.js");

57
contracts/admin/cli.ts Normal file
View File

@@ -0,0 +1,57 @@
import yargs from "yargs";
import { ethers } from "ethers";
import { DomainDeployment, getConfig } from "./config";
import { approveHash } from "./tx";
// CLI entry point: registers the `approveHash` command, which prints the raw
// transaction fragment an owner must sign to approve a Safe tx hash.
// eslint-disable-next-line no-unused-expressions
yargs
  .command(
    "approveHash",
    "approve transaction hash in SAFE",
    (yargs) =>
      yargs
        .options({
          network: {
            alias: "n",
            describe: "name of network config to use, eg: {mainnet | goerli | testnet}",
            string: true,
          },
          domain: {
            describe: "L1 or L2",
            string: true,
            coerce: (arg) => arg.toUpperCase(),
          },
          targetAddress: {
            describe: "address of contract to call",
            string: true,
          },
          targetCalldata: {
            describe: "calldata to send to contract",
            string: true,
          },
        })
        .check((argv) => {
          // BUGFIX: the previous condition (`!(a && b) && !(c && d)`) only
          // rejected when BOTH pairs were absent. The handler below
          // dereferences all four options with non-null assertions, so every
          // one of them must be present.
          if (!(argv.targetAddress && argv.targetCalldata) || !(argv.network && argv.domain)) {
            throw new Error("Must provide network, domain, targetAddress and targetCalldata");
          }
          return true; // If no error was thrown, validation passed and you can return true
        }),
    async (argv) => {
      // todo: validate
      const targetAddress = ethers.getAddress(argv.targetAddress!);
      const targetCalldata = argv.targetCalldata!;
      console.log("using target value from args: ", { targetAddress, targetCalldata });
      const conf = getConfig(argv.network!, argv.domain!);
      // Build the Safe tx hash for the Timelock `schedule` call routed
      // through the Forwarder; the user signs this hash out-of-band.
      const fragment = await approveHash(
        targetAddress,
        ethers.getBytes(targetCalldata),
        conf.ScrollSafeAddress,
        conf.ForwarderAddress,
        conf.ScrollTimelockAddress
      );
      console.log(fragment);
    }
  )
  .help().argv;

49
contracts/admin/config.ts Normal file
View File

@@ -0,0 +1,49 @@
// Per-domain (one chain side) deployment addresses of the admin contracts.
export interface DomainDeployment {
  ForwarderAddress: string;
  ScrollSafeAddress: string;
  ScrollTimelockAddress: string;
  CouncilSafeAddress: string;
  CouncilTimelockAddress: string;
}

// A full deployment covers both the L1 and L2 sides of one network.
export interface Deployment {
  L1: DomainDeployment;
  L2: DomainDeployment;
}

// Maps a network name (e.g. "testnet") to its deployment addresses.
export interface Config {
  [key: string]: Deployment;
}

// Static address book. Zero addresses mark contracts that are not deployed
// on that domain yet; the L2 testnet addresses look like local anvil
// deployments — TODO confirm before using against a shared network.
const config: Config = {
  testnet: {
    L1: {
      ForwarderAddress: "0x0000000000000000000000000000000000000000",
      ScrollSafeAddress: "0x0000000000000000000000000000000000000000",
      ScrollTimelockAddress: "0x0000000000000000000000000000000000000000",
      CouncilSafeAddress: "0x0000000000000000000000000000000000000000",
      CouncilTimelockAddress: "0x0000000000000000000000000000000000000000",
    },
    L2: {
      ForwarderAddress: "0xA51c1fc2f0D1a1b8494Ed1FE312d7C3a78Ed91C0",
      ScrollSafeAddress: "0xa513E6E4b8f2a923D98304ec87F64353C4D5C853",
      ScrollTimelockAddress: "0x8A791620dd6260079BF849Dc5567aDC3F2FdC318",
      CouncilSafeAddress: "0x0000000000000000000000000000000000000000",
      CouncilTimelockAddress: "0x0000000000000000000000000000000000000000",
    },
  },
};

// getConfig looks up the deployment addresses for a network/domain pair.
// Throws with a descriptive message when either key is unknown.
export const getConfig = (network: string, domain: string): DomainDeployment => {
  if (network in config) {
    if (domain in config[network]) {
      return config[network][domain as keyof Deployment];
    } else {
      throw new Error(`Invalid domain: ${domain}`);
    }
  } else {
    throw new Error(`Invalid network: ${network}`);
  }
};

View File

@@ -0,0 +1,19 @@
{
"name": "admin-cli",
"bin": {
"admin-cli": "./bin/index.js"
},
"main": "bin/index.js",
"scripts": {
"build": "tsc",
"prepublishOnly": "npm run build"
},
"dependencies": {
"ethers": "^6.6.1",
"yargs": "^17.7.2"
},
"devDependencies": {
"@typechain/ethers-v6": "^0.4.0",
"@types/yargs": "^17.0.24"
}
}

View File

@@ -0,0 +1,10 @@
{
"compilerOptions": {
"target": "es2018",
"module": "commonjs",
"strict": true,
"esModuleInterop": true,
"outDir": "dist",
"declaration": true
}
}

113
contracts/admin/tx.ts Normal file
View File

@@ -0,0 +1,113 @@
import { ethers } from "ethers";
import {
Safe__factory,
Safe,
Forwarder__factory,
Forwarder,
Timelock__factory,
Timelock,
} from "./types/ethers-contracts";
export interface RawTxFragment {
to: string;
callData: string;
functionSig: string;
}
// execTransaction submits a pre-approved Safe transaction whose inner
// calldata is `calldata`. Each address in `senders` is appended as an
// "approved hash" signature (the owner previously called approveHash()).
// NOTE(review): the `to` argument is the zero address and `signatures` is
// seeded with a 20-byte zero prefix before the encoded owner sigs — confirm
// both against the Safe contract's expected packed-signature layout.
async function execTransaction(wallet: ethers.Wallet, safeContract: Safe, calldata: string, senders: string[]) {
  // ethers.AbiCoder.encode(
  // Safe__factory.abi
  let signatures = "0x0000000000000000000000000000000000000000";
  for (let i = 0; i < senders.length; i++) {
    signatures += encodeAddress(senders[i]);
  }
  await safeContract
    .connect(wallet)
    .execTransaction(
      "0x0000000000000000000000000000000000000000",
      0,
      calldata,
      0,
      0,
      0,
      0,
      ethers.ZeroAddress,
      ethers.ZeroAddress,
      signatures,
      { gasLimit: 1000000 }
    );
}
// approveHash builds the Safe transaction hash an owner must sign to approve
// scheduling `targetCalldata` (addressed to `targetAddress`) on the Timelock
// via the Forwarder. Returns a RawTxFragment with the Safe address, the hash
// to sign, and the approveHash(bytes32) function signature.
export async function approveHash(
  targetAddress: ethers.AddressLike,
  targetCalldata: ethers.BytesLike,
  safeAddress: ethers.AddressLike,
  forwarderAddress: ethers.AddressLike,
  timelockAddress: ethers.AddressLike
): Promise<RawTxFragment> {
  // either implement getTransactionHash in JS or make RPC call to get hash
  // NOTE(review): RPC endpoint is hard-coded to the local anvil port; make
  // this configurable before using against a real network.
  const provider = new ethers.JsonRpcProvider("http://localhost:1234");
  const safeContract = Safe__factory.connect(safeAddress.toString(), provider);
  const forwarderContract = Forwarder__factory.connect(forwarderAddress.toString());
  const timelockContract = Timelock__factory.connect(timelockAddress.toString());

  // Inner call: Forwarder.forward(target, calldata).
  // const targetCalldata = targetContract.interface.encodeFunctionData("err");
  const forwarderCalldata = forwarderContract.interface.encodeFunctionData("forward", [
    targetAddress.toString(),
    targetCalldata,
  ]);
  // Middle call: Timelock.schedule(forwarder, 0, forwarderCalldata, 0, 0, 0)
  // — zero predecessor/salt/delay.
  const timelockScheduleCalldata = timelockContract.interface.encodeFunctionData("schedule", [
    forwarderAddress.toString(),
    0,
    forwarderCalldata,
    ethers.ZeroHash,
    ethers.ZeroHash,
    0,
  ]);
  // Outer: ask the Safe (on-chain) for the EIP-712 tx hash of the schedule
  // call, using the Safe's current nonce of 0 — TODO confirm nonce handling.
  const txHash = await safeContract.getTransactionHash(
    timelockAddress.toString(),
    0,
    timelockScheduleCalldata,
    0,
    0,
    0,
    0,
    ethers.ZeroAddress,
    ethers.ZeroAddress,
    0
  );
  return {
    to: safeAddress.toString(),
    callData: txHash,
    functionSig: "approveHash(bytes32)",
  };
}
// await safeContract.checkNSignatures(scheduleSafeTxHash, ethers.arrayify("0x00"), sigSchedule, 1);
// await timelockContract
// .connect(wallet)
// .execute(L2_FORWARDER_ADDR, 0, forwarderCalldata, ethers.HashZero, ethers.HashZero, {
// gasLimit: 1000000,
// });
// safe takes address as part of the signature
// encodeAddress encodes an owner address as a 65-byte "approved hash" Safe
// signature: r = left-padded address, s = zero, v = 1. Returns hex WITHOUT
// the 0x prefix so callers can append it to an accumulating hex string.
function encodeAddress(address: string) {
  const r = ethers.zeroPadValue(address, 32);
  const s = ethers.zeroPadValue("0x00", 32);
  const v = "0x01";
  // BUGFIX: the previous `toBeHex(concat(...)).slice(-2)` kept only the
  // final byte, and `toBeHex` reinterprets the bytes as a number, dropping
  // leading zeros. `concat` already yields the full padded hex string; we
  // only strip its "0x" prefix.
  return ethers.concat([r, s, v]).slice(2);
}
// add 4 to the v byte at the end of the signature
function editSig(sig: string) {
  // Split the signature into its body and the trailing v byte (2 hex chars).
  const body = sig.substring(0, sig.length - 2);
  const vByte = sig.substring(sig.length - 2);
  // Bump v by 4 (eth_sign-style marker) and re-append it as hex.
  return body + (parseInt(vByte, 16) + 4).toString(16);
}
// NOTE(review): this console.log runs at import time on every require of the
// module — remove once debugging is done. `module.exports` is redundant with
// the ES `export` on approveHash above; prefer a single export style.
console.log(encodeAddress("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"));
module.exports = {
  approveHash,
};

View File

@@ -3,7 +3,7 @@ src = 'src' # the source directory
test = 'src/test' # the test directory
script = 'scripts' # the script directory
out = 'artifacts/src' # the output directory (for artifacts)
libs = [] # a list of library directories
libs = ["lib"] # the library directory
remappings = [] # a list of remappings
libraries = [] # a list of deployed libraries to link against
cache = true # whether to cache builds or not

26
contracts/scripts/deploy.sh Executable file
View File

@@ -0,0 +1,26 @@
#!/bin/sh
set -uex

# deploys a local instance of the contracts

PORT=1234

# Kill any process already bound to the port. `|| true` keeps `set -e` from
# aborting the script when nothing is listening (lsof exits non-zero then).
PID=$(lsof -t -i:$PORT || true)
if [ -n "$PID" ]; then
  echo "$PID"
  kill $PID
fi

# Well-known anvil dev key (account #0) — local use only.
export L2_DEPLOYER_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80

anvil --port $PORT &
while ! lsof -i :$PORT
do
  echo "...waiting for anvil"
  sleep 1
done
echo "started anvil"

forge script ./foundry/DeployL2AdminContracts.s.sol:DeployL2AdminContracts --rpc-url http://localhost:1234 --legacy --broadcast -vvvv
npx ts-node ./encode.ts
echo "deployment success"

74
contracts/scripts/encode.sh Executable file
View File

@@ -0,0 +1,74 @@
#!/bin/sh
# Drives a Safe -> TimelockController -> Forwarder call chain on a local anvil
# node (addresses below must match the deploy.sh / forge script output).
# BUG FIX: shebang was `#/bin/sh` (missing `!`).
set -uex
# does not work due to V recovery bit being off
L2_COUNCIL_SAFE_ADDR=0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512
L2_COUNCIL_TIMELOCK_ADDR=0xCf7Ed3AccA5a467e9e704C703E8D87F634fB0Fc9
L2_SCROLL_SAFE_ADDR=0xa513E6E4b8f2a923D98304ec87F64353C4D5C853
L2_SCROLL_TIMELOCK_ADDR=0x8A791620dd6260079BF849Dc5567aDC3F2FdC318
L2_FORWARDER_ADDR=0xA51c1fc2f0D1a1b8494Ed1FE312d7C3a78Ed91C0
L2_TARGET_ADDR=0x0DCd1Bf9A1b36cE34237eEaFef220932846BCD82
# 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
L2_DEPLOYER_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
ZERO_BYTES=0x0000000000000000000000000000000000000000

# sign tx hash for timelock schedule call
ADMIN_CALLDATA=$(cast calldata "err()")
# NOTE(review): the forward() target here is the forwarder itself; the
# encode.ts equivalent forwards to $L2_TARGET_ADDR — confirm which is intended.
FORWARDER_CALLDATA=$(cast calldata "forward(address,bytes)" $L2_FORWARDER_ADDR $ADMIN_CALLDATA)
TIMELOCK_SCHEDULE_CALLDATA=$(cast calldata "schedule(address,uint256,bytes,bytes32,bytes32,uint256)" $L2_FORWARDER_ADDR 0 $FORWARDER_CALLDATA 0x0 0x0 0x0)
SAFE_TX_HASH=$(cast call -r http://localhost:1234 $L2_SCROLL_SAFE_ADDR "getTransactionHash(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,uint256)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_SCHEDULE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES 0)
SAFE_SIG=$(cast wallet sign --private-key $L2_DEPLOYER_PRIVATE_KEY $SAFE_TX_HASH | awk '{print $2}')
# echo $SAFE_SIG
# echo $SAFE_TX_HASH

# send safe tx to schedule the call
cast send -c 31337 --legacy --private-key $L2_DEPLOYER_PRIVATE_KEY -r http://localhost:1234 --gas-limit 1000000 $L2_SCROLL_SAFE_ADDR "execTransaction(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,bytes)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_SCHEDULE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES $SAFE_SIG

# Safe ABI reference for the calls above:
# function encodeTransactionData(
#     address to,
#     uint256 value,
#     bytes calldata data,
#     Enum.Operation operation,
#     uint256 safeTxGas,
#     uint256 baseGas,
#     uint256 gasPrice,
#     address gasToken,
#     address refundReceiver,
#     uint256 _nonce
# function execTransaction(
#     address to,
#     uint256 value,
#     bytes calldata data,
#     Enum.Operation operation,
#     uint256 safeTxGas,
#     uint256 baseGas,
#     uint256 gasPrice,
#     address gasToken,
#     address payable refundReceiver,
#     bytes memory signatures

# Second transaction is parked behind this exit until the signature V issue
# (see header comment) is resolved.
exit 0

# /////////////// 2nd tx ///////////////
# sign tx hash for execute call
TIMELOCK_EXECUTE_CALLDATA=$(cast calldata "execute(address,uint256,bytes,bytes32,bytes32)" $L2_FORWARDER_ADDR 0 $FORWARDER_CALLDATA 0x0 0x0)
# BUG FIX: this hash previously used $TIMELOCK_SCHEDULE_CALLDATA and was
# stored in a typo'd variable (SAFE_TX_HASH_), so the signature below was
# computed over the stale schedule hash rather than the execute hash.
# NOTE(review): the Safe nonce is still 0 here; after the first
# execTransaction it will be 1 — confirm before enabling this path.
SAFE_TX_HASH=$(cast call -r http://localhost:1234 $L2_SCROLL_SAFE_ADDR "getTransactionHash(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,uint256)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_EXECUTE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES 0)
SAFE_SIG=$(cast wallet sign --private-key $L2_DEPLOYER_PRIVATE_KEY $SAFE_TX_HASH | awk '{print $2}')

# send safe tx to execute the call
cast send -c 31337 --legacy --private-key $L2_DEPLOYER_PRIVATE_KEY -r http://localhost:1234 --gas-limit 1000000 $L2_SCROLL_SAFE_ADDR "execTransaction(address,uint256,bytes,uint8,uint256,uint256,uint256,address,address,bytes)" \
    $L2_SCROLL_TIMELOCK_ADDR 0 $TIMELOCK_EXECUTE_CALLDATA 0 0 0 0 $ZERO_BYTES $ZERO_BYTES $SAFE_SIG
echo "DONE"

102
contracts/scripts/encode.ts Normal file
View File

@@ -0,0 +1,102 @@
import { ethers } from "ethers";
import { Safeabi__factory, Forwarder__factory, Target__factory, Timelock__factory } from "../safeAbi";
const L2_SCROLL_SAFE_ADDR = "0xa513E6E4b8f2a923D98304ec87F64353C4D5C853";
const L2_SCROLL_TIMELOCK_ADDR = "0x8A791620dd6260079BF849Dc5567aDC3F2FdC318";
const L2_FORWARDER_ADDR = "0xA51c1fc2f0D1a1b8494Ed1FE312d7C3a78Ed91C0";
const L2_TARGET_ADDR = "0x0DCd1Bf9A1b36cE34237eEaFef220932846BCD82";
const L2_DEPLOYER_PRIVATE_KEY = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
/*
TODO:
* read from env
* use approve hash flow
* read nonce from safe
* split script into schedule and execute
* add gas limit
* document how to use
* how to get addresses from deployment?
* get abis in a reasonable way
*/
/*
to get safe abi
* forge build
* cat artifacts/src/Safe.sol/Safe.json| jq .abi >> safeabi.json
* mkdir safeAbi
* npx typechain --target=ethers-v5 safeabi.json --out-dir safeAbi
repeat for forwarder, timelock, target
*/
// Schedules and then executes a call through the Safe -> TimelockController ->
// Forwarder -> MockTarget chain on a local node:
//   1. Build calldata for target.err() wrapped in forwarder.forward(...).
//   2. Have the Safe execute timelock.schedule(forwarder, ...).
//   3. Call timelock.execute(...) directly (executor role permitting).
// Addresses are the constants above; they must match the forge deploy output.
async function main() {
  const provider = new ethers.providers.JsonRpcProvider("http://localhost:1234");
  const wallet = new ethers.Wallet(L2_DEPLOYER_PRIVATE_KEY, provider);
  // Typechain-generated bindings — see the "to get safe abi" note above.
  const safeContract = Safeabi__factory.connect(L2_SCROLL_SAFE_ADDR, provider);
  const forwarderContract = Forwarder__factory.connect(L2_FORWARDER_ADDR, provider);
  const timelockContract = Timelock__factory.connect(L2_SCROLL_TIMELOCK_ADDR, provider);
  const targetContract = Target__factory.connect(L2_TARGET_ADDR, provider);

  // Innermost call: target.err() (reverts on purpose), wrapped so the
  // forwarder is the contract the timelock ultimately calls.
  const targetCalldata = targetContract.interface.encodeFunctionData("err");
  const forwarderCalldata = forwarderContract.interface.encodeFunctionData("forward", [L2_TARGET_ADDR, targetCalldata]);
  // schedule(target, value, data, predecessor, salt, delay) — zero
  // predecessor/salt, zero delay. The same (predecessor, salt) pair must be
  // passed to execute() below.
  const timelockScheduleCalldata = timelockContract.interface.encodeFunctionData("schedule", [
    L2_FORWARDER_ADDR,
    0,
    forwarderCalldata,
    ethers.constants.HashZero,
    ethers.constants.HashZero,
    0,
  ]);

  // Safe tx hash for execTransaction(timelock, 0, scheduleCalldata, ...),
  // operation=Call, no gas refunds, nonce 0.
  // NOTE(review): the nonce is hard-coded to 0 — reading it from the Safe is
  // listed in the TODOs above; rerunning this script will produce a bad hash.
  const scheduleSafeTxHash = await safeContract.getTransactionHash(
    L2_SCROLL_TIMELOCK_ADDR,
    0,
    timelockScheduleCalldata,
    0,
    0,
    0,
    0,
    ethers.constants.AddressZero,
    ethers.constants.AddressZero,
    0
  );
  // signMessage applies the EIP-191 prefix, so the recovery byte must be
  // shifted into the eth_sign range (+4) — see editSig below.
  const sigRawSchedule = await wallet.signMessage(ethers.utils.arrayify(scheduleSafeTxHash));
  const sigSchedule = editSig(sigRawSchedule);
  // Sanity check the signature off-chain-style before spending gas.
  await safeContract.checkNSignatures(scheduleSafeTxHash, ethers.utils.arrayify("0x00"), sigSchedule, 1);
  await safeContract
    .connect(wallet)
    .execTransaction(
      L2_SCROLL_TIMELOCK_ADDR,
      0,
      timelockScheduleCalldata,
      0,
      0,
      0,
      0,
      ethers.constants.AddressZero,
      ethers.constants.AddressZero,
      sigSchedule,
      { gasLimit: 1000000 }
    );
  console.log("scheduled");
  // Execute the now-ready operation; args must mirror the schedule() call.
  await timelockContract
    .connect(wallet)
    .execute(L2_FORWARDER_ADDR, 0, forwarderCalldata, ethers.constants.HashZero, ethers.constants.HashZero, {
      gasLimit: 1000000,
    });
}
// Add 4 to the v byte at the end of the signature.
// Safe's checkNSignatures interprets v in {31, 32} as an eth_sign-style
// (EIP-191 prefixed) signature, so the standard 27/28 recovery id from
// wallet.signMessage must be shifted by 4 before submission.
function editSig(sig: string) {
  const v = parseInt(sig.slice(-2), 16);
  const newV = v + 4;
  // BUG FIX: pad to two hex digits so a small v byte cannot shorten the
  // signature (previously `toString(16)` could emit a single character).
  const newSig = sig.slice(0, -2) + newV.toString(16).padStart(2, "0");
  return newSig;
}
main();

View File

@@ -0,0 +1,76 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Safe} from "safe-contracts/Safe.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {Forwarder} from "../../src/misc/Forwarder.sol";
/// @notice Deploys the L1 admin stack: a council Safe + timelock, a Scroll
/// Safe + timelock, and a Forwarder gated on the two Safes.
contract DeployL1AdminContracts is Script {
    uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");

    function run() external {
        vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);

        address council_safe = deploySafe();
        // deploy timelock with no delay just to have flow between council and scroll admin
        address council_timelock = deployTimelockController(council_safe, 0);
        logAddress("L1_COUNCIL_SAFE_ADDR", address(council_safe));
        logAddress("L1_COUNCIL_TIMELOCK_ADDR", address(council_timelock));

        address scroll_safe = deploySafe();
        // TODO: get timelock delay from env. for now just use 2 days
        address scroll_timelock = deployTimelockController(scroll_safe, 2 days);
        logAddress("L1_SCROLL_SAFE_ADDR", address(scroll_safe));
        logAddress("L1_SCROLL_TIMELOCK_ADDR", address(scroll_timelock));

        // NOTE(review): here the forwarder admins are the Safes directly,
        // while the L2 script wires it to the *timelocks* — confirm which
        // topology is intended before mainnet use.
        address forwarder = deployForwarder(address(council_safe), address(scroll_safe));
        logAddress("L1_FORWARDER_ADDR", address(forwarder));

        vm.stopBroadcast();
    }

    /// @dev Deploys a Forwarder with the given admin / superAdmin pair.
    function deployForwarder(address admin, address superAdmin) internal returns (address) {
        Forwarder forwarder = new Forwarder(admin, superAdmin);
        return address(forwarder);
    }

    /// @dev Deploys a 1-of-1 Safe owned by the deployer.
    /// NOTE(review): this calls setup() directly on the Safe singleton rather
    /// than through a SafeProxy (as the L2 script does); Safe's constructor
    /// initializes the singleton, so a direct setup() is expected to revert —
    /// confirm against the pinned safe-contracts version.
    function deploySafe() internal returns (address) {
        address owner = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
        // TODO: get safe signers from env
        Safe safe = new Safe();
        address[] memory owners = new address[](1);
        owners[0] = owner;
        // deployer 1/1. no gas refunds for now
        safe.setup(owners, 1, address(0), new bytes(0), address(0), address(0), 0, payable(address(0)));
        return address(safe);
    }

    /// @dev Deploys a TimelockController proposed-by the Safe, then hands the
    /// admin role to the Safe and revokes the deployer's admin rights.
    function deployTimelockController(address safe, uint256 delay) internal returns (address) {
        address deployer = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
        address[] memory proposers = new address[](1);
        proposers[0] = safe;
        // add SAFE as the only proposer, anyone can execute
        // NOTE(review): the executor is the *deployer*, not address(0), so the
        // comment above is wrong: execution is NOT open here (contrast with
        // the L2 script, which uses address(0)). Confirm intent.
        address[] memory executors = new address[](1);
        executors[0] = deployer;
        TimelockController timelock = new TimelockController(delay, proposers, executors);

        bytes32 TIMELOCK_ADMIN_ROLE = keccak256("TIMELOCK_ADMIN_ROLE");
        // make safe admin of timelock, then revoke deployer's rights
        timelock.grantRole(TIMELOCK_ADMIN_ROLE, address(safe));
        timelock.revokeRole(TIMELOCK_ADMIN_ROLE, deployer);
        return address(timelock);
    }

    /// @dev Prints `name=0x...` for consumption by downstream scripts.
    function logAddress(string memory name, address addr) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
    }
}

View File

@@ -4,8 +4,8 @@ pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {L1CustomERC20Gateway} from "../../src/L1/gateways/L1CustomERC20Gateway.sol";
import {L1ERC1155Gateway} from "../../src/L1/gateways/L1ERC1155Gateway.sol";
@@ -22,22 +22,23 @@ import {L2GasPriceOracle} from "../../src/L1/rollup/L2GasPriceOracle.sol";
import {ScrollChain} from "../../src/L1/rollup/ScrollChain.sol";
import {Whitelist} from "../../src/L2/predeploys/Whitelist.sol";
contract DeployL1BridgeContracts is Script {
uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY");
uint256 CHAIN_ID_L2 = vm.envUint("CHAIN_ID_L2");
uint32 CHAIN_ID_L2 = uint32(vm.envUint("CHAIN_ID_L2"));
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
address L2_WETH_ADDR = vm.envAddress("L2_WETH_ADDR");
ProxyAdmin proxyAdmin;
// scroll admin (timelocked) or security council
address FORWARDER = vm.envAddress("L1_FORWARDER");
function run() external {
vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY);
// note: the RollupVerifier library is deployed implicitly
deployProxyAdmin();
deployL1Whitelist();
deployL1MessageQueue();
deployL2GasPriceOracle();
@@ -55,12 +56,6 @@ contract DeployL1BridgeContracts is Script {
vm.stopBroadcast();
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L1_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployL1Whitelist() internal {
address owner = vm.addr(L1_DEPLOYER_PRIVATE_KEY);
Whitelist whitelist = new Whitelist(owner);
@@ -72,7 +67,7 @@ contract DeployL1BridgeContracts is Script {
ScrollChain impl = new ScrollChain(CHAIN_ID_L2);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -84,7 +79,7 @@ contract DeployL1BridgeContracts is Script {
L1MessageQueue impl = new L1MessageQueue();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
logAddress("L1_MESSAGE_QUEUE_IMPLEMENTATION_ADDR", address(impl));
@@ -95,7 +90,7 @@ contract DeployL1BridgeContracts is Script {
L2GasPriceOracle impl = new L2GasPriceOracle();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
logAddress("L2_GAS_PRICE_ORACLE_IMPLEMENTATION_ADDR", address(impl));
@@ -106,7 +101,7 @@ contract DeployL1BridgeContracts is Script {
L1StandardERC20Gateway impl = new L1StandardERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -118,7 +113,7 @@ contract DeployL1BridgeContracts is Script {
L1ETHGateway impl = new L1ETHGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -130,7 +125,7 @@ contract DeployL1BridgeContracts is Script {
L1WETHGateway impl = new L1WETHGateway(L1_WETH_ADDR, L2_WETH_ADDR);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -142,7 +137,7 @@ contract DeployL1BridgeContracts is Script {
L1GatewayRouter impl = new L1GatewayRouter();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -154,7 +149,7 @@ contract DeployL1BridgeContracts is Script {
L1ScrollMessenger impl = new L1ScrollMessenger();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -166,7 +161,7 @@ contract DeployL1BridgeContracts is Script {
EnforcedTxGateway impl = new EnforcedTxGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -178,7 +173,7 @@ contract DeployL1BridgeContracts is Script {
L1CustomERC20Gateway impl = new L1CustomERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -190,7 +185,7 @@ contract DeployL1BridgeContracts is Script {
L1ERC721Gateway impl = new L1ERC721Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -202,7 +197,7 @@ contract DeployL1BridgeContracts is Script {
L1ERC1155Gateway impl = new L1ERC1155Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);

View File

@@ -0,0 +1,114 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Safe} from "safe-contracts/Safe.sol";
import {SafeProxy} from "safe-contracts/proxies/SafeProxy.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {Forwarder} from "../../src/misc/Forwarder.sol";
import {MockTarget} from "../../src/mocks/MockTarget.sol";
/// @notice Minimal mirror of Safe.setup so it can be invoked through the
/// SafeProxy address (the proxy's static type does not expose Safe's ABI).
interface ISafe {
    /// @dev See safe-contracts Safe.setup: initializes owners/threshold and
    /// optional delegatecall, fallback handler, and payment settings.
    function setup(
        address[] calldata _owners,
        uint256 _threshold,
        address to,
        bytes calldata data,
        address fallbackHandler,
        address paymentToken,
        uint256 payment,
        address payable paymentReceiver
    ) external;
}
/// @notice Deploys the L2 admin stack: council Safe + timelock, Scroll Safe +
/// timelock, a Forwarder gated on the two timelocks, and a MockTarget for
/// end-to-end testing.
contract DeployL2AdminContracts is Script {
    uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");

    function run() external {
        vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);

        address council_safe = deploySafe();
        // deploy timelock with no delay, just to keep council and scroll admin flows be parallel
        address council_timelock = deployTimelockController(council_safe, 0);
        logAddress("L2_COUNCIL_SAFE_ADDR", address(council_safe));
        logAddress("L2_COUNCIL_TIMELOCK_ADDR", address(council_timelock));

        address scroll_safe = deploySafe();
        // TODO: get timelock delay from env. for now just use 0
        address scroll_timelock = deployTimelockController(scroll_safe, 0);
        logAddress("L2_SCROLL_SAFE_ADDR", address(scroll_safe));
        logAddress("L2_SCROLL_TIMELOCK_ADDR", address(scroll_timelock));

        address forwarder = deployForwarder(address(council_timelock), address(scroll_timelock));
        // BUG FIX: this is the L2 deployment; the label previously read
        // "L1_FORWARDER_ADDR" (copy-paste from the L1 script).
        logAddress("L2_FORWARDER_ADDR", address(forwarder));

        MockTarget target = new MockTarget();
        logAddress("L2_TARGET_ADDR", address(target));

        vm.stopBroadcast();
    }

    /// @dev Deploys a Forwarder with the given admin / superAdmin pair.
    function deployForwarder(address admin, address superAdmin) internal returns (address) {
        Forwarder forwarder = new Forwarder(admin, superAdmin);
        return address(forwarder);
    }

    /// @dev Deploys a Safe singleton plus a SafeProxy pointing at it, then
    /// initializes the proxy as a 1-of-1 Safe owned by the deployer.
    function deploySafe() internal returns (address) {
        address owner = vm.addr(L2_DEPLOYER_PRIVATE_KEY);
        // TODO: get safe signers from env
        Safe safe = new Safe();
        SafeProxy proxy = new SafeProxy(address(safe));
        address[] memory owners = new address[](1);
        owners[0] = owner;
        // deployer 1/1. no gas refunds for now
        ISafe(address(proxy)).setup(
            owners,
            1,
            address(0),
            new bytes(0),
            address(0),
            address(0),
            0,
            payable(address(0))
        );
        return address(proxy);
    }

    /// @dev Deploys a TimelockController with the Safe as sole proposer and
    /// open execution (an address(0) executor grants the role to everyone),
    /// then makes the Safe admin and revokes the deployer's admin rights.
    function deployTimelockController(address safe, uint256 delay) internal returns (address) {
        address deployer = vm.addr(L2_DEPLOYER_PRIVATE_KEY);
        address[] memory proposers = new address[](1);
        proposers[0] = safe;
        address[] memory executors = new address[](1);
        executors[0] = address(0);
        // add SAFE as the only proposer, anyone can execute
        TimelockController timelock = new TimelockController(delay, proposers, executors);

        bytes32 TIMELOCK_ADMIN_ROLE = keccak256("TIMELOCK_ADMIN_ROLE");
        // make safe admin of timelock, then revoke deployer's rights
        timelock.grantRole(TIMELOCK_ADMIN_ROLE, address(safe));
        timelock.revokeRole(TIMELOCK_ADMIN_ROLE, deployer);
        return address(timelock);
    }

    /// @dev Prints `name=0x...` (bytes32 variant).
    function logBytes32(string memory name, bytes32 value) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(bytes32(value)))));
    }

    /// @dev Prints `name=<uint>`.
    function logUint(string memory name, uint256 value) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(uint256(value)))));
    }

    /// @dev Prints `name=0x...` for consumption by downstream scripts.
    function logAddress(string memory name, address addr) internal view {
        console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
    }
}

View File

@@ -4,7 +4,6 @@ pragma solidity ^0.8.10;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {ProxyAdmin} from "@openzeppelin/contracts/proxy/transparent/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
import {L2CustomERC20Gateway} from "../../src/L2/gateways/L2CustomERC20Gateway.sol";
@@ -30,10 +29,12 @@ contract DeployL2BridgeContracts is Script {
address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR");
address L2_WETH_ADDR = vm.envAddress("L2_WETH_ADDR");
// scroll admin (timelocked) or security council
address FORWARDER = vm.envAddress("L2_FORWARDER");
L1GasPriceOracle oracle;
L1BlockContainer container;
L2MessageQueue queue;
ProxyAdmin proxyAdmin;
// predeploy contracts
address L1_BLOCK_CONTAINER_PREDEPLOY_ADDR = vm.envOr("L1_BLOCK_CONTAINER_PREDEPLOY_ADDR", address(0));
@@ -53,7 +54,6 @@ contract DeployL2BridgeContracts is Script {
deployL2Whitelist();
// upgradable
deployProxyAdmin();
deployL2ScrollMessenger();
deployL2ETHGateway();
deployL2WETHGateway();
@@ -130,17 +130,11 @@ contract DeployL2BridgeContracts is Script {
logAddress("L2_WHITELIST_ADDR", address(whitelist));
}
function deployProxyAdmin() internal {
proxyAdmin = new ProxyAdmin();
logAddress("L2_PROXY_ADMIN_ADDR", address(proxyAdmin));
}
function deployL2ScrollMessenger() internal {
L2ScrollMessenger impl = new L2ScrollMessenger(address(container), address(oracle), address(queue));
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -152,7 +146,7 @@ contract DeployL2BridgeContracts is Script {
L2StandardERC20Gateway impl = new L2StandardERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -164,7 +158,7 @@ contract DeployL2BridgeContracts is Script {
L2ETHGateway impl = new L2ETHGateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -176,7 +170,7 @@ contract DeployL2BridgeContracts is Script {
L2WETHGateway impl = new L2WETHGateway(L2_WETH_ADDR, L1_WETH_ADDR);
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -188,7 +182,7 @@ contract DeployL2BridgeContracts is Script {
L2GatewayRouter impl = new L2GatewayRouter();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -208,7 +202,7 @@ contract DeployL2BridgeContracts is Script {
L2CustomERC20Gateway impl = new L2CustomERC20Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -220,7 +214,7 @@ contract DeployL2BridgeContracts is Script {
L2ERC721Gateway impl = new L2ERC721Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);
@@ -232,7 +226,7 @@ contract DeployL2BridgeContracts is Script {
L2ERC1155Gateway impl = new L2ERC1155Gateway();
TransparentUpgradeableProxy proxy = new TransparentUpgradeableProxy(
address(impl),
address(proxyAdmin),
FORWARDER,
new bytes(0)
);

View File

@@ -10,6 +10,7 @@ import {BatchHeaderV0Codec} from "../../libraries/codec/BatchHeaderV0Codec.sol";
import {ChunkCodec} from "../../libraries/codec/ChunkCodec.sol";
import {IRollupVerifier} from "../../libraries/verifier/IRollupVerifier.sol";
// solhint-disable no-inline-assembly
// solhint-disable reason-string
/// @title ScrollChain
@@ -39,7 +40,7 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
*************/
/// @notice The chain id of the corresponding layer 2 chain.
uint256 public immutable layer2ChainId;
uint32 public immutable layer2ChainId;
/*************
* Variables *
@@ -83,7 +84,7 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
* Constructor *
***************/
constructor(uint256 _chainId) {
constructor(uint32 _chainId) {
layer2ChainId = _chainId;
}
@@ -295,7 +296,9 @@ contract ScrollChain is OwnableUpgradeable, IScrollChain {
require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified");
// compute public input hash
bytes32 _publicInputHash = keccak256(abi.encode(_prevStateRoot, _postStateRoot, _withdrawRoot, _dataHash));
bytes32 _publicInputHash = keccak256(
abi.encodePacked(layer2ChainId, _prevStateRoot, _postStateRoot, _withdrawRoot, _dataHash)
);
// verify batch
IRollupVerifier(verifier).verifyAggregateProof(_aggrProof, _publicInputHash);

View File

@@ -2,6 +2,8 @@
pragma solidity ^0.8.0;
// solhint-disable no-inline-assembly
/// @dev Below is the encoding for `BatchHeader` V0, total 89 + ceil(l1MessagePopped / 256) * 32 bytes.
/// ```text
/// * Field Bytes Type Index Comments

View File

@@ -0,0 +1,36 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import {Ownable} from "@openzeppelin/contracts/access/Ownable.sol";
import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol";
import {SafeERC20} from "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol";
/// @notice Owner-controlled escape hatch: can sweep stuck tokens/ETH and make
/// arbitrary calls. All entry points are onlyOwner.
/// NOTE(review): no receive()/fallback function is visible here, so plain ETH
/// transfers to this contract revert; ETH can only arrive attached to
/// execute() (payable) or by force-send — confirm that is intended.
contract Fallback is Ownable {
    using SafeERC20 for IERC20;

    /// @notice Withdraw stucked token from this contract.
    /// @param _token The address of token to withdraw, use `address(0)` if withdraw ETH.
    /// @param _amount The amount of token to withdraw.
    /// @param _recipient The address of receiver.
    function withdraw(
        address _token,
        uint256 _amount,
        address _recipient
    ) external onlyOwner {
        if (_token == address(0)) {
            // Low-level call so contract recipients with custom receive logic
            // (and more than 2300 gas needs) still work.
            (bool _success, ) = _recipient.call{value: _amount}("");
            require(_success, "transfer ETH failed");
        } else {
            // safeTransfer handles non-standard ERC20s that return nothing.
            IERC20(_token).safeTransfer(_recipient, _amount);
        }
    }

    /// @notice Execute an arbitrary message.
    /// @param _target The address of contract to call.
    /// @param _data The calldata passed to target contract.
    function execute(address _target, bytes calldata _data) external payable onlyOwner {
        // Forwards the attached msg.value along with the call.
        (bool _success, ) = _target.call{value: msg.value}(_data);
        require(_success, "call failed");
    }
}

View File

@@ -0,0 +1,45 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
/// @notice Two-tier proxy caller: `admin` and `superAdmin` may forward
/// arbitrary calls; only `superAdmin` may rotate either role.
contract Forwarder {
    // Role allowed to forward calls (rotated by superAdmin).
    address public admin;
    // Role allowed to forward calls AND rotate both roles.
    address public superAdmin;

    event Forwarded(address indexed target, uint256 value, bytes data);
    event SetAdmin(address indexed admin);
    event SetSuperAdmin(address indexed superAdmin);

    constructor(address _admin, address _superAdmin) {
        admin = _admin;
        superAdmin = _superAdmin;
    }

    /// @notice Replace the admin. Only callable by superAdmin (the admin
    /// cannot rotate itself).
    function setAdmin(address _admin) public {
        require(msg.sender == superAdmin, "only superAdmin");
        admin = _admin;
        emit SetAdmin(_admin);
    }

    /// @notice Replace the superAdmin. Irreversible if set to an address
    /// nobody controls.
    function setSuperAdmin(address _superAdmin) public {
        require(msg.sender == superAdmin, "only superAdmin");
        superAdmin = _superAdmin;
        emit SetSuperAdmin(_superAdmin);
    }

    /// @notice Call `_target` with `_data`, forwarding any attached ETH.
    /// Reverts with the callee's exact revert data on failure.
    function forward(address _target, bytes memory _data) public payable {
        require(msg.sender == superAdmin || msg.sender == admin, "only admin or superAdmin");
        (bool success, ) = _target.call{value: msg.value}(_data);
        // bubble up revert reason: copy the callee's returndata verbatim into
        // memory and revert with it, preserving custom errors and strings.
        if (!success) {
            assembly {
                let ptr := mload(0x40)
                let size := returndatasize()
                returndatacopy(ptr, 0, size)
                revert(ptr, size)
            }
        }
        emit Forwarded(_target, msg.value, _data);
    }
}

View File

@@ -0,0 +1,14 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
/// @notice Minimal call target used to exercise the Forwarder/Timelock flow.
contract MockTarget {
    event ABC(uint256);

    /// @notice Always reverts; used to test revert-reason bubbling.
    function err() external pure {
        revert("test error");
    }

    /// @notice Emits a marker event; used as a success probe.
    function succeed() external {
        emit ABC(1);
    }
}

View File

@@ -0,0 +1,69 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import {DSTestPlus} from "solmate/test/utils/DSTestPlus.sol";
import {WETH} from "solmate/tokens/WETH.sol";
import {Forwarder} from "../misc/Forwarder.sol";
import {MockTarget} from "../mocks/MockTarget.sol";
import {IL1ScrollMessenger, L1ScrollMessenger} from "../L1/L1ScrollMessenger.sol";
/// @notice Unit tests for Forwarder access control and call forwarding,
/// using an L1ScrollMessenger owned by the forwarder as the real target.
contract ForwarderTest is DSTestPlus {
    MockTarget public target;
    Forwarder public forwarder;
    L1ScrollMessenger internal l1Messenger;
    // Distinct non-zero role addresses; the test contract itself is neither.
    address public admin = address(2);
    address public superAdmin = address(3);

    function setUp() public {
        target = new MockTarget();
        forwarder = new Forwarder(admin, superAdmin);
        l1Messenger = new L1ScrollMessenger();
        l1Messenger.initialize(address(0), address(0), address(0), address(0));
        // Hand ownership to the forwarder so forwarded onlyOwner calls work.
        l1Messenger.transferOwnership(address(forwarder));
    }

    /// @dev Unprivileged caller (this contract) is rejected by every gate.
    function testAdminFail() external {
        hevm.expectRevert("only admin or superAdmin");
        forwarder.forward(address(l1Messenger), hex"00");
        hevm.expectRevert("only superAdmin");
        forwarder.setAdmin(address(0));
        hevm.expectRevert("only superAdmin");
        forwarder.setSuperAdmin(address(0));
    }

    /// @dev Admin can forward calls (here: transferOwnership to address(6)).
    function testAdmin() external {
        // cast calldata "transferOwnership(address)" 0x0000000000000000000000000000000000000005
        // 0xf2fde38b0000000000000000000000000000000000000000000000000000000000000005
        hevm.startPrank(admin);
        forwarder.forward(address(l1Messenger), hex"f2fde38b0000000000000000000000000000000000000000000000000000000000000006");
        assertEq(address(6), l1Messenger.owner());
        hevm.stopPrank();
    }

    /// @dev SuperAdmin can forward and rotate both roles (including burning
    /// superAdmin to address(0)).
    function testForwardSuperAdmin() external {
        hevm.startPrank(superAdmin);
        forwarder.forward(address(l1Messenger), hex"f2fde38b0000000000000000000000000000000000000000000000000000000000000006");
        assertEq(address(6), l1Messenger.owner());
        forwarder.setAdmin(address(0));
        assertEq(forwarder.admin(), address(0));
        forwarder.setSuperAdmin(address(0));
        assertEq(forwarder.superAdmin(), address(0));
    }

    /// @dev A revert inside the callee must surface verbatim through forward()
    /// (hex"38df7677" — presumably the err() selector; confirm with `cast sig`).
    function testNestedRevert() external {
        hevm.startPrank(superAdmin);
        hevm.expectRevert("test error");
        forwarder.forward(address(target), hex"38df7677");
    }
}

View File

@@ -0,0 +1,212 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import {DSTestPlus} from "solmate/test/utils/DSTestPlus.sol";
import "forge-std/Vm.sol";
// import {Vm, VmSafe} from "./Vm.sol";
import "forge-std/Test.sol";
import "forge-std/console.sol";
import {Safe} from "safe-contracts/Safe.sol";
import {SafeProxy} from "safe-contracts/proxies/SafeProxy.sol";
import {TimelockController} from "@openzeppelin/contracts/governance/TimelockController.sol";
import {Forwarder} from "../../src/misc/Forwarder.sol";
import {MockTarget} from "../../src/mocks/MockTarget.sol";
interface ISafe {
// enum
enum Operation {
Call,
DelegateCall
}
function setup(
address[] calldata _owners,
uint256 _threshold,
address to,
bytes calldata data,
address fallbackHandler,
address paymentToken,
uint256 payment,
address payable paymentReceiver
) external;
function execTransaction(
address to,
uint256 value,
bytes calldata data,
Operation operation,
uint256 safeTxGas,
uint256 baseGas,
uint256 gasPrice,
address gasToken,
address payable refundReceiver,
bytes memory signatures
) external returns (bool success);
function checkNSignatures(
bytes32 dataHash,
bytes memory data,
bytes memory signatures,
uint256 requiredSignatures
) external;
}
// scratchpad
contract Temp is DSTestPlus {
address scroll_safe;
// function setUp() external {
// hevm.prank(0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266);
// address council_safe = deploySafe();
// // deploy timelock with no delay, just to keep council and scroll admin flows be parallel
// address council_timelock = deployTimelockController(council_safe, 0);
// // logAddress("L2_COUNCIL_SAFE_ADDR", address(council_safe));
// // logAddress("L2_COUNCIL_TIMELOCK_ADDR", address(council_timelock));
// address scroll_safe = deploySafe();
// // TODO: get timelock delay from env. for now just use 0
// address scroll_timelock = deployTimelockController(scroll_safe, 0);
// // logAddress("L2_SCROLL_SAFE_ADDR", address(scroll_safe));
// // logAddress("L2_SCROLL_TIMELOCK_ADDR", address(scroll_timelock));
// address forwarder = deployForwarder(address(council_safe), address(scroll_safe));
// // logAddress("L1_FORWARDER_ADDR", address(forwarder));
// MockTarget target = new MockTarget();
// // logAddress("L2_TARGET_ADDR", address(target));
// // vm.stopBroadcast();
// }
function testEcrecover() external {
bytes32 dataHash = 0xb453bd4e271eed985cbab8231da609c4ce0a9cf1f763b6c1594e76315510e0f1;
// (uint8 v, bytes32 r, bytes32 s) = signatureSplit(
// hex"078461ca16494711508b8602c1ea3ef515e5bfe11d67fc76e45b9217d42059f57abdde7cb9bf83b094991e2b6e61fd8b1146de575fd12080d65eaedd2e0c74da1c",
// 0
// );
bytes
memory signatures = hex"078461ca16494711508b8602c1ea3ef515e5bfe11d67fc76e45b9217d42059f57abdde7cb9bf83b094991e2b6e61fd8b1146de575fd12080d65eaedd2e0c74da1c";
uint256 requiredSignatures = 1;
uint8 v;
bytes32 r;
bytes32 s;
uint256 i;
for (i = 0; i < requiredSignatures; i++) {
(v, r, s) = signatureSplit(signatures, i);
emit log_uint(v);
emit log_bytes32(r);
emit log_bytes32(s);
address currentOwner = ecrecover(
keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n32", dataHash)),
v,
r,
s
);
assertEq(address(0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf), currentOwner);
}
}
function testEcrecover1() external {
bytes
memory sig = hex"078461ca16494711508b8602c1ea3ef515e5bfe11d67fc76e45b9217d42059f57abdde7cb9bf83b094991e2b6e61fd8b1146de575fd12080d65eaedd2e0c74da1c";
uint8 v;
bytes32 r;
bytes32 s;
(v, r, s) = signatureSplit(sig, 0);
emit log_uint(v);
emit log_bytes32(r);
emit log_bytes32(s);
require(r == 0x078461ca16494711508b8602c1ea3ef515e5bfe11d67fc76e45b9217d42059f5, "r");
require(s == 0x7abdde7cb9bf83b094991e2b6e61fd8b1146de575fd12080d65eaedd2e0c74da, "s");
require(v == 28, "v");
}
function testSigVerify() external {
address currentOwner = ecrecover(
keccak256(
abi.encodePacked(
"\x19Ethereum Signed Message:\n32",
bytes32(0xb453bd4e271eed985cbab8231da609c4ce0a9cf1f763b6c1594e76315510e0f1)
)
),
28,
0x078461ca16494711508b8602c1ea3ef515e5bfe11d67fc76e45b9217d42059f5,
0x7abdde7cb9bf83b094991e2b6e61fd8b1146de575fd12080d65eaedd2e0c74da
);
require(currentOwner == 0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf, "SIG FAIL ABC");
}
function signatureSplit(bytes memory signatures, uint256 pos)
public
returns (
uint8 v,
bytes32 r,
bytes32 s
)
{
// solhint-disable-next-line no-inline-assembly
assembly {
let signaturePos := mul(0x41, pos)
r := mload(add(signatures, add(signaturePos, 0x20)))
s := mload(add(signatures, add(signaturePos, 0x40)))
/**
* Here we are loading the last 32 bytes, including 31 bytes
* of 's'. There is no 'mload8' to do this.
* 'byte' is not working due to the Solidity parser, so lets
* use the second best option, 'and'
*/
v := and(mload(add(signatures, add(signaturePos, 0x41))), 0xff)
}
}
function deployForwarder(address admin, address superAdmin) internal returns (address) {
Forwarder forwarder = new Forwarder(admin, superAdmin);
return address(forwarder);
}
/// @notice Deploys a Safe singleton plus a SafeProxy and initializes it as a
///         1-of-1 multisig with a single hardcoded owner. Returns the proxy address.
function deploySafe() internal returns (address) {
    address soleOwner = 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266;
    // TODO: get safe signers from env
    Safe singleton = new Safe();
    SafeProxy safeProxy = new SafeProxy(address(singleton));
    address[] memory ownerList = new address[](1);
    ownerList[0] = soleOwner;
    // deployer 1/1. no gas refunds for now
    ISafe(address(safeProxy)).setup(
        ownerList,
        1,
        address(0),
        new bytes(0),
        address(0),
        address(0),
        0,
        payable(address(0))
    );
    return address(safeProxy);
}
/// @notice Deploys a TimelockController whose only proposer is `safe` and whose
///         executor role is open (empty executors array), then transfers the
///         timelock admin role from the deployer to the safe.
/// @param safe Address granted the proposer role and the admin role.
/// @param delay Minimum delay (seconds) enforced on queued operations.
function deployTimelockController(address safe, uint256 delay) internal returns (address) {
    address deployer = 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266;
    address[] memory proposerList = new address[](1);
    proposerList[0] = safe;
    // add SAFE as the only proposer, anyone can execute
    TimelockController controller = new TimelockController(delay, proposerList, new address[](0));
    bytes32 adminRole = keccak256("TIMELOCK_ADMIN_ROLE");
    // make safe admin of timelock, then revoke deployer's rights
    controller.grantRole(adminRole, address(safe));
    controller.revokeRole(adminRole, deployer);
    return address(controller);
}
}

View File

@@ -1,4 +1,4 @@
.PHONY: lint docker clean coordinator
.PHONY: lint docker clean coordinator mock_coordinator
IMAGE_NAME=coordinator
IMAGE_VERSION=latest
@@ -25,6 +25,9 @@ libzkp:
coordinator: libzkp ## Builds the Coordinator instance.
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator ./cmd
mock_coordinator: ## Builds the mocked Coordinator instance.
go build -tags="mock_prover mock_verifier" -o $(PWD)/build/bin/coordinator ./cmd
test-verifier: libzkp
go test -tags ffi -timeout 0 -v ./verifier

View File

@@ -359,7 +359,7 @@ func (m *Manager) handleZkProof(pk string, msg *message.ProofDetail) error {
// store proof content
if msg.Type == message.BasicProve {
if dbErr = m.orm.UpdateProofByHash(m.ctx, msg.ID, msg.Proof.Proof, msg.Proof.FinalPair, proofTimeSec); dbErr != nil {
if dbErr = m.orm.UpdateProofByHash(m.ctx, msg.ID, msg.Proof, proofTimeSec); dbErr != nil {
log.Error("failed to store basic proof into db", "error", dbErr)
return dbErr
}

View File

@@ -13,8 +13,8 @@ create table l1_message
layer1_hash VARCHAR NOT NULL,
layer2_hash VARCHAR DEFAULT NULL,
status INTEGER DEFAULT 1,
created_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
);
comment
@@ -32,7 +32,7 @@ create index l1_message_height_index
CREATE OR REPLACE FUNCTION update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_time = CURRENT_TIMESTAMP;
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

View File

@@ -13,8 +13,8 @@ create table l2_message
layer1_hash VARCHAR DEFAULT NULL,
proof TEXT DEFAULT NULL,
status INTEGER DEFAULT 1,
created_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP
);
comment
@@ -32,7 +32,7 @@ create index l2_message_height_index
CREATE OR REPLACE FUNCTION update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_time = CURRENT_TIMESTAMP;
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

View File

@@ -16,7 +16,6 @@ create table block_batch
total_l2_gas BIGINT NOT NULL,
proving_status INTEGER DEFAULT 1,
proof BYTEA DEFAULT NULL,
instance_commitments BYTEA DEFAULT NULL,
proof_time_sec INTEGER DEFAULT 0,
rollup_status INTEGER DEFAULT 1,
commit_tx_hash VARCHAR DEFAULT NULL,

View File

@@ -10,8 +10,8 @@ create table agg_task
end_batch_hash VARCHAR NOT NULL,
proving_status SMALLINT DEFAULT 1,
proof BYTEA DEFAULT NULL,
created_time TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_time TIMESTAMP(0) DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMP(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP(0) DEFAULT CURRENT_TIMESTAMP
);
create unique index agg_task_hash_uindex
@@ -21,7 +21,7 @@ create unique index agg_task_hash_uindex
CREATE OR REPLACE FUNCTION update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_time = CURRENT_TIMESTAMP;
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

View File

@@ -20,7 +20,7 @@ func NewAggTaskOrm(db *sqlx.DB) AggTaskOrm {
return &aggTaskOrm{db: db}
}
func (a *aggTaskOrm) GetSubProofsByAggTaskID(id string) ([][]byte, error) {
func (a *aggTaskOrm) GetSubProofsByAggTaskID(id string) ([]*message.AggProof, error) {
var (
startIdx uint64
endIdx uint64
@@ -34,14 +34,20 @@ func (a *aggTaskOrm) GetSubProofsByAggTaskID(id string) ([][]byte, error) {
if err != nil {
return nil, err
}
var subProofs [][]byte
var subProofs []*message.AggProof
for rows.Next() {
var proofByt []byte
err = rows.Scan(&proofByt)
if err != nil {
return nil, err
}
subProofs = append(subProofs, proofByt)
var proof message.AggProof
if err := json.Unmarshal(proofByt, &proof); err != nil {
return nil, err
}
subProofs = append(subProofs, &proof)
}
return subProofs, nil
}

View File

@@ -3,6 +3,7 @@ package orm
import (
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"strings"
@@ -12,6 +13,7 @@ import (
"github.com/scroll-tech/go-ethereum/log"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
)
type blockBatchOrm struct {
@@ -62,22 +64,31 @@ func (o *blockBatchOrm) GetProvingStatusByHash(hash string) (types.ProvingStatus
return status, nil
}
func (o *blockBatchOrm) GetVerifiedProofAndInstanceCommitmentsByHash(hash string) ([]byte, []byte, error) {
var proof []byte
var instanceCommitments []byte
row := o.db.QueryRow(`SELECT proof, instance_commitments FROM block_batch WHERE hash = $1 and proving_status = $2`, hash, types.ProvingTaskVerified)
if err := row.Scan(&proof, &instanceCommitments); err != nil {
return nil, nil, err
func (o *blockBatchOrm) GetVerifiedProofByHash(hash string) (*message.AggProof, error) {
var proofBytes []byte
row := o.db.QueryRow(`SELECT proof FROM block_batch WHERE hash = $1 and proving_status = $2`, hash, types.ProvingTaskVerified)
if err := row.Scan(&proofBytes); err != nil {
return nil, err
}
return proof, instanceCommitments, nil
var proof message.AggProof
if err := json.Unmarshal(proofBytes, &proof); err != nil {
return nil, err
}
return &proof, nil
}
func (o *blockBatchOrm) UpdateProofByHash(ctx context.Context, hash string, proof, instanceCommitments []byte, proofTimeSec uint64) error {
func (o *blockBatchOrm) UpdateProofByHash(ctx context.Context, hash string, proof *message.AggProof, proofTimeSec uint64) error {
proofBytes, err := json.Marshal(proof)
if err != nil {
return err
}
db := o.db
if _, err := db.ExecContext(ctx,
db.Rebind(`UPDATE block_batch set proof = ?, instance_commitments = ?, proof_time_sec = ? where hash = ?;`),
proof, instanceCommitments, proofTimeSec, hash,
db.Rebind(`UPDATE block_batch set proof = ?, proof_time_sec = ? where hash = ?;`),
proofBytes, proofTimeSec, hash,
); err != nil {
log.Error("failed to update proof", "err", err)
}

View File

@@ -4,11 +4,11 @@ import (
"context"
"database/sql"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
"github.com/jmoiron/sqlx"
"github.com/scroll-tech/go-ethereum/common"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
)
// L1BlockOrm l1_block operation interface
@@ -50,7 +50,7 @@ type SessionInfoOrm interface {
type AggTaskOrm interface {
GetAssignedAggTasks() ([]*types.AggTask, error)
GetUnassignedAggTasks() ([]*types.AggTask, error)
GetSubProofsByAggTaskID(id string) ([][]byte, error)
GetSubProofsByAggTaskID(id string) ([]*message.AggProof, error)
InsertAggTask(id string, startBatchIndex uint64, startBatchHash string, endBatchIndex uint64, endBatchHash string) error
UpdateAggTaskStatus(aggTaskID string, status types.ProvingStatus) error
UpdateProofForAggTask(aggTaskID string, proof *message.AggProof) error
@@ -60,8 +60,8 @@ type AggTaskOrm interface {
type BlockBatchOrm interface {
GetBlockBatches(fields map[string]interface{}, args ...string) ([]*types.BlockBatch, error)
GetProvingStatusByHash(hash string) (types.ProvingStatus, error)
GetVerifiedProofAndInstanceCommitmentsByHash(hash string) ([]byte, []byte, error)
UpdateProofByHash(ctx context.Context, hash string, proof, instanceCommitments []byte, proofTimeSec uint64) error
GetVerifiedProofByHash(hash string) (*message.AggProof, error)
UpdateProofByHash(ctx context.Context, hash string, proof *message.AggProof, proofTimeSec uint64) error
UpdateProvingStatus(hash string, status types.ProvingStatus) error
ResetProvingStatusFor(before types.ProvingStatus) error
NewBatchInDBTx(dbTx *sqlx.Tx, batchData *types.BatchData) error

View File

@@ -73,8 +73,11 @@ var (
},
}
proof1 = []byte{1}
subProofs = [][]byte{proof1}
proof1 = &message.AggProof{
Proof: []byte{1},
FinalPair: []byte{2},
}
subProofs = []*message.AggProof{proof1}
aggTask1 = &types.AggTask{ID: "test-agg-1"}
aggTask2 = &types.AggTask{ID: "test-agg-2"}
@@ -344,7 +347,7 @@ func testOrmBlockBatch(t *testing.T) {
provingStatus, err := ormBatch.GetProvingStatusByHash(batchHash1)
assert.NoError(t, err)
assert.Equal(t, types.ProvingTaskUnassigned, provingStatus)
err = ormBatch.UpdateProofByHash(context.Background(), batchHash1, proof1, []byte{2}, 1200)
err = ormBatch.UpdateProofByHash(context.Background(), batchHash1, proof1, 1200)
assert.NoError(t, err)
err = ormBatch.UpdateProvingStatus(batchHash1, types.ProvingTaskVerified)
assert.NoError(t, err)
@@ -489,7 +492,7 @@ func testOrmAggTask(t *testing.T) {
provingStatus, err := ormBatch.GetProvingStatusByHash(batchHash1)
assert.NoError(t, err)
assert.Equal(t, types.ProvingTaskUnassigned, provingStatus)
err = ormBatch.UpdateProofByHash(context.Background(), batchHash1, proof1, []byte{2}, 1200)
err = ormBatch.UpdateProofByHash(context.Background(), batchHash1, proof1, 1200)
assert.NoError(t, err)
err = ormBatch.UpdateProvingStatus(batchHash1, types.ProvingTaskVerified)
assert.NoError(t, err)

View File

@@ -29,12 +29,12 @@ libzkp:
roller: libzkp ## Build the Roller instance.
GOBIN=$(PWD)/build/bin go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/roller ./cmd
mock_roller: ## Build the mocked Roller instance.
GOBIN=$(PWD)/build/bin go build -tags="mock_prover mock_verifier" -o $(PWD)/build/bin/roller ./cmd
gpu-roller: libzkp ## Build the GPU Roller instance.
GOBIN=$(PWD)/build/bin go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -tags gpu -o $(PWD)/build/bin/roller ./cmd
mock_roller:
GOBIN=$(PWD)/build/bin go build -tags mock_prover -o $(PWD)/build/bin/roller $(PWD)/cmd
test-prover: libzkp
go test -tags ffi -timeout 0 -v ./prover

View File

@@ -3,10 +3,13 @@
package prover
import (
"scroll-tech/common/types/message"
"math/big"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
"scroll-tech/common/types/message"
"scroll-tech/roller/config"
)
@@ -22,9 +25,10 @@ func NewProver(cfg *config.ProverConfig) (*Prover, error) {
// Prove call rust ffi to generate proof, if first failed, try again.
func (p *Prover) Prove(taskID string, traces []*types.BlockTrace) (*message.AggProof, error) {
_empty := common.BigToHash(big.NewInt(0))
return &message.AggProof{
Proof: []byte{},
Instance: []byte{},
FinalPair: []byte{},
Proof: _empty[:],
Instance: _empty[:],
FinalPair: _empty[:],
}, nil
}