Compare commits

..

28 Commits

Author SHA1 Message Date
colinlyguo
32fd01f66e add fix 2024-07-09 21:42:37 +08:00
colinlyguo
fe328153b8 Revert "remove linux/arm64"
This reverts commit d94ac817ca.
2024-07-09 21:36:46 +08:00
colinlyguo
bee20f86ec Revert "update"
This reverts commit 195c079348.
2024-07-09 21:36:31 +08:00
colinlyguo
d94ac817ca remove linux/arm64 2024-07-09 19:57:34 +08:00
georgehao
195c079348 update 2024-07-09 19:05:25 +08:00
georgehao
102fa0e95a Merge branch 'fix-coordinator-image-build' of github.com:scroll-tech/scroll into fix-coordinator-image-build 2024-07-09 18:45:43 +08:00
georgehao
9d73e9c55a fix 2024-07-09 18:45:34 +08:00
colinlyguo
4a4c51f25f fix 2024-07-09 18:36:26 +08:00
colinlyguo
ba475cb6e5 tweak 2024-07-09 18:35:04 +08:00
georgehao
4cadb0a9bd test 2024-07-09 18:28:24 +08:00
georgehao
5235032989 add ENV CGO_ENABLED=1 2024-07-09 18:17:50 +08:00
colinlyguo
6c43c91640 use scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base image 2024-07-09 18:10:41 +08:00
colinlyguo
5d99825a2a update da-codec dependency 2024-07-09 18:06:21 +08:00
colinlyguo
3bd94cba1b trigger images building 2024-07-09 17:31:04 +08:00
colinlyguo
8674b49771 add CGO_LDFLAGS="-ldl" 2024-07-09 17:27:16 +08:00
Mengran Lan
abe66c6f7e feat(prover): supporting upgrade4 (#1412)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-07-08 19:32:34 +08:00
colinlyguo
7196c5cab6 Merge branch 'develop' into feat/upgrade4 2024-07-08 19:27:05 +08:00
georgehao
380dbba61d feat(coordinator): upgrade 4 (#1399)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
Co-authored-by: Mengran Lan <lanmengran@qq.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-07-08 18:57:53 +08:00
colin
bc9bc5db36 feat(rollup-relayer): finalize bundle (#1411) 2024-07-08 15:26:50 +08:00
colin
beee0c286c feat(coordinator): enhance batch proof sanity check (#1408)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Mengran Lan <mengran@scroll.io>
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
2024-07-02 16:59:52 +08:00
colin
0b32509a55 Merge branch 'develop' into feat/upgrade4 2024-07-02 16:57:26 +08:00
Rohit Narurkar
e679052df1 feat(common): update types as per upgrade4 (#1409)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2024-07-02 00:42:29 +08:00
colin
229809ad6f feat(rollup-relayer): support commitBatchWithBlobProof (#1397) 2024-06-27 21:25:51 +08:00
colin
fb43e87608 feat(rollup-db): add bundle hash (#1401) 2024-06-27 15:00:47 +08:00
colin
54adbb3e77 feat(common): add hardfork util functions supporting both height and timestamp (#1400) 2024-06-27 09:55:07 +08:00
georgehao
664c042a14 Merge branch 'develop' into feat/upgrade4 2024-06-26 21:53:57 +08:00
colin
1a739cd5a7 faet(db): add bundle table (#1395)
Co-authored-by: georgehao <haohongfan@gmail.com>
2024-06-25 10:13:34 +08:00
georgehao
1b6886bb49 init 2024-06-24 09:21:57 +08:00
131 changed files with 1444 additions and 2795 deletions

View File

@@ -2,8 +2,6 @@ name: Docker
on:
push:
tags:
- v**
env:
AWS_REGION: us-west-2

View File

@@ -1,12 +1,12 @@
.PHONY: fmt dev_docker build_test_docker run_test_docker clean update
L2GETH_TAG=scroll-v5.6.3
L2GETH_TAG=scroll-v5.5.1
help: ## Display this help message
@grep -h \
-E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
update: ## Update dependencies
update:
go work sync
cd $(PWD)/bridge-history-api/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/common/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG}&& go mod tidy
@@ -15,14 +15,14 @@ update: ## Update dependencies
cd $(PWD)/rollup/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
cd $(PWD)/tests/integration-test/ && go get -u github.com/scroll-tech/go-ethereum@${L2GETH_TAG} && go mod tidy
lint: ## The code's format and security checks
lint: ## The code's format and security checks.
make -C rollup lint
make -C common lint
make -C coordinator lint
make -C database lint
make -C bridge-history-api lint
fmt: ## Format the code
fmt: ## format the code
go work sync
cd $(PWD)/bridge-history-api/ && go mod tidy
cd $(PWD)/common/ && go mod tidy
@@ -38,10 +38,10 @@ fmt: ## Format the code
goimports -local $(PWD)/rollup/ -w .
goimports -local $(PWD)/tests/integration-test/ -w .
dev_docker: ## Build docker images for development/testing usages
dev_docker: ## build docker images for development/testing usages
docker pull postgres
docker build -t scroll_l1geth --platform linux/amd64 ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth --platform linux/amd64 ./common/testcontainers/docker/l2geth/
docker build -t scroll_l1geth ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth ./common/testcontainers/docker/l2geth/
clean: ## Empty out the bin folder
@rm -rf build/bin

View File

@@ -1,6 +1,7 @@
# Scroll Monorepo
[![rollup](https://github.com/scroll-tech/scroll/actions/workflows/rollup.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/rollup.yml)
[![contracts](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/contracts.yml)
[![bridge-history](https://github.com/scroll-tech/scroll/actions/workflows/bridge_history_api.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/bridge_history_api.yml)
[![coordinator](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/coordinator.yml)
[![prover](https://github.com/scroll-tech/scroll/actions/workflows/prover.yml/badge.svg)](https://github.com/scroll-tech/scroll/actions/workflows/prover.yml)
@@ -16,9 +17,10 @@
├── <a href="./common/">common</a>: Common libraries and types
├── <a href="./coordinator/">coordinator</a>: Prover coordinator service that dispatches proving tasks to provers
├── <a href="./database">database</a>: Database client and schema definition
├── <a href="./src">l2geth</a>: Scroll execution node
├── <a href="./prover">prover</a>: Prover client that runs proof generation for zkEVM circuit and aggregation circuit
├── <a href="./rollup">rollup</a>: Rollup-related services
├── <a href="https://github.com/scroll-tech/scroll-contracts.git">scroll-contracts</a>: Solidity code for the Scroll L1 bridge and rollup contracts, plus the L2 bridge and pre-deployed contracts
├── <a href="./rpc-gateway">rpc-gateway</a>: RPC gateway external repo
└── <a href="./tests">tests</a>: Integration tests
</pre>

File diff suppressed because one or more lines are too long

View File

@@ -19,9 +19,7 @@
"ScrollChainAddr": "0xa13BAF47339d63B743e7Da8741db5456DAc1E556",
"GatewayRouterAddr": "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6",
"MessageQueueAddr": "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B",
"BatchBridgeGatewayAddr": "0x5Bcfd99c34cf7E06fc756f6f5aE7400504852bc4",
"GasTokenGatewayAddr": "0x0000000000000000000000000000000000000000",
"WrappedTokenGatewayAddr": "0x0000000000000000000000000000000000000000"
"BatchBridgeGatewayAddr": "0x0000000000000000000000000000000000000000"
},
"L2": {
"confirmation": 0,
@@ -41,7 +39,7 @@
"PufferGatewayAddr": "0x9eBf2f33526CD571f8b2ad312492cb650870CFd6",
"GatewayRouterAddr": "0x4C0926FF5252A435FD19e10ED15e5a249Ba19d79",
"MessageQueueAddr": "0x5300000000000000000000000000000000000000",
"BatchBridgeGatewayAddr": "0xa1a12158bE6269D7580C63eC5E609Cdc0ddD82bC"
"BatchBridgeGatewayAddr": "0x0000000000000000000000000000000000000000"
},
"db": {
"dsn": "postgres://postgres:123456@localhost:5444/test?sslmode=disable",

View File

@@ -89,7 +89,7 @@ require (
github.com/rjeczalik/notify v0.9.1 // indirect
github.com/rs/cors v1.7.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb // indirect
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7 // indirect
github.com/scroll-tech/zktrie v0.8.4 // indirect
github.com/sethvargo/go-retry v0.2.4 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect

View File

@@ -308,8 +308,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7 h1:Mx3fKRszn6CMeJ7YBTdjLSfXoSDd9hkP8BwhIaaUVfQ=
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,7 +6,6 @@ import (
"path/filepath"
"scroll-tech/common/database"
"scroll-tech/common/utils"
)
// FetcherConfig is the configuration of Layer1 or Layer2 fetcher.
@@ -31,8 +30,6 @@ type FetcherConfig struct {
GatewayRouterAddr string `json:"GatewayRouterAddr"`
MessageQueueAddr string `json:"MessageQueueAddr"`
BatchBridgeGatewayAddr string `json:"BatchBridgeGatewayAddr"`
GasTokenGatewayAddr string `json:"GasTokenGatewayAddr"`
WrappedTokenGatewayAddr string `json:"WrappedTokenGatewayAddr"`
}
// RedisConfig redis config
@@ -67,11 +64,5 @@ func NewConfig(file string) (*Config, error) {
return nil, err
}
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_BRIDGE_HISTORY")
if err != nil {
return nil, err
}
return cfg, nil
}

View File

@@ -141,7 +141,7 @@ func (c *L2MessageFetcher) fetchAndSaveEvents(confirmation uint64) {
return
}
if updateErr := c.eventUpdateLogic.UpdateL2WithdrawMessageProofs(c.ctx, c.l2SyncHeight); updateErr != nil {
if updateErr := c.eventUpdateLogic.UpdateL1BatchIndexAndStatus(c.ctx, c.l2SyncHeight); updateErr != nil {
log.Error("failed to update L1 batch index and status", "from", from, "to", to, "err", updateErr)
return
}

View File

@@ -2,7 +2,7 @@ package logic
import (
"context"
"errors"
"fmt"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
@@ -125,11 +125,6 @@ func (b *EventUpdateLogic) L1InsertOrUpdate(ctx context.Context, l1FetcherResult
}
func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, batchIndex, startBlock, endBlock uint64) error {
if startBlock > endBlock {
log.Warn("start block is greater than end block", "start", startBlock, "end", endBlock)
return nil
}
l2WithdrawMessages, err := b.crossMessageOrm.GetL2WithdrawalsByBlockRange(ctx, startBlock, endBlock)
if err != nil {
log.Error("failed to get L2 withdrawals by batch index", "batch index", batchIndex, "err", err)
@@ -153,7 +148,7 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce)
return errors.New("nonce mismatch")
return fmt.Errorf("nonce mismatch")
}
messageHashes := make([]common.Hash, len(l2WithdrawMessages))
@@ -178,42 +173,24 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
return nil
}
// UpdateL2WithdrawMessageProofs updates L2 withdrawal message proofs.
func (b *EventUpdateLogic) UpdateL2WithdrawMessageProofs(ctx context.Context, height uint64) error {
lastUpdatedFinalizedBlockHeight, err := b.batchEventOrm.GetLastUpdatedFinalizedBlockHeight(ctx)
// UpdateL1BatchIndexAndStatus updates L1 finalized batch index and status
func (b *EventUpdateLogic) UpdateL1BatchIndexAndStatus(ctx context.Context, height uint64) error {
finalizedBatches, err := b.batchEventOrm.GetFinalizedBatchesLEBlockHeight(ctx, height)
if err != nil {
log.Error("failed to get last updated finalized block height", "error", err)
return err
}
finalizedBatches, err := b.batchEventOrm.GetUnupdatedFinalizedBatchesLEBlockHeight(ctx, height)
if err != nil {
log.Error("failed to get unupdated finalized batches >= block height", "error", err)
log.Error("failed to get batches >= block height", "error", err)
return err
}
for _, finalizedBatch := range finalizedBatches {
log.Info("update finalized batch or bundle info of L2 withdrawals", "index", finalizedBatch.BatchIndex, "lastUpdatedFinalizedBlockHeight", lastUpdatedFinalizedBlockHeight, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber)
// This method is compatible with both "finalize by batch" and "finalize by bundle" modes:
// - In "finalize by batch" mode, each batch emits a FinalizedBatch event.
// - In "finalize by bundle" mode, all batches in the bundle emit only one FinalizedBatch event, using the last batch's index and hash.
//
// The method updates two types of information in L2 withdrawal messages:
// 1. Withdraw proof generation:
// - finalize by batch: Generates proofs for each batch.
// - finalize by bundle: Generates proofs for the entire bundle at once.
// 2. Batch index updating:
// - finalize by batch: Updates the batch index for withdrawal messages in each processed batch.
// - finalize by bundle: Updates the batch index for all withdrawal messages in the bundle, using the index of the last batch in the bundle.
if updateErr := b.updateL2WithdrawMessageInfos(ctx, finalizedBatch.BatchIndex, lastUpdatedFinalizedBlockHeight+1, finalizedBatch.EndBlockNumber); updateErr != nil {
log.Error("failed to update L2 withdraw message infos", "index", finalizedBatch.BatchIndex, "lastUpdatedFinalizedBlockHeight", lastUpdatedFinalizedBlockHeight, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber, "error", updateErr)
log.Info("update finalized batch info of L2 withdrawals", "index", finalizedBatch.BatchIndex, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber)
if updateErr := b.updateL2WithdrawMessageInfos(ctx, finalizedBatch.BatchIndex, finalizedBatch.StartBlockNumber, finalizedBatch.EndBlockNumber); updateErr != nil {
log.Error("failed to update L2 withdraw message infos", "index", finalizedBatch.BatchIndex, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber, "error", updateErr)
return updateErr
}
if dbErr := b.batchEventOrm.UpdateBatchEventStatus(ctx, finalizedBatch.BatchIndex); dbErr != nil {
log.Error("failed to update batch event status as updated", "index", finalizedBatch.BatchIndex, "lastUpdatedFinalizedBlockHeight", lastUpdatedFinalizedBlockHeight, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber, "error", dbErr)
log.Error("failed to update batch event status as updated", "index", finalizedBatch.BatchIndex, "start", finalizedBatch.StartBlockNumber, "end", finalizedBatch.EndBlockNumber, "error", dbErr)
return dbErr
}
lastUpdatedFinalizedBlockHeight = finalizedBatch.EndBlockNumber
b.eventUpdateLogicL1FinalizeBatchEventL2BlockUpdateHeight.Set(float64(finalizedBatch.EndBlockNumber))
}
return nil
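The bundle-aware variant above anchors each update range at lastUpdatedFinalizedBlockHeight+1 instead of the batch's own start block. A minimal, self-contained sketch of that range arithmetic (the batch index and block numbers below are made-up illustration values, not from the diff):

package main

import "fmt"

func main() {
	// In "finalize by bundle" mode only the bundle's last batch emits a
	// FinalizedBatch event, so one event may cover blocks from several
	// batches. Starting each pass at lastUpdated+1 keeps the processed
	// block ranges contiguous and gap-free.
	lastUpdated := uint64(99) // end block of the previously updated batch
	events := []struct{ batchIndex, endBlock uint64 }{
		{batchIndex: 3, endBlock: 399}, // one event for batches 1..3
	}
	for _, ev := range events {
		fmt.Printf("update withdrawals in blocks %d..%d (batch %d)\n",
			lastUpdated+1, ev.endBlock, ev.batchIndex)
		lastUpdated = ev.endBlock
	}
}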

View File

@@ -168,14 +168,6 @@ func (e *L1EventParser) ParseL1SingleCrossChainEventLogs(ctx context.Context, lo
lastMessage.L2TokenAddress = event.L2Token.String()
lastMessage.TokenIDs = utils.ConvertBigIntArrayToString(event.TokenIDs)
lastMessage.TokenAmounts = utils.ConvertBigIntArrayToString(event.TokenAmounts)
case backendabi.L1DepositWrappedTokenSig:
event := backendabi.WrappedTokenMessageEvent{}
if err := utils.UnpackLog(backendabi.L1WrappedTokenGatewayABI, &event, "DepositWrappedToken", vlog); err != nil {
log.Error("Failed to unpack DepositWrappedToken event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
lastMessage.Sender = event.From.String()
case backendabi.L1SentMessageEventSig:
event := backendabi.L1SentMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "SentMessage", vlog); err != nil {
@@ -281,7 +273,6 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
BatchStatus: int(btypes.BatchStatusTypeFinalized),
BatchIndex: event.BatchIndex.Uint64(),
BatchHash: event.BatchHash.String(),
L1BlockNumber: vlog.BlockNumber,
})
}
@@ -328,16 +319,6 @@ func (e *L1EventParser) ParseL1MessageQueueEventLogs(logs []types.Log, l1Deposit
QueueIndex: index,
})
}
case backendabi.L1ResetDequeuedTransactionEventSig:
event := backendabi.L1ResetDequeuedTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "ResetDequeuedTransaction", vlog); err != nil {
log.Error("Failed to unpack ResetDequeuedTransaction event", "err", err)
return nil, err
}
l1MessageQueueEvents = append(l1MessageQueueEvents, &orm.MessageQueueEvent{
EventType: btypes.MessageQueueEventTypeResetDequeuedTransaction,
QueueIndex: event.StartIndex.Uint64(),
})
case backendabi.L1DropTransactionEventSig:
event := backendabi.L1DropTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "DropTransaction", vlog); err != nil {

View File

@@ -51,8 +51,11 @@ type L1FetcherLogic struct {
// NewL1FetcherLogic creates L1 fetcher logic
func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic {
addressList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -66,8 +69,11 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
}
gatewayList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -99,26 +105,6 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr))
}
if common.HexToAddress(cfg.ETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.ETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.ETHGatewayAddr))
}
if common.HexToAddress(cfg.WETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WETHGatewayAddr))
}
if common.HexToAddress(cfg.GasTokenGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.GasTokenGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.GasTokenGatewayAddr))
}
if common.HexToAddress(cfg.WrappedTokenGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
}
log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L1FetcherLogic{
@@ -224,7 +210,7 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
Topics: make([][]common.Hash, 1),
}
query.Topics[0] = make([]common.Hash, 16)
query.Topics[0] = make([]common.Hash, 14)
query.Topics[0][0] = backendabi.L1DepositETHSig
query.Topics[0][1] = backendabi.L1DepositERC20Sig
query.Topics[0][2] = backendabi.L1DepositERC721Sig
@@ -238,9 +224,7 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
query.Topics[0][10] = backendabi.L1QueueTransactionEventSig
query.Topics[0][11] = backendabi.L1DequeueTransactionEventSig
query.Topics[0][12] = backendabi.L1DropTransactionEventSig
query.Topics[0][13] = backendabi.L1ResetDequeuedTransactionEventSig
query.Topics[0][14] = backendabi.L1BridgeBatchDepositSig
query.Topics[0][15] = backendabi.L1DepositWrappedTokenSig
query.Topics[0][13] = backendabi.L1BridgeBatchDepositSig
eventLogs, err := f.client.FilterLogs(ctx, query)
if err != nil {
@@ -355,10 +339,6 @@ func (f *L1FetcherLogic) updateMetrics(res L1FilterResult) {
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_skip_message").Add(1)
case btypes.MessageQueueEventTypeDropTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_drop_message").Add(1)
// one ResetDequeuedTransaction event could indicate reset multiple skipped messages,
// this metric only counts the number of events, not the number of skipped messages.
case btypes.MessageQueueEventTypeResetDequeuedTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_reset_skipped_messages").Add(1)
}
}
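For context on the Topics[0] change above: in go-ethereum's log filtering, Topics[0] is an OR-list over event signatures, so its length must track the number of tracked event types (here shrinking from 16 to 14). A hedged sketch of the query shape, assuming the standard ethereum.FilterQuery API; buildQuery and its arguments are illustrative names:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum"
	"github.com/ethereum/go-ethereum/common"
)

// buildQuery assembles a filter like the fetcher's: a log matches when its
// address is in addrs and its first topic equals any entry of sigs.
func buildQuery(from, to uint64, addrs []common.Address, sigs []common.Hash) ethereum.FilterQuery {
	return ethereum.FilterQuery{
		FromBlock: new(big.Int).SetUint64(from),
		ToBlock:   new(big.Int).SetUint64(to),
		Addresses: addrs,
		Topics:    [][]common.Hash{sigs}, // position 0: OR over event signatures
	}
}

func main() {
	q := buildQuery(100, 200, nil, []common.Hash{common.HexToHash("0x01")})
	fmt.Println(len(q.Topics[0])) // 1
}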

View File

@@ -54,6 +54,7 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -67,6 +68,7 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -98,11 +100,6 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr))
}
if common.HexToAddress(cfg.WETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WETHGatewayAddr))
}
log.Info("L2 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L2FetcherLogic{

View File

@@ -53,26 +53,8 @@ func (c *BatchEvent) GetBatchEventSyncedHeightInDB(ctx context.Context) (uint64,
return batch.L1BlockNumber, nil
}
// GetLastUpdatedFinalizedBlockHeight returns the last updated finalized block height in db.
func (c *BatchEvent) GetLastUpdatedFinalizedBlockHeight(ctx context.Context) (uint64, error) {
var batch BatchEvent
db := c.db.WithContext(ctx)
db = db.Model(&BatchEvent{})
db = db.Where("batch_status = ?", btypes.BatchStatusTypeFinalized)
db = db.Where("update_status = ?", btypes.UpdateStatusTypeUpdated)
db = db.Order("batch_index desc")
if err := db.First(&batch).Error; err != nil {
if err == gorm.ErrRecordNotFound {
// No finalized batch found, return genesis batch's end block number.
return 0, nil
}
return 0, fmt.Errorf("failed to get last updated finalized block height, error: %w", err)
}
return batch.EndBlockNumber, nil
}
// GetUnupdatedFinalizedBatchesLEBlockHeight returns the finalized batches with end block <= given block height in db.
func (c *BatchEvent) GetUnupdatedFinalizedBatchesLEBlockHeight(ctx context.Context, blockHeight uint64) ([]*BatchEvent, error) {
// GetFinalizedBatchesLEBlockHeight returns the finalized batches with end block <= given block height in db.
func (c *BatchEvent) GetFinalizedBatchesLEBlockHeight(ctx context.Context, blockHeight uint64) ([]*BatchEvent, error) {
var batches []*BatchEvent
db := c.db.WithContext(ctx)
db = db.Model(&BatchEvent{})
@@ -84,13 +66,16 @@ func (c *BatchEvent) GetUnupdatedFinalizedBatchesLEBlockHeight(ctx context.Conte
if err == gorm.ErrRecordNotFound {
return nil, nil
}
return nil, fmt.Errorf("failed to get unupdated finalized batches >= block height, error: %w", err)
return nil, fmt.Errorf("failed to get batches >= block height, error: %w", err)
}
return batches, nil
}
// InsertOrUpdateBatchEvents inserts a new batch event or updates an existing one based on the BatchStatusType.
func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvents []*BatchEvent) error {
var maxFinalizedBatchIndex uint64
var containsFinalizedEvent bool
var maxL1BlockNumber uint64
for _, l1BatchEvent := range l1BatchEvents {
db := c.db
db = db.WithContext(ctx)
@@ -107,12 +92,13 @@ func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvent
return fmt.Errorf("failed to insert or ignore batch event, error: %w", err)
}
case btypes.BatchStatusTypeFinalized:
db = db.Where("batch_index = ?", l1BatchEvent.BatchIndex)
db = db.Where("batch_hash = ?", l1BatchEvent.BatchHash)
updateFields["batch_status"] = btypes.BatchStatusTypeFinalized
updateFields["l1_block_number"] = l1BatchEvent.L1BlockNumber
if err := db.Updates(updateFields).Error; err != nil {
return fmt.Errorf("failed to update batch event, error: %w", err)
containsFinalizedEvent = true
// get the maxFinalizedBatchIndex, which signals that all batches before it are finalized
if l1BatchEvent.BatchIndex > maxFinalizedBatchIndex {
maxFinalizedBatchIndex = l1BatchEvent.BatchIndex
}
if l1BatchEvent.L1BlockNumber > maxL1BlockNumber {
maxL1BlockNumber = l1BatchEvent.L1BlockNumber
}
case btypes.BatchStatusTypeReverted:
db = db.Where("batch_index = ?", l1BatchEvent.BatchIndex)
@@ -127,6 +113,21 @@ func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvent
}
}
}
if containsFinalizedEvent {
db := c.db
db = db.WithContext(ctx)
db = db.Model(&BatchEvent{})
updateFields := make(map[string]interface{})
// After darwin, a FinalizeBatch event signals that a range of batches is finalized,
// so the batch hash info is lost. However, batch_index alone is enough to update finalized batches.
db = db.Where("batch_index <= ?", maxFinalizedBatchIndex)
db = db.Where("batch_status != ?", btypes.BatchStatusTypeFinalized)
updateFields["batch_status"] = btypes.BatchStatusTypeFinalized
updateFields["l1_block_number"] = maxL1BlockNumber
if err := db.Updates(updateFields).Error; err != nil {
return fmt.Errorf("failed to update batch event, error: %w", err)
}
}
return nil
}
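The bulk update above relies on one invariant: a post-darwin FinalizedBatch event finalizes every batch up to its index. A minimal in-memory sketch of that rule (batchEvent is an illustrative stand-in for the ORM model, not the real type):

package main

import "fmt"

// batchEvent is an illustrative stand-in for the ORM model.
type batchEvent struct {
	index     uint64
	finalized bool
}

// finalizeUpTo applies the post-darwin rule: the event carries only the
// last batch's index, so every earlier non-finalized batch flips too.
func finalizeUpTo(events []batchEvent, maxFinalizedIndex uint64) {
	for i := range events {
		if events[i].index <= maxFinalizedIndex && !events[i].finalized {
			events[i].finalized = true
		}
	}
}

func main() {
	evs := []batchEvent{{1, true}, {2, false}, {3, false}}
	finalizeUpTo(evs, 3) // one event for the bundle ending at batch 3
	fmt.Println(evs)     // [{1 true} {2 true} {3 true}]
}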

View File

@@ -217,12 +217,6 @@ func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1Mes
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = types.TxStatusTypeDropped
case btypes.MessageQueueEventTypeResetDequeuedTransaction:
db = db.Where("tx_status = ?", types.TxStatusTypeSkipped)
// reset skipped messages whose nonce is greater than or equal to the queue index.
db = db.Where("message_nonce >= ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = types.TxStatusTypeSent
}
if err := db.Updates(txStatusUpdateFields).Error; err != nil {
return fmt.Errorf("failed to update tx statuses of L1 message queue events, update fields: %v, error: %w", txStatusUpdateFields, err)
@@ -236,7 +230,7 @@ func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1Mes
db = db.Model(&CrossMessage{})
txHashUpdateFields := make(map[string]interface{})
switch l1MessageQueueEvent.EventType {
case btypes.MessageQueueEventTypeDequeueTransaction, btypes.MessageQueueEventTypeResetDequeuedTransaction:
case btypes.MessageQueueEventTypeDequeueTransaction:
continue
case btypes.MessageQueueEventTypeQueueTransaction:
// only replayMessages or enforced txs remain (whose message hashes would not be found); sendMessages have been filtered out.

View File

@@ -70,7 +70,6 @@ const (
MessageQueueEventTypeQueueTransaction
MessageQueueEventTypeDequeueTransaction
MessageQueueEventTypeDropTransaction
MessageQueueEventTypeResetDequeuedTransaction
)
// BatchStatusType represents the type of batch status.

View File

@@ -38,7 +38,7 @@ func GetBlockNumber(ctx context.Context, client *ethclient.Client, confirmations
// @todo: add unit test.
func UnpackLog(c *abi.ABI, out interface{}, event string, log types.Log) error {
if log.Topics[0] != c.Events[event].ID {
return errors.New("event signature mismatch")
return fmt.Errorf("event signature mismatch")
}
if len(log.Data) > 0 {
if err := c.UnpackIntoInterface(out, event, log.Data); err != nil {
@@ -66,55 +66,32 @@ func ComputeMessageHash(
return common.BytesToHash(crypto.Keccak256(data))
}
type commitBatchArgs struct {
Version uint8
ParentBatchHeader []byte
Chunks [][]byte
SkippedL1MessageBitmap []byte
}
// GetBatchRangeFromCalldata finds the block range from calldata, both inclusive.
func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {
const methodIDLength = 4
if len(txData) < methodIDLength {
return 0, 0, fmt.Errorf("transaction data is too short, length of tx data: %v, minimum length required: %v", len(txData), methodIDLength)
}
method, err := backendabi.IScrollChainABI.MethodById(txData[:methodIDLength])
func GetBatchRangeFromCalldata(calldata []byte) (uint64, uint64, error) {
method := backendabi.IScrollChainABI.Methods["commitBatch"]
values, err := method.Inputs.Unpack(calldata[4:])
if err != nil {
return 0, 0, fmt.Errorf("failed to get method by ID, ID: %v, err: %w", txData[:methodIDLength], err)
// special case: import genesis batch
method = backendabi.IScrollChainABI.Methods["importGenesisBatch"]
_, err2 := method.Inputs.Unpack(calldata[4:])
if err2 == nil {
// genesis batch
return 0, 0, nil
}
// none of "commitBatch" and "importGenesisBatch" match, give up
return 0, 0, err
}
values, err := method.Inputs.Unpack(txData[methodIDLength:])
args := commitBatchArgs{}
err = method.Inputs.Copy(&args, values)
if err != nil {
return 0, 0, fmt.Errorf("failed to unpack transaction data using ABI, tx data: %v, err: %w", txData, err)
}
var chunks [][]byte
if method.Name == "importGenesisBatch" {
return 0, 0, nil
} else if method.Name == "commitBatch" {
type commitBatchArgs struct {
Version uint8
ParentBatchHeader []byte
Chunks [][]byte
SkippedL1MessageBitmap []byte
}
var args commitBatchArgs
if err = method.Inputs.Copy(&args, values); err != nil {
return 0, 0, fmt.Errorf("failed to decode calldata into commitBatch args, values: %+v, err: %w", values, err)
}
chunks = args.Chunks
} else if method.Name == "commitBatchWithBlobProof" {
type commitBatchWithBlobProofArgs struct {
Version uint8
ParentBatchHeader []byte
Chunks [][]byte
SkippedL1MessageBitmap []byte
BlobDataProof []byte
}
var args commitBatchWithBlobProofArgs
if err = method.Inputs.Copy(&args, values); err != nil {
return 0, 0, fmt.Errorf("failed to decode calldata into commitBatchWithBlobProofArgs args, values: %+v, err: %w", values, err)
}
chunks = args.Chunks
return 0, 0, err
}
var startBlock uint64
@@ -123,14 +100,14 @@ func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {
// decode blocks from the chunk, assuming there are no empty chunks
// | 1 byte | 60 bytes | ... | 60 bytes |
// | num blocks | block 1 | ... | block n |
if len(chunks) == 0 {
if len(args.Chunks) == 0 {
return 0, 0, errors.New("invalid chunks")
}
chunk := chunks[0]
chunk := args.Chunks[0]
block := chunk[1:61] // first block in chunk
startBlock = binary.BigEndian.Uint64(block[0:8])
chunk = chunks[len(chunks)-1]
chunk = args.Chunks[len(args.Chunks)-1]
lastBlockIndex := int(chunk[0]) - 1
block = chunk[1+lastBlockIndex*60 : 1+lastBlockIndex*60+60] // last block in chunk
finishBlock = binary.BigEndian.Uint64(block[0:8])
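A worked example of the chunk layout described above (| 1 byte num blocks | 60-byte block | ... | 60-byte block |), with the block number in the first 8 big-endian bytes of each block; the helper name and values are illustrative:

package main

import (
	"encoding/binary"
	"fmt"
)

// firstAndLastBlock decodes the block numbers at both ends of a chunk:
// byte 0 holds the block count, block i occupies bytes 1+i*60 .. 1+(i+1)*60,
// and each block's number sits in its first 8 bytes (big-endian).
func firstAndLastBlock(chunk []byte) (uint64, uint64) {
	first := binary.BigEndian.Uint64(chunk[1:9])
	lastIdx := int(chunk[0]) - 1
	off := 1 + lastIdx*60
	return first, binary.BigEndian.Uint64(chunk[off : off+8])
}

func main() {
	chunk := make([]byte, 1+2*60) // two blocks, other fields zeroed
	chunk[0] = 2
	binary.BigEndian.PutUint64(chunk[1:9], 100)
	binary.BigEndian.PutUint64(chunk[61:69], 101)
	fmt.Println(firstAndLastBlock(chunk)) // 100 101
}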

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,5 @@
# Download Go dependencies
FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
FROM golang:1.21-alpine3.19 as base
WORKDIR /src
COPY go.mod* ./
@@ -11,13 +11,11 @@ FROM base as builder
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
cd /src/bridge-history-api/cmd/api && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" go build -v -p 4 -o /bin/bridgehistoryapi-api
cd /src/bridge-history-api/cmd/api && go build -v -p 4 -o /bin/bridgehistoryapi-api
# Pull bridgehistoryapi-api into a second stage deploy ubuntu container
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
# Pull bridgehistoryapi-api into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/bridgehistoryapi-api /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-api"]
ENTRYPOINT ["bridgehistoryapi-api"]

View File

@@ -1,5 +1,5 @@
# Download Go dependencies
FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
FROM golang:1.21-alpine3.19 as base
WORKDIR /src
COPY go.mod* ./
@@ -11,13 +11,11 @@ FROM base as builder
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
cd /src/bridge-history-api/cmd/fetcher && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" go build -v -p 4 -o /bin/bridgehistoryapi-fetcher
cd /src/bridge-history-api/cmd/fetcher && go build -v -p 4 -o /bin/bridgehistoryapi-fetcher
# Pull bridgehistoryapi-fetcher into a second stage deploy ubuntu container
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
# Pull bridgehistoryapi-fetcher into a second stage deploy alpine container
FROM alpine:latest
COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-fetcher"]
ENTRYPOINT ["bridgehistoryapi-fetcher"]

View File

@@ -23,6 +23,7 @@ COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
COPY ./prover/go.* ./prover/
COPY ./tests/integration-test/go.* ./tests/integration-test/
COPY ./bridge-history-api/go.* ./bridge-history-api/
RUN go mod download -x
@@ -30,15 +31,16 @@ RUN go mod download -x
# Build coordinator
FROM base as builder
RUN apt-get install libc6-dev
COPY . .
RUN cp -r ./common/libzkp/interface ./coordinator/internal/logic/verifier/lib
COPY --from=zkp-builder /app/target/release/libzkp.so ./coordinator/internal/logic/verifier/lib/
RUN cd ./coordinator && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" make coordinator_api_skip_libzkp && mv ./build/bin/coordinator_api /bin/coordinator_api && mv internal/logic/verifier/lib /bin/
RUN cd ./coordinator && make coordinator_api_skip_libzkp && mv ./build/bin/coordinator_api /bin/coordinator_api && mv internal/logic/verifier/lib /bin/
# Pull coordinator into a second stage deploy ubuntu container
FROM ubuntu:20.04
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
# ENV CHAIN_ID=534353
RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib

View File

@@ -7,6 +7,7 @@ COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
COPY ./prover/go.* ./prover/
COPY ./tests/integration-test/go.* ./tests/integration-test/
COPY ./bridge-history-api/go.* ./bridge-history-api/
RUN go mod download -x
@@ -15,13 +16,13 @@ RUN go mod download -x
FROM base as builder
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
cd /src/coordinator/cmd/cron/ && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" go build -v -p 4 -o /bin/coordinator_cron
cd /src/coordinator/cmd/cron/ && CGO_LDFLAGS="-ldl" go build -v -p 4 -o /bin/coordinator_cron
# Pull coordinator into a second stage deploy ubuntu container
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
ENV CGO_LDFLAGS="-ldl"
COPY --from=builder /bin/coordinator_cron /bin/
WORKDIR /app
ENTRYPOINT ["coordinator_cron"]
ENTRYPOINT ["coordinator_cron"]

View File

@@ -7,6 +7,7 @@ COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
COPY ./prover/go.* ./prover/
COPY ./tests/integration-test/go.* ./tests/integration-test/
COPY ./bridge-history-api/go.* ./bridge-history-api/
RUN go mod download -x

View File

@@ -7,6 +7,7 @@ COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
COPY ./prover/go.* ./prover/
COPY ./tests/integration-test/go.* ./tests/integration-test/
COPY ./bridge-history-api/go.* ./bridge-history-api/
RUN go mod download -x
@@ -21,10 +22,8 @@ RUN --mount=target=. \
# Pull gas_oracle into a second stage deploy ubuntu container
FROM ubuntu:20.04
RUN apt update && apt install ca-certificates -y
ENV CGO_LDFLAGS="-ldl"
COPY --from=builder /bin/gas_oracle /bin/
WORKDIR /app
ENTRYPOINT ["gas_oracle"]
ENTRYPOINT ["gas_oracle"]

View File

@@ -7,6 +7,7 @@ COPY ./rollup/go.* ./rollup/
COPY ./common/go.* ./common/
COPY ./coordinator/go.* ./coordinator/
COPY ./database/go.* ./database/
COPY ./prover/go.* ./prover/
COPY ./tests/integration-test/go.* ./tests/integration-test/
COPY ./bridge-history-api/go.* ./bridge-history-api/
RUN go mod download -x
@@ -21,8 +22,6 @@ RUN --mount=target=. \
# Pull rollup_relayer into a second stage deploy ubuntu container
FROM ubuntu:20.04
RUN apt update && apt install ca-certificates -y
ENV CGO_LDFLAGS="-ldl"
COPY --from=builder /bin/rollup_relayer /bin/

View File

@@ -1,12 +1,83 @@
package forks
import (
"math"
"math/big"
"sort"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/params"
)
// CollectSortedForkHeights returns a sorted set of block numbers at which one or more forks are activated
func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool, map[string]uint64) {
type nameFork struct {
name string
block *big.Int
}
forkHeightNameMap := make(map[uint64]string)
for _, fork := range []nameFork{
{name: "homestead", block: config.HomesteadBlock},
{name: "daoFork", block: config.DAOForkBlock},
{name: "eip150", block: config.EIP150Block},
{name: "eip155", block: config.EIP155Block},
{name: "eip158", block: config.EIP158Block},
{name: "byzantium", block: config.ByzantiumBlock},
{name: "constantinople", block: config.ConstantinopleBlock},
{name: "petersburg", block: config.PetersburgBlock},
{name: "istanbul", block: config.IstanbulBlock},
{name: "muirGlacier", block: config.MuirGlacierBlock},
{name: "berlin", block: config.BerlinBlock},
{name: "london", block: config.LondonBlock},
{name: "arrowGlacier", block: config.ArrowGlacierBlock},
{name: "archimedes", block: config.ArchimedesBlock},
{name: "shanghai", block: config.ShanghaiBlock},
{name: "bernoulli", block: config.BernoulliBlock},
{name: "curie", block: config.CurieBlock},
} {
if fork.block == nil {
continue
}
height := fork.block.Uint64()
// only keep the latest fork at each height, discard the rest
forkHeightNameMap[height] = fork.name
}
forkHeightsMap := make(map[uint64]bool)
forkNameHeightMap := make(map[string]uint64)
for height, name := range forkHeightNameMap {
forkHeightsMap[height] = true
forkNameHeightMap[name] = height
}
var forkHeights []uint64
for height := range forkHeightsMap {
forkHeights = append(forkHeights, height)
}
sort.Slice(forkHeights, func(i, j int) bool {
return forkHeights[i] < forkHeights[j]
})
return forkHeights, forkHeightsMap, forkNameHeightMap
}
// BlockRange returns the block range of the hard fork.
// The caller must ensure forkHeights is sorted in increasing order.
func BlockRange(currentForkHeight uint64, forkHeights []uint64) (from, to uint64) {
to = math.MaxInt64
for _, height := range forkHeights {
if currentForkHeight < height {
to = height
return
}
from = height
}
return
}
// GetHardforkName returns the name of the hardfork active at the given block height and timestamp.
// It checks the chain configuration to determine which hardfork is active.
func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string {
@@ -16,10 +87,8 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin
return "bernoulli"
} else if !config.IsDarwin(blockTimestamp) {
return "curie"
} else if !config.IsDarwinV2(blockTimestamp) {
return "darwin"
} else {
return "darwinV2"
return "darwin"
}
}
@@ -32,10 +101,8 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin
return encoding.CodecV1
} else if !config.IsDarwin(blockTimestamp) {
return encoding.CodecV2
} else if !config.IsDarwinV2(blockTimestamp) {
return encoding.CodecV3
} else {
return encoding.CodecV4
return encoding.CodecV3
}
}
@@ -48,8 +115,6 @@ func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestam
return 15
} else if !config.IsDarwin(blockTimestamp) {
return 45
} else if !config.IsDarwinV2(blockTimestamp) {
return 45
} else {
return 45
}
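The three hunks above revert the same pattern: each fork bumps the hardfork name, codec version, and chunk limit exactly once, and darwin becomes the terminal case (darwinV2/CodecV4 dropped). A self-contained sketch of the resulting fork-to-codec mapping; the booleans stand in for the chain config's Is* checks:

package main

import "fmt"

type codec int

const (
	codecV0 codec = iota
	codecV1
	codecV2
	codecV3
)

// codecFor mirrors GetCodecVersion after the revert: pre-bernoulli batches
// use codecV0, and the chain tops out at codecV3 once darwin is active.
func codecFor(isBernoulli, isCurie, isDarwin bool) codec {
	switch {
	case !isBernoulli:
		return codecV0
	case !isCurie:
		return codecV1
	case !isDarwin:
		return codecV2
	default:
		return codecV3
	}
}

func main() {
	fmt.Println(codecFor(true, true, false)) // a curie-era block -> 2 (codecV2)
}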

common/forks/forks_test.go (new file, 102 lines)
View File

@@ -0,0 +1,102 @@
package forks
import (
"math"
"math/big"
"testing"
"github.com/scroll-tech/go-ethereum/params"
"github.com/stretchr/testify/require"
)
func TestCollectSortedForkBlocks(t *testing.T) {
l, m, n := CollectSortedForkHeights(&params.ChainConfig{
ArchimedesBlock: big.NewInt(0),
ShanghaiBlock: big.NewInt(3),
BernoulliBlock: big.NewInt(3),
CurieBlock: big.NewInt(4),
})
require.Equal(t, l, []uint64{
0,
3,
4,
})
require.Equal(t, map[uint64]bool{
3: true,
4: true,
0: true,
}, m)
require.Equal(t, map[string]uint64{
"archimedes": 0,
"bernoulli": 3,
"curie": 4,
}, n)
}
func TestBlockRange(t *testing.T) {
tests := []struct {
name string
forkHeight uint64
forkHeights []uint64
expectedFrom uint64
expectedTo uint64
}{
{
name: "ToInfinite",
forkHeight: 300,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 300,
expectedTo: math.MaxInt64,
},
{
name: "To300",
forkHeight: 200,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 200,
expectedTo: 300,
},
{
name: "To200",
forkHeight: 100,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To100",
forkHeight: 0,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 0,
expectedTo: 100,
},
{
name: "To200-1",
forkHeight: 100,
forkHeights: []uint64{100, 200},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To2",
forkHeight: 1,
forkHeights: []uint64{1, 2},
expectedFrom: 1,
expectedTo: 2,
},
{
name: "ToInfinite-1",
forkHeight: 0,
forkHeights: []uint64{0},
expectedFrom: 0,
expectedTo: math.MaxInt64,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
from, to := BlockRange(test.forkHeight, test.forkHeights)
require.Equal(t, test.expectedFrom, from)
require.Equal(t, test.expectedTo, to)
})
}
}

View File

@@ -13,7 +13,7 @@ require (
github.com/modern-go/reflect2 v1.0.2
github.com/orcaman/concurrent-map v1.0.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/stretchr/testify v1.9.0
github.com/testcontainers/testcontainers-go v0.30.0

View File

@@ -633,8 +633,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7 h1:Mx3fKRszn6CMeJ7YBTdjLSfXoSDd9hkP8BwhIaaUVfQ=
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,8 +6,6 @@ export RUST_BACKTRACE=full
export RUST_LOG=debug
export RUST_MIN_STACK=100000000
export PROVER_OUTPUT_DIR=test_zkp_test
export SCROLL_PROVER_ASSETS_DIR=/assets/test_assets
export DARWIN_V2_TEST_DIR=/assets
#export LD_LIBRARY_PATH=/:/usr/local/cuda/lib64
mkdir -p $PROVER_OUTPUT_DIR
@@ -15,16 +13,32 @@ mkdir -p $PROVER_OUTPUT_DIR
REPO=$(realpath ../..)
function build_test_bins() {
cd impl
cargo build --release
ln -f -s $(realpath target/release/libzkp.so) $REPO/prover/core/lib
ln -f -s $(realpath target/release/libzkp.so) $REPO/coordinator/internal/logic/verifier/lib
cd $REPO/prover
make tests_binary
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd $REPO/coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd $REPO/common/libzkp
}
function build_test_bins_old() {
cd $REPO
cd prover
make libzkp
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd ..
cd coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd ..
cd common/libzkp
}
build_test_bins
rm -rf $PROVER_OUTPUT_DIR/*
#rm -rf test_zkp_test/*
#rm -rf prover.log verifier.log
$REPO/prover/prover.test --exact zk_circuits_handler::darwin_v2::tests::test_circuits 2>&1 | tee prover.log
#$REPO/prover/core.test -test.v 2>&1 | tee prover.log
$REPO/coordinator/verifier.test -test.v 2>&1 | tee verifier.log

View File

@@ -30,8 +30,8 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -39,9 +39,9 @@ dependencies = [
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.12.0",
"eth-types",
"ethers-core",
"gadgets 0.12.0",
"gadgets",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
@@ -59,41 +59,7 @@ dependencies = [
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.12.0",
]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
"zkevm-circuits",
]
[[package]]
@@ -171,9 +137,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.86"
version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
[[package]]
name = "arc-swap"
@@ -570,47 +536,24 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types",
"ethers-core",
"ethers-providers",
"ethers-signers",
"gadgets 0.12.0",
"external-tracer",
"gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.12.0",
"mpt-zktrie 0.12.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"mock",
"mpt-zktrie",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
@@ -1182,36 +1125,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [
"base64 0.13.1",
"ethers-core",
"ethers-signers",
"halo2curves",
"hex",
"itertools 0.11.0",
"log",
"num",
"num-bigint",
"poseidon-base",
"regex",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
"uint",
]
[[package]]
name = "eth-types"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1228,6 +1143,7 @@ dependencies = [
"revm-primitives",
"serde",
"serde_json",
"serde_stacker",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
@@ -1366,24 +1282,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"geth-utils 0.12.0",
"log",
"serde",
"serde_json",
"serde_stacker",
]
[[package]]
name = "external-tracer"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"geth-utils 0.13.0",
"eth-types",
"geth-utils",
"log",
"serde",
"serde_json",
@@ -1561,22 +1464,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
"strum 0.25.0",
]
[[package]]
name = "gadgets"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"eth-types",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1596,18 +1487,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [
"env_logger 0.10.0",
"gobuild",
"log",
]
[[package]]
name = "geth-utils"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -2355,28 +2236,13 @@ dependencies = [
[[package]]
name = "mock"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types",
"ethers-core",
"ethers-signers",
"external-tracer 0.12.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]]
name = "mock"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"external-tracer 0.13.0",
"external-tracer",
"itertools 0.11.0",
"log",
"rand",
@@ -2385,30 +2251,16 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie",
]
[[package]]
@@ -2871,17 +2723,17 @@ dependencies = [
[[package]]
name = "prover"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator 0.12.0",
"aggregator",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.12.0",
"bus-mapping",
"chrono",
"dotenvy",
"eth-types 0.12.0",
"eth-types",
"ethers-core",
"git-version",
"halo2_proofs",
@@ -2889,7 +2741,7 @@ dependencies = [
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.12.0",
"mpt-zktrie",
"num-bigint",
"rand",
"rand_xorshift",
@@ -2900,41 +2752,7 @@ dependencies = [
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.12.0",
]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
"zkevm-circuits",
]
[[package]]
@@ -3140,7 +2958,7 @@ dependencies = [
[[package]]
name = "revm-precompile"
version = "7.0.0"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#36c304d9e9ba4e4b2d5468d91a6bd27210133b6a"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#8543dd627348907773d8057807b6a310b276bb30"
dependencies = [
"aurora-engine-modexp",
"c-kzg",
@@ -3156,7 +2974,7 @@ dependencies = [
[[package]]
name = "revm-primitives"
version = "4.0.0"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#36c304d9e9ba4e4b2d5468d91a6bd27210133b6a"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#8543dd627348907773d8057807b6a310b276bb30"
dependencies = [
"alloy-primitives",
"auto_impl",
@@ -3707,7 +3525,7 @@ checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
[[package]]
name = "snark-verifier"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#58c46b74c73156b9e09dc27617369d2acfb4461b"
source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#fe1f8906041ad323034881fbd808908250d44829"
dependencies = [
"bytes",
"ethereum-types",
@@ -3730,7 +3548,7 @@ dependencies = [
[[package]]
name = "snark-verifier-sdk"
version = "0.0.1"
source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#58c46b74c73156b9e09dc27617369d2acfb4461b"
source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#fe1f8906041ad323034881fbd808908250d44829"
dependencies = [
"bincode",
"ethereum-types",
@@ -4542,18 +4360,18 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping 0.12.0",
"bus-mapping",
"either",
"env_logger 0.10.0",
"eth-types 0.12.0",
"eth-types",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.12.0",
"gadgets",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
@@ -4563,50 +4381,8 @@ dependencies = [
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.12.0",
"mpt-zktrie 0.12.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"mock",
"mpt-zktrie",
"num",
"num-bigint",
"poseidon-circuit",
@@ -4628,15 +4404,13 @@ dependencies = [
name = "zkp"
version = "0.1.0"
dependencies = [
"anyhow",
"base64 0.13.1",
"env_logger 0.9.3",
"halo2_proofs",
"libc",
"log",
"once_cell",
"prover 0.12.0",
"prover 0.13.0",
"prover",
"serde",
"serde_derive",
"serde_json",
@@ -4649,16 +4423,7 @@ version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie_rust",
]
[[package]]
@@ -4675,20 +4440,6 @@ dependencies = [
"strum_macros 0.24.3",
]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]]
name = "zstd"
version = "0.13.0"

View File

@@ -13,6 +13,8 @@ halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers-etherscan = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
@@ -23,11 +25,7 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
# darwin
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin_v2
prover_v5 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.0"
env_logger = "0.9.0"
@@ -37,7 +35,6 @@ once_cell = "1.19"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0.66"
anyhow = "1.0.86"
[profile.test]
opt-level = 3

View File

@@ -0,0 +1,198 @@
use crate::{
types::{CheckChunkProofsResponse, ProofResult},
utils::{
c_char_to_str, c_char_to_vec, file_exists, panic_catch, string_to_c_char, vec_to_c_char,
OUTPUT_DIR,
},
};
use libc::c_char;
use prover::{
aggregator::{Prover, Verifier},
check_chunk_hashes,
consts::AGG_VK_FILENAME,
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env, ptr::null};
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_batch_prover(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_batch_prove");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
// The VK file must exist here; within the prover library it is optional and only logged as a warning.
if !file_exists(assets_dir, &AGG_VK_FILENAME) {
panic!("{} must exist in folder {}", *AGG_VK_FILENAME, assets_dir);
}
let prover = Prover::from_dirs(params_dir, assets_dir);
PROVER.set(prover).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_batch_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_batch_verify");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_dirs(params_dir, assets_dir);
VERIFIER.set(verifier).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_batch_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_vk());
vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn check_chunk_proofs(chunk_proofs: *const c_char) -> *const c_char {
let check_result: Result<bool, String> = panic_catch(|| {
let chunk_proofs = c_char_to_vec(chunk_proofs);
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
.map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
if chunk_proofs.is_empty() {
return Err("provided chunk proofs are empty.".to_string());
}
let prover_ref = PROVER.get().expect("failed to get reference to PROVER.");
let valid = prover_ref.check_protocol_of_chunks(&chunk_proofs);
Ok(valid)
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match check_result {
Ok(valid) => CheckChunkProofsResponse {
ok: valid,
error: None,
},
Err(err) => CheckChunkProofsResponse {
ok: false,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_batch_proof(
chunk_hashes: *const c_char,
chunk_proofs: *const c_char,
) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let chunk_hashes = c_char_to_vec(chunk_hashes);
let chunk_proofs = c_char_to_vec(chunk_proofs);
let chunk_hashes = serde_json::from_slice::<Vec<ChunkInfo>>(&chunk_hashes)
.map_err(|e| format!("failed to deserialize chunk hashes: {e:?}"))?;
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
.map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
if chunk_hashes.len() != chunk_proofs.len() {
return Err(format!("chunk hashes and chunk proofs lengths mismatch: chunk_hashes.len() = {}, chunk_proofs.len() = {}",
chunk_hashes.len(), chunk_proofs.len()));
}
let chunk_hashes_proofs: Vec<(_,_)> = chunk_hashes
.into_iter()
.zip(chunk_proofs.clone())
.collect();
check_chunk_hashes("", &chunk_hashes_proofs).map_err(|e| format!("failed to check chunk info: {e:?}"))?;
let batch = BatchProvingTask {
chunk_proofs
};
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_agg_evm_proof(batch, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate proof: {e:?}"))?;
serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"bernoulli" => 2,
"curie" => 3,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as curie");
3
}
};
let verified = panic_catch(|| {
if fork_id == 2 {
// before upgrade#3(DA Compression)
verify_evm_calldata(
include_bytes!("plonk_verifier_0.10.3.bin").to_vec(),
proof.calldata(),
)
} else {
VERIFIER.get().unwrap().verify_agg_evm_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}
// This function is only used for debugging on Go side.
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn block_traces_to_chunk_info(block_traces: *const c_char) -> *const c_char {
let block_traces = c_char_to_vec(block_traces);
let block_traces = serde_json::from_slice::<Vec<BlockTrace>>(&block_traces).unwrap();
let witness_block = chunk_trace_to_witness_block(block_traces).unwrap();
let chunk_info = ChunkInfo::from_witness_block(&witness_block, false);
let chunk_info_bytes = serde_json::to_vec(&chunk_info).unwrap();
vec_to_c_char(chunk_info_bytes)
}

View File

@@ -0,0 +1,108 @@
use crate::{
types::ProofResult,
utils::{
c_char_to_str, c_char_to_vec, file_exists, panic_catch, string_to_c_char, vec_to_c_char,
OUTPUT_DIR,
},
};
use libc::c_char;
use prover::{
consts::CHUNK_VK_FILENAME,
utils::init_env_and_log,
zkevm::{Prover, Verifier},
BlockTrace, ChunkProof, ChunkProvingTask,
};
use std::{cell::OnceCell, env, ptr::null};
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_chunk_prover(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_chunk_prove");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
// The VK file must exist here; within the prover library it is optional and only logged as a warning.
if !file_exists(assets_dir, &CHUNK_VK_FILENAME) {
panic!("{} must exist in folder {}", *CHUNK_VK_FILENAME, assets_dir);
}
let prover = Prover::from_dirs(params_dir, assets_dir);
PROVER.set(prover).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_chunk_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_chunk_verify");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_dirs(params_dir, assets_dir);
VERIFIER.set(verifier).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_chunk_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_vk());
vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_chunk_proof(block_traces: *const c_char) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let block_traces = c_char_to_vec(block_traces);
let block_traces = serde_json::from_slice::<Vec<BlockTrace>>(&block_traces)
.map_err(|e| format!("failed to deserialize block traces: {e:?}"))?;
let chunk = ChunkProvingTask::from(block_traces);
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_chunk_proof(chunk, None, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate proof: {e:?}"))?;
serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(proof: *const c_char) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER.get().unwrap().verify_chunk_proof(proof));
verified.unwrap_or(false) as c_char
}

View File

@@ -1,63 +1,4 @@
mod batch;
mod chunk;
mod types;
mod utils;
mod verifier;
use crate::utils::{c_char_to_str, c_char_to_vec};
use libc::c_char;
use prover_v5::utils::init_env_and_log;
use verifier::{TaskType, VerifierConfig};
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init(config: *const c_char) {
init_env_and_log("ffi_init");
let config_str = c_char_to_str(config);
let verifier_config = serde_json::from_str::<VerifierConfig>(config_str).unwrap();
verifier::init(verifier_config);
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Chunk)
}
fn verify_proof(proof: *const c_char, fork_name: *const c_char, task_type: TaskType) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let verifier = verifier::get_verifier(fork_name_str);
if let Err(e) = verifier {
log::warn!("failed to get verifier, error: {:#}", e);
return 0 as c_char;
}
match verifier.unwrap().verify(task_type, proof) {
Err(e) => {
log::error!("{:?} verify failed, error: {:#}", task_type, e);
false as c_char
}
Ok(result) => result as c_char,
}
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Batch)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Bundle)
}
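A minimal Go-side sketch of how these entry points are typically driven over cgo. The wrapper below is illustrative and not part of the repository: it assumes the Rust crate is linked as libzkp and that a C header declaring verify_chunk_proof is on the include path (the header appears later in this diff).

package verifier

/*
#cgo LDFLAGS: -lzkp -ldl -lm
#include <stdlib.h>
#include "libzkp.h"
*/
import "C"

import "unsafe"

// VerifyChunkProof hands a JSON-encoded proof and a fork name to the FFI
// and interprets the returned char as a boolean.
func VerifyChunkProof(proofJSON []byte, forkName string) bool {
	cProof := C.CString(string(proofJSON))
	defer C.free(unsafe.Pointer(cProof))
	cFork := C.CString(forkName)
	defer C.free(unsafe.Pointer(cFork))
	return C.verify_chunk_proof(cProof, cFork) != 0
}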

Binary file not shown.

View File

@@ -0,0 +1,22 @@
use serde::{Deserialize, Serialize};
// Represents the result of a chunk proof checking operation.
// `ok` indicates whether the proof checking was successful.
// `error` provides additional details in case the check failed.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CheckChunkProofsResponse {
pub ok: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
}
// Encapsulates the result from generating a proof.
// `message` holds the generated proof in byte slice format.
// `error` provides additional details in case the proof generation failed.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ProofResult {
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<Vec<u8>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
}
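On the consuming side these payloads decode into matching structs. Below is a sketch of hypothetical Go counterparts (they are not in this diff). One subtlety worth noting: serde_json encodes Vec<u8> as a JSON array of integers, not base64, so Go's []byte (which expects a base64 string) does not decode the message field directly.

package message

import "encoding/json"

// CheckChunkProofsResponse mirrors the Rust struct of the same name.
type CheckChunkProofsResponse struct {
	OK    bool   `json:"ok"`
	Error string `json:"error,omitempty"`
}

// ProofResult mirrors the Rust ProofResult. Message is decoded as []int
// because serde serializes Vec<u8> as an array of numbers.
type ProofResult struct {
	Message []int  `json:"message,omitempty"`
	Error   string `json:"error,omitempty"`
}

// DecodeProofResult parses the JSON blob returned over the FFI.
func DecodeProofResult(raw []byte) (*ProofResult, error) {
	var r ProofResult
	if err := json.Unmarshal(raw, &r); err != nil {
		return nil, err
	}
	return &r, nil
}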

View File

@@ -1,9 +1,29 @@
use once_cell::sync::Lazy;
use std::{
ffi::CStr,
env,
ffi::{CStr, CString},
os::raw::c_char,
panic::{catch_unwind, AssertUnwindSafe},
path::PathBuf,
};
// Only used for debugging.
pub(crate) static OUTPUT_DIR: Lazy<Option<String>> =
Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
/// # Safety
#[no_mangle]
pub extern "C" fn free_c_chars(ptr: *mut c_char) {
if ptr.is_null() {
log::warn!("Attempted to free a null pointer!");
return;
}
unsafe {
let _ = CString::from_raw(ptr);
}
}
pub(crate) fn c_char_to_str(c: *const c_char) -> &'static str {
let cstr = unsafe { CStr::from_ptr(c) };
cstr.to_str().unwrap()
@@ -14,6 +34,21 @@ pub(crate) fn c_char_to_vec(c: *const c_char) -> Vec<u8> {
cstr.to_bytes().to_vec()
}
pub(crate) fn string_to_c_char(string: String) -> *const c_char {
CString::new(string).unwrap().into_raw()
}
pub(crate) fn vec_to_c_char(bytes: Vec<u8>) -> *const c_char {
CString::new(bytes).unwrap().into_raw()
}
pub(crate) fn file_exists(dir: &str, filename: &str) -> bool {
let mut path = PathBuf::from(dir);
path.push(filename);
path.exists()
}
pub(crate) fn panic_catch<F: FnOnce() -> R, R>(f: F) -> Result<R, String> {
catch_unwind(AssertUnwindSafe(f)).map_err(|err| {
if let Some(s) = err.downcast_ref::<String>() {

View File

@@ -1,110 +0,0 @@
mod darwin;
mod darwin_v2;
use anyhow::{bail, Result};
use darwin::DarwinVerifier;
use darwin_v2::DarwinV2Verifier;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover_v4::utils::load_params;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, collections::BTreeMap, rc::Rc};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
Chunk,
Batch,
Bundle,
}
pub trait ProofVerifier {
fn verify(&self, task_type: TaskType, proof: Vec<u8>) -> Result<bool>;
}
#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
pub fork_name: String,
pub params_path: String,
pub assets_path: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
pub low_version_circuit: CircuitConfig,
pub high_version_circuit: CircuitConfig,
}
type HardForkName = String;
struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();
static mut VERIFIER_LOW: OnceCell<VerifierPair> = OnceCell::new();
static mut PARAMS_MAP: OnceCell<BTreeMap<u32, ParamsKZG<Bn256>>> = OnceCell::new();
pub fn init(config: VerifierConfig) {
let low_conf = config.low_version_circuit;
std::env::set_var("SCROLL_PROVER_ASSETS_DIR", &low_conf.assets_path);
let params_degrees = [
*prover_v4::config::LAYER2_DEGREE,
*prover_v4::config::LAYER4_DEGREE,
];
// params should be shared between low and high
let mut params_map = BTreeMap::new();
for degree in params_degrees {
if let std::collections::btree_map::Entry::Vacant(e) = params_map.entry(degree) {
match load_params(&low_conf.params_path, degree, None) {
Ok(params) => {
e.insert(params);
}
Err(e) => panic!(
"failed to load params, degree {}, dir {}, err {}",
degree, low_conf.params_path, e
),
}
}
}
unsafe {
PARAMS_MAP.set(params_map).unwrap_unchecked();
}
let verifier = DarwinVerifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &low_conf.assets_path);
unsafe {
VERIFIER_LOW
.set(VerifierPair(
low_conf.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
let high_conf = config.high_version_circuit;
let verifier =
DarwinV2Verifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &high_conf.assets_path);
unsafe {
VERIFIER_HIGH
.set(VerifierPair(
high_conf.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
}
pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
unsafe {
if let Some(verifier) = VERIFIER_LOW.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
}
if let Some(verifier) = VERIFIER_HIGH.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
}
}
bail!("failed to get verifier, key not found, {}", fork_name)
}
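The registry holds exactly two verifiers keyed by hard-fork name, and lookup scans the low slot before the high one. A hypothetical Go rendering of the same dispatch, useful for seeing the control flow without the unsafe statics:

package verifier

import "fmt"

// ProofVerifier matches the Rust trait of the same name.
type ProofVerifier interface {
	Verify(taskType int, proof []byte) (bool, error)
}

// pair mirrors VerifierPair: a hard-fork name bound to a verifier.
type pair struct {
	forkName string
	verifier ProofVerifier
}

var low, high *pair // populated once during init, like the OnceCells above

// getVerifier scans the low- then high-version slot for a fork-name match.
func getVerifier(forkName string) (ProofVerifier, error) {
	for _, p := range []*pair{low, high} {
		if p != nil && p.forkName == forkName {
			return p.verifier, nil
		}
	}
	return nil, fmt.Errorf("failed to get verifier, key not found, %s", forkName)
}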

View File

@@ -1,48 +0,0 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v4::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
pub struct DarwinVerifier<'params> {
verifier: Verifier<'params>,
agg_verifier: AggVerifier<'params>,
}
impl<'params> DarwinVerifier<'params> {
pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
Self {
verifier,
agg_verifier,
}
}
}
impl<'params> ProofVerifier for DarwinVerifier<'params> {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
let result = panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.verifier.verify_chunk_proof(proof)
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_batch_proof(&proof)
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_bundle_proof(proof)
}
});
result.map_err(|e| anyhow::anyhow!(e))
}
}

View File

@@ -1,48 +0,0 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v5::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
pub struct DarwinV2Verifier<'params> {
verifier: Verifier<'params>,
agg_verifier: AggVerifier<'params>,
}
impl<'params> DarwinV2Verifier<'params> {
pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
Self {
verifier,
agg_verifier,
}
}
}
impl<'params> ProofVerifier for DarwinV2Verifier<'params> {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
let result = panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.verifier.verify_chunk_proof(proof)
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_batch_proof(&proof)
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_bundle_proof(proof)
}
});
result.map_err(|e| anyhow::anyhow!(e))
}
}

View File

@@ -1,10 +1,15 @@
// BatchVerifier is used to:
// - Verify a batch proof
// - Verify a bundle proof
void init(char* config);
void init_batch_prover(char* params_dir, char* assets_dir);
void init_batch_verifier(char* params_dir, char* assets_dir);
char* get_batch_vk();
char* check_chunk_proofs(char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs);
char verify_batch_proof(char* proof, char* fork_name);
char verify_bundle_proof(char* proof, char* fork_name);
void init_chunk_prover(char* params_dir, char* assets_dir);
void init_chunk_verifier(char* params_dir, char* assets_dir);
char* get_chunk_vk();
char* gen_chunk_proof(char* block_traces);
char verify_chunk_proof(char* proof);
char verify_chunk_proof(char* proof, char* fork_name);
char* block_traces_to_chunk_info(char* block_traces);
void free_c_chars(char* ptr);
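Reading the header top to bottom gives the intended call order: initialize a prover or verifier, then feed it JSON-encoded tasks. An end-to-end sketch from Go follows; it is illustrative only, initialization is shown inline for brevity (a real caller would do it once at startup), and the directory paths and JSON inputs are placeholders.

package prover

/*
#cgo LDFLAGS: -lzkp -ldl -lm
#include <stdlib.h>
#include "libzkp.h"
*/
import "C"

import "unsafe"

// GenBatchProof initializes the batch prover, then requests a proof for the
// given chunk hashes and chunk proofs (both JSON-encoded arrays). The result
// is the JSON-encoded ProofResult defined in types.rs.
func GenBatchProof(paramsDir, assetsDir string, chunkHashes, chunkProofs []byte) []byte {
	cParams := C.CString(paramsDir)
	defer C.free(unsafe.Pointer(cParams))
	cAssets := C.CString(assetsDir)
	defer C.free(unsafe.Pointer(cAssets))
	C.init_batch_prover(cParams, cAssets)

	cHashes := C.CString(string(chunkHashes))
	defer C.free(unsafe.Pointer(cHashes))
	cProofs := C.CString(string(chunkProofs))
	defer C.free(unsafe.Pointer(cProofs))
	out := C.gen_batch_proof(cHashes, cProofs)
	if out == nil {
		return nil
	}
	result := []byte(C.GoString(out))
	C.free_c_chars(out) // release the Rust-allocated response
	return result
}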

View File

@@ -2,7 +2,6 @@ package testcontainers
import (
"context"
"errors"
"fmt"
"log"
"os"
@@ -115,7 +114,7 @@ func (t *TestcontainerApps) StartPoSL1Container() error {
// GetPoSL1EndPoint returns the endpoint of the running PoS L1 endpoint
func (t *TestcontainerApps) GetPoSL1EndPoint() (string, error) {
if t.poSL1Container == nil {
return "", errors.New("PoS L1 container is not running")
return "", fmt.Errorf("PoS L1 container is not running")
}
contrainer, err := t.poSL1Container.ServiceContainer(context.Background(), "geth")
if err != nil {
@@ -136,7 +135,7 @@ func (t *TestcontainerApps) GetPoSL1Client() (*ethclient.Client, error) {
// GetDBEndPoint returns the endpoint of the running postgres container
func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
if t.postgresContainer == nil || !t.postgresContainer.IsRunning() {
return "", errors.New("postgres is not running")
return "", fmt.Errorf("postgres is not running")
}
return t.postgresContainer.ConnectionString(context.Background(), "sslmode=disable")
}
@@ -144,7 +143,7 @@ func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
// GetL2GethEndPoint returns the endpoint of the running L2Geth container
func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
if t.l2GethContainer == nil || !t.l2GethContainer.IsRunning() {
return "", errors.New("l2 geth is not running")
return "", fmt.Errorf("l2 geth is not running")
}
endpoint, err := t.l2GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
if err != nil {
@@ -218,7 +217,7 @@ func findProjectRootDir() (string, error) {
parentDir := filepath.Dir(currentDir)
if parentDir == currentDir {
return "", errors.New("go.work file not found in any parent directory")
return "", fmt.Errorf("go.work file not found in any parent directory")
}
currentDir = parentDir
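The substance of this hunk is swapping fmt.Errorf for errors.New wherever the message contains no format verbs; the two produce identical errors there, but errors.New skips the formatting machinery and is what linters commonly suggest. A quick illustration:

package main

import (
	"errors"
	"fmt"
)

func main() {
	// Identical messages; errors.New is idiomatic when nothing is formatted.
	e1 := errors.New("postgres is not running")
	e2 := fmt.Errorf("postgres is not running")
	fmt.Println(e1.Error() == e2.Error()) // true
}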

View File

@@ -109,10 +109,6 @@ const (
ProverTaskFailureTypeVerifiedFailed
// ProverTaskFailureTypeServerError indicates a server-side error occurred
ProverTaskFailureTypeServerError
// ProverTaskFailureTypeObjectAlreadyVerified object (batch/chunk) already verified; may exist in the test env when ENABLE_TEST_ENV_BYPASS_FEATURES is true
ProverTaskFailureTypeObjectAlreadyVerified
// ProverTaskFailureTypeReassignedByAdmin reassigned by admin; this value is used in the admin system and defined here for clarity
ProverTaskFailureTypeReassignedByAdmin
)
func (r ProverTaskFailureType) String() string {
@@ -127,10 +123,6 @@ func (r ProverTaskFailureType) String() string {
return "prover task failure verified failed"
case ProverTaskFailureTypeServerError:
return "prover task failure server exception"
case ProverTaskFailureTypeObjectAlreadyVerified:
return "prover task failure object already verified"
case ProverTaskFailureTypeReassignedByAdmin:
return "prover task failure reassigned by admin"
default:
return fmt.Sprintf("illegal prover task failure type (%d)", int32(r))
}

View File

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/go-ethereum/common"
)
@@ -51,15 +52,22 @@ type ChunkTaskDetail struct {
// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
ParentStateRoot common.Hash `json:"parent_state_root"`
ParentBatchHash common.Hash `json:"parent_batch_hash"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
}
// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
BatchProofs []*BatchProof `json:"batch_proofs"`
ChainID uint64 `json:"chain_id"`
FinalizedBatchHash common.Hash `json:"finalized_batch_hash"`
FinalizedStateRoot common.Hash `json:"finalized_state_root"`
PendingBatchHash common.Hash `json:"pending_batch_hash"`
PendingStateRoot common.Hash `json:"pending_state_root"`
PendingWithdrawRoot common.Hash `json:"pending_withdraw_root"`
BatchProofs []*BatchProof `json:"batch_proofs"`
}
// ChunkInfo is for calculating pi_hash for chunk
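For a sense of the wire format, here is a hedged sketch of marshaling the new BundleTaskDetail. The type is re-declared locally (with BatchProofs omitted) so the snippet runs on its own; the field names and json tags match the struct above, and the chain ID and hash are placeholder values.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"
)

// Local stand-in for message.BundleTaskDetail with BatchProofs omitted.
type BundleTaskDetail struct {
	ChainID             uint64      `json:"chain_id"`
	FinalizedBatchHash  common.Hash `json:"finalized_batch_hash"`
	FinalizedStateRoot  common.Hash `json:"finalized_state_root"`
	PendingBatchHash    common.Hash `json:"pending_batch_hash"`
	PendingStateRoot    common.Hash `json:"pending_state_root"`
	PendingWithdrawRoot common.Hash `json:"pending_withdraw_root"`
}

func main() {
	d := BundleTaskDetail{
		ChainID:            534352, // Scroll mainnet, used here only as an example
		FinalizedBatchHash: common.HexToHash("0x01"),
	}
	b, _ := json.MarshalIndent(d, "", "  ")
	fmt.Println(string(b)) // keys appear exactly as the prover will read them
}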

View File

@@ -2,7 +2,6 @@ package utils
import (
"crypto/ecdsa"
"errors"
"fmt"
"os"
"path/filepath"
@@ -29,7 +28,7 @@ func LoadOrCreateKey(keystorePath string, keystorePassword string) (*ecdsa.Priva
} else if err != nil {
return nil, err
} else if fi.IsDir() {
return nil, errors.New("keystorePath cannot be a dir")
return nil, fmt.Errorf("keystorePath cannot be a dir")
}
keyjson, err := os.ReadFile(filepath.Clean(keystorePath))

View File

@@ -9,14 +9,10 @@ import (
"math/big"
"os"
"path/filepath"
"reflect"
"strconv"
"strings"
"time"
"github.com/modern-go/reflect2"
"github.com/scroll-tech/go-ethereum/core"
"github.com/scroll-tech/go-ethereum/log"
)
// TryTimes try run several times until the function return true.
@@ -82,89 +78,3 @@ func ReadGenesis(genesisPath string) (*core.Genesis, error) {
}
return genesis, file.Close()
}
// OverrideConfigWithEnv recursively overrides config values with environment variables
func OverrideConfigWithEnv(cfg interface{}, prefix string) error {
v := reflect.ValueOf(cfg)
if v.Kind() != reflect.Ptr || v.IsNil() {
return nil
}
v = v.Elem()
t := v.Type()
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
fieldValue := v.Field(i)
if !fieldValue.CanSet() {
continue
}
tag := field.Tag.Get("json")
if tag == "" {
tag = strings.ToLower(field.Name)
}
envKey := prefix + "_" + strings.ToUpper(tag)
switch fieldValue.Kind() {
case reflect.Ptr:
if !fieldValue.IsNil() {
err := OverrideConfigWithEnv(fieldValue.Interface(), envKey)
if err != nil {
return err
}
}
case reflect.Struct:
err := OverrideConfigWithEnv(fieldValue.Addr().Interface(), envKey)
if err != nil {
return err
}
default:
if envValue, exists := os.LookupEnv(envKey); exists {
log.Info("Overriding config with env var", "key", envKey)
err := setField(fieldValue, envValue)
if err != nil {
return err
}
}
}
}
return nil
}
// setField sets the value of a field based on the environment variable value
func setField(field reflect.Value, value string) error {
switch field.Kind() {
case reflect.String:
field.SetString(value)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
intValue, err := strconv.ParseInt(value, 10, 64)
if err != nil {
return err
}
field.SetInt(intValue)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
uintValue, err := strconv.ParseUint(value, 10, 64)
if err != nil {
return err
}
field.SetUint(uintValue)
case reflect.Float32, reflect.Float64:
floatValue, err := strconv.ParseFloat(value, 64)
if err != nil {
return err
}
field.SetFloat(floatValue)
case reflect.Bool:
boolValue, err := strconv.ParseBool(value)
if err != nil {
return err
}
field.SetBool(boolValue)
default:
return fmt.Errorf("unsupported type: %v", field.Kind())
}
return nil
}
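The override derives one environment key per leaf field: the prefix plus the uppercased json tag at each nesting level, joined with underscores. A small runnable sketch of just that rule (the helper and names are illustrative):

package main

import (
	"fmt"
	"strings"
)

// envKey reproduces the key derivation used by OverrideConfigWithEnv above.
func envKey(prefix string, tags ...string) string {
	key := prefix
	for _, t := range tags {
		key += "_" + strings.ToUpper(t)
	}
	return key
}

func main() {
	// A field tagged `json:"driver_name"` inside a struct tagged `json:"db"`
	// is overridden by SCROLL_COORDINATOR_DB_DRIVER_NAME.
	fmt.Println(envKey("SCROLL_COORDINATOR", "db", "driver_name"))
}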

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.4.51"
var tag = "v4.4.23"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -26,11 +26,8 @@ coordinator_api: libzkp ## Builds the Coordinator api instance.
coordinator_cron:
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_cron ./cmd/cron
coordinator_tool:
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_tool ./cmd/tool
coordinator_api_skip_libzkp:
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_api ./cmd/api
CGO_ENABLED=1 CGO_LDFLAGS="-ldl" go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_api ./cmd/api
mock_coordinator_api: ## Builds the mocked Coordinator instance.
go build -tags="mock_prover mock_verifier" -o $(PWD)/build/bin/coordinator_api ./cmd/api

View File

@@ -88,25 +88,13 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
}
// Reset prover manager config for manager test cases.
cfg.ProverManager = &coordinatorConfig.ProverManager{
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{
MockMode: true,
LowVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwin",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwinV2",
MinProverVersion: "v4.3.0",
},
},
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{MockMode: true},
BatchCollectionTimeSec: 60,
ChunkCollectionTimeSec: 60,
SessionAttempts: 10,
MaxVerifierWorkers: 4,
MinProverVersion: "v1.0.0",
}
endpoint, err := c.testApps.GetDBEndPoint()
if err != nil {

View File

@@ -1,101 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"os"
"github.com/scroll-tech/go-ethereum/log"
"github.com/urfave/cli/v2"
"scroll-tech/common/database"
"scroll-tech/common/types/message"
"scroll-tech/common/utils"
"scroll-tech/common/version"
"scroll-tech/coordinator/internal/config"
"scroll-tech/coordinator/internal/orm"
coordinatorType "scroll-tech/coordinator/internal/types"
)
var app *cli.App
func init() {
// Set up coordinator app info.
app = cli.NewApp()
app.Action = action
app.Name = "coordinator-tool"
app.Usage = "The Scroll L2 Coordinator Tool"
app.Version = version.Version
app.Flags = append(app.Flags, utils.CommonFlags...)
app.Before = func(ctx *cli.Context) error {
return utils.LogSetup(ctx)
}
}
func action(ctx *cli.Context) error {
cfgFile := ctx.String(utils.ConfigFileFlag.Name)
cfg, err := config.NewConfig(cfgFile)
if err != nil {
log.Crit("failed to load config file", "config file", cfgFile, "error", err)
}
db, err := database.InitDB(cfg.DB)
if err != nil {
log.Crit("failed to init db connection", "err", err)
}
defer func() {
if err = database.CloseDB(db); err != nil {
log.Error("can not close db connection", "error", err)
}
}()
batchOrm := orm.NewBatch(db)
taskID := "fa9a290c8f1a46dc626fa67d626fadfe4803968ce776383996f3ae12504a2591"
batches, err := batchOrm.GetBatchesByBundleHash(ctx.Context, taskID)
if err != nil {
log.Error("failed to get batch proofs for batch", "task_id", taskID, "error", err)
return err
}
if len(batches) == 0 {
log.Error("failed to get batch proofs for bundle, no batch found", "task_id", taskID)
return fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", taskID)
}
var batchProofs []*message.BatchProof
for _, batch := range batches {
var proof message.BatchProof
if encodeErr := json.Unmarshal(batch.Proof, &proof); encodeErr != nil {
log.Error("failed to unmarshal batch proof")
return fmt.Errorf("failed to unmarshal proof: %w, bundle hash: %v, batch hash: %v", encodeErr, taskID, batch.Hash)
}
batchProofs = append(batchProofs, &proof)
}
taskDetail := message.BundleTaskDetail{
BatchProofs: batchProofs,
}
batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
log.Error("failed to marshal batch proof")
return fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", taskID, err)
}
taskMsg := &coordinatorType.GetTaskSchema{
TaskID: taskID,
TaskType: int(message.ProofTypeBundle),
TaskData: string(batchProofsBytes),
}
log.Info("task_msg", "data", taskMsg)
return nil
}
func main() {
// RunApp the coordinator.
if err := app.Run(os.Args); err != nil {
_, _ = fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}

View File

@@ -6,20 +6,13 @@
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"fork_name": "bernoulli",
"mock_mode": true,
"low_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwin",
"min_prover_version": "v4.4.43"
},
"high_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwinV2",
"min_prover_version": "v4.4.45"
}
}
"params_path": "",
"assets_path": ""
},
"max_verifier_workers": 4,
"min_prover_version": "v1.0.0"
},
"db": {
"driver_name": "postgres",

View File

@@ -9,7 +9,7 @@ require (
github.com/google/uuid v1.6.0
github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/shopspring/decimal v1.3.1
github.com/stretchr/testify v1.9.0
@@ -18,37 +18,13 @@ require (
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
)
require (
github.com/bytedance/sonic v1.10.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.15.5 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.1.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/text v0.16.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
)
require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/bytedance/sonic v1.10.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.0 // indirect
github.com/consensys/bavard v0.1.13 // indirect
github.com/consensys/gnark-crypto v0.12.1 // indirect
@@ -56,11 +32,27 @@ require (
github.com/crate-crypto/go-kzg-4844 v1.0.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/ethereum/c-kzg-4844 v1.0.2 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.15.5 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/holiman/uint256 v1.2.4 // indirect
github.com/iden3/go-iden3-crypto v0.0.16 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.1.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/common v0.48.0 // indirect
@@ -71,11 +63,16 @@ require (
github.com/supranational/blst v0.3.12 // indirect
github.com/tklauser/go-sysconf v0.3.14 // indirect
github.com/tklauser/numcpus v0.8.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
golang.org/x/crypto v0.24.0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.21.0 // indirect
golang.org/x/text v0.16.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/tmplfunc v0.0.3 // indirect
)

View File

@@ -173,8 +173,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068 h1:oVGwhg4cCq35B04eG/S4OBXDwXiFH7+LezuH2ZTRBPs=
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7 h1:Mx3fKRszn6CMeJ7YBTdjLSfXoSDd9hkP8BwhIaaUVfQ=
github.com/scroll-tech/da-codec v0.0.0-20240703091800-5b6cded48ab7/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,7 +6,6 @@ import (
"path/filepath"
"scroll-tech/common/database"
"scroll-tech/common/utils"
)
// ProverManager loads prover manager configuration items.
@@ -24,6 +23,10 @@ type ProverManager struct {
ChunkCollectionTimeSec int `json:"chunk_collection_time_sec"`
// BundleCollectionTimeSec bundle Proof collection time (in seconds).
BundleCollectionTimeSec int `json:"bundle_collection_time_sec"`
// Max number of workers in verifier worker pool
MaxVerifierWorkers int `json:"max_verifier_workers"`
// MinProverVersion is the minimum version of the prover that is required.
MinProverVersion string `json:"min_prover_version"`
}
// L2 loads l2geth configuration items.
@@ -47,19 +50,12 @@ type Config struct {
Auth *Auth `json:"auth"`
}
// CircuitConfig circuit items.
type CircuitConfig struct {
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
MinProverVersion string `json:"min_prover_version"`
}
// VerifierConfig load zk verifier config.
type VerifierConfig struct {
MockMode bool `json:"mock_mode"`
LowVersionCircuit *CircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
ForkName string `json:"fork_name"`
MockMode bool `json:"mock_mode"`
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
}
// NewConfig returns a new instance of Config.
@@ -75,11 +71,5 @@ func NewConfig(file string) (*Config, error) {
return nil, err
}
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_COORDINATOR")
if err != nil {
return nil, err
}
return cfg, nil
}

View File

@@ -44,33 +44,21 @@ func (a *AuthController) Login(c *gin.Context) (interface{}, error) {
return "", fmt.Errorf("check the login parameter failure: %w", err)
}
hardForkName, err := a.loginLogic.ProverHardForkName(&login)
if err != nil {
return "", fmt.Errorf("prover hard fork name failure:%w", err)
}
// check the challenge is used, if used, return failure
if err := a.loginLogic.InsertChallengeString(c, login.Message.Challenge); err != nil {
return "", fmt.Errorf("login insert challenge string failure:%w", err)
}
returnData := types.LoginParameterWithHardForkName{
HardForkName: hardForkName,
LoginParameter: login,
}
return returnData, nil
return login, nil
}
// PayloadFunc returns jwt.MapClaims with {public key, prover name}.
func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims {
v, ok := data.(types.LoginParameterWithHardForkName)
v, ok := data.(types.LoginParameter)
if !ok {
return jwt.MapClaims{}
}
return jwt.MapClaims{
types.HardForkName: v.HardForkName,
types.PublicKey: v.PublicKey,
types.ProverName: v.Message.ProverName,
types.ProverVersion: v.Message.ProverVersion,
@@ -91,10 +79,5 @@ func (a *AuthController) IdentityHandler(c *gin.Context) interface{} {
if proverVersion, ok := claims[types.ProverVersion]; ok {
c.Set(types.ProverVersion, proverVersion)
}
if hardForkName, ok := claims[types.HardForkName]; ok {
c.Set(types.HardForkName, hardForkName)
}
return nil
}

View File

@@ -26,7 +26,7 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
panic("proof receiver new verifier failure")
}
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap, "bundleVerifier", vf.BundleVkMap)
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap)
Auth = NewAuthController(db, cfg, vf)
GetTask = NewGetTaskController(cfg, chainCfg, db, reg)

View File

@@ -1,7 +1,6 @@
package api
import (
"errors"
"fmt"
"math/rand"
@@ -50,15 +49,15 @@ func NewGetTaskController(cfg *config.Config, chainCfg *params.ChainConfig, db *
func (ptc *GetTaskController) incGetTaskAccessCounter(ctx *gin.Context) error {
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist {
return errors.New("get public key from context failed")
return fmt.Errorf("get public key from context failed")
}
proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
if !proverNameExist {
return errors.New("get prover name from context failed")
return fmt.Errorf("get prover name from context failed")
}
proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
if !proverVersionExist {
return errors.New("get prover version from context failed")
return fmt.Errorf("get prover version from context failed")
}
ptc.getTaskAccessCounter.With(prometheus.Labels{
@@ -98,7 +97,7 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
}
if result == nil {
nerr := errors.New("get empty prover task")
nerr := fmt.Errorf("get empty prover task")
types.RenderFailure(ctx, types.ErrCoordinatorEmptyProofData, nerr)
return
}
@@ -108,6 +107,10 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
func (ptc *GetTaskController) proofType(para *coordinatorType.GetTaskParameter) message.ProofType {
var proofTypes []message.ProofType
if para.TaskType != 0 {
proofTypes = append(proofTypes, message.ProofType(para.TaskType))
}
for _, proofType := range para.TaskTypes {
proofTypes = append(proofTypes, message.ProofType(proofType))
}

View File

@@ -363,7 +363,7 @@ func (c *Collector) checkBundleAllBatchReady() {
for _, bundle := range bundles {
allReady, checkErr := c.batchOrm.CheckIfBundleBatchProofsAreReady(c.ctx, bundle.Hash)
if checkErr != nil {
log.Warn("checkBundleAllBatchReady CheckIfBundleBatchProofsAreReady failure", "error", checkErr, "hash", bundle.Hash)
log.Warn("checkBatchAllChunkReady CheckIfBatchChunkProofsAreReady failure", "error", checkErr, "hash", bundle.Hash)
continue
}
@@ -371,8 +371,8 @@ func (c *Collector) checkBundleAllBatchReady() {
continue
}
if updateErr := c.bundleOrm.UpdateBatchProofsStatusByBundleHash(c.ctx, bundle.Hash, types.BatchProofsStatusReady); updateErr != nil {
log.Warn("checkBundleAllBatchReady UpdateBatchProofsStatusByBundleHash failure", "error", checkErr, "hash", bundle.Hash)
if updateErr := c.bundleOrm.UpdateBatchProofsStatusByBatchHash(c.ctx, bundle.Hash, types.BatchProofsStatusReady); updateErr != nil {
log.Warn("checkBundleAllBatchReady UpdateBatchProofsStatusByBatchHash failure", "error", checkErr, "hash", bundle.Hash)
}
}

View File

@@ -3,7 +3,6 @@ package auth
import (
"errors"
"fmt"
"strings"
"github.com/gin-gonic/gin"
"github.com/scroll-tech/go-ethereum/log"
@@ -24,31 +23,31 @@ type LoginLogic struct {
chunkVks map[string]struct{}
batchVKs map[string]struct{}
bundleVks map[string]struct{}
proverVersionHardForkMap map[string]string
}
// NewLoginLogic new a LoginLogic
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
proverVersionHardForkMap := make(map[string]string)
if version.CheckScrollRepoVersion(cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
log.Error("config file error, low verifier min_prover_version should not be greater than high verifier min_prover_version",
"low verifier min_prover_version", cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion,
"high verifier min_prover_version", cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion)
panic("verifier config file error")
l := &LoginLogic{
cfg: cfg,
chunkVks: make(map[string]struct{}),
batchVKs: make(map[string]struct{}),
bundleVks: make(map[string]struct{}),
challengeOrm: orm.NewChallenge(db),
}
proverVersionHardForkMap[cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion] = cfg.ProverManager.Verifier.LowVersionCircuit.ForkName
proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = cfg.ProverManager.Verifier.HighVersionCircuit.ForkName
return &LoginLogic{
cfg: cfg,
chunkVks: vf.ChunkVKMap,
batchVKs: vf.BatchVKMap,
bundleVks: vf.BundleVkMap,
challengeOrm: orm.NewChallenge(db),
proverVersionHardForkMap: proverVersionHardForkMap,
for _, vk := range vf.ChunkVKMap {
l.chunkVks[vk] = struct{}{}
}
for _, vk := range vf.BatchVKMap {
l.batchVKs[vk] = struct{}{}
}
for _, vk := range vf.BundleVkMap {
l.bundleVks[vk] = struct{}{}
}
return l
}
// InsertChallengeString insert and check the challenge string is existed
@@ -57,16 +56,16 @@ func (l *LoginLogic) InsertChallengeString(ctx *gin.Context, challenge string) e
}
func (l *LoginLogic) Check(login *types.LoginParameter) error {
verify, err := login.Verify()
if err != nil || !verify {
log.Error("auth message verify failure", "prover_name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
return errors.New("auth message verify failure")
if login.PublicKey != "" {
verify, err := login.Verify()
if err != nil || !verify {
return errors.New("auth message verify failure")
}
}
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion) {
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.MinProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, login.Message.ProverVersion)
l.cfg.ProverManager.MinProverVersion, login.Message.ProverVersion)
}
if len(login.Message.ProverTypes) > 0 {
@@ -85,37 +84,21 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
vks[vk] = struct{}{}
}
default:
log.Error("invalid prover_type", "value", proverType, "prover name", login.Message.ProverName, "prover_version", login.Message.ProverVersion)
log.Error("invalid prover_type", "value", proverType)
}
}
for _, vk := range login.Message.VKs {
if _, ok := vks[vk]; !ok {
log.Error("vk inconsistency", "prover vk", vk, "prover name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
log.Error("vk inconsistency", "prover vk", vk)
if !version.CheckScrollProverVersion(login.Message.ProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s",
version.Version, login.Message.ProverVersion)
}
// if the prover reports the same prover version
return errors.New("incompatible vk. please check your params files or config files")
return fmt.Errorf("incompatible vk. please check your params files or config files")
}
}
}
return nil
}
// ProverHardForkName retrieves hard fork name which prover belongs to
func (l *LoginLogic) ProverHardForkName(login *types.LoginParameter) (string, error) {
proverVersionSplits := strings.Split(login.Message.ProverVersion, "-")
if len(proverVersionSplits) == 0 {
return "", fmt.Errorf("invalid prover_version:%s", login.Message.ProverVersion)
}
proverVersion := proverVersionSplits[0]
if hardForkName, ok := l.proverVersionHardForkMap[proverVersion]; ok {
return hardForkName, nil
}
return "", fmt.Errorf("invalid prover_version:%s", login.Message.ProverVersion)
}
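ProverHardForkName keys the lookup on the leading semver component of the reported version. A self-contained sketch of the same logic; the map contents mirror the low/high circuit config and are illustrative:

package main

import (
	"fmt"
	"strings"
)

// hardForkName mirrors ProverHardForkName above: split the version on "-"
// and look the first component up in the version-to-fork map. Note that
// strings.Split never returns an empty slice, so parts[0] is always safe.
func hardForkName(proverVersion string, versionForkMap map[string]string) (string, error) {
	parts := strings.Split(proverVersion, "-")
	if name, ok := versionForkMap[parts[0]]; ok {
		return name, nil
	}
	return "", fmt.Errorf("invalid prover_version:%s", proverVersion)
}

func main() {
	m := map[string]string{"v4.4.43": "darwin", "v4.4.45": "darwinV2"}
	name, _ := hardForkName("v4.4.45-abc1234", m)
	fmt.Println(name) // darwinV2
}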

View File

@@ -9,9 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/da-codec/encoding/codecv4"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
@@ -31,8 +29,9 @@ import (
type BatchProverTask struct {
BaseProverTask
batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
batchAttemptsExceedTotal prometheus.Counter
batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
}
// NewBatchProverTask new a batch collector
@@ -48,6 +47,10 @@ func NewBatchProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
batchAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_batch_attempts_exceed_total",
Help: "Total number of batch attempts exceed.",
}),
batchTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_batch_get_task_total",
Help: "Total number of batch get task.",
@@ -120,13 +123,6 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
if hardForkName != taskCtx.HardForkName {
bp.recoverActiveAttempts(ctx, batchTask)
log.Error("incompatible prover version for batch task", "requisite hard fork name", hardForkName,
"prover hard fork name", taskCtx.HardForkName, "task_id", batchTask.Hash)
return nil, ErrCoordinatorInternalFailure
}
proverTask := orm.ProverTask{
TaskID: batchTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
@@ -212,9 +208,19 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
chunkInfos = append(chunkInfos, &chunkInfo)
}
taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
if err != nil {
return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
taskDetail := message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if hardForkName == "darwin" {
taskDetail.ParentStateRoot = common.HexToHash(chunks[0].ParentChunkStateRoot)
taskDetail.ParentBatchHash = common.HexToHash(batch.ParentBatchHash)
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(batch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header, taskID:%s err:%w", task.TaskID, decodeErr)
}
taskDetail.BatchHeader = batchHeader
}
chunkProofsBytes, err := json.Marshal(taskDetail)
@@ -237,34 +243,3 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
log.Error("failed to recover batch active attempts", "hash", batchTask.Hash, "error", err)
}
}
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []*message.ChunkProof) (*message.BatchTaskDetail, error) {
taskDetail := &message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV3 && encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV4 {
return taskDetail, nil
}
if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV3 {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v3) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
} else {
batchHeader, decodeErr := codecv4.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v4) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
}
return taskDetail, nil
}

View File

@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -27,8 +28,9 @@ import (
type BundleProverTask struct {
BaseProverTask
bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
bundleAttemptsExceedTotal prometheus.Counter
bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
}
// NewBundleProverTask new a bundle collector
@@ -45,6 +47,10 @@ func NewBundleProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *g
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
bundleAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_bundle_attempts_exceed_total",
Help: "Total number of bundle attempts exceed.",
}),
bundleTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_bundle_get_task_total",
Help: "Total number of bundle get task.",
@@ -108,7 +114,7 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
return nil, nil
}
log.Info("start bundle proof generation session", "task index", bundleTask.Index, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)
log.Info("start batch proof generation session", "task index", bundleTask.Index, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)
hardForkName, getHardForkErr := bp.hardForkName(ctx, bundleTask)
if getHardForkErr != nil {
@@ -117,13 +123,6 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
return nil, ErrCoordinatorInternalFailure
}
if hardForkName != taskCtx.HardForkName {
bp.recoverActiveAttempts(ctx, bundleTask)
log.Error("incompatible prover version. requisite hard fork name:%s, prover hard fork name:%s, bundle task_id:%s",
hardForkName, taskCtx.HardForkName, "task_id", bundleTask.Hash)
return nil, ErrCoordinatorInternalFailure
}
proverTask := orm.ProverTask{
TaskID: bundleTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
@@ -192,6 +191,15 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
return nil, fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", task.TaskID)
}
latestFinalizedBatch, err := bp.batchOrm.GetLatestFinalizedBatch(ctx)
if err != nil {
return nil, fmt.Errorf("failed to get latest finalized batch by bundle hash: %w, bundle hash: %v", err, task.TaskID)
}
if latestFinalizedBatch == nil {
return nil, fmt.Errorf("failed to get latest finalized batch by bundle hash: %v, no finalized batch found", task.TaskID)
}
var batchProofs []*message.BatchProof
for _, batch := range batches {
var proof message.BatchProof
@@ -201,13 +209,20 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
batchProofs = append(batchProofs, &proof)
}
lastPendingBatch := batches[len(batches)-1]
taskDetail := message.BundleTaskDetail{
BatchProofs: batchProofs,
ChainID: bp.cfg.L2.ChainID,
FinalizedBatchHash: common.HexToHash(latestFinalizedBatch.Hash),
FinalizedStateRoot: common.HexToHash(latestFinalizedBatch.StateRoot),
PendingBatchHash: common.HexToHash(lastPendingBatch.Hash),
PendingStateRoot: common.HexToHash(lastPendingBatch.StateRoot),
PendingWithdrawRoot: common.HexToHash(lastPendingBatch.WithdrawRoot),
BatchProofs: batchProofs,
}
batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
return nil, fmt.Errorf("failed to marshal chunk proofs, taskID:%s err:%w", task.TaskID, err)
}
taskMsg := &coordinatorType.GetTaskSchema{
@@ -221,7 +236,7 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
}
func (bp *BundleProverTask) recoverActiveAttempts(ctx *gin.Context, bundleTask *orm.Bundle) {
if err := bp.bundleOrm.DecreaseActiveAttemptsByHash(ctx.Copy(), bundleTask.Hash); err != nil {
if err := bp.chunkOrm.DecreaseActiveAttemptsByHash(ctx.Copy(), bundleTask.Hash); err != nil {
log.Error("failed to recover bundle active attempts", "hash", bundleTask.Hash, "error", err)
}
}
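One side of this hunk enriches message.BundleTaskDetail with chain and state context: the proof anchors on the latest finalized batch and commits to the last pending batch in the bundle. The recoverActiveAttempts pair above shows a related fix in miniature: decrementing a bundle's active attempts has to go through bundleOrm, not chunkOrm. A minimal sketch of the detail assembly, where batchRow and bundleTaskDetail are simplified stand-ins for orm.Batch and message.BundleTaskDetail:

package sketch

import "github.com/scroll-tech/go-ethereum/common"

// batchRow and bundleTaskDetail are simplified stand-ins for orm.Batch
// and message.BundleTaskDetail.
type batchRow struct {
	Hash         string
	StateRoot    string
	WithdrawRoot string
}

type bundleTaskDetail struct {
	ChainID             uint64
	FinalizedBatchHash  common.Hash
	FinalizedStateRoot  common.Hash
	PendingBatchHash    common.Hash
	PendingStateRoot    common.Hash
	PendingWithdrawRoot common.Hash
}

// newBundleTaskDetail mirrors the hunk above: the proof anchors on the
// latest finalized batch and commits to the bundle's last pending batch.
func newBundleTaskDetail(chainID uint64, finalized, lastPending *batchRow) bundleTaskDetail {
	return bundleTaskDetail{
		ChainID:             chainID,
		FinalizedBatchHash:  common.HexToHash(finalized.Hash),
		FinalizedStateRoot:  common.HexToHash(finalized.StateRoot),
		PendingBatchHash:    common.HexToHash(lastPending.Hash),
		PendingStateRoot:    common.HexToHash(lastPending.StateRoot),
		PendingWithdrawRoot: common.HexToHash(lastPending.WithdrawRoot),
	}
}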

View File

@@ -27,8 +27,9 @@ import (
type ChunkProverTask struct {
BaseProverTask
chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
chunkAttemptsExceedTotal prometheus.Counter
chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
}
// NewChunkProverTask new a chunk prover task
@@ -43,6 +44,10 @@ func NewChunkProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
chunkAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_chunk_attempts_exceed_total",
Help: "Total number of chunk attempts exceed.",
}),
chunkTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_chunk_get_task_total",
Help: "Total number of chunk get task.",
@@ -115,13 +120,6 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
if hardForkName != taskCtx.HardForkName {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("incompatible prover version. requisite hard fork name:%s, prover hard fork name:%s, chunk task_id:%s",
hardForkName, taskCtx.HardForkName, "task_id", chunkTask.Hash)
return nil, ErrCoordinatorInternalFailure
}
proverTask := orm.ProverTask{
TaskID: chunkTask.Hash,
ProverPublicKey: taskCtx.PublicKey,

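This hunk adds a plain prometheus.Counter next to the existing labeled CounterVecs. A sketch of the registration pattern via promauto; the "fork_name" label is illustrative only, since the CounterVec's actual labels are not shown in this hunk.

package sketch

import (
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

type chunkMetrics struct {
	attemptsExceed prometheus.Counter
	getTaskTotal   *prometheus.CounterVec
}

func newChunkMetrics(reg prometheus.Registerer) *chunkMetrics {
	return &chunkMetrics{
		// A plain Counter is a single time series.
		attemptsExceed: promauto.With(reg).NewCounter(prometheus.CounterOpts{
			Name: "coordinator_chunk_attempts_exceed_total",
			Help: "Total number of chunk attempts exceed.",
		}),
		// A CounterVec fans out into one series per label value; the
		// "fork_name" label is an assumption, not taken from this hunk.
		getTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
			Name: "coordinator_chunk_get_task_total",
			Help: "Total number of chunk get task.",
		}, []string{"fork_name"}),
	}
}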
View File

@@ -1,7 +1,6 @@
package provertask
import (
"errors"
"fmt"
"sync"
@@ -18,7 +17,9 @@ import (
var (
// ErrCoordinatorInternalFailure coordinator internal db failure
ErrCoordinatorInternalFailure = errors.New("coordinator internal error")
ErrCoordinatorInternalFailure = fmt.Errorf("coordinator internal error")
// ErrHardForkName indicates client request with the wrong hard fork name
ErrHardForkName = fmt.Errorf("wrong hard fork name")
)
var (
@@ -49,7 +50,6 @@ type proverTaskContext struct {
PublicKey string
ProverName string
ProverVersion string
HardForkName string
}
// checkParameter check the prover task parameter illegal
@@ -58,28 +58,22 @@ func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, e
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist {
return nil, errors.New("get public key from context failed")
return nil, fmt.Errorf("get public key from context failed")
}
ptc.PublicKey = publicKey.(string)
proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
if !proverNameExist {
return nil, errors.New("get prover name from context failed")
return nil, fmt.Errorf("get prover name from context failed")
}
ptc.ProverName = proverName.(string)
proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
if !proverVersionExist {
return nil, errors.New("get prover version from context failed")
return nil, fmt.Errorf("get prover version from context failed")
}
ptc.ProverVersion = proverVersion.(string)
hardForkName, hardForkNameExist := ctx.Get(coordinatorType.HardForkName)
if !hardForkNameExist {
return nil, errors.New("get hard fork name from context failed")
}
ptc.HardForkName = hardForkName.(string)
isBlocked, err := b.proverBlockListOrm.IsPublicKeyBlocked(ctx.Copy(), publicKey.(string))
if err != nil {
return nil, fmt.Errorf("failed to check whether the public key %s is blocked before assigning a chunk task, err: %w, proverName: %s, proverVersion: %s", publicKey, err, proverName, proverVersion)

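The checkParameter hunk reads per-request values (public key, prover name, prover version, and on one side the hard fork name) out of the gin context, where each lookup returns (value, exists) and is followed by a type assertion. A hardened sketch of that lookup; the repository code asserts the type directly, so the checked assertion here is a defensive variant rather than the PR's code.

package sketch

import (
	"errors"

	"github.com/gin-gonic/gin"
)

// getStringFromContext shows the lookup shape used by checkParameter:
// gin's ctx.Get returns (any, bool), so existence is checked first and
// the value is then asserted to string before it is trusted.
func getStringFromContext(ctx *gin.Context, key string) (string, error) {
	v, ok := ctx.Get(key)
	if !ok {
		return "", errors.New("get " + key + " from context failed")
	}
	s, ok := v.(string)
	if !ok {
		return "", errors.New(key + " in context is not a string")
	}
	return s, nil
}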
View File

@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"errors"
"fmt"
"strings"
"time"
@@ -137,11 +138,11 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
m.proofReceivedTotal.Inc()
pk := ctx.GetString(coordinatorType.PublicKey)
if len(pk) == 0 {
return errors.New("get public key from context failed")
return fmt.Errorf("get public key from context failed")
}
pv := ctx.GetString(coordinatorType.ProverVersion)
if len(pv) == 0 {
return errors.New("get ProverVersion from context failed")
return fmt.Errorf("get ProverVersion from context failed")
}
proverTask, err := m.proverTaskOrm.GetProverTaskByUUIDAndPublicKey(ctx.Copy(), proofParameter.UUID, pk)
@@ -168,26 +169,17 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
if getHardForkErr != nil {
return ErrGetHardForkNameFailed
}
switch message.ProofType(proofParameter.TaskType) {
case message.ProofTypeChunk:
var chunkProof message.ChunkProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &chunkProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyChunkProof(&chunkProof, hardForkName)
case message.ProofTypeBatch:
// only verify batch proof. chunk proof verifier have been disabled after Bernoulli
if message.ProofType(proofParameter.TaskType) == message.ProofTypeBatch {
var batchProof message.BatchProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &batchProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBatchProof(&batchProof, hardForkName)
case message.ProofTypeBundle:
var bundleProof message.BundleProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof, hardForkName)
}
if message.ProofType(proofParameter.TaskType) == message.ProofTypeBundle {
// TODO add bundle check here
}
if verifyErr != nil || !success {
@@ -296,7 +288,6 @@ func (m *ProofReceiverLogic) validator(ctx context.Context, proverTask *orm.Prov
// if the batch/chunk have proved and verifier success, need skip this submit proof
if m.checkIsTaskSuccess(ctx, proofParameter.TaskID, message.ProofType(proofParameter.TaskType)) {
m.proofRecover(ctx, proverTask, types.ProverTaskFailureTypeObjectAlreadyVerified, proofParameter)
m.validateFailureProverTaskHaveVerifier.Inc()
log.Info("the prove task have proved and verifier success, skip this submit proof", "hash", proofParameter.TaskID,
"taskType", proverTask.TaskType, "proverName", proverTask.ProverName, "proverPublicKey", pk)

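The verification hunk above switches between a batch-only check and a switch over all three proof types, each unmarshalled into its concrete struct before being verified under the task's hard fork name. A condensed sketch of that dispatch with stand-in proof types and a stand-in verifier interface; the real methods are VerifyChunkProof, VerifyBatchProof, and VerifyBundleProof.

package sketch

import (
	"encoding/json"
	"fmt"
)

type proofType int

const (
	proofTypeChunk proofType = iota + 1
	proofTypeBatch
	proofTypeBundle
)

type (
	chunkProof  struct{ Proof []byte }
	batchProof  struct{ Proof []byte }
	bundleProof struct{ Proof []byte }
)

// verifier stands in for the coordinator's verifier.
type verifier interface {
	verify(kind proofType, proof any, forkName string) (bool, error)
}

// dispatchVerify mirrors the switch in HandleZkProof: pick the concrete
// proof struct by task type, unmarshal, then verify under the fork name.
func dispatchVerify(v verifier, kind proofType, raw []byte, forkName string) (bool, error) {
	var proof any
	switch kind {
	case proofTypeChunk:
		proof = new(chunkProof)
	case proofTypeBatch:
		proof = new(batchProof)
	case proofTypeBundle:
		proof = new(bundleProof)
	default:
		return false, fmt.Errorf("unknown proof type %d", kind)
	}
	if err := json.Unmarshal(raw, proof); err != nil {
		return false, err
	}
	return v.verify(kind, proof, forkName)
}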
View File

@@ -0,0 +1,11 @@
#!/bin/bash
work_dir="$(dirname -- "${BASH_SOURCE[0]}")"
work_dir="$(cd -- "$work_dir" && pwd)"
echo $work_dir
rm $work_dir/*.vkey
version=release-v0.11.4
wget https://circuit-release.s3.us-west-2.amazonaws.com/${version}/chunk_vk.vkey -O $work_dir/chunk_vk.vkey
wget https://circuit-release.s3.us-west-2.amazonaws.com/${version}/agg_vk.vkey -O $work_dir/agg_vk.vkey

View File

@@ -10,13 +10,13 @@ import (
// NewVerifier Sets up a mock verifier.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
batchVKMap := map[string]struct{}{"mock_vk": {}}
chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil
}
// VerifyChunkProof return a mock verification result for a ChunkProof.
func (v *Verifier) VerifyChunkProof(proof *message.ChunkProof, forkName string) (bool, error) {
func (v *Verifier) VerifyChunkProof(proof *message.ChunkProof) (bool, error) {
if string(proof.Proof) == InvalidTestProof {
return false, nil
}
@@ -30,11 +30,3 @@ func (v *Verifier) VerifyBatchProof(proof *message.BatchProof, forkName string)
}
return true, nil
}
// VerifyBundleProof return a mock verification result for a BundleProof.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
if string(proof.Proof) == InvalidTestProof {
return false, nil
}
return true, nil
}
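The mock verifier's VK tables change shape here: map[string]struct{} is a set ("is this VK acceptable"), while map[string]string keyed by fork name answers "which VK is current for this fork", which is what fork-aware login needs. The lookup then reduces to a plain map access, sketched below:

package sketch

// vkForFork shows the lookup the fork-keyed table enables: given a fork
// name, return the expected verification key for it.
func vkForFork(batchVKMap map[string]string, forkName string) (string, bool) {
	vk, ok := batchVKMap[forkName]
	return vk, ok
}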

View File

@@ -10,7 +10,7 @@ const InvalidTestProof = "this is a invalid proof"
// Verifier represents a rust ffi to a halo2 verifier.
type Verifier struct {
cfg *config.VerifierConfig
ChunkVKMap map[string]struct{}
BatchVKMap map[string]struct{}
BundleVkMap map[string]struct{}
ChunkVKMap map[string]string
BatchVKMap map[string]string
BundleVkMap map[string]string
}

View File

@@ -11,104 +11,69 @@ package verifier
import "C" //nolint:typecheck
import (
"embed"
"encoding/base64"
"encoding/json"
"io"
"io/fs"
"os"
"path"
"unsafe"
"github.com/scroll-tech/go-ethereum/log"
"scroll-tech/common/types/message"
coordinatorType "scroll-tech/common/types/message"
"scroll-tech/coordinator/internal/config"
)
// This struct maps to `CircuitConfig` in common/libzkp/impl/src/verifier.rs
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.CircuitConfig` being changed
type rustCircuitConfig struct {
ForkName string `json:"fork_name"`
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
}
func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
return &rustCircuitConfig{
ForkName: cfg.ForkName,
ParamsPath: cfg.ParamsPath,
AssetsPath: cfg.AssetsPath,
}
}
// This struct maps to `VerifierConfig` in common/libzkp/impl/src/verifier.rs
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.VerifierConfig` being changed
type rustVerifierConfig struct {
LowVersionCircuit *rustCircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *rustCircuitConfig `json:"high_version_circuit"`
}
func newRustVerifierConfig(cfg *config.VerifierConfig) *rustVerifierConfig {
return &rustVerifierConfig{
LowVersionCircuit: newRustCircuitConfig(cfg.LowVersionCircuit),
HighVersionCircuit: newRustCircuitConfig(cfg.HighVersionCircuit),
}
}
// NewVerifier Sets up a rust ffi to call verify.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
if cfg.MockMode {
chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]struct{}{"mock_vk": {}}
bundleVKMap := map[string]struct{}{"mock_vk": {}}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap, BundleVkMap: bundleVKMap}, nil
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil
}
verifierConfig := newRustVerifierConfig(cfg)
configBytes, err := json.Marshal(verifierConfig)
if err != nil {
return nil, err
}
configStr := C.CString(string(configBytes))
paramsPathStr := C.CString(cfg.ParamsPath)
assetsPathStr := C.CString(cfg.AssetsPath)
defer func() {
C.free(unsafe.Pointer(configStr))
C.free(unsafe.Pointer(paramsPathStr))
C.free(unsafe.Pointer(assetsPathStr))
}()
C.init(configStr)
C.init_batch_verifier(paramsPathStr, assetsPathStr)
C.init_chunk_verifier(paramsPathStr, assetsPathStr)
v := &Verifier{
cfg: cfg,
ChunkVKMap: make(map[string]struct{}),
BatchVKMap: make(map[string]struct{}),
BundleVkMap: make(map[string]struct{}),
cfg: cfg,
ChunkVKMap: make(map[string]string),
BatchVKMap: make(map[string]string),
}
bundleVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_bundle.vkey"))
bundleVK, err := v.readVK(path.Join(cfg.AssetsPath, "bundle_vk.vkey"))
if err != nil {
return nil, err
}
batchVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_batch.vkey"))
batchVK, err := v.readVK(path.Join(cfg.AssetsPath, "batch_vk.vkey"))
if err != nil {
return nil, err
}
chunkVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_chunk.vkey"))
chunkVK, err := v.readVK(path.Join(cfg.AssetsPath, "chunk_vk.vkey"))
if err != nil {
return nil, err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
v.BundleVkMap[cfg.ForkName] = bundleVK
v.BatchVKMap[cfg.ForkName] = batchVK
v.ChunkVKMap[cfg.ForkName] = chunkVK
if err := v.loadLowVersionVKs(cfg); err != nil {
if err := v.loadEmbedVK(); err != nil {
return nil, err
}
return v, nil
}
// VerifyBatchProof Verify a ZkProof by marshaling it and sending it to the Halo2 Verifier.
func (v *Verifier) VerifyBatchProof(proof *message.BatchProof, forkName string) (bool, error) {
func (v *Verifier) VerifyBatchProof(proof *coordinatorType.BatchProof, forkName string) (bool, error) {
if v.cfg.MockMode {
log.Info("Mock mode, batch verifier disabled")
if string(proof.Proof) == InvalidTestProof {
@@ -135,34 +100,7 @@ func (v *Verifier) VerifyBatchProof(proof *message.BatchProof, forkName string)
}
// VerifyChunkProof Verify a ZkProof by marshaling it and sending it to the Halo2 Verifier.
func (v *Verifier) VerifyChunkProof(proof *message.ChunkProof, forkName string) (bool, error) {
if v.cfg.MockMode {
log.Info("Mock mode, verifier disabled")
if string(proof.Proof) == InvalidTestProof {
return false, nil
}
return true, nil
}
buf, err := json.Marshal(proof)
if err != nil {
return false, err
}
log.Info("Start to verify chunk proof", "forkName", forkName)
proofStr := C.CString(string(buf))
forkNameStr := C.CString(forkName)
defer func() {
C.free(unsafe.Pointer(proofStr))
C.free(unsafe.Pointer(forkNameStr))
}()
verified := C.verify_chunk_proof(proofStr, forkNameStr)
return verified != 0, nil
}
// VerifyBundleProof Verify a ZkProof for a bundle of batches, by marshaling it and verifying it via the EVM verifier.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
func (v *Verifier) VerifyChunkProof(proof *coordinatorType.ChunkProof) (bool, error) {
if v.cfg.MockMode {
log.Info("Mock mode, verifier disabled")
if string(proof.Proof) == InvalidTestProof {
@@ -177,14 +115,12 @@ func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string
}
proofStr := C.CString(string(buf))
forkNameStr := C.CString(forkName)
defer func() {
C.free(unsafe.Pointer(proofStr))
C.free(unsafe.Pointer(forkNameStr))
}()
log.Info("Start to verify bundle proof ...")
verified := C.verify_bundle_proof(proofStr, forkNameStr)
log.Info("Start to verify chunk proof ...")
verified := C.verify_chunk_proof(proofStr)
return verified != 0, nil
}
@@ -200,22 +136,23 @@ func (v *Verifier) readVK(filePat string) (string, error) {
return base64.StdEncoding.EncodeToString(byt), nil
}
// load low version vks, current is darwin
func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
bundleVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_bundle.vkey"))
//go:embed legacy_vk/*
var legacyVKFS embed.FS
func (v *Verifier) loadEmbedVK() error {
batchVKBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/agg_vk.vkey")
if err != nil {
log.Error("load embed batch vk failure", "err", err)
return err
}
batchVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_batch.vkey"))
chunkVkBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/chunk_vk.vkey")
if err != nil {
log.Error("load embed chunk vk failure", "err", err)
return err
}
chunkVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_chunk.vkey"))
if err != nil {
return err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
v.BatchVKMap["curie"] = base64.StdEncoding.EncodeToString(batchVKBytes)
v.ChunkVKMap["curie"] = base64.StdEncoding.EncodeToString(chunkVkBytes)
return nil
}
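All of the Verify* FFI paths in this file share one shape: marshal the proof to JSON, copy the strings across the boundary with C.CString, free them via a deferred C.free, and treat a nonzero return as success. A sketch of that shape; the C prototype is assumed from the libzkp header, and linking would require the actual Rust library, so this is illustrative rather than buildable standalone.

package sketch

/*
#include <stdlib.h>
// Prototype assumed from the libzkp header; linking needs the Rust lib.
char verify_bundle_proof(char* proof, char* fork_name);
*/
import "C"

import (
	"encoding/json"
	"unsafe"
)

// verifyViaFFI shows the call shape used above: every Go string handed
// across the FFI is copied with C.CString and freed after the call.
func verifyViaFFI(proof any, forkName string) (bool, error) {
	buf, err := json.Marshal(proof)
	if err != nil {
		return false, err
	}
	proofStr := C.CString(string(buf))
	forkNameStr := C.CString(forkName)
	defer func() {
		C.free(unsafe.Pointer(proofStr))
		C.free(unsafe.Pointer(forkNameStr))
	}()
	verified := C.verify_bundle_proof(proofStr, forkNameStr)
	return verified != 0, nil
}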

View File

@@ -18,8 +18,7 @@ import (
var (
paramsPath = flag.String("params", "/assets/test_params", "params dir")
assetsPathLo = flag.String("assets_lo", "/assets/test_assets_lo", "assets dir")
assetsPathHi = flag.String("assets", "/assets/test_assets", "assets dir")
assetsPath = flag.String("assets", "/assets/test_assets", "assets dir")
batchProofPath = flag.String("batch_proof", "/assets/proof_data/batch_proof", "batch proof file path")
chunkProofPath1 = flag.String("chunk_proof1", "/assets/proof_data/chunk_proof1", "chunk proof file path 1")
chunkProofPath2 = flag.String("chunk_proof2", "/assets/proof_data/chunk_proof2", "chunk proof file path 2")
@@ -29,29 +28,28 @@ func TestFFI(t *testing.T) {
as := assert.New(t)
cfg := &config.VerifierConfig{
MockMode: false,
ParamsPath: *paramsPath,
AssetsPathLo: *assetsPathLo,
AssetsPathHi: *assetsPathHi,
MockMode: false,
ParamsPath: *paramsPath,
AssetsPath: *assetsPath,
}
v, err := NewVerifier(cfg)
as.NoError(err)
chunkProof1 := readChunkProof(*chunkProofPath1, as)
chunkOk1, err := v.VerifyChunkProof(chunkProof1, "darwinV2")
chunkOk1, err := v.VerifyChunkProof(chunkProof1)
as.NoError(err)
as.True(chunkOk1)
t.Log("Verified chunk proof 1")
chunkProof2 := readChunkProof(*chunkProofPath2, as)
chunkOk2, err := v.VerifyChunkProof(chunkProof2, "darwinV2")
chunkOk2, err := v.VerifyChunkProof(chunkProof2)
as.NoError(err)
as.True(chunkOk2)
t.Log("Verified chunk proof 2")
batchProof := readBatchProof(*batchProofPath, as)
batchOk, err := v.VerifyBatchProof(batchProof, "darwinV2")
batchOk, err := v.VerifyBatchProof(batchProof, "curie")
as.NoError(err)
as.True(batchOk)
t.Log("Verified batch proof")

View File

@@ -31,9 +31,6 @@ type Batch struct {
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
// proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -214,6 +211,23 @@ func (o *Batch) GetBatchByHash(ctx context.Context, hash string) (*Batch, error)
return &batch, nil
}
// GetLatestFinalizedBatch retrieves the latest finalized batch from the database.
func (o *Batch) GetLatestFinalizedBatch(ctx context.Context) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("rollup_status = ?", int(types.RollupFinalized))
db = db.Order("index desc")
var latestFinalizedBatch Batch
if err := db.First(&latestFinalizedBatch).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, fmt.Errorf("Batch.GetLatestBatch error: %w", err)
}
return &latestFinalizedBatch, nil
}
// GetBatchesByBundleHash retrieves the given batch.
func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) ([]*Batch, error) {
db := o.db.WithContext(ctx)
@@ -222,25 +236,12 @@ func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) (
db = db.Order("index ASC")
var batches []*Batch
if err := db.Find(&batches).Error; err != nil {
if err := db.First(&batches).Error; err != nil {
return nil, fmt.Errorf("Batch.GetBatchesByBundleHash error: %w, bundle hash: %v", err, bundleHash)
}
return batches, nil
}
// GetBatchByIndex retrieves the batch by the given index.
func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("index = ?", index)
var batch Batch
if err := db.First(&batch).Error; err != nil {
return nil, fmt.Errorf("Batch.GetBatchByIndex error: %w, index: %v", err, index)
}
return &batch, nil
}
// InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil {

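GetLatestFinalizedBatch maps gorm.ErrRecordNotFound to (nil, nil) instead of an error, which is why the bundle task hunk earlier has to nil-check the result explicitly. A self-contained sketch of the query shape, with a simplified Batch model:

package sketch

import (
	"context"
	"errors"
	"fmt"

	"gorm.io/gorm"
)

type Batch struct {
	Index        uint64
	Hash         string
	RollupStatus int16 `gorm:"column:rollup_status"`
}

// latestFinalized mirrors GetLatestFinalizedBatch: order by index
// descending, take the first row, and treat "no rows" as (nil, nil)
// so callers can distinguish the empty case from a query failure.
func latestFinalized(ctx context.Context, db *gorm.DB, finalized int) (*Batch, error) {
	var b Batch
	err := db.WithContext(ctx).
		Model(&Batch{}).
		Where("rollup_status = ?", finalized).
		Order("index desc").
		First(&b).Error
	if errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, nil
	}
	if err != nil {
		return nil, fmt.Errorf("latestFinalized: %w", err)
	}
	return &b, nil
}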
View File

@@ -28,6 +28,7 @@ type Bundle struct {
BatchProofsStatus int16 `json:"batch_proofs_status" gorm:"column:batch_proofs_status;default:1"`
ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"`
Proof []byte `json:"proof" gorm:"column:proof;default:NULL"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" gorm:"column:prover_assigned_at;default:NULL"`
ProvedAt *time.Time `json:"proved_at" gorm:"column:proved_at;default:NULL"`
ProofTimeSec int32 `json:"proof_time_sec" gorm:"column:proof_time_sec;default:NULL"`
TotalAttempts int16 `json:"total_attempts" gorm:"column:total_attempts;default:0"`
@@ -135,14 +136,14 @@ func (o *Bundle) GetUnassignedAndBatchesUnreadyBundles(ctx context.Context, offs
return bundles, nil
}
// UpdateBatchProofsStatusByBundleHash updates the status of batch_proofs_status field for a given bundle hash.
func (o *Bundle) UpdateBatchProofsStatusByBundleHash(ctx context.Context, bundleHash string, status types.BatchProofsStatus) error {
// UpdateBatchProofsStatusByBatchHash updates the status of batch_proofs_status field for a given bundle hash.
func (o *Bundle) UpdateBatchProofsStatusByBatchHash(ctx context.Context, bundleHash string, status types.BatchProofsStatus) error {
db := o.db.WithContext(ctx)
db = db.Model(&Bundle{})
db = db.Where("hash = ?", bundleHash)
if err := db.Update("batch_proofs_status", status).Error; err != nil {
return fmt.Errorf("Bundle.UpdateBatchProofsStatusByBundleHash error: %w, bundle hash: %v, status: %v", err, bundleHash, status.String())
return fmt.Errorf("Bundle.UpdateBatchProofsStatusByBatchHash error: %w, bundle hash: %v, status: %v", err, bundleHash, status.String())
}
return nil
}

View File

@@ -2,7 +2,6 @@ package orm
import (
"context"
"errors"
"fmt"
"time"
@@ -54,7 +53,7 @@ func (r *Challenge) InsertChallenge(ctx context.Context, challengeString string)
return fmt.Errorf("the challenge string:%s have been used", challengeString)
}
return errors.New("insert challenge string affected rows more than 1")
return fmt.Errorf("insert challenge string affected rows more than 1")
}
// DeleteExpireChallenge delete the expire challenge

View File

@@ -87,55 +87,6 @@ func (o *L2Block) GetL2BlockByNumber(ctx context.Context, blockNumber uint64) (*
return &l2Block, nil
}
// GetL2BlocksInRange retrieves the L2 blocks within the specified range (inclusive).
// The range is closed, i.e., it includes both start and end block numbers.
// The returned blocks are sorted in ascending order by their block number.
func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint64, endBlockNumber uint64) ([]*encoding.Block, error) {
if startBlockNumber > endBlockNumber {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: start block number should be less than or equal to end block number, start block: %v, end block: %v", startBlockNumber, endBlockNumber)
}
db := o.db.WithContext(ctx)
db = db.Model(&L2Block{})
db = db.Select("header, transactions, withdraw_root, row_consumption")
db = db.Where("number >= ? AND number <= ?", startBlockNumber, endBlockNumber)
db = db.Order("number ASC")
var l2Blocks []L2Block
if err := db.Find(&l2Blocks).Error; err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
// sanity check
if uint64(len(l2Blocks)) != endBlockNumber-startBlockNumber+1 {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: unexpected number of results, expected: %v, got: %v", endBlockNumber-startBlockNumber+1, len(l2Blocks))
}
var blocks []*encoding.Block
for _, v := range l2Blocks {
var block encoding.Block
if err := json.Unmarshal([]byte(v.Transactions), &block.Transactions); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.Header = &gethTypes.Header{}
if err := json.Unmarshal([]byte(v.Header), block.Header); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.WithdrawRoot = common.HexToHash(v.WithdrawRoot)
if err := json.Unmarshal([]byte(v.RowConsumption), &block.RowConsumption); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
blocks = append(blocks, &block)
}
return blocks, nil
}
// InsertL2Blocks inserts l2 blocks into the "l2_block" table.
// for unit test
func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*encoding.Block) error {

View File

@@ -11,7 +11,7 @@ import (
"gorm.io/gorm/clause"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
coordinatorType "scroll-tech/common/types/message"
"scroll-tech/common/utils"
)
@@ -98,7 +98,7 @@ func (o *ProverTask) GetProverTasks(ctx context.Context, fields map[string]inter
// GetProverTasksByHashes retrieves the ProverTask records associated with the specified hashes.
// The returned prover task objects are sorted in ascending order by their ids.
func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType message.ProofType, hashes []string) ([]*ProverTask, error) {
func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType coordinatorType.ProofType, hashes []string) ([]*ProverTask, error) {
if len(hashes) == 0 {
return nil, nil
}
@@ -116,6 +116,25 @@ func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType messag
return proverTasks, nil
}
// GetAssignedProverTaskByTaskIDAndProver get prover task taskID and public key
// TODO: when prover all upgrade need DEPRECATED this function
func (o *ProverTask) GetAssignedProverTaskByTaskIDAndProver(ctx context.Context, taskType coordinatorType.ProofType, taskID, proverPublicKey, proverVersion string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
db = db.Where("task_id", taskID)
db = db.Where("prover_public_key", proverPublicKey)
db = db.Where("prover_version", proverVersion)
db = db.Where("proving_status", types.ProverAssigned)
var proverTask ProverTask
err := db.First(&proverTask).Error
if err != nil {
return nil, fmt.Errorf("ProverTask.GetProverTaskByTaskIDAndProver err:%w, taskID:%s, pubkey:%s, prover_version:%s", err, taskID, proverPublicKey, proverVersion)
}
return &proverTask, nil
}
// GetProverTaskByUUIDAndPublicKey get prover task taskID by uuid and public key
func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid, publicKey string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
@@ -132,7 +151,7 @@ func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid,
}
// GetAssignedTaskOfOtherProvers get the chunk/batch task assigned other provers
func (o *ProverTask) GetAssignedTaskOfOtherProvers(ctx context.Context, taskType message.ProofType, taskID, proverPublicKey string) ([]ProverTask, error) {
func (o *ProverTask) GetAssignedTaskOfOtherProvers(ctx context.Context, taskType coordinatorType.ProofType, taskID, proverPublicKey string) ([]ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
@@ -148,7 +167,7 @@ func (o *ProverTask) GetAssignedTaskOfOtherProvers(ctx context.Context, taskType
}
// GetProvingStatusByTaskID retrieves the proving status of a prover task
func (o *ProverTask) GetProvingStatusByTaskID(ctx context.Context, taskType message.ProofType, taskID string) (types.ProverProveStatus, error) {
func (o *ProverTask) GetProvingStatusByTaskID(ctx context.Context, taskType coordinatorType.ProofType, taskID string) (types.ProverProveStatus, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Select("proving_status")
@@ -163,7 +182,7 @@ func (o *ProverTask) GetProvingStatusByTaskID(ctx context.Context, taskType mess
}
// GetTimeoutAssignedProverTasks get the timeout and assigned proving_status prover task
func (o *ProverTask) GetTimeoutAssignedProverTasks(ctx context.Context, limit int, taskType message.ProofType, timeout time.Duration) ([]ProverTask, error) {
func (o *ProverTask) GetTimeoutAssignedProverTasks(ctx context.Context, limit int, taskType coordinatorType.ProofType, timeout time.Duration) ([]ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("proving_status", int(types.ProverAssigned))
@@ -180,7 +199,7 @@ func (o *ProverTask) GetTimeoutAssignedProverTasks(ctx context.Context, limit in
}
// TaskTimeoutMoreThanOnce get the timeout twice task. a temp design
func (o *ProverTask) TaskTimeoutMoreThanOnce(ctx context.Context, taskType message.ProofType, taskID string) bool {
func (o *ProverTask) TaskTimeoutMoreThanOnce(ctx context.Context, taskType coordinatorType.ProofType, taskID string) bool {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))

View File

@@ -18,8 +18,6 @@ const (
ProverName = "prover_name"
// ProverVersion the prover version for context
ProverVersion = "prover_version"
// HardForkName the hard fork name for context
HardForkName = "hard_fork_name"
)
// LoginSchema for /login response
@@ -37,12 +35,6 @@ type Message struct {
VKs []string `form:"vks" json:"vks"`
}
// LoginParameterWithHardForkName constructs new payload for login
type LoginParameterWithHardForkName struct {
LoginParameter
HardForkName string `form:"hard_fork_name" json:"hard_fork_name"`
}
// LoginParameter for /login api
type LoginParameter struct {
Message Message `form:"message" json:"message" binding:"required"`

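LoginParameterWithHardForkName wraps LoginParameter by embedding, so the extra field serializes flat beside the embedded struct's fields rather than under a nested key. A simplified sketch; the real LoginParameter also carries Message and Signature fields.

package sketch

import (
	"encoding/json"
	"fmt"
)

type LoginParameter struct {
	PublicKey string `json:"public_key"`
}

// Embedding keeps the wire format flat: hard_fork_name sits next to
// the embedded LoginParameter's fields, not under a nested object.
type LoginParameterWithHardForkName struct {
	LoginParameter
	HardForkName string `json:"hard_fork_name"`
}

func exampleMarshal() {
	p := LoginParameterWithHardForkName{
		LoginParameter: LoginParameter{PublicKey: "0xabc"},
		HardForkName:   "darwin",
	}
	out, _ := json.Marshal(p)
	fmt.Println(string(out)) // {"public_key":"0xabc","hard_fork_name":"darwin"}
}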
View File

@@ -60,10 +60,10 @@ func TestGenerateSignature(t *testing.T) {
authMsg := LoginParameter{
Message: Message{
ProverName: "test",
ProverVersion: "v4.4.43",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQzODg4MTEsIm9yaWdfaWF0IjoxNzI0Mzg1MjExLCJyYW5kb20iOiItZ2UxTjhRc1NlTzhxRVdBTk5KWWtFVjU3ekhJX1JmTGw5Mjdkb2pMTm5JPSJ9.TmcRyXTyfCAHIk5WXdpShck0qUUesTDmi_0IhD87GmA",
ProverVersion: "v4.1.115-4dd11c6-000000-000000",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MTk1NjkyNDAsIm9yaWdfaWF0IjoxNzE5NTY1NjQwLCJyYW5kb20iOiJPRExnNEZtUW1MOEwzTDRvZ3BMcnl6c09EN1ZXd0FoNmd3bVpzVURJV3M0PSJ9.3Oq7fDtFnKGbPyjc8fslzfftyzreQbi-lAr0_HFy54w",
ProverTypes: []ProverType{ProverTypeChunk},
VKs: []string{"mock_vk"},
VKs: []string{"mock_chunk_vk"},
},
PublicKey: publicKeyHex,
}

View File

@@ -3,6 +3,7 @@ package types
// GetTaskParameter for ProverTasks request parameter
type GetTaskParameter struct {
ProverHeight uint64 `form:"prover_height" json:"prover_height"`
TaskType int `form:"task_type" json:"task_type"`
TaskTypes []int `form:"task_types" json:"task_types"`
}
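The get-task parameter moves between a single task_type and a task_types list here. A hypothetical compatibility helper, not part of this change, showing one way a handler could accept both fields while provers migrate:

package sketch

// normalizeTaskTypes is a hypothetical helper (not code from this PR):
// prefer the new task_types list, fall back to the legacy task_type.
func normalizeTaskTypes(taskType int, taskTypes []int) []int {
	if len(taskTypes) > 0 {
		return taskTypes
	}
	if taskType != 0 {
		return []int{taskType}
	}
	return nil
}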

View File

@@ -2,7 +2,6 @@ package types
import (
"fmt"
"scroll-tech/common/types/message"
)

View File

@@ -33,6 +33,12 @@ import (
"scroll-tech/coordinator/internal/route"
)
const (
forkNumberTwo = 2
forkNumberOne = 1
minProverVersion = "v2.0.0"
)
var (
conf *config.Config
@@ -66,7 +72,7 @@ func randomURL() string {
return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
}
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, forks []string) (*cron.Collector, *http.Server) {
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, nameForkMap map[string]int64) (*cron.Collector, *http.Server) {
var err error
db, err = testApps.GetGormDBClient()
@@ -84,23 +90,13 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
ProversPerSession: proversPerSession,
Verifier: &config.VerifierConfig{
MockMode: true,
LowVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "homestead",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "bernoulli",
MinProverVersion: "v4.3.0",
},
},
BatchCollectionTimeSec: 10,
ChunkCollectionTimeSec: 10,
BundleCollectionTimeSec: 10,
MaxVerifierWorkers: 10,
SessionAttempts: 5,
MinProverVersion: minProverVersion,
},
Auth: &config.Auth{
ChallengeExpireDurationSec: tokenTimeout,
@@ -109,12 +105,20 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
}
var chainConf params.ChainConfig
for _, forkName := range forks {
for forkName, forkNumber := range nameForkMap {
switch forkName {
case "shanghai":
chainConf.ShanghaiBlock = big.NewInt(forkNumber)
case "bernoulli":
chainConf.BernoulliBlock = big.NewInt(100)
chainConf.BernoulliBlock = big.NewInt(forkNumber)
case "london":
chainConf.LondonBlock = big.NewInt(forkNumber)
case "istanbul":
chainConf.IstanbulBlock = big.NewInt(forkNumber)
case "homestead":
chainConf.HomesteadBlock = big.NewInt(0)
chainConf.HomesteadBlock = big.NewInt(forkNumber)
case "eip155":
chainConf.EIP155Block = big.NewInt(forkNumber)
}
}
@@ -197,7 +201,7 @@ func TestApis(t *testing.T) {
func testHandshake(t *testing.T) {
// Setup coordinator and http server.
coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
proofCollector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -210,7 +214,7 @@ func testHandshake(t *testing.T) {
func testFailedHandshake(t *testing.T) {
// Setup coordinator and http server.
coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
proofCollector.Stop()
}()
@@ -228,7 +232,7 @@ func testFailedHandshake(t *testing.T) {
func testGetTaskBlocked(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -246,12 +250,12 @@ func testGetTaskBlocked(t *testing.T) {
expectedErr := fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", chunkProver.publicKey(), chunkProver.proverName, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
expectedErr = errors.New("get empty prover task")
expectedErr = fmt.Errorf("get empty prover task")
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrCoordinatorEmptyProofData, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
err = proverBlockListOrm.InsertProverPublicKey(context.Background(), batchProver.proverName, batchProver.publicKey())
assert.NoError(t, err)
@@ -259,20 +263,20 @@ func testGetTaskBlocked(t *testing.T) {
err = proverBlockListOrm.DeleteProverPublicKey(context.Background(), chunkProver.publicKey())
assert.NoError(t, err)
expectedErr = errors.New("get empty prover task")
expectedErr = fmt.Errorf("get empty prover task")
code, errMsg = chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrCoordinatorEmptyProofData, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
expectedErr = fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", batchProver.publicKey(), batchProver.proverName, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
}
func testOutdatedProverVersion(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -284,22 +288,20 @@ func testOutdatedProverVersion(t *testing.T) {
batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch, "v1.999.999")
assert.True(t, chunkProver.healthCheckSuccess(t))
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, chunkProver.proverVersion)
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, batchProver.proverVersion)
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
}
func testValidProof(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -382,7 +384,7 @@ func testValidProof(t *testing.T) {
func testInvalidProof(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -470,7 +472,7 @@ func testInvalidProof(t *testing.T) {
func testProofGeneratedFailed(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -571,7 +573,7 @@ func testProofGeneratedFailed(t *testing.T) {
func testTimeoutProof(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))

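setupCoordinator's fork handling changes from a fixed fork list to a name-to-block map applied onto params.ChainConfig. A sketch of that mapping, using only the fork fields that appear in this diff:

package sketch

import (
	"math/big"

	"github.com/scroll-tech/go-ethereum/params"
)

// forkConfig mirrors setupCoordinator's switch: each named fork maps
// onto the corresponding *big.Int activation block in ChainConfig.
func forkConfig(nameForkMap map[string]int64) params.ChainConfig {
	var cfg params.ChainConfig
	for name, number := range nameForkMap {
		switch name {
		case "homestead":
			cfg.HomesteadBlock = big.NewInt(number)
		case "eip155":
			cfg.EIP155Block = big.NewInt(number)
		case "istanbul":
			cfg.IstanbulBlock = big.NewInt(number)
		case "london":
			cfg.LondonBlock = big.NewInt(number)
		case "shanghai":
			cfg.ShanghaiBlock = big.NewInt(number)
		case "bernoulli":
			cfg.BernoulliBlock = big.NewInt(number)
		}
	}
	return cfg
}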
View File

@@ -160,7 +160,7 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) (*
resp, err := client.R().
SetHeader("Content-Type", "application/json").
SetHeader("Authorization", fmt.Sprintf("Bearer %s", token)).
SetBody(map[string]interface{}{"prover_height": 100, "task_types": []int{int(proofType)}}).
SetBody(map[string]interface{}{"prover_height": 100, "task_type": int(proofType)}).
SetResult(&result).
Post("http://" + r.coordinatorURL + "/coordinator/v1/get_task")
assert.NoError(t, err)

View File

@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
cur, err := Current(pgDB)
assert.NoError(t, err)
// total number of tables.
assert.Equal(t, int64(24), cur)
assert.Equal(t, int64(22), cur)
}
func testMigrate(t *testing.T) {
assert.NoError(t, Migrate(pgDB))
cur, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(24), cur)
assert.Equal(t, int64(22), cur)
}
func testRollback(t *testing.T) {
version, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(24), version)
assert.Equal(t, int64(22), version)
assert.NoError(t, Rollback(pgDB, nil))

View File

@@ -14,6 +14,7 @@ CREATE TABLE bundle (
batch_proofs_status SMALLINT NOT NULL DEFAULT 1,
proving_status SMALLINT NOT NULL DEFAULT 1,
proof BYTEA DEFAULT NULL,
prover_assigned_at TIMESTAMP(0) DEFAULT NULL,
proved_at TIMESTAMP(0) DEFAULT NULL,
proof_time_sec INTEGER DEFAULT NULL,
total_attempts SMALLINT NOT NULL DEFAULT 0,

View File

@@ -1,23 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE chunk
ADD COLUMN codec_version SMALLINT NOT NULL DEFAULT 0,
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE batch
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS chunk
DROP COLUMN IF EXISTS enable_compress,
DROP COLUMN IF EXISTS codec_version;
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS enable_compress;
-- +goose StatementEnd

View File

@@ -1,15 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE batch
ADD COLUMN blob_bytes BYTEA;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS blob_bytes;
-- +goose StatementEnd

View File

@@ -477,7 +477,6 @@ github.com/docker/cli-docs-tool v0.6.0 h1:Z9x10SaZgFaB6jHgz3OWooynhSa40CsWkpe5hE
github.com/docker/cli-docs-tool v0.6.0/go.mod h1:zMjqTFCU361PRh8apiXzeAZ1Q/xupbIwTusYpzCXS/o=
github.com/docker/docker v1.4.2-0.20180625184442-8e610b2b55bf/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
github.com/dop251/goja v0.0.0-20211011172007-d99e4b8cbf48 h1:iZOop7pqsg+56twTopWgwCGxdB5SI2yDO8Ti7eTRliQ=
github.com/dop251/goja v0.0.0-20211011172007-d99e4b8cbf48/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf h1:Yt+4K30SdjOkRoRRm3vYNQgR+/ZIy0RmeUDZo7Y8zeQ=
github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
@@ -592,7 +591,6 @@ github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qK
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gotestyourself/gotestyourself v1.4.0 h1:CDSlSIuRL/Fsc72Ln5lMybtrCvSRDddsHsDRG/nP7Rg=
github.com/gotestyourself/gotestyourself v1.4.0/go.mod h1:zZKM6oeNM8k+FRljX1mnzVYeS8wiGgQyvST1/GafPbY=
github.com/graph-gophers/graphql-go v0.0.0-20201113091052-beb923fada29 h1:sezaKhEfPFg8W0Enm61B9Gs911H8iesGY5R8NDPtd1M=
github.com/graph-gophers/graphql-go v0.0.0-20201113091052-beb923fada29/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc=
github.com/graph-gophers/graphql-go v1.3.0 h1:Eb9x/q6MFpCLz7jBCiP/WTxjSDrYLR1QY41SORZyNJ0=
github.com/graph-gophers/graphql-go v1.3.0/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc=
@@ -667,7 +665,6 @@ github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5 h1:PJr+ZMXIecYc
github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef h1:2jNeR4YUziVtswNP9sEFAI913cVrzH85T+8Q6LpYbT0=
github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef/go.mod h1:Ct9fl0F6iIOGgxJ5npU/IUOhOhqlVrGjyIZc8/MagT0=
github.com/karalabe/usb v0.0.0-20211005121534-4c5740d64559 h1:0VWDXPNE0brOek1Q8bLfzKkvOzwbQE/snjGojlCr8CY=
github.com/karalabe/usb v0.0.0-20211005121534-4c5740d64559/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU=
github.com/karalabe/usb v0.0.2 h1:M6QQBNxF+CQ8OFvxrT90BA0qBOXymndZnk5q235mFc4=
github.com/karalabe/usb v0.0.2/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU=
@@ -808,7 +805,6 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240605080813-32bfc9fccde7/go.mod h1:1wWYii0OPwd5kw+xrz0PFgS420xNadrNF1x/ELJT+TM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
github.com/segmentio/kafka-go v0.2.0 h1:HtCSf6B4gN/87yc5qTl7WsxPKQIIGXLPPM1bMCPOsoY=
@@ -949,7 +945,6 @@ golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMx
golang.org/x/image v0.0.0-20190802002840-cff245a6509b h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3 h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
@@ -1098,7 +1093,6 @@ gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gotest.tools v1.4.0/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs=
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
k8s.io/component-base v0.26.7 h1:uqsOyZh0Zqoaup8tmHa491D/CvgFdGUs+X2H/inNUKM=

prover/Cargo.lock (generated, 212 lines changed)
View File

@@ -30,8 +30,8 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -39,9 +39,9 @@ dependencies = [
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.12.0",
"gadgets 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
@@ -59,13 +59,13 @@ dependencies = [
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.12.0",
"zkevm-circuits 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -73,9 +73,9 @@ dependencies = [
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.13.0",
"gadgets 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
@@ -93,7 +93,7 @@ dependencies = [
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
"zkevm-circuits 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
]
[[package]]
@@ -634,22 +634,24 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bus-mapping"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"gadgets 0.12.0",
"external-tracer 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"gadgets 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.12.0",
"mpt-zktrie 0.12.0",
"mock 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"mpt-zktrie 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
@@ -659,20 +661,20 @@ dependencies = [
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"gadgets 0.13.0",
"gadgets 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"mock 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"mpt-zktrie 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"num",
"poseidon-circuit",
"revm-precompile",
@@ -1309,8 +1311,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1327,6 +1329,7 @@ dependencies = [
"revm-primitives",
"serde",
"serde_json",
"serde_stacker",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
@@ -1337,8 +1340,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1557,11 +1560,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"geth-utils 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"geth-utils 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"log",
"serde",
"serde_json",
@@ -1570,11 +1573,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"eth-types 0.13.0",
"geth-utils 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"geth-utils 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"log",
"serde",
"serde_json",
@@ -1787,10 +1790,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1799,10 +1802,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1822,8 +1825,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -1832,8 +1835,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -2672,13 +2675,13 @@ dependencies = [
[[package]]
name = "mock"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.12.0",
"external-tracer 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"itertools 0.11.0",
"log",
"rand",
@@ -2687,13 +2690,13 @@ dependencies = [
[[package]]
name = "mock"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.13.0",
"external-tracer 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"itertools 0.11.0",
"log",
"rand",
@@ -2702,30 +2705,30 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
"zktrie",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie",
]
[[package]]
@@ -3291,8 +3294,8 @@ dependencies = [
"http 1.1.0",
"log",
"once_cell",
"prover 0.12.0",
"prover 0.13.0",
"prover 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"prover 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"rand",
"reqwest 0.12.4",
"reqwest-middleware",
@@ -3308,17 +3311,17 @@ dependencies = [
[[package]]
name = "prover"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator 0.12.0",
"aggregator 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.12.0",
"bus-mapping 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"chrono",
"dotenvy",
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
@@ -3326,7 +3329,7 @@ dependencies = [
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.12.0",
"mpt-zktrie 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"num-bigint",
"rand",
"rand_xorshift",
@@ -3337,22 +3340,22 @@ dependencies = [
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.12.0",
"zkevm-circuits 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"aggregator 0.13.0",
"aggregator 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"bus-mapping 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
@@ -3360,7 +3363,7 @@ dependencies = [
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"mpt-zktrie 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"num-bigint",
"rand",
"rand_xorshift",
@@ -3371,7 +3374,7 @@ dependencies = [
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
"zkevm-circuits 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
]
[[package]]
@@ -3663,7 +3666,7 @@ dependencies = [
[[package]]
name = "revm-precompile"
version = "7.0.0"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#36c304d9e9ba4e4b2d5468d91a6bd27210133b6a"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#8543dd627348907773d8057807b6a310b276bb30"
dependencies = [
"aurora-engine-modexp",
"c-kzg",
@@ -3679,7 +3682,7 @@ dependencies = [
[[package]]
name = "revm-primitives"
version = "4.0.0"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#36c304d9e9ba4e4b2d5468d91a6bd27210133b6a"
source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#8543dd627348907773d8057807b6a310b276bb30"
dependencies = [
"alloy-primitives",
"auto_impl",
@@ -5337,18 +5340,18 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping 0.12.0",
"bus-mapping 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"either",
"env_logger 0.10.2",
"eth-types 0.12.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.12.0",
"gadgets 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
@@ -5358,8 +5361,8 @@ dependencies = [
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.12.0",
"mpt-zktrie 0.12.0",
"mock 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"mpt-zktrie 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4)",
"num",
"num-bigint",
"poseidon-circuit",
@@ -5379,18 +5382,18 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.0#cf71a0e324d42f7895363f4f3e8575d79bb7ea97"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion#a5337832455b1ed49bc1104a6ed4265dcdc27cdb"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"bus-mapping 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"either",
"env_logger 0.10.2",
"eth-types 0.13.0",
"eth-types 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"gadgets 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
@@ -5400,8 +5403,8 @@ dependencies = [
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"mock 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"mpt-zktrie 0.11.0 (git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/agg_recursion)",
"num",
"num-bigint",
"poseidon-circuit",
@@ -5425,16 +5428,7 @@ version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie_rust",
]
[[package]]
@@ -5451,20 +5445,6 @@ dependencies = [
"strum_macros 0.24.3",
]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]]
name = "zstd"
version = "0.13.0"

View File

@@ -29,8 +29,8 @@ ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.0", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_curie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/agg_recursion", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
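Upstream, `prover_curie` and `prover_darwin` are the same crate, `prover`; the `package = "prover"` field is what lets this workspace depend on two pins of it under distinct local names, and it is also why the Cargo.lock diff above carries identically named, identically versioned `prover 0.11.0` entries that differ only in the git source recorded in parentheses. Below is a minimal sketch of the resulting imports, assuming the `zkevm::Prover` module layout that the circuits-handler diff further down actually imports; the alias type names are illustrative, and the fragment only builds inside a workspace using the manifest above.

    // Two local names, one upstream crate name (`prover`), pinned to two
    // different git sources via `package = "prover"` in Cargo.toml.
    use prover_curie::zkevm::Prover as CurieChunkProver;
    use prover_darwin::zkevm::Prover as DarwinChunkProver;

    fn main() {
        // Which prover runs is chosen per hard fork at runtime; see the
        // CircuitsHandlerProvider diff later in this compare.
        println!("{}", std::any::type_name::<CurieChunkProver>());
        println!("{}", std::any::type_name::<DarwinChunkProver>());
    }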

View File

@@ -3,7 +3,7 @@
"keystore_path": "keystore.json",
"keystore_password": "prover-pwd",
"db_path": "unique-db-path-for-prover-1",
"prover_type": 2,
"proof_type": 2,
"low_version_circuit": {
"hard_fork_name": "bernoulli",
"params_path": "params",

View File

@@ -14,8 +14,6 @@ use types::*;
use crate::{config::Config, key_signer::KeySigner};
pub use errors::ProofStatusNotOKError;
pub struct CoordinatorClient<'a> {
api: Api,
token: Option<String>,

View File

@@ -1,6 +1,4 @@
use crate::{coordinator_client::ProofStatusNotOKError, types::ProofStatus};
use super::{errors::*, types::*};
use super::types::*;
use anyhow::{bail, Result};
use core::time::Duration;
use reqwest::{header::CONTENT_TYPE, Url};
@@ -78,23 +76,7 @@ impl Api {
token: &String,
) -> Result<Response<SubmitProofResponseData>> {
let method = "/coordinator/v1/submit_proof";
let response = self
.post_with_token::<SubmitProofRequest, Response<SubmitProofResponseData>>(
method, req, token,
)
.await?;
// when req's status already not ok, we mark the error returned from coordinator and will
// ignore it later.
if response.errcode == ErrorCode::ErrCoordinatorHandleZkProofFailure
&& req.status != ProofStatus::Ok
&& response
.errmsg
.contains("validator failure proof msg status not ok")
{
return Err(anyhow::anyhow!(ProofStatusNotOKError));
}
Ok(response)
self.post_with_token(method, req, token).await
}
async fn post_with_token<Req, Resp>(

View File

@@ -1,5 +1,4 @@
use serde::{Deserialize, Deserializer};
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode {
@@ -52,14 +51,3 @@ impl<'de> Deserialize<'de> for ErrorCode {
Ok(ErrorCode::from_i32(v))
}
}
// ====================================================
#[derive(Debug, Clone)]
pub struct ProofStatusNotOKError;
impl fmt::Display for ProofStatusNotOKError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "proof status not ok")
}
}
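The `ProofStatusNotOKError` removed here is a plain marker type: it is wrapped into an `anyhow::Error` at the call site and later recognized with `err.is::<ProofStatusNotOKError>()` (both sites are visible in the api.rs and task_processor.rs hunks of this compare). A self-contained sketch of that pattern, with a stand-in type name so it does not depend on this crate:

    use anyhow::{anyhow, Result};
    use std::fmt;

    // Stand-in for the marker error type removed in this diff.
    #[derive(Debug, Clone)]
    struct StatusNotOk;

    impl fmt::Display for StatusNotOk {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "proof status not ok")
        }
    }

    fn submit() -> Result<()> {
        // `anyhow!(value)` preserves the concrete type, so the error stays
        // downcastable even after propagation through `?`.
        Err(anyhow!(StatusNotOk))
    }

    fn main() {
        if let Err(err) = submit() {
            if err.is::<StatusNotOk>() {
                println!("expected condition, downgrade to info: {err}");
            } else {
                println!("unexpected error: {err:#}");
            }
        }
    }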

View File

@@ -37,7 +37,7 @@ struct Args {
log_file: Option<String>,
}
fn start() -> Result<()> {
fn main() -> Result<(), Box<dyn std::error::Error>> {
let args = Args::parse();
if args.version {
@@ -76,10 +76,3 @@ fn start() -> Result<()> {
Ok(())
}
fn main() {
let result = start();
if let Err(e) = result {
log::error!("main exit with error {:#}", e)
}
}
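Both entrypoint shapes in this hunk work; they differ in who formats the failure. Returning `Result<(), Box<dyn std::error::Error>>` from `main` lets the runtime Debug-print the error, while the removed `start()` wrapper logs it once with anyhow's context-chain formatting. A minimal sketch of the wrapper style, with the body of `start()` stubbed out:

    use anyhow::Result;

    fn start() -> Result<()> {
        // ... parse args, initialize logging, run the prover ...
        Ok(())
    }

    // `main` itself never fails; the single exit path logs the error chain.
    fn main() {
        if let Err(e) = start() {
            // Assumes a logger (e.g. env_logger) was initialized inside start().
            log::error!("main exit with error {:#}", e);
        }
    }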

View File

@@ -71,6 +71,7 @@ impl<'a> Prover<'a> {
let mut req = GetTaskRequest {
task_types: get_task_types(self.config.prover_type),
prover_height: None,
// vks: self.circuits_handler_provider.borrow().get_vks(),
};
if self.config.prover_type == ProverType::Chunk {
@@ -147,7 +148,7 @@ impl<'a> Prover<'a> {
task_type: task.task_type,
status: ProofStatus::Error,
failure_type: Some(failure_type),
failure_msg: Some(format!("{:#}", error)),
failure_msg: Some(error.to_string()),
..Default::default()
};
self.do_submit(&request)
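The `failure_msg` change swaps anyhow's alternate formatting for `to_string()`: `format!("{:#}", error)` renders the whole context chain ("outer: cause: root"), while `error.to_string()` renders only the outermost message. A self-contained comparison:

    use anyhow::{Context, Result};

    fn connect() -> Result<()> {
        Err(anyhow::anyhow!("connection refused"))
    }

    fn main() {
        let err = connect().context("failed to submit proof").unwrap_err();
        // Display shows only the outermost context:
        assert_eq!(err.to_string(), "failed to submit proof");
        // The alternate form appends every cause in the chain:
        assert_eq!(format!("{:#}", err), "failed to submit proof: connection refused");
    }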

View File

@@ -1,4 +1,4 @@
use super::{coordinator_client::ProofStatusNotOKError, prover::Prover, task_cache::TaskCache};
use super::{prover::Prover, task_cache::TaskCache};
use anyhow::{Context, Result};
use std::rc::Rc;
@@ -16,11 +16,7 @@ impl<'a> TaskProcessor<'a> {
loop {
log::info!("start a new round.");
if let Err(err) = self.prove_and_submit() {
if err.is::<ProofStatusNotOKError>() {
log::info!("proof status not ok, downgrade level to info.");
} else {
log::error!("encounter error: {:#}", err);
}
log::error!("encounter error: {:#}", err);
} else {
log::info!("prove & submit succeed.");
}
@@ -58,18 +54,11 @@ impl<'a> TaskProcessor<'a> {
);
let result = match self.prover.prove_task(&task_wrapper.task) {
Ok(proof_detail) => self.prover.submit_proof(proof_detail, &task_wrapper.task),
Err(error) => {
log::error!(
"failed to prove task, id: {}, error: {:#}",
&task_wrapper.task.id,
error
);
self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
)
}
Err(error) => self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
),
};
return result;
}

View File

@@ -1,5 +1,5 @@
mod curie;
mod darwin;
mod darwin_v2;
use super::geth_client::GethClient;
use crate::{
@@ -8,8 +8,8 @@ use crate::{
utils::get_task_types,
};
use anyhow::{bail, Result};
use curie::CurieHandler;
use darwin::DarwinHandler;
use darwin_v2::DarwinV2Handler;
use std::{cell::RefCell, collections::HashMap, rc::Rc};
type HardForkName = String;
@@ -38,7 +38,7 @@ pub struct CircuitsHandlerProvider<'a> {
geth_client: Option<Rc<RefCell<GethClient>>>,
circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
current_fork_name: Option<HardForkName>,
current_hard_fork_name: Option<HardForkName>,
current_circuit: Option<Rc<Box<dyn CircuitsHandler>>>,
}
@@ -60,7 +60,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.low_version_circuit.hard_fork_name
);
AssetsDirEnvConfig::enable_first();
DarwinHandler::new(
CurieHandler::new(
prover_type,
&config.low_version_circuit.params_path,
&config.low_version_circuit.assets_path,
@@ -83,7 +83,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.high_version_circuit.hard_fork_name
);
AssetsDirEnvConfig::enable_second();
DarwinV2Handler::new(
DarwinHandler::new(
prover_type,
&config.high_version_circuit.params_path,
&config.high_version_circuit.assets_path,
@@ -102,7 +102,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
config,
geth_client,
circuits_handler_builder_map: m,
current_fork_name: None,
current_hard_fork_name: None,
current_circuit: None,
};
@@ -113,12 +113,13 @@ impl<'a> CircuitsHandlerProvider<'a> {
&mut self,
hard_fork_name: &String,
) -> Result<Rc<Box<dyn CircuitsHandler>>> {
match &self.current_fork_name {
Some(fork_name) if fork_name == hard_fork_name => {
match &self.current_hard_fork_name {
Some(name) if name == hard_fork_name => {
log::info!("get circuits handler from cache");
if let Some(handler) = &self.current_circuit {
Ok(handler.clone())
} else {
log::error!("missing cached handler, there must be something wrong.");
bail!("missing cached handler, there must be something wrong.")
}
}
@@ -130,11 +131,12 @@ impl<'a> CircuitsHandlerProvider<'a> {
log::info!("building circuits handler for {hard_fork_name}");
let handler = builder(self.prover_type, self.config, self.geth_client.clone())
.expect("failed to build circuits handler");
self.current_fork_name = Some(hard_fork_name.clone());
self.current_hard_fork_name = Some(hard_fork_name.clone());
let rc_handler = Rc::new(handler);
self.current_circuit = Some(rc_handler.clone());
Ok(rc_handler)
} else {
log::error!("missing builder, there must be something wrong.");
bail!("missing builder, there must be something wrong.")
}
}
@@ -165,7 +167,6 @@ impl<'a> CircuitsHandlerProvider<'a> {
);
vk
})
.filter(|vk| !vk.is_empty())
.collect::<Vec<String>>()
})
.collect::<Vec<String>>()
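Independent of the renames in this hunk, the provider's shape is a small fork-keyed cache: a map from hard-fork name to a builder function, plus the most recently built handler memoized under its fork name. A stripped-down sketch of that structure, with toy types standing in for the real `CircuitsHandler` machinery:

    use std::{collections::HashMap, rc::Rc};

    type HardForkName = String;

    trait Handler {
        fn describe(&self) -> String;
    }

    struct Provider {
        builders: HashMap<HardForkName, fn() -> Box<dyn Handler>>,
        current_name: Option<HardForkName>,
        current: Option<Rc<Box<dyn Handler>>>,
    }

    impl Provider {
        fn get(&mut self, name: &str) -> Option<Rc<Box<dyn Handler>>> {
            // Cache hit: the requested fork is the one built last time.
            if self.current_name.as_deref() == Some(name) {
                return self.current.clone();
            }
            // Cache miss: rebuild through the registered builder.
            let handler = Rc::new((self.builders.get(name)?)());
            self.current_name = Some(name.to_string());
            self.current = Some(handler.clone());
            Some(handler)
        }
    }

    fn main() {
        struct Curie;
        impl Handler for Curie {
            fn describe(&self) -> String {
                "curie".into()
            }
        }
        let builder: fn() -> Box<dyn Handler> = || Box::new(Curie);
        let mut provider = Provider {
            builders: HashMap::from([("curie".to_string(), builder)]),
            current_name: None,
            current: None,
        };
        println!("{}", provider.get("curie").unwrap().describe());
    }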

View File

@@ -10,25 +10,21 @@ use serde::Deserialize;
use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_darwin_v2::{
use prover_curie::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask,
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
};
// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
#[derive(Debug, Clone, Deserialize)]
#[derive(Deserialize)]
pub struct BatchTaskDetail {
pub chunk_infos: Vec<ChunkInfo>,
#[serde(flatten)]
pub batch_proving_task: BatchProvingTask,
pub chunk_proofs: Vec<ChunkProof>,
}
type BundleTaskDetail = BundleProvingTask;
#[derive(Debug, Clone, Deserialize)]
#[derive(Deserialize)]
pub struct ChunkTaskDetail {
pub block_hashes: Vec<CommonHash>,
}
@@ -38,14 +34,14 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
}
#[derive(Default)]
pub struct DarwinV2Handler {
pub struct CurieHandler {
chunk_prover: Option<RefCell<ChunkProver>>,
batch_prover: Option<RefCell<BatchProver>>,
geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl DarwinV2Handler {
impl CurieHandler {
pub fn new(
prover_type: ProverType,
params_dir: &str,
@@ -87,15 +83,11 @@ impl DarwinV2Handler {
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
.collect();
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
@@ -105,11 +97,11 @@ impl DarwinV2Handler {
bail!("non-match chunk protocol")
}
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch_proof = prover.borrow_mut().gen_batch_proof(
batch_task_detail.batch_proving_task,
None,
self.get_output_dir(),
)?;
let batch = BatchProvingTask { chunk_proofs };
let batch_proof =
prover
.borrow_mut()
.gen_agg_evm_proof(batch, None, self.get_output_dir())?;
return Ok(batch_proof);
}
@@ -118,32 +110,12 @@ impl DarwinV2Handler {
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let bundle_proof = prover.borrow_mut().gen_bundle_proof(
bundle_task_detail,
None,
self.get_output_dir(),
)?;
return Ok(bundle_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_bundle_proof for task {}", task.id);
let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
Ok(serde_json::to_string(&bundle_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -153,6 +125,17 @@ impl DarwinV2Handler {
self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
}
fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkInfo, ChunkProof)>> {
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
Ok(batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.chunk_proofs.clone())
.collect())
}
fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
if block_hashes.is_empty() {
log::error!("[prover] failed to get sorted traces: block_hashes are empty");
@@ -207,7 +190,7 @@ impl DarwinV2Handler {
}
}
impl CircuitsHandler for DarwinV2Handler {
impl CircuitsHandler for CurieHandler {
fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
match task_type {
TaskType::Chunk => self
@@ -217,11 +200,7 @@ impl CircuitsHandler for DarwinV2Handler {
TaskType::Batch => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_batch_vk()),
TaskType::Bundle => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_bundle_vk()),
.and_then(|prover| prover.borrow().get_vk()),
_ => unreachable!(),
}
}
@@ -230,7 +209,6 @@ impl CircuitsHandler for DarwinV2Handler {
match task_type {
TaskType::Chunk => self.gen_chunk_proof(task),
TaskType::Batch => self.gen_batch_proof(task),
TaskType::Bundle => self.gen_bundle_proof(task),
_ => unreachable!(),
}
}
@@ -242,11 +220,7 @@ impl CircuitsHandler for DarwinV2Handler {
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use ethers_core::types::H256;
use prover_darwin_v2::{
aggregator::eip4844, utils::chunk_trace_to_witness_block, BatchData, BatchHeader,
MAX_AGG_SNARKS,
};
use prover_curie::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
@@ -257,7 +231,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("DARWIN_V2_TEST_DIR")
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
@@ -269,9 +243,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
@@ -281,22 +255,20 @@ mod tests {
#[test]
fn test_circuits() -> Result<()> {
let chunk_handler =
DarwinV2Handler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_handler = CurieHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_traces = vec![];
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
chunk_traces.push(chunk_trace.clone());
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
@@ -307,97 +279,30 @@ mod tests {
chunk_proofs.push(chunk_proof);
}
let batch_handler =
DarwinV2Handler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_handler = CurieHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let batch_task_detail = make_batch_task_detail(chunk_traces, chunk_proofs, None);
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
// copied from https://github.com/scroll-tech/scroll-prover/blob/main/integration/src/prove.rs
fn get_blob_from_chunks(chunks: &[ChunkInfo]) -> Vec<u8> {
let num_chunks = chunks.len();
let padded_chunk =
ChunkInfo::mock_padded_chunk_info_for_testing(chunks.last().as_ref().unwrap());
let chunks_with_padding = [
chunks.to_vec(),
vec![padded_chunk; MAX_AGG_SNARKS - num_chunks],
]
.concat();
let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(chunks.len(), &chunks_with_padding);
let batch_bytes = batch_data.get_batch_data_bytes();
let blob_bytes = eip4844::get_blob_bytes(&batch_bytes);
log::info!("blob_bytes len {}", blob_bytes.len());
blob_bytes
}
// TODO: chunk_infos can be extracted from chunk_proofs.
// Still needed?
fn make_batch_task_detail(
chunk_traces: Vec<Vec<BlockTrace>>,
chunk_proofs: Vec<ChunkProof>,
last_batcher_header: Option<BatchHeader<{ MAX_AGG_SNARKS }>>,
) -> BatchTaskDetail {
// dummy parent batch hash
let dummy_parent_batch_hash = H256([
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
]);
let chunk_infos: Vec<_> = chunk_proofs.iter().map(|p| p.chunk_info.clone()).collect();
let l1_message_popped = chunk_traces
.iter()
.flatten()
.map(|chunk| chunk.num_l1_txs())
.sum();
let last_block_timestamp = chunk_traces.last().map_or(0, |block_traces| {
block_traces
.last()
.map_or(0, |block_trace| block_trace.header.timestamp.as_u64())
});
let blob_bytes = get_blob_from_chunks(&chunk_infos);
let batch_header = BatchHeader::construct_from_chunks(
last_batcher_header.map_or(4, |header| header.version),
last_batcher_header.map_or(123, |header| header.batch_index + 1),
l1_message_popped,
last_batcher_header.map_or(l1_message_popped, |header| {
header.total_l1_message_popped + l1_message_popped
}),
last_batcher_header.map_or(dummy_parent_batch_hash, |header| header.batch_hash()),
last_block_timestamp,
&chunk_infos,
&blob_bytes,
);
BatchTaskDetail {
chunk_infos,
batch_proving_task: BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
},
}
}
fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
fn check_vk(task_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", task_type);
let vk_from_file = read_vk(task_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
fn read_vk(task_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", task_type);
let vk_file = match task_type {
TaskType::Chunk => CHUNK_VK_PATH.clone(),
TaskType::Batch => BATCH_VK_PATH.clone(),
TaskType::Bundle => todo!(),
TaskType::Bundle => unreachable!(),
TaskType::Undefined => unreachable!(),
};
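One detail worth noting in the test scaffolding above: the working directory and vk paths are resolved once, on first use, from an environment variable via `LazyLock`. A self-contained sketch of that pattern (the `CURIE_TEST_DIR` variable name matches the hunk; the rest is illustrative):

    use std::sync::LazyLock;

    static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
        std::env::var("CURIE_TEST_DIR")
            .unwrap_or_else(|_| String::from("/assets"))
            .trim_end_matches('/')
            .to_string()
    });

    static CHUNK_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));

    fn main() {
        // The first dereference runs each initializer exactly once.
        println!("{}", *CHUNK_VK_PATH);
    }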

Some files were not shown because too many files have changed in this diff.