Compare commits

..

35 Commits

Author SHA1 Message Date
georgehao
82f39aa9dc fix bundle task detail bug (#1460) 2024-07-19 23:44:59 +08:00
georgehao
0c106636c2 update zkcircuit (#1457) 2024-07-19 17:48:52 +08:00
georgehao
c87c35629d fix typo (#1456) 2024-07-19 13:23:04 +08:00
georgehao
8f273f7a69 fix coordinator bug (#1455) 2024-07-19 12:33:33 +08:00
Mengran Lan
17b7e5caee Feat/upgrade4 prover print more error (#1454) 2024-07-19 08:23:26 +08:00
colin
07dc2c91bb Merge branch 'develop' into feat/upgrade4 2024-07-19 00:45:46 +08:00
colinlyguo
768226c71f Merge branch 'develop' into feat/upgrade4 2024-07-19 00:27:28 +08:00
georgehao
74c197e85a update codec version (#1448)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Suuuuuuperrrrr fred <FredrikaPhililip@proton.me>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: sbaizet <74511063+sbaizet-ledger@users.noreply.github.com>
Co-authored-by: caseylove <casey4love@foxmail.com>
Co-authored-by: BoxChen <13927203+nishuzumi@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2024-07-19 00:00:45 +08:00
Mengran Lan
f95eaec3ce feat(prover): upgrade darwin's zkevm-circuits branch (#1451) 2024-07-18 23:43:00 +08:00
Mengran Lan
c34883e0aa fix lint issue (#1450) 2024-07-18 23:26:30 +08:00
Mengran Lan
8960d0f32d fix(prover): fix panic issue when trying to get curie's bundle vk (#1449) 2024-07-18 23:13:01 +08:00
georgehao
76a9d6fb20 fix init bundle vk map (#1447) 2024-07-18 20:51:32 +08:00
Rohit Narurkar
8b779ff49a prover(branch): release/v0.12.0 (#1446) 2024-07-18 18:47:23 +08:00
georgehao
d516949f39 fix ProcessPendingBundles record not found (#1444) 2024-07-18 11:38:14 +08:00
Rohit Narurkar
3716c5a436 bump prover (upgrade #4) (#1437) 2024-07-18 10:19:31 +08:00
georgehao
04a893a98f feat: enable process pending bundles (#1433)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-07-12 11:44:30 +08:00
Mengran Lan
622ce7b0c9 feat(prover): upgrade zkevm-circuits to same as coordinator's with changes of BatchProvingTask & BundleProvingTask (#1434) 2024-07-12 10:56:33 +08:00
georgehao
2392d3458a enable chunk proof verify (#1432) 2024-07-11 20:02:43 +08:00
Rohit Narurkar
a2e8b6a644 libzkp and coordinator updates (preparation for Upgrade4) (#1419)
Co-authored-by: georgehao <haohongfan@gmail.com>
2024-07-11 19:47:02 +08:00
Mengran Lan
3567f5b62a feat(prover): upgrade zkevm-circuits version (#1424) 2024-07-11 15:06:24 +08:00
georgehao
61a3af538d enable push tag to build docker 2024-07-10 10:05:38 +08:00
colin
c758aa6fa4 fix: build coordinator and bridge-history images (#1421) 2024-07-10 09:30:32 +08:00
Mengran Lan
abe66c6f7e feat(prover): supporting upgrade4 (#1412)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-07-08 19:32:34 +08:00
colinlyguo
7196c5cab6 Merge branch 'develop' into feat/upgrade4 2024-07-08 19:27:05 +08:00
georgehao
380dbba61d feat(coordinator): upgrade 4 (#1399)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
Co-authored-by: Mengran Lan <lanmengran@qq.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-07-08 18:57:53 +08:00
colin
bc9bc5db36 feat(rollup-relayer): finalize bundle (#1411) 2024-07-08 15:26:50 +08:00
colin
beee0c286c feat(coordinator): enhance batch proof sanity check (#1408)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Mengran Lan <mengran@scroll.io>
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
2024-07-02 16:59:52 +08:00
colin
0b32509a55 Merge branch 'develop' into feat/upgrade4 2024-07-02 16:57:26 +08:00
Rohit Narurkar
e679052df1 feat(common): update types as per upgrade4 (#1409)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2024-07-02 00:42:29 +08:00
colin
229809ad6f feat(rollup-relayer): support commitBatchWithBlobProof (#1397) 2024-06-27 21:25:51 +08:00
colin
fb43e87608 feat(rollup-db): add bundle hash (#1401) 2024-06-27 15:00:47 +08:00
colin
54adbb3e77 feat(common): add hardfork util functions supporting both height and timestamp (#1400) 2024-06-27 09:55:07 +08:00
georgehao
664c042a14 Merge branch 'develop' into feat/upgrade4 2024-06-26 21:53:57 +08:00
colin
1a739cd5a7 faet(db): add bundle table (#1395)
Co-authored-by: georgehao <haohongfan@gmail.com>
2024-06-25 10:13:34 +08:00
georgehao
1b6886bb49 init 2024-06-24 09:21:57 +08:00
137 changed files with 1890 additions and 4139 deletions


@@ -1,15 +0,0 @@
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
language: "en-US"
early_access: false
reviews:
profile: "chill"
request_changes_workflow: false
high_level_summary: true
poem: true
review_status: true
collapse_walkthrough: false
auto_review:
enabled: true
drafts: false
chat:
auto_reply: true


@@ -49,8 +49,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -94,8 +94,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -139,8 +139,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -184,8 +184,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -229,8 +229,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -274,8 +274,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -318,8 +318,8 @@ jobs:
file: ./build/dockerfiles/coordinator-api.Dockerfile
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -363,7 +363,7 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest


@@ -1,6 +1,6 @@
.PHONY: fmt dev_docker build_test_docker run_test_docker clean update
L2GETH_TAG=scroll-v5.6.3
L2GETH_TAG=scroll-v5.5.1
help: ## Display this help message
@grep -h \
@@ -40,8 +40,8 @@ fmt: ## Format the code
dev_docker: ## Build docker images for development/testing usages
docker pull postgres
docker build -t scroll_l1geth --platform linux/amd64 ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth --platform linux/amd64 ./common/testcontainers/docker/l2geth/
docker build -t scroll_l1geth ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth ./common/testcontainers/docker/l2geth/
clean: ## Empty out the bin folder
@rm -rf build/bin


@@ -18,7 +18,7 @@
├── <a href="./database">database</a>: Database client and schema definition
├── <a href="./prover">prover</a>: Prover client that runs proof generation for zkEVM circuit and aggregation circuit
├── <a href="./rollup">rollup</a>: Rollup-related services
├── <a href="https://github.com/scroll-tech/scroll-contracts.git">scroll-contracts</a>: solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts.
├── <a href="./scroll-contracts">scroll-contracts</a>: solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts.
└── <a href="./tests">tests</a>: Integration tests
</pre>

File diff suppressed because one or more lines are too long


@@ -19,9 +19,7 @@
"ScrollChainAddr": "0xa13BAF47339d63B743e7Da8741db5456DAc1E556",
"GatewayRouterAddr": "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6",
"MessageQueueAddr": "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B",
"BatchBridgeGatewayAddr": "0x5Bcfd99c34cf7E06fc756f6f5aE7400504852bc4",
"GasTokenGatewayAddr": "0x0000000000000000000000000000000000000000",
"WrappedTokenGatewayAddr": "0x0000000000000000000000000000000000000000"
"BatchBridgeGatewayAddr": "0x5Bcfd99c34cf7E06fc756f6f5aE7400504852bc4"
},
"L2": {
"confirmation": 0,


@@ -89,7 +89,7 @@ require (
github.com/rjeczalik/notify v0.9.1 // indirect
github.com/rs/cors v1.7.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb // indirect
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 // indirect
github.com/scroll-tech/zktrie v0.8.4 // indirect
github.com/sethvargo/go-retry v0.2.4 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect


@@ -308,8 +308,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 h1:QKgfS8G0btzg7nmFjSjllaxGkns3yg7g2/tG1nWExEI=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=


@@ -6,7 +6,6 @@ import (
"path/filepath"
"scroll-tech/common/database"
"scroll-tech/common/utils"
)
// FetcherConfig is the configuration of Layer1 or Layer2 fetcher.
@@ -31,8 +30,6 @@ type FetcherConfig struct {
GatewayRouterAddr string `json:"GatewayRouterAddr"`
MessageQueueAddr string `json:"MessageQueueAddr"`
BatchBridgeGatewayAddr string `json:"BatchBridgeGatewayAddr"`
GasTokenGatewayAddr string `json:"GasTokenGatewayAddr"`
WrappedTokenGatewayAddr string `json:"WrappedTokenGatewayAddr"`
}
// RedisConfig redis config
@@ -67,11 +64,5 @@ func NewConfig(file string) (*Config, error) {
return nil, err
}
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_BRIDGE_HISTORY")
if err != nil {
return nil, err
}
return cfg, nil
}
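Note: one side of this hunk has NewConfig call utils.OverrideConfigWithEnv(cfg, "SCROLL_BRIDGE_HISTORY") after parsing the JSON file; the helper's implementation is not part of this diff. Purely as an illustration of the prefix-based override pattern, and not the repository's actual helper or variable naming, a sketch in Go could look like:

```go
package main

import (
	"fmt"
	"os"
	"reflect"
	"strings"
)

// FetcherConfig is trimmed to two fields for the illustration.
type FetcherConfig struct {
	Endpoint         string
	MessageQueueAddr string
}

// overrideWithEnv replaces exported string fields of cfg when a
// PREFIX_FIELDNAME environment variable is set (hypothetical naming scheme).
func overrideWithEnv(prefix string, cfg interface{}) {
	v := reflect.ValueOf(cfg).Elem()
	t := v.Type()
	for i := 0; i < t.NumField(); i++ {
		key := prefix + "_" + strings.ToUpper(t.Field(i).Name)
		if val, ok := os.LookupEnv(key); ok && v.Field(i).Kind() == reflect.String {
			v.Field(i).SetString(val)
		}
	}
}

func main() {
	os.Setenv("SCROLL_BRIDGE_HISTORY_ENDPOINT", "https://rpc.example.com")
	cfg := &FetcherConfig{Endpoint: "http://localhost:8545"}
	overrideWithEnv("SCROLL_BRIDGE_HISTORY", cfg)
	fmt.Println(cfg.Endpoint) // https://rpc.example.com
}
```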


@@ -2,7 +2,7 @@ package logic
import (
"context"
"errors"
"fmt"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
@@ -153,7 +153,7 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce)
return errors.New("nonce mismatch")
return fmt.Errorf("nonce mismatch")
}
messageHashes := make([]common.Hash, len(l2WithdrawMessages))
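The one-line change in this hunk (and a matching one in utils.go further down) switches between errors.New and fmt.Errorf for a fixed message. As a general Go note rather than anything specific to this PR: fmt.Errorf adds value only when the message interpolates values or wraps another error with %w; for a constant string, errors.New is the conventional choice and what linters typically suggest. A minimal sketch:

```go
package main

import (
	"errors"
	"fmt"
)

// A fixed message needs no formatting, so errors.New is sufficient.
var errNonceMismatch = errors.New("nonce mismatch")

func checkNonce(expected, actual uint64) error {
	if expected != actual {
		// fmt.Errorf earns its keep when values are interpolated or another
		// error is wrapped with %w.
		return fmt.Errorf("nonce mismatch: expected %d, got %d: %w", expected, actual, errNonceMismatch)
	}
	return nil
}

func main() {
	err := checkNonce(5, 7)
	fmt.Println(err, errors.Is(err, errNonceMismatch)) // ... true
}
```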


@@ -168,14 +168,6 @@ func (e *L1EventParser) ParseL1SingleCrossChainEventLogs(ctx context.Context, lo
lastMessage.L2TokenAddress = event.L2Token.String()
lastMessage.TokenIDs = utils.ConvertBigIntArrayToString(event.TokenIDs)
lastMessage.TokenAmounts = utils.ConvertBigIntArrayToString(event.TokenAmounts)
case backendabi.L1DepositWrappedTokenSig:
event := backendabi.WrappedTokenMessageEvent{}
if err := utils.UnpackLog(backendabi.L1WrappedTokenGatewayABI, &event, "DepositWrappedToken", vlog); err != nil {
log.Error("Failed to unpack DepositWrappedToken event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
lastMessage.Sender = event.From.String()
case backendabi.L1SentMessageEventSig:
event := backendabi.L1SentMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "SentMessage", vlog); err != nil {
@@ -328,16 +320,6 @@ func (e *L1EventParser) ParseL1MessageQueueEventLogs(logs []types.Log, l1Deposit
QueueIndex: index,
})
}
case backendabi.L1ResetDequeuedTransactionEventSig:
event := backendabi.L1ResetDequeuedTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "ResetDequeuedTransaction", vlog); err != nil {
log.Error("Failed to unpack ResetDequeuedTransaction event", "err", err)
return nil, err
}
l1MessageQueueEvents = append(l1MessageQueueEvents, &orm.MessageQueueEvent{
EventType: btypes.MessageQueueEventTypeResetDequeuedTransaction,
QueueIndex: event.StartIndex.Uint64(),
})
case backendabi.L1DropTransactionEventSig:
event := backendabi.L1DropTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "DropTransaction", vlog); err != nil {


@@ -51,8 +51,11 @@ type L1FetcherLogic struct {
// NewL1FetcherLogic creates L1 fetcher logic
func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic {
addressList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -66,8 +69,11 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
}
gatewayList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -99,26 +105,6 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr))
}
if common.HexToAddress(cfg.ETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.ETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.ETHGatewayAddr))
}
if common.HexToAddress(cfg.WETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WETHGatewayAddr))
}
if common.HexToAddress(cfg.GasTokenGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.GasTokenGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.GasTokenGatewayAddr))
}
if common.HexToAddress(cfg.WrappedTokenGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
}
log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L1FetcherLogic{
@@ -224,7 +210,7 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
Topics: make([][]common.Hash, 1),
}
query.Topics[0] = make([]common.Hash, 16)
query.Topics[0] = make([]common.Hash, 14)
query.Topics[0][0] = backendabi.L1DepositETHSig
query.Topics[0][1] = backendabi.L1DepositERC20Sig
query.Topics[0][2] = backendabi.L1DepositERC721Sig
@@ -238,9 +224,7 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
query.Topics[0][10] = backendabi.L1QueueTransactionEventSig
query.Topics[0][11] = backendabi.L1DequeueTransactionEventSig
query.Topics[0][12] = backendabi.L1DropTransactionEventSig
query.Topics[0][13] = backendabi.L1ResetDequeuedTransactionEventSig
query.Topics[0][14] = backendabi.L1BridgeBatchDepositSig
query.Topics[0][15] = backendabi.L1DepositWrappedTokenSig
query.Topics[0][13] = backendabi.L1BridgeBatchDepositSig
eventLogs, err := f.client.FilterLogs(ctx, query)
if err != nil {
@@ -355,10 +339,6 @@ func (f *L1FetcherLogic) updateMetrics(res L1FilterResult) {
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_skip_message").Add(1)
case btypes.MessageQueueEventTypeDropTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_drop_message").Add(1)
// one ResetDequeuedTransaction event could indicate reset multiple skipped messages,
// this metric only counts the number of events, not the number of skipped messages.
case btypes.MessageQueueEventTypeResetDequeuedTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_reset_skipped_messages").Add(1)
}
}
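This hunk also resizes query.Topics[0] from 16 to 14 entries to match the changed set of event signatures. As a side note on the underlying go-ethereum API (a sketch assuming the fork at github.com/scroll-tech/go-ethereum keeps upstream's FilterQuery/FilterLogs shape, which the surrounding code suggests): the first element of Topics is an OR-list of event signature hashes, and deriving it from a slice avoids keeping a hard-coded count in sync by hand.

```go
package main

import (
	"context"
	"fmt"
	"math/big"

	ethereum "github.com/scroll-tech/go-ethereum"
	"github.com/scroll-tech/go-ethereum/common"
	"github.com/scroll-tech/go-ethereum/ethclient"
)

// buildQuery filters blocks [from, to] for logs emitted by any of the given
// addresses whose first topic matches any of the given event signatures.
func buildQuery(addresses []common.Address, sigs []common.Hash, from, to uint64) ethereum.FilterQuery {
	query := ethereum.FilterQuery{
		FromBlock: new(big.Int).SetUint64(from),
		ToBlock:   new(big.Int).SetUint64(to),
		Addresses: addresses,
		Topics:    make([][]common.Hash, 1),
	}
	// Deriving the OR-list from the signature slice keeps its length in sync
	// with the signatures actually assigned (the 16 -> 14 change above is the
	// hand-maintained version of the same bookkeeping).
	query.Topics[0] = append([]common.Hash{}, sigs...)
	return query
}

// fetchLogs would hand the query to the same FilterLogs call the fetcher uses.
func fetchLogs(ctx context.Context, client *ethclient.Client, q ethereum.FilterQuery) error {
	_, err := client.FilterLogs(ctx, q)
	return err
}

func main() {
	sigs := []common.Hash{
		common.HexToHash("0x01"), // placeholder event signature hashes
		common.HexToHash("0x02"),
	}
	q := buildQuery([]common.Address{common.HexToAddress("0x00")}, sigs, 100, 200)
	fmt.Println(len(q.Topics[0])) // 2
	_ = fetchLogs                 // not called here: needs a live RPC client
}
```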


@@ -54,6 +54,7 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -67,6 +68,7 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -98,11 +100,6 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr))
}
if common.HexToAddress(cfg.WETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WETHGatewayAddr))
}
log.Info("L2 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L2FetcherLogic{


@@ -217,12 +217,6 @@ func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1Mes
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = types.TxStatusTypeDropped
case btypes.MessageQueueEventTypeResetDequeuedTransaction:
db = db.Where("tx_status = ?", types.TxStatusTypeSkipped)
// reset skipped messages that the nonce is greater than or equal to the queue index.
db = db.Where("message_nonce >= ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = types.TxStatusTypeSent
}
if err := db.Updates(txStatusUpdateFields).Error; err != nil {
return fmt.Errorf("failed to update tx statuses of L1 message queue events, update fields: %v, error: %w", txStatusUpdateFields, err)
@@ -236,7 +230,7 @@ func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1Mes
db = db.Model(&CrossMessage{})
txHashUpdateFields := make(map[string]interface{})
switch l1MessageQueueEvent.EventType {
case btypes.MessageQueueEventTypeDequeueTransaction, btypes.MessageQueueEventTypeResetDequeuedTransaction:
case btypes.MessageQueueEventTypeDequeueTransaction:
continue
case btypes.MessageQueueEventTypeQueueTransaction:
// only replayMessages or enforced txs (whose message hashes would not be found), sendMessages have been filtered out.
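The status updates in this hunk follow the usual GORM pattern: chain Where clauses to narrow the affected rows, then apply a map of column updates. A condensed sketch of the branch visible above (model trimmed to the referenced columns; status values are passed in rather than taken from the real types package):

```go
package orm

import "gorm.io/gorm"

// CrossMessage is trimmed to the columns used below.
type CrossMessage struct {
	MessageNonce uint64
	MessageType  int
	TxStatus     int
}

// resetSkippedMessages marks skipped L1 sent messages at or above queueIndex
// as sent again, mirroring the ResetDequeuedTransaction branch in the hunk.
func resetSkippedMessages(db *gorm.DB, queueIndex uint64, skipped, sent, l1SentMessage int) error {
	return db.Model(&CrossMessage{}).
		Where("tx_status = ?", skipped).
		Where("message_nonce >= ?", queueIndex).
		Where("message_type = ?", l1SentMessage).
		Updates(map[string]interface{}{"tx_status": sent}).Error
}
```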


@@ -70,7 +70,6 @@ const (
MessageQueueEventTypeQueueTransaction
MessageQueueEventTypeDequeueTransaction
MessageQueueEventTypeDropTransaction
MessageQueueEventTypeResetDequeuedTransaction
)
// BatchStatusType represents the type of batch status.


@@ -38,7 +38,7 @@ func GetBlockNumber(ctx context.Context, client *ethclient.Client, confirmations
// @todo: add unit test.
func UnpackLog(c *abi.ABI, out interface{}, event string, log types.Log) error {
if log.Topics[0] != c.Events[event].ID {
return errors.New("event signature mismatch")
return fmt.Errorf("event signature mismatch")
}
if len(log.Data) > 0 {
if err := c.UnpackIntoInterface(out, event, log.Data); err != nil {


@@ -17,7 +17,7 @@ RUN --mount=target=. \
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install vim netcat-openbsd net-tools curl -y
COPY --from=builder /bin/bridgehistoryapi-api /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-api"]


@@ -17,8 +17,7 @@ RUN --mount=target=. \
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install ca-certificates vim netcat-openbsd net-tools curl -y
RUN update-ca-certificates
COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-fetcher"]


@@ -40,7 +40,6 @@ FROM ubuntu:20.04
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
# ENV CHAIN_ID=534353
RUN apt update && apt install vim netcat-openbsd net-tools curl jq -y
RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/coordinator_api /bin/


@@ -19,8 +19,9 @@ RUN --mount=target=. \
# Pull coordinator into a second stage deploy ubuntu container
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install vim netcat-openbsd net-tools curl -y
COPY --from=builder /bin/coordinator_cron /bin/
WORKDIR /app
ENTRYPOINT ["coordinator_cron"]


@@ -21,7 +21,7 @@ RUN --mount=target=. \
# Pull gas_oracle into a second stage deploy ubuntu container
FROM ubuntu:20.04
RUN apt update && apt install vim netcat-openbsd net-tools curl ca-certificates -y
RUN apt update && apt install ca-certificates -y
ENV CGO_LDFLAGS="-ldl"


@@ -21,7 +21,7 @@ RUN --mount=target=. \
# Pull rollup_relayer into a second stage deploy ubuntu container
FROM ubuntu:20.04
RUN apt update && apt install vim netcat-openbsd net-tools curl ca-certificates -y
RUN apt update && apt install ca-certificates -y
ENV CGO_LDFLAGS="-ldl"


@@ -1,12 +1,83 @@
package forks
import (
"math"
"math/big"
"sort"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/params"
)
// CollectSortedForkHeights returns a sorted set of block numbers that one or more forks are activated on
func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool, map[string]uint64) {
type nameFork struct {
name string
block *big.Int
}
forkHeightNameMap := make(map[uint64]string)
for _, fork := range []nameFork{
{name: "homestead", block: config.HomesteadBlock},
{name: "daoFork", block: config.DAOForkBlock},
{name: "eip150", block: config.EIP150Block},
{name: "eip155", block: config.EIP155Block},
{name: "eip158", block: config.EIP158Block},
{name: "byzantium", block: config.ByzantiumBlock},
{name: "constantinople", block: config.ConstantinopleBlock},
{name: "petersburg", block: config.PetersburgBlock},
{name: "istanbul", block: config.IstanbulBlock},
{name: "muirGlacier", block: config.MuirGlacierBlock},
{name: "berlin", block: config.BerlinBlock},
{name: "london", block: config.LondonBlock},
{name: "arrowGlacier", block: config.ArrowGlacierBlock},
{name: "archimedes", block: config.ArchimedesBlock},
{name: "shanghai", block: config.ShanghaiBlock},
{name: "bernoulli", block: config.BernoulliBlock},
{name: "curie", block: config.CurieBlock},
} {
if fork.block == nil {
continue
}
height := fork.block.Uint64()
// only keep the latest fork at each height, discard the rest
forkHeightNameMap[height] = fork.name
}
forkHeightsMap := make(map[uint64]bool)
forkNameHeightMap := make(map[string]uint64)
for height, name := range forkHeightNameMap {
forkHeightsMap[height] = true
forkNameHeightMap[name] = height
}
var forkHeights []uint64
for height := range forkHeightsMap {
forkHeights = append(forkHeights, height)
}
sort.Slice(forkHeights, func(i, j int) bool {
return forkHeights[i] < forkHeights[j]
})
return forkHeights, forkHeightsMap, forkNameHeightMap
}
// BlockRange returns the block range of the hard fork
// Callers need to ensure forkHeights is sorted in increasing order
func BlockRange(currentForkHeight uint64, forkHeights []uint64) (from, to uint64) {
to = math.MaxInt64
for _, height := range forkHeights {
if currentForkHeight < height {
to = height
return
}
from = height
}
return
}
// GetHardforkName returns the name of the hardfork active at the given block height and timestamp.
// It checks the chain configuration to determine which hardfork is active.
func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string {
@@ -16,10 +87,8 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin
return "bernoulli"
} else if !config.IsDarwin(blockTimestamp) {
return "curie"
} else if !config.IsDarwinV2(blockTimestamp) {
return "darwin"
} else {
return "darwinV2"
return "darwin"
}
}
@@ -32,10 +101,8 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin
return encoding.CodecV1
} else if !config.IsDarwin(blockTimestamp) {
return encoding.CodecV2
} else if !config.IsDarwinV2(blockTimestamp) {
return encoding.CodecV3
} else {
return encoding.CodecV4
return encoding.CodecV3
}
}
@@ -48,8 +115,6 @@ func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestam
return 15
} else if !config.IsDarwin(blockTimestamp) {
return 45
} else if !config.IsDarwinV2(blockTimestamp) {
return 45
} else {
return 45
}
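The new test file below exercises CollectSortedForkHeights and BlockRange directly. For the name/codec helpers, a rough usage sketch (hypothetical fork heights; import paths assume the repo's scroll-tech/common module layout, and only the pre-Darwin branches, on which both sides of this diff agree, are relied upon):

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/params"

	"scroll-tech/common/forks"
)

func main() {
	// Hypothetical fork heights; real networks set these in the genesis config.
	cfg := &params.ChainConfig{
		BernoulliBlock: big.NewInt(100),
		CurieBlock:     big.NewInt(200),
		// Darwin (a timestamp-activated fork) is left unconfigured here.
	}

	// Block 250 is past Curie and Darwin is not configured, so:
	fmt.Println(forks.GetHardforkName(cfg, 250, 1_700_000_000)) // "curie"
	fmt.Println(forks.GetCodecVersion(cfg, 250, 1_700_000_000)) // CodecV2

	// BlockRange reports the block span governed by a given fork height.
	heights, _, _ := forks.CollectSortedForkHeights(cfg)
	from, to := forks.BlockRange(100, heights)
	fmt.Println(from, to) // 100 200
}
```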

common/forks/forks_test.go (new file, 102 lines added)

@@ -0,0 +1,102 @@
package forks
import (
"math"
"math/big"
"testing"
"github.com/scroll-tech/go-ethereum/params"
"github.com/stretchr/testify/require"
)
func TestCollectSortedForkBlocks(t *testing.T) {
l, m, n := CollectSortedForkHeights(&params.ChainConfig{
ArchimedesBlock: big.NewInt(0),
ShanghaiBlock: big.NewInt(3),
BernoulliBlock: big.NewInt(3),
CurieBlock: big.NewInt(4),
})
require.Equal(t, l, []uint64{
0,
3,
4,
})
require.Equal(t, map[uint64]bool{
3: true,
4: true,
0: true,
}, m)
require.Equal(t, map[string]uint64{
"archimedes": 0,
"bernoulli": 3,
"curie": 4,
}, n)
}
func TestBlockRange(t *testing.T) {
tests := []struct {
name string
forkHeight uint64
forkHeights []uint64
expectedFrom uint64
expectedTo uint64
}{
{
name: "ToInfinite",
forkHeight: 300,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 300,
expectedTo: math.MaxInt64,
},
{
name: "To300",
forkHeight: 200,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 200,
expectedTo: 300,
},
{
name: "To200",
forkHeight: 100,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To100",
forkHeight: 0,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 0,
expectedTo: 100,
},
{
name: "To200-1",
forkHeight: 100,
forkHeights: []uint64{100, 200},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To2",
forkHeight: 1,
forkHeights: []uint64{1, 2},
expectedFrom: 1,
expectedTo: 2,
},
{
name: "ToInfinite-1",
forkHeight: 0,
forkHeights: []uint64{0},
expectedFrom: 0,
expectedTo: math.MaxInt64,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
from, to := BlockRange(test.forkHeight, test.forkHeights)
require.Equal(t, test.expectedFrom, from)
require.Equal(t, test.expectedTo, to)
})
}
}


@@ -13,7 +13,7 @@ require (
github.com/modern-go/reflect2 v1.0.2
github.com/orcaman/concurrent-map v1.0.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/stretchr/testify v1.9.0
github.com/testcontainers/testcontainers-go v0.30.0


@@ -633,8 +633,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 h1:QKgfS8G0btzg7nmFjSjllaxGkns3yg7g2/tG1nWExEI=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=


@@ -6,8 +6,6 @@ export RUST_BACKTRACE=full
export RUST_LOG=debug
export RUST_MIN_STACK=100000000
export PROVER_OUTPUT_DIR=test_zkp_test
export SCROLL_PROVER_ASSETS_DIR=/assets/test_assets
export DARWIN_V2_TEST_DIR=/assets
#export LD_LIBRARY_PATH=/:/usr/local/cuda/lib64
mkdir -p $PROVER_OUTPUT_DIR
@@ -15,16 +13,32 @@ mkdir -p $PROVER_OUTPUT_DIR
REPO=$(realpath ../..)
function build_test_bins() {
cd impl
cargo build --release
ln -f -s $(realpath target/release/libzkp.so) $REPO/prover/core/lib
ln -f -s $(realpath target/release/libzkp.so) $REPO/coordinator/internal/logic/verifier/lib
cd $REPO/prover
make tests_binary
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd $REPO/coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd $REPO/common/libzkp
}
function build_test_bins_old() {
cd $REPO
cd prover
make libzkp
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd ..
cd coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd ..
cd common/libzkp
}
build_test_bins
rm -rf $PROVER_OUTPUT_DIR/*
#rm -rf test_zkp_test/*
#rm -rf prover.log verifier.log
$REPO/prover/prover.test --exact zk_circuits_handler::darwin_v2::tests::test_circuits 2>&1 | tee prover.log
#$REPO/prover/core.test -test.v 2>&1 | tee prover.log
$REPO/coordinator/verifier.test -test.v 2>&1 | tee verifier.log


@@ -28,10 +28,44 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.11.0",
"ethers-core",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "aggregator"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -62,40 +96,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "ahash"
version = "0.8.3"
@@ -171,9 +171,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.86"
version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
[[package]]
name = "arc-swap"
@@ -568,10 +568,37 @@ version = "3.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"external-tracer 0.11.0",
"gadgets 0.11.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.12.0",
"ethers-core",
@@ -593,31 +620,6 @@ dependencies = [
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "byte-slice-cast"
version = "1.2.2"
@@ -1182,8 +1184,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1200,6 +1202,7 @@ dependencies = [
"revm-primitives",
"serde",
"serde_json",
"serde_stacker",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
@@ -1210,8 +1213,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1366,11 +1369,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"geth-utils 0.12.0",
"eth-types 0.11.0",
"geth-utils 0.11.0",
"log",
"serde",
"serde_json",
@@ -1379,11 +1382,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.13.0",
"geth-utils 0.13.0",
"eth-types 0.12.0",
"geth-utils 0.12.0",
"log",
"serde",
"serde_json",
@@ -1561,10 +1564,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1573,10 +1576,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.12.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1596,8 +1599,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -1606,8 +1609,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -2353,10 +2356,25 @@ dependencies = [
"subtle",
]
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core",
"ethers-signers",
"external-tracer 0.11.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]]
name = "mock"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.12.0",
"ethers-core",
@@ -2369,24 +2387,23 @@ dependencies = [
]
[[package]]
name = "mock"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"external-tracer 0.13.0",
"itertools 0.11.0",
"eth-types 0.11.0",
"halo2curves",
"hex",
"log",
"rand",
"rand_chacha",
"num-bigint",
"poseidon-base",
"zktrie",
]
[[package]]
name = "mpt-zktrie"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.12.0",
"halo2curves",
@@ -2394,21 +2411,7 @@ dependencies = [
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie",
]
[[package]]
@@ -2869,10 +2872,44 @@ dependencies = [
"unarray",
]
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator 0.11.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.11.0",
"chrono",
"dotenvy",
"eth-types 0.11.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.11.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "prover"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"aggregator 0.12.0",
"anyhow",
@@ -2903,40 +2940,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "psm"
version = "0.1.21"
@@ -4540,10 +4543,52 @@ dependencies = [
"syn 2.0.27",
]
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping 0.11.0",
"either",
"env_logger 0.10.0",
"eth-types 0.11.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"array-init",
"bus-mapping 0.12.0",
@@ -4582,61 +4627,18 @@ dependencies = [
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkp"
version = "0.1.0"
dependencies = [
"anyhow",
"base64 0.13.1",
"env_logger 0.9.3",
"halo2_proofs",
"libc",
"log",
"once_cell",
"prover 0.11.0",
"prover 0.12.0",
"prover 0.13.0",
"serde",
"serde_derive",
"serde_json",
@@ -4649,16 +4651,7 @@ version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie_rust",
]
[[package]]
@@ -4675,20 +4668,6 @@ dependencies = [
"strum_macros 0.24.3",
]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]]
name = "zstd"
version = "0.13.0"


@@ -13,6 +13,8 @@ halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers-etherscan = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
@@ -24,10 +26,10 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
# curie
prover_v3 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin_v2
prover_v5 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0-rc.3", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.0"
env_logger = "0.9.0"
@@ -37,7 +39,6 @@ once_cell = "1.19"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0.66"
anyhow = "1.0.86"
[profile.test]
opt-level = 3


@@ -0,0 +1,262 @@
use crate::{
types::{CheckChunkProofsResponse, ProofResult},
utils::{
c_char_to_str, c_char_to_vec, file_exists, panic_catch, string_to_c_char, vec_to_c_char,
OUTPUT_DIR,
},
};
use libc::c_char;
use prover_v3::BatchProof as BatchProofV3;
use prover_v4::{
aggregator::{Prover, Verifier as VerifierV4},
check_chunk_hashes,
consts::BATCH_VK_FILENAME,
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchHeader, BatchProof as BatchProofV4, BatchProvingTask, BlockTrace, BundleProof,
BundleProvingTask, ChunkInfo, ChunkProof, MAX_AGG_SNARKS,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env, ptr::null};
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_batch_prover(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_batch_prove");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a settings in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
// The VK file must exist here; in the prover itself it is optional and only logged as a warning.
if !file_exists(assets_dir, &BATCH_VK_FILENAME) {
panic!("{} must exist in folder {}", *BATCH_VK_FILENAME, assets_dir);
}
let prover = Prover::from_dirs(params_dir, assets_dir);
PROVER.set(prover).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_batch_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_batch_verify");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a settings in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);
VERIFIER_V4.set(verifier_v4).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_batch_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_batch_vk());
vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_bundle_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_bundle_vk());
vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn check_chunk_proofs(chunk_proofs: *const c_char) -> *const c_char {
let check_result: Result<bool, String> = panic_catch(|| {
let chunk_proofs = c_char_to_vec(chunk_proofs);
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
.map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
if chunk_proofs.is_empty() {
return Err("provided chunk proofs are empty.".to_string());
}
let prover_ref = PROVER.get().expect("failed to get reference to PROVER.");
let valid = prover_ref.check_protocol_of_chunks(&chunk_proofs);
Ok(valid)
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match check_result {
Ok(valid) => CheckChunkProofsResponse {
ok: valid,
error: None,
},
Err(err) => CheckChunkProofsResponse {
ok: false,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_batch_proof(
chunk_hashes: *const c_char,
chunk_proofs: *const c_char,
batch_header: *const c_char,
) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let chunk_hashes = c_char_to_vec(chunk_hashes);
let chunk_proofs = c_char_to_vec(chunk_proofs);
let batch_header = c_char_to_vec(batch_header);
let chunk_hashes = serde_json::from_slice::<Vec<ChunkInfo>>(&chunk_hashes)
.map_err(|e| format!("failed to deserialize chunk hashes: {e:?}"))?;
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
.map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
let batch_header = serde_json::from_slice::<BatchHeader<MAX_AGG_SNARKS>>(&batch_header)
.map_err(|e| format!("failed to deserialize batch header: {e:?}"))?;
if chunk_hashes.len() != chunk_proofs.len() {
return Err(format!("chunk hashes and chunk proofs lengths mismatch: chunk_hashes.len() = {}, chunk_proofs.len() = {}",
chunk_hashes.len(), chunk_proofs.len()));
}
let chunk_hashes_proofs: Vec<(_,_)> = chunk_hashes
.into_iter()
.zip(chunk_proofs.clone())
.collect();
check_chunk_hashes("", &chunk_hashes_proofs).map_err(|e| format!("failed to check chunk info: {e:?}"))?;
let batch = BatchProvingTask {
chunk_proofs,
batch_header,
};
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_batch_proof(batch, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate proof: {e:?}"))?;
serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
// As of upgrade #3 (Curie), we verify batch proofs on-chain (EVM).
let proof = serde_json::from_slice::<BatchProofV3>(proof.as_slice()).unwrap();
verify_evm_calldata(
include_bytes!("plonk_verifier_0.11.4.bin").to_vec(),
proof.calldata(),
)
} else {
// Post upgrade #4 (Darwin), batch proofs are not EVM-verifiable. Instead they are
// halo2 proofs meant to be bundled recursively.
let proof = serde_json::from_slice::<BatchProofV4>(proof.as_slice()).unwrap();
VERIFIER_V4.get().unwrap().verify_batch_proof(&proof)
}
});
verified.unwrap_or(false) as c_char
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_bundle_proof(batch_proofs: *const c_char) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let batch_proofs = c_char_to_vec(batch_proofs);
let batch_proofs = serde_json::from_slice::<Vec<BatchProofV4>>(&batch_proofs)
.map_err(|e| format!("failed to deserialize batch proofs: {e:?}"))?;
let bundle = BundleProvingTask { batch_proofs };
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_bundle_proof(bundle, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate bundle proof: {e:?}"))?;
serde_json::to_vec(&proof)
.map_err(|e| format!("failed to serialize the bundle proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(proof: *const c_char) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER_V4.get().unwrap().verify_bundle_proof(proof));
verified.unwrap_or(false) as c_char
}
// This function is only used for debugging on Go side.
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn block_traces_to_chunk_info(block_traces: *const c_char) -> *const c_char {
let block_traces = c_char_to_vec(block_traces);
let block_traces = serde_json::from_slice::<Vec<BlockTrace>>(&block_traces).unwrap();
let witness_block = chunk_trace_to_witness_block(block_traces).unwrap();
let chunk_info = ChunkInfo::from_witness_block(&witness_block, false);
let chunk_info_bytes = serde_json::to_vec(&chunk_info).unwrap();
vec_to_c_char(chunk_info_bytes)
}


@@ -0,0 +1,131 @@
use crate::{
types::ProofResult,
utils::{
c_char_to_str, c_char_to_vec, file_exists, panic_catch, string_to_c_char, vec_to_c_char,
OUTPUT_DIR,
},
};
use libc::c_char;
use prover_v3::{zkevm::Verifier as VerifierV3, ChunkProof as ChunkProofV3};
use prover_v4::{
consts::CHUNK_VK_FILENAME,
utils::init_env_and_log,
zkevm::{Prover, Verifier as VerifierV4},
BlockTrace, ChunkProof as ChunkProofV4, ChunkProvingTask,
};
use std::{cell::OnceCell, env, ptr::null};
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER_V3: OnceCell<VerifierV3> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_chunk_prover(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_chunk_prove");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
// The VK file must exist here; in the prover itself it is optional and only logged as a warning.
if !file_exists(assets_dir, &CHUNK_VK_FILENAME) {
panic!("{} must exist in folder {}", *CHUNK_VK_FILENAME, assets_dir);
}
let prover = Prover::from_dirs(params_dir, assets_dir);
PROVER.set(prover).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_chunk_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_chunk_verify");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier_v3 = VerifierV3::from_dirs(params_dir, assets_dir);
let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);
VERIFIER_V3.set(verifier_v3).unwrap();
VERIFIER_V4.set(verifier_v4).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_chunk_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_vk());
vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_chunk_proof(block_traces: *const c_char) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let block_traces = c_char_to_vec(block_traces);
let block_traces = serde_json::from_slice::<Vec<BlockTrace>>(&block_traces)
.map_err(|e| format!("failed to deserialize block traces: {e:?}"))?;
let chunk = ChunkProvingTask::from(block_traces);
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_chunk_proof(chunk, None, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate proof: {e:?}"))?;
serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};
serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
let proof = serde_json::from_slice::<ChunkProofV3>(proof.as_slice()).unwrap();
VERIFIER_V3.get().unwrap().verify_chunk_proof(proof)
} else {
let proof = serde_json::from_slice::<ChunkProofV4>(proof.as_slice()).unwrap();
VERIFIER_V4.get().unwrap().verify_chunk_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}

View File

@@ -1,63 +1,4 @@
mod batch;
mod chunk;
mod types;
mod utils;
mod verifier;
use crate::utils::{c_char_to_str, c_char_to_vec};
use libc::c_char;
use prover_v5::utils::init_env_and_log;
use verifier::{TaskType, VerifierConfig};
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init(config: *const c_char) {
init_env_and_log("ffi_init");
let config_str = c_char_to_str(config);
let verifier_config = serde_json::from_str::<VerifierConfig>(config_str).unwrap();
verifier::init(verifier_config);
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Chunk)
}
fn verify_proof(proof: *const c_char, fork_name: *const c_char, task_type: TaskType) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let verifier = verifier::get_verifier(fork_name_str);
if let Err(e) = verifier {
log::warn!("failed to get verifier, error: {:#}", e);
return 0 as c_char;
}
match verifier.unwrap().verify(task_type, proof) {
Err(e) => {
log::error!("{:?} verify failed, error: {:#}", task_type, e);
false as c_char
}
Ok(result) => result as c_char,
}
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Batch)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Bundle)
}

View File

@@ -0,0 +1,22 @@
use serde::{Deserialize, Serialize};
// Represents the result of a chunk proof checking operation.
// `ok` indicates whether the proof checking was successful.
// `error` provides additional details in case the check failed.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CheckChunkProofsResponse {
pub ok: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
}
// Encapsulates the result from generating a proof.
// `message` holds the generated proof in byte slice format.
// `error` provides additional details in case the proof generation failed.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ProofResult {
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<Vec<u8>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
}
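
For reference, a minimal Go-side sketch of decoding the two JSON payloads described above, assuming the default serde_json encoding. The struct and variable names are hypothetical; only the field names and the omit-if-absent behaviour come from the Rust definitions. Note that serde_json encodes Vec<u8> as a JSON array of numbers, so []int (rather than []byte, which encoding/json expects as base64) is used for `message`.

package main

import (
	"encoding/json"
	"fmt"
)

// checkChunkProofsResponse mirrors CheckChunkProofsResponse above (hypothetical Go name).
type checkChunkProofsResponse struct {
	OK    bool   `json:"ok"`
	Error string `json:"error,omitempty"`
}

// proofResult mirrors ProofResult above (hypothetical Go name).
type proofResult struct {
	Message []int  `json:"message,omitempty"`
	Error   string `json:"error,omitempty"`
}

func main() {
	var check checkChunkProofsResponse
	_ = json.Unmarshal([]byte(`{"ok":true}`), &check)

	var proof proofResult
	_ = json.Unmarshal([]byte(`{"error":"unwind error: ..."}`), &proof)

	fmt.Println(check.OK, proof.Error)
}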

View File

@@ -1,9 +1,29 @@
use once_cell::sync::Lazy;
use std::{
ffi::CStr,
env,
ffi::{CStr, CString},
os::raw::c_char,
panic::{catch_unwind, AssertUnwindSafe},
path::PathBuf,
};
// Only used for debugging.
pub(crate) static OUTPUT_DIR: Lazy<Option<String>> =
Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
/// # Safety
#[no_mangle]
pub extern "C" fn free_c_chars(ptr: *mut c_char) {
if ptr.is_null() {
log::warn!("Try to free an empty pointer!");
return;
}
unsafe {
let _ = CString::from_raw(ptr);
}
}
pub(crate) fn c_char_to_str(c: *const c_char) -> &'static str {
let cstr = unsafe { CStr::from_ptr(c) };
cstr.to_str().unwrap()
@@ -14,6 +34,21 @@ pub(crate) fn c_char_to_vec(c: *const c_char) -> Vec<u8> {
cstr.to_bytes().to_vec()
}
pub(crate) fn string_to_c_char(string: String) -> *const c_char {
CString::new(string).unwrap().into_raw()
}
pub(crate) fn vec_to_c_char(bytes: Vec<u8>) -> *const c_char {
CString::new(bytes).unwrap().into_raw()
}
pub(crate) fn file_exists(dir: &str, filename: &str) -> bool {
let mut path = PathBuf::from(dir);
path.push(filename);
path.exists()
}
pub(crate) fn panic_catch<F: FnOnce() -> R, R>(f: F) -> Result<R, String> {
catch_unwind(AssertUnwindSafe(f)).map_err(|err| {
if let Some(s) = err.downcast_ref::<String>() {

View File

@@ -1,110 +0,0 @@
mod darwin;
mod darwin_v2;
use anyhow::{bail, Result};
use darwin::DarwinVerifier;
use darwin_v2::DarwinV2Verifier;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover_v4::utils::load_params;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, collections::BTreeMap, rc::Rc};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
Chunk,
Batch,
Bundle,
}
pub trait ProofVerifier {
fn verify(&self, task_type: TaskType, proof: Vec<u8>) -> Result<bool>;
}
#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
pub fork_name: String,
pub params_path: String,
pub assets_path: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
pub low_version_circuit: CircuitConfig,
pub high_version_circuit: CircuitConfig,
}
type HardForkName = String;
struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();
static mut VERIFIER_LOW: OnceCell<VerifierPair> = OnceCell::new();
static mut PARAMS_MAP: OnceCell<BTreeMap<u32, ParamsKZG<Bn256>>> = OnceCell::new();
pub fn init(config: VerifierConfig) {
let low_conf = config.low_version_circuit;
std::env::set_var("SCROLL_PROVER_ASSETS_DIR", &low_conf.assets_path);
let params_degrees = [
*prover_v4::config::LAYER2_DEGREE,
*prover_v4::config::LAYER4_DEGREE,
];
// params should be shared between low and high
let mut params_map = BTreeMap::new();
for degree in params_degrees {
if let std::collections::btree_map::Entry::Vacant(e) = params_map.entry(degree) {
match load_params(&low_conf.params_path, degree, None) {
Ok(params) => {
e.insert(params);
}
Err(e) => panic!(
"failed to load params, degree {}, dir {}, err {}",
degree, low_conf.params_path, e
),
}
}
}
unsafe {
PARAMS_MAP.set(params_map).unwrap_unchecked();
}
let verifier = DarwinVerifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &low_conf.assets_path);
unsafe {
VERIFIER_LOW
.set(VerifierPair(
low_conf.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
let high_conf = config.high_version_circuit;
let verifier =
DarwinV2Verifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &high_conf.assets_path);
unsafe {
VERIFIER_HIGH
.set(VerifierPair(
high_conf.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
}
pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
unsafe {
if let Some(verifier) = VERIFIER_LOW.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
}
if let Some(verifier) = VERIFIER_HIGH.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
}
}
bail!("failed to get verifier, key not found, {}", fork_name)
}

View File

@@ -1,48 +0,0 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v4::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
pub struct DarwinVerifier<'params> {
verifier: Verifier<'params>,
agg_verifier: AggVerifier<'params>,
}
impl<'params> DarwinVerifier<'params> {
pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
Self {
verifier,
agg_verifier,
}
}
}
impl<'params> ProofVerifier for DarwinVerifier<'params> {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
let result = panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.verifier.verify_chunk_proof(proof)
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_batch_proof(&proof)
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_bundle_proof(proof)
}
});
result.map_err(|e| anyhow::anyhow!(e))
}
}

View File

@@ -1,48 +0,0 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v5::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
pub struct DarwinV2Verifier<'params> {
verifier: Verifier<'params>,
agg_verifier: AggVerifier<'params>,
}
impl<'params> DarwinV2Verifier<'params> {
pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
Self {
verifier,
agg_verifier,
}
}
}
impl<'params> ProofVerifier for DarwinV2Verifier<'params> {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
let result = panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.verifier.verify_chunk_proof(proof)
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_batch_proof(&proof)
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_bundle_proof(proof)
}
});
result.map_err(|e| anyhow::anyhow!(e))
}
}

View File

@@ -1,10 +1,26 @@
// BatchProver is used to:
// - Batch a list of chunk proofs
// - Bundle a list of batch proofs
void init_batch_prover(char* params_dir, char* assets_dir);
// BatchVerifier is used to:
// - Verify a batch proof
// - Verify a bundle proof
void init(char* config);
void init_batch_verifier(char* params_dir, char* assets_dir);
char* get_batch_vk();
char* check_chunk_proofs(char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs, char* batch_header);
char verify_batch_proof(char* proof, char* fork_name);
char verify_bundle_proof(char* proof, char* fork_name);
char* get_bundle_vk();
char* gen_bundle_proof(char* batch_proofs);
char verify_bundle_proof(char* proof);
void init_chunk_prover(char* params_dir, char* assets_dir);
void init_chunk_verifier(char* params_dir, char* assets_dir);
char* get_chunk_vk();
char* gen_chunk_proof(char* block_traces);
char verify_chunk_proof(char* proof, char* fork_name);
char* block_traces_to_chunk_info(char* block_traces);
void free_c_chars(char* ptr);
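
For illustration, a minimal cgo sketch of how the functions declared in this header could be called from Go. The package name, LDFLAGS, and wrapper function below are placeholders, and the repo's real bindings may wrap these calls differently; the key pattern is passing NUL-terminated C strings in and releasing the returned buffer with free_c_chars.

package libzkp

/*
#cgo LDFLAGS: -lzkp -lm -ldl
#include <stdlib.h>
#include "libzkp.h"
*/
import "C"

import "unsafe"

// GenChunkProof feeds JSON-encoded block traces to gen_chunk_proof and returns
// the raw JSON result (a serialized ProofResult), or nil on failure.
func GenChunkProof(blockTracesJSON []byte) []byte {
	traces := C.CString(string(blockTracesJSON))
	defer C.free(unsafe.Pointer(traces))

	out := C.gen_chunk_proof(traces)
	if out == nil {
		return nil
	}
	// The returned buffer was allocated on the Rust side; release it with free_c_chars.
	defer C.free_c_chars(out)
	return []byte(C.GoString(out))
}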

View File

@@ -2,7 +2,6 @@ package testcontainers
import (
"context"
"errors"
"fmt"
"log"
"os"
@@ -21,10 +20,9 @@ import (
// TestcontainerApps testcontainers struct
type TestcontainerApps struct {
postgresContainer *postgres.PostgresContainer
l2GethContainer *testcontainers.DockerContainer
poSL1Container compose.ComposeStack
web3SignerContainer *testcontainers.DockerContainer
postgresContainer *postgres.PostgresContainer
l2GethContainer *testcontainers.DockerContainer
poSL1Container compose.ComposeStack
// common time stamp in nanoseconds.
Timestamp int
@@ -113,51 +111,10 @@ func (t *TestcontainerApps) StartPoSL1Container() error {
return nil
}
func (t *TestcontainerApps) StartWeb3SignerContainer(chainId int) error {
if t.web3SignerContainer != nil && t.web3SignerContainer.IsRunning() {
return nil
}
var (
err error
rootDir string
)
if rootDir, err = findProjectRootDir(); err != nil {
return fmt.Errorf("failed to find project root directory: %v", err)
}
// web3signerconf/keyconf.yaml may contain multiple configured keys; web3signer then chooses the one corresponding to the tx's `from` field
web3SignerConfDir := filepath.Join(rootDir, "common", "testcontainers", "web3signerconf")
req := testcontainers.ContainerRequest{
Image: "consensys/web3signer:develop",
ExposedPorts: []string{"9000/tcp"},
Cmd: []string{"--key-config-path", "/web3signerconf/", "eth1", "--chain-id", fmt.Sprintf("%d", chainId)},
Files: []testcontainers.ContainerFile{
{
HostFilePath: web3SignerConfDir,
ContainerFilePath: "/",
FileMode: 0o777,
},
},
WaitingFor: wait.ForLog("ready to handle signing requests"),
}
genericContainerReq := testcontainers.GenericContainerRequest{
ContainerRequest: req,
Started: true,
}
container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
if err != nil {
log.Printf("failed to start web3signer container: %s", err)
return err
}
t.web3SignerContainer, _ = container.(*testcontainers.DockerContainer)
return nil
}
// GetPoSL1EndPoint returns the endpoint of the running PoS L1 container
func (t *TestcontainerApps) GetPoSL1EndPoint() (string, error) {
if t.poSL1Container == nil {
return "", errors.New("PoS L1 container is not running")
return "", fmt.Errorf("PoS L1 container is not running")
}
contrainer, err := t.poSL1Container.ServiceContainer(context.Background(), "geth")
if err != nil {
@@ -178,7 +135,7 @@ func (t *TestcontainerApps) GetPoSL1Client() (*ethclient.Client, error) {
// GetDBEndPoint returns the endpoint of the running postgres container
func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
if t.postgresContainer == nil || !t.postgresContainer.IsRunning() {
return "", errors.New("postgres is not running")
return "", fmt.Errorf("postgres is not running")
}
return t.postgresContainer.ConnectionString(context.Background(), "sslmode=disable")
}
@@ -186,7 +143,7 @@ func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
// GetL2GethEndPoint returns the endpoint of the running L2Geth container
func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
if t.l2GethContainer == nil || !t.l2GethContainer.IsRunning() {
return "", errors.New("l2 geth is not running")
return "", fmt.Errorf("l2 geth is not running")
}
endpoint, err := t.l2GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
if err != nil {
@@ -195,14 +152,6 @@ func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
return endpoint, nil
}
// GetWeb3SignerEndpoint returns the endpoint of the running web3signer container
func (t *TestcontainerApps) GetWeb3SignerEndpoint() (string, error) {
if t.web3SignerContainer == nil || !t.web3SignerContainer.IsRunning() {
return "", errors.New("web3signer is not running")
}
return t.web3SignerContainer.PortEndpoint(context.Background(), "9000/tcp", "http")
}
// GetGormDBClient returns a gorm.DB by connecting to the running postgres container
func (t *TestcontainerApps) GetGormDBClient() (*gorm.DB, error) {
endpoint, err := t.GetDBEndPoint()
@@ -251,11 +200,6 @@ func (t *TestcontainerApps) Free() {
t.poSL1Container = nil
}
}
if t.web3SignerContainer != nil && t.web3SignerContainer.IsRunning() {
if err := t.web3SignerContainer.Terminate(ctx); err != nil {
log.Printf("failed to stop web3signer container: %s", err)
}
}
}
// findProjectRootDir finds the project root directory
@@ -273,7 +217,7 @@ func findProjectRootDir() (string, error) {
parentDir := filepath.Dir(currentDir)
if parentDir == currentDir {
return "", errors.New("go.work file not found in any parent directory")
return "", fmt.Errorf("go.work file not found in any parent directory")
}
currentDir = parentDir

View File

@@ -44,11 +44,6 @@ func TestNewTestcontainerApps(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, ethclient)
assert.NoError(t, testApps.StartWeb3SignerContainer(1))
endpoint, err = testApps.GetWeb3SignerEndpoint()
assert.NoError(t, err)
assert.NotEmpty(t, endpoint)
// test free testcontainers
testApps.Free()
endpoint, err = testApps.GetDBEndPoint()
@@ -62,8 +57,4 @@ func TestNewTestcontainerApps(t *testing.T) {
endpoint, err = testApps.GetPoSL1EndPoint()
assert.EqualError(t, err, "PoS L1 container is not running")
assert.Empty(t, endpoint)
endpoint, err = testApps.GetWeb3SignerEndpoint()
assert.EqualError(t, err, "web3signer is not running")
assert.Empty(t, endpoint)
}

View File

@@ -1,7 +0,0 @@
type: "file-raw"
keyType: "SECP256K1"
privateKey: "0x1313131313131313131313131313131313131313131313131313131313131313"
---
type: "file-raw"
keyType: "SECP256K1"
privateKey: "0x1212121212121212121212121212121212121212121212121212121212121212"

View File

@@ -109,10 +109,6 @@ const (
ProverTaskFailureTypeVerifiedFailed
// ProverTaskFailureTypeServerError indicates a server error occurred while collecting the proof
ProverTaskFailureTypeServerError
// ProverTaskFailureTypeObjectAlreadyVerified the object (batch/chunk) has already been verified; may exist in the test env when ENABLE_TEST_ENV_BYPASS_FEATURES is true
ProverTaskFailureTypeObjectAlreadyVerified
// ProverTaskFailureTypeReassignedByAdmin reassigned by admin; this value is used in the admin system and is defined here for clarity
ProverTaskFailureTypeReassignedByAdmin
)
func (r ProverTaskFailureType) String() string {
@@ -127,10 +123,6 @@ func (r ProverTaskFailureType) String() string {
return "prover task failure verified failed"
case ProverTaskFailureTypeServerError:
return "prover task failure server exception"
case ProverTaskFailureTypeObjectAlreadyVerified:
return "prover task failure object already verified"
case ProverTaskFailureTypeReassignedByAdmin:
return "prover task failure reassigned by admin"
default:
return fmt.Sprintf("illegal prover task failure type (%d)", int32(r))
}

View File

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/go-ethereum/common"
)
@@ -51,10 +52,9 @@ type ChunkTaskDetail struct {
// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
}
// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.

View File

@@ -2,7 +2,6 @@ package utils
import (
"crypto/ecdsa"
"errors"
"fmt"
"os"
"path/filepath"
@@ -29,7 +28,7 @@ func LoadOrCreateKey(keystorePath string, keystorePassword string) (*ecdsa.Priva
} else if err != nil {
return nil, err
} else if fi.IsDir() {
return nil, errors.New("keystorePath cannot be a dir")
return nil, fmt.Errorf("keystorePath cannot be a dir")
}
keyjson, err := os.ReadFile(filepath.Clean(keystorePath))

View File

@@ -9,14 +9,10 @@ import (
"math/big"
"os"
"path/filepath"
"reflect"
"strconv"
"strings"
"time"
"github.com/modern-go/reflect2"
"github.com/scroll-tech/go-ethereum/core"
"github.com/scroll-tech/go-ethereum/log"
)
// TryTimes tries to run the function several times until it returns true.
@@ -82,89 +78,3 @@ func ReadGenesis(genesisPath string) (*core.Genesis, error) {
}
return genesis, file.Close()
}
// OverrideConfigWithEnv recursively overrides config values with environment variables
func OverrideConfigWithEnv(cfg interface{}, prefix string) error {
v := reflect.ValueOf(cfg)
if v.Kind() != reflect.Ptr || v.IsNil() {
return nil
}
v = v.Elem()
t := v.Type()
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
fieldValue := v.Field(i)
if !fieldValue.CanSet() {
continue
}
tag := field.Tag.Get("json")
if tag == "" {
tag = strings.ToLower(field.Name)
}
envKey := prefix + "_" + strings.ToUpper(tag)
switch fieldValue.Kind() {
case reflect.Ptr:
if !fieldValue.IsNil() {
err := OverrideConfigWithEnv(fieldValue.Interface(), envKey)
if err != nil {
return err
}
}
case reflect.Struct:
err := OverrideConfigWithEnv(fieldValue.Addr().Interface(), envKey)
if err != nil {
return err
}
default:
if envValue, exists := os.LookupEnv(envKey); exists {
log.Info("Overriding config with env var", "key", envKey)
err := setField(fieldValue, envValue)
if err != nil {
return err
}
}
}
}
return nil
}
// setField sets the value of a field based on the environment variable value
func setField(field reflect.Value, value string) error {
switch field.Kind() {
case reflect.String:
field.SetString(value)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
intValue, err := strconv.ParseInt(value, 10, 64)
if err != nil {
return err
}
field.SetInt(intValue)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
uintValue, err := strconv.ParseUint(value, 10, 64)
if err != nil {
return err
}
field.SetUint(uintValue)
case reflect.Float32, reflect.Float64:
floatValue, err := strconv.ParseFloat(value, 64)
if err != nil {
return err
}
field.SetFloat(floatValue)
case reflect.Bool:
boolValue, err := strconv.ParseBool(value)
if err != nil {
return err
}
field.SetBool(boolValue)
default:
return fmt.Errorf("unsupported type: %v", field.Kind())
}
return nil
}
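
A small, hypothetical usage sketch of the naming convention implemented by OverrideConfigWithEnv above: each nested field is addressed as the prefix plus the upper-cased json tags along the path. The structs and values below are illustrative only; the "SCROLL_COORDINATOR" prefix is the one the coordinator's config loader passes in.

package main

import (
	"fmt"
	"os"
)

type dbConfig struct {
	DriverName string `json:"driver_name"`
	DSN        string `json:"dsn"`
}

type config struct {
	DB *dbConfig `json:"db"`
}

func main() {
	// A nested field maps to PREFIX_<TAG>_<TAG>, so cfg.DB.DriverName is
	// addressed as SCROLL_COORDINATOR_DB_DRIVER_NAME.
	os.Setenv("SCROLL_COORDINATOR_DB_DRIVER_NAME", "postgres")

	cfg := &config{DB: &dbConfig{DriverName: "sqlite"}}
	// With the real helper, utils.OverrideConfigWithEnv(cfg, "SCROLL_COORDINATOR")
	// would walk cfg and overwrite DriverName with the value set above.
	fmt.Println(cfg.DB.DriverName, "->", os.Getenv("SCROLL_COORDINATOR_DB_DRIVER_NAME"))
}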

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.4.66"
var tag = "v4.4.33"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -88,25 +88,13 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
}
// Reset prover manager config for manager test cases.
cfg.ProverManager = &coordinatorConfig.ProverManager{
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{
MockMode: true,
LowVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwin",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwinV2",
MinProverVersion: "v4.3.0",
},
},
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{MockMode: true},
BatchCollectionTimeSec: 60,
ChunkCollectionTimeSec: 60,
SessionAttempts: 10,
MaxVerifierWorkers: 4,
MinProverVersion: "v1.0.0",
}
endpoint, err := c.testApps.GetDBEndPoint()
if err != nil {

View File

@@ -25,7 +25,7 @@ func init() {
app = cli.NewApp()
app.Action = action
app.Name = "coordinator-tool"
app.Usage = "The Scroll L2 Coordinator Tool"
app.Usage = "The Scroll L2 Coordinator"
app.Version = version.Version
app.Flags = append(app.Flags, utils.CommonFlags...)
app.Before = func(ctx *cli.Context) error {
@@ -66,7 +66,7 @@ func action(ctx *cli.Context) error {
for _, batch := range batches {
var proof message.BatchProof
if encodeErr := json.Unmarshal(batch.Proof, &proof); encodeErr != nil {
log.Error("failed to unmarshal batch proof")
log.Error("failed to unmarshal proof")
return fmt.Errorf("failed to unmarshal proof: %w, bundle hash: %v, batch hash: %v", encodeErr, taskID, batch.Hash)
}
batchProofs = append(batchProofs, &proof)
@@ -78,8 +78,8 @@ func action(ctx *cli.Context) error {
batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
log.Error("failed to marshal batch proof")
return fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", taskID, err)
log.Error("failed to marshal proof")
return fmt.Errorf("failed to marshal chunk proofs, taskID:%s err:%w", taskID, err)
}
taskMsg := &coordinatorType.GetTaskSchema{

View File

@@ -6,20 +6,13 @@
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"fork_name": "bernoulli",
"mock_mode": true,
"low_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwin",
"min_prover_version": "v4.4.43"
},
"high_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwinV2",
"min_prover_version": "v4.4.45"
}
}
"params_path": "",
"assets_path": ""
},
"max_verifier_workers": 4,
"min_prover_version": "v1.0.0"
},
"db": {
"driver_name": "postgres",

View File

@@ -6,10 +6,7 @@ require (
github.com/appleboy/gin-jwt/v2 v2.9.1
github.com/gin-gonic/gin v1.9.1
github.com/go-resty/resty/v2 v2.7.0
github.com/google/uuid v1.6.0
github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/shopspring/decimal v1.3.1
github.com/stretchr/testify v1.9.0
@@ -45,6 +42,12 @@ require (
google.golang.org/protobuf v1.33.0 // indirect
)
require (
github.com/google/uuid v1.6.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923
)
require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect

View File

@@ -173,8 +173,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068 h1:oVGwhg4cCq35B04eG/S4OBXDwXiFH7+LezuH2ZTRBPs=
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 h1:QKgfS8G0btzg7nmFjSjllaxGkns3yg7g2/tG1nWExEI=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,7 +6,6 @@ import (
"path/filepath"
"scroll-tech/common/database"
"scroll-tech/common/utils"
)
// ProverManager loads prover manager configuration items.
@@ -24,6 +23,10 @@ type ProverManager struct {
ChunkCollectionTimeSec int `json:"chunk_collection_time_sec"`
// BundleCollectionTimeSec bundle Proof collection time (in seconds).
BundleCollectionTimeSec int `json:"bundle_collection_time_sec"`
// Max number of workers in verifier worker pool
MaxVerifierWorkers int `json:"max_verifier_workers"`
// MinProverVersion is the minimum version of the prover that is required.
MinProverVersion string `json:"min_prover_version"`
}
// L2 loads l2geth configuration items.
@@ -47,19 +50,12 @@ type Config struct {
Auth *Auth `json:"auth"`
}
// CircuitConfig circuit items.
type CircuitConfig struct {
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
MinProverVersion string `json:"min_prover_version"`
}
// VerifierConfig loads the zk verifier config.
type VerifierConfig struct {
MockMode bool `json:"mock_mode"`
LowVersionCircuit *CircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
ForkName string `json:"fork_name"`
MockMode bool `json:"mock_mode"`
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
}
// NewConfig returns a new instance of Config.
@@ -75,11 +71,5 @@ func NewConfig(file string) (*Config, error) {
return nil, err
}
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_COORDINATOR")
if err != nil {
return nil, err
}
return cfg, nil
}

View File

@@ -44,33 +44,21 @@ func (a *AuthController) Login(c *gin.Context) (interface{}, error) {
return "", fmt.Errorf("check the login parameter failure: %w", err)
}
hardForkNames, err := a.loginLogic.ProverHardForkName(&login)
if err != nil {
return "", fmt.Errorf("prover hard name failure:%w", err)
}
// check whether the challenge has already been used; if so, return failure
if err := a.loginLogic.InsertChallengeString(c, login.Message.Challenge); err != nil {
return "", fmt.Errorf("login insert challenge string failure:%w", err)
}
returnData := types.LoginParameterWithHardForkName{
HardForkName: hardForkNames,
LoginParameter: login,
}
return returnData, nil
return login, nil
}
// PayloadFunc returns jwt.MapClaims with {public key, prover name}.
func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims {
v, ok := data.(types.LoginParameterWithHardForkName)
v, ok := data.(types.LoginParameter)
if !ok {
return jwt.MapClaims{}
}
return jwt.MapClaims{
types.HardForkName: v.HardForkName,
types.PublicKey: v.PublicKey,
types.ProverName: v.Message.ProverName,
types.ProverVersion: v.Message.ProverVersion,
@@ -91,10 +79,5 @@ func (a *AuthController) IdentityHandler(c *gin.Context) interface{} {
if proverVersion, ok := claims[types.ProverVersion]; ok {
c.Set(types.ProverVersion, proverVersion)
}
if hardForkName, ok := claims[types.HardForkName]; ok {
c.Set(types.HardForkName, hardForkName)
}
return nil
}

View File

@@ -26,7 +26,7 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
panic("proof receiver new verifier failure")
}
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap, "bundleVerifier", vf.BundleVkMap)
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap)
Auth = NewAuthController(db, cfg, vf)
GetTask = NewGetTaskController(cfg, chainCfg, db, reg)

View File

@@ -1,7 +1,6 @@
package api
import (
"errors"
"fmt"
"math/rand"
@@ -50,15 +49,15 @@ func NewGetTaskController(cfg *config.Config, chainCfg *params.ChainConfig, db *
func (ptc *GetTaskController) incGetTaskAccessCounter(ctx *gin.Context) error {
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist {
return errors.New("get public key from context failed")
return fmt.Errorf("get public key from context failed")
}
proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
if !proverNameExist {
return errors.New("get prover name from context failed")
return fmt.Errorf("get prover name from context failed")
}
proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
if !proverVersionExist {
return errors.New("get prover version from context failed")
return fmt.Errorf("get prover version from context failed")
}
ptc.getTaskAccessCounter.With(prometheus.Labels{
@@ -98,7 +97,7 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
}
if result == nil {
nerr := errors.New("get empty prover task")
nerr := fmt.Errorf("get empty prover task")
types.RenderFailure(ctx, types.ErrCoordinatorEmptyProofData, nerr)
return
}
@@ -108,6 +107,10 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
func (ptc *GetTaskController) proofType(para *coordinatorType.GetTaskParameter) message.ProofType {
var proofTypes []message.ProofType
if para.TaskType != 0 {
proofTypes = append(proofTypes, message.ProofType(para.TaskType))
}
for _, proofType := range para.TaskTypes {
proofTypes = append(proofTypes, message.ProofType(proofType))
}

View File

@@ -363,7 +363,7 @@ func (c *Collector) checkBundleAllBatchReady() {
for _, bundle := range bundles {
allReady, checkErr := c.batchOrm.CheckIfBundleBatchProofsAreReady(c.ctx, bundle.Hash)
if checkErr != nil {
log.Warn("checkBundleAllBatchReady CheckIfBundleBatchProofsAreReady failure", "error", checkErr, "hash", bundle.Hash)
log.Warn("checkBatchAllChunkReady CheckIfBatchChunkProofsAreReady failure", "error", checkErr, "hash", bundle.Hash)
continue
}
@@ -371,8 +371,8 @@ func (c *Collector) checkBundleAllBatchReady() {
continue
}
if updateErr := c.bundleOrm.UpdateBatchProofsStatusByBundleHash(c.ctx, bundle.Hash, types.BatchProofsStatusReady); updateErr != nil {
log.Warn("checkBundleAllBatchReady UpdateBatchProofsStatusByBundleHash failure", "error", checkErr, "hash", bundle.Hash)
if updateErr := c.bundleOrm.UpdateBatchProofsStatusByBatchHash(c.ctx, bundle.Hash, types.BatchProofsStatusReady); updateErr != nil {
log.Warn("checkBundleAllBatchReady UpdateBatchProofsStatusByBatchHash failure", "error", checkErr, "hash", bundle.Hash)
}
}

View File

@@ -3,7 +3,6 @@ package auth
import (
"errors"
"fmt"
"strings"
"github.com/gin-gonic/gin"
"github.com/scroll-tech/go-ethereum/log"
@@ -24,35 +23,31 @@ type LoginLogic struct {
chunkVks map[string]struct{}
batchVKs map[string]struct{}
bundleVks map[string]struct{}
proverVersionHardForkMap map[string][]string
}
// NewLoginLogic creates a new LoginLogic
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
proverVersionHardForkMap := make(map[string][]string)
if version.CheckScrollRepoVersion(cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
log.Error("config file error, low verifier min_prover_version should not more than high verifier min_prover_version",
"low verifier min_prover_version", cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion,
"high verifier min_prover_version", cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion)
panic("verifier config file error")
l := &LoginLogic{
cfg: cfg,
chunkVks: make(map[string]struct{}),
batchVKs: make(map[string]struct{}),
bundleVks: make(map[string]struct{}),
challengeOrm: orm.NewChallenge(db),
}
var highHardForks []string
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.LowVersionCircuit.ForkName)
proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks
proverVersionHardForkMap[cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion] = []string{cfg.ProverManager.Verifier.LowVersionCircuit.ForkName}
return &LoginLogic{
cfg: cfg,
chunkVks: vf.ChunkVKMap,
batchVKs: vf.BatchVKMap,
bundleVks: vf.BundleVkMap,
challengeOrm: orm.NewChallenge(db),
proverVersionHardForkMap: proverVersionHardForkMap,
for _, vk := range vf.ChunkVKMap {
l.chunkVks[vk] = struct{}{}
}
for _, vk := range vf.BatchVKMap {
l.batchVKs[vk] = struct{}{}
}
for _, vk := range vf.BundleVkMap {
l.bundleVks[vk] = struct{}{}
}
return l
}
// InsertChallengeString inserts the challenge string and checks whether it already exists
@@ -61,16 +56,16 @@ func (l *LoginLogic) InsertChallengeString(ctx *gin.Context, challenge string) e
}
func (l *LoginLogic) Check(login *types.LoginParameter) error {
verify, err := login.Verify()
if err != nil || !verify {
log.Error("auth message verify failure", "prover_name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
return errors.New("auth message verify failure")
if login.PublicKey != "" {
verify, err := login.Verify()
if err != nil || !verify {
return errors.New("auth message verify failure")
}
}
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion) {
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.MinProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, login.Message.ProverVersion)
l.cfg.ProverManager.MinProverVersion, login.Message.ProverVersion)
}
if len(login.Message.ProverTypes) > 0 {
@@ -89,37 +84,21 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
vks[vk] = struct{}{}
}
default:
log.Error("invalid prover_type", "value", proverType, "prover name", login.Message.ProverName, "prover_version", login.Message.ProverVersion)
log.Error("invalid prover_type", "value", proverType)
}
}
for _, vk := range login.Message.VKs {
if _, ok := vks[vk]; !ok {
log.Error("vk inconsistency", "prover vk", vk, "prover name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
log.Error("vk inconsistency", "prover vk", vk)
if !version.CheckScrollProverVersion(login.Message.ProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s",
version.Version, login.Message.ProverVersion)
}
// if the prover reports the same prover version
return errors.New("incompatible vk. please check your params files or config files")
return fmt.Errorf("incompatible vk. please check your params files or config files")
}
}
}
return nil
}
// ProverHardForkName retrieves hard fork name which prover belongs to
func (l *LoginLogic) ProverHardForkName(login *types.LoginParameter) (string, error) {
proverVersionSplits := strings.Split(login.Message.ProverVersion, "-")
if len(proverVersionSplits) == 0 {
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}
proverVersion := proverVersionSplits[0]
if hardForkNames, ok := l.proverVersionHardForkMap[proverVersion]; ok {
return strings.Join(hardForkNames, ","), nil
}
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}

View File

@@ -9,9 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/da-codec/encoding/codecv4"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
@@ -31,8 +29,9 @@ import (
type BatchProverTask struct {
BaseProverTask
batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
batchAttemptsExceedTotal prometheus.Counter
batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
}
// NewBatchProverTask creates a new batch collector
@@ -48,6 +47,10 @@ func NewBatchProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
batchAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_batch_attempts_exceed_total",
Help: "Total number of batch attempts exceed.",
}),
batchTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_batch_get_task_total",
Help: "Total number of batch get task.",
@@ -120,15 +123,6 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// bp.recoverActiveAttempts(ctx, batchTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", batchTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{
TaskID: batchTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
@@ -214,9 +208,17 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
chunkInfos = append(chunkInfos, &chunkInfo)
}
taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
if err != nil {
return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
taskDetail := message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if hardForkName == "darwin" {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(batch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header, taskID:%s err:%w", task.TaskID, decodeErr)
}
taskDetail.BatchHeader = batchHeader
}
chunkProofsBytes, err := json.Marshal(taskDetail)
@@ -239,34 +241,3 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
log.Error("failed to recover batch active attempts", "hash", batchTask.Hash, "error", err)
}
}
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []*message.ChunkProof) (*message.BatchTaskDetail, error) {
taskDetail := &message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV3 && encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV4 {
return taskDetail, nil
}
if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV3 {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v3) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
} else {
batchHeader, decodeErr := codecv4.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v4) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
}
return taskDetail, nil
}

View File

@@ -27,8 +27,9 @@ import (
type BundleProverTask struct {
BaseProverTask
bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
bundleAttemptsExceedTotal prometheus.Counter
bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
}
// NewBundleProverTask creates a new bundle collector
@@ -45,6 +46,10 @@ func NewBundleProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *g
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
bundleAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_bundle_attempts_exceed_total",
Help: "Total number of bundle attempts exceed.",
}),
bundleTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_bundle_get_task_total",
Help: "Total number of bundle get task.",
@@ -117,15 +122,6 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
return nil, ErrCoordinatorInternalFailure
}
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// bp.recoverActiveAttempts(ctx, bundleTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", bundleTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{
TaskID: bundleTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
@@ -209,7 +205,7 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
return nil, fmt.Errorf("failed to marshal chunk proofs, taskID:%s err:%w", task.TaskID, err)
}
taskMsg := &coordinatorType.GetTaskSchema{

View File

@@ -27,8 +27,9 @@ import (
type ChunkProverTask struct {
BaseProverTask
chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
chunkAttemptsExceedTotal prometheus.Counter
chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
}
// NewChunkProverTask creates a new chunk prover task
@@ -43,6 +44,10 @@ func NewChunkProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
chunkAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_chunk_attempts_exceed_total",
Help: "Total number of chunk attempts exceed.",
}),
chunkTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_chunk_get_task_total",
Help: "Total number of chunk get task.",
@@ -65,7 +70,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ {
var getTaskError error
var tmpChunkTask *orm.Chunk
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
@@ -74,7 +79,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
// Why query again? To support assigning one task to multiple provers, chunks already in `ProvingTaskAssigned`
// status must also be assignable. But `proving_status in (1, 2)` would not use the postgres index, so the SQL is split into two queries.
if tmpChunkTask == nil {
tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get unassigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
@@ -115,15 +120,6 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// cp.recoverActiveAttempts(ctx, chunkTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", chunkTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{
TaskID: chunkTask.Hash,
ProverPublicKey: taskCtx.PublicKey,

View File

@@ -1,9 +1,7 @@
package provertask
import (
"errors"
"fmt"
"strings"
"sync"
"github.com/gin-gonic/gin"
@@ -19,7 +17,9 @@ import (
var (
// ErrCoordinatorInternalFailure coordinator internal db failure
ErrCoordinatorInternalFailure = errors.New("coordinator internal error")
ErrCoordinatorInternalFailure = fmt.Errorf("coordinator internal error")
// ErrHardForkName indicates client request with the wrong hard fork name
ErrHardForkName = fmt.Errorf("wrong hard fork name")
)
var (
@@ -50,41 +50,30 @@ type proverTaskContext struct {
PublicKey string
ProverName string
ProverVersion string
HardForkNames map[string]struct{}
}
// checkParameter checks whether the prover task parameters are legal
func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, error) {
var ptc proverTaskContext
ptc.HardForkNames = make(map[string]struct{})
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist {
return nil, errors.New("get public key from context failed")
return nil, fmt.Errorf("get public key from context failed")
}
ptc.PublicKey = publicKey.(string)
proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
if !proverNameExist {
return nil, errors.New("get prover name from context failed")
return nil, fmt.Errorf("get prover name from context failed")
}
ptc.ProverName = proverName.(string)
proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
if !proverVersionExist {
return nil, errors.New("get prover version from context failed")
return nil, fmt.Errorf("get prover version from context failed")
}
ptc.ProverVersion = proverVersion.(string)
hardForkNamesStr, hardForkNameExist := ctx.Get(coordinatorType.HardForkName)
if !hardForkNameExist {
return nil, errors.New("get hard fork name from context failed")
}
hardForkNames := strings.Split(hardForkNamesStr.(string), ",")
for _, hardForkName := range hardForkNames {
ptc.HardForkNames[hardForkName] = struct{}{}
}
isBlocked, err := b.proverBlockListOrm.IsPublicKeyBlocked(ctx.Copy(), publicKey.(string))
if err != nil {
return nil, fmt.Errorf("failed to check whether the public key %s is blocked before assigning a chunk task, err: %w, proverName: %s, proverVersion: %s", publicKey, err, proverName, proverVersion)

View File

@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"errors"
"fmt"
"strings"
"time"
@@ -137,11 +138,11 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
m.proofReceivedTotal.Inc()
pk := ctx.GetString(coordinatorType.PublicKey)
if len(pk) == 0 {
return errors.New("get public key from context failed")
return fmt.Errorf("get public key from context failed")
}
pv := ctx.GetString(coordinatorType.ProverVersion)
if len(pv) == 0 {
return errors.New("get ProverVersion from context failed")
return fmt.Errorf("get ProverVersion from context failed")
}
proverTask, err := m.proverTaskOrm.GetProverTaskByUUIDAndPublicKey(ctx.Copy(), proofParameter.UUID, pk)
@@ -187,7 +188,7 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof, hardForkName)
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof)
}
if verifyErr != nil || !success {
@@ -296,7 +297,6 @@ func (m *ProofReceiverLogic) validator(ctx context.Context, proverTask *orm.Prov
// if the batch/chunk has already been proved and verified successfully, skip this proof submission
if m.checkIsTaskSuccess(ctx, proofParameter.TaskID, message.ProofType(proofParameter.TaskType)) {
m.proofRecover(ctx, proverTask, types.ProverTaskFailureTypeObjectAlreadyVerified, proofParameter)
m.validateFailureProverTaskHaveVerifier.Inc()
log.Info("the prove task have proved and verifier success, skip this submit proof", "hash", proofParameter.TaskID,
"taskType", proverTask.TaskType, "proverName", proverTask.ProverName, "proverPublicKey", pk)

View File

@@ -0,0 +1,11 @@
#!/bin/bash
work_dir="$(dirname -- "${BASH_SOURCE[0]}")"
work_dir="$(cd -- "$work_dir" && pwd)"
echo $work_dir
rm $work_dir/*.vkey
version=release-v0.11.4
wget https://circuit-release.s3.us-west-2.amazonaws.com/${version}/chunk_vk.vkey -O $work_dir/chunk_vk.vkey
wget https://circuit-release.s3.us-west-2.amazonaws.com/${version}/agg_vk.vkey -O $work_dir/agg_vk.vkey

View File

@@ -10,8 +10,8 @@ import (
// NewVerifier Sets up a mock verifier.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
batchVKMap := map[string]struct{}{"mock_vk": {}}
chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil
}
@@ -32,7 +32,7 @@ func (v *Verifier) VerifyBatchProof(proof *message.BatchProof, forkName string)
}
// VerifyBundleProof returns a mock verification result for a BundleProof.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) {
if string(proof.Proof) == InvalidTestProof {
return false, nil
}

View File

@@ -10,7 +10,7 @@ const InvalidTestProof = "this is a invalid proof"
// Verifier represents a rust ffi to a halo2 verifier.
type Verifier struct {
cfg *config.VerifierConfig
ChunkVKMap map[string]struct{}
BatchVKMap map[string]struct{}
BundleVkMap map[string]struct{}
ChunkVKMap map[string]string
BatchVKMap map[string]string
BundleVkMap map[string]string
}

View File

@@ -11,9 +11,11 @@ package verifier
import "C" //nolint:typecheck
import (
"embed"
"encoding/base64"
"encoding/json"
"io"
"io/fs"
"os"
"path"
"unsafe"
@@ -25,87 +27,50 @@ import (
"scroll-tech/coordinator/internal/config"
)
// This struct maps to `CircuitConfig` in common/libzkp/impl/src/verifier.rs.
// A separate struct is defined here to avoid side effects in case fields
// in `*config.CircuitConfig` are changed.
type rustCircuitConfig struct {
ForkName string `json:"fork_name"`
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
}
func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
return &rustCircuitConfig{
ForkName: cfg.ForkName,
ParamsPath: cfg.ParamsPath,
AssetsPath: cfg.AssetsPath,
}
}
// This struct maps to `VerifierConfig` in common/libzkp/impl/src/verifier.rs.
// A separate struct is defined here to avoid side effects in case fields
// in `*config.VerifierConfig` are changed.
type rustVerifierConfig struct {
LowVersionCircuit *rustCircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *rustCircuitConfig `json:"high_version_circuit"`
}
func newRustVerifierConfig(cfg *config.VerifierConfig) *rustVerifierConfig {
return &rustVerifierConfig{
LowVersionCircuit: newRustCircuitConfig(cfg.LowVersionCircuit),
HighVersionCircuit: newRustCircuitConfig(cfg.HighVersionCircuit),
}
}
// NewVerifier Sets up a rust ffi to call verify.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
if cfg.MockMode {
chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]struct{}{"mock_vk": {}}
bundleVKMap := map[string]struct{}{"mock_vk": {}}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"}
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"}
bundleVKMap := map[string]string{cfg.ForkName: "mock_vk"}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap, BundleVkMap: bundleVKMap}, nil
}
verifierConfig := newRustVerifierConfig(cfg)
configBytes, err := json.Marshal(verifierConfig)
if err != nil {
return nil, err
}
configStr := C.CString(string(configBytes))
paramsPathStr := C.CString(cfg.ParamsPath)
assetsPathStr := C.CString(cfg.AssetsPath)
defer func() {
C.free(unsafe.Pointer(configStr))
C.free(unsafe.Pointer(paramsPathStr))
C.free(unsafe.Pointer(assetsPathStr))
}()
C.init(configStr)
C.init_batch_verifier(paramsPathStr, assetsPathStr)
C.init_chunk_verifier(paramsPathStr, assetsPathStr)
v := &Verifier{
cfg: cfg,
ChunkVKMap: make(map[string]struct{}),
BatchVKMap: make(map[string]struct{}),
BundleVkMap: make(map[string]struct{}),
ChunkVKMap: make(map[string]string),
BatchVKMap: make(map[string]string),
BundleVkMap: make(map[string]string),
}
bundleVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_bundle.vkey"))
bundleVK, err := v.readVK(path.Join(cfg.AssetsPath, "vk_bundle.vkey"))
if err != nil {
return nil, err
}
batchVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_batch.vkey"))
batchVK, err := v.readVK(path.Join(cfg.AssetsPath, "vk_batch.vkey"))
if err != nil {
return nil, err
}
chunkVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_chunk.vkey"))
chunkVK, err := v.readVK(path.Join(cfg.AssetsPath, "vk_chunk.vkey"))
if err != nil {
return nil, err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
v.BundleVkMap[cfg.ForkName] = bundleVK
v.BatchVKMap[cfg.ForkName] = batchVK
v.ChunkVKMap[cfg.ForkName] = chunkVK
if err := v.loadLowVersionVKs(cfg); err != nil {
if err := v.loadEmbedVK(); err != nil {
return nil, err
}
v.loadCurieVersionVKs()
return v, nil
}
@@ -164,7 +129,7 @@ func (v *Verifier) VerifyChunkProof(proof *message.ChunkProof, forkName string)
}
// VerifyBundleProof Verify a ZkProof for a bundle of batches, by marshaling it and verifying it via the EVM verifier.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) {
if v.cfg.MockMode {
log.Info("Mock mode, verifier disabled")
if string(proof.Proof) == InvalidTestProof {
@@ -179,14 +144,12 @@ func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string
}
proofStr := C.CString(string(buf))
forkNameStr := C.CString(forkName)
defer func() {
C.free(unsafe.Pointer(proofStr))
C.free(unsafe.Pointer(forkNameStr))
}()
log.Info("Start to verify bundle proof ...")
verified := C.verify_bundle_proof(proofStr, forkNameStr)
verified := C.verify_bundle_proof(proofStr)
return verified != 0, nil
}
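Both variants of the FFI call use the same cgo ownership pattern: C.CString allocates a NUL-terminated copy on the C heap, the deferred C.free releases it after the call, and the C return value is compared against zero on the Go side. A self-contained toy sketch of that pattern (the C function below is a stand-in, not the real libzkp entry point):

```go
package main

/*
#include <stdlib.h>
#include <string.h>

// toy stand-in for an FFI entry point such as verify_bundle_proof
static int c_verify(const char* proof) { return strlen(proof) > 0; }
*/
import "C"

import (
	"fmt"
	"unsafe"
)

func main() {
	proofStr := C.CString(`{"proof":"..."}`)
	defer C.free(unsafe.Pointer(proofStr)) // C.CString allocates on the C heap; Go must free it

	verified := C.c_verify(proofStr)
	fmt.Println("verified:", verified != 0)
}
```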
@@ -202,27 +165,23 @@ func (v *Verifier) readVK(filePat string) (string, error) {
return base64.StdEncoding.EncodeToString(byt), nil
}
// load low version vks, current is darwin
func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
bundleVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_bundle.vkey"))
//go:embed legacy_vk/*
var legacyVKFS embed.FS
func (v *Verifier) loadEmbedVK() error {
batchVKBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/agg_vk.vkey")
if err != nil {
log.Error("load embed batch vk failure", "err", err)
return err
}
batchVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_batch.vkey"))
chunkVkBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/chunk_vk.vkey")
if err != nil {
log.Error("load embed chunk vk failure", "err", err)
return err
}
chunkVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_chunk.vkey"))
if err != nil {
return err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
v.BatchVKMap["curie"] = base64.StdEncoding.EncodeToString(batchVKBytes)
v.ChunkVKMap["curie"] = base64.StdEncoding.EncodeToString(chunkVkBytes)
return nil
}
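The loadEmbedVK variant compiles the legacy verifying keys into the binary with go:embed and reads them back through io/fs before base64-encoding them. A minimal sketch of that pattern, assuming a legacy_vk/ directory containing agg_vk.vkey sits next to the source file at build time:

```go
package main

import (
	"embed"
	"encoding/base64"
	"fmt"
	"io/fs"
)

//go:embed legacy_vk/*
var legacyVKFS embed.FS // the files must exist at build time for the directive to compile

func main() {
	raw, err := fs.ReadFile(legacyVKFS, "legacy_vk/agg_vk.vkey")
	if err != nil {
		panic(err)
	}
	// The verifier stores VKs base64-encoded, keyed by fork name (e.g. BatchVKMap["curie"]).
	encoded := base64.StdEncoding.EncodeToString(raw)
	fmt.Println("embedded VK bytes:", len(raw), "base64 length:", len(encoded))
}
```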
func (v *Verifier) loadCurieVersionVKs() {
v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA="] = struct{}{}
v.ChunkVKMap["AAAAGQAAAATyWEABRbJ6hQQ5/zLX1gTasr7349minA9rSgMS6gDeHwZKqikRiO3md+pXjjxMHnKQtmXYgMXhJSvlmZ+Ws+cheuly2X1RuNQzcZuRImaKPR9LJsVZYsXfJbuqdKX8p0Gj8G83wMJOmTzNVUyUol0w0lTU+CEiTpHOnxBsTF3EWaW3s1u4ycOgWt1c9M6s7WmaBZLYgAWYCunO5CLCLApNGbCASeck/LuSoedEri5u6HccCKU2khG6zl6W07jvYSbDVLJktbjRiHv+/HQix+K14j8boo8Z/unhpwXCsPxkQA=="] = struct{}{}
}


@@ -18,8 +18,7 @@ import (
var (
paramsPath = flag.String("params", "/assets/test_params", "params dir")
assetsPathLo = flag.String("assets_lo", "/assets/test_assets_lo", "assets dir")
assetsPathHi = flag.String("assets", "/assets/test_assets", "assets dir")
assetsPath = flag.String("assets", "/assets/test_assets", "assets dir")
batchProofPath = flag.String("batch_proof", "/assets/proof_data/batch_proof", "batch proof file path")
chunkProofPath1 = flag.String("chunk_proof1", "/assets/proof_data/chunk_proof1", "chunk proof file path 1")
chunkProofPath2 = flag.String("chunk_proof2", "/assets/proof_data/chunk_proof2", "chunk proof file path 2")
@@ -29,38 +28,28 @@ func TestFFI(t *testing.T) {
as := assert.New(t)
cfg := &config.VerifierConfig{
MockMode: false,
LowVersionCircuit: &config.CircuitConfig{
ParamsPath: *paramsPath,
AssetsPath: *assetsPathLo,
ForkName: "darwin",
MinProverVersion: "",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: *paramsPath,
AssetsPath: *assetsPathHi,
ForkName: "darwinV2",
MinProverVersion: "",
},
MockMode: false,
ParamsPath: *paramsPath,
AssetsPath: *assetsPath,
}
v, err := NewVerifier(cfg)
as.NoError(err)
chunkProof1 := readChunkProof(*chunkProofPath1, as)
chunkOk1, err := v.VerifyChunkProof(chunkProof1, "darwinV2")
chunkOk1, err := v.VerifyChunkProof(chunkProof1)
as.NoError(err)
as.True(chunkOk1)
t.Log("Verified chunk proof 1")
chunkProof2 := readChunkProof(*chunkProofPath2, as)
chunkOk2, err := v.VerifyChunkProof(chunkProof2, "darwinV2")
chunkOk2, err := v.VerifyChunkProof(chunkProof2)
as.NoError(err)
as.True(chunkOk2)
t.Log("Verified chunk proof 2")
batchProof := readBatchProof(*batchProofPath, as)
batchOk, err := v.VerifyBatchProof(batchProof, "darwinV2")
batchOk, err := v.VerifyBatchProof(batchProof, "curie")
as.NoError(err)
as.True(batchOk)
t.Log("Verified batch proof")


@@ -31,9 +31,6 @@ type Batch struct {
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
// proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -214,6 +211,23 @@ func (o *Batch) GetBatchByHash(ctx context.Context, hash string) (*Batch, error)
return &batch, nil
}
// GetLatestFinalizedBatch retrieves the latest finalized batch from the database.
func (o *Batch) GetLatestFinalizedBatch(ctx context.Context) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("rollup_status = ?", int(types.RollupFinalized))
db = db.Order("index desc")
var latestFinalizedBatch Batch
if err := db.First(&latestFinalizedBatch).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, fmt.Errorf("Batch.GetLatestBatch error: %w", err)
}
return &latestFinalizedBatch, nil
}
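GetLatestFinalizedBatch swallows gorm.ErrRecordNotFound, so "no finalized batch yet" reaches callers as a nil batch with a nil error rather than as an error. A small sketch of the calling convention this implies (the interface and helper below are illustrative stand-ins, not types from the repository):

```go
package main

import (
	"context"
	"fmt"
)

// Batch is a minimal stand-in for the ORM model; only the index matters here.
type Batch struct{ Index uint64 }

type batchStore interface {
	GetLatestFinalizedBatch(ctx context.Context) (*Batch, error)
}

// latestFinalizedIndex shows the nil-batch / nil-error convention a caller must handle.
func latestFinalizedIndex(ctx context.Context, store batchStore) (index uint64, found bool, err error) {
	batch, err := store.GetLatestFinalizedBatch(ctx)
	if err != nil {
		return 0, false, fmt.Errorf("query latest finalized batch: %w", err)
	}
	if batch == nil { // record-not-found is mapped to (nil, nil) by the ORM layer
		return 0, false, nil
	}
	return batch.Index, true, nil
}

// emptyStore simulates a database with no finalized batches.
type emptyStore struct{}

func (emptyStore) GetLatestFinalizedBatch(context.Context) (*Batch, error) { return nil, nil }

func main() {
	index, found, err := latestFinalizedIndex(context.Background(), emptyStore{})
	fmt.Println(index, found, err) // 0 false <nil>
}
```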
// GetBatchesByBundleHash retrieves the given batch.
func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) ([]*Batch, error) {
db := o.db.WithContext(ctx)
@@ -228,19 +242,6 @@ func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) (
return batches, nil
}
// GetBatchByIndex retrieves the batch by the given index.
func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("index = ?", index)
var batch Batch
if err := db.First(&batch).Error; err != nil {
return nil, fmt.Errorf("Batch.GetBatchByIndex error: %w, index: %v", err, index)
}
return &batch, nil
}
// InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil {


@@ -28,6 +28,7 @@ type Bundle struct {
BatchProofsStatus int16 `json:"batch_proofs_status" gorm:"column:batch_proofs_status;default:1"`
ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"`
Proof []byte `json:"proof" gorm:"column:proof;default:NULL"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" gorm:"column:prover_assigned_at;default:NULL"`
ProvedAt *time.Time `json:"proved_at" gorm:"column:proved_at;default:NULL"`
ProofTimeSec int32 `json:"proof_time_sec" gorm:"column:proof_time_sec;default:NULL"`
TotalAttempts int16 `json:"total_attempts" gorm:"column:total_attempts;default:0"`
@@ -135,14 +136,14 @@ func (o *Bundle) GetUnassignedAndBatchesUnreadyBundles(ctx context.Context, offs
return bundles, nil
}
// UpdateBatchProofsStatusByBundleHash updates the status of batch_proofs_status field for a given bundle hash.
func (o *Bundle) UpdateBatchProofsStatusByBundleHash(ctx context.Context, bundleHash string, status types.BatchProofsStatus) error {
// UpdateBatchProofsStatusByBatchHash updates the status of batch_proofs_status field for a given bundle hash.
func (o *Bundle) UpdateBatchProofsStatusByBatchHash(ctx context.Context, bundleHash string, status types.BatchProofsStatus) error {
db := o.db.WithContext(ctx)
db = db.Model(&Bundle{})
db = db.Where("hash = ?", bundleHash)
if err := db.Update("batch_proofs_status", status).Error; err != nil {
return fmt.Errorf("Bundle.UpdateBatchProofsStatusByBundleHash error: %w, bundle hash: %v, status: %v", err, bundleHash, status.String())
return fmt.Errorf("Bundle.UpdateBatchProofsStatusByBatchHash error: %w, bundle hash: %v, status: %v", err, bundleHash, status.String())
}
return nil
}


@@ -2,7 +2,6 @@ package orm
import (
"context"
"errors"
"fmt"
"time"
@@ -54,7 +53,7 @@ func (r *Challenge) InsertChallenge(ctx context.Context, challengeString string)
return fmt.Errorf("the challenge string:%s have been used", challengeString)
}
return errors.New("insert challenge string affected rows more than 1")
return fmt.Errorf("insert challenge string affected rows more than 1")
}
// DeleteExpireChallenge delete the expire challenge


@@ -74,11 +74,11 @@ func (*Chunk) TableName() string {
// GetUnassignedChunk retrieves unassigned chunk based on the specified limit.
// The returned chunks are sorted in ascending order by their index.
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Chunk, error) {
var chunk Chunk
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, height)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts)
err := db.Raw(sql).Scan(&chunk).Error
if err != nil {
return nil, fmt.Errorf("Chunk.GetUnassignedChunk error: %w", err)
@@ -91,11 +91,11 @@ func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTo
// GetAssignedChunk retrieves assigned chunk based on the specified limit.
// The returned chunks are sorted in ascending order by their index.
func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Chunk, error) {
var chunk Chunk
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, height)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts)
err := db.Raw(sql).Scan(&chunk).Error
if err != nil {
return nil, fmt.Errorf("Chunk.GetAssignedChunk error: %w", err)
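Both query variants assemble the SQL with fmt.Sprintf; every interpolated value is an integer, so this is injection-safe, but the same statement can also be expressed with bound parameters. The sketch below is an illustrative alternative using GORM's Raw with placeholders, not code from the repository; it assumes gorm.io/gorm (already a dependency of this package) is available:

```go
package orm

import (
	"context"
	"fmt"

	"gorm.io/gorm"
)

// chunkRow mirrors only the columns this sketch needs.
type chunkRow struct {
	Index          uint64 `gorm:"column:index"`
	ProvingStatus  int16  `gorm:"column:proving_status"`
	TotalAttempts  int16  `gorm:"column:total_attempts"`
	ActiveAttempts int16  `gorm:"column:active_attempts"`
}

// getUnassignedChunk expresses the same query with bound parameters instead of fmt.Sprintf.
func getUnassignedChunk(ctx context.Context, db *gorm.DB, unassignedStatus int, maxActiveAttempts, maxTotalAttempts uint8) (*chunkRow, error) {
	var c chunkRow
	err := db.WithContext(ctx).Raw(
		"SELECT * FROM chunk WHERE proving_status = ? AND total_attempts < ? AND active_attempts < ? AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
		unassignedStatus, maxTotalAttempts, maxActiveAttempts,
	).Scan(&c).Error
	if err != nil {
		return nil, fmt.Errorf("getUnassignedChunk: %w", err)
	}
	return &c, nil
}
```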


@@ -87,55 +87,6 @@ func (o *L2Block) GetL2BlockByNumber(ctx context.Context, blockNumber uint64) (*
return &l2Block, nil
}
// GetL2BlocksInRange retrieves the L2 blocks within the specified range (inclusive).
// The range is closed, i.e., it includes both start and end block numbers.
// The returned blocks are sorted in ascending order by their block number.
func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint64, endBlockNumber uint64) ([]*encoding.Block, error) {
if startBlockNumber > endBlockNumber {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: start block number should be less than or equal to end block number, start block: %v, end block: %v", startBlockNumber, endBlockNumber)
}
db := o.db.WithContext(ctx)
db = db.Model(&L2Block{})
db = db.Select("header, transactions, withdraw_root, row_consumption")
db = db.Where("number >= ? AND number <= ?", startBlockNumber, endBlockNumber)
db = db.Order("number ASC")
var l2Blocks []L2Block
if err := db.Find(&l2Blocks).Error; err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
// sanity check
if uint64(len(l2Blocks)) != endBlockNumber-startBlockNumber+1 {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: unexpected number of results, expected: %v, got: %v", endBlockNumber-startBlockNumber+1, len(l2Blocks))
}
var blocks []*encoding.Block
for _, v := range l2Blocks {
var block encoding.Block
if err := json.Unmarshal([]byte(v.Transactions), &block.Transactions); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.Header = &gethTypes.Header{}
if err := json.Unmarshal([]byte(v.Header), block.Header); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.WithdrawRoot = common.HexToHash(v.WithdrawRoot)
if err := json.Unmarshal([]byte(v.RowConsumption), &block.RowConsumption); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
blocks = append(blocks, &block)
}
return blocks, nil
}
// InsertL2Blocks inserts l2 blocks into the "l2_block" table.
// for unit test
func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*encoding.Block) error {


@@ -116,6 +116,25 @@ func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType messag
return proverTasks, nil
}
// GetAssignedProverTaskByTaskIDAndProver get prover task taskID and public key
// TODO: when prover all upgrade need DEPRECATED this function
func (o *ProverTask) GetAssignedProverTaskByTaskIDAndProver(ctx context.Context, taskType message.ProofType, taskID, proverPublicKey, proverVersion string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
db = db.Where("task_id", taskID)
db = db.Where("prover_public_key", proverPublicKey)
db = db.Where("prover_version", proverVersion)
db = db.Where("proving_status", types.ProverAssigned)
var proverTask ProverTask
err := db.First(&proverTask).Error
if err != nil {
return nil, fmt.Errorf("ProverTask.GetProverTaskByTaskIDAndProver err:%w, taskID:%s, pubkey:%s, prover_version:%s", err, taskID, proverPublicKey, proverVersion)
}
return &proverTask, nil
}
// GetProverTaskByUUIDAndPublicKey get prover task taskID by uuid and public key
func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid, publicKey string) (*ProverTask, error) {
db := o.db.WithContext(ctx)


@@ -18,8 +18,6 @@ const (
ProverName = "prover_name"
// ProverVersion the prover version for context
ProverVersion = "prover_version"
// HardForkName the hard fork name for context
HardForkName = "hard_fork_name"
)
// LoginSchema for /login response
@@ -37,12 +35,6 @@ type Message struct {
VKs []string `form:"vks" json:"vks"`
}
// LoginParameterWithHardForkName constructs new payload for login
type LoginParameterWithHardForkName struct {
LoginParameter
HardForkName string `form:"hard_fork_name" json:"hard_fork_name"`
}
// LoginParameter for /login api
type LoginParameter struct {
Message Message `form:"message" json:"message" binding:"required"`


@@ -60,10 +60,11 @@ func TestGenerateSignature(t *testing.T) {
authMsg := LoginParameter{
Message: Message{
ProverName: "test",
ProverVersion: "v4.4.45-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQ4Mzg0ODUsIm9yaWdfaWF0IjoxNzI0ODM0ODg1LCJyYW5kb20iOiJ6QmdNZGstNGc4UzNUNTFrVEFsYk1RTXg2TGJ4SUs4czY3ejM2SlNuSFlJPSJ9.x9PvihhNx2w4_OX5uCrv8QJCNYVQkIi-K2k8XFXYmik",
ProverTypes: []ProverType{ProverTypeChunk},
VKs: []string{"mock_vk"},
ProverVersion: "v4.4.32-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjEzMjc5MTIsIm9yaWdfaWF0IjoxNzIxMzI0MzEyLCJyYW5kb20iOiJWMVFlT19yNEV5eGRmYUtDalprVExEa0ZIemEyNTdQRG93dTV4SnVxYTdZPSJ9.x-B_TnkTUvs8-hiMfJXejxetAP6rXfeRUmyZ3S0uBiM",
ProverTypes: []ProverType{ProverTypeBatch},
VKs: []string{"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA=",
"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD1DEjW4Kell67H07wazT5DdzrSh4+amh+cmosQHp9p9snFypyoBGt3UHtoJGQBZlywZWDS9ht5pnaEoGBdaKcQk+lFb+WxTiId0KOAa0mafTZTQw8yToy57Jple64qzlRu1dux30tZZGuerLN1CKzg5Xl2iOpMK+l87jCINwVp5cUtF/XrvhBbU7onKh3KBiy99iUqVyA3Y6iiIZhGKWBSuSA4bNgDYIoVkqjHpdL35aEShoRO6pNXt7rDzxFoPzH0JuPI54nE4OhVrzZXwtkAEosxVa/fszcE092FH+HhhtxZBYe/KEzwdISU9TOPdId3UF/UMYC0MiYOlqffVTgAg="},
},
PublicKey: publicKeyHex,
}


@@ -3,6 +3,7 @@ package types
// GetTaskParameter for ProverTasks request parameter
type GetTaskParameter struct {
ProverHeight uint64 `form:"prover_height" json:"prover_height"`
TaskType int `form:"task_type" json:"task_type"`
TaskTypes []int `form:"task_types" json:"task_types"`
}
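The two fields reflect the two wire formats that appear in this comparison: a single task_type integer versus a task_types list. For the list form, the get_task request body serializes as in this small sketch (the struct is a local mirror of the JSON tags, and the type value 1 is only an example):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// getTaskParameter mirrors the JSON tags of the get_task request parameter.
type getTaskParameter struct {
	ProverHeight uint64 `json:"prover_height"`
	TaskTypes    []int  `json:"task_types"`
}

func main() {
	body, err := json.Marshal(getTaskParameter{ProverHeight: 100, TaskTypes: []int{1}})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // {"prover_height":100,"task_types":[1]}
}
```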


@@ -33,6 +33,12 @@ import (
"scroll-tech/coordinator/internal/route"
)
const (
forkNumberTwo = 2
forkNumberOne = 1
minProverVersion = "v2.0.0"
)
var (
conf *config.Config
@@ -66,7 +72,7 @@ func randomURL() string {
return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
}
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, forks []string) (*cron.Collector, *http.Server) {
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, nameForkMap map[string]int64) (*cron.Collector, *http.Server) {
var err error
db, err = testApps.GetGormDBClient()
@@ -84,23 +90,13 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
ProversPerSession: proversPerSession,
Verifier: &config.VerifierConfig{
MockMode: true,
LowVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "homestead",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "bernoulli",
MinProverVersion: "v4.3.0",
},
},
BatchCollectionTimeSec: 10,
ChunkCollectionTimeSec: 10,
BundleCollectionTimeSec: 10,
MaxVerifierWorkers: 10,
SessionAttempts: 5,
MinProverVersion: minProverVersion,
},
Auth: &config.Auth{
ChallengeExpireDurationSec: tokenTimeout,
@@ -109,12 +105,20 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
}
var chainConf params.ChainConfig
for _, forkName := range forks {
for forkName, forkNumber := range nameForkMap {
switch forkName {
case "shanghai":
chainConf.ShanghaiBlock = big.NewInt(forkNumber)
case "bernoulli":
chainConf.BernoulliBlock = big.NewInt(100)
chainConf.BernoulliBlock = big.NewInt(forkNumber)
case "london":
chainConf.LondonBlock = big.NewInt(forkNumber)
case "istanbul":
chainConf.IstanbulBlock = big.NewInt(forkNumber)
case "homestead":
chainConf.HomesteadBlock = big.NewInt(0)
chainConf.HomesteadBlock = big.NewInt(forkNumber)
case "eip155":
chainConf.EIP155Block = big.NewInt(forkNumber)
}
}
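The map-driven variant of setupCoordinator translates nameForkMap entries into fork activation heights on params.ChainConfig. A self-contained sketch of that translation, using a local stand-in for the chain config type so it compiles without the go-ethereum dependency:

```go
package main

import (
	"fmt"
	"math/big"
)

// chainConfig stands in for params.ChainConfig; only the fork fields toggled by the tests appear here.
type chainConfig struct {
	HomesteadBlock *big.Int
	EIP155Block    *big.Int
	IstanbulBlock  *big.Int
	LondonBlock    *big.Int
	ShanghaiBlock  *big.Int
	BernoulliBlock *big.Int
}

// applyForks sets each named fork's activation block from the test's nameForkMap.
func applyForks(nameForkMap map[string]int64) chainConfig {
	var cfg chainConfig
	for forkName, forkNumber := range nameForkMap {
		switch forkName {
		case "homestead":
			cfg.HomesteadBlock = big.NewInt(forkNumber)
		case "eip155":
			cfg.EIP155Block = big.NewInt(forkNumber)
		case "istanbul":
			cfg.IstanbulBlock = big.NewInt(forkNumber)
		case "london":
			cfg.LondonBlock = big.NewInt(forkNumber)
		case "shanghai":
			cfg.ShanghaiBlock = big.NewInt(forkNumber)
		case "bernoulli":
			cfg.BernoulliBlock = big.NewInt(forkNumber)
		}
	}
	return cfg
}

func main() {
	cfg := applyForks(map[string]int64{"istanbul": 2}) // mirrors map[string]int64{"istanbul": forkNumberTwo}
	fmt.Println(cfg.IstanbulBlock)                     // 2
}
```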
@@ -197,7 +201,7 @@ func TestApis(t *testing.T) {
func testHandshake(t *testing.T) {
// Setup coordinator and http server.
coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
proofCollector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -210,7 +214,7 @@ func testHandshake(t *testing.T) {
func testFailedHandshake(t *testing.T) {
// Setup coordinator and http server.
coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
proofCollector.Stop()
}()
@@ -228,7 +232,7 @@ func testFailedHandshake(t *testing.T) {
func testGetTaskBlocked(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -246,12 +250,12 @@ func testGetTaskBlocked(t *testing.T) {
expectedErr := fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", chunkProver.publicKey(), chunkProver.proverName, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
expectedErr = errors.New("get empty prover task")
expectedErr = fmt.Errorf("get empty prover task")
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrCoordinatorEmptyProofData, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
err = proverBlockListOrm.InsertProverPublicKey(context.Background(), batchProver.proverName, batchProver.publicKey())
assert.NoError(t, err)
@@ -259,20 +263,20 @@ func testGetTaskBlocked(t *testing.T) {
err = proverBlockListOrm.DeleteProverPublicKey(context.Background(), chunkProver.publicKey())
assert.NoError(t, err)
expectedErr = errors.New("get empty prover task")
expectedErr = fmt.Errorf("get empty prover task")
code, errMsg = chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrCoordinatorEmptyProofData, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
expectedErr = fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", batchProver.publicKey(), batchProver.proverName, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
}
func testOutdatedProverVersion(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -284,22 +288,20 @@ func testOutdatedProverVersion(t *testing.T) {
batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch, "v1.999.999")
assert.True(t, chunkProver.healthCheckSuccess(t))
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, chunkProver.proverVersion)
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, batchProver.proverVersion)
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
assert.Equal(t, expectedErr, fmt.Errorf(errMsg))
}
func testValidProof(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -382,7 +384,7 @@ func testValidProof(t *testing.T) {
func testInvalidProof(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -470,7 +472,7 @@ func testInvalidProof(t *testing.T) {
func testProofGeneratedFailed(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -571,7 +573,7 @@ func testProofGeneratedFailed(t *testing.T) {
func testTimeoutProof(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))


@@ -160,7 +160,7 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) (*
resp, err := client.R().
SetHeader("Content-Type", "application/json").
SetHeader("Authorization", fmt.Sprintf("Bearer %s", token)).
SetBody(map[string]interface{}{"prover_height": 100, "task_types": []int{int(proofType)}}).
SetBody(map[string]interface{}{"prover_height": 100, "task_type": int(proofType)}).
SetResult(&result).
Post("http://" + r.coordinatorURL + "/coordinator/v1/get_task")
assert.NoError(t, err)


@@ -4,8 +4,6 @@ import (
"encoding/json"
"os"
"path/filepath"
"scroll-tech/common/utils"
)
// DBConfig db config
@@ -31,11 +29,5 @@ func NewConfig(file string) (*DBConfig, error) {
return nil, err
}
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_ROLLUP_DB_CONFIG")
if err != nil {
return nil, err
}
return cfg, nil
}


@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
cur, err := Current(pgDB)
assert.NoError(t, err)
// total number of tables.
assert.Equal(t, int64(24), cur)
assert.Equal(t, int64(22), cur)
}
func testMigrate(t *testing.T) {
assert.NoError(t, Migrate(pgDB))
cur, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(24), cur)
assert.Equal(t, int64(22), cur)
}
func testRollback(t *testing.T) {
version, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(24), version)
assert.Equal(t, int64(22), version)
assert.NoError(t, Rollback(pgDB, nil))


@@ -14,6 +14,7 @@ CREATE TABLE bundle (
batch_proofs_status SMALLINT NOT NULL DEFAULT 1,
proving_status SMALLINT NOT NULL DEFAULT 1,
proof BYTEA DEFAULT NULL,
prover_assigned_at TIMESTAMP(0) DEFAULT NULL,
proved_at TIMESTAMP(0) DEFAULT NULL,
proof_time_sec INTEGER DEFAULT NULL,
total_attempts SMALLINT NOT NULL DEFAULT 0,


@@ -1,23 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE chunk
ADD COLUMN codec_version SMALLINT NOT NULL DEFAULT 0,
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE batch
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS chunk
DROP COLUMN IF EXISTS enable_compress,
DROP COLUMN IF EXISTS codec_version;
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS enable_compress;
-- +goose StatementEnd


@@ -1,15 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE batch
ADD COLUMN blob_bytes BYTEA;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS blob_bytes;
-- +goose StatementEnd

File diff suppressed because it is too large

406  prover/Cargo.lock (generated)

@@ -28,10 +28,44 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "aggregator"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -62,40 +96,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "ahash"
version = "0.8.11"
@@ -632,10 +632,37 @@ version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"external-tracer 0.11.0",
"gadgets 0.11.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.12.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -657,31 +684,6 @@ dependencies = [
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "byte-slice-cast"
version = "1.2.2"
@@ -1309,8 +1311,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1327,6 +1329,7 @@ dependencies = [
"revm-primitives",
"serde",
"serde_json",
"serde_stacker",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
@@ -1337,8 +1340,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1557,11 +1560,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.12.0",
"geth-utils 0.12.0",
"eth-types 0.11.0",
"geth-utils 0.11.0",
"log",
"serde",
"serde_json",
@@ -1570,11 +1573,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.13.0",
"geth-utils 0.13.0",
"eth-types 0.12.0",
"geth-utils 0.12.0",
"log",
"serde",
"serde_json",
@@ -1787,10 +1790,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1799,10 +1802,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.12.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1822,8 +1825,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -1832,8 +1835,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -2670,10 +2673,25 @@ dependencies = [
"subtle",
]
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.11.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]]
name = "mock"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.12.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -2686,24 +2704,23 @@ dependencies = [
]
[[package]]
name = "mock"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.13.0",
"itertools 0.11.0",
"eth-types 0.11.0",
"halo2curves",
"hex",
"log",
"rand",
"rand_chacha",
"num-bigint",
"poseidon-base",
"zktrie",
]
[[package]]
name = "mpt-zktrie"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"eth-types 0.12.0",
"halo2curves",
@@ -2711,21 +2728,7 @@ dependencies = [
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie",
]
[[package]]
@@ -3291,8 +3294,8 @@ dependencies = [
"http 1.1.0",
"log",
"once_cell",
"prover 0.11.0",
"prover 0.12.0",
"prover 0.13.0",
"rand",
"reqwest 0.12.4",
"reqwest-middleware",
@@ -3306,10 +3309,44 @@ dependencies = [
"tokio",
]
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"aggregator 0.11.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.11.0",
"chrono",
"dotenvy",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.11.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "prover"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"aggregator 0.12.0",
"anyhow",
@@ -3340,40 +3377,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "psm"
version = "0.1.21"
@@ -5335,10 +5338,52 @@ dependencies = [
"syn 2.0.66",
]
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"array-init",
"bus-mapping 0.11.0",
"either",
"env_logger 0.10.2",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402"
dependencies = [
"array-init",
"bus-mapping 0.12.0",
@@ -5377,64 +5422,13 @@ dependencies = [
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.2",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie_rust",
]
[[package]]
@@ -5451,20 +5445,6 @@ dependencies = [
"strum_macros 0.24.3",
]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]]
name = "zstd"
version = "0.13.0"


@@ -29,8 +29,8 @@ ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_curie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.5", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0-rc.3", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"


@@ -3,7 +3,7 @@
"keystore_path": "keystore.json",
"keystore_password": "prover-pwd",
"db_path": "unique-db-path-for-prover-1",
"prover_type": 2,
"proof_type": 2,
"low_version_circuit": {
"hard_fork_name": "bernoulli",
"params_path": "params",


@@ -14,8 +14,6 @@ use types::*;
use crate::{config::Config, key_signer::KeySigner};
pub use errors::ProofStatusNotOKError;
pub struct CoordinatorClient<'a> {
api: Api,
token: Option<String>,


@@ -1,6 +1,4 @@
use crate::{coordinator_client::ProofStatusNotOKError, types::ProofStatus};
use super::{errors::*, types::*};
use super::types::*;
use anyhow::{bail, Result};
use core::time::Duration;
use reqwest::{header::CONTENT_TYPE, Url};
@@ -78,23 +76,7 @@ impl Api {
token: &String,
) -> Result<Response<SubmitProofResponseData>> {
let method = "/coordinator/v1/submit_proof";
let response = self
.post_with_token::<SubmitProofRequest, Response<SubmitProofResponseData>>(
method, req, token,
)
.await?;
// when req's status already not ok, we mark the error returned from coordinator and will
// ignore it later.
if response.errcode == ErrorCode::ErrCoordinatorHandleZkProofFailure
&& req.status != ProofStatus::Ok
&& response
.errmsg
.contains("validator failure proof msg status not ok")
{
return Err(anyhow::anyhow!(ProofStatusNotOKError));
}
Ok(response)
self.post_with_token(method, req, token).await
}
async fn post_with_token<Req, Resp>(


@@ -1,5 +1,4 @@
use serde::{Deserialize, Deserializer};
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode {
@@ -52,14 +51,3 @@ impl<'de> Deserialize<'de> for ErrorCode {
Ok(ErrorCode::from_i32(v))
}
}
// ====================================================
#[derive(Debug, Clone)]
pub struct ProofStatusNotOKError;
impl fmt::Display for ProofStatusNotOKError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "proof status not ok")
}
}


@@ -1,5 +1,4 @@
#![feature(lazy_cell)]
#![feature(core_intrinsics)]
mod config;
mod coordinator_client;
@@ -38,7 +37,7 @@ struct Args {
log_file: Option<String>,
}
fn start() -> Result<()> {
fn main() -> Result<(), Box<dyn std::error::Error>> {
let args = Args::parse();
if args.version {
@@ -77,10 +76,3 @@ fn start() -> Result<()> {
Ok(())
}
fn main() {
let result = start();
if let Err(e) = result {
log::error!("main exit with error {:#}", e)
}
}


@@ -71,6 +71,7 @@ impl<'a> Prover<'a> {
let mut req = GetTaskRequest {
task_types: get_task_types(self.config.prover_type),
prover_height: None,
// vks: self.circuits_handler_provider.borrow().get_vks(),
};
if self.config.prover_type == ProverType::Chunk {


@@ -1,4 +1,4 @@
use super::{coordinator_client::ProofStatusNotOKError, prover::Prover, task_cache::TaskCache};
use super::{prover::Prover, task_cache::TaskCache};
use anyhow::{Context, Result};
use std::rc::Rc;
@@ -16,11 +16,7 @@ impl<'a> TaskProcessor<'a> {
loop {
log::info!("start a new round.");
if let Err(err) = self.prove_and_submit() {
if err.is::<ProofStatusNotOKError>() {
log::info!("proof status not ok, downgrade level to info.");
} else {
log::error!("encounter error: {:#}", err);
}
log::error!("encounter error: {:#}", err);
} else {
log::info!("prove & submit succeed.");
}
@@ -58,18 +54,11 @@ impl<'a> TaskProcessor<'a> {
);
let result = match self.prover.prove_task(&task_wrapper.task) {
Ok(proof_detail) => self.prover.submit_proof(proof_detail, &task_wrapper.task),
Err(error) => {
log::error!(
"failed to prove task, id: {}, error: {:#}",
&task_wrapper.task.id,
error
);
self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
)
}
Err(error) => self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
),
};
return result;
}


@@ -54,7 +54,7 @@ impl Default for TaskType {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProverType {
Chunk,
Batch,


@@ -1,6 +1,5 @@
mod common;
mod curie;
mod darwin;
mod darwin_v2;
use super::geth_client::GethClient;
use crate::{
@@ -9,8 +8,8 @@ use crate::{
utils::get_task_types,
};
use anyhow::{bail, Result};
use curie::CurieHandler;
use darwin::DarwinHandler;
use darwin_v2::DarwinV2Handler;
use std::{cell::RefCell, collections::HashMap, rc::Rc};
type HardForkName = String;
@@ -39,7 +38,7 @@ pub struct CircuitsHandlerProvider<'a> {
geth_client: Option<Rc<RefCell<GethClient>>>,
circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
current_fork_name: Option<HardForkName>,
current_hard_fork_name: Option<HardForkName>,
current_circuit: Option<Rc<Box<dyn CircuitsHandler>>>,
}
@@ -61,7 +60,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.low_version_circuit.hard_fork_name
);
AssetsDirEnvConfig::enable_first();
DarwinHandler::new(
CurieHandler::new(
prover_type,
&config.low_version_circuit.params_path,
&config.low_version_circuit.assets_path,
@@ -84,7 +83,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.high_version_circuit.hard_fork_name
);
AssetsDirEnvConfig::enable_second();
DarwinV2Handler::new(
DarwinHandler::new(
prover_type,
&config.high_version_circuit.params_path,
&config.high_version_circuit.assets_path,
@@ -103,7 +102,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
config,
geth_client,
circuits_handler_builder_map: m,
current_fork_name: None,
current_hard_fork_name: None,
current_circuit: None,
};
@@ -114,12 +113,13 @@ impl<'a> CircuitsHandlerProvider<'a> {
&mut self,
hard_fork_name: &String,
) -> Result<Rc<Box<dyn CircuitsHandler>>> {
match &self.current_fork_name {
Some(fork_name) if fork_name == hard_fork_name => {
match &self.current_hard_fork_name {
Some(name) if name == hard_fork_name => {
log::info!("get circuits handler from cache");
if let Some(handler) = &self.current_circuit {
Ok(handler.clone())
} else {
log::error!("missing cached handler, there must be something wrong.");
bail!("missing cached handler, there must be something wrong.")
}
}
@@ -131,11 +131,12 @@ impl<'a> CircuitsHandlerProvider<'a> {
log::info!("building circuits handler for {hard_fork_name}");
let handler = builder(self.prover_type, self.config, self.geth_client.clone())
.expect("failed to build circuits handler");
self.current_fork_name = Some(hard_fork_name.clone());
self.current_hard_fork_name = Some(hard_fork_name.clone());
let rc_handler = Rc::new(handler);
self.current_circuit = Some(rc_handler.clone());
Ok(rc_handler)
} else {
log::error!("missing builder, there must be something wrong.");
bail!("missing builder, there must be something wrong.")
}
}


@@ -1,33 +0,0 @@
use std::{collections::BTreeMap, rc::Rc};
use crate::types::ProverType;
use once_cell::sync::OnceCell;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
static mut PARAMS_MAP: OnceCell<Rc<BTreeMap<u32, ParamsKZG<Bn256>>>> = OnceCell::new();
pub fn get_params_map_instance<'a, F>(load_params_func: F) -> &'a BTreeMap<u32, ParamsKZG<Bn256>>
where
F: FnOnce() -> BTreeMap<u32, ParamsKZG<Bn256>>,
{
unsafe {
PARAMS_MAP.get_or_init(|| {
let params_map = load_params_func();
Rc::new(params_map)
})
}
}
pub fn get_degrees<F>(prover_types: &std::collections::HashSet<ProverType>, f: F) -> Vec<u32>
where
F: FnMut(&ProverType) -> Vec<u32>,
{
prover_types
.iter()
.flat_map(f)
.collect::<std::collections::HashSet<u32>>()
.into_iter()
.collect()
}


@@ -1,4 +1,4 @@
use super::{common::*, CircuitsHandler};
use super::CircuitsHandler;
use crate::{
geth_client::GethClient,
types::{ProverType, TaskType},
@@ -10,29 +10,21 @@ use serde::Deserialize;
use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_darwin_v2::{
aggregator::Prover as BatchProver,
check_chunk_hashes,
common::Prover as CommonProver,
config::{AGG_DEGREES, ZKEVM_DEGREES},
zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask,
use prover_curie::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
};
// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
#[derive(Debug, Clone, Deserialize)]
#[derive(Deserialize)]
pub struct BatchTaskDetail {
pub chunk_infos: Vec<ChunkInfo>,
#[serde(flatten)]
pub batch_proving_task: BatchProvingTask,
pub chunk_proofs: Vec<ChunkProof>,
}
type BundleTaskDetail = BundleProvingTask;
#[derive(Debug, Clone, Deserialize)]
#[derive(Deserialize)]
pub struct ChunkTaskDetail {
pub block_hashes: Vec<CommonHash>,
}
@@ -42,66 +34,33 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
}
#[derive(Default)]
pub struct DarwinV2Handler {
chunk_prover: Option<RefCell<ChunkProver<'static>>>,
batch_prover: Option<RefCell<BatchProver<'static>>>,
pub struct CurieHandler {
chunk_prover: Option<RefCell<ChunkProver>>,
batch_prover: Option<RefCell<BatchProver>>,
geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl DarwinV2Handler {
pub fn new_multi(
prover_types: Vec<ProverType>,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
let class_name = std::intrinsics::type_name::<Self>();
let prover_types_set = prover_types
.into_iter()
.collect::<std::collections::HashSet<ProverType>>();
let mut handler = Self {
batch_prover: None,
chunk_prover: None,
geth_client,
};
let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
ProverType::Chunk => ZKEVM_DEGREES.clone(),
ProverType::Batch => AGG_DEGREES.clone(),
});
let params_map = get_params_map_instance(|| {
log::info!(
"calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
class_name,
prover_types_set,
degrees
);
CommonProver::load_params_map(params_dir, &degrees)
});
for prover_type in prover_types_set {
match prover_type {
ProverType::Chunk => {
handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
params_map, assets_dir,
)));
}
ProverType::Batch => {
handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
params_map, assets_dir,
)))
}
}
}
Ok(handler)
}
impl CurieHandler {
pub fn new(
prover_type: ProverType,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
match prover_type {
ProverType::Chunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProverType::Batch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
}
}
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
@@ -124,15 +83,11 @@ impl DarwinV2Handler {
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
.collect();
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
@@ -142,11 +97,11 @@ impl DarwinV2Handler {
bail!("non-match chunk protocol")
}
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch_proof = prover.borrow_mut().gen_batch_proof(
batch_task_detail.batch_proving_task,
None,
self.get_output_dir(),
)?;
let batch = BatchProvingTask { chunk_proofs };
let batch_proof =
prover
.borrow_mut()
.gen_agg_evm_proof(batch, None, self.get_output_dir())?;
return Ok(batch_proof);
}
@@ -155,32 +110,12 @@ impl DarwinV2Handler {
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let bundle_proof = prover.borrow_mut().gen_bundle_proof(
bundle_task_detail,
None,
self.get_output_dir(),
)?;
return Ok(bundle_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_bundle_proof for task {}", task.id);
let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
Ok(serde_json::to_string(&bundle_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -190,6 +125,17 @@ impl DarwinV2Handler {
self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
}
fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkInfo, ChunkProof)>> {
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
Ok(batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.chunk_proofs.clone())
.collect())
}
fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
if block_hashes.is_empty() {
log::error!("[prover] failed to get sorted traces: block_hashes are empty");
@@ -244,7 +190,7 @@ impl DarwinV2Handler {
}
}
impl CircuitsHandler for DarwinV2Handler {
impl CircuitsHandler for CurieHandler {
fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
match task_type {
TaskType::Chunk => self
@@ -254,11 +200,8 @@ impl CircuitsHandler for DarwinV2Handler {
TaskType::Batch => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_batch_vk()),
TaskType::Bundle => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_bundle_vk()),
.and_then(|prover| prover.borrow().get_vk()),
TaskType::Bundle => None,
_ => unreachable!(),
}
}
@@ -267,7 +210,6 @@ impl CircuitsHandler for DarwinV2Handler {
match task_type {
TaskType::Chunk => self.gen_chunk_proof(task),
TaskType::Batch => self.gen_batch_proof(task),
TaskType::Bundle => self.gen_bundle_proof(task),
_ => unreachable!(),
}
}
@@ -279,11 +221,7 @@ impl CircuitsHandler for DarwinV2Handler {
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use ethers_core::types::H256;
use prover_darwin_v2::{
aggregator::eip4844, utils::chunk_trace_to_witness_block, BatchData, BatchHeader,
MAX_AGG_SNARKS,
};
use prover_curie::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
@@ -294,7 +232,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("DARWIN_V2_TEST_DIR")
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
@@ -306,9 +244,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
@@ -318,27 +256,20 @@ mod tests {
#[test]
fn test_circuits() -> Result<()> {
let bi_handler = DarwinV2Handler::new_multi(
vec![ProverType::Chunk, ProverType::Batch],
&PARAMS_PATH,
&ASSETS_PATH,
None,
)?;
let chunk_handler = CurieHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_handler = bi_handler;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_traces = vec![];
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
chunk_traces.push(chunk_trace.clone());
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
@@ -349,96 +280,30 @@ mod tests {
chunk_proofs.push(chunk_proof);
}
let batch_handler = chunk_handler;
let batch_handler = CurieHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let batch_task_detail = make_batch_task_detail(chunk_traces, chunk_proofs, None);
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
// copied from https://github.com/scroll-tech/scroll-prover/blob/main/integration/src/prove.rs
fn get_blob_from_chunks(chunks: &[ChunkInfo]) -> Vec<u8> {
let num_chunks = chunks.len();
let padded_chunk =
ChunkInfo::mock_padded_chunk_info_for_testing(chunks.last().as_ref().unwrap());
let chunks_with_padding = [
chunks.to_vec(),
vec![padded_chunk; MAX_AGG_SNARKS - num_chunks],
]
.concat();
let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(chunks.len(), &chunks_with_padding);
let batch_bytes = batch_data.get_batch_data_bytes();
let blob_bytes = eip4844::get_blob_bytes(&batch_bytes);
log::info!("blob_bytes len {}", blob_bytes.len());
blob_bytes
}
// TODO: chunk_infos can be extracted from chunk_proofs.
// Still needed?
fn make_batch_task_detail(
chunk_traces: Vec<Vec<BlockTrace>>,
chunk_proofs: Vec<ChunkProof>,
last_batcher_header: Option<BatchHeader<{ MAX_AGG_SNARKS }>>,
) -> BatchTaskDetail {
// dummy parent batch hash
let dummy_parent_batch_hash = H256([
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
]);
let chunk_infos: Vec<_> = chunk_proofs.iter().map(|p| p.chunk_info.clone()).collect();
let l1_message_popped = chunk_traces
.iter()
.flatten()
.map(|chunk| chunk.num_l1_txs())
.sum();
let last_block_timestamp = chunk_traces.last().map_or(0, |block_traces| {
block_traces
.last()
.map_or(0, |block_trace| block_trace.header.timestamp.as_u64())
});
let blob_bytes = get_blob_from_chunks(&chunk_infos);
let batch_header = BatchHeader::construct_from_chunks(
last_batcher_header.map_or(4, |header| header.version),
last_batcher_header.map_or(123, |header| header.batch_index + 1),
l1_message_popped,
last_batcher_header.map_or(l1_message_popped, |header| {
header.total_l1_message_popped + l1_message_popped
}),
last_batcher_header.map_or(dummy_parent_batch_hash, |header| header.batch_hash()),
last_block_timestamp,
&chunk_infos,
&blob_bytes,
);
BatchTaskDetail {
chunk_infos,
batch_proving_task: BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
},
}
}
fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
fn check_vk(task_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", task_type);
let vk_from_file = read_vk(task_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
fn read_vk(task_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", task_type);
let vk_file = match task_type {
TaskType::Chunk => CHUNK_VK_PATH.clone(),
TaskType::Batch => BATCH_VK_PATH.clone(),
TaskType::Bundle => todo!(),
TaskType::Bundle => unreachable!(),
TaskType::Undefined => unreachable!(),
};
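Taken together, the DarwinV2Handler side of this diff builds one handler for both prover types over the shared params map and answers every task type from that single instance. A minimal sketch lifted from the test_circuits flow above (PARAMS_PATH and ASSETS_PATH are the test constants):

// Hedged sketch: one handler owns both provers and the cached params map.
let handler = DarwinV2Handler::new_multi(
    vec![ProverType::Chunk, ProverType::Batch],
    &PARAMS_PATH,
    &ASSETS_PATH,
    None,
)?;
// Chunk, batch and bundle verifying keys all come from the same instance.
let chunk_vk = handler.get_vk(TaskType::Chunk);
let batch_vk = handler.get_vk(TaskType::Batch);
let bundle_vk = handler.get_vk(TaskType::Bundle);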


@@ -1,4 +1,4 @@
use super::{common::*, CircuitsHandler};
use super::CircuitsHandler;
use crate::{
geth_client::GethClient,
types::{ProverType, TaskType},
@@ -11,11 +11,7 @@ use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_darwin::{
aggregator::Prover as BatchProver,
check_chunk_hashes,
common::Prover as CommonProver,
config::{AGG_DEGREES, ZKEVM_DEGREES},
zkevm::Prover as ChunkProver,
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask,
};
@@ -43,65 +39,32 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
#[derive(Default)]
pub struct DarwinHandler {
chunk_prover: Option<RefCell<ChunkProver<'static>>>,
batch_prover: Option<RefCell<BatchProver<'static>>>,
chunk_prover: Option<RefCell<ChunkProver>>,
batch_prover: Option<RefCell<BatchProver>>,
geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl DarwinHandler {
pub fn new_multi(
prover_types: Vec<ProverType>,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
let class_name = std::intrinsics::type_name::<Self>();
let prover_types_set = prover_types
.into_iter()
.collect::<std::collections::HashSet<ProverType>>();
let mut handler = Self {
batch_prover: None,
chunk_prover: None,
geth_client,
};
let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
ProverType::Chunk => ZKEVM_DEGREES.clone(),
ProverType::Batch => AGG_DEGREES.clone(),
});
let params_map = get_params_map_instance(|| {
log::info!(
"calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
class_name,
prover_types_set,
degrees
);
CommonProver::load_params_map(params_dir, &degrees)
});
for prover_type in prover_types_set {
match prover_type {
ProverType::Chunk => {
handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
params_map, assets_dir,
)));
}
ProverType::Batch => {
handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
params_map, assets_dir,
)))
}
}
}
Ok(handler)
}
pub fn new(
prover_type: ProverType,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
match prover_type {
ProverType::Chunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProverType::Batch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
}
}
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
@@ -290,7 +253,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("DARWIN_TEST_DIR")
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
@@ -302,9 +265,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
@@ -314,14 +277,9 @@ mod tests {
#[test]
fn test_circuits() -> Result<()> {
let bi_handler = DarwinHandler::new_multi(
vec![ProverType::Chunk, ProverType::Batch],
&PARAMS_PATH,
&ASSETS_PATH,
None,
)?;
let chunk_handler =
DarwinHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_handler = bi_handler;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
@@ -344,7 +302,8 @@ mod tests {
chunk_proofs.push(chunk_proof);
}
let batch_handler = chunk_handler;
let batch_handler =
DarwinHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs);
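For contrast, the single-type constructor on the other side of this diff wires up exactly one prover per handler via from_dirs, so chunk and batch proving need two separate instances, each loading its own params from disk. A minimal sketch of that pattern, as exercised by the tests above:

// Hedged sketch: one handler instance per prover type, params loaded twice.
let chunk_handler = DarwinHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_handler = DarwinHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;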
