Compare commits

..

48 Commits

Author SHA1 Message Date
georgehao
dbfca54907 add 2024-10-16 22:18:06 +08:00
georgehao
849765242e test coede 2024-10-16 22:15:57 +08:00
sbaizet
41ce22be05 add jq tool to coordinator-api (#1539) 2024-10-15 10:57:03 +02:00
Péter Garamvölgyi
3353e36d16 ci: push docker images to correct user (#1538) 2024-10-15 09:58:38 +02:00
Morty
f2a656d67b feat(gas-oracle): support gas token volatile exchange rate (#1526)
Co-authored-by: yiweichi <yiweichi@users.noreply.github.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-10-14 18:20:35 +08:00
colin
d6b9176ef2 fix(bundle-proposer): add get chunk nil check (#1537)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-10-14 15:44:57 +08:00
sbaizet
09f087ab16 add debugging tools on scroll services (#1534) 2024-10-11 09:28:36 +02:00
Nazarii Denha
e3c87e6547 feat(sender): support web3signer (#1524) 2024-10-03 12:49:12 +02:00
colin
a1d1cbc2e5 fix(rollup-relayer): wrong registration of metric (#1521)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-09-20 13:32:36 +08:00
colin
404a625cb4 feat(rollup-relayer): add sync height and throughput metrics (#1520) 2024-09-20 01:08:16 +08:00
colin
736d850be1 feat(l2-watcher): add nil row consumption block metric (#1518) 2024-09-18 12:46:20 +08:00
Morty
246bf38e69 feat(database): allow environment variables to override config.json (#1517) 2024-09-10 07:28:28 +08:00
georgehao
bce33834ab fix(coordinator): assign chunk task bug (#1515)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-08-31 01:29:22 +08:00
Mengran Lan
ae8c858a07 fix(prover): fix degrees logic in prover new (#1512) 2024-08-29 10:01:44 +08:00
Morty
2ee1c898f0 fix: override config values use viper (#1502)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-08-28 18:53:20 +08:00
georgehao
a528103260 disable hardfork check (#1514)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-08-28 17:36:04 +08:00
georgehao
7b00055a5d fix coordiantor bug (#1513)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-08-28 16:25:16 +08:00
sbaizet
bf48417433 Add ca certificates to bridge history (#1508)
Co-authored-by: Morty <yiweichi1@gmail.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-08-28 08:45:29 +02:00
Mengran Lan
5c9243e03f fix(prover): support prover with multi types constructor for test code (#1509) 2024-08-28 12:48:10 +08:00
georgehao
fcfd97ab6c bump v4.4.54 (#1511) 2024-08-28 12:41:27 +08:00
georgehao
e453c23b16 fix coordiantor curie vk check (#1510)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-08-28 12:39:14 +08:00
sbaizet
432c98b651 add ca-certificates package to bridge-history services (#1506) 2024-08-27 15:12:04 +02:00
Mengran Lan
865d65ac3a feat(prover): use params map to initialize provers (#1507)
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
2024-08-27 19:24:58 +08:00
Zhang Zhuo
7f86aecefd prover: fix loading of degrees (#1503)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-08-27 10:52:18 +08:00
Zhang Zhuo
4470c814c3 opt(libzkp): allow params be shared between multi versions (#1499)
Co-authored-by: georgehao <haohongfan@gmail.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Rohit Narurkar <rohit.narurkar@proton.me>
2024-08-27 09:49:16 +08:00
Morty
31a6141fc6 fix(gas-oracle): excluded genesis batch when check commit batch timeout (#1498)
Co-authored-by: Nazarii Denha <dengaaa2002@gmail.com>
2024-08-26 22:10:05 +08:00
georgehao
ed4bda601f add log to bundle vk (#1500) 2024-08-26 16:43:59 +08:00
georgehao
cd99f380ce bump version v4.4.48 (#1497) 2024-08-26 14:58:50 +08:00
Mengran Lan
a8663eb447 feat(coordinator): hide low version circuit (#1496) 2024-08-26 14:56:55 +08:00
colin
fa80b3e4a8 feat: darwinv2 (#1484)
Co-authored-by: Xi Lin <zimpha@gmail.com>
Co-authored-by: Zhang Zhuo <mycinbrin@gmail.com>
Co-authored-by: Mengran Lan <mengran@scroll.io>
Co-authored-by: georgehao <haohongfan@gmail.com>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-08-23 17:56:08 +08:00
Daniel Helm
e09b98f0ed feat(rollup, coordinator, bridge-history-api): Allow Environment Variables to override config.json (#1489)
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-08-23 17:36:52 +08:00
colin
a6665264db refactor(coordinator): remove unused metrics (#1476) 2024-08-23 17:35:45 +08:00
Mengran Lan
517469a55d feat(prover): log utilization (#1491) 2024-08-22 16:03:20 +08:00
Morty
7c95208178 feat(bridge-history): support alternative gas token (#1483)
Co-authored-by: georgehao <haohongfan@gmail.com>
2024-08-19 16:43:31 +08:00
Nazarii Denha
27affe4e6b feat: do not update fee if not committing batches (#1441)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: NazariiDenha <NazariiDenha@users.noreply.github.com>
2024-08-18 20:02:01 +02:00
colin
0313f1651c fix(rollup-relayer): move rollup logs into effective branches (#1481)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-08-14 23:49:38 +08:00
georgehao
160f4c447a Feat/fix coordiantor curie recover sig (#1475)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-08-06 21:43:25 +08:00
georgehao
72f88bae5e fix coordinator curie public key recover bug (#1474)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-08-06 18:38:13 +08:00
georgehao
daca3ae6eb add log to coordiantor log check (#1473)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2024-08-06 14:42:18 +08:00
Mengran Lan
073e9e883c fix(coordinator): add compatible logic to jwt when curie prover not adding public_key in login request (#1472)
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
2024-08-06 11:07:25 +08:00
Nazarii Denha
cce5c6c62e feat: limit number of pending blob-carrying transactions (#1442)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: NazariiDenha <NazariiDenha@users.noreply.github.com>
2024-08-02 17:31:06 +08:00
CodeDragonVN
1ab9cf2de6 docs: restore broken link to scroll-contracts in README (#1462)
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2024-08-02 13:59:27 +08:00
colin
85e2e7ae94 feat(sender): do not bump gas price for nonce-gapped transactions (#1458)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2024-08-02 13:57:59 +08:00
georgehao
04215f3e7b Upgrade#4 (#1394)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Rohit Narurkar <rohit.narurkar@proton.me>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
Co-authored-by: Mengran Lan <mengran@scroll.io>
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
Co-authored-by: Mengran Lan <lanmengran@qq.com>
Co-authored-by: Suuuuuuperrrrr fred <FredrikaPhililip@proton.me>
Co-authored-by: sbaizet <74511063+sbaizet-ledger@users.noreply.github.com>
Co-authored-by: caseylove <casey4love@foxmail.com>
Co-authored-by: BoxChen <13927203+nishuzumi@users.noreply.github.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-07-31 17:30:09 +08:00
colin
dd6206fd59 feat(bridge-history-backend): track reset skipped message events (#1467) 2024-07-30 21:06:38 +08:00
yukionfire
d163abeffc refactor(all): use errors.New to replace fmt.Errorf with no parameters (#1445)
Co-authored-by: georgehao <haohongfan@gmail.com>
Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
2024-07-30 11:06:02 +08:00
Mengran Lan
e22af03774 fix(coordinator): recover prover_task when the related object is verified. (#1466)
Co-authored-by: amoylan2 <amoylan2@users.noreply.github.com>
2024-07-29 17:23:29 +08:00
colin
0fd7a877ce feat(rollup-relayer): make chunk & batch proposal intervals configurable (#1459) 2024-07-19 21:07:30 +08:00
137 changed files with 4136 additions and 1887 deletions

15
.coderabbit.yaml Normal file
View File

@@ -0,0 +1,15 @@
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
language: "en-US"
early_access: false
reviews:
profile: "chill"
request_changes_workflow: false
high_level_summary: true
poem: true
review_status: true
collapse_walkthrough: false
auto_review:
enabled: true
drafts: false
chat:
auto_reply: true

View File

@@ -49,8 +49,8 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -94,8 +94,8 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -139,8 +139,8 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -184,8 +184,8 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -229,8 +229,8 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -274,8 +274,8 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -318,8 +318,8 @@ jobs:
file: ./build/dockerfiles/coordinator-api.Dockerfile file: ./build/dockerfiles/coordinator-api.Dockerfile
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -363,7 +363,7 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest scrolltech/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

View File

@@ -1,6 +1,6 @@
.PHONY: fmt dev_docker build_test_docker run_test_docker clean update .PHONY: fmt dev_docker build_test_docker run_test_docker clean update
L2GETH_TAG=scroll-v5.5.1 L2GETH_TAG=scroll-v5.6.3
help: ## Display this help message help: ## Display this help message
@grep -h \ @grep -h \
@@ -40,8 +40,8 @@ fmt: ## Format the code
dev_docker: ## Build docker images for development/testing usages dev_docker: ## Build docker images for development/testing usages
docker pull postgres docker pull postgres
docker build -t scroll_l1geth ./common/testcontainers/docker/l1geth/ docker build -t scroll_l1geth --platform linux/amd64 ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth ./common/testcontainers/docker/l2geth/ docker build -t scroll_l2geth --platform linux/amd64 ./common/testcontainers/docker/l2geth/
clean: ## Empty out the bin folder clean: ## Empty out the bin folder
@rm -rf build/bin @rm -rf build/bin

View File

@@ -18,7 +18,7 @@
├── <a href="./database">database</a>: Database client and schema definition ├── <a href="./database">database</a>: Database client and schema definition
├── <a href="./prover">prover</a>: Prover client that runs proof generation for zkEVM circuit and aggregation circuit ├── <a href="./prover">prover</a>: Prover client that runs proof generation for zkEVM circuit and aggregation circuit
├── <a href="./rollup">rollup</a>: Rollup-related services ├── <a href="./rollup">rollup</a>: Rollup-related services
├── <a href="./scroll-contracts">scroll-contracts</a>: solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts. ├── <a href="https://github.com/scroll-tech/scroll-contracts.git">scroll-contracts</a>: solidity code for Scroll L1 bridge and rollup contracts and L2 bridge and pre-deployed contracts.
└── <a href="./tests">tests</a>: Integration tests └── <a href="./tests">tests</a>: Integration tests
</pre> </pre>

File diff suppressed because one or more lines are too long

View File

@@ -19,7 +19,9 @@
"ScrollChainAddr": "0xa13BAF47339d63B743e7Da8741db5456DAc1E556", "ScrollChainAddr": "0xa13BAF47339d63B743e7Da8741db5456DAc1E556",
"GatewayRouterAddr": "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6", "GatewayRouterAddr": "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6",
"MessageQueueAddr": "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B", "MessageQueueAddr": "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B",
"BatchBridgeGatewayAddr": "0x5Bcfd99c34cf7E06fc756f6f5aE7400504852bc4" "BatchBridgeGatewayAddr": "0x5Bcfd99c34cf7E06fc756f6f5aE7400504852bc4",
"GasTokenGatewayAddr": "0x0000000000000000000000000000000000000000",
"WrappedTokenGatewayAddr": "0x0000000000000000000000000000000000000000"
}, },
"L2": { "L2": {
"confirmation": 0, "confirmation": 0,

View File

@@ -89,7 +89,7 @@ require (
github.com/rjeczalik/notify v0.9.1 // indirect github.com/rjeczalik/notify v0.9.1 // indirect
github.com/rs/cors v1.7.0 // indirect github.com/rs/cors v1.7.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 // indirect github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb // indirect
github.com/scroll-tech/zktrie v0.8.4 // indirect github.com/scroll-tech/zktrie v0.8.4 // indirect
github.com/sethvargo/go-retry v0.2.4 // indirect github.com/sethvargo/go-retry v0.2.4 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect

View File

@@ -308,8 +308,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 h1:QKgfS8G0btzg7nmFjSjllaxGkns3yg7g2/tG1nWExEI= github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs= github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ= github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,6 +6,7 @@ import (
"path/filepath" "path/filepath"
"scroll-tech/common/database" "scroll-tech/common/database"
"scroll-tech/common/utils"
) )
// FetcherConfig is the configuration of Layer1 or Layer2 fetcher. // FetcherConfig is the configuration of Layer1 or Layer2 fetcher.
@@ -30,6 +31,8 @@ type FetcherConfig struct {
GatewayRouterAddr string `json:"GatewayRouterAddr"` GatewayRouterAddr string `json:"GatewayRouterAddr"`
MessageQueueAddr string `json:"MessageQueueAddr"` MessageQueueAddr string `json:"MessageQueueAddr"`
BatchBridgeGatewayAddr string `json:"BatchBridgeGatewayAddr"` BatchBridgeGatewayAddr string `json:"BatchBridgeGatewayAddr"`
GasTokenGatewayAddr string `json:"GasTokenGatewayAddr"`
WrappedTokenGatewayAddr string `json:"WrappedTokenGatewayAddr"`
} }
// RedisConfig redis config // RedisConfig redis config
@@ -64,5 +67,11 @@ func NewConfig(file string) (*Config, error) {
return nil, err return nil, err
} }
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_BRIDGE_HISTORY")
if err != nil {
return nil, err
}
return cfg, nil return cfg, nil
} }

View File

@@ -2,7 +2,7 @@ package logic
import ( import (
"context" "context"
"fmt" "errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/client_golang/prometheus/promauto"
@@ -153,7 +153,7 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce { if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce) log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce)
return fmt.Errorf("nonce mismatch") return errors.New("nonce mismatch")
} }
messageHashes := make([]common.Hash, len(l2WithdrawMessages)) messageHashes := make([]common.Hash, len(l2WithdrawMessages))

View File

@@ -168,6 +168,14 @@ func (e *L1EventParser) ParseL1SingleCrossChainEventLogs(ctx context.Context, lo
lastMessage.L2TokenAddress = event.L2Token.String() lastMessage.L2TokenAddress = event.L2Token.String()
lastMessage.TokenIDs = utils.ConvertBigIntArrayToString(event.TokenIDs) lastMessage.TokenIDs = utils.ConvertBigIntArrayToString(event.TokenIDs)
lastMessage.TokenAmounts = utils.ConvertBigIntArrayToString(event.TokenAmounts) lastMessage.TokenAmounts = utils.ConvertBigIntArrayToString(event.TokenAmounts)
case backendabi.L1DepositWrappedTokenSig:
event := backendabi.WrappedTokenMessageEvent{}
if err := utils.UnpackLog(backendabi.L1WrappedTokenGatewayABI, &event, "DepositWrappedToken", vlog); err != nil {
log.Error("Failed to unpack DepositWrappedToken event", "err", err)
return nil, nil, err
}
lastMessage := l1DepositMessages[len(l1DepositMessages)-1]
lastMessage.Sender = event.From.String()
case backendabi.L1SentMessageEventSig: case backendabi.L1SentMessageEventSig:
event := backendabi.L1SentMessageEvent{} event := backendabi.L1SentMessageEvent{}
if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "SentMessage", vlog); err != nil { if err := utils.UnpackLog(backendabi.IL1ScrollMessengerABI, &event, "SentMessage", vlog); err != nil {
@@ -320,6 +328,16 @@ func (e *L1EventParser) ParseL1MessageQueueEventLogs(logs []types.Log, l1Deposit
QueueIndex: index, QueueIndex: index,
}) })
} }
case backendabi.L1ResetDequeuedTransactionEventSig:
event := backendabi.L1ResetDequeuedTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "ResetDequeuedTransaction", vlog); err != nil {
log.Error("Failed to unpack ResetDequeuedTransaction event", "err", err)
return nil, err
}
l1MessageQueueEvents = append(l1MessageQueueEvents, &orm.MessageQueueEvent{
EventType: btypes.MessageQueueEventTypeResetDequeuedTransaction,
QueueIndex: event.StartIndex.Uint64(),
})
case backendabi.L1DropTransactionEventSig: case backendabi.L1DropTransactionEventSig:
event := backendabi.L1DropTransactionEvent{} event := backendabi.L1DropTransactionEvent{}
if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "DropTransaction", vlog); err != nil { if err := utils.UnpackLog(backendabi.IL1MessageQueueABI, &event, "DropTransaction", vlog); err != nil {

View File

@@ -51,11 +51,8 @@ type L1FetcherLogic struct {
// NewL1FetcherLogic creates L1 fetcher logic // NewL1FetcherLogic creates L1 fetcher logic
func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic { func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic {
addressList := []common.Address{ addressList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr), common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr), common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr), common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr), common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -69,11 +66,8 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
} }
gatewayList := []common.Address{ gatewayList := []common.Address{
common.HexToAddress(cfg.ETHGatewayAddr),
common.HexToAddress(cfg.StandardERC20GatewayAddr), common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr), common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr), common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr), common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -105,6 +99,26 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr)) gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr))
} }
if common.HexToAddress(cfg.ETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.ETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.ETHGatewayAddr))
}
if common.HexToAddress(cfg.WETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WETHGatewayAddr))
}
if common.HexToAddress(cfg.GasTokenGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.GasTokenGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.GasTokenGatewayAddr))
}
if common.HexToAddress(cfg.WrappedTokenGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
}
log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList) log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L1FetcherLogic{ f := &L1FetcherLogic{
@@ -210,7 +224,7 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
Topics: make([][]common.Hash, 1), Topics: make([][]common.Hash, 1),
} }
query.Topics[0] = make([]common.Hash, 14) query.Topics[0] = make([]common.Hash, 16)
query.Topics[0][0] = backendabi.L1DepositETHSig query.Topics[0][0] = backendabi.L1DepositETHSig
query.Topics[0][1] = backendabi.L1DepositERC20Sig query.Topics[0][1] = backendabi.L1DepositERC20Sig
query.Topics[0][2] = backendabi.L1DepositERC721Sig query.Topics[0][2] = backendabi.L1DepositERC721Sig
@@ -224,7 +238,9 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
query.Topics[0][10] = backendabi.L1QueueTransactionEventSig query.Topics[0][10] = backendabi.L1QueueTransactionEventSig
query.Topics[0][11] = backendabi.L1DequeueTransactionEventSig query.Topics[0][11] = backendabi.L1DequeueTransactionEventSig
query.Topics[0][12] = backendabi.L1DropTransactionEventSig query.Topics[0][12] = backendabi.L1DropTransactionEventSig
query.Topics[0][13] = backendabi.L1BridgeBatchDepositSig query.Topics[0][13] = backendabi.L1ResetDequeuedTransactionEventSig
query.Topics[0][14] = backendabi.L1BridgeBatchDepositSig
query.Topics[0][15] = backendabi.L1DepositWrappedTokenSig
eventLogs, err := f.client.FilterLogs(ctx, query) eventLogs, err := f.client.FilterLogs(ctx, query)
if err != nil { if err != nil {
@@ -339,6 +355,10 @@ func (f *L1FetcherLogic) updateMetrics(res L1FilterResult) {
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_skip_message").Add(1) f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_skip_message").Add(1)
case btypes.MessageQueueEventTypeDropTransaction: case btypes.MessageQueueEventTypeDropTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_drop_message").Add(1) f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_drop_message").Add(1)
// one ResetDequeuedTransaction event could indicate reset multiple skipped messages,
// this metric only counts the number of events, not the number of skipped messages.
case btypes.MessageQueueEventTypeResetDequeuedTransaction:
f.l1FetcherLogicFetchedTotal.WithLabelValues("L1_reset_skipped_messages").Add(1)
} }
} }

View File

@@ -54,7 +54,6 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.StandardERC20GatewayAddr), common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr), common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr), common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr), common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -68,7 +67,6 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
common.HexToAddress(cfg.StandardERC20GatewayAddr), common.HexToAddress(cfg.StandardERC20GatewayAddr),
common.HexToAddress(cfg.CustomERC20GatewayAddr), common.HexToAddress(cfg.CustomERC20GatewayAddr),
common.HexToAddress(cfg.WETHGatewayAddr),
common.HexToAddress(cfg.DAIGatewayAddr), common.HexToAddress(cfg.DAIGatewayAddr),
common.HexToAddress(cfg.ERC721GatewayAddr), common.HexToAddress(cfg.ERC721GatewayAddr),
@@ -100,6 +98,11 @@ func NewL2FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr)) gatewayList = append(gatewayList, common.HexToAddress(cfg.BatchBridgeGatewayAddr))
} }
if common.HexToAddress(cfg.WETHGatewayAddr) != (common.Address{}) {
addressList = append(addressList, common.HexToAddress(cfg.WETHGatewayAddr))
gatewayList = append(gatewayList, common.HexToAddress(cfg.WETHGatewayAddr))
}
log.Info("L2 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList) log.Info("L2 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)
f := &L2FetcherLogic{ f := &L2FetcherLogic{

View File

@@ -217,6 +217,12 @@ func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1Mes
db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex) db = db.Where("message_nonce = ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage) db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = types.TxStatusTypeDropped txStatusUpdateFields["tx_status"] = types.TxStatusTypeDropped
case btypes.MessageQueueEventTypeResetDequeuedTransaction:
db = db.Where("tx_status = ?", types.TxStatusTypeSkipped)
// reset skipped messages that the nonce is greater than or equal to the queue index.
db = db.Where("message_nonce >= ?", l1MessageQueueEvent.QueueIndex)
db = db.Where("message_type = ?", btypes.MessageTypeL1SentMessage)
txStatusUpdateFields["tx_status"] = types.TxStatusTypeSent
} }
if err := db.Updates(txStatusUpdateFields).Error; err != nil { if err := db.Updates(txStatusUpdateFields).Error; err != nil {
return fmt.Errorf("failed to update tx statuses of L1 message queue events, update fields: %v, error: %w", txStatusUpdateFields, err) return fmt.Errorf("failed to update tx statuses of L1 message queue events, update fields: %v, error: %w", txStatusUpdateFields, err)
@@ -230,7 +236,7 @@ func (c *CrossMessage) UpdateL1MessageQueueEventsInfo(ctx context.Context, l1Mes
db = db.Model(&CrossMessage{}) db = db.Model(&CrossMessage{})
txHashUpdateFields := make(map[string]interface{}) txHashUpdateFields := make(map[string]interface{})
switch l1MessageQueueEvent.EventType { switch l1MessageQueueEvent.EventType {
case btypes.MessageQueueEventTypeDequeueTransaction: case btypes.MessageQueueEventTypeDequeueTransaction, btypes.MessageQueueEventTypeResetDequeuedTransaction:
continue continue
case btypes.MessageQueueEventTypeQueueTransaction: case btypes.MessageQueueEventTypeQueueTransaction:
// only replayMessages or enforced txs (whose message hashes would not be found), sendMessages have been filtered out. // only replayMessages or enforced txs (whose message hashes would not be found), sendMessages have been filtered out.

View File

@@ -70,6 +70,7 @@ const (
MessageQueueEventTypeQueueTransaction MessageQueueEventTypeQueueTransaction
MessageQueueEventTypeDequeueTransaction MessageQueueEventTypeDequeueTransaction
MessageQueueEventTypeDropTransaction MessageQueueEventTypeDropTransaction
MessageQueueEventTypeResetDequeuedTransaction
) )
// BatchStatusType represents the type of batch status. // BatchStatusType represents the type of batch status.

View File

@@ -38,7 +38,7 @@ func GetBlockNumber(ctx context.Context, client *ethclient.Client, confirmations
// @todo: add unit test. // @todo: add unit test.
func UnpackLog(c *abi.ABI, out interface{}, event string, log types.Log) error { func UnpackLog(c *abi.ABI, out interface{}, event string, log types.Log) error {
if log.Topics[0] != c.Events[event].ID { if log.Topics[0] != c.Events[event].ID {
return fmt.Errorf("event signature mismatch") return errors.New("event signature mismatch")
} }
if len(log.Data) > 0 { if len(log.Data) > 0 {
if err := c.UnpackIntoInterface(out, event, log.Data); err != nil { if err := c.UnpackIntoInterface(out, event, log.Data); err != nil {

View File

@@ -17,7 +17,7 @@ RUN --mount=target=. \
FROM ubuntu:20.04 FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl" ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install vim netcat-openbsd net-tools curl -y
COPY --from=builder /bin/bridgehistoryapi-api /bin/ COPY --from=builder /bin/bridgehistoryapi-api /bin/
WORKDIR /app WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-api"] ENTRYPOINT ["bridgehistoryapi-api"]

View File

@@ -17,7 +17,8 @@ RUN --mount=target=. \
FROM ubuntu:20.04 FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl" ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install ca-certificates vim netcat-openbsd net-tools curl -y
RUN update-ca-certificates
COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/ COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/
WORKDIR /app WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-fetcher"] ENTRYPOINT ["bridgehistoryapi-fetcher"]

View File

@@ -40,6 +40,7 @@ FROM ubuntu:20.04
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl" ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
# ENV CHAIN_ID=534353 # ENV CHAIN_ID=534353
RUN apt update && apt install vim netcat-openbsd net-tools curl jq -y
RUN mkdir -p /src/coordinator/internal/logic/verifier/lib RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/coordinator_api /bin/ COPY --from=builder /bin/coordinator_api /bin/

View File

@@ -19,9 +19,8 @@ RUN --mount=target=. \
# Pull coordinator into a second stage deploy ubuntu container # Pull coordinator into a second stage deploy ubuntu container
FROM ubuntu:20.04 FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl" ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install vim netcat-openbsd net-tools curl -y
COPY --from=builder /bin/coordinator_cron /bin/ COPY --from=builder /bin/coordinator_cron /bin/
WORKDIR /app WORKDIR /app
ENTRYPOINT ["coordinator_cron"] ENTRYPOINT ["coordinator_cron"]

View File

@@ -21,7 +21,7 @@ RUN --mount=target=. \
# Pull gas_oracle into a second stage deploy ubuntu container # Pull gas_oracle into a second stage deploy ubuntu container
FROM ubuntu:20.04 FROM ubuntu:20.04
RUN apt update && apt install ca-certificates -y RUN apt update && apt install vim netcat-openbsd net-tools curl ca-certificates -y
ENV CGO_LDFLAGS="-ldl" ENV CGO_LDFLAGS="-ldl"

View File

@@ -21,7 +21,7 @@ RUN --mount=target=. \
# Pull rollup_relayer into a second stage deploy ubuntu container # Pull rollup_relayer into a second stage deploy ubuntu container
FROM ubuntu:20.04 FROM ubuntu:20.04
RUN apt update && apt install ca-certificates -y RUN apt update && apt install vim netcat-openbsd net-tools curl ca-certificates -y
ENV CGO_LDFLAGS="-ldl" ENV CGO_LDFLAGS="-ldl"

View File

@@ -1,83 +1,12 @@
package forks package forks
import ( import (
"math"
"math/big" "math/big"
"sort"
"github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/params" "github.com/scroll-tech/go-ethereum/params"
) )
// CollectSortedForkHeights returns a sorted set of block numbers that one or more forks are activated on
func CollectSortedForkHeights(config *params.ChainConfig) ([]uint64, map[uint64]bool, map[string]uint64) {
type nameFork struct {
name string
block *big.Int
}
forkHeightNameMap := make(map[uint64]string)
for _, fork := range []nameFork{
{name: "homestead", block: config.HomesteadBlock},
{name: "daoFork", block: config.DAOForkBlock},
{name: "eip150", block: config.EIP150Block},
{name: "eip155", block: config.EIP155Block},
{name: "eip158", block: config.EIP158Block},
{name: "byzantium", block: config.ByzantiumBlock},
{name: "constantinople", block: config.ConstantinopleBlock},
{name: "petersburg", block: config.PetersburgBlock},
{name: "istanbul", block: config.IstanbulBlock},
{name: "muirGlacier", block: config.MuirGlacierBlock},
{name: "berlin", block: config.BerlinBlock},
{name: "london", block: config.LondonBlock},
{name: "arrowGlacier", block: config.ArrowGlacierBlock},
{name: "archimedes", block: config.ArchimedesBlock},
{name: "shanghai", block: config.ShanghaiBlock},
{name: "bernoulli", block: config.BernoulliBlock},
{name: "curie", block: config.CurieBlock},
} {
if fork.block == nil {
continue
}
height := fork.block.Uint64()
// only keep latest fork for at each height, discard the rest
forkHeightNameMap[height] = fork.name
}
forkHeightsMap := make(map[uint64]bool)
forkNameHeightMap := make(map[string]uint64)
for height, name := range forkHeightNameMap {
forkHeightsMap[height] = true
forkNameHeightMap[name] = height
}
var forkHeights []uint64
for height := range forkHeightsMap {
forkHeights = append(forkHeights, height)
}
sort.Slice(forkHeights, func(i, j int) bool {
return forkHeights[i] < forkHeights[j]
})
return forkHeights, forkHeightsMap, forkNameHeightMap
}
// BlockRange returns the block range of the hard fork
// Need ensure the forkHeights is incremental
func BlockRange(currentForkHeight uint64, forkHeights []uint64) (from, to uint64) {
to = math.MaxInt64
for _, height := range forkHeights {
if currentForkHeight < height {
to = height
return
}
from = height
}
return
}
// GetHardforkName returns the name of the hardfork active at the given block height and timestamp. // GetHardforkName returns the name of the hardfork active at the given block height and timestamp.
// It checks the chain configuration to determine which hardfork is active. // It checks the chain configuration to determine which hardfork is active.
func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string { func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string {
@@ -87,8 +16,10 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin
return "bernoulli" return "bernoulli"
} else if !config.IsDarwin(blockTimestamp) { } else if !config.IsDarwin(blockTimestamp) {
return "curie" return "curie"
} else { } else if !config.IsDarwinV2(blockTimestamp) {
return "darwin" return "darwin"
} else {
return "darwinV2"
} }
} }
@@ -101,8 +32,10 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin
return encoding.CodecV1 return encoding.CodecV1
} else if !config.IsDarwin(blockTimestamp) { } else if !config.IsDarwin(blockTimestamp) {
return encoding.CodecV2 return encoding.CodecV2
} else { } else if !config.IsDarwinV2(blockTimestamp) {
return encoding.CodecV3 return encoding.CodecV3
} else {
return encoding.CodecV4
} }
} }
@@ -115,6 +48,8 @@ func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestam
return 15 return 15
} else if !config.IsDarwin(blockTimestamp) { } else if !config.IsDarwin(blockTimestamp) {
return 45 return 45
} else if !config.IsDarwinV2(blockTimestamp) {
return 45
} else { } else {
return 45 return 45
} }

View File

@@ -1,102 +0,0 @@
package forks
import (
"math"
"math/big"
"testing"
"github.com/scroll-tech/go-ethereum/params"
"github.com/stretchr/testify/require"
)
func TestCollectSortedForkBlocks(t *testing.T) {
l, m, n := CollectSortedForkHeights(&params.ChainConfig{
ArchimedesBlock: big.NewInt(0),
ShanghaiBlock: big.NewInt(3),
BernoulliBlock: big.NewInt(3),
CurieBlock: big.NewInt(4),
})
require.Equal(t, l, []uint64{
0,
3,
4,
})
require.Equal(t, map[uint64]bool{
3: true,
4: true,
0: true,
}, m)
require.Equal(t, map[string]uint64{
"archimedes": 0,
"bernoulli": 3,
"curie": 4,
}, n)
}
func TestBlockRange(t *testing.T) {
tests := []struct {
name string
forkHeight uint64
forkHeights []uint64
expectedFrom uint64
expectedTo uint64
}{
{
name: "ToInfinite",
forkHeight: 300,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 300,
expectedTo: math.MaxInt64,
},
{
name: "To300",
forkHeight: 200,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 200,
expectedTo: 300,
},
{
name: "To200",
forkHeight: 100,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To100",
forkHeight: 0,
forkHeights: []uint64{100, 200, 300},
expectedFrom: 0,
expectedTo: 100,
},
{
name: "To200-1",
forkHeight: 100,
forkHeights: []uint64{100, 200},
expectedFrom: 100,
expectedTo: 200,
},
{
name: "To2",
forkHeight: 1,
forkHeights: []uint64{1, 2},
expectedFrom: 1,
expectedTo: 2,
},
{
name: "ToInfinite-1",
forkHeight: 0,
forkHeights: []uint64{0},
expectedFrom: 0,
expectedTo: math.MaxInt64,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
from, to := BlockRange(test.forkHeight, test.forkHeights)
require.Equal(t, test.expectedFrom, from)
require.Equal(t, test.expectedTo, to)
})
}
}

View File

@@ -13,7 +13,7 @@ require (
github.com/modern-go/reflect2 v1.0.2 github.com/modern-go/reflect2 v1.0.2
github.com/orcaman/concurrent-map v1.0.0 github.com/orcaman/concurrent-map v1.0.0
github.com/prometheus/client_golang v1.19.0 github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/stretchr/testify v1.9.0 github.com/stretchr/testify v1.9.0
github.com/testcontainers/testcontainers-go v0.30.0 github.com/testcontainers/testcontainers-go v0.30.0

View File

@@ -633,8 +633,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 h1:QKgfS8G0btzg7nmFjSjllaxGkns3yg7g2/tG1nWExEI= github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs= github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ= github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,6 +6,8 @@ export RUST_BACKTRACE=full
export RUST_LOG=debug export RUST_LOG=debug
export RUST_MIN_STACK=100000000 export RUST_MIN_STACK=100000000
export PROVER_OUTPUT_DIR=test_zkp_test export PROVER_OUTPUT_DIR=test_zkp_test
export SCROLL_PROVER_ASSETS_DIR=/assets/test_assets
export DARWIN_V2_TEST_DIR=/assets
#export LD_LIBRARY_PATH=/:/usr/local/cuda/lib64 #export LD_LIBRARY_PATH=/:/usr/local/cuda/lib64
mkdir -p $PROVER_OUTPUT_DIR mkdir -p $PROVER_OUTPUT_DIR
@@ -13,32 +15,16 @@ mkdir -p $PROVER_OUTPUT_DIR
REPO=$(realpath ../..) REPO=$(realpath ../..)
function build_test_bins() { function build_test_bins() {
cd impl
cargo build --release
ln -f -s $(realpath target/release/libzkp.so) $REPO/prover/core/lib
ln -f -s $(realpath target/release/libzkp.so) $REPO/coordinator/internal/logic/verifier/lib
cd $REPO/prover cd $REPO/prover
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go make tests_binary
cd $REPO/coordinator cd $REPO/coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd $REPO/common/libzkp cd $REPO/common/libzkp
} }
function build_test_bins_old() {
cd $REPO
cd prover
make libzkp
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd ..
cd coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd ..
cd common/libzkp
}
build_test_bins build_test_bins
#rm -rf test_zkp_test/* rm -rf $PROVER_OUTPUT_DIR/*
#rm -rf prover.log verifier.log #rm -rf prover.log verifier.log
#$REPO/prover/core.test -test.v 2>&1 | tee prover.log $REPO/prover/prover.test --exact zk_circuits_handler::darwin_v2::tests::test_circuits 2>&1 | tee prover.log
$REPO/coordinator/verifier.test -test.v 2>&1 | tee verifier.log $REPO/coordinator/verifier.test -test.v 2>&1 | tee verifier.log

View File

@@ -28,44 +28,10 @@ dependencies = [
"cpufeatures", "cpufeatures",
] ]
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.11.0",
"ethers-core",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.11.0",
]
[[package]] [[package]]
name = "aggregator" name = "aggregator"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"ark-std 0.3.0", "ark-std 0.3.0",
"bitstream-io", "bitstream-io",
@@ -96,6 +62,40 @@ dependencies = [
"zkevm-circuits 0.12.0", "zkevm-circuits 0.12.0",
] ]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
]
[[package]] [[package]]
name = "ahash" name = "ahash"
version = "0.8.3" version = "0.8.3"
@@ -171,9 +171,9 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.72" version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "arc-swap" name = "arc-swap"
@@ -568,37 +568,10 @@ version = "3.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"external-tracer 0.11.0",
"gadgets 0.11.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]] [[package]]
name = "bus-mapping" name = "bus-mapping"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.12.0",
"ethers-core", "ethers-core",
@@ -620,6 +593,31 @@ dependencies = [
"strum_macros 0.25.3", "strum_macros 0.25.3",
] ]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]] [[package]]
name = "byte-slice-cast" name = "byte-slice-cast"
version = "1.2.2" version = "1.2.2"
@@ -1184,8 +1182,8 @@ dependencies = [
[[package]] [[package]]
name = "eth-types" name = "eth-types"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"base64 0.13.1", "base64 0.13.1",
"ethers-core", "ethers-core",
@@ -1202,7 +1200,6 @@ dependencies = [
"revm-primitives", "revm-primitives",
"serde", "serde",
"serde_json", "serde_json",
"serde_stacker",
"serde_with", "serde_with",
"sha3 0.10.8", "sha3 0.10.8",
"strum 0.25.0", "strum 0.25.0",
@@ -1213,8 +1210,8 @@ dependencies = [
[[package]] [[package]]
name = "eth-types" name = "eth-types"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"base64 0.13.1", "base64 0.13.1",
"ethers-core", "ethers-core",
@@ -1369,11 +1366,11 @@ dependencies = [
[[package]] [[package]]
name = "external-tracer" name = "external-tracer"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.11.0", "eth-types 0.12.0",
"geth-utils 0.11.0", "geth-utils 0.12.0",
"log", "log",
"serde", "serde",
"serde_json", "serde_json",
@@ -1382,11 +1379,11 @@ dependencies = [
[[package]] [[package]]
name = "external-tracer" name = "external-tracer"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.13.0",
"geth-utils 0.12.0", "geth-utils 0.13.0",
"log", "log",
"serde", "serde",
"serde_json", "serde_json",
@@ -1564,10 +1561,10 @@ dependencies = [
[[package]] [[package]]
name = "gadgets" name = "gadgets"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.11.0", "eth-types 0.12.0",
"halo2_proofs", "halo2_proofs",
"poseidon-base", "poseidon-base",
"sha3 0.10.8", "sha3 0.10.8",
@@ -1576,10 +1573,10 @@ dependencies = [
[[package]] [[package]]
name = "gadgets" name = "gadgets"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.13.0",
"halo2_proofs", "halo2_proofs",
"poseidon-base", "poseidon-base",
"sha3 0.10.8", "sha3 0.10.8",
@@ -1599,8 +1596,8 @@ dependencies = [
[[package]] [[package]]
name = "geth-utils" name = "geth-utils"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"env_logger 0.10.0", "env_logger 0.10.0",
"gobuild", "gobuild",
@@ -1609,8 +1606,8 @@ dependencies = [
[[package]] [[package]]
name = "geth-utils" name = "geth-utils"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"env_logger 0.10.0", "env_logger 0.10.0",
"gobuild", "gobuild",
@@ -2356,25 +2353,10 @@ dependencies = [
"subtle", "subtle",
] ]
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core",
"ethers-signers",
"external-tracer 0.11.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]] [[package]]
name = "mock" name = "mock"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.12.0",
"ethers-core", "ethers-core",
@@ -2387,23 +2369,24 @@ dependencies = [
] ]
[[package]] [[package]]
name = "mpt-zktrie" name = "mock"
version = "0.11.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"eth-types 0.11.0", "eth-types 0.13.0",
"halo2curves", "ethers-core",
"hex", "ethers-signers",
"external-tracer 0.13.0",
"itertools 0.11.0",
"log", "log",
"num-bigint", "rand",
"poseidon-base", "rand_chacha",
"zktrie",
] ]
[[package]] [[package]]
name = "mpt-zktrie" name = "mpt-zktrie"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.12.0",
"halo2curves", "halo2curves",
@@ -2411,7 +2394,21 @@ dependencies = [
"log", "log",
"num-bigint", "num-bigint",
"poseidon-base", "poseidon-base",
"zktrie", "zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
] ]
[[package]] [[package]]
@@ -2872,44 +2869,10 @@ dependencies = [
"unarray", "unarray",
] ]
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator 0.11.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.11.0",
"chrono",
"dotenvy",
"eth-types 0.11.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.11.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.11.0",
]
[[package]] [[package]]
name = "prover" name = "prover"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"aggregator 0.12.0", "aggregator 0.12.0",
"anyhow", "anyhow",
@@ -2940,6 +2903,40 @@ dependencies = [
"zkevm-circuits 0.12.0", "zkevm-circuits 0.12.0",
] ]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
]
[[package]] [[package]]
name = "psm" name = "psm"
version = "0.1.21" version = "0.1.21"
@@ -4543,52 +4540,10 @@ dependencies = [
"syn 2.0.27", "syn 2.0.27",
] ]
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping 0.11.0",
"either",
"env_logger 0.10.0",
"eth-types 0.11.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]] [[package]]
name = "zkevm-circuits" name = "zkevm-circuits"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"array-init", "array-init",
"bus-mapping 0.12.0", "bus-mapping 0.12.0",
@@ -4627,18 +4582,61 @@ dependencies = [
"subtle", "subtle",
] ]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]] [[package]]
name = "zkp" name = "zkp"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"base64 0.13.1", "base64 0.13.1",
"env_logger 0.9.3", "env_logger 0.9.3",
"halo2_proofs", "halo2_proofs",
"libc", "libc",
"log", "log",
"once_cell", "once_cell",
"prover 0.11.0",
"prover 0.12.0", "prover 0.12.0",
"prover 0.13.0",
"serde", "serde",
"serde_derive", "serde_derive",
"serde_json", "serde_json",
@@ -4651,7 +4649,16 @@ version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8" source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [ dependencies = [
"gobuild", "gobuild",
"zktrie_rust", "zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
] ]
[[package]] [[package]]
@@ -4668,6 +4675,20 @@ dependencies = [
"strum_macros 0.24.3", "strum_macros 0.24.3",
] ]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]] [[package]]
name = "zstd" name = "zstd"
version = "0.13.0" version = "0.13.0"

View File

@@ -13,8 +13,6 @@ halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" } ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" } ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" } ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers-etherscan = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
#ethers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"] [patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" } halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"] [patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
@@ -26,10 +24,10 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" } halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] } snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
# curie
prover_v3 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin # darwin
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0-rc.3", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] } prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin_v2
prover_v5 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.0" base64 = "0.13.0"
env_logger = "0.9.0" env_logger = "0.9.0"
@@ -39,6 +37,7 @@ once_cell = "1.19"
serde = "1.0" serde = "1.0"
serde_derive = "1.0" serde_derive = "1.0"
serde_json = "1.0.66" serde_json = "1.0.66"
anyhow = "1.0.86"
[profile.test] [profile.test]
opt-level = 3 opt-level = 3

View File

@@ -1,262 +0,0 @@
use crate::{
types::{CheckChunkProofsResponse, ProofResult},
utils::{
c_char_to_str, c_char_to_vec, file_exists, panic_catch, string_to_c_char, vec_to_c_char,
OUTPUT_DIR,
},
};
use libc::c_char;
use prover_v3::BatchProof as BatchProofV3;
use prover_v4::{
aggregator::{Prover, Verifier as VerifierV4},
check_chunk_hashes,
consts::BATCH_VK_FILENAME,
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchHeader, BatchProof as BatchProofV4, BatchProvingTask, BlockTrace, BundleProof,
BundleProvingTask, ChunkInfo, ChunkProof, MAX_AGG_SNARKS,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env, ptr::null};
// Process-wide singletons for the batch FFI layer: written once by the
// `init_batch_prover` / `init_batch_verifier` entry points, read by the
// other exported functions. Access is via `static mut`, so callers must
// serialize initialization and use (single-threaded FFI contract).
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();
/// Initializes the global batch `PROVER` from params/assets directories.
///
/// # Safety
///
/// `params_dir` and `assets_dir` must be valid NUL-terminated C strings.
/// Must be called once, before any other batch-prover FFI call; a second
/// call panics because the `PROVER` cell is already set.
#[no_mangle]
pub unsafe extern "C" fn init_batch_prover(params_dir: *const c_char, assets_dir: *const c_char) {
    init_env_and_log("ffi_batch_prove");
    let params_dir = c_char_to_str(params_dir);
    let assets_dir = c_char_to_str(assets_dir);
    // TODO: add a settings in scroll-prover.
    // The prover crate locates its assets through this process-wide env var.
    env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
    // VK file must exist, it is optional and logged as a warning in prover.
    if !file_exists(assets_dir, &BATCH_VK_FILENAME) {
        panic!("{} must exist in folder {}", *BATCH_VK_FILENAME, assets_dir);
    }
    let prover = Prover::from_dirs(params_dir, assets_dir);
    PROVER.set(prover).unwrap();
}
/// Initializes the global batch `VERIFIER_V4` from params/assets directories.
///
/// # Safety
///
/// `params_dir` and `assets_dir` must be valid NUL-terminated C strings.
/// Must be called once before `verify_batch_proof`/`verify_bundle_proof`;
/// a second call panics because the cell is already set.
#[no_mangle]
pub unsafe extern "C" fn init_batch_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
    init_env_and_log("ffi_batch_verify");
    let params_dir = c_char_to_str(params_dir);
    let assets_dir = c_char_to_str(assets_dir);
    // TODO: add a settings in scroll-prover.
    env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
    let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);
    VERIFIER_V4.set(verifier_v4).unwrap();
}
/// Returns the batch verifying key, base64-encoded, as a freshly allocated
/// C string; returns a null pointer if the prover is uninitialized, the VK
/// is unavailable, or the call panics.
///
/// # Safety
///
/// `init_batch_prover` must have been called first; the returned pointer is
/// owned by the caller (release it with `free_c_chars`).
#[no_mangle]
pub unsafe extern "C" fn get_batch_vk() -> *const c_char {
    match panic_catch(|| PROVER.get_mut().unwrap().get_batch_vk()) {
        Ok(Some(vk)) => string_to_c_char(base64::encode(vk)),
        _ => null(),
    }
}
/// Returns the bundle verifying key, base64-encoded, as a freshly allocated
/// C string; returns a null pointer if the prover is uninitialized, the VK
/// is unavailable, or the call panics.
///
/// # Safety
///
/// `init_batch_prover` must have been called first; the returned pointer is
/// owned by the caller (release it with `free_c_chars`).
#[no_mangle]
pub unsafe extern "C" fn get_bundle_vk() -> *const c_char {
    match panic_catch(|| PROVER.get_mut().unwrap().get_bundle_vk()) {
        Ok(Some(vk)) => string_to_c_char(base64::encode(vk)),
        _ => null(),
    }
}
/// Checks that a JSON array of chunk proofs is non-empty and consistent with
/// the initialized prover (via `check_protocol_of_chunks`).
///
/// # Safety
///
/// `chunk_proofs` must be a valid NUL-terminated C string. Requires a prior
/// `init_batch_prover` call. Returns a JSON `CheckChunkProofsResponse` as a
/// newly allocated C string, or null if the response cannot be serialized.
#[no_mangle]
pub unsafe extern "C" fn check_chunk_proofs(chunk_proofs: *const c_char) -> *const c_char {
    // Run everything under `panic_catch` so a panic cannot unwind across the
    // FFI boundary; it is reported through the `error` field instead.
    let check_result: Result<bool, String> = panic_catch(|| {
        let chunk_proofs = c_char_to_vec(chunk_proofs);
        let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
            .map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
        if chunk_proofs.is_empty() {
            return Err("provided chunk proofs are empty.".to_string());
        }
        let prover_ref = PROVER.get().expect("failed to get reference to PROVER.");
        let valid = prover_ref.check_protocol_of_chunks(&chunk_proofs);
        Ok(valid)
    })
    .unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
    // Translate the internal result into the stable FFI response shape.
    let r = match check_result {
        Ok(valid) => CheckChunkProofsResponse {
            ok: valid,
            error: None,
        },
        Err(err) => CheckChunkProofsResponse {
            ok: false,
            error: Some(err),
        },
    };
    serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// Generates a batch proof from chunk infos, chunk proofs, and a batch header.
///
/// # Safety
///
/// All three arguments must be valid NUL-terminated C strings containing JSON
/// (`Vec<ChunkInfo>`, `Vec<ChunkProof>`, `BatchHeader`). Requires a prior
/// `init_batch_prover` call. Returns a JSON `ProofResult` C string (proof
/// bytes on success, error text on failure), or null if the response itself
/// cannot be serialized.
#[no_mangle]
pub unsafe extern "C" fn gen_batch_proof(
    chunk_hashes: *const c_char,
    chunk_proofs: *const c_char,
    batch_header: *const c_char,
) -> *const c_char {
    // Panics inside the closure are converted into `Err` strings so they
    // cannot unwind across the FFI boundary.
    let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
        let chunk_hashes = c_char_to_vec(chunk_hashes);
        let chunk_proofs = c_char_to_vec(chunk_proofs);
        let batch_header = c_char_to_vec(batch_header);
        let chunk_hashes = serde_json::from_slice::<Vec<ChunkInfo>>(&chunk_hashes)
            .map_err(|e| format!("failed to deserialize chunk hashes: {e:?}"))?;
        let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
            .map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
        let batch_header = serde_json::from_slice::<BatchHeader<MAX_AGG_SNARKS>>(&batch_header)
            .map_err(|e| format!("failed to deserialize batch header: {e:?}"))?;
        // Each chunk info must pair with exactly one chunk proof.
        if chunk_hashes.len() != chunk_proofs.len() {
            return Err(format!("chunk hashes and chunk proofs lengths mismatch: chunk_hashes.len() = {}, chunk_proofs.len() = {}",
            chunk_hashes.len(), chunk_proofs.len()));
        }
        let chunk_hashes_proofs: Vec<(_,_)> = chunk_hashes
            .into_iter()
            .zip(chunk_proofs.clone())
            .collect();
        // Cross-check the provided chunk infos against the proofs before proving.
        check_chunk_hashes("", &chunk_hashes_proofs).map_err(|e| format!("failed to check chunk info: {e:?}"))?;
        let batch = BatchProvingTask {
            chunk_proofs,
            batch_header,
        };
        let proof = PROVER
            .get_mut()
            .expect("failed to get mutable reference to PROVER.")
            // OUTPUT_DIR is an optional debug dump directory (PROVER_OUTPUT_DIR env).
            .gen_batch_proof(batch, None, OUTPUT_DIR.as_deref())
            .map_err(|e| format!("failed to generate proof: {e:?}"))?;
        serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
    })
    .unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
    // Translate the internal result into the stable FFI response shape.
    let r = match proof_result {
        Ok(proof_bytes) => ProofResult {
            message: Some(proof_bytes),
            error: None,
        },
        Err(err) => ProofResult {
            message: None,
            error: Some(err),
        },
    };
    serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// Verifies a batch proof for the given hard fork.
///
/// # Safety
///
/// `proof` and `fork_name` must be valid NUL-terminated C strings.
/// Returns 1 if the proof verifies, 0 otherwise (including on panic).
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
    proof: *const c_char,
    fork_name: *const c_char,
) -> c_char {
    let proof = c_char_to_vec(proof);
    let fork_name_str = c_char_to_str(fork_name);
    // Map the fork name to an upgrade id; unknown names fall back to
    // darwin (4) with a warning rather than failing outright.
    let fork_id = match fork_name_str {
        "curie" => 3,
        "darwin" => 4,
        _ => {
            log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
            4
        }
    };
    let verified = panic_catch(|| {
        if fork_id == 3 {
            // As of upgrade #3 (Curie), we verify batch proofs on-chain (EVM).
            let proof = serde_json::from_slice::<BatchProofV3>(proof.as_slice()).unwrap();
            verify_evm_calldata(
                // Embedded bytecode of the plonk EVM verifier built for v0.11.4.
                include_bytes!("plonk_verifier_0.11.4.bin").to_vec(),
                proof.calldata(),
            )
        } else {
            // Post upgrade #4 (Darwin), batch proofs are not EVM-verifiable. Instead they are
            // halo2 proofs meant to be bundled recursively.
            let proof = serde_json::from_slice::<BatchProofV4>(proof.as_slice()).unwrap();
            VERIFIER_V4.get().unwrap().verify_batch_proof(&proof)
        }
    });
    verified.unwrap_or(false) as c_char
}
/// Generates a bundle proof over a JSON array of (darwin/v4) batch proofs.
///
/// # Safety
///
/// `batch_proofs` must be a valid NUL-terminated C string. Requires a prior
/// `init_batch_prover` call. Returns a JSON `ProofResult` C string, or null
/// if the response cannot be serialized.
#[no_mangle]
pub unsafe extern "C" fn gen_bundle_proof(batch_proofs: *const c_char) -> *const c_char {
    // Panics inside the closure are converted into `Err` strings so they
    // cannot unwind across the FFI boundary.
    let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
        let batch_proofs = c_char_to_vec(batch_proofs);
        let batch_proofs = serde_json::from_slice::<Vec<BatchProofV4>>(&batch_proofs)
            .map_err(|e| format!("failed to deserialize batch proofs: {e:?}"))?;
        let bundle = BundleProvingTask { batch_proofs };
        let proof = PROVER
            .get_mut()
            .expect("failed to get mutable reference to PROVER.")
            // OUTPUT_DIR is an optional debug dump directory (PROVER_OUTPUT_DIR env).
            .gen_bundle_proof(bundle, None, OUTPUT_DIR.as_deref())
            .map_err(|e| format!("failed to generate bundle proof: {e:?}"))?;
        serde_json::to_vec(&proof)
            .map_err(|e| format!("failed to serialize the bundle proof: {e:?}"))
    })
    .unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
    // Translate the internal result into the stable FFI response shape.
    let r = match proof_result {
        Ok(proof_bytes) => ProofResult {
            message: Some(proof_bytes),
            error: None,
        },
        Err(err) => ProofResult {
            message: None,
            error: Some(err),
        },
    };
    serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// Verifies a bundle proof with the v4 aggregation verifier.
///
/// # Safety
///
/// `proof` must be a valid NUL-terminated C string containing a JSON
/// `BundleProof`; `init_batch_verifier` must have been called first.
/// Returns 1 on success, 0 on failure or panic.
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(proof: *const c_char) -> c_char {
    let raw = c_char_to_vec(proof);
    let bundle_proof = serde_json::from_slice::<BundleProof>(&raw).unwrap();
    match panic_catch(|| VERIFIER_V4.get().unwrap().verify_bundle_proof(bundle_proof)) {
        Ok(ok) => ok as c_char,
        Err(_) => false as c_char,
    }
}
// This function is only used for debugging on Go side.
/// Converts a JSON array of block traces into the corresponding `ChunkInfo`,
/// returned as a JSON C string.
///
/// # Safety
///
/// `block_traces` must be a valid NUL-terminated C string; malformed input
/// panics (acceptable for a debug-only helper).
#[no_mangle]
pub unsafe extern "C" fn block_traces_to_chunk_info(block_traces: *const c_char) -> *const c_char {
    let raw = c_char_to_vec(block_traces);
    let traces: Vec<BlockTrace> = serde_json::from_slice(&raw).unwrap();
    let witness_block = chunk_trace_to_witness_block(traces).unwrap();
    let info = ChunkInfo::from_witness_block(&witness_block, false);
    vec_to_c_char(serde_json::to_vec(&info).unwrap())
}

View File

@@ -1,131 +0,0 @@
use crate::{
types::ProofResult,
utils::{
c_char_to_str, c_char_to_vec, file_exists, panic_catch, string_to_c_char, vec_to_c_char,
OUTPUT_DIR,
},
};
use libc::c_char;
use prover_v3::{zkevm::Verifier as VerifierV3, ChunkProof as ChunkProofV3};
use prover_v4::{
consts::CHUNK_VK_FILENAME,
utils::init_env_and_log,
zkevm::{Prover, Verifier as VerifierV4},
BlockTrace, ChunkProof as ChunkProofV4, ChunkProvingTask,
};
use std::{cell::OnceCell, env, ptr::null};
// Process-wide singletons for the chunk FFI layer: written once by the
// `init_chunk_prover` / `init_chunk_verifier` entry points, read by the
// other exported functions (single-threaded FFI contract).
static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER_V3: OnceCell<VerifierV3> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();
/// Initializes the global chunk `PROVER` from params/assets directories.
///
/// # Safety
///
/// `params_dir` and `assets_dir` must be valid NUL-terminated C strings.
/// Must be called once, before any other chunk-prover FFI call; a second
/// call panics because the `PROVER` cell is already set.
#[no_mangle]
pub unsafe extern "C" fn init_chunk_prover(params_dir: *const c_char, assets_dir: *const c_char) {
    init_env_and_log("ffi_chunk_prove");
    let params_dir = c_char_to_str(params_dir);
    let assets_dir = c_char_to_str(assets_dir);
    // TODO: add a settings in scroll-prover.
    // The prover crate locates its assets through this process-wide env var.
    env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
    // VK file must exist, it is optional and logged as a warning in prover.
    if !file_exists(assets_dir, &CHUNK_VK_FILENAME) {
        panic!("{} must exist in folder {}", *CHUNK_VK_FILENAME, assets_dir);
    }
    let prover = Prover::from_dirs(params_dir, assets_dir);
    PROVER.set(prover).unwrap();
}
/// Initializes both chunk verifiers (v3/curie and v4/darwin) from the same
/// params/assets directories.
///
/// # Safety
///
/// `params_dir` and `assets_dir` must be valid NUL-terminated C strings.
/// Must be called once before `verify_chunk_proof`; a second call panics
/// because the cells are already set.
#[no_mangle]
pub unsafe extern "C" fn init_chunk_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
    init_env_and_log("ffi_chunk_verify");
    let params_dir = c_char_to_str(params_dir);
    let assets_dir = c_char_to_str(assets_dir);
    // TODO: add a settings in scroll-prover.
    env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
    let verifier_v3 = VerifierV3::from_dirs(params_dir, assets_dir);
    let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);
    VERIFIER_V3.set(verifier_v3).unwrap();
    VERIFIER_V4.set(verifier_v4).unwrap();
}
/// Returns the chunk verifying key, base64-encoded, as a freshly allocated
/// C string; returns a null pointer if the prover is uninitialized, the VK
/// is unavailable, or the call panics.
///
/// # Safety
///
/// `init_chunk_prover` must have been called first; the returned pointer is
/// owned by the caller (release it with `free_c_chars`).
#[no_mangle]
pub unsafe extern "C" fn get_chunk_vk() -> *const c_char {
    match panic_catch(|| PROVER.get_mut().unwrap().get_vk()) {
        Ok(Some(vk)) => string_to_c_char(base64::encode(vk)),
        _ => null(),
    }
}
/// Generates a chunk proof from a JSON array of block traces.
///
/// # Safety
///
/// `block_traces` must be a valid NUL-terminated C string. Requires a prior
/// `init_chunk_prover` call. Returns a JSON `ProofResult` C string (proof
/// bytes on success, error text on failure), or null if the response itself
/// cannot be serialized.
#[no_mangle]
pub unsafe extern "C" fn gen_chunk_proof(block_traces: *const c_char) -> *const c_char {
    // Panics inside the closure are converted into `Err` strings so they
    // cannot unwind across the FFI boundary.
    let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
        let block_traces = c_char_to_vec(block_traces);
        let block_traces = serde_json::from_slice::<Vec<BlockTrace>>(&block_traces)
            .map_err(|e| format!("failed to deserialize block traces: {e:?}"))?;
        let chunk = ChunkProvingTask::from(block_traces);
        let proof = PROVER
            .get_mut()
            .expect("failed to get mutable reference to PROVER.")
            // OUTPUT_DIR is an optional debug dump directory (PROVER_OUTPUT_DIR env).
            .gen_chunk_proof(chunk, None, None, OUTPUT_DIR.as_deref())
            .map_err(|e| format!("failed to generate proof: {e:?}"))?;
        serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
    })
    .unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));
    // Translate the internal result into the stable FFI response shape.
    let r = match proof_result {
        Ok(proof_bytes) => ProofResult {
            message: Some(proof_bytes),
            error: None,
        },
        Err(err) => ProofResult {
            message: None,
            error: Some(err),
        },
    };
    serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}
/// Verifies a chunk proof for the given hard fork.
///
/// # Safety
///
/// `proof` and `fork_name` must be valid NUL-terminated C strings.
/// Requires a prior `init_chunk_verifier` call. Returns 1 if the proof
/// verifies, 0 otherwise (including on panic).
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
    proof: *const c_char,
    fork_name: *const c_char,
) -> c_char {
    let proof = c_char_to_vec(proof);
    let fork_name_str = c_char_to_str(fork_name);
    // Map the fork name to an upgrade id; unknown names fall back to
    // darwin (4) with a warning rather than failing outright.
    let fork_id = match fork_name_str {
        "curie" => 3,
        "darwin" => 4,
        _ => {
            log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
            4
        }
    };
    let verified = panic_catch(|| {
        if fork_id == 3 {
            // Curie proofs use the v3 proof format and verifier.
            let proof = serde_json::from_slice::<ChunkProofV3>(proof.as_slice()).unwrap();
            VERIFIER_V3.get().unwrap().verify_chunk_proof(proof)
        } else {
            // Darwin (and anything newer) uses the v4 format and verifier.
            let proof = serde_json::from_slice::<ChunkProofV4>(proof.as_slice()).unwrap();
            VERIFIER_V4.get().unwrap().verify_chunk_proof(proof)
        }
    });
    verified.unwrap_or(false) as c_char
}

View File

@@ -1,4 +1,63 @@
mod batch;
mod chunk;
mod types;
mod utils; mod utils;
mod verifier;
use crate::utils::{c_char_to_str, c_char_to_vec};
use libc::c_char;
use prover_v5::utils::init_env_and_log;
use verifier::{TaskType, VerifierConfig};
/// One-shot initialization of the verifier registry from a JSON
/// `VerifierConfig`.
///
/// # Safety
///
/// `config` must be a valid NUL-terminated C string; malformed JSON panics.
/// Must be called before any `verify_*_proof` entry point.
#[no_mangle]
pub unsafe extern "C" fn init(config: *const c_char) {
    init_env_and_log("ffi_init");
    let parsed: VerifierConfig =
        serde_json::from_str(c_char_to_str(config)).unwrap();
    verifier::init(parsed);
}
/// FFI wrapper: verifies a chunk proof via `verify_proof`.
///
/// # Safety
///
/// `proof` and `fork_name` must be valid NUL-terminated C strings; `init`
/// must have been called first. Returns 1 on success, 0 on failure.
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
    proof: *const c_char,
    fork_name: *const c_char,
) -> c_char {
    verify_proof(proof, fork_name, TaskType::Chunk)
}
/// Shared implementation of the `verify_*_proof` FFI entry points: looks up
/// the verifier registered for `fork_name` and runs it on `proof` as the
/// given `task_type`. Returns 1 if verification succeeds, 0 on any failure
/// (unknown fork name or verification error) — it never panics outward.
fn verify_proof(proof: *const c_char, fork_name: *const c_char, task_type: TaskType) -> c_char {
    let proof = c_char_to_vec(proof);
    let fork_name_str = c_char_to_str(fork_name);
    // Idiomatic match instead of `is_err()` + `unwrap()`, and `false as c_char`
    // for consistency with the failure branch below (same value, 0).
    let verifier = match verifier::get_verifier(fork_name_str) {
        Ok(verifier) => verifier,
        Err(e) => {
            log::warn!("failed to get verifier, error: {:#}", e);
            return false as c_char;
        }
    };
    match verifier.verify(task_type, proof) {
        Err(e) => {
            log::error!("{:?} verify failed, error: {:#}", task_type, e);
            false as c_char
        }
        Ok(result) => result as c_char,
    }
}
/// FFI wrapper: verifies a batch proof via `verify_proof`.
///
/// # Safety
///
/// `proof` and `fork_name` must be valid NUL-terminated C strings; `init`
/// must have been called first. Returns 1 on success, 0 on failure.
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
    proof: *const c_char,
    fork_name: *const c_char,
) -> c_char {
    verify_proof(proof, fork_name, TaskType::Batch)
}
/// FFI wrapper: verifies a bundle proof via `verify_proof`.
///
/// # Safety
///
/// `proof` and `fork_name` must be valid NUL-terminated C strings; `init`
/// must have been called first. Returns 1 on success, 0 on failure.
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(
    proof: *const c_char,
    fork_name: *const c_char,
) -> c_char {
    verify_proof(proof, fork_name, TaskType::Bundle)
}

View File

@@ -1,22 +0,0 @@
use serde::{Deserialize, Serialize};
/// Represents the result of a chunk proof checking operation.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CheckChunkProofsResponse {
    /// Whether the proof checking was successful.
    pub ok: bool,
    /// Additional details in case the check failed; omitted from the JSON
    /// output when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
}
/// Encapsulates the result from generating a proof.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ProofResult {
    /// The generated proof, as serialized bytes; omitted from the JSON
    /// output when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<Vec<u8>>,
    /// Additional details in case the proof generation failed; omitted from
    /// the JSON output when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
}

View File

@@ -1,29 +1,9 @@
use once_cell::sync::Lazy;
use std::{ use std::{
env, ffi::CStr,
ffi::{CStr, CString},
os::raw::c_char, os::raw::c_char,
panic::{catch_unwind, AssertUnwindSafe}, panic::{catch_unwind, AssertUnwindSafe},
path::PathBuf,
}; };
// Only used for debugging.
pub(crate) static OUTPUT_DIR: Lazy<Option<String>> =
Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
/// # Safety
#[no_mangle]
pub extern "C" fn free_c_chars(ptr: *mut c_char) {
if ptr.is_null() {
log::warn!("Try to free an empty pointer!");
return;
}
unsafe {
let _ = CString::from_raw(ptr);
}
}
pub(crate) fn c_char_to_str(c: *const c_char) -> &'static str { pub(crate) fn c_char_to_str(c: *const c_char) -> &'static str {
let cstr = unsafe { CStr::from_ptr(c) }; let cstr = unsafe { CStr::from_ptr(c) };
cstr.to_str().unwrap() cstr.to_str().unwrap()
@@ -34,21 +14,6 @@ pub(crate) fn c_char_to_vec(c: *const c_char) -> Vec<u8> {
cstr.to_bytes().to_vec() cstr.to_bytes().to_vec()
} }
pub(crate) fn string_to_c_char(string: String) -> *const c_char {
CString::new(string).unwrap().into_raw()
}
pub(crate) fn vec_to_c_char(bytes: Vec<u8>) -> *const c_char {
CString::new(bytes).unwrap().into_raw()
}
pub(crate) fn file_exists(dir: &str, filename: &str) -> bool {
let mut path = PathBuf::from(dir);
path.push(filename);
path.exists()
}
pub(crate) fn panic_catch<F: FnOnce() -> R, R>(f: F) -> Result<R, String> { pub(crate) fn panic_catch<F: FnOnce() -> R, R>(f: F) -> Result<R, String> {
catch_unwind(AssertUnwindSafe(f)).map_err(|err| { catch_unwind(AssertUnwindSafe(f)).map_err(|err| {
if let Some(s) = err.downcast_ref::<String>() { if let Some(s) = err.downcast_ref::<String>() {

View File

@@ -0,0 +1,110 @@
mod darwin;
mod darwin_v2;
use anyhow::{bail, Result};
use darwin::DarwinVerifier;
use darwin_v2::DarwinV2Verifier;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover_v4::utils::load_params;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, collections::BTreeMap, rc::Rc};
/// Discriminates which kind of proof a verification request refers to,
/// so `ProofVerifier::verify` can pick the matching deserialization and
/// verifier routine.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
    Chunk,
    Batch,
    Bundle,
}
/// Common interface implemented by each hard-fork-specific verifier.
pub trait ProofVerifier {
    /// Verifies `proof` (serialized JSON bytes) as the given `task_type`;
    /// returns whether it is valid, or an error on panic/deserialization failure.
    fn verify(&self, task_type: TaskType, proof: Vec<u8>) -> Result<bool>;
}
/// Paths and fork label for one circuit version.
#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
    /// Hard fork name; used as the lookup key in `get_verifier`.
    pub fork_name: String,
    /// Directory containing the KZG params files (read via `load_params`).
    pub params_path: String,
    /// Directory containing the circuit assets (exported as
    /// `SCROLL_PROVER_ASSETS_DIR`).
    pub assets_path: String,
}

/// Top-level verifier configuration: one low-version and one high-version
/// circuit, registered side by side by `init`.
#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
    pub low_version_circuit: CircuitConfig,
    pub high_version_circuit: CircuitConfig,
}
/// Hard fork name, used as the registry key for a verifier.
type HardForkName = String;

/// A fork name paired with the verifier instance that handles it.
struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);

// Process-wide singletons: written once by `init`, read by `get_verifier`.
// PARAMS_MAP must outlive both verifiers, which borrow from it.
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();
static mut VERIFIER_LOW: OnceCell<VerifierPair> = OnceCell::new();
static mut PARAMS_MAP: OnceCell<BTreeMap<u32, ParamsKZG<Bn256>>> = OnceCell::new();
/// One-time initialization of the global verifier registry.
///
/// Loads the KZG params once (shared between the low- and high-version
/// circuits), then registers the low-version (Darwin) and high-version
/// (DarwinV2) verifiers under their configured fork names. Must be called
/// exactly once before `get_verifier`.
///
/// Fix: the previous code called `OnceCell::set(..).unwrap_unchecked()`,
/// which is undefined behavior if `init` ever runs twice (the `Err` branch
/// is "unchecked"). A repeated call now panics with a clear message instead.
pub fn init(config: VerifierConfig) {
    let low_conf = config.low_version_circuit;
    std::env::set_var("SCROLL_PROVER_ASSETS_DIR", &low_conf.assets_path);
    let params_degrees = [
        *prover_v4::config::LAYER2_DEGREE,
        *prover_v4::config::LAYER4_DEGREE,
    ];
    // params should be shared between low and high
    let mut params_map = BTreeMap::new();
    for degree in params_degrees {
        if let std::collections::btree_map::Entry::Vacant(e) = params_map.entry(degree) {
            match load_params(&low_conf.params_path, degree, None) {
                Ok(params) => {
                    e.insert(params);
                }
                Err(e) => panic!(
                    "failed to load params, degree {}, dir {}, err {}",
                    degree, low_conf.params_path, e
                ),
            }
        }
    }
    unsafe {
        if PARAMS_MAP.set(params_map).is_err() {
            panic!("verifier::init called more than once (PARAMS_MAP already set)");
        }
    }
    let verifier = DarwinVerifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &low_conf.assets_path);
    unsafe {
        if VERIFIER_LOW
            .set(VerifierPair(
                low_conf.fork_name,
                Rc::new(Box::new(verifier)),
            ))
            .is_err()
        {
            panic!("verifier::init called more than once (VERIFIER_LOW already set)");
        }
    }
    let high_conf = config.high_version_circuit;
    let verifier =
        DarwinV2Verifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &high_conf.assets_path);
    unsafe {
        if VERIFIER_HIGH
            .set(VerifierPair(
                high_conf.fork_name,
                Rc::new(Box::new(verifier)),
            ))
            .is_err()
        {
            panic!("verifier::init called more than once (VERIFIER_HIGH already set)");
        }
    }
}
/// Looks up the verifier registered for `fork_name` (checking the low-version
/// slot first, then the high-version one) and returns a cheap `Rc` clone of
/// it; fails if `init` has not registered a matching fork name.
pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
    // SAFETY-NOTE: reads the `static mut` registry cells populated by `init`;
    // callers are expected to have called `init` first, single-threaded.
    unsafe {
        if let Some(verifier) = VERIFIER_LOW.get() {
            if verifier.0 == fork_name {
                return Ok(verifier.1.clone());
            }
        }
        if let Some(verifier) = VERIFIER_HIGH.get() {
            if verifier.0 == fork_name {
                return Ok(verifier.1.clone());
            }
        }
    }
    bail!("failed to get verifier, key not found, {}", fork_name)
}

View File

@@ -0,0 +1,48 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v4::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
/// Proof verifier for the Darwin hard fork, backed by the `prover_v4` crate.
pub struct DarwinVerifier<'params> {
    /// Chunk (zkevm) proof verifier.
    verifier: Verifier<'params>,
    /// Batch/bundle (aggregation) proof verifier.
    agg_verifier: AggVerifier<'params>,
}

impl<'params> DarwinVerifier<'params> {
    /// Builds the chunk and aggregation verifiers from the shared KZG params
    /// map and the Darwin assets directory. Note: sets the process-wide
    /// `SCROLL_PROVER_ASSETS_DIR` env var as a side effect.
    pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
        env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
        let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
        let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
        Self {
            verifier,
            agg_verifier,
        }
    }
}

impl<'params> ProofVerifier for DarwinVerifier<'params> {
    /// Deserializes `proof` as the JSON proof type matching `task_type` and
    /// dispatches to the corresponding verifier. Panics inside the closure
    /// (e.g. malformed JSON, via `unwrap`) are caught by `panic_catch` and
    /// surfaced as `Err` instead of unwinding.
    fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
        let result = panic_catch(|| match task_type {
            TaskType::Chunk => {
                let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
                self.verifier.verify_chunk_proof(proof)
            }
            TaskType::Batch => {
                let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
                self.agg_verifier.verify_batch_proof(&proof)
            }
            TaskType::Bundle => {
                let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
                self.agg_verifier.verify_bundle_proof(proof)
            }
        });
        result.map_err(|e| anyhow::anyhow!(e))
    }
}

View File

@@ -0,0 +1,48 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v5::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
/// Proof verifier for the DarwinV2 hard fork, backed by the `prover_v5` crate.
pub struct DarwinV2Verifier<'params> {
    /// Chunk (zkevm) proof verifier.
    verifier: Verifier<'params>,
    /// Batch/bundle (aggregation) proof verifier.
    agg_verifier: AggVerifier<'params>,
}

impl<'params> DarwinV2Verifier<'params> {
    /// Builds the chunk and aggregation verifiers from the shared KZG params
    /// map and the DarwinV2 assets directory. Note: sets the process-wide
    /// `SCROLL_PROVER_ASSETS_DIR` env var as a side effect.
    pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
        env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
        let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
        let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
        Self {
            verifier,
            agg_verifier,
        }
    }
}

impl<'params> ProofVerifier for DarwinV2Verifier<'params> {
    /// Deserializes `proof` as the JSON proof type matching `task_type` and
    /// dispatches to the corresponding verifier. Panics inside the closure
    /// (e.g. malformed JSON, via `unwrap`) are caught by `panic_catch` and
    /// surfaced as `Err` instead of unwinding.
    fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
        let result = panic_catch(|| match task_type {
            TaskType::Chunk => {
                let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
                self.verifier.verify_chunk_proof(proof)
            }
            TaskType::Batch => {
                let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
                self.agg_verifier.verify_batch_proof(&proof)
            }
            TaskType::Bundle => {
                let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
                self.agg_verifier.verify_bundle_proof(proof)
            }
        });
        result.map_err(|e| anyhow::anyhow!(e))
    }
}

View File

@@ -1,26 +1,10 @@
// BatchProver is used to:
// - Batch a list of chunk proofs
// - Bundle a list of batch proofs
void init_batch_prover(char* params_dir, char* assets_dir);
// BatchVerifier is used to: // BatchVerifier is used to:
// - Verify a batch proof // - Verify a batch proof
// - Verify a bundle proof // - Verify a bundle proof
void init_batch_verifier(char* params_dir, char* assets_dir); void init(char* config);
char* get_batch_vk();
char* check_chunk_proofs(char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs, char* batch_header);
char verify_batch_proof(char* proof, char* fork_name); char verify_batch_proof(char* proof, char* fork_name);
char* get_bundle_vk(); char verify_bundle_proof(char* proof, char* fork_name);
char* gen_bundle_proof(char* batch_proofs);
char verify_bundle_proof(char* proof);
void init_chunk_prover(char* params_dir, char* assets_dir);
void init_chunk_verifier(char* params_dir, char* assets_dir);
char* get_chunk_vk();
char* gen_chunk_proof(char* block_traces);
char verify_chunk_proof(char* proof, char* fork_name); char verify_chunk_proof(char* proof, char* fork_name);
char* block_traces_to_chunk_info(char* block_traces);
void free_c_chars(char* ptr);

View File

@@ -2,6 +2,7 @@ package testcontainers
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"log" "log"
"os" "os"
@@ -20,9 +21,10 @@ import (
// TestcontainerApps testcontainers struct // TestcontainerApps testcontainers struct
type TestcontainerApps struct { type TestcontainerApps struct {
postgresContainer *postgres.PostgresContainer postgresContainer *postgres.PostgresContainer
l2GethContainer *testcontainers.DockerContainer l2GethContainer *testcontainers.DockerContainer
poSL1Container compose.ComposeStack poSL1Container compose.ComposeStack
web3SignerContainer *testcontainers.DockerContainer
// common time stamp in nanoseconds. // common time stamp in nanoseconds.
Timestamp int Timestamp int
@@ -111,10 +113,51 @@ func (t *TestcontainerApps) StartPoSL1Container() error {
return nil return nil
} }
// StartWeb3SignerContainer starts a consensys/web3signer container for the
// given chain id, mounting the key configuration files from
// common/testcontainers/web3signerconf. It is a no-op if a signer container
// is already running. The container is considered started once it logs
// "ready to handle signing requests".
func (t *TestcontainerApps) StartWeb3SignerContainer(chainId int) error {
	if t.web3SignerContainer != nil && t.web3SignerContainer.IsRunning() {
		return nil
	}
	var (
		err     error
		rootDir string
	)
	if rootDir, err = findProjectRootDir(); err != nil {
		return fmt.Errorf("failed to find project root directory: %v", err)
	}
	// web3signerconf/keyconf.yaml may contain multiple configured keys;
	// web3signer then chooses the one matching the tx's `from` field.
	web3SignerConfDir := filepath.Join(rootDir, "common", "testcontainers", "web3signerconf")
	req := testcontainers.ContainerRequest{
		Image:        "consensys/web3signer:develop",
		ExposedPorts: []string{"9000/tcp"},
		Cmd:          []string{"--key-config-path", "/web3signerconf/", "eth1", "--chain-id", fmt.Sprintf("%d", chainId)},
		Files: []testcontainers.ContainerFile{
			{
				HostFilePath:      web3SignerConfDir,
				ContainerFilePath: "/",
				FileMode:          0o777,
			},
		},
		WaitingFor: wait.ForLog("ready to handle signing requests"),
	}
	genericContainerReq := testcontainers.GenericContainerRequest{
		ContainerRequest: req,
		Started:          true,
	}
	container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
	if err != nil {
		log.Printf("failed to start web3signer container: %s", err)
		return err
	}
	t.web3SignerContainer, _ = container.(*testcontainers.DockerContainer)
	return nil
}
// GetPoSL1EndPoint returns the endpoint of the running PoS L1 endpoint // GetPoSL1EndPoint returns the endpoint of the running PoS L1 endpoint
func (t *TestcontainerApps) GetPoSL1EndPoint() (string, error) { func (t *TestcontainerApps) GetPoSL1EndPoint() (string, error) {
if t.poSL1Container == nil { if t.poSL1Container == nil {
return "", fmt.Errorf("PoS L1 container is not running") return "", errors.New("PoS L1 container is not running")
} }
contrainer, err := t.poSL1Container.ServiceContainer(context.Background(), "geth") contrainer, err := t.poSL1Container.ServiceContainer(context.Background(), "geth")
if err != nil { if err != nil {
@@ -135,7 +178,7 @@ func (t *TestcontainerApps) GetPoSL1Client() (*ethclient.Client, error) {
// GetDBEndPoint returns the endpoint of the running postgres container // GetDBEndPoint returns the endpoint of the running postgres container
func (t *TestcontainerApps) GetDBEndPoint() (string, error) { func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
if t.postgresContainer == nil || !t.postgresContainer.IsRunning() { if t.postgresContainer == nil || !t.postgresContainer.IsRunning() {
return "", fmt.Errorf("postgres is not running") return "", errors.New("postgres is not running")
} }
return t.postgresContainer.ConnectionString(context.Background(), "sslmode=disable") return t.postgresContainer.ConnectionString(context.Background(), "sslmode=disable")
} }
@@ -143,7 +186,7 @@ func (t *TestcontainerApps) GetDBEndPoint() (string, error) {
// GetL2GethEndPoint returns the endpoint of the running L2Geth container // GetL2GethEndPoint returns the endpoint of the running L2Geth container
func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) { func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
if t.l2GethContainer == nil || !t.l2GethContainer.IsRunning() { if t.l2GethContainer == nil || !t.l2GethContainer.IsRunning() {
return "", fmt.Errorf("l2 geth is not running") return "", errors.New("l2 geth is not running")
} }
endpoint, err := t.l2GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws") endpoint, err := t.l2GethContainer.PortEndpoint(context.Background(), "8546/tcp", "ws")
if err != nil { if err != nil {
@@ -152,6 +195,14 @@ func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
return endpoint, nil return endpoint, nil
} }
// GetWeb3SignerEndpoint returns the HTTP endpoint of the running web3signer
// container (the original comment was copy-pasted from GetL2GethEndPoint).
func (t *TestcontainerApps) GetWeb3SignerEndpoint() (string, error) {
	if t.web3SignerContainer == nil || !t.web3SignerContainer.IsRunning() {
		return "", errors.New("web3signer is not running")
	}
	return t.web3SignerContainer.PortEndpoint(context.Background(), "9000/tcp", "http")
}
// GetGormDBClient returns a gorm.DB by connecting to the running postgres container // GetGormDBClient returns a gorm.DB by connecting to the running postgres container
func (t *TestcontainerApps) GetGormDBClient() (*gorm.DB, error) { func (t *TestcontainerApps) GetGormDBClient() (*gorm.DB, error) {
endpoint, err := t.GetDBEndPoint() endpoint, err := t.GetDBEndPoint()
@@ -200,6 +251,11 @@ func (t *TestcontainerApps) Free() {
t.poSL1Container = nil t.poSL1Container = nil
} }
} }
if t.web3SignerContainer != nil && t.web3SignerContainer.IsRunning() {
if err := t.web3SignerContainer.Terminate(ctx); err != nil {
log.Printf("failed to stop web3signer container: %s", err)
}
}
} }
// findProjectRootDir find project root directory // findProjectRootDir find project root directory
@@ -217,7 +273,7 @@ func findProjectRootDir() (string, error) {
parentDir := filepath.Dir(currentDir) parentDir := filepath.Dir(currentDir)
if parentDir == currentDir { if parentDir == currentDir {
return "", fmt.Errorf("go.work file not found in any parent directory") return "", errors.New("go.work file not found in any parent directory")
} }
currentDir = parentDir currentDir = parentDir

View File

@@ -44,6 +44,11 @@ func TestNewTestcontainerApps(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, ethclient) assert.NotNil(t, ethclient)
assert.NoError(t, testApps.StartWeb3SignerContainer(1))
endpoint, err = testApps.GetWeb3SignerEndpoint()
assert.NoError(t, err)
assert.NotEmpty(t, endpoint)
// test free testcontainers // test free testcontainers
testApps.Free() testApps.Free()
endpoint, err = testApps.GetDBEndPoint() endpoint, err = testApps.GetDBEndPoint()
@@ -57,4 +62,8 @@ func TestNewTestcontainerApps(t *testing.T) {
endpoint, err = testApps.GetPoSL1EndPoint() endpoint, err = testApps.GetPoSL1EndPoint()
assert.EqualError(t, err, "PoS L1 container is not running") assert.EqualError(t, err, "PoS L1 container is not running")
assert.Empty(t, endpoint) assert.Empty(t, endpoint)
endpoint, err = testApps.GetWeb3SignerEndpoint()
assert.EqualError(t, err, "web3signer is not running")
assert.Empty(t, endpoint)
} }

View File

@@ -0,0 +1,7 @@
type: "file-raw"
keyType: "SECP256K1"
privateKey: "0x1313131313131313131313131313131313131313131313131313131313131313"
---
type: "file-raw"
keyType: "SECP256K1"
privateKey: "0x1212121212121212121212121212121212121212121212121212121212121212"

View File

@@ -109,6 +109,10 @@ const (
ProverTaskFailureTypeVerifiedFailed ProverTaskFailureTypeVerifiedFailed
// ProverTaskFailureTypeServerError collect occur error // ProverTaskFailureTypeServerError collect occur error
ProverTaskFailureTypeServerError ProverTaskFailureTypeServerError
// ProverTaskFailureTypeObjectAlreadyVerified object(batch/chunk) already verified, may exists in test env when ENABLE_TEST_ENV_BYPASS_FEATURES is true
ProverTaskFailureTypeObjectAlreadyVerified
// ProverTaskFailureTypeReassignedByAdmin reassigned by admin, this value is used in admin-system and defined here for clarity
ProverTaskFailureTypeReassignedByAdmin
) )
func (r ProverTaskFailureType) String() string { func (r ProverTaskFailureType) String() string {
@@ -123,6 +127,10 @@ func (r ProverTaskFailureType) String() string {
return "prover task failure verified failed" return "prover task failure verified failed"
case ProverTaskFailureTypeServerError: case ProverTaskFailureTypeServerError:
return "prover task failure server exception" return "prover task failure server exception"
case ProverTaskFailureTypeObjectAlreadyVerified:
return "prover task failure object already verified"
case ProverTaskFailureTypeReassignedByAdmin:
return "prover task failure reassigned by admin"
default: default:
return fmt.Sprintf("illegal prover task failure type (%d)", int32(r)) return fmt.Sprintf("illegal prover task failure type (%d)", int32(r))
} }

View File

@@ -4,7 +4,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common"
) )
@@ -52,9 +51,10 @@ type ChunkTaskDetail struct {
// BatchTaskDetail is a type containing BatchTask detail. // BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct { type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"` ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"` ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader *codecv3.DABatch `json:"batch_header"` BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
} }
// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches. // BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.

View File

@@ -2,6 +2,7 @@ package utils
import ( import (
"crypto/ecdsa" "crypto/ecdsa"
"errors"
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
@@ -28,7 +29,7 @@ func LoadOrCreateKey(keystorePath string, keystorePassword string) (*ecdsa.Priva
} else if err != nil { } else if err != nil {
return nil, err return nil, err
} else if fi.IsDir() { } else if fi.IsDir() {
return nil, fmt.Errorf("keystorePath cannot be a dir") return nil, errors.New("keystorePath cannot be a dir")
} }
keyjson, err := os.ReadFile(filepath.Clean(keystorePath)) keyjson, err := os.ReadFile(filepath.Clean(keystorePath))

View File

@@ -9,10 +9,14 @@ import (
"math/big" "math/big"
"os" "os"
"path/filepath" "path/filepath"
"reflect"
"strconv"
"strings"
"time" "time"
"github.com/modern-go/reflect2" "github.com/modern-go/reflect2"
"github.com/scroll-tech/go-ethereum/core" "github.com/scroll-tech/go-ethereum/core"
"github.com/scroll-tech/go-ethereum/log"
) )
// TryTimes try run several times until the function return true. // TryTimes try run several times until the function return true.
@@ -78,3 +82,89 @@ func ReadGenesis(genesisPath string) (*core.Genesis, error) {
} }
return genesis, file.Close() return genesis, file.Close()
} }
// OverrideConfigWithEnv recursively overrides config values with environment variables
func OverrideConfigWithEnv(cfg interface{}, prefix string) error {
v := reflect.ValueOf(cfg)
if v.Kind() != reflect.Ptr || v.IsNil() {
return nil
}
v = v.Elem()
t := v.Type()
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
fieldValue := v.Field(i)
if !fieldValue.CanSet() {
continue
}
tag := field.Tag.Get("json")
if tag == "" {
tag = strings.ToLower(field.Name)
}
envKey := prefix + "_" + strings.ToUpper(tag)
switch fieldValue.Kind() {
case reflect.Ptr:
if !fieldValue.IsNil() {
err := OverrideConfigWithEnv(fieldValue.Interface(), envKey)
if err != nil {
return err
}
}
case reflect.Struct:
err := OverrideConfigWithEnv(fieldValue.Addr().Interface(), envKey)
if err != nil {
return err
}
default:
if envValue, exists := os.LookupEnv(envKey); exists {
log.Info("Overriding config with env var", "key", envKey)
err := setField(fieldValue, envValue)
if err != nil {
return err
}
}
}
}
return nil
}
// setField sets the value of a field based on the environment variable value
func setField(field reflect.Value, value string) error {
switch field.Kind() {
case reflect.String:
field.SetString(value)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
intValue, err := strconv.ParseInt(value, 10, 64)
if err != nil {
return err
}
field.SetInt(intValue)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
uintValue, err := strconv.ParseUint(value, 10, 64)
if err != nil {
return err
}
field.SetUint(uintValue)
case reflect.Float32, reflect.Float64:
floatValue, err := strconv.ParseFloat(value, 64)
if err != nil {
return err
}
field.SetFloat(floatValue)
case reflect.Bool:
boolValue, err := strconv.ParseBool(value)
if err != nil {
return err
}
field.SetBool(boolValue)
default:
return fmt.Errorf("unsupported type: %v", field.Kind())
}
return nil
}

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug" "runtime/debug"
) )
var tag = "v4.4.33" var tag = "v4.4.66"
var commit = func() string { var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok { if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -88,13 +88,25 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
} }
// Reset prover manager config for manager test cases. // Reset prover manager config for manager test cases.
cfg.ProverManager = &coordinatorConfig.ProverManager{ cfg.ProverManager = &coordinatorConfig.ProverManager{
ProversPerSession: 1, ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{MockMode: true}, Verifier: &coordinatorConfig.VerifierConfig{
MockMode: true,
LowVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwin",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwinV2",
MinProverVersion: "v4.3.0",
},
},
BatchCollectionTimeSec: 60, BatchCollectionTimeSec: 60,
ChunkCollectionTimeSec: 60, ChunkCollectionTimeSec: 60,
SessionAttempts: 10, SessionAttempts: 10,
MaxVerifierWorkers: 4,
MinProverVersion: "v1.0.0",
} }
endpoint, err := c.testApps.GetDBEndPoint() endpoint, err := c.testApps.GetDBEndPoint()
if err != nil { if err != nil {

View File

@@ -25,7 +25,7 @@ func init() {
app = cli.NewApp() app = cli.NewApp()
app.Action = action app.Action = action
app.Name = "coordinator-tool" app.Name = "coordinator-tool"
app.Usage = "The Scroll L2 Coordinator" app.Usage = "The Scroll L2 Coordinator Tool"
app.Version = version.Version app.Version = version.Version
app.Flags = append(app.Flags, utils.CommonFlags...) app.Flags = append(app.Flags, utils.CommonFlags...)
app.Before = func(ctx *cli.Context) error { app.Before = func(ctx *cli.Context) error {
@@ -66,7 +66,7 @@ func action(ctx *cli.Context) error {
for _, batch := range batches { for _, batch := range batches {
var proof message.BatchProof var proof message.BatchProof
if encodeErr := json.Unmarshal(batch.Proof, &proof); encodeErr != nil { if encodeErr := json.Unmarshal(batch.Proof, &proof); encodeErr != nil {
log.Error("failed to unmarshal proof") log.Error("failed to unmarshal batch proof")
return fmt.Errorf("failed to unmarshal proof: %w, bundle hash: %v, batch hash: %v", encodeErr, taskID, batch.Hash) return fmt.Errorf("failed to unmarshal proof: %w, bundle hash: %v, batch hash: %v", encodeErr, taskID, batch.Hash)
} }
batchProofs = append(batchProofs, &proof) batchProofs = append(batchProofs, &proof)
@@ -78,8 +78,8 @@ func action(ctx *cli.Context) error {
batchProofsBytes, err := json.Marshal(taskDetail) batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil { if err != nil {
log.Error("failed to marshal proof") log.Error("failed to marshal batch proof")
return fmt.Errorf("failed to marshal chunk proofs, taskID:%s err:%w", taskID, err) return fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", taskID, err)
} }
taskMsg := &coordinatorType.GetTaskSchema{ taskMsg := &coordinatorType.GetTaskSchema{

View File

@@ -6,13 +6,20 @@
"batch_collection_time_sec": 180, "batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180, "chunk_collection_time_sec": 180,
"verifier": { "verifier": {
"fork_name": "bernoulli",
"mock_mode": true, "mock_mode": true,
"params_path": "", "low_version_circuit": {
"assets_path": "" "params_path": "params",
}, "assets_path": "assets",
"max_verifier_workers": 4, "fork_name": "darwin",
"min_prover_version": "v1.0.0" "min_prover_version": "v4.4.43"
},
"high_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwinV2",
"min_prover_version": "v4.4.45"
}
}
}, },
"db": { "db": {
"driver_name": "postgres", "driver_name": "postgres",

View File

@@ -6,7 +6,10 @@ require (
github.com/appleboy/gin-jwt/v2 v2.9.1 github.com/appleboy/gin-jwt/v2 v2.9.1
github.com/gin-gonic/gin v1.9.1 github.com/gin-gonic/gin v1.9.1
github.com/go-resty/resty/v2 v2.7.0 github.com/go-resty/resty/v2 v2.7.0
github.com/google/uuid v1.6.0
github.com/mitchellh/mapstructure v1.5.0 github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/shopspring/decimal v1.3.1 github.com/shopspring/decimal v1.3.1
github.com/stretchr/testify v1.9.0 github.com/stretchr/testify v1.9.0
@@ -42,12 +45,6 @@ require (
google.golang.org/protobuf v1.33.0 // indirect google.golang.org/protobuf v1.33.0 // indirect
) )
require (
github.com/google/uuid v1.6.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923
)
require ( require (
github.com/beorn7/perks v1.0.1 // indirect github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect github.com/bits-and-blooms/bitset v1.13.0 // indirect

View File

@@ -173,8 +173,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923 h1:QKgfS8G0btzg7nmFjSjllaxGkns3yg7g2/tG1nWExEI= github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068 h1:oVGwhg4cCq35B04eG/S4OBXDwXiFH7+LezuH2ZTRBPs=
github.com/scroll-tech/da-codec v0.0.0-20240718144756-1875fd490923/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs= github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ= github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -6,6 +6,7 @@ import (
"path/filepath" "path/filepath"
"scroll-tech/common/database" "scroll-tech/common/database"
"scroll-tech/common/utils"
) )
// ProverManager loads sequencer configuration items. // ProverManager loads sequencer configuration items.
@@ -23,10 +24,6 @@ type ProverManager struct {
ChunkCollectionTimeSec int `json:"chunk_collection_time_sec"` ChunkCollectionTimeSec int `json:"chunk_collection_time_sec"`
// BundleCollectionTimeSec bundle Proof collection time (in seconds). // BundleCollectionTimeSec bundle Proof collection time (in seconds).
BundleCollectionTimeSec int `json:"bundle_collection_time_sec"` BundleCollectionTimeSec int `json:"bundle_collection_time_sec"`
// Max number of workers in verifier worker pool
MaxVerifierWorkers int `json:"max_verifier_workers"`
// MinProverVersion is the minimum version of the prover that is required.
MinProverVersion string `json:"min_prover_version"`
} }
// L2 loads l2geth configuration items. // L2 loads l2geth configuration items.
@@ -50,12 +47,19 @@ type Config struct {
Auth *Auth `json:"auth"` Auth *Auth `json:"auth"`
} }
// CircuitConfig circuit items.
type CircuitConfig struct {
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
MinProverVersion string `json:"min_prover_version"`
}
// VerifierConfig load zk verifier config. // VerifierConfig load zk verifier config.
type VerifierConfig struct { type VerifierConfig struct {
ForkName string `json:"fork_name"` MockMode bool `json:"mock_mode"`
MockMode bool `json:"mock_mode"` LowVersionCircuit *CircuitConfig `json:"low_version_circuit"`
ParamsPath string `json:"params_path"` HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
AssetsPath string `json:"assets_path"`
} }
// NewConfig returns a new instance of Config. // NewConfig returns a new instance of Config.
@@ -71,5 +75,11 @@ func NewConfig(file string) (*Config, error) {
return nil, err return nil, err
} }
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_COORDINATOR")
if err != nil {
return nil, err
}
return cfg, nil return cfg, nil
} }

View File

@@ -44,21 +44,33 @@ func (a *AuthController) Login(c *gin.Context) (interface{}, error) {
return "", fmt.Errorf("check the login parameter failure: %w", err) return "", fmt.Errorf("check the login parameter failure: %w", err)
} }
hardForkNames, err := a.loginLogic.ProverHardForkName(&login)
if err != nil {
return "", fmt.Errorf("prover hard name failure:%w", err)
}
// check the challenge is used, if used, return failure // check the challenge is used, if used, return failure
if err := a.loginLogic.InsertChallengeString(c, login.Message.Challenge); err != nil { if err := a.loginLogic.InsertChallengeString(c, login.Message.Challenge); err != nil {
return "", fmt.Errorf("login insert challenge string failure:%w", err) return "", fmt.Errorf("login insert challenge string failure:%w", err)
} }
return login, nil
returnData := types.LoginParameterWithHardForkName{
HardForkName: hardForkNames,
LoginParameter: login,
}
return returnData, nil
} }
// PayloadFunc returns jwt.MapClaims with {public key, prover name}. // PayloadFunc returns jwt.MapClaims with {public key, prover name}.
func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims { func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims {
v, ok := data.(types.LoginParameter) v, ok := data.(types.LoginParameterWithHardForkName)
if !ok { if !ok {
return jwt.MapClaims{} return jwt.MapClaims{}
} }
return jwt.MapClaims{ return jwt.MapClaims{
types.HardForkName: v.HardForkName,
types.PublicKey: v.PublicKey, types.PublicKey: v.PublicKey,
types.ProverName: v.Message.ProverName, types.ProverName: v.Message.ProverName,
types.ProverVersion: v.Message.ProverVersion, types.ProverVersion: v.Message.ProverVersion,
@@ -79,5 +91,10 @@ func (a *AuthController) IdentityHandler(c *gin.Context) interface{} {
if proverVersion, ok := claims[types.ProverVersion]; ok { if proverVersion, ok := claims[types.ProverVersion]; ok {
c.Set(types.ProverVersion, proverVersion) c.Set(types.ProverVersion, proverVersion)
} }
if hardForkName, ok := claims[types.HardForkName]; ok {
c.Set(types.HardForkName, hardForkName)
}
return nil return nil
} }

View File

@@ -26,7 +26,7 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
panic("proof receiver new verifier failure") panic("proof receiver new verifier failure")
} }
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap) log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap, "bundleVerifier", vf.BundleVkMap)
Auth = NewAuthController(db, cfg, vf) Auth = NewAuthController(db, cfg, vf)
GetTask = NewGetTaskController(cfg, chainCfg, db, reg) GetTask = NewGetTaskController(cfg, chainCfg, db, reg)

View File

@@ -1,6 +1,7 @@
package api package api
import ( import (
"errors"
"fmt" "fmt"
"math/rand" "math/rand"
@@ -49,15 +50,15 @@ func NewGetTaskController(cfg *config.Config, chainCfg *params.ChainConfig, db *
func (ptc *GetTaskController) incGetTaskAccessCounter(ctx *gin.Context) error { func (ptc *GetTaskController) incGetTaskAccessCounter(ctx *gin.Context) error {
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey) publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist { if !publicKeyExist {
return fmt.Errorf("get public key from context failed") return errors.New("get public key from context failed")
} }
proverName, proverNameExist := ctx.Get(coordinatorType.ProverName) proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
if !proverNameExist { if !proverNameExist {
return fmt.Errorf("get prover name from context failed") return errors.New("get prover name from context failed")
} }
proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion) proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
if !proverVersionExist { if !proverVersionExist {
return fmt.Errorf("get prover version from context failed") return errors.New("get prover version from context failed")
} }
ptc.getTaskAccessCounter.With(prometheus.Labels{ ptc.getTaskAccessCounter.With(prometheus.Labels{
@@ -97,7 +98,7 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
} }
if result == nil { if result == nil {
nerr := fmt.Errorf("get empty prover task") nerr := errors.New("get empty prover task")
types.RenderFailure(ctx, types.ErrCoordinatorEmptyProofData, nerr) types.RenderFailure(ctx, types.ErrCoordinatorEmptyProofData, nerr)
return return
} }
@@ -107,10 +108,6 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
func (ptc *GetTaskController) proofType(para *coordinatorType.GetTaskParameter) message.ProofType { func (ptc *GetTaskController) proofType(para *coordinatorType.GetTaskParameter) message.ProofType {
var proofTypes []message.ProofType var proofTypes []message.ProofType
if para.TaskType != 0 {
proofTypes = append(proofTypes, message.ProofType(para.TaskType))
}
for _, proofType := range para.TaskTypes { for _, proofType := range para.TaskTypes {
proofTypes = append(proofTypes, message.ProofType(proofType)) proofTypes = append(proofTypes, message.ProofType(proofType))
} }

View File

@@ -363,7 +363,7 @@ func (c *Collector) checkBundleAllBatchReady() {
for _, bundle := range bundles { for _, bundle := range bundles {
allReady, checkErr := c.batchOrm.CheckIfBundleBatchProofsAreReady(c.ctx, bundle.Hash) allReady, checkErr := c.batchOrm.CheckIfBundleBatchProofsAreReady(c.ctx, bundle.Hash)
if checkErr != nil { if checkErr != nil {
log.Warn("checkBatchAllChunkReady CheckIfBatchChunkProofsAreReady failure", "error", checkErr, "hash", bundle.Hash) log.Warn("checkBundleAllBatchReady CheckIfBundleBatchProofsAreReady failure", "error", checkErr, "hash", bundle.Hash)
continue continue
} }
@@ -371,8 +371,8 @@ func (c *Collector) checkBundleAllBatchReady() {
continue continue
} }
if updateErr := c.bundleOrm.UpdateBatchProofsStatusByBatchHash(c.ctx, bundle.Hash, types.BatchProofsStatusReady); updateErr != nil { if updateErr := c.bundleOrm.UpdateBatchProofsStatusByBundleHash(c.ctx, bundle.Hash, types.BatchProofsStatusReady); updateErr != nil {
log.Warn("checkBundleAllBatchReady UpdateBatchProofsStatusByBatchHash failure", "error", checkErr, "hash", bundle.Hash) log.Warn("checkBundleAllBatchReady UpdateBatchProofsStatusByBundleHash failure", "error", checkErr, "hash", bundle.Hash)
} }
} }

View File

@@ -3,6 +3,7 @@ package auth
import ( import (
"errors" "errors"
"fmt" "fmt"
"strings"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/scroll-tech/go-ethereum/log" "github.com/scroll-tech/go-ethereum/log"
@@ -23,31 +24,35 @@ type LoginLogic struct {
chunkVks map[string]struct{} chunkVks map[string]struct{}
batchVKs map[string]struct{} batchVKs map[string]struct{}
bundleVks map[string]struct{} bundleVks map[string]struct{}
proverVersionHardForkMap map[string][]string
} }
// NewLoginLogic new a LoginLogic // NewLoginLogic new a LoginLogic
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic { func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
l := &LoginLogic{ proverVersionHardForkMap := make(map[string][]string)
cfg: cfg, if version.CheckScrollRepoVersion(cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
chunkVks: make(map[string]struct{}), log.Error("config file error, low verifier min_prover_version should not more than high verifier min_prover_version",
batchVKs: make(map[string]struct{}), "low verifier min_prover_version", cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion,
bundleVks: make(map[string]struct{}), "high verifier min_prover_version", cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion)
challengeOrm: orm.NewChallenge(db), panic("verifier config file error")
} }
for _, vk := range vf.ChunkVKMap { var highHardForks []string
l.chunkVks[vk] = struct{}{} highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
} highHardForks = append(highHardForks, cfg.ProverManager.Verifier.LowVersionCircuit.ForkName)
proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks
for _, vk := range vf.BatchVKMap { proverVersionHardForkMap[cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion] = []string{cfg.ProverManager.Verifier.LowVersionCircuit.ForkName}
l.batchVKs[vk] = struct{}{}
}
for _, vk := range vf.BundleVkMap { return &LoginLogic{
l.bundleVks[vk] = struct{}{} cfg: cfg,
chunkVks: vf.ChunkVKMap,
batchVKs: vf.BatchVKMap,
bundleVks: vf.BundleVkMap,
challengeOrm: orm.NewChallenge(db),
proverVersionHardForkMap: proverVersionHardForkMap,
} }
return l
} }
// InsertChallengeString insert and check the challenge string is existed // InsertChallengeString insert and check the challenge string is existed
@@ -56,16 +61,16 @@ func (l *LoginLogic) InsertChallengeString(ctx *gin.Context, challenge string) e
} }
func (l *LoginLogic) Check(login *types.LoginParameter) error { func (l *LoginLogic) Check(login *types.LoginParameter) error {
if login.PublicKey != "" { verify, err := login.Verify()
verify, err := login.Verify() if err != nil || !verify {
if err != nil || !verify { log.Error("auth message verify failure", "prover_name", login.Message.ProverName,
return errors.New("auth message verify failure") "prover_version", login.Message.ProverVersion, "message", login.Message)
} return errors.New("auth message verify failure")
} }
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.MinProverVersion) { if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
l.cfg.ProverManager.MinProverVersion, login.Message.ProverVersion) l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, login.Message.ProverVersion)
} }
if len(login.Message.ProverTypes) > 0 { if len(login.Message.ProverTypes) > 0 {
@@ -84,21 +89,37 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
vks[vk] = struct{}{} vks[vk] = struct{}{}
} }
default: default:
log.Error("invalid prover_type", "value", proverType) log.Error("invalid prover_type", "value", proverType, "prover name", login.Message.ProverName, "prover_version", login.Message.ProverVersion)
} }
} }
for _, vk := range login.Message.VKs { for _, vk := range login.Message.VKs {
if _, ok := vks[vk]; !ok { if _, ok := vks[vk]; !ok {
log.Error("vk inconsistency", "prover vk", vk) log.Error("vk inconsistency", "prover vk", vk, "prover name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
if !version.CheckScrollProverVersion(login.Message.ProverVersion) { if !version.CheckScrollProverVersion(login.Message.ProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s", return fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s",
version.Version, login.Message.ProverVersion) version.Version, login.Message.ProverVersion)
} }
// if the prover reports a same prover version // if the prover reports a same prover version
return fmt.Errorf("incompatible vk. please check your params files or config files") return errors.New("incompatible vk. please check your params files or config files")
} }
} }
} }
return nil return nil
} }
// ProverHardForkName retrieves hard fork name which prover belongs to
func (l *LoginLogic) ProverHardForkName(login *types.LoginParameter) (string, error) {
proverVersionSplits := strings.Split(login.Message.ProverVersion, "-")
if len(proverVersionSplits) == 0 {
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}
proverVersion := proverVersionSplits[0]
if hardForkNames, ok := l.proverVersionHardForkMap[proverVersion]; ok {
return strings.Join(hardForkNames, ","), nil
}
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}

View File

@@ -9,7 +9,9 @@ import (
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/da-codec/encoding/codecv3" "github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/da-codec/encoding/codecv4"
"github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log" "github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params" "github.com/scroll-tech/go-ethereum/params"
@@ -29,9 +31,8 @@ import (
type BatchProverTask struct { type BatchProverTask struct {
BaseProverTask BaseProverTask
batchAttemptsExceedTotal prometheus.Counter batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskTotal *prometheus.CounterVec batchTaskGetTaskProver *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
} }
// NewBatchProverTask new a batch collector // NewBatchProverTask new a batch collector
@@ -47,10 +48,6 @@ func NewBatchProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db), proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db), proverBlockListOrm: orm.NewProverBlockList(db),
}, },
batchAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_batch_attempts_exceed_total",
Help: "Total number of batch attempts exceed.",
}),
batchTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{ batchTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_batch_get_task_total", Name: "coordinator_batch_get_task_total",
Help: "Total number of batch get task.", Help: "Total number of batch get task.",
@@ -123,6 +120,15 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure return nil, ErrCoordinatorInternalFailure
} }
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// bp.recoverActiveAttempts(ctx, batchTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", batchTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{ proverTask := orm.ProverTask{
TaskID: batchTask.Hash, TaskID: batchTask.Hash,
ProverPublicKey: taskCtx.PublicKey, ProverPublicKey: taskCtx.PublicKey,
@@ -208,17 +214,9 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
chunkInfos = append(chunkInfos, &chunkInfo) chunkInfos = append(chunkInfos, &chunkInfo)
} }
taskDetail := message.BatchTaskDetail{ taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
ChunkInfos: chunkInfos, if err != nil {
ChunkProofs: chunkProofs, return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
}
if hardForkName == "darwin" {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(batch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header, taskID:%s err:%w", task.TaskID, decodeErr)
}
taskDetail.BatchHeader = batchHeader
} }
chunkProofsBytes, err := json.Marshal(taskDetail) chunkProofsBytes, err := json.Marshal(taskDetail)
@@ -241,3 +239,34 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
log.Error("failed to recover batch active attempts", "hash", batchTask.Hash, "error", err) log.Error("failed to recover batch active attempts", "hash", batchTask.Hash, "error", err)
} }
} }
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []*message.ChunkProof) (*message.BatchTaskDetail, error) {
taskDetail := &message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV3 && encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV4 {
return taskDetail, nil
}
if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV3 {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v3) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
} else {
batchHeader, decodeErr := codecv4.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v4) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
}
return taskDetail, nil
}

View File

@@ -27,9 +27,8 @@ import (
type BundleProverTask struct { type BundleProverTask struct {
BaseProverTask BaseProverTask
bundleAttemptsExceedTotal prometheus.Counter bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskTotal *prometheus.CounterVec bundleTaskGetTaskProver *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
} }
// NewBundleProverTask new a bundle collector // NewBundleProverTask new a bundle collector
@@ -46,10 +45,6 @@ func NewBundleProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *g
proverTaskOrm: orm.NewProverTask(db), proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db), proverBlockListOrm: orm.NewProverBlockList(db),
}, },
bundleAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_bundle_attempts_exceed_total",
Help: "Total number of bundle attempts exceed.",
}),
bundleTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{ bundleTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_bundle_get_task_total", Name: "coordinator_bundle_get_task_total",
Help: "Total number of bundle get task.", Help: "Total number of bundle get task.",
@@ -122,6 +117,15 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
return nil, ErrCoordinatorInternalFailure return nil, ErrCoordinatorInternalFailure
} }
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// bp.recoverActiveAttempts(ctx, bundleTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", bundleTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{ proverTask := orm.ProverTask{
TaskID: bundleTask.Hash, TaskID: bundleTask.Hash,
ProverPublicKey: taskCtx.PublicKey, ProverPublicKey: taskCtx.PublicKey,
@@ -205,7 +209,7 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
batchProofsBytes, err := json.Marshal(taskDetail) batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to marshal chunk proofs, taskID:%s err:%w", task.TaskID, err) return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
} }
taskMsg := &coordinatorType.GetTaskSchema{ taskMsg := &coordinatorType.GetTaskSchema{

View File

@@ -27,9 +27,8 @@ import (
type ChunkProverTask struct { type ChunkProverTask struct {
BaseProverTask BaseProverTask
chunkAttemptsExceedTotal prometheus.Counter chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskTotal *prometheus.CounterVec chunkTaskGetTaskProver *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
} }
// NewChunkProverTask new a chunk prover task // NewChunkProverTask new a chunk prover task
@@ -44,10 +43,6 @@ func NewChunkProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db), proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db), proverBlockListOrm: orm.NewProverBlockList(db),
}, },
chunkAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_chunk_attempts_exceed_total",
Help: "Total number of chunk attempts exceed.",
}),
chunkTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{ chunkTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_chunk_get_task_total", Name: "coordinator_chunk_get_task_total",
Help: "Total number of chunk get task.", Help: "Total number of chunk get task.",
@@ -70,7 +65,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ { for i := 0; i < 5; i++ {
var getTaskError error var getTaskError error
var tmpChunkTask *orm.Chunk var tmpChunkTask *orm.Chunk
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts) tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getTaskError != nil { if getTaskError != nil {
log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError) log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure return nil, ErrCoordinatorInternalFailure
@@ -79,7 +74,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
// Why here need get again? In order to support a task can assign to multiple prover, need also assign `ProvingTaskAssigned` // Why here need get again? In order to support a task can assign to multiple prover, need also assign `ProvingTaskAssigned`
// chunk to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql. // chunk to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.
if tmpChunkTask == nil { if tmpChunkTask == nil {
tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts) tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getTaskError != nil { if getTaskError != nil {
log.Error("failed to get unassigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError) log.Error("failed to get unassigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure return nil, ErrCoordinatorInternalFailure
@@ -120,6 +115,15 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure return nil, ErrCoordinatorInternalFailure
} }
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// cp.recoverActiveAttempts(ctx, chunkTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", chunkTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{ proverTask := orm.ProverTask{
TaskID: chunkTask.Hash, TaskID: chunkTask.Hash,
ProverPublicKey: taskCtx.PublicKey, ProverPublicKey: taskCtx.PublicKey,

View File

@@ -1,7 +1,9 @@
package provertask package provertask
import ( import (
"errors"
"fmt" "fmt"
"strings"
"sync" "sync"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
@@ -17,9 +19,7 @@ import (
var ( var (
// ErrCoordinatorInternalFailure coordinator internal db failure // ErrCoordinatorInternalFailure coordinator internal db failure
ErrCoordinatorInternalFailure = fmt.Errorf("coordinator internal error") ErrCoordinatorInternalFailure = errors.New("coordinator internal error")
// ErrHardForkName indicates client request with the wrong hard fork name
ErrHardForkName = fmt.Errorf("wrong hard fork name")
) )
var ( var (
@@ -50,30 +50,41 @@ type proverTaskContext struct {
PublicKey string PublicKey string
ProverName string ProverName string
ProverVersion string ProverVersion string
HardForkNames map[string]struct{}
} }
// checkParameter check the prover task parameter illegal // checkParameter check the prover task parameter illegal
func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, error) { func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, error) {
var ptc proverTaskContext var ptc proverTaskContext
ptc.HardForkNames = make(map[string]struct{})
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey) publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist { if !publicKeyExist {
return nil, fmt.Errorf("get public key from context failed") return nil, errors.New("get public key from context failed")
} }
ptc.PublicKey = publicKey.(string) ptc.PublicKey = publicKey.(string)
proverName, proverNameExist := ctx.Get(coordinatorType.ProverName) proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
if !proverNameExist { if !proverNameExist {
return nil, fmt.Errorf("get prover name from context failed") return nil, errors.New("get prover name from context failed")
} }
ptc.ProverName = proverName.(string) ptc.ProverName = proverName.(string)
proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion) proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
if !proverVersionExist { if !proverVersionExist {
return nil, fmt.Errorf("get prover version from context failed") return nil, errors.New("get prover version from context failed")
} }
ptc.ProverVersion = proverVersion.(string) ptc.ProverVersion = proverVersion.(string)
hardForkNamesStr, hardForkNameExist := ctx.Get(coordinatorType.HardForkName)
if !hardForkNameExist {
return nil, errors.New("get hard fork name from context failed")
}
hardForkNames := strings.Split(hardForkNamesStr.(string), ",")
for _, hardForkName := range hardForkNames {
ptc.HardForkNames[hardForkName] = struct{}{}
}
isBlocked, err := b.proverBlockListOrm.IsPublicKeyBlocked(ctx.Copy(), publicKey.(string)) isBlocked, err := b.proverBlockListOrm.IsPublicKeyBlocked(ctx.Copy(), publicKey.(string))
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to check whether the public key %s is blocked before assigning a chunk task, err: %w, proverName: %s, proverVersion: %s", publicKey, err, proverName, proverVersion) return nil, fmt.Errorf("failed to check whether the public key %s is blocked before assigning a chunk task, err: %w, proverName: %s, proverVersion: %s", publicKey, err, proverName, proverVersion)

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt"
"strings" "strings"
"time" "time"
@@ -138,11 +137,11 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
m.proofReceivedTotal.Inc() m.proofReceivedTotal.Inc()
pk := ctx.GetString(coordinatorType.PublicKey) pk := ctx.GetString(coordinatorType.PublicKey)
if len(pk) == 0 { if len(pk) == 0 {
return fmt.Errorf("get public key from context failed") return errors.New("get public key from context failed")
} }
pv := ctx.GetString(coordinatorType.ProverVersion) pv := ctx.GetString(coordinatorType.ProverVersion)
if len(pv) == 0 { if len(pv) == 0 {
return fmt.Errorf("get ProverVersion from context failed") return errors.New("get ProverVersion from context failed")
} }
proverTask, err := m.proverTaskOrm.GetProverTaskByUUIDAndPublicKey(ctx.Copy(), proofParameter.UUID, pk) proverTask, err := m.proverTaskOrm.GetProverTaskByUUIDAndPublicKey(ctx.Copy(), proofParameter.UUID, pk)
@@ -188,7 +187,7 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil { if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
return unmarshalErr return unmarshalErr
} }
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof) success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof, hardForkName)
} }
if verifyErr != nil || !success { if verifyErr != nil || !success {
@@ -297,6 +296,7 @@ func (m *ProofReceiverLogic) validator(ctx context.Context, proverTask *orm.Prov
// if the batch/chunk have proved and verifier success, need skip this submit proof // if the batch/chunk have proved and verifier success, need skip this submit proof
if m.checkIsTaskSuccess(ctx, proofParameter.TaskID, message.ProofType(proofParameter.TaskType)) { if m.checkIsTaskSuccess(ctx, proofParameter.TaskID, message.ProofType(proofParameter.TaskType)) {
m.proofRecover(ctx, proverTask, types.ProverTaskFailureTypeObjectAlreadyVerified, proofParameter)
m.validateFailureProverTaskHaveVerifier.Inc() m.validateFailureProverTaskHaveVerifier.Inc()
log.Info("the prove task have proved and verifier success, skip this submit proof", "hash", proofParameter.TaskID, log.Info("the prove task have proved and verifier success, skip this submit proof", "hash", proofParameter.TaskID,
"taskType", proverTask.TaskType, "proverName", proverTask.ProverName, "proverPublicKey", pk) "taskType", proverTask.TaskType, "proverName", proverTask.ProverName, "proverPublicKey", pk)

View File

@@ -1,11 +0,0 @@
#!/bin/bash
work_dir="$(dirname -- "${BASH_SOURCE[0]}")"
work_dir="$(cd -- "$work_dir" && pwd)"
echo $work_dir
rm $work_dir/*.vkey
version=release-v0.11.4
wget https://circuit-release.s3.us-west-2.amazonaws.com/${version}/chunk_vk.vkey -O $work_dir/chunk_vk.vkey
wget https://circuit-release.s3.us-west-2.amazonaws.com/${version}/agg_vk.vkey -O $work_dir/agg_vk.vkey

View File

@@ -10,8 +10,8 @@ import (
// NewVerifier Sets up a mock verifier. // NewVerifier Sets up a mock verifier.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) { func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"} batchVKMap := map[string]struct{}{"mock_vk": {}}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"} chunkVKMap := map[string]struct{}{"mock_vk": {}}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil
} }
@@ -32,7 +32,7 @@ func (v *Verifier) VerifyBatchProof(proof *message.BatchProof, forkName string)
} }
// VerifyBundleProof return a mock verification result for a BundleProof. // VerifyBundleProof return a mock verification result for a BundleProof.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) { func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
if string(proof.Proof) == InvalidTestProof { if string(proof.Proof) == InvalidTestProof {
return false, nil return false, nil
} }

View File

@@ -10,7 +10,7 @@ const InvalidTestProof = "this is a invalid proof"
// Verifier represents a rust ffi to a halo2 verifier. // Verifier represents a rust ffi to a halo2 verifier.
type Verifier struct { type Verifier struct {
cfg *config.VerifierConfig cfg *config.VerifierConfig
ChunkVKMap map[string]string ChunkVKMap map[string]struct{}
BatchVKMap map[string]string BatchVKMap map[string]struct{}
BundleVkMap map[string]string BundleVkMap map[string]struct{}
} }

View File

@@ -11,11 +11,9 @@ package verifier
import "C" //nolint:typecheck import "C" //nolint:typecheck
import ( import (
"embed"
"encoding/base64" "encoding/base64"
"encoding/json" "encoding/json"
"io" "io"
"io/fs"
"os" "os"
"path" "path"
"unsafe" "unsafe"
@@ -27,50 +25,87 @@ import (
"scroll-tech/coordinator/internal/config" "scroll-tech/coordinator/internal/config"
) )
// This struct maps to `CircuitConfig` in common/libzkp/impl/src/verifier.rs
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.CircuitConfig` being changed
type rustCircuitConfig struct {
ForkName string `json:"fork_name"`
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
}
func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
return &rustCircuitConfig{
ForkName: cfg.ForkName,
ParamsPath: cfg.ParamsPath,
AssetsPath: cfg.AssetsPath,
}
}
// This struct maps to `VerifierConfig` in common/libzkp/impl/src/verifier.rs
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.VerifierConfig` being changed
type rustVerifierConfig struct {
LowVersionCircuit *rustCircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *rustCircuitConfig `json:"high_version_circuit"`
}
func newRustVerifierConfig(cfg *config.VerifierConfig) *rustVerifierConfig {
return &rustVerifierConfig{
LowVersionCircuit: newRustCircuitConfig(cfg.LowVersionCircuit),
HighVersionCircuit: newRustCircuitConfig(cfg.HighVersionCircuit),
}
}
// NewVerifier Sets up a rust ffi to call verify. // NewVerifier Sets up a rust ffi to call verify.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) { func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
if cfg.MockMode { if cfg.MockMode {
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"} chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"} batchVKMap := map[string]struct{}{"mock_vk": {}}
bundleVKMap := map[string]string{cfg.ForkName: "mock_vk"} bundleVKMap := map[string]struct{}{"mock_vk": {}}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap, BundleVkMap: bundleVKMap}, nil return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap, BundleVkMap: bundleVKMap}, nil
} }
paramsPathStr := C.CString(cfg.ParamsPath) verifierConfig := newRustVerifierConfig(cfg)
assetsPathStr := C.CString(cfg.AssetsPath) configBytes, err := json.Marshal(verifierConfig)
if err != nil {
return nil, err
}
configStr := C.CString(string(configBytes))
defer func() { defer func() {
C.free(unsafe.Pointer(paramsPathStr)) C.free(unsafe.Pointer(configStr))
C.free(unsafe.Pointer(assetsPathStr))
}() }()
C.init_batch_verifier(paramsPathStr, assetsPathStr) C.init(configStr)
C.init_chunk_verifier(paramsPathStr, assetsPathStr)
v := &Verifier{ v := &Verifier{
cfg: cfg, cfg: cfg,
ChunkVKMap: make(map[string]string), ChunkVKMap: make(map[string]struct{}),
BatchVKMap: make(map[string]string), BatchVKMap: make(map[string]struct{}),
BundleVkMap: make(map[string]string), BundleVkMap: make(map[string]struct{}),
} }
bundleVK, err := v.readVK(path.Join(cfg.AssetsPath, "vk_bundle.vkey")) bundleVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_bundle.vkey"))
if err != nil { if err != nil {
return nil, err return nil, err
} }
batchVK, err := v.readVK(path.Join(cfg.AssetsPath, "vk_batch.vkey")) batchVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_batch.vkey"))
if err != nil { if err != nil {
return nil, err return nil, err
} }
chunkVK, err := v.readVK(path.Join(cfg.AssetsPath, "vk_chunk.vkey")) chunkVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_chunk.vkey"))
if err != nil { if err != nil {
return nil, err return nil, err
} }
v.BundleVkMap[cfg.ForkName] = bundleVK v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[cfg.ForkName] = batchVK v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[cfg.ForkName] = chunkVK v.ChunkVKMap[chunkVK] = struct{}{}
if err := v.loadEmbedVK(); err != nil { if err := v.loadLowVersionVKs(cfg); err != nil {
return nil, err return nil, err
} }
v.loadCurieVersionVKs()
return v, nil return v, nil
} }
@@ -129,7 +164,7 @@ func (v *Verifier) VerifyChunkProof(proof *message.ChunkProof, forkName string)
} }
// VerifyBundleProof Verify a ZkProof for a bundle of batches, by marshaling it and verifying it via the EVM verifier. // VerifyBundleProof Verify a ZkProof for a bundle of batches, by marshaling it and verifying it via the EVM verifier.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) { func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
if v.cfg.MockMode { if v.cfg.MockMode {
log.Info("Mock mode, verifier disabled") log.Info("Mock mode, verifier disabled")
if string(proof.Proof) == InvalidTestProof { if string(proof.Proof) == InvalidTestProof {
@@ -144,12 +179,14 @@ func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) {
} }
proofStr := C.CString(string(buf)) proofStr := C.CString(string(buf))
forkNameStr := C.CString(forkName)
defer func() { defer func() {
C.free(unsafe.Pointer(proofStr)) C.free(unsafe.Pointer(proofStr))
C.free(unsafe.Pointer(forkNameStr))
}() }()
log.Info("Start to verify bundle proof ...") log.Info("Start to verify bundle proof ...")
verified := C.verify_bundle_proof(proofStr) verified := C.verify_bundle_proof(proofStr, forkNameStr)
return verified != 0, nil return verified != 0, nil
} }
@@ -165,23 +202,27 @@ func (v *Verifier) readVK(filePat string) (string, error) {
return base64.StdEncoding.EncodeToString(byt), nil return base64.StdEncoding.EncodeToString(byt), nil
} }
//go:embed legacy_vk/* // load low version vks, current is darwin
var legacyVKFS embed.FS func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
bundleVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_bundle.vkey"))
func (v *Verifier) loadEmbedVK() error {
batchVKBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/agg_vk.vkey")
if err != nil { if err != nil {
log.Error("load embed batch vk failure", "err", err)
return err return err
} }
batchVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_batch.vkey"))
chunkVkBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/chunk_vk.vkey")
if err != nil { if err != nil {
log.Error("load embed chunk vk failure", "err", err)
return err return err
} }
chunkVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_chunk.vkey"))
v.BatchVKMap["curie"] = base64.StdEncoding.EncodeToString(batchVKBytes) if err != nil {
v.ChunkVKMap["curie"] = base64.StdEncoding.EncodeToString(chunkVkBytes) return err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
return nil return nil
} }
func (v *Verifier) loadCurieVersionVKs() {
v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA="] = struct{}{}
v.ChunkVKMap["AAAAGQAAAATyWEABRbJ6hQQ5/zLX1gTasr7349minA9rSgMS6gDeHwZKqikRiO3md+pXjjxMHnKQtmXYgMXhJSvlmZ+Ws+cheuly2X1RuNQzcZuRImaKPR9LJsVZYsXfJbuqdKX8p0Gj8G83wMJOmTzNVUyUol0w0lTU+CEiTpHOnxBsTF3EWaW3s1u4ycOgWt1c9M6s7WmaBZLYgAWYCunO5CLCLApNGbCASeck/LuSoedEri5u6HccCKU2khG6zl6W07jvYSbDVLJktbjRiHv+/HQix+K14j8boo8Z/unhpwXCsPxkQA=="] = struct{}{}
}

View File

@@ -18,7 +18,8 @@ import (
var ( var (
paramsPath = flag.String("params", "/assets/test_params", "params dir") paramsPath = flag.String("params", "/assets/test_params", "params dir")
assetsPath = flag.String("assets", "/assets/test_assets", "assets dir") assetsPathLo = flag.String("assets_lo", "/assets/test_assets_lo", "assets dir")
assetsPathHi = flag.String("assets", "/assets/test_assets", "assets dir")
batchProofPath = flag.String("batch_proof", "/assets/proof_data/batch_proof", "batch proof file path") batchProofPath = flag.String("batch_proof", "/assets/proof_data/batch_proof", "batch proof file path")
chunkProofPath1 = flag.String("chunk_proof1", "/assets/proof_data/chunk_proof1", "chunk proof file path 1") chunkProofPath1 = flag.String("chunk_proof1", "/assets/proof_data/chunk_proof1", "chunk proof file path 1")
chunkProofPath2 = flag.String("chunk_proof2", "/assets/proof_data/chunk_proof2", "chunk proof file path 2") chunkProofPath2 = flag.String("chunk_proof2", "/assets/proof_data/chunk_proof2", "chunk proof file path 2")
@@ -28,28 +29,38 @@ func TestFFI(t *testing.T) {
as := assert.New(t) as := assert.New(t)
cfg := &config.VerifierConfig{ cfg := &config.VerifierConfig{
MockMode: false, MockMode: false,
ParamsPath: *paramsPath, LowVersionCircuit: &config.CircuitConfig{
AssetsPath: *assetsPath, ParamsPath: *paramsPath,
AssetsPath: *assetsPathLo,
ForkName: "darwin",
MinProverVersion: "",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: *paramsPath,
AssetsPath: *assetsPathHi,
ForkName: "darwinV2",
MinProverVersion: "",
},
} }
v, err := NewVerifier(cfg) v, err := NewVerifier(cfg)
as.NoError(err) as.NoError(err)
chunkProof1 := readChunkProof(*chunkProofPath1, as) chunkProof1 := readChunkProof(*chunkProofPath1, as)
chunkOk1, err := v.VerifyChunkProof(chunkProof1) chunkOk1, err := v.VerifyChunkProof(chunkProof1, "darwinV2")
as.NoError(err) as.NoError(err)
as.True(chunkOk1) as.True(chunkOk1)
t.Log("Verified chunk proof 1") t.Log("Verified chunk proof 1")
chunkProof2 := readChunkProof(*chunkProofPath2, as) chunkProof2 := readChunkProof(*chunkProofPath2, as)
chunkOk2, err := v.VerifyChunkProof(chunkProof2) chunkOk2, err := v.VerifyChunkProof(chunkProof2, "darwinV2")
as.NoError(err) as.NoError(err)
as.True(chunkOk2) as.True(chunkOk2)
t.Log("Verified chunk proof 2") t.Log("Verified chunk proof 2")
batchProof := readBatchProof(*batchProofPath, as) batchProof := readBatchProof(*batchProofPath, as)
batchOk, err := v.VerifyBatchProof(batchProof, "curie") batchOk, err := v.VerifyBatchProof(batchProof, "darwinV2")
as.NoError(err) as.NoError(err)
as.True(batchOk) as.True(batchOk)
t.Log("Verified batch proof") t.Log("Verified batch proof")

View File

@@ -31,6 +31,9 @@ type Batch struct {
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"` WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"` ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"` BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
// proof // proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"` ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -211,23 +214,6 @@ func (o *Batch) GetBatchByHash(ctx context.Context, hash string) (*Batch, error)
return &batch, nil return &batch, nil
} }
// GetLatestFinalizedBatch retrieves the latest finalized batch from the database.
func (o *Batch) GetLatestFinalizedBatch(ctx context.Context) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("rollup_status = ?", int(types.RollupFinalized))
db = db.Order("index desc")
var latestFinalizedBatch Batch
if err := db.First(&latestFinalizedBatch).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, fmt.Errorf("Batch.GetLatestBatch error: %w", err)
}
return &latestFinalizedBatch, nil
}
// GetBatchesByBundleHash retrieves the given batch. // GetBatchesByBundleHash retrieves the given batch.
func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) ([]*Batch, error) { func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) ([]*Batch, error) {
db := o.db.WithContext(ctx) db := o.db.WithContext(ctx)
@@ -242,6 +228,19 @@ func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) (
return batches, nil return batches, nil
} }
// GetBatchByIndex retrieves the batch by the given index.
func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("index = ?", index)
var batch Batch
if err := db.First(&batch).Error; err != nil {
return nil, fmt.Errorf("Batch.GetBatchByIndex error: %w, index: %v", err, index)
}
return &batch, nil
}
// InsertBatch inserts a new batch into the database. // InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...*gorm.DB) (*Batch, error) { func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil { if batch == nil {

View File

@@ -28,7 +28,6 @@ type Bundle struct {
BatchProofsStatus int16 `json:"batch_proofs_status" gorm:"column:batch_proofs_status;default:1"` BatchProofsStatus int16 `json:"batch_proofs_status" gorm:"column:batch_proofs_status;default:1"`
ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"` ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"`
Proof []byte `json:"proof" gorm:"column:proof;default:NULL"` Proof []byte `json:"proof" gorm:"column:proof;default:NULL"`
ProverAssignedAt *time.Time `json:"prover_assigned_at" gorm:"column:prover_assigned_at;default:NULL"`
ProvedAt *time.Time `json:"proved_at" gorm:"column:proved_at;default:NULL"` ProvedAt *time.Time `json:"proved_at" gorm:"column:proved_at;default:NULL"`
ProofTimeSec int32 `json:"proof_time_sec" gorm:"column:proof_time_sec;default:NULL"` ProofTimeSec int32 `json:"proof_time_sec" gorm:"column:proof_time_sec;default:NULL"`
TotalAttempts int16 `json:"total_attempts" gorm:"column:total_attempts;default:0"` TotalAttempts int16 `json:"total_attempts" gorm:"column:total_attempts;default:0"`
@@ -136,14 +135,14 @@ func (o *Bundle) GetUnassignedAndBatchesUnreadyBundles(ctx context.Context, offs
return bundles, nil return bundles, nil
} }
// UpdateBatchProofsStatusByBatchHash updates the status of batch_proofs_status field for a given bundle hash. // UpdateBatchProofsStatusByBundleHash updates the status of batch_proofs_status field for a given bundle hash.
func (o *Bundle) UpdateBatchProofsStatusByBatchHash(ctx context.Context, bundleHash string, status types.BatchProofsStatus) error { func (o *Bundle) UpdateBatchProofsStatusByBundleHash(ctx context.Context, bundleHash string, status types.BatchProofsStatus) error {
db := o.db.WithContext(ctx) db := o.db.WithContext(ctx)
db = db.Model(&Bundle{}) db = db.Model(&Bundle{})
db = db.Where("hash = ?", bundleHash) db = db.Where("hash = ?", bundleHash)
if err := db.Update("batch_proofs_status", status).Error; err != nil { if err := db.Update("batch_proofs_status", status).Error; err != nil {
return fmt.Errorf("Bundle.UpdateBatchProofsStatusByBatchHash error: %w, bundle hash: %v, status: %v", err, bundleHash, status.String()) return fmt.Errorf("Bundle.UpdateBatchProofsStatusByBundleHash error: %w, bundle hash: %v, status: %v", err, bundleHash, status.String())
} }
return nil return nil
} }

View File

@@ -2,6 +2,7 @@ package orm
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"time" "time"
@@ -53,7 +54,7 @@ func (r *Challenge) InsertChallenge(ctx context.Context, challengeString string)
return fmt.Errorf("the challenge string:%s have been used", challengeString) return fmt.Errorf("the challenge string:%s have been used", challengeString)
} }
return fmt.Errorf("insert challenge string affected rows more than 1") return errors.New("insert challenge string affected rows more than 1")
} }
// DeleteExpireChallenge delete the expire challenge // DeleteExpireChallenge delete the expire challenge

View File

@@ -74,11 +74,11 @@ func (*Chunk) TableName() string {
// GetUnassignedChunk retrieves unassigned chunk based on the specified limit. // GetUnassignedChunk retrieves unassigned chunk based on the specified limit.
// The returned chunks are sorted in ascending order by their index. // The returned chunks are sorted in ascending order by their index.
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Chunk, error) { func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
var chunk Chunk var chunk Chunk
db := o.db.WithContext(ctx) db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;", sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts) int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, height)
err := db.Raw(sql).Scan(&chunk).Error err := db.Raw(sql).Scan(&chunk).Error
if err != nil { if err != nil {
return nil, fmt.Errorf("Chunk.GetUnassignedChunk error: %w", err) return nil, fmt.Errorf("Chunk.GetUnassignedChunk error: %w", err)
@@ -91,11 +91,11 @@ func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTo
// GetAssignedChunk retrieves assigned chunk based on the specified limit. // GetAssignedChunk retrieves assigned chunk based on the specified limit.
// The returned chunks are sorted in ascending order by their index. // The returned chunks are sorted in ascending order by their index.
func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Chunk, error) { func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
var chunk Chunk var chunk Chunk
db := o.db.WithContext(ctx) db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;", sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts) int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, height)
err := db.Raw(sql).Scan(&chunk).Error err := db.Raw(sql).Scan(&chunk).Error
if err != nil { if err != nil {
return nil, fmt.Errorf("Chunk.GetAssignedChunk error: %w", err) return nil, fmt.Errorf("Chunk.GetAssignedChunk error: %w", err)

View File

@@ -87,6 +87,55 @@ func (o *L2Block) GetL2BlockByNumber(ctx context.Context, blockNumber uint64) (*
return &l2Block, nil return &l2Block, nil
} }
// GetL2BlocksInRange retrieves the L2 blocks within the specified range (inclusive).
// The range is closed, i.e., it includes both start and end block numbers.
// The returned blocks are sorted in ascending order by their block number.
func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint64, endBlockNumber uint64) ([]*encoding.Block, error) {
if startBlockNumber > endBlockNumber {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: start block number should be less than or equal to end block number, start block: %v, end block: %v", startBlockNumber, endBlockNumber)
}
db := o.db.WithContext(ctx)
db = db.Model(&L2Block{})
db = db.Select("header, transactions, withdraw_root, row_consumption")
db = db.Where("number >= ? AND number <= ?", startBlockNumber, endBlockNumber)
db = db.Order("number ASC")
var l2Blocks []L2Block
if err := db.Find(&l2Blocks).Error; err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
// sanity check
if uint64(len(l2Blocks)) != endBlockNumber-startBlockNumber+1 {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: unexpected number of results, expected: %v, got: %v", endBlockNumber-startBlockNumber+1, len(l2Blocks))
}
var blocks []*encoding.Block
for _, v := range l2Blocks {
var block encoding.Block
if err := json.Unmarshal([]byte(v.Transactions), &block.Transactions); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.Header = &gethTypes.Header{}
if err := json.Unmarshal([]byte(v.Header), block.Header); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.WithdrawRoot = common.HexToHash(v.WithdrawRoot)
if err := json.Unmarshal([]byte(v.RowConsumption), &block.RowConsumption); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
blocks = append(blocks, &block)
}
return blocks, nil
}
// InsertL2Blocks inserts l2 blocks into the "l2_block" table. // InsertL2Blocks inserts l2 blocks into the "l2_block" table.
// for unit test // for unit test
func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*encoding.Block) error { func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*encoding.Block) error {

View File

@@ -116,25 +116,6 @@ func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType messag
return proverTasks, nil return proverTasks, nil
} }
// GetAssignedProverTaskByTaskIDAndProver get prover task taskID and public key
// TODO: when prover all upgrade need DEPRECATED this function
func (o *ProverTask) GetAssignedProverTaskByTaskIDAndProver(ctx context.Context, taskType message.ProofType, taskID, proverPublicKey, proverVersion string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
db = db.Where("task_id", taskID)
db = db.Where("prover_public_key", proverPublicKey)
db = db.Where("prover_version", proverVersion)
db = db.Where("proving_status", types.ProverAssigned)
var proverTask ProverTask
err := db.First(&proverTask).Error
if err != nil {
return nil, fmt.Errorf("ProverTask.GetProverTaskByTaskIDAndProver err:%w, taskID:%s, pubkey:%s, prover_version:%s", err, taskID, proverPublicKey, proverVersion)
}
return &proverTask, nil
}
// GetProverTaskByUUIDAndPublicKey get prover task taskID by uuid and public key // GetProverTaskByUUIDAndPublicKey get prover task taskID by uuid and public key
func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid, publicKey string) (*ProverTask, error) { func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid, publicKey string) (*ProverTask, error) {
db := o.db.WithContext(ctx) db := o.db.WithContext(ctx)

View File

@@ -18,6 +18,8 @@ const (
ProverName = "prover_name" ProverName = "prover_name"
// ProverVersion the prover version for context // ProverVersion the prover version for context
ProverVersion = "prover_version" ProverVersion = "prover_version"
// HardForkName the hard fork name for context
HardForkName = "hard_fork_name"
) )
// LoginSchema for /login response // LoginSchema for /login response
@@ -35,6 +37,12 @@ type Message struct {
VKs []string `form:"vks" json:"vks"` VKs []string `form:"vks" json:"vks"`
} }
// LoginParameterWithHardForkName constructs new payload for login
type LoginParameterWithHardForkName struct {
LoginParameter
HardForkName string `form:"hard_fork_name" json:"hard_fork_name"`
}
// LoginParameter for /login api // LoginParameter for /login api
type LoginParameter struct { type LoginParameter struct {
Message Message `form:"message" json:"message" binding:"required"` Message Message `form:"message" json:"message" binding:"required"`

View File

@@ -60,11 +60,10 @@ func TestGenerateSignature(t *testing.T) {
authMsg := LoginParameter{ authMsg := LoginParameter{
Message: Message{ Message: Message{
ProverName: "test", ProverName: "test",
ProverVersion: "v4.4.32-37af5ef5-38a68e2-1c5093c", ProverVersion: "v4.4.45-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjEzMjc5MTIsIm9yaWdfaWF0IjoxNzIxMzI0MzEyLCJyYW5kb20iOiJWMVFlT19yNEV5eGRmYUtDalprVExEa0ZIemEyNTdQRG93dTV4SnVxYTdZPSJ9.x-B_TnkTUvs8-hiMfJXejxetAP6rXfeRUmyZ3S0uBiM", Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQ4Mzg0ODUsIm9yaWdfaWF0IjoxNzI0ODM0ODg1LCJyYW5kb20iOiJ6QmdNZGstNGc4UzNUNTFrVEFsYk1RTXg2TGJ4SUs4czY3ejM2SlNuSFlJPSJ9.x9PvihhNx2w4_OX5uCrv8QJCNYVQkIi-K2k8XFXYmik",
ProverTypes: []ProverType{ProverTypeBatch}, ProverTypes: []ProverType{ProverTypeChunk},
VKs: []string{"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA=", VKs: []string{"mock_vk"},
"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD1DEjW4Kell67H07wazT5DdzrSh4+amh+cmosQHp9p9snFypyoBGt3UHtoJGQBZlywZWDS9ht5pnaEoGBdaKcQk+lFb+WxTiId0KOAa0mafTZTQw8yToy57Jple64qzlRu1dux30tZZGuerLN1CKzg5Xl2iOpMK+l87jCINwVp5cUtF/XrvhBbU7onKh3KBiy99iUqVyA3Y6iiIZhGKWBSuSA4bNgDYIoVkqjHpdL35aEShoRO6pNXt7rDzxFoPzH0JuPI54nE4OhVrzZXwtkAEosxVa/fszcE092FH+HhhtxZBYe/KEzwdISU9TOPdId3UF/UMYC0MiYOlqffVTgAg="},
}, },
PublicKey: publicKeyHex, PublicKey: publicKeyHex,
} }

View File

@@ -3,7 +3,6 @@ package types
// GetTaskParameter for ProverTasks request parameter // GetTaskParameter for ProverTasks request parameter
type GetTaskParameter struct { type GetTaskParameter struct {
ProverHeight uint64 `form:"prover_height" json:"prover_height"` ProverHeight uint64 `form:"prover_height" json:"prover_height"`
TaskType int `form:"task_type" json:"task_type"`
TaskTypes []int `form:"task_types" json:"task_types"` TaskTypes []int `form:"task_types" json:"task_types"`
} }

View File

@@ -33,12 +33,6 @@ import (
"scroll-tech/coordinator/internal/route" "scroll-tech/coordinator/internal/route"
) )
const (
forkNumberTwo = 2
forkNumberOne = 1
minProverVersion = "v2.0.0"
)
var ( var (
conf *config.Config conf *config.Config
@@ -72,7 +66,7 @@ func randomURL() string {
return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64()) return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
} }
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, nameForkMap map[string]int64) (*cron.Collector, *http.Server) { func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, forks []string) (*cron.Collector, *http.Server) {
var err error var err error
db, err = testApps.GetGormDBClient() db, err = testApps.GetGormDBClient()
@@ -90,13 +84,23 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
ProversPerSession: proversPerSession, ProversPerSession: proversPerSession,
Verifier: &config.VerifierConfig{ Verifier: &config.VerifierConfig{
MockMode: true, MockMode: true,
LowVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "homestead",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "bernoulli",
MinProverVersion: "v4.3.0",
},
}, },
BatchCollectionTimeSec: 10, BatchCollectionTimeSec: 10,
ChunkCollectionTimeSec: 10, ChunkCollectionTimeSec: 10,
BundleCollectionTimeSec: 10, BundleCollectionTimeSec: 10,
MaxVerifierWorkers: 10,
SessionAttempts: 5, SessionAttempts: 5,
MinProverVersion: minProverVersion,
}, },
Auth: &config.Auth{ Auth: &config.Auth{
ChallengeExpireDurationSec: tokenTimeout, ChallengeExpireDurationSec: tokenTimeout,
@@ -105,20 +109,12 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
} }
var chainConf params.ChainConfig var chainConf params.ChainConfig
for forkName, forkNumber := range nameForkMap { for _, forkName := range forks {
switch forkName { switch forkName {
case "shanghai":
chainConf.ShanghaiBlock = big.NewInt(forkNumber)
case "bernoulli": case "bernoulli":
chainConf.BernoulliBlock = big.NewInt(forkNumber) chainConf.BernoulliBlock = big.NewInt(100)
case "london":
chainConf.LondonBlock = big.NewInt(forkNumber)
case "istanbul":
chainConf.IstanbulBlock = big.NewInt(forkNumber)
case "homestead": case "homestead":
chainConf.HomesteadBlock = big.NewInt(forkNumber) chainConf.HomesteadBlock = big.NewInt(0)
case "eip155":
chainConf.EIP155Block = big.NewInt(forkNumber)
} }
} }
@@ -201,7 +197,7 @@ func TestApis(t *testing.T) {
func testHandshake(t *testing.T) { func testHandshake(t *testing.T) {
// Setup coordinator and http server. // Setup coordinator and http server.
coordinatorURL := randomURL() coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne}) proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
defer func() { defer func() {
proofCollector.Stop() proofCollector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -214,7 +210,7 @@ func testHandshake(t *testing.T) {
func testFailedHandshake(t *testing.T) { func testFailedHandshake(t *testing.T) {
// Setup coordinator and http server. // Setup coordinator and http server.
coordinatorURL := randomURL() coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne}) proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
defer func() { defer func() {
proofCollector.Stop() proofCollector.Stop()
}() }()
@@ -232,7 +228,7 @@ func testFailedHandshake(t *testing.T) {
func testGetTaskBlocked(t *testing.T) { func testGetTaskBlocked(t *testing.T) {
coordinatorURL := randomURL() coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne}) collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
defer func() { defer func() {
collector.Stop() collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -250,12 +246,12 @@ func testGetTaskBlocked(t *testing.T) {
expectedErr := fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", chunkProver.publicKey(), chunkProver.proverName, chunkProver.proverVersion) expectedErr := fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", chunkProver.publicKey(), chunkProver.proverName, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk) code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code) assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code)
assert.Equal(t, expectedErr, fmt.Errorf(errMsg)) assert.Equal(t, expectedErr, errors.New(errMsg))
expectedErr = fmt.Errorf("get empty prover task") expectedErr = errors.New("get empty prover task")
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch) code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrCoordinatorEmptyProofData, code) assert.Equal(t, types.ErrCoordinatorEmptyProofData, code)
assert.Equal(t, expectedErr, fmt.Errorf(errMsg)) assert.Equal(t, expectedErr, errors.New(errMsg))
err = proverBlockListOrm.InsertProverPublicKey(context.Background(), batchProver.proverName, batchProver.publicKey()) err = proverBlockListOrm.InsertProverPublicKey(context.Background(), batchProver.proverName, batchProver.publicKey())
assert.NoError(t, err) assert.NoError(t, err)
@@ -263,20 +259,20 @@ func testGetTaskBlocked(t *testing.T) {
err = proverBlockListOrm.DeleteProverPublicKey(context.Background(), chunkProver.publicKey()) err = proverBlockListOrm.DeleteProverPublicKey(context.Background(), chunkProver.publicKey())
assert.NoError(t, err) assert.NoError(t, err)
expectedErr = fmt.Errorf("get empty prover task") expectedErr = errors.New("get empty prover task")
code, errMsg = chunkProver.tryGetProverTask(t, message.ProofTypeChunk) code, errMsg = chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrCoordinatorEmptyProofData, code) assert.Equal(t, types.ErrCoordinatorEmptyProofData, code)
assert.Equal(t, expectedErr, fmt.Errorf(errMsg)) assert.Equal(t, expectedErr, errors.New(errMsg))
expectedErr = fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", batchProver.publicKey(), batchProver.proverName, batchProver.proverVersion) expectedErr = fmt.Errorf("return prover task err:check prover task parameter failed, error:public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", batchProver.publicKey(), batchProver.proverName, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch) code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code) assert.Equal(t, types.ErrCoordinatorGetTaskFailure, code)
assert.Equal(t, expectedErr, fmt.Errorf(errMsg)) assert.Equal(t, expectedErr, errors.New(errMsg))
} }
func testOutdatedProverVersion(t *testing.T) { func testOutdatedProverVersion(t *testing.T) {
coordinatorURL := randomURL() coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne}) collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
defer func() { defer func() {
collector.Stop() collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -288,20 +284,22 @@ func testOutdatedProverVersion(t *testing.T) {
batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch, "v1.999.999") batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch, "v1.999.999")
assert.True(t, chunkProver.healthCheckSuccess(t)) assert.True(t, chunkProver.healthCheckSuccess(t))
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, chunkProver.proverVersion) expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk) code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrJWTCommonErr, code) assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, fmt.Errorf(errMsg)) assert.Equal(t, expectedErr, errors.New(errMsg))
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, batchProver.proverVersion) expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch) code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrJWTCommonErr, code) assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, fmt.Errorf(errMsg)) assert.Equal(t, expectedErr, errors.New(errMsg))
} }
func testValidProof(t *testing.T) { func testValidProof(t *testing.T) {
coordinatorURL := randomURL() coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo}) collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
defer func() { defer func() {
collector.Stop() collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -384,7 +382,7 @@ func testValidProof(t *testing.T) {
func testInvalidProof(t *testing.T) { func testInvalidProof(t *testing.T) {
// Setup coordinator and ws server. // Setup coordinator and ws server.
coordinatorURL := randomURL() coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo}) collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
defer func() { defer func() {
collector.Stop() collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -472,7 +470,7 @@ func testInvalidProof(t *testing.T) {
func testProofGeneratedFailed(t *testing.T) { func testProofGeneratedFailed(t *testing.T) {
// Setup coordinator and ws server. // Setup coordinator and ws server.
coordinatorURL := randomURL() coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo}) collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
defer func() { defer func() {
collector.Stop() collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -573,7 +571,7 @@ func testProofGeneratedFailed(t *testing.T) {
func testTimeoutProof(t *testing.T) { func testTimeoutProof(t *testing.T) {
// Setup coordinator and ws server. // Setup coordinator and ws server.
coordinatorURL := randomURL() coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo}) collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"darwinV2"})
defer func() { defer func() {
collector.Stop() collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background())) assert.NoError(t, httpHandler.Shutdown(context.Background()))

View File

@@ -160,7 +160,7 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) (*
resp, err := client.R(). resp, err := client.R().
SetHeader("Content-Type", "application/json"). SetHeader("Content-Type", "application/json").
SetHeader("Authorization", fmt.Sprintf("Bearer %s", token)). SetHeader("Authorization", fmt.Sprintf("Bearer %s", token)).
SetBody(map[string]interface{}{"prover_height": 100, "task_type": int(proofType)}). SetBody(map[string]interface{}{"prover_height": 100, "task_types": []int{int(proofType)}}).
SetResult(&result). SetResult(&result).
Post("http://" + r.coordinatorURL + "/coordinator/v1/get_task") Post("http://" + r.coordinatorURL + "/coordinator/v1/get_task")
assert.NoError(t, err) assert.NoError(t, err)

View File

@@ -4,6 +4,8 @@ import (
"encoding/json" "encoding/json"
"os" "os"
"path/filepath" "path/filepath"
"scroll-tech/common/utils"
) )
// DBConfig db config // DBConfig db config
@@ -29,5 +31,11 @@ func NewConfig(file string) (*DBConfig, error) {
return nil, err return nil, err
} }
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_ROLLUP_DB_CONFIG")
if err != nil {
return nil, err
}
return cfg, nil return cfg, nil
} }

View File

@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
cur, err := Current(pgDB) cur, err := Current(pgDB)
assert.NoError(t, err) assert.NoError(t, err)
// total number of tables. // total number of tables.
assert.Equal(t, int64(22), cur) assert.Equal(t, int64(24), cur)
} }
func testMigrate(t *testing.T) { func testMigrate(t *testing.T) {
assert.NoError(t, Migrate(pgDB)) assert.NoError(t, Migrate(pgDB))
cur, err := Current(pgDB) cur, err := Current(pgDB)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, int64(22), cur) assert.Equal(t, int64(24), cur)
} }
func testRollback(t *testing.T) { func testRollback(t *testing.T) {
version, err := Current(pgDB) version, err := Current(pgDB)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, int64(22), version) assert.Equal(t, int64(24), version)
assert.NoError(t, Rollback(pgDB, nil)) assert.NoError(t, Rollback(pgDB, nil))

View File

@@ -14,7 +14,6 @@ CREATE TABLE bundle (
batch_proofs_status SMALLINT NOT NULL DEFAULT 1, batch_proofs_status SMALLINT NOT NULL DEFAULT 1,
proving_status SMALLINT NOT NULL DEFAULT 1, proving_status SMALLINT NOT NULL DEFAULT 1,
proof BYTEA DEFAULT NULL, proof BYTEA DEFAULT NULL,
prover_assigned_at TIMESTAMP(0) DEFAULT NULL,
proved_at TIMESTAMP(0) DEFAULT NULL, proved_at TIMESTAMP(0) DEFAULT NULL,
proof_time_sec INTEGER DEFAULT NULL, proof_time_sec INTEGER DEFAULT NULL,
total_attempts SMALLINT NOT NULL DEFAULT 0, total_attempts SMALLINT NOT NULL DEFAULT 0,

View File

@@ -0,0 +1,23 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE chunk
ADD COLUMN codec_version SMALLINT NOT NULL DEFAULT 0,
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE batch
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS chunk
DROP COLUMN IF EXISTS enable_compress,
DROP COLUMN IF EXISTS codec_version;
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS enable_compress;
-- +goose StatementEnd

View File

@@ -0,0 +1,15 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE batch
ADD COLUMN blob_bytes BYTEA;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS blob_bytes;
-- +goose StatementEnd

File diff suppressed because it is too large Load Diff

406
prover/Cargo.lock generated
View File

@@ -28,44 +28,10 @@ dependencies = [
"cpufeatures", "cpufeatures",
] ]
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.11.0",
]
[[package]] [[package]]
name = "aggregator" name = "aggregator"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"ark-std 0.3.0", "ark-std 0.3.0",
"bitstream-io", "bitstream-io",
@@ -96,6 +62,40 @@ dependencies = [
"zkevm-circuits 0.12.0", "zkevm-circuits 0.12.0",
] ]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
]
[[package]] [[package]]
name = "ahash" name = "ahash"
version = "0.8.11" version = "0.8.11"
@@ -632,37 +632,10 @@ version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"external-tracer 0.11.0",
"gadgets 0.11.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]] [[package]]
name = "bus-mapping" name = "bus-mapping"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.12.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)", "ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -684,6 +657,31 @@ dependencies = [
"strum_macros 0.25.3", "strum_macros 0.25.3",
] ]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]] [[package]]
name = "byte-slice-cast" name = "byte-slice-cast"
version = "1.2.2" version = "1.2.2"
@@ -1311,8 +1309,8 @@ dependencies = [
[[package]] [[package]]
name = "eth-types" name = "eth-types"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"base64 0.13.1", "base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)", "ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1329,7 +1327,6 @@ dependencies = [
"revm-primitives", "revm-primitives",
"serde", "serde",
"serde_json", "serde_json",
"serde_stacker",
"serde_with", "serde_with",
"sha3 0.10.8", "sha3 0.10.8",
"strum 0.25.0", "strum 0.25.0",
@@ -1340,8 +1337,8 @@ dependencies = [
[[package]] [[package]]
name = "eth-types" name = "eth-types"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"base64 0.13.1", "base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)", "ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1560,11 +1557,11 @@ dependencies = [
[[package]] [[package]]
name = "external-tracer" name = "external-tracer"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.11.0", "eth-types 0.12.0",
"geth-utils 0.11.0", "geth-utils 0.12.0",
"log", "log",
"serde", "serde",
"serde_json", "serde_json",
@@ -1573,11 +1570,11 @@ dependencies = [
[[package]] [[package]]
name = "external-tracer" name = "external-tracer"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.13.0",
"geth-utils 0.12.0", "geth-utils 0.13.0",
"log", "log",
"serde", "serde",
"serde_json", "serde_json",
@@ -1790,10 +1787,10 @@ dependencies = [
[[package]] [[package]]
name = "gadgets" name = "gadgets"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.11.0", "eth-types 0.12.0",
"halo2_proofs", "halo2_proofs",
"poseidon-base", "poseidon-base",
"sha3 0.10.8", "sha3 0.10.8",
@@ -1802,10 +1799,10 @@ dependencies = [
[[package]] [[package]]
name = "gadgets" name = "gadgets"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.13.0",
"halo2_proofs", "halo2_proofs",
"poseidon-base", "poseidon-base",
"sha3 0.10.8", "sha3 0.10.8",
@@ -1825,8 +1822,8 @@ dependencies = [
[[package]] [[package]]
name = "geth-utils" name = "geth-utils"
version = "0.11.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"env_logger 0.10.2", "env_logger 0.10.2",
"gobuild", "gobuild",
@@ -1835,8 +1832,8 @@ dependencies = [
[[package]] [[package]]
name = "geth-utils" name = "geth-utils"
version = "0.12.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"env_logger 0.10.2", "env_logger 0.10.2",
"gobuild", "gobuild",
@@ -2673,25 +2670,10 @@ dependencies = [
"subtle", "subtle",
] ]
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.11.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]] [[package]]
name = "mock" name = "mock"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.12.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)", "ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -2704,23 +2686,24 @@ dependencies = [
] ]
[[package]] [[package]]
name = "mpt-zktrie" name = "mock"
version = "0.11.0" version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [ dependencies = [
"eth-types 0.11.0", "eth-types 0.13.0",
"halo2curves", "ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"hex", "ethers-signers",
"external-tracer 0.13.0",
"itertools 0.11.0",
"log", "log",
"num-bigint", "rand",
"poseidon-base", "rand_chacha",
"zktrie",
] ]
[[package]] [[package]]
name = "mpt-zktrie" name = "mpt-zktrie"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"eth-types 0.12.0", "eth-types 0.12.0",
"halo2curves", "halo2curves",
@@ -2728,7 +2711,21 @@ dependencies = [
"log", "log",
"num-bigint", "num-bigint",
"poseidon-base", "poseidon-base",
"zktrie", "zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
] ]
[[package]] [[package]]
@@ -3294,8 +3291,8 @@ dependencies = [
"http 1.1.0", "http 1.1.0",
"log", "log",
"once_cell", "once_cell",
"prover 0.11.0",
"prover 0.12.0", "prover 0.12.0",
"prover 0.13.0",
"rand", "rand",
"reqwest 0.12.4", "reqwest 0.12.4",
"reqwest-middleware", "reqwest-middleware",
@@ -3309,44 +3306,10 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"aggregator 0.11.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.11.0",
"chrono",
"dotenvy",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.11.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.11.0",
]
[[package]] [[package]]
name = "prover" name = "prover"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"aggregator 0.12.0", "aggregator 0.12.0",
"anyhow", "anyhow",
@@ -3377,6 +3340,40 @@ dependencies = [
"zkevm-circuits 0.12.0", "zkevm-circuits 0.12.0",
] ]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
]
[[package]] [[package]]
name = "psm" name = "psm"
version = "0.1.21" version = "0.1.21"
@@ -5338,52 +5335,10 @@ dependencies = [
"syn 2.0.66", "syn 2.0.66",
] ]
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"array-init",
"bus-mapping 0.11.0",
"either",
"env_logger 0.10.2",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]] [[package]]
name = "zkevm-circuits" name = "zkevm-circuits"
version = "0.12.0" version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0-rc.3#706ba9ee7292b8e15a4fa0d4634a65dffa999402" source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
dependencies = [ dependencies = [
"array-init", "array-init",
"bus-mapping 0.12.0", "bus-mapping 0.12.0",
@@ -5422,13 +5377,64 @@ dependencies = [
"subtle", "subtle",
] ]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.2",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]] [[package]]
name = "zktrie" name = "zktrie"
version = "0.3.0" version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8" source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [ dependencies = [
"gobuild", "gobuild",
"zktrie_rust", "zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
] ]
[[package]] [[package]]
@@ -5445,6 +5451,20 @@ dependencies = [
"strum_macros 0.24.3", "strum_macros 0.24.3",
] ]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]] [[package]]
name = "zstd" name = "zstd"
version = "0.13.0" version = "0.13.0"

View File

@@ -29,8 +29,8 @@ ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" } ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" } halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] } snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover_curie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.5", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] } prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0-rc.3", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] } prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1" base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] } reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3" reqwest-middleware = "0.3"

View File

@@ -3,7 +3,7 @@
"keystore_path": "keystore.json", "keystore_path": "keystore.json",
"keystore_password": "prover-pwd", "keystore_password": "prover-pwd",
"db_path": "unique-db-path-for-prover-1", "db_path": "unique-db-path-for-prover-1",
"proof_type": 2, "prover_type": 2,
"low_version_circuit": { "low_version_circuit": {
"hard_fork_name": "bernoulli", "hard_fork_name": "bernoulli",
"params_path": "params", "params_path": "params",

View File

@@ -14,6 +14,8 @@ use types::*;
use crate::{config::Config, key_signer::KeySigner}; use crate::{config::Config, key_signer::KeySigner};
pub use errors::ProofStatusNotOKError;
pub struct CoordinatorClient<'a> { pub struct CoordinatorClient<'a> {
api: Api, api: Api,
token: Option<String>, token: Option<String>,

View File

@@ -1,4 +1,6 @@
use super::types::*; use crate::{coordinator_client::ProofStatusNotOKError, types::ProofStatus};
use super::{errors::*, types::*};
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use core::time::Duration; use core::time::Duration;
use reqwest::{header::CONTENT_TYPE, Url}; use reqwest::{header::CONTENT_TYPE, Url};
@@ -76,7 +78,23 @@ impl Api {
token: &String, token: &String,
) -> Result<Response<SubmitProofResponseData>> { ) -> Result<Response<SubmitProofResponseData>> {
let method = "/coordinator/v1/submit_proof"; let method = "/coordinator/v1/submit_proof";
self.post_with_token(method, req, token).await let response = self
.post_with_token::<SubmitProofRequest, Response<SubmitProofResponseData>>(
method, req, token,
)
.await?;
// when req's status already not ok, we mark the error returned from coordinator and will
// ignore it later.
if response.errcode == ErrorCode::ErrCoordinatorHandleZkProofFailure
&& req.status != ProofStatus::Ok
&& response
.errmsg
.contains("validator failure proof msg status not ok")
{
return Err(anyhow::anyhow!(ProofStatusNotOKError));
}
Ok(response)
} }
async fn post_with_token<Req, Resp>( async fn post_with_token<Req, Resp>(

View File

@@ -1,4 +1,5 @@
use serde::{Deserialize, Deserializer}; use serde::{Deserialize, Deserializer};
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq)] #[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode { pub enum ErrorCode {
@@ -51,3 +52,14 @@ impl<'de> Deserialize<'de> for ErrorCode {
Ok(ErrorCode::from_i32(v)) Ok(ErrorCode::from_i32(v))
} }
} }
// ====================================================
#[derive(Debug, Clone)]
pub struct ProofStatusNotOKError;
impl fmt::Display for ProofStatusNotOKError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "proof status not ok")
}
}

View File

@@ -1,4 +1,5 @@
#![feature(lazy_cell)] #![feature(lazy_cell)]
#![feature(core_intrinsics)]
mod config; mod config;
mod coordinator_client; mod coordinator_client;
@@ -37,7 +38,7 @@ struct Args {
log_file: Option<String>, log_file: Option<String>,
} }
fn main() -> Result<(), Box<dyn std::error::Error>> { fn start() -> Result<()> {
let args = Args::parse(); let args = Args::parse();
if args.version { if args.version {
@@ -76,3 +77,10 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
Ok(()) Ok(())
} }
fn main() {
let result = start();
if let Err(e) = result {
log::error!("main exit with error {:#}", e)
}
}

View File

@@ -71,7 +71,6 @@ impl<'a> Prover<'a> {
let mut req = GetTaskRequest { let mut req = GetTaskRequest {
task_types: get_task_types(self.config.prover_type), task_types: get_task_types(self.config.prover_type),
prover_height: None, prover_height: None,
// vks: self.circuits_handler_provider.borrow().get_vks(),
}; };
if self.config.prover_type == ProverType::Chunk { if self.config.prover_type == ProverType::Chunk {

View File

@@ -1,4 +1,4 @@
use super::{prover::Prover, task_cache::TaskCache}; use super::{coordinator_client::ProofStatusNotOKError, prover::Prover, task_cache::TaskCache};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use std::rc::Rc; use std::rc::Rc;
@@ -16,7 +16,11 @@ impl<'a> TaskProcessor<'a> {
loop { loop {
log::info!("start a new round."); log::info!("start a new round.");
if let Err(err) = self.prove_and_submit() { if let Err(err) = self.prove_and_submit() {
log::error!("encounter error: {:#}", err); if err.is::<ProofStatusNotOKError>() {
log::info!("proof status not ok, downgrade level to info.");
} else {
log::error!("encounter error: {:#}", err);
}
} else { } else {
log::info!("prove & submit succeed."); log::info!("prove & submit succeed.");
} }
@@ -54,11 +58,18 @@ impl<'a> TaskProcessor<'a> {
); );
let result = match self.prover.prove_task(&task_wrapper.task) { let result = match self.prover.prove_task(&task_wrapper.task) {
Ok(proof_detail) => self.prover.submit_proof(proof_detail, &task_wrapper.task), Ok(proof_detail) => self.prover.submit_proof(proof_detail, &task_wrapper.task),
Err(error) => self.prover.submit_error( Err(error) => {
&task_wrapper.task, log::error!(
super::types::ProofFailureType::NoPanic, "failed to prove task, id: {}, error: {:#}",
error, &task_wrapper.task.id,
), error
);
self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
)
}
}; };
return result; return result;
} }

View File

@@ -54,7 +54,7 @@ impl Default for TaskType {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ProverType { pub enum ProverType {
Chunk, Chunk,
Batch, Batch,

View File

@@ -1,5 +1,6 @@
mod curie; mod common;
mod darwin; mod darwin;
mod darwin_v2;
use super::geth_client::GethClient; use super::geth_client::GethClient;
use crate::{ use crate::{
@@ -8,8 +9,8 @@ use crate::{
utils::get_task_types, utils::get_task_types,
}; };
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use curie::CurieHandler;
use darwin::DarwinHandler; use darwin::DarwinHandler;
use darwin_v2::DarwinV2Handler;
use std::{cell::RefCell, collections::HashMap, rc::Rc}; use std::{cell::RefCell, collections::HashMap, rc::Rc};
type HardForkName = String; type HardForkName = String;
@@ -38,7 +39,7 @@ pub struct CircuitsHandlerProvider<'a> {
geth_client: Option<Rc<RefCell<GethClient>>>, geth_client: Option<Rc<RefCell<GethClient>>>,
circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>, circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
current_hard_fork_name: Option<HardForkName>, current_fork_name: Option<HardForkName>,
current_circuit: Option<Rc<Box<dyn CircuitsHandler>>>, current_circuit: Option<Rc<Box<dyn CircuitsHandler>>>,
} }
@@ -60,7 +61,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.low_version_circuit.hard_fork_name &config.low_version_circuit.hard_fork_name
); );
AssetsDirEnvConfig::enable_first(); AssetsDirEnvConfig::enable_first();
CurieHandler::new( DarwinHandler::new(
prover_type, prover_type,
&config.low_version_circuit.params_path, &config.low_version_circuit.params_path,
&config.low_version_circuit.assets_path, &config.low_version_circuit.assets_path,
@@ -83,7 +84,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.high_version_circuit.hard_fork_name &config.high_version_circuit.hard_fork_name
); );
AssetsDirEnvConfig::enable_second(); AssetsDirEnvConfig::enable_second();
DarwinHandler::new( DarwinV2Handler::new(
prover_type, prover_type,
&config.high_version_circuit.params_path, &config.high_version_circuit.params_path,
&config.high_version_circuit.assets_path, &config.high_version_circuit.assets_path,
@@ -102,7 +103,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
config, config,
geth_client, geth_client,
circuits_handler_builder_map: m, circuits_handler_builder_map: m,
current_hard_fork_name: None, current_fork_name: None,
current_circuit: None, current_circuit: None,
}; };
@@ -113,13 +114,12 @@ impl<'a> CircuitsHandlerProvider<'a> {
&mut self, &mut self,
hard_fork_name: &String, hard_fork_name: &String,
) -> Result<Rc<Box<dyn CircuitsHandler>>> { ) -> Result<Rc<Box<dyn CircuitsHandler>>> {
match &self.current_hard_fork_name { match &self.current_fork_name {
Some(name) if name == hard_fork_name => { Some(fork_name) if fork_name == hard_fork_name => {
log::info!("get circuits handler from cache"); log::info!("get circuits handler from cache");
if let Some(handler) = &self.current_circuit { if let Some(handler) = &self.current_circuit {
Ok(handler.clone()) Ok(handler.clone())
} else { } else {
log::error!("missing cached handler, there must be something wrong.");
bail!("missing cached handler, there must be something wrong.") bail!("missing cached handler, there must be something wrong.")
} }
} }
@@ -131,12 +131,11 @@ impl<'a> CircuitsHandlerProvider<'a> {
log::info!("building circuits handler for {hard_fork_name}"); log::info!("building circuits handler for {hard_fork_name}");
let handler = builder(self.prover_type, self.config, self.geth_client.clone()) let handler = builder(self.prover_type, self.config, self.geth_client.clone())
.expect("failed to build circuits handler"); .expect("failed to build circuits handler");
self.current_hard_fork_name = Some(hard_fork_name.clone()); self.current_fork_name = Some(hard_fork_name.clone());
let rc_handler = Rc::new(handler); let rc_handler = Rc::new(handler);
self.current_circuit = Some(rc_handler.clone()); self.current_circuit = Some(rc_handler.clone());
Ok(rc_handler) Ok(rc_handler)
} else { } else {
log::error!("missing builder, there must be something wrong.");
bail!("missing builder, there must be something wrong.") bail!("missing builder, there must be something wrong.")
} }
} }

View File

@@ -0,0 +1,33 @@
use std::{collections::BTreeMap, rc::Rc};
use crate::types::ProverType;
use once_cell::sync::OnceCell;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
static mut PARAMS_MAP: OnceCell<Rc<BTreeMap<u32, ParamsKZG<Bn256>>>> = OnceCell::new();
pub fn get_params_map_instance<'a, F>(load_params_func: F) -> &'a BTreeMap<u32, ParamsKZG<Bn256>>
where
F: FnOnce() -> BTreeMap<u32, ParamsKZG<Bn256>>,
{
unsafe {
PARAMS_MAP.get_or_init(|| {
let params_map = load_params_func();
Rc::new(params_map)
})
}
}
pub fn get_degrees<F>(prover_types: &std::collections::HashSet<ProverType>, f: F) -> Vec<u32>
where
F: FnMut(&ProverType) -> Vec<u32>,
{
prover_types
.iter()
.flat_map(f)
.collect::<std::collections::HashSet<u32>>()
.into_iter()
.collect()
}

View File

@@ -1,4 +1,4 @@
use super::CircuitsHandler; use super::{common::*, CircuitsHandler};
use crate::{ use crate::{
geth_client::GethClient, geth_client::GethClient,
types::{ProverType, TaskType}, types::{ProverType, TaskType},
@@ -11,7 +11,11 @@ use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc}; use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_darwin::{ use prover_darwin::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver, aggregator::Prover as BatchProver,
check_chunk_hashes,
common::Prover as CommonProver,
config::{AGG_DEGREES, ZKEVM_DEGREES},
zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo, BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask, ChunkProof, ChunkProvingTask,
}; };
@@ -39,32 +43,65 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
#[derive(Default)] #[derive(Default)]
pub struct DarwinHandler { pub struct DarwinHandler {
chunk_prover: Option<RefCell<ChunkProver>>, chunk_prover: Option<RefCell<ChunkProver<'static>>>,
batch_prover: Option<RefCell<BatchProver>>, batch_prover: Option<RefCell<BatchProver<'static>>>,
geth_client: Option<Rc<RefCell<GethClient>>>, geth_client: Option<Rc<RefCell<GethClient>>>,
} }
impl DarwinHandler { impl DarwinHandler {
pub fn new_multi(
prover_types: Vec<ProverType>,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
let class_name = std::intrinsics::type_name::<Self>();
let prover_types_set = prover_types
.into_iter()
.collect::<std::collections::HashSet<ProverType>>();
let mut handler = Self {
batch_prover: None,
chunk_prover: None,
geth_client,
};
let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
ProverType::Chunk => ZKEVM_DEGREES.clone(),
ProverType::Batch => AGG_DEGREES.clone(),
});
let params_map = get_params_map_instance(|| {
log::info!(
"calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
class_name,
prover_types_set,
degrees
);
CommonProver::load_params_map(params_dir, &degrees)
});
for prover_type in prover_types_set {
match prover_type {
ProverType::Chunk => {
handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
params_map, assets_dir,
)));
}
ProverType::Batch => {
handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
params_map, assets_dir,
)))
}
}
}
Ok(handler)
}
pub fn new( pub fn new(
prover_type: ProverType, prover_type: ProverType,
params_dir: &str, params_dir: &str,
assets_dir: &str, assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>, geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> { ) -> Result<Self> {
match prover_type { Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
ProverType::Chunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProverType::Batch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
}
} }
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> { fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
@@ -253,7 +290,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets"; static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| { static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("CURIE_TEST_DIR") std::env::var("DARWIN_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR)) .unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/') .trim_end_matches('/')
.to_string() .to_string()
@@ -265,9 +302,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> = static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR)); LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> = static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR)); LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> = static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR)); LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
#[test] #[test]
fn it_works() { fn it_works() {
@@ -277,9 +314,14 @@ mod tests {
#[test] #[test]
fn test_circuits() -> Result<()> { fn test_circuits() -> Result<()> {
let chunk_handler = let bi_handler = DarwinHandler::new_multi(
DarwinHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?; vec![ProverType::Chunk, ProverType::Batch],
&PARAMS_PATH,
&ASSETS_PATH,
None,
)?;
let chunk_handler = bi_handler;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap(); let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available"); check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
@@ -302,8 +344,7 @@ mod tests {
chunk_proofs.push(chunk_proof); chunk_proofs.push(chunk_proof);
} }
let batch_handler = let batch_handler = chunk_handler;
DarwinHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap(); let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available"); check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs); let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs);

View File

@@ -1,4 +1,4 @@
use super::CircuitsHandler; use super::{common::*, CircuitsHandler};
use crate::{ use crate::{
geth_client::GethClient, geth_client::GethClient,
types::{ProverType, TaskType}, types::{ProverType, TaskType},
@@ -10,21 +10,29 @@ use serde::Deserialize;
use crate::types::{CommonHash, Task}; use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc}; use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_curie::{ use prover_darwin_v2::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver, aggregator::Prover as BatchProver,
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask, check_chunk_hashes,
common::Prover as CommonProver,
config::{AGG_DEGREES, ZKEVM_DEGREES},
zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask,
}; };
// Only used for debugging. // Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok()); static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
#[derive(Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub struct BatchTaskDetail { pub struct BatchTaskDetail {
pub chunk_infos: Vec<ChunkInfo>, pub chunk_infos: Vec<ChunkInfo>,
pub chunk_proofs: Vec<ChunkProof>, #[serde(flatten)]
pub batch_proving_task: BatchProvingTask,
} }
#[derive(Deserialize)] type BundleTaskDetail = BundleProvingTask;
#[derive(Debug, Clone, Deserialize)]
pub struct ChunkTaskDetail { pub struct ChunkTaskDetail {
pub block_hashes: Vec<CommonHash>, pub block_hashes: Vec<CommonHash>,
} }
@@ -34,33 +42,66 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
} }
#[derive(Default)] #[derive(Default)]
pub struct CurieHandler { pub struct DarwinV2Handler {
chunk_prover: Option<RefCell<ChunkProver>>, chunk_prover: Option<RefCell<ChunkProver<'static>>>,
batch_prover: Option<RefCell<BatchProver>>, batch_prover: Option<RefCell<BatchProver<'static>>>,
geth_client: Option<Rc<RefCell<GethClient>>>, geth_client: Option<Rc<RefCell<GethClient>>>,
} }
impl CurieHandler { impl DarwinV2Handler {
pub fn new_multi(
prover_types: Vec<ProverType>,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
let class_name = std::intrinsics::type_name::<Self>();
let prover_types_set = prover_types
.into_iter()
.collect::<std::collections::HashSet<ProverType>>();
let mut handler = Self {
batch_prover: None,
chunk_prover: None,
geth_client,
};
let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
ProverType::Chunk => ZKEVM_DEGREES.clone(),
ProverType::Batch => AGG_DEGREES.clone(),
});
let params_map = get_params_map_instance(|| {
log::info!(
"calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
class_name,
prover_types_set,
degrees
);
CommonProver::load_params_map(params_dir, &degrees)
});
for prover_type in prover_types_set {
match prover_type {
ProverType::Chunk => {
handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
params_map, assets_dir,
)));
}
ProverType::Batch => {
handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
params_map, assets_dir,
)))
}
}
}
Ok(handler)
}
pub fn new( pub fn new(
prover_type: ProverType, prover_type: ProverType,
params_dir: &str, params_dir: &str,
assets_dir: &str, assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>, geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> { ) -> Result<Self> {
match prover_type { Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
ProverType::Chunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProverType::Batch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
}
} }
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> { fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
@@ -83,11 +124,15 @@ impl CurieHandler {
Ok(serde_json::to_string(&chunk_proof)?) Ok(serde_json::to_string(&chunk_proof)?)
} }
fn gen_batch_proof_raw( fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
&self,
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() { if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
.collect();
let chunk_proofs: Vec<ChunkProof> = let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect(); chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
@@ -97,11 +142,11 @@ impl CurieHandler {
bail!("non-match chunk protocol") bail!("non-match chunk protocol")
} }
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?; check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch = BatchProvingTask { chunk_proofs }; let batch_proof = prover.borrow_mut().gen_batch_proof(
let batch_proof = batch_task_detail.batch_proving_task,
prover None,
.borrow_mut() self.get_output_dir(),
.gen_agg_evm_proof(batch, None, self.get_output_dir())?; )?;
return Ok(batch_proof); return Ok(batch_proof);
} }
@@ -110,12 +155,32 @@ impl CurieHandler {
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> { fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id); log::info!("[circuit] gen_batch_proof for task {}", task.id);
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?; let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?; let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
Ok(serde_json::to_string(&batch_proof)?) Ok(serde_json::to_string(&batch_proof)?)
} }
fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let bundle_proof = prover.borrow_mut().gen_bundle_proof(
bundle_task_detail,
None,
self.get_output_dir(),
)?;
return Ok(bundle_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_bundle_proof for task {}", task.id);
let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
Ok(serde_json::to_string(&bundle_proof)?)
}
fn get_output_dir(&self) -> Option<&str> { fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref() OUTPUT_DIR.as_deref()
} }
@@ -125,17 +190,6 @@ impl CurieHandler {
self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes) self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
} }
fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkInfo, ChunkProof)>> {
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
Ok(batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.chunk_proofs.clone())
.collect())
}
fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> { fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
if block_hashes.is_empty() { if block_hashes.is_empty() {
log::error!("[prover] failed to get sorted traces: block_hashes are empty"); log::error!("[prover] failed to get sorted traces: block_hashes are empty");
@@ -190,7 +244,7 @@ impl CurieHandler {
} }
} }
impl CircuitsHandler for CurieHandler { impl CircuitsHandler for DarwinV2Handler {
fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> { fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
match task_type { match task_type {
TaskType::Chunk => self TaskType::Chunk => self
@@ -200,8 +254,11 @@ impl CircuitsHandler for CurieHandler {
TaskType::Batch => self TaskType::Batch => self
.batch_prover .batch_prover
.as_ref() .as_ref()
.and_then(|prover| prover.borrow().get_vk()), .and_then(|prover| prover.borrow().get_batch_vk()),
TaskType::Bundle => None, TaskType::Bundle => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_bundle_vk()),
_ => unreachable!(), _ => unreachable!(),
} }
} }
@@ -210,6 +267,7 @@ impl CircuitsHandler for CurieHandler {
match task_type { match task_type {
TaskType::Chunk => self.gen_chunk_proof(task), TaskType::Chunk => self.gen_chunk_proof(task),
TaskType::Batch => self.gen_batch_proof(task), TaskType::Batch => self.gen_batch_proof(task),
TaskType::Bundle => self.gen_bundle_proof(task),
_ => unreachable!(), _ => unreachable!(),
} }
} }
@@ -221,7 +279,11 @@ impl CircuitsHandler for CurieHandler {
mod tests { mod tests {
use super::*; use super::*;
use crate::zk_circuits_handler::utils::encode_vk; use crate::zk_circuits_handler::utils::encode_vk;
use prover_curie::utils::chunk_trace_to_witness_block; use ethers_core::types::H256;
use prover_darwin_v2::{
aggregator::eip4844, utils::chunk_trace_to_witness_block, BatchData, BatchHeader,
MAX_AGG_SNARKS,
};
use std::{path::PathBuf, sync::LazyLock}; use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor] #[ctor::ctor]
@@ -232,7 +294,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets"; static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| { static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("CURIE_TEST_DIR") std::env::var("DARWIN_V2_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR)) .unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/') .trim_end_matches('/')
.to_string() .to_string()
@@ -244,9 +306,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> = static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR)); LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> = static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR)); LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> = static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR)); LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
#[test] #[test]
fn it_works() { fn it_works() {
@@ -256,20 +318,27 @@ mod tests {
#[test] #[test]
fn test_circuits() -> Result<()> { fn test_circuits() -> Result<()> {
let chunk_handler = CurieHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?; let bi_handler = DarwinV2Handler::new_multi(
vec![ProverType::Chunk, ProverType::Batch],
&PARAMS_PATH,
&ASSETS_PATH,
None,
)?;
let chunk_handler = bi_handler;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap(); let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available"); check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?; let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths); log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_traces = vec![];
let mut chunk_infos = vec![]; let mut chunk_infos = vec![];
let mut chunk_proofs = vec![]; let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() { for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1); let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}"); log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?; let chunk_trace = read_chunk_trace(chunk_path)?;
chunk_traces.push(chunk_trace.clone());
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?; let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info); chunk_infos.push(chunk_info);
@@ -280,30 +349,96 @@ mod tests {
chunk_proofs.push(chunk_proof); chunk_proofs.push(chunk_proof);
} }
let batch_handler = CurieHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?; let batch_handler = chunk_handler;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap(); let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available"); check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect(); let batch_task_detail = make_batch_task_detail(chunk_traces, chunk_proofs, None);
log::info!("start to prove batch"); log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?; let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
let proof_data = serde_json::to_string(&batch_proof)?; let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?; dump_proof("batch_proof".to_string(), proof_data)?;
Ok(()) Ok(())
} }
fn check_vk(task_type: TaskType, vk: Vec<u8>, info: &str) { // copied from https://github.com/scroll-tech/scroll-prover/blob/main/integration/src/prove.rs
log::info!("check_vk, {:?}", task_type); fn get_blob_from_chunks(chunks: &[ChunkInfo]) -> Vec<u8> {
let vk_from_file = read_vk(task_type).unwrap(); let num_chunks = chunks.len();
let padded_chunk =
ChunkInfo::mock_padded_chunk_info_for_testing(chunks.last().as_ref().unwrap());
let chunks_with_padding = [
chunks.to_vec(),
vec![padded_chunk; MAX_AGG_SNARKS - num_chunks],
]
.concat();
let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(chunks.len(), &chunks_with_padding);
let batch_bytes = batch_data.get_batch_data_bytes();
let blob_bytes = eip4844::get_blob_bytes(&batch_bytes);
log::info!("blob_bytes len {}", blob_bytes.len());
blob_bytes
}
// TODO: chunk_infos can be extracted from chunk_proofs.
// Still needed?
fn make_batch_task_detail(
chunk_traces: Vec<Vec<BlockTrace>>,
chunk_proofs: Vec<ChunkProof>,
last_batcher_header: Option<BatchHeader<{ MAX_AGG_SNARKS }>>,
) -> BatchTaskDetail {
// dummy parent batch hash
let dummy_parent_batch_hash = H256([
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
]);
let chunk_infos: Vec<_> = chunk_proofs.iter().map(|p| p.chunk_info.clone()).collect();
let l1_message_popped = chunk_traces
.iter()
.flatten()
.map(|chunk| chunk.num_l1_txs())
.sum();
let last_block_timestamp = chunk_traces.last().map_or(0, |block_traces| {
block_traces
.last()
.map_or(0, |block_trace| block_trace.header.timestamp.as_u64())
});
let blob_bytes = get_blob_from_chunks(&chunk_infos);
let batch_header = BatchHeader::construct_from_chunks(
last_batcher_header.map_or(4, |header| header.version),
last_batcher_header.map_or(123, |header| header.batch_index + 1),
l1_message_popped,
last_batcher_header.map_or(l1_message_popped, |header| {
header.total_l1_message_popped + l1_message_popped
}),
last_batcher_header.map_or(dummy_parent_batch_hash, |header| header.batch_hash()),
last_block_timestamp,
&chunk_infos,
&blob_bytes,
);
BatchTaskDetail {
chunk_infos,
batch_proving_task: BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
},
}
}
fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}") assert_eq!(vk_from_file, encode_vk(vk), "{info}")
} }
fn read_vk(task_type: TaskType) -> Result<String> { fn read_vk(proof_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", task_type); log::info!("read_vk, {:?}", proof_type);
let vk_file = match task_type { let vk_file = match proof_type {
TaskType::Chunk => CHUNK_VK_PATH.clone(), TaskType::Chunk => CHUNK_VK_PATH.clone(),
TaskType::Batch => BATCH_VK_PATH.clone(), TaskType::Batch => BATCH_VK_PATH.clone(),
TaskType::Bundle => unreachable!(), TaskType::Bundle => todo!(),
TaskType::Undefined => unreachable!(), TaskType::Undefined => unreachable!(),
}; };

Some files were not shown because too many files have changed in this diff Show More