Compare commits

...

169 Commits

Author SHA1 Message Date
colinlyguo
83c0a0870c fix 2025-03-27 16:22:49 +08:00
colinlyguo
78a458daa5 update logs 2025-03-27 16:20:16 +08:00
colinlyguo
01d0e48e9a add more details in the log 2025-03-27 16:16:13 +08:00
colinlyguo
5cfb8b6a69 change string to var 2025-03-27 15:51:25 +08:00
colinlyguo
b59db732c3 hardcode openvm vks 2025-03-27 15:43:57 +08:00
kunxian xia
899476731d upgrade stark-gpu crate 2025-03-27 10:32:50 +08:00
Ho
1bec964097 Adapt to rc11 (unified phase) (#1637)
Signed-off-by: noelwei <fan@scroll.io>
Co-authored-by: kunxian xia <xiakunxian130@gmail.com>
Co-authored-by: colinlyguo <colinlyguo@scroll.io>
2025-03-27 00:28:09 +08:00
Morty
b73acca200 update scroll-proving-sdk commit 2025-03-20 19:01:56 +08:00
colinlyguo
77dceaea35 fix 2025-03-19 23:05:48 +08:00
colinlyguo
d0cb8b9aa5 apply changes for sanity checks 2025-03-19 18:31:49 +08:00
colinlyguo
ed057286d9 add back fixme 2025-03-18 18:40:20 +08:00
colinlyguo
b3e46673f6 remove FIXME 2025-03-18 13:50:33 +08:00
colin
2fb27ceb3d Merge branch 'develop' into feat/openvm-euclid-v2 2025-03-18 11:36:46 +08:00
colinlyguo
e3332885ed merge message.LegacyChunkTaskDetail and message.EuclidV2ChunkTaskDetail 2025-03-18 11:35:12 +08:00
colinlyguo
3ee2d2b39c tweaks 2025-03-18 01:30:06 +08:00
Morty
4b21c79443 update scroll-proving-sdk commit 2025-03-17 18:08:21 +08:00
xkx
c6f0299373 feat: euclid v2 GPU prover (#1623)
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: Morty <70688412+yiweichi@users.noreply.github.com>
2025-03-17 15:32:44 +08:00
Morty
3454c6c670 update scroll-proving-sdk commit 2025-03-16 06:03:28 +08:00
colinlyguo
901693a2c0 remove a log 2025-03-14 18:42:31 +08:00
Ho
0bb53140f5 fix: byte48 type required in prover (#1627)
Signed-off-by: noelwei <fan@scroll.io>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2025-03-14 17:39:45 +08:00
Morty
09790c4448 fix: clap deprecated warnings 2025-03-14 15:23:30 +08:00
colinlyguo
ae212a919a fix logs 2025-03-14 14:02:07 +08:00
colinlyguo
9b5c42e9d9 tweak logs 2025-03-14 13:46:44 +08:00
Rohit Narurkar
60877d3c16 fix: reduce segment len in prover config (#1631) 2025-03-13 21:28:28 +08:00
Morty
07d1621310 update scroll-proving-sdk commit (#1630) 2025-03-13 19:08:48 +08:00
colinlyguo
11afeb1354 update verifier 2025-03-13 15:31:07 +08:00
Morty
cf41048c0a Update rc3 (#1629) 2025-03-13 15:23:17 +08:00
colinlyguo
77d63226c5 fix 2025-03-13 14:29:53 +08:00
colinlyguo
135073c0ad Reapply "feat: add euclidv2 verifier"
This reverts commit 7d5b77a36c.
2025-03-13 14:26:31 +08:00
colin
bab0e4f8d6 Merge branch 'develop' into feat/openvm-euclid-v2 2025-03-13 13:35:00 +08:00
colinlyguo
2d620ddf4f fix 2025-03-13 05:40:02 +08:00
colinlyguo
8befb84910 fix 2025-03-13 04:51:27 +08:00
colinlyguo
4822d38aba fix 2025-03-13 04:45:54 +08:00
colinlyguo
cb87c7aedd another type fix 2025-03-13 04:24:35 +08:00
colinlyguo
3a3db5fe32 add logs 2025-03-13 03:52:01 +08:00
colinlyguo
b4546af434 remove load vks 2025-03-13 03:29:38 +08:00
colinlyguo
459941d942 use hexutil.Big 2025-03-13 03:27:38 +08:00
colinlyguo
9f480e5397 fix 2025-03-13 03:12:11 +08:00
colinlyguo
7d4ff80edf update type error 2025-03-13 03:07:52 +08:00
colinlyguo
5869bfd825 load low version open vm circuit 2025-03-13 02:59:42 +08:00
colinlyguo
12a262ad99 align new types 2025-03-13 02:25:49 +08:00
colinlyguo
7d5b77a36c Revert "feat: add euclidv2 verifier"
This reverts commit ef9e25f14c.
2025-03-13 00:40:24 +08:00
Morty
5f8bb53dce update scroll-proving-sdk version (#1626) 2025-03-12 23:43:40 +08:00
colinlyguo
87e1235c7f fix 2025-03-12 23:31:47 +08:00
colin
86e6555a54 Merge branch 'develop' into feat/openvm-euclid-v2 2025-03-12 23:19:12 +08:00
colinlyguo
e3b17a0740 add PostMsgQueueHash in chunk info 2025-03-12 23:18:06 +08:00
Ömer Faruk Irmak
ef9e25f14c feat: add euclidv2 verifier 2025-03-12 18:17:59 +03:00
Morty
0fc28cb511 fix(coordinator): euclid v2 prover compatibility (#1625) 2025-03-12 22:22:06 +08:00
colinlyguo
ad2e94e190 support euclidV2 in proof handling 2025-03-12 22:07:44 +08:00
colinlyguo
2846ecffa5 fix a compilatin error 2025-03-12 19:51:26 +08:00
colinlyguo
0e82c63ac4 add back dbg logs 2025-03-12 19:30:02 +08:00
colinlyguo
9996af6227 Revert "revert dbg commits"
This reverts commit fe6451b76c.
2025-03-12 19:29:05 +08:00
colinlyguo
8cf087c63b update scroll-proving-sdk 2025-03-12 18:22:40 +08:00
colinlyguo
b984341991 euclid & euclidv2 compatible logic 2025-03-12 17:44:58 +08:00
colinlyguo
7486236a7a update scroll-proving-sdk 2025-03-12 17:26:45 +08:00
kunxian xia
a6ed321666 update scroll-zkvm-prover v0.1.1-rc.2 2025-03-12 17:10:16 +08:00
colinlyguo
8db4e5c77d update scroll-proving-sdk 2025-03-12 15:55:57 +08:00
colinlyguo
5cf8cda8a7 add logs 2025-03-12 15:06:21 +08:00
colinlyguo
bcc6b0f7e0 add PrevMsgQueueHash in chunk task detail 2025-03-12 14:52:27 +08:00
Rohit Narurkar
fe6451b76c revert dbg commits 2025-03-11 18:34:05 +00:00
Rohit Narurkar
be88ef6c39 dbg: more println 2025-03-11 17:54:00 +00:00
Rohit Narurkar
64368f9a79 dbg: proof types from req 2025-03-11 17:27:21 +00:00
Rohit Narurkar
f288179451 dbg: add dbg logs for prover vks 2025-03-11 17:09:34 +00:00
Rohit Narurkar
b8c7ec2b22 fix: compilation 2025-03-11 17:01:32 +00:00
colinlyguo
88da49383c update prover client 2025-03-12 00:30:16 +08:00
colin
1ea9acafa3 Merge branch 'develop' into feat/openvm-euclid-v2 2025-03-11 22:39:46 +08:00
colin
c743efd99e Merge branch 'develop' into feat/openvm-euclid-v2 2025-03-11 21:48:56 +08:00
Rohit Narurkar
2d40f0f942 fix: chunk task deserialisation from prove request input (#1620) 2025-03-11 21:45:53 +08:00
colinlyguo
fcbaa674c6 update dependencies 2025-03-11 21:45:53 +08:00
colinlyguo
110083c6c8 update rust version in CI 2025-03-11 21:45:53 +08:00
colinlyguo
b3c1df7557 revert a change 2025-03-11 21:45:53 +08:00
colinlyguo
893bf18d62 update Cargo.lock 2025-03-11 21:45:52 +08:00
colinlyguo
7ec6d478b3 feat: openvm euclid v2 2025-03-11 21:45:51 +08:00
colin
eacdc78ba7 fix(bridge-history): overwrite l2geth dependency by replace (#1617) 2025-03-11 19:18:13 +08:00
georgehao
2cc9f65852 chore: auto version bump [bot] 2025-03-11 10:45:50 +00:00
jonastheis
af381223f3 chore: auto version bump [bot] 2025-03-10 23:14:41 +00:00
Jonas Theis
bb6ee2c932 Merge branch 'develop' into feat/use-codec-v6 2025-03-11 07:14:23 +08:00
jonastheis
e99a8515b9 Merge branch 'feat/use-codec-v6' of github.com:scroll-tech/scroll into feat/use-codec-v6 2025-03-10 20:29:20 +08:00
jonastheis
38b3239c6b chore: auto version bump [bot] 2025-03-10 12:29:04 +00:00
jonastheis
d987931e30 go mod tidy 2025-03-10 20:28:59 +08:00
jonastheis
90d15637eb Merge remote-tracking branch 'origin/develop' into feat/use-codec-v6 2025-03-10 20:28:29 +08:00
jonastheis
4d677b344b address review comments 2025-03-10 18:13:02 +08:00
jonastheis
d57e6b0e7b chore: auto version bump [bot] 2025-03-10 09:59:24 +00:00
jonastheis
9b462e4c98 go mod tidy 2025-03-10 17:54:52 +08:00
jonastheis
c9f6e8c6e1 Merge remote-tracking branch 'origin/develop' into feat/use-codec-v6 2025-03-10 17:54:23 +08:00
jonastheis
867307d576 Merge remote-tracking branch 'origin/omerfirmak/euclid-prover' into feat/use-codec-v6 2025-03-10 16:47:34 +08:00
omerfirmak
20dffe4ea5 chore: auto version bump [bot] 2025-03-10 08:43:29 +00:00
Ömer Faruk Irmak
57d50b7183 Merge branch 'develop' into omerfirmak/euclid-prover 2025-03-10 11:33:11 +03:00
jonastheis
7a70e374b8 fix test 2025-03-10 15:22:14 +08:00
jonastheis
0799dd48f2 fix linter 2025-03-10 14:34:46 +08:00
jonastheis
224546e380 add new ABI and add more tests 2025-03-10 14:33:43 +08:00
jonastheis
95adcc378f fix tests 2025-03-10 14:03:02 +08:00
jonastheis
47219f2d86 run goimports 2025-03-10 12:15:04 +08:00
jonastheis
ab7038c0a7 Merge branch 'feat/use-codec-v6' of github.com:scroll-tech/scroll into feat/use-codec-v6 2025-03-10 12:13:23 +08:00
jonastheis
d79aaef35a fix CI 2025-03-10 12:12:50 +08:00
colinlyguo
da963313b6 tweak comments and some renamings 2025-03-09 22:58:29 +08:00
jonastheis
f27ddb7f8e remove debug line 2025-03-08 16:42:53 +08:00
colin
94bee1903a feat(bridge-history): support codecv7 (#1609)
Co-authored-by: jonastheis <4181434+jonastheis@users.noreply.github.com>
2025-03-08 16:35:27 +08:00
jonastheis
b7e7d1a1f1 Merge remote-tracking branch 'origin/omerfirmak/euclid-prover' into feat/use-codec-v6 2025-03-08 11:59:41 +08:00
jonastheis
f1ea4b315c process task with CodecV7, add check for BlobDataProof length and add some comments 2025-03-08 11:57:36 +08:00
colinlyguo
8b08a57f63 Revert "simplify version checks"
This reverts commit a868bc1531.
2025-03-08 01:12:48 +08:00
colinlyguo
a868bc1531 simplify version checks 2025-03-08 01:08:29 +08:00
colinlyguo
101cc46bd9 update dependencies 2025-03-07 23:58:59 +08:00
Morty
9f4c9ee150 fix: prover version (#1611) 2025-03-07 23:12:50 +08:00
Ömer Faruk Irmak
03c63a62cf update batch/chunk details for phase2 2025-03-07 16:15:20 +03:00
colin
b30f4d0b00 chore(zkvm-circuit): upgrade to 0.1.0-rc.6 (#1610) 2025-03-07 13:28:49 +08:00
colinlyguo
4333d51bef Revert "feat(bridge-history): support codecv7"
This reverts commit 82dd5e0e5e.
2025-03-07 01:53:10 +08:00
colinlyguo
82dd5e0e5e feat(bridge-history): support codecv7 2025-03-07 01:52:35 +08:00
Péter Garamvölgyi
f91c999005 fix batch proposer panic 2025-03-06 14:29:11 +01:00
colinlyguo
c8b614fd2f unit tests fix 2025-03-06 19:20:46 +08:00
jonastheis
a1c4562432 add configuration parameter maxChunksPerBatch for batch proposer 2025-03-06 18:42:36 +08:00
jonastheis
d6674e8a3d add configuration parameter maxChunksPerBatch for batch proposer 2025-03-06 18:35:01 +08:00
jonastheis
55b32e1c0c add debug log message 2025-03-06 13:37:57 +08:00
jonastheis
8ea431514d update go.work.sum 2025-03-06 13:36:32 +08:00
jonastheis
26a49cb2a3 Merge remote-tracking branch 'origin/omerfirmak/euclid-prover' into feat/use-codec-v6 2025-03-06 13:35:30 +08:00
Ömer Faruk Irmak
e27ab5a396 Merge remote-tracking branch 'origin/develop' into omerfirmak/euclid-prover 2025-03-05 10:43:19 +03:00
Ömer Faruk Irmak
554a233928 refactor: move euclid prover to new subdir 2025-03-04 14:17:53 +03:00
jonastheis
673777fe63 use go 1.22 in Dockerfile builder 2025-02-28 21:50:09 +07:00
jonastheis
7353f30ff6 update l2geth version in go.mod 2025-02-28 21:49:32 +07:00
Rohit Narurkar
eb5758b693 feat: bump to zkvm-prover rc5 2025-02-28 13:03:55 +03:00
jonastheis
47a6c23b1f fix bug where chunk and batch blocks mismatch 2025-02-28 08:25:52 +07:00
jonastheis
081d28988d update da-codec dependency 2025-02-27 15:33:54 +08:00
jonastheis
782e019f9c Merge branch 'feat/use-codec-v6' of github.com:scroll-tech/scroll into feat/use-codec-v6 2025-02-27 15:05:37 +08:00
jonastheis
89ede0d315 build rollup images with --platform=linux/amd64 2025-02-27 15:04:26 +08:00
colin
a55de1fc09 feat: set code tx support (#1600) 2025-02-27 13:49:32 +08:00
jonastheis
ed394a6369 make sure that all batches committed in the same tx are part of the same bundle 2025-02-27 10:50:09 +08:00
jonastheis
121ce09c80 update config and adjust to new contract ABI 2025-02-27 10:47:56 +08:00
jonastheis
0125dd62a6 add new contract ABI 2025-02-27 10:43:48 +08:00
Ömer Faruk Irmak
bb9d404e85 fix: force byte arrays to be marshaled as JSON arrays 2025-02-24 18:14:28 +03:00
jonastheis
e1a0bab452 add new contract ABI and adjust submission to it 2025-02-24 19:41:53 +08:00
Ömer Faruk Irmak
50ebf179fd fix: generate evm proofs for bundles 2025-02-21 21:25:25 +03:00
Ömer Faruk Irmak
01fa3b34a7 feat: use verifier-only types in coordinator 2025-02-21 16:06:27 +03:00
Ömer Faruk Irmak
2e9827a750 fix: properly propagate verifier errors 2025-02-21 12:05:58 +03:00
Ömer Faruk Irmak
867fda6952 fix: add batch tasks details for v6 codec 2025-02-20 20:06:57 +03:00
jonastheis
fbc14ac91b address review comments 2025-02-20 20:20:13 +08:00
Ömer Faruk Irmak
37924b0ae7 feat: bump zkvm-prover to rc4 2025-02-20 15:06:45 +03:00
jonastheis
8b57dd6381 fixes after merge 2025-02-20 16:27:37 +08:00
jonastheis
f13863e542 Merge remote-tracking branch 'origin/omerfirmak/euclid-prover' into feat/use-codec-v6 2025-02-20 16:25:57 +08:00
jonastheis
d3acd6b510 adjust to recent CodecV7 changes. remove initialL1MessageQueueIndex and renaming to prev and post L1MessageQueueHash 2025-02-19 20:09:56 +08:00
Ömer Faruk Irmak
83c73f8458 fix: coordinator ignore euclid transition chunk 2025-02-17 14:50:08 +03:00
Ömer Faruk Irmak
bf084368c5 fix: prover using wrong resources for batch/bundle circuits 2025-02-17 13:27:20 +03:00
Ömer Faruk Irmak
d503d4a990 fix: base64 encode VKs 2025-02-17 12:52:49 +03:00
Ömer Faruk Irmak
ac17696171 feat: update contracts to euclid version 2025-02-16 16:39:52 +03:00
Ömer Faruk Irmak
b424cef816 feat: add CPU prover dockerfile 2025-02-16 16:39:52 +03:00
Ömer Faruk Irmak
e5ad9c618d feat: allow stopping fake finalization at fork boundary 2025-02-16 16:39:52 +03:00
Ömer Faruk Irmak
848d3a6827 fix: re-enable supported forks check 2025-02-16 16:39:52 +03:00
Ömer Faruk Irmak
2bd0655fda feat: Add Euclid verifier 2025-02-16 16:39:49 +03:00
Ömer Faruk Irmak
f01af24908 fix: potentially flaky testResubmitZeroGasPriceTransaction 2025-02-11 17:48:21 +03:00
Ömer Faruk Irmak
2de45f0d54 feat(coordinator): add euclid proof types
Co-authored-by: noelwei <fan@scroll.io>
2025-02-11 17:48:21 +03:00
Ömer Faruk Irmak
c3a3bad800 feat(rollup-relayer): add Euclid support 2025-02-11 17:48:21 +03:00
Ömer Faruk Irmak
9412c7ff3a feat(coordinator): abstract proof types behind an interface 2025-02-11 17:48:21 +03:00
Ömer Faruk Irmak
5f2295043e feat(prover): euclid prover first draft 2025-02-11 17:48:15 +03:00
jonastheis
69a80d4a4a add testCommitBatchAndFinalizeBundleCodecV7 for relayer 2025-02-10 12:51:47 +08:00
jonastheis
8db5339c1f fix issues with chunks when handling CodecV7 2025-02-10 12:22:25 +08:00
jonastheis
99c0a9fac5 update to latest da-codec 2025-02-10 12:21:45 +08:00
jonastheis
f4e17bcca6 fix ci 2025-02-06 15:30:58 +08:00
jonastheis
e713424e5c update contextIDFromBatches prefix to v7 2025-02-05 17:25:07 +08:00
jonastheis
2efbbd7d77 add support for bundles with CodecV7 2025-02-05 17:05:17 +08:00
jonastheis
310abdd543 add InitialL1MessageQueueHash and LastL1MessageQueueHash to chunk and batch 2025-02-05 13:31:14 +08:00
jonastheis
5a479c3a08 address review comments 2025-02-05 11:28:49 +08:00
jonastheis
783b965deb update go.mod to latest da-codec version V7 2025-02-05 11:28:38 +08:00
jonastheis
182f8e307c update go.mod to latest da-codec version V7 2025-01-28 11:18:28 +08:00
jonastheis
b460d4a717 Merge remote-tracking branch 'origin/omerfirmak/mpt' into feat/use-codec-v6 2025-01-28 11:17:09 +08:00
Ömer Faruk Irmak
421afe9c30 deprecate halo2 provers, start openvm prover 2025-01-27 14:03:03 +03:00
Ömer Faruk Irmak
ca8d930bd6 feat(rollup-relayer): add Euclid support 2025-01-13 11:23:11 +03:00
jonastheis
940fde0cbf implement missing part of commit logic for CodecV6 2024-12-31 14:11:40 +08:00
jonastheis
78c99636dc implement batch submission of multiple batches per transaction and contextID logic to support multiple batches by concatenating batch hashes 2024-12-31 13:56:51 +08:00
jonastheis
0c0c417829 adjust Sender to support multiple blobs when sending a transaction 2024-12-31 13:55:38 +08:00
jonastheis
41606fe7d7 support first version of CodecV6 in relayer and add functionality to submit multiple batches in a single transaction 2024-12-30 18:48:18 +08:00
22 changed files with 2094 additions and 1325 deletions

File diff suppressed because it is too large.

View File

@@ -14,8 +14,8 @@ ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }
[dependencies]
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-verifier" }
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.11", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.11", package = "scroll-zkvm-verifier" }
base64 = "0.13.0"
env_logger = "0.9.0"

View File

@@ -1,9 +1,11 @@
#![allow(static_mut_refs)]
mod euclid;
mod euclidv2;
use anyhow::{bail, Result};
use euclid::EuclidVerifier;
use euclidv2::EuclidV2Verifier;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, path::Path, rc::Rc};
@@ -51,7 +53,17 @@ pub fn init(config: VerifierConfig) {
unsafe {
VERIFIER_LOW
.set(VerifierPair(
config.high_version_circuit.fork_name,
"euclid".to_string(),
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
let verifier = EuclidV2Verifier::new(&config.high_version_circuit.assets_path);
unsafe {
VERIFIER_HIGH
.set(VerifierPair(
"euclidV2".to_string(),
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();

View File

@@ -4,13 +4,13 @@ use anyhow::Result;
use crate::utils::panic_catch;
use euclid_prover::{BatchProof, BundleProof, ChunkProof};
use euclid_verifier::verifier::{BatchVerifier, BundleVerifier, ChunkVerifier};
use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV1, ChunkVerifier};
use std::{fs::File, path::Path};
pub struct EuclidVerifier {
chunk_verifier: ChunkVerifier,
batch_verifier: BatchVerifier,
bundle_verifier: BundleVerifier,
bundle_verifier: BundleVerifierEuclidV1,
}
impl EuclidVerifier {
@@ -24,7 +24,7 @@ impl EuclidVerifier {
.expect("Setting up chunk verifier"),
batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
.expect("Setting up batch verifier"),
bundle_verifier: BundleVerifier::setup(&config, &exe, &verifier_bin)
bundle_verifier: BundleVerifierEuclidV1::setup(&config, &exe, &verifier_bin)
.expect("Setting up bundle verifier"),
}
}

View File

@@ -0,0 +1,65 @@
use super::{ProofVerifier, TaskType, VKDump};
use anyhow::Result;
use crate::utils::panic_catch;
use euclid_prover::{BatchProof, BundleProof, ChunkProof};
use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
use std::{fs::File, path::Path};
pub struct EuclidV2Verifier {
chunk_verifier: ChunkVerifier,
batch_verifier: BatchVerifier,
bundle_verifier: BundleVerifierEuclidV2,
}
impl EuclidV2Verifier {
pub fn new(assets_dir: &str) -> Self {
let verifier_bin = Path::new(assets_dir).join("verifier.bin");
let config = Path::new(assets_dir).join("root-verifier-vm-config");
let exe = Path::new(assets_dir).join("root-verifier-committed-exe");
Self {
chunk_verifier: ChunkVerifier::setup(&config, &exe, &verifier_bin)
.expect("Setting up chunk verifier"),
batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
.expect("Setting up batch verifier"),
bundle_verifier: BundleVerifierEuclidV2::setup(&config, &exe, &verifier_bin)
.expect("Setting up bundle verifier"),
}
}
}
impl ProofVerifier for EuclidV2Verifier {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.chunk_verifier
.verify_proof(proof.proof.as_root_proof().unwrap())
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.batch_verifier
.verify_proof(proof.proof.as_root_proof().unwrap())
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.bundle_verifier
.verify_proof_evm(&proof.proof.as_evm_proof().unwrap())
}
})
.map_err(|err_str: String| anyhow::anyhow!(err_str))
}
fn dump_vk(&self, file: &Path) {
let f = File::create(file).expect("Failed to open file to dump VK");
let dump = VKDump {
chunk_vk: base64::encode(self.chunk_verifier.get_app_vk()),
batch_vk: base64::encode(self.batch_verifier.get_app_vk()),
bundle_vk: base64::encode(self.bundle_verifier.get_app_vk()),
};
serde_json::to_writer(f, &dump).expect("Failed to dump VK");
}
}

View File

@@ -4,12 +4,18 @@ import (
"encoding/json"
"errors"
"fmt"
"math/big"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
)
const (
euclidFork = "euclid"
EuclidFork = "euclid"
EuclidV2Fork = "euclidV2"
EuclidForkNameForProver = "euclidv1"
EuclidV2ForkNameForProver = "euclidv2"
)
// ProofType represents the type of task.
@@ -39,38 +45,102 @@ const (
ProofTypeBundle
)
// ChunkTaskDetail is a type containing ChunkTask detail.
// ChunkTaskDetail is a type containing ChunkTask detail for chunk task.
type ChunkTaskDetail struct {
BlockHashes []common.Hash `json:"block_hashes"`
// use one of the EuclidFork / EuclidV2Fork strings
ForkName string `json:"fork_name"`
ForkName string `json:"fork_name"`
BlockHashes []common.Hash `json:"block_hashes"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
}
// Byte48 is a hex-encoded big integer with a fixed length of 48 bytes
type Byte48 struct {
hexutil.Big
}
func (e Byte48) MarshalText() ([]byte, error) {
i := e.ToInt()
// override the default big-integer encoding
if sign := i.Sign(); sign < 0 {
// sanity check
return nil, errors.New("Byte48 must be positive integer")
} else {
s := i.Text(16)
if len(s) > 96 {
return nil, errors.New("integer Exceed 384bit")
}
return []byte(fmt.Sprintf("0x%0*s", 96, s)), nil
}
}
func isString(input []byte) bool {
return len(input) >= 2 && input[0] == '"' && input[len(input)-1] == '"'
}
// hexutil.Big is limited to 256 bits, so we have to override UnmarshalJSON ourselves
func (e *Byte48) UnmarshalJSON(input []byte) error {
if !isString(input) {
return errors.New("not hex string")
}
b, err := hexutil.Decode(string(input[1 : len(input)-1]))
if err != nil {
return err
}
if len(b) != 48 {
return fmt.Errorf("not a 48 bytes hex string: %d", len(b))
}
var dec big.Int
dec.SetBytes(b)
*e = Byte48{(hexutil.Big)(dec)}
return nil
}
// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
KzgProof []byte `json:"kzg_proof"`
KzgCommitment []byte `json:"kzg_commitment"`
Challenge common.Hash `json:"challenge"`
// use one of the EuclidFork / EuclidV2Fork strings
ForkName string `json:"fork_name"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
KzgProof Byte48 `json:"kzg_proof,omitempty"`
KzgCommitment Byte48 `json:"kzg_commitment,omitempty"`
ChallengeDigest *common.Hash `json:"challenge_digest,omitempty"`
}
// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
BatchProofs []BatchProof `json:"batch_proofs"`
// use one of the EuclidFork / EuclidV2Fork strings
ForkName string `json:"fork_name"`
BatchProofs []BatchProof `json:"batch_proofs"`
BundleInfo *OpenVMBundleInfo `json:"bundle_info,omitempty"`
}
// ChunkInfo is for calculating pi_hash for chunk
type ChunkInfo struct {
ChainID uint64 `json:"chain_id"`
PrevStateRoot common.Hash `json:"prev_state_root"`
PostStateRoot common.Hash `json:"post_state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
TxBytesHash common.Hash `json:"tx_data_digest"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
ChainID uint64 `json:"chain_id"`
PrevStateRoot common.Hash `json:"prev_state_root"`
PostStateRoot common.Hash `json:"post_state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
TxBytesHash common.Hash `json:"tx_data_digest"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
TxDataLength uint64 `json:"tx_data_length"`
InitialBlockNumber uint64 `json:"initial_block_number"`
BlockCtxs []BlockContextV2 `json:"block_ctxs"`
}
// BlockContextV2 is the block context for euclid v2
type BlockContextV2 struct {
Timestamp uint64 `json:"timestamp"`
BaseFee hexutil.Big `json:"base_fee"`
GasLimit uint64 `json:"gas_limit"`
NumTxs uint16 `json:"num_txs"`
NumL1Msgs uint16 `json:"num_l1_msgs"`
}
// SubCircuitRowUsage tracing info added in v0.11.0rc8
@@ -87,7 +157,7 @@ type ChunkProof interface {
// NewChunkProof creates a new ChunkProof instance.
func NewChunkProof(hardForkName string) ChunkProof {
switch hardForkName {
case euclidFork:
case EuclidFork, EuclidV2Fork:
return &OpenVMChunkProof{}
default:
return &Halo2ChunkProof{}
@@ -121,7 +191,7 @@ type BatchProof interface {
// NewBatchProof creates a new BatchProof instance.
func NewBatchProof(hardForkName string) BatchProof {
switch hardForkName {
case euclidFork:
case EuclidFork, EuclidV2Fork:
return &OpenVMBatchProof{}
default:
return &Halo2BatchProof{}
@@ -178,7 +248,7 @@ type BundleProof interface {
// NewBundleProof creates a new BundleProof instance.
func NewBundleProof(hardForkName string) BundleProof {
switch hardForkName {
case euclidFork:
case EuclidFork, EuclidV2Fork:
return &OpenVMBundleProof{}
default:
return &Halo2BundleProof{}
@@ -258,12 +328,14 @@ func (p *OpenVMChunkProof) Proof() []byte {
// OpenVMBatchInfo is for calculating pi_hash for batch header
type OpenVMBatchInfo struct {
ParentBatchHash common.Hash `json:"parent_batch_hash"`
ParentStateRoot common.Hash `json:"parent_state_root"`
StateRoot common.Hash `json:"state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
BatchHash common.Hash `json:"batch_hash"`
ChainID uint64 `json:"chain_id"`
ParentBatchHash common.Hash `json:"parent_batch_hash"`
ParentStateRoot common.Hash `json:"parent_state_root"`
StateRoot common.Hash `json:"state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
BatchHash common.Hash `json:"batch_hash"`
ChainID uint64 `json:"chain_id"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
}
// BatchProof includes the proof info that are required for batch verification and rollup.
@@ -323,6 +395,7 @@ type OpenVMBundleInfo struct {
NumBatches uint32 `json:"num_batches"`
PrevBatchHash common.Hash `json:"prev_batch_hash"`
BatchHash common.Hash `json:"batch_hash"`
MsgQueueHash common.Hash `json:"msg_queue_hash"`
}
// OpenVMBundleProof includes the proof info that are required for verification of a bundle of batch proofs.
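
The fork-name constants and the NewChunkProof/NewBatchProof/NewBundleProof constructors above switch between the legacy Halo2 proof types and the OpenVM ones. A short usage sketch, assuming it runs inside this repo's Go workspace so that scroll-tech/common/types/message resolves ("darwin" stands in for any pre-Euclid fork name):

```go
package main

import (
	"fmt"

	"scroll-tech/common/types/message"
)

func main() {
	// EuclidFork and EuclidV2Fork both select the OpenVM proof types;
	// any other fork name (here "darwin") falls back to the Halo2 types.
	for _, fork := range []string{message.EuclidFork, message.EuclidV2Fork, "darwin"} {
		fmt.Printf("%s -> %T / %T / %T\n",
			fork,
			message.NewChunkProof(fork),
			message.NewBatchProof(fork),
			message.NewBundleProof(fork))
	}
}
```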

View File

@@ -0,0 +1,22 @@
package message
import (
"fmt"
"testing"
)
func TestBytes48(t *testing.T) {
ti := &Byte48{}
ti.UnmarshalText([]byte("0x1"))
if s, err := ti.MarshalText(); err == nil {
if len(s) != 98 {
panic(fmt.Sprintf("wrong str: %s", s))
}
}
ti.UnmarshalText([]byte("0x0"))
if s, err := ti.MarshalText(); err == nil {
if len(s) != 98 {
panic(fmt.Sprintf("wrong str: %s", s))
}
}
}
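
This test pins the fixed-width encoding that Byte48.MarshalText produces: "0x" plus 96 zero-padded hex digits, 98 characters in total, while UnmarshalJSON requires the input to decode to exactly 48 bytes. A self-contained sketch of that encode/decode rule using only the standard library, independent of the actual message package:

```go
package main

import (
	"encoding/hex"
	"errors"
	"fmt"
	"math/big"
	"strings"
)

// encode48 mirrors the MarshalText rule: left-pad the hex form of a
// non-negative integer to 96 characters and prefix it with "0x" (98 chars total).
func encode48(i *big.Int) (string, error) {
	if i.Sign() < 0 {
		return "", errors.New("value must be a non-negative integer")
	}
	s := i.Text(16)
	if len(s) > 96 {
		return "", errors.New("integer exceeds 384 bits")
	}
	return "0x" + strings.Repeat("0", 96-len(s)) + s, nil
}

// decode48 mirrors the UnmarshalJSON rule: the hex string must decode to exactly 48 bytes.
func decode48(s string) (*big.Int, error) {
	b, err := hex.DecodeString(strings.TrimPrefix(s, "0x"))
	if err != nil {
		return nil, err
	}
	if len(b) != 48 {
		return nil, fmt.Errorf("not a 48-byte hex string: %d", len(b))
	}
	return new(big.Int).SetBytes(b), nil
}

func main() {
	// A KZG commitment or proof is a 48-byte value; a tiny integer is used here for brevity.
	enc, _ := encode48(big.NewInt(1))
	fmt.Println(len(enc), enc[:6]+"...") // 98 0x0000...

	dec, err := decode48(enc)
	fmt.Println(dec, err) // 1 <nil>
}
```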

View File

@@ -9,6 +9,7 @@ import (
"github.com/scroll-tech/go-ethereum/log"
"gorm.io/gorm"
"scroll-tech/common/types/message"
"scroll-tech/common/version"
"scroll-tech/coordinator/internal/config"
@@ -42,7 +43,7 @@ func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *Logi
var highHardForks []string
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
if cfg.ProverManager.Verifier.HighVersionCircuit.ForkName != "euclid" {
if cfg.ProverManager.Verifier.HighVersionCircuit.ForkName != message.EuclidFork && cfg.ProverManager.Verifier.HighVersionCircuit.ForkName != message.EuclidV2Fork {
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.LowVersionCircuit.ForkName)
}
proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks

View File

@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"fmt"
"math/big"
"time"
"github.com/gin-gonic/gin"
@@ -11,6 +12,7 @@ import (
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -211,17 +213,23 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
WithdrawRoot: common.HexToHash(chunk.WithdrawRoot),
DataHash: common.HexToHash(chunk.Hash),
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
PostMsgQueueHash: common.HexToHash(chunk.PostL1MessageQueueHash),
IsPadding: false,
}
if haloProot, ok := proof.(*message.Halo2ChunkProof); ok {
if haloProot.ChunkInfo != nil {
chunkInfo.TxBytes = haloProot.ChunkInfo.TxBytes
if halo2Proof, ok := proof.(*message.Halo2ChunkProof); ok {
if halo2Proof.ChunkInfo != nil {
chunkInfo.TxBytes = halo2Proof.ChunkInfo.TxBytes
}
}
if openvmProof, ok := proof.(*message.OpenVMChunkProof); ok {
chunkInfo.InitialBlockNumber = openvmProof.MetaData.ChunkInfo.InitialBlockNumber
chunkInfo.BlockCtxs = openvmProof.MetaData.ChunkInfo.BlockCtxs
chunkInfo.TxDataLength = openvmProof.MetaData.ChunkInfo.TxDataLength
}
chunkInfos = append(chunkInfos, &chunkInfo)
}
taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs, hardForkName)
if err != nil {
return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
}
@@ -238,6 +246,9 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
TaskData: string(chunkProofsBytes),
HardForkName: hardForkName,
}
log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeBatch.String(), "hard_fork_name", hardForkName, "task_data", taskMsg.TaskData)
return taskMsg, nil
}
@@ -247,12 +258,18 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
}
}
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []message.ChunkProof) (*message.BatchTaskDetail, error) {
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []message.ChunkProof, hardForkName string) (*message.BatchTaskDetail, error) {
taskDetail := &message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if hardForkName == message.EuclidV2Fork {
taskDetail.ForkName = message.EuclidV2ForkNameForProver
} else if hardForkName == message.EuclidFork {
taskDetail.ForkName = message.EuclidForkNameForProver
}
dbBatchCodecVersion := encoding.CodecVersion(dbBatch.CodecVersion)
switch dbBatchCodecVersion {
case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6, encoding.CodecV7:
@@ -280,8 +297,12 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*
// | z | y | kzg_commitment | kzg_proof |
// |---------|---------|----------------|-----------|
// | bytes32 | bytes32 | bytes48 | bytes48 |
taskDetail.KzgProof = dbBatch.BlobDataProof[112:160]
taskDetail.KzgCommitment = dbBatch.BlobDataProof[64:112]
taskDetail.Challenge = common.Hash(dbBatch.BlobDataProof[0:32])
taskDetail.KzgProof = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
taskDetail.KzgCommitment = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}
// FIXME: Challenge = ChallengeDigest % BLS_MODULUS; we need the original ChallengeDigest.
// Simply omit the field for now to skip the sanity check on the prover side.
// Restore it later (once the FIXME is resolved or the prover side relaxes its check).
// taskDetail.ChallengeDigest = new(common.Hash)
// *taskDetail.ChallengeDigest = common.BytesToHash(dbBatch.BlobDataProof[0:32])
return taskDetail, nil
}
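
The layout comment above fixes the offsets inside dbBatch.BlobDataProof, and the FIXME records that the first 32 bytes (z) are the challenge digest reduced modulo the BLS scalar field. A self-contained sketch of the slicing and of that reduction; the modulus is the EIP-4844 BLS_MODULUS, and the input bytes are placeholders:

```go
package main

import (
	"fmt"
	"math/big"
)

// blsModulus is the BLS12-381 scalar field modulus (EIP-4844 BLS_MODULUS).
var blsModulus, _ = new(big.Int).SetString(
	"52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

func main() {
	// Placeholder 160-byte blob data proof:
	// | z (32) | y (32) | kzg_commitment (48) | kzg_proof (48) |
	blobDataProof := make([]byte, 160)

	z := blobDataProof[0:32]
	y := blobDataProof[32:64]
	kzgCommitment := blobDataProof[64:112]
	kzgProof := blobDataProof[112:160]
	fmt.Println(len(z), len(y), len(kzgCommitment), len(kzgProof)) // 32 32 48 48

	// Relation noted in the FIXME: z = ChallengeDigest % BLS_MODULUS, so the
	// original 32-byte challenge digest cannot be recovered from z alone.
	challengeDigest := new(big.Int).SetBytes(make([]byte, 32)) // placeholder digest
	zFromDigest := new(big.Int).Mod(challengeDigest, blsModulus)
	fmt.Println(zFromDigest.Cmp(new(big.Int).SetBytes(z)) == 0) // true for the all-zero placeholder
}
```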

View File

@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -194,6 +195,11 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
return nil, fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", task.TaskID)
}
parentBatch, err := bp.batchOrm.GetBatchByHash(ctx, batches[0].ParentBatchHash)
if err != nil {
return nil, fmt.Errorf("failed to get parent batch for batch task id:%s err:%w", task.TaskID, err)
}
var batchProofs []message.BatchProof
for _, batch := range batches {
proof := message.NewBatchProof(hardForkName)
@@ -207,6 +213,26 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
BatchProofs: batchProofs,
}
if hardForkName == message.EuclidV2Fork {
taskDetail.ForkName = message.EuclidV2ForkNameForProver
} else if hardForkName == message.EuclidFork {
taskDetail.ForkName = message.EuclidForkNameForProver
}
taskDetail.BundleInfo = &message.OpenVMBundleInfo{
ChainID: bp.cfg.L2.ChainID,
PrevStateRoot: common.HexToHash(parentBatch.StateRoot),
PostStateRoot: common.HexToHash(batches[len(batches)-1].StateRoot),
WithdrawRoot: common.HexToHash(batches[len(batches)-1].WithdrawRoot),
NumBatches: uint32(len(batches)),
PrevBatchHash: common.HexToHash(batches[0].ParentBatchHash),
BatchHash: common.HexToHash(batches[len(batches)-1].Hash),
}
if hardForkName == message.EuclidV2Fork {
taskDetail.BundleInfo.MsgQueueHash = common.HexToHash(batches[len(batches)-1].PostL1MessageQueueHash)
}
batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
@@ -219,6 +245,9 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
TaskData: string(batchProofsBytes),
HardForkName: hardForkName,
}
log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeBundle.String(), "hard_fork_name", hardForkName, "task_data", taskMsg.TaskData)
return taskMsg, nil
}
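
The block above derives OpenVMBundleInfo from the first batch's parent and from the last batch in the bundle. A minimal sketch of that derivation against a hypothetical, trimmed-down batch record (the real code reads orm.Batch rows; JSON tags for the fields not shown in the hunk above are assumed to follow the same snake_case convention):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"
)

// batchRow is a hypothetical, trimmed-down stand-in for the orm.Batch record.
type batchRow struct {
	Hash                   string
	ParentBatchHash        string
	StateRoot              string
	WithdrawRoot           string
	PostL1MessageQueueHash string
}

// bundleInfo mirrors OpenVMBundleInfo; tags for fields outside the hunk above are assumed.
type bundleInfo struct {
	ChainID       uint64      `json:"chain_id"`
	PrevStateRoot common.Hash `json:"prev_state_root"`
	PostStateRoot common.Hash `json:"post_state_root"`
	WithdrawRoot  common.Hash `json:"withdraw_root"`
	NumBatches    uint32      `json:"num_batches"`
	PrevBatchHash common.Hash `json:"prev_batch_hash"`
	BatchHash     common.Hash `json:"batch_hash"`
	MsgQueueHash  common.Hash `json:"msg_queue_hash"`
}

func main() {
	parentBatch := batchRow{Hash: "0x0a", StateRoot: "0x01"}
	batches := []batchRow{
		{Hash: "0x0b", ParentBatchHash: parentBatch.Hash, StateRoot: "0x02", WithdrawRoot: "0x03", PostL1MessageQueueHash: "0x04"},
		{Hash: "0x0c", ParentBatchHash: "0x0b", StateRoot: "0x05", WithdrawRoot: "0x06", PostL1MessageQueueHash: "0x07"},
	}
	last := batches[len(batches)-1]

	info := bundleInfo{
		ChainID:       534352, // example chain ID only
		PrevStateRoot: common.HexToHash(parentBatch.StateRoot),
		PostStateRoot: common.HexToHash(last.StateRoot),
		WithdrawRoot:  common.HexToHash(last.WithdrawRoot),
		NumBatches:    uint32(len(batches)),
		PrevBatchHash: common.HexToHash(batches[0].ParentBatchHash),
		BatchHash:     common.HexToHash(last.Hash),
		MsgQueueHash:  common.HexToHash(last.PostL1MessageQueueHash), // only set for euclidV2 tasks
	}

	out, _ := json.MarshalIndent(info, "", "  ")
	fmt.Println(string(out))
}
```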

View File

@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -162,7 +163,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, hardForkName)
taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, chunkTask, hardForkName)
if err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("format prover task failure", "task_id", chunkTask.Hash, "err", err)
@@ -179,17 +180,27 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return taskMsg, nil
}
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, chunk *orm.Chunk, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
// Get block hashes.
blockHashes, dbErr := cp.blockOrm.GetL2BlockHashesByChunkHash(ctx, task.TaskID)
if dbErr != nil || len(blockHashes) == 0 {
return nil, fmt.Errorf("failed to fetch block hashes of a chunk, chunk hash:%s err:%w", task.TaskID, dbErr)
}
var taskDetailBytes []byte
taskDetail := message.ChunkTaskDetail{
BlockHashes: blockHashes,
BlockHashes: blockHashes,
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
}
blockHashesBytes, err := json.Marshal(taskDetail)
if hardForkName == message.EuclidV2Fork {
taskDetail.ForkName = message.EuclidV2ForkNameForProver
} else if hardForkName == message.EuclidFork {
taskDetail.ForkName = message.EuclidForkNameForProver
}
var err error
taskDetailBytes, err = json.Marshal(taskDetail)
if err != nil {
return nil, fmt.Errorf("failed to marshal block hashes hash:%s, err:%w", task.TaskID, err)
}
@@ -198,10 +209,12 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove
UUID: task.UUID.String(),
TaskID: task.TaskID,
TaskType: int(message.ProofTypeChunk),
TaskData: string(blockHashesBytes),
TaskData: string(taskDetailBytes),
HardForkName: hardForkName,
}
log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeChunk.String(), "hard_fork_name", hardForkName, "task_data", proverTaskSchema.TaskData)
return proverTaskSchema, nil
}
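
After this change the chunk task sent to a prover carries the prover-facing fork name and the previous L1 message queue hash next to the block hashes. A small sketch of the resulting JSON, using a local mirror of ChunkTaskDetail rather than the real type in scroll-tech/common/types/message:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"
)

// chunkTaskDetail mirrors message.ChunkTaskDetail and its JSON tags.
type chunkTaskDetail struct {
	ForkName         string        `json:"fork_name"`
	BlockHashes      []common.Hash `json:"block_hashes"`
	PrevMsgQueueHash common.Hash   `json:"prev_msg_queue_hash"`
}

func main() {
	detail := chunkTaskDetail{
		// For a euclidV2 chunk the coordinator sends the prover-facing name
		// "euclidv2" (message.EuclidV2ForkNameForProver).
		ForkName:         "euclidv2",
		BlockHashes:      []common.Hash{common.HexToHash("0x01"), common.HexToHash("0x02")},
		PrevMsgQueueHash: common.HexToHash("0x03"),
	}

	out, _ := json.Marshal(detail)
	fmt.Println(string(out))
	// {"fork_name":"euclidv2","block_hashes":["0x0...01","0x0...02"],"prev_msg_queue_hash":"0x0...03"} (hashes abbreviated)
}
```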

View File

@@ -122,7 +122,7 @@ func (b *BaseProverTask) hardForkSanityCheck(ctx *gin.Context, taskCtx *proverTa
}
if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
return "", errors.New("to be assigned prover task's hard-fork name is not the same as prover")
return "", fmt.Errorf("to be assigned prover task's hard-fork name is not the same as prover, proverName: %s, proverVersion: %s, proverSupportHardForkNames: %s, taskHardForkName: %v", taskCtx.ProverName, taskCtx.ProverVersion, taskCtx.HardForkNames, hardForkName)
}
return hardForkName, nil
}

View File

@@ -103,11 +103,16 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
return nil, err
}
if err := v.loadOpenVMVks(cfg.HighVersionCircuit.ForkName); err != nil {
if err := v.loadOpenVMVks(message.EuclidFork); err != nil {
return nil, err
}
v.loadCurieVersionVKs()
if err := v.loadOpenVMVks(message.EuclidV2Fork); err != nil {
return nil, err
}
v.loadDarwinVKs()
return v, nil
}
@@ -224,8 +229,9 @@ func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
return nil
}
func (v *Verifier) loadCurieVersionVKs() {
v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA="] = struct{}{}
func (v *Verifier) loadDarwinVKs() {
v.BundleVkMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD5dsp1rEy7PSqiIFikkkOPqKokLW2mZSwCbtKdkfLQcvTxARUwHSe4iZe27PRJ5WWaLqtRV1+x6+pSVKtcPtaV4kE7v2YJRf0582hxiAF0IBaOoREdpyNfA2a9cvhWb2TMaPrUYP9EDQ7CUiW1FQzxbjGc95ua2htscnpU7d9S5stHWzKb7okkCG7bTIL9aG6qTQo2YXW7n3H3Ir47oVJB7IKrUzKGvI5Wmanh2zpZOJ9Qm4/wY24cT7cJz+Ux6wAg=="] = struct{}{}
v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD1DEjW4Kell67H07wazT5DdzrSh4+amh+cmosQHp9p9snFypyoBGt3UHtoJGQBZlywZWDS9ht5pnaEoGBdaKcQk+lFb+WxTiId0KOAa0mafTZTQw8yToy57Jple64qzlRu1dux30tZZGuerLN1CKzg5Xl2iOpMK+l87jCINwVp5cUtF/XrvhBbU7onKh3KBiy99iUqVyA3Y6iiIZhGKWBSuSA4bNgDYIoVkqjHpdL35aEShoRO6pNXt7rDzxFoPzH0JuPI54nE4OhVrzZXwtkAEosxVa/fszcE092FH+HhhtxZBYe/KEzwdISU9TOPdId3UF/UMYC0MiYOlqffVTgAg="] = struct{}{}
v.ChunkVKMap["AAAAGQAAAATyWEABRbJ6hQQ5/zLX1gTasr7349minA9rSgMS6gDeHwZKqikRiO3md+pXjjxMHnKQtmXYgMXhJSvlmZ+Ws+cheuly2X1RuNQzcZuRImaKPR9LJsVZYsXfJbuqdKX8p0Gj8G83wMJOmTzNVUyUol0w0lTU+CEiTpHOnxBsTF3EWaW3s1u4ycOgWt1c9M6s7WmaBZLYgAWYCunO5CLCLApNGbCASeck/LuSoedEri5u6HccCKU2khG6zl6W07jvYSbDVLJktbjRiHv+/HQix+K14j8boo8Z/unhpwXCsPxkQA=="] = struct{}{}
}
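
The hardcoded strings above are base64-encoded verifying keys stored as set members in BundleVkMap, BatchVKMap and ChunkVKMap, and they correspond to the chunk_vk/batch_vk/bundle_vk entries that dump_vk writes on the Rust side. A minimal sketch of parsing such a dump on the Go side; the JSON field names come from the VKDump struct above, everything else is illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// vkDump mirrors the Rust VKDump struct: three base64-encoded app verifying keys.
type vkDump struct {
	ChunkVK  string `json:"chunk_vk"`
	BatchVK  string `json:"batch_vk"`
	BundleVK string `json:"bundle_vk"`
}

func main() {
	// Illustrative dump; the real values are long base64 strings like those hardcoded above.
	raw := []byte(`{"chunk_vk":"AAAA","batch_vk":"BBBB","bundle_vk":"CCCC"}`)

	var dump vkDump
	if err := json.Unmarshal(raw, &dump); err != nil {
		panic(err)
	}

	// Register the keys as set members, mirroring ChunkVKMap / BatchVKMap / BundleVkMap above.
	chunkVKMap := map[string]struct{}{dump.ChunkVK: {}}
	batchVKMap := map[string]struct{}{dump.BatchVK: {}}
	bundleVkMap := map[string]struct{}{dump.BundleVK: {}}

	_, ok := chunkVKMap[dump.ChunkVK]
	fmt.Println(ok, len(batchVKMap), len(bundleVkMap)) // true 1 1
}
```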

View File

@@ -19,20 +19,22 @@ type Batch struct {
db *gorm.DB `gorm:"column:-"`
// batch
Index uint64 `json:"index" gorm:"column:index"`
Hash string `json:"hash" gorm:"column:hash"`
DataHash string `json:"data_hash" gorm:"column:data_hash"`
StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
StartChunkHash string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
EndChunkIndex uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
EndChunkHash string `json:"end_chunk_hash" gorm:"column:end_chunk_hash"`
StateRoot string `json:"state_root" gorm:"column:state_root"`
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
Index uint64 `json:"index" gorm:"column:index"`
Hash string `json:"hash" gorm:"column:hash"`
DataHash string `json:"data_hash" gorm:"column:data_hash"`
StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
StartChunkHash string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
EndChunkIndex uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
EndChunkHash string `json:"end_chunk_hash" gorm:"column:end_chunk_hash"`
StateRoot string `json:"state_root" gorm:"column:state_root"`
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
PrevL1MessageQueueHash string `json:"prev_l1_message_queue_hash" gorm:"column:prev_l1_message_queue_hash"`
PostL1MessageQueueHash string `json:"post_l1_message_queue_hash" gorm:"column:post_l1_message_queue_hash"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
// proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`

zkvm-prover/Cargo.lock (generated): 1991 changed lines

File diff suppressed because it is too large.

View File

@@ -18,13 +18,16 @@ serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.116"
futures = "0.3.30"
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.1.0-rc.6" }
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.1.0-rc.11", package = "scroll-zkvm-prover" }
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "af95d2a", features = [
#scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "c81e5f2", features = [
# "openvm",
#] }
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", branch = "feat/openvm-euclid-upgrade", features = [
"openvm",
] }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-v2", features = [
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = [
"scroll",
] }
base64 = "0.13.1"
@@ -45,3 +48,34 @@ clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"
url = "2.5.4"
serde_bytes = "0.11.15"
[patch."https://github.com/openvm-org/stark-backend.git"]
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
[patch."https://github.com/Plonky3/Plonky3.git"]
p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
"nightly-features",
], tag = "v0.1.0" }
p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }

View File

@@ -23,8 +23,8 @@
"db_path": "unique-db-path-for-prover-1"
},
"circuits": {
"euclid": {
"hard_fork_name": "euclid",
"euclidV2": {
"hard_fork_name": "euclidV2",
"workspace_path": "/home/ubuntu/prover-workdir"
}
}

View File

@@ -10,7 +10,7 @@ use scroll_proving_sdk::{
};
#[derive(Parser, Debug)]
#[clap(disable_version_flag = true)]
#[command(disable_version_flag = true)]
struct Args {
/// Path of config file
#[arg(long = "config", default_value = "conf/config.json")]

View File

@@ -1,4 +1,6 @@
use crate::zk_circuits_handler::{euclid::EuclidHandler, CircuitsHandler};
use crate::zk_circuits_handler::{
euclid::EuclidHandler, euclidV2::EuclidV2Handler, CircuitsHandler,
};
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use scroll_proving_sdk::{
@@ -181,9 +183,14 @@ impl LocalProver {
// coordinator
let config = self.config.circuits.get(hard_fork_name).unwrap();
Arc::new(match hard_fork_name {
"euclid" => Arc::new(Mutex::new(EuclidHandler::new(&config.workspace_path))),
match hard_fork_name {
"euclid" => Arc::new(Arc::new(Mutex::new(EuclidHandler::new(
&config.workspace_path,
)))) as Arc<dyn CircuitsHandler>,
"euclidV2" => Arc::new(Arc::new(Mutex::new(EuclidV2Handler::new(
&config.workspace_path,
)))) as Arc<dyn CircuitsHandler>,
_ => unreachable!(),
}) as Arc<dyn CircuitsHandler>
}
}
}

View File

@@ -1,5 +1,8 @@
pub mod euclid;
#[allow(non_snake_case)]
pub mod euclidV2;
use anyhow::Result;
use async_trait::async_trait;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};

View File

@@ -4,37 +4,95 @@ use super::CircuitsHandler;
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover::{
use scroll_zkvm_prover_euclid::{
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
BatchProver, BundleProver, ChunkProver,
BatchProver, BundleProverEuclidV1, ChunkProver, ProverConfig,
};
use tokio::sync::Mutex;
pub struct EuclidHandler {
chunk_prover: ChunkProver,
batch_prover: BatchProver,
bundle_prover: BundleProver,
bundle_prover: BundleProverEuclidV1,
}
#[derive(Clone, Copy)]
pub(crate) enum Phase {
EuclidV1,
EuclidV2,
}
impl Phase {
pub fn as_str(&self) -> &str {
match self {
Phase::EuclidV1 => "euclidv1",
Phase::EuclidV2 => "euclidv2",
}
}
pub fn phase_spec_chunk(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_exe = workspace_path.join("chunk/app.vmexe");
let path_app_config = workspace_path.join("chunk/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
path_app_exe,
segment_len,
..Default::default()
}
}
pub fn phase_spec_batch(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_exe = workspace_path.join("batch/app.vmexe");
let path_app_config = workspace_path.join("batch/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
path_app_exe,
segment_len,
..Default::default()
}
}
pub fn phase_spec_bundle(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_config = workspace_path.join("bundle/openvm.toml");
let segment_len = Some((1 << 22) - 100);
match self {
Phase::EuclidV1 => ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app_euclidv1.vmexe"),
..Default::default()
},
Phase::EuclidV2 => ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app.vmexe"),
..Default::default()
},
}
}
}
unsafe impl Send for EuclidHandler {}
impl EuclidHandler {
pub fn new(workspace_path: &str) -> Self {
let p = Phase::EuclidV1;
let workspace_path = Path::new(workspace_path);
let cache_dir = workspace_path.join("cache");
let chunk_exe = workspace_path.join("chunk/app.vmexe");
let chunk_app_config = workspace_path.join("chunk/openvm.toml");
let chunk_prover = ChunkProver::setup(chunk_exe, chunk_app_config, Some(cache_dir.clone()))
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
.expect("Failed to setup chunk prover");
let batch_exe = workspace_path.join("batch/app.vmexe");
let batch_app_config = workspace_path.join("batch/openvm.toml");
let batch_prover = BatchProver::setup(batch_exe, batch_app_config, Some(cache_dir.clone()))
let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
.expect("Failed to setup batch prover");
let bundle_exe = workspace_path.join("bundle/app.vmexe");
let bundle_app_config = workspace_path.join("bundle/openvm.toml");
let bundle_prover = BundleProver::setup(bundle_exe, bundle_app_config, Some(cache_dir))
let bundle_prover = BundleProverEuclidV1::setup(p.phase_spec_bundle(workspace_path))
.expect("Failed to setup bundle prover");
Self {
@@ -68,6 +126,8 @@ impl CircuitsHandler for Arc<Mutex<EuclidHandler>> {
.chunk_prover
.gen_proof(&ChunkProvingTask {
block_witnesses: witnesses,
prev_msg_queue_hash: Default::default(),
fork_name: Phase::EuclidV1.as_str().to_string(),
})?;
Ok(serde_json::to_string(&proof)?)

View File

@@ -0,0 +1,79 @@
use std::{path::Path, sync::Arc};
use super::{euclid::Phase, CircuitsHandler};
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover_euclid::{
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
BatchProver, BundleProverEuclidV2, ChunkProver,
};
use tokio::sync::Mutex;
pub struct EuclidV2Handler {
chunk_prover: ChunkProver,
batch_prover: BatchProver,
bundle_prover: BundleProverEuclidV2,
}
unsafe impl Send for EuclidV2Handler {}
impl EuclidV2Handler {
pub fn new(workspace_path: &str) -> Self {
let p = Phase::EuclidV2;
let workspace_path = Path::new(workspace_path);
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
.expect("Failed to setup chunk prover");
let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
.expect("Failed to setup batch prover");
let bundle_prover = BundleProverEuclidV2::setup(p.phase_spec_bundle(workspace_path))
.expect("Failed to setup bundle prover");
Self {
chunk_prover,
batch_prover,
bundle_prover,
}
}
}
#[async_trait]
impl CircuitsHandler for Arc<Mutex<EuclidV2Handler>> {
async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
Some(match task_type {
ProofType::Chunk => self.try_lock().unwrap().chunk_prover.get_app_vk(),
ProofType::Batch => self.try_lock().unwrap().batch_prover.get_app_vk(),
ProofType::Bundle => self.try_lock().unwrap().bundle_prover.get_app_vk(),
_ => unreachable!("Unsupported proof type"),
})
}
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
match prove_request.proof_type {
ProofType::Chunk => {
let task: ChunkProvingTask = serde_json::from_str(&prove_request.input)?;
let proof = self.try_lock().unwrap().chunk_prover.gen_proof(&task)?;
Ok(serde_json::to_string(&proof)?)
}
ProofType::Batch => {
let task: BatchProvingTask = serde_json::from_str(&prove_request.input)?;
let proof = self.try_lock().unwrap().batch_prover.gen_proof(&task)?;
Ok(serde_json::to_string(&proof)?)
}
ProofType::Bundle => {
let batch_proofs: BundleProvingTask = serde_json::from_str(&prove_request.input)?;
let proof = self
.try_lock()
.unwrap()
.bundle_prover
.gen_proof_evm(&batch_proofs)?;
Ok(serde_json::to_string(&proof)?)
}
_ => Err(anyhow!("Unsupported proof type")),
}
}
}