Mirror of https://github.com/scroll-tech/scroll.git, synced 2026-01-12 23:48:15 -05:00.

Compare commits: `develop` ... `tools/get-` (190 commits)
| SHA1 |
|---|
| 634cb73ff2 |
| dced0c6a82 |
| f045984c99 |
| 38af170acc |
| c65622b7f6 |
| 80af42695d |
| 4c4cff0ca4 |
| 0df9ede841 |
| d1138653e0 |
| 1572680566 |
| 8f4fc9af95 |
| 917b143557 |
| c4849251c6 |
| 9bb768e454 |
| 9033471930 |
| 886af073c1 |
| 2b318ec7c7 |
| 4c2a75576f |
| 2a8330c346 |
| d82e109360 |
| 42bfcb84d7 |
| 83c0a0870c |
| 78a458daa5 |
| 01d0e48e9a |
| 5cfb8b6a69 |
| b59db732c3 |
| 899476731d |
| 1bec964097 |
| b73acca200 |
| 77dceaea35 |
| d0cb8b9aa5 |
| ed057286d9 |
| b3e46673f6 |
| 2fb27ceb3d |
| e3332885ed |
| 3ee2d2b39c |
| 4b21c79443 |
| c6f0299373 |
| 3454c6c670 |
| 901693a2c0 |
| 0bb53140f5 |
| 09790c4448 |
| ae212a919a |
| 9b5c42e9d9 |
| 60877d3c16 |
| 07d1621310 |
| 11afeb1354 |
| cf41048c0a |
| 77d63226c5 |
| 135073c0ad |
| bab0e4f8d6 |
| 2d620ddf4f |
| 8befb84910 |
| 4822d38aba |
| cb87c7aedd |
| 3a3db5fe32 |
| b4546af434 |
| 459941d942 |
| 9f480e5397 |
| 7d4ff80edf |
| 5869bfd825 |
| 12a262ad99 |
| 7d5b77a36c |
| 5f8bb53dce |
| 87e1235c7f |
| 86e6555a54 |
| e3b17a0740 |
| ef9e25f14c |
| 0fc28cb511 |
| ad2e94e190 |
| 2846ecffa5 |
| 0e82c63ac4 |
| 9996af6227 |
| 8cf087c63b |
| b984341991 |
| 7486236a7a |
| a6ed321666 |
| 8db4e5c77d |
| 5cf8cda8a7 |
| bcc6b0f7e0 |
| fe6451b76c |
| be88ef6c39 |
| 64368f9a79 |
| f288179451 |
| b8c7ec2b22 |
| 88da49383c |
| 1ea9acafa3 |
| c743efd99e |
| 2d40f0f942 |
| fcbaa674c6 |
| 110083c6c8 |
| b3c1df7557 |
| 893bf18d62 |
| 7ec6d478b3 |
| eacdc78ba7 |
| 2cc9f65852 |
| af381223f3 |
| bb6ee2c932 |
| e99a8515b9 |
| 38b3239c6b |
| d987931e30 |
| 90d15637eb |
| 4d677b344b |
| d57e6b0e7b |
| 9b462e4c98 |
| c9f6e8c6e1 |
| 867307d576 |
| 20dffe4ea5 |
| 57d50b7183 |
| 7a70e374b8 |
| 0799dd48f2 |
| 224546e380 |
| 95adcc378f |
| 47219f2d86 |
| ab7038c0a7 |
| d79aaef35a |
| da963313b6 |
| f27ddb7f8e |
| 94bee1903a |
| b7e7d1a1f1 |
| f1ea4b315c |
| 8b08a57f63 |
| a868bc1531 |
| 101cc46bd9 |
| 9f4c9ee150 |
| 03c63a62cf |
| b30f4d0b00 |
| 4333d51bef |
| 82dd5e0e5e |
| f91c999005 |
| c8b614fd2f |
| a1c4562432 |
| d6674e8a3d |
| 55b32e1c0c |
| 8ea431514d |
| 26a49cb2a3 |
| e27ab5a396 |
| 554a233928 |
| 673777fe63 |
| 7353f30ff6 |
| eb5758b693 |
| 47a6c23b1f |
| 081d28988d |
| 782e019f9c |
| 89ede0d315 |
| a55de1fc09 |
| ed394a6369 |
| 121ce09c80 |
| 0125dd62a6 |
| bb9d404e85 |
| e1a0bab452 |
| 50ebf179fd |
| 01fa3b34a7 |
| 2e9827a750 |
| 867fda6952 |
| fbc14ac91b |
| 37924b0ae7 |
| 8b57dd6381 |
| f13863e542 |
| d3acd6b510 |
| 83c73f8458 |
| bf084368c5 |
| d503d4a990 |
| ac17696171 |
| b424cef816 |
| e5ad9c618d |
| 848d3a6827 |
| 2bd0655fda |
| f01af24908 |
| 2de45f0d54 |
| c3a3bad800 |
| 9412c7ff3a |
| 5f2295043e |
| 69a80d4a4a |
| 8db5339c1f |
| 99c0a9fac5 |
| f4e17bcca6 |
| e713424e5c |
| 2efbbd7d77 |
| 310abdd543 |
| 5a479c3a08 |
| 783b965deb |
| 182f8e307c |
| b460d4a717 |
| 421afe9c30 |
| ca8d930bd6 |
| 940fde0cbf |
| 78c99636dc |
| 0c0c417829 |
| 41606fe7d7 |
common/libzkp/impl/Cargo.lock (generated, 856 lines changed): diff suppressed because it is too large.
common/libzkp/impl/Cargo.toml:

```diff
@@ -14,8 +14,8 @@ ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
 tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }
 
 [dependencies]
-euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-prover" }
-euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-verifier" }
+euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.2.0", package = "scroll-zkvm-prover" }
+euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.2.0", package = "scroll-zkvm-verifier" }
 
 base64 = "0.13.0"
 env_logger = "0.9.0"
```
common/libzkp/impl/src/verifier/mod.rs:

```diff
@@ -1,9 +1,11 @@
 #![allow(static_mut_refs)]
 
 mod euclid;
+mod euclidv2;
 
 use anyhow::{bail, Result};
 use euclid::EuclidVerifier;
+use euclidv2::EuclidV2Verifier;
 use serde::{Deserialize, Serialize};
 use std::{cell::OnceCell, path::Path, rc::Rc};
 
@@ -51,7 +53,17 @@ pub fn init(config: VerifierConfig) {
     unsafe {
         VERIFIER_LOW
             .set(VerifierPair(
-                config.high_version_circuit.fork_name,
+                "euclid".to_string(),
+                Rc::new(Box::new(verifier)),
+            ))
+            .unwrap_unchecked();
+    }
+
+    let verifier = EuclidV2Verifier::new(&config.high_version_circuit.assets_path);
+    unsafe {
+        VERIFIER_HIGH
+            .set(VerifierPair(
+                "euclidV2".to_string(),
                 Rc::new(Box::new(verifier)),
             ))
             .unwrap_unchecked();
```
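Net effect of the `mod.rs` hunks: `VERIFIER_LOW` is now registered under the hard-coded fork name `"euclid"` rather than the configured `high_version_circuit.fork_name`, and a second verifier, created via `EuclidV2Verifier::new(&config.high_version_circuit.assets_path)`, is registered as `VERIFIER_HIGH` under `"euclidV2"`.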
common/libzkp/impl/src/verifier/euclid.rs:

```diff
@@ -4,13 +4,13 @@ use anyhow::Result;
 
 use crate::utils::panic_catch;
 use euclid_prover::{BatchProof, BundleProof, ChunkProof};
-use euclid_verifier::verifier::{BatchVerifier, BundleVerifier, ChunkVerifier};
+use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV1, ChunkVerifier};
 use std::{fs::File, path::Path};
 
 pub struct EuclidVerifier {
     chunk_verifier: ChunkVerifier,
     batch_verifier: BatchVerifier,
-    bundle_verifier: BundleVerifier,
+    bundle_verifier: BundleVerifierEuclidV1,
 }
 
 impl EuclidVerifier {
@@ -24,7 +24,7 @@ impl EuclidVerifier {
                 .expect("Setting up chunk verifier"),
             batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
                 .expect("Setting up batch verifier"),
-            bundle_verifier: BundleVerifier::setup(&config, &exe, &verifier_bin)
+            bundle_verifier: BundleVerifierEuclidV1::setup(&config, &exe, &verifier_bin)
                 .expect("Setting up bundle verifier"),
         }
     }
```
common/libzkp/impl/src/verifier/euclidv2.rs (new file, 65 lines):

```diff
@@ -0,0 +1,65 @@
+use super::{ProofVerifier, TaskType, VKDump};
+
+use anyhow::Result;
+
+use crate::utils::panic_catch;
+use euclid_prover::{BatchProof, BundleProof, ChunkProof};
+use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
+use std::{fs::File, path::Path};
+
+pub struct EuclidV2Verifier {
+    chunk_verifier: ChunkVerifier,
+    batch_verifier: BatchVerifier,
+    bundle_verifier: BundleVerifierEuclidV2,
+}
+
+impl EuclidV2Verifier {
+    pub fn new(assets_dir: &str) -> Self {
+        let verifier_bin = Path::new(assets_dir).join("verifier.bin");
+        let config = Path::new(assets_dir).join("root-verifier-vm-config");
+        let exe = Path::new(assets_dir).join("root-verifier-committed-exe");
+
+        Self {
+            chunk_verifier: ChunkVerifier::setup(&config, &exe, &verifier_bin)
+                .expect("Setting up chunk verifier"),
+            batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
+                .expect("Setting up batch verifier"),
+            bundle_verifier: BundleVerifierEuclidV2::setup(&config, &exe, &verifier_bin)
+                .expect("Setting up bundle verifier"),
+        }
+    }
+}
+
+impl ProofVerifier for EuclidV2Verifier {
+    fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
+        panic_catch(|| match task_type {
+            TaskType::Chunk => {
+                let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
+                self.chunk_verifier
+                    .verify_proof(proof.proof.as_root_proof().unwrap())
+            }
+            TaskType::Batch => {
+                let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
+                self.batch_verifier
+                    .verify_proof(proof.proof.as_root_proof().unwrap())
+            }
+            TaskType::Bundle => {
+                let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
+                self.bundle_verifier
+                    .verify_proof_evm(&proof.proof.as_evm_proof().unwrap())
+            }
+        })
+        .map_err(|err_str: String| anyhow::anyhow!(err_str))
+    }
+
+    fn dump_vk(&self, file: &Path) {
+        let f = File::create(file).expect("Failed to open file to dump VK");
+
+        let dump = VKDump {
+            chunk_vk: base64::encode(self.chunk_verifier.get_app_vk()),
+            batch_vk: base64::encode(self.batch_verifier.get_app_vk()),
+            bundle_vk: base64::encode(self.bundle_verifier.get_app_vk()),
+        };
+        serde_json::to_writer(f, &dump).expect("Failed to dump VK");
+    }
+}
```
common/types/message/message.go:

```diff
@@ -4,12 +4,18 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"math/big"
 
 	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/common/hexutil"
 )
 
 const (
-	euclidFork = "euclid"
+	EuclidFork   = "euclid"
+	EuclidV2Fork = "euclidV2"
+
+	EuclidForkNameForProver   = "euclidv1"
+	EuclidV2ForkNameForProver = "euclidv2"
 )
 
 // ProofType represents the type of task.
```
```diff
@@ -39,38 +45,102 @@
 	ProofTypeBundle
 )
 
-// ChunkTaskDetail is a type containing ChunkTask detail.
+// ChunkTaskDetail is a type containing ChunkTask detail for chunk task.
 type ChunkTaskDetail struct {
-	BlockHashes []common.Hash `json:"block_hashes"`
+	// use one of the string of EuclidFork / EuclidV2Fork
+	ForkName         string        `json:"fork_name"`
+	BlockHashes      []common.Hash `json:"block_hashes"`
+	PrevMsgQueueHash common.Hash   `json:"prev_msg_queue_hash"`
+}
+
+// it is a hex encoded big with fixed length on 48 bytes
+type Byte48 struct {
+	hexutil.Big
+}
+
+func (e Byte48) MarshalText() ([]byte, error) {
+	i := e.ToInt()
+	// overrite encode big
+	if sign := i.Sign(); sign < 0 {
+		// sanity check
+		return nil, errors.New("Byte48 must be positive integer")
+	} else {
+		s := i.Text(16)
+		if len(s) > 96 {
+			return nil, errors.New("integer Exceed 384bit")
+		}
+		return []byte(fmt.Sprintf("0x%0*s", 96, s)), nil
+	}
+}
+
+func isString(input []byte) bool {
+	return len(input) >= 2 && input[0] == '"' && input[len(input)-1] == '"'
+}
+
+// hexutil.Big has limition of 256bit so we have to override it ...
+func (e *Byte48) UnmarshalJSON(input []byte) error {
+	if !isString(input) {
+		return errors.New("not hex string")
+	}
+
+	b, err := hexutil.Decode(string(input[1 : len(input)-1]))
+	if err != nil {
+		return err
+	}
+	if len(b) != 48 {
+		return fmt.Errorf("not a 48 bytes hex string: %d", len(b))
+	}
+	var dec big.Int
+	dec.SetBytes(b)
+	*e = Byte48{(hexutil.Big)(dec)}
+	return nil
 }
 
 // BatchTaskDetail is a type containing BatchTask detail.
 type BatchTaskDetail struct {
-	ChunkInfos    []*ChunkInfo `json:"chunk_infos"`
-	ChunkProofs   []ChunkProof `json:"chunk_proofs"`
-	BatchHeader   interface{}  `json:"batch_header"`
-	BlobBytes     []byte       `json:"blob_bytes"`
-	KzgProof      []byte       `json:"kzg_proof"`
-	KzgCommitment []byte       `json:"kzg_commitment"`
-	Challenge     common.Hash  `json:"challenge"`
+	// use one of the string of EuclidFork / EuclidV2Fork
+	ForkName        string       `json:"fork_name"`
+	ChunkInfos      []*ChunkInfo `json:"chunk_infos"`
+	ChunkProofs     []ChunkProof `json:"chunk_proofs"`
+	BatchHeader     interface{}  `json:"batch_header"`
+	BlobBytes       []byte       `json:"blob_bytes"`
+	KzgProof        Byte48       `json:"kzg_proof,omitempty"`
+	KzgCommitment   Byte48       `json:"kzg_commitment,omitempty"`
+	ChallengeDigest common.Hash  `json:"challenge_digest,omitempty"`
 }
 
 // BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
 type BundleTaskDetail struct {
-	BatchProofs []BatchProof `json:"batch_proofs"`
+	// use one of the string of EuclidFork / EuclidV2Fork
+	ForkName    string            `json:"fork_name"`
+	BatchProofs []BatchProof      `json:"batch_proofs"`
+	BundleInfo  *OpenVMBundleInfo `json:"bundle_info,omitempty"`
 }
 
 // ChunkInfo is for calculating pi_hash for chunk
 type ChunkInfo struct {
 	ChainID          uint64      `json:"chain_id"`
 	PrevStateRoot    common.Hash `json:"prev_state_root"`
 	PostStateRoot    common.Hash `json:"post_state_root"`
 	WithdrawRoot     common.Hash `json:"withdraw_root"`
 	DataHash         common.Hash `json:"data_hash"`
 	IsPadding        bool        `json:"is_padding"`
 	TxBytes          []byte      `json:"tx_bytes"`
 	TxBytesHash      common.Hash `json:"tx_data_digest"`
 	PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
+	PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
+	TxDataLength     uint64      `json:"tx_data_length"`
+	InitialBlockNumber uint64    `json:"initial_block_number"`
+	BlockCtxs        []BlockContextV2 `json:"block_ctxs"`
+}
+
+// BlockContextV2 is the block context for euclid v2
+type BlockContextV2 struct {
+	Timestamp uint64      `json:"timestamp"`
+	BaseFee   hexutil.Big `json:"base_fee"`
+	GasLimit  uint64      `json:"gas_limit"`
+	NumTxs    uint16      `json:"num_txs"`
+	NumL1Msgs uint16      `json:"num_l1_msgs"`
 }
 
 // SubCircuitRowUsage tracing info added in v0.11.0rc8
```
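A minimal usage sketch of the new `Byte48` type (not part of the diff; it assumes the `scroll-tech/common/types/message` import path that this compare's coordinator script also uses):

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common/hexutil"

	"scroll-tech/common/types/message"
)

func main() {
	// A KZG commitment/proof is 48 bytes, so Byte48 always renders as
	// "0x" plus 96 zero-padded hex digits, even for small values.
	v := message.Byte48{Big: hexutil.Big(*big.NewInt(1))}
	s, err := v.MarshalText()
	if err != nil {
		panic(err)
	}
	fmt.Println(len(s), string(s)) // 98 0x000...001

	// UnmarshalJSON accepts only a quoted hex string that decodes to
	// exactly 48 bytes, sidestepping hexutil.Big's 256-bit cap.
	var w message.Byte48
	if err := w.UnmarshalJSON([]byte(`"` + string(s) + `"`)); err != nil {
		panic(err)
	}
}
```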
```diff
@@ -87,7 +157,7 @@ type ChunkProof interface {
 // NewChunkProof creates a new ChunkProof instance.
 func NewChunkProof(hardForkName string) ChunkProof {
 	switch hardForkName {
-	case euclidFork:
+	case EuclidFork, EuclidV2Fork:
 		return &OpenVMChunkProof{}
 	default:
 		return &Halo2ChunkProof{}
@@ -121,7 +191,7 @@ type BatchProof interface {
 // NewBatchProof creates a new BatchProof instance.
 func NewBatchProof(hardForkName string) BatchProof {
 	switch hardForkName {
-	case euclidFork:
+	case EuclidFork, EuclidV2Fork:
 		return &OpenVMBatchProof{}
 	default:
 		return &Halo2BatchProof{}
@@ -178,7 +248,7 @@ type BundleProof interface {
 // NewBundleProof creates a new BundleProof instance.
 func NewBundleProof(hardForkName string) BundleProof {
 	switch hardForkName {
-	case euclidFork:
+	case EuclidFork, EuclidV2Fork:
 		return &OpenVMBundleProof{}
 	default:
 		return &Halo2BundleProof{}
@@ -258,12 +328,14 @@ func (p *OpenVMChunkProof) Proof() []byte {
 
 // OpenVMBatchInfo is for calculating pi_hash for batch header
 type OpenVMBatchInfo struct {
 	ParentBatchHash common.Hash `json:"parent_batch_hash"`
 	ParentStateRoot common.Hash `json:"parent_state_root"`
 	StateRoot       common.Hash `json:"state_root"`
 	WithdrawRoot    common.Hash `json:"withdraw_root"`
 	BatchHash       common.Hash `json:"batch_hash"`
 	ChainID         uint64      `json:"chain_id"`
+	PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
+	PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
 }
 
 // BatchProof includes the proof info that are required for batch verification and rollup.
@@ -323,6 +395,7 @@ type OpenVMBundleInfo struct {
 	NumBatches    uint32      `json:"num_batches"`
 	PrevBatchHash common.Hash `json:"prev_batch_hash"`
 	BatchHash     common.Hash `json:"batch_hash"`
+	MsgQueueHash  common.Hash `json:"msg_queue_hash"`
 }
 
 // OpenVMBundleProof includes the proof info that are required for verification of a bundle of batch proofs.
```
common/types/message/message_test.go (new file, 22 lines):

```diff
@@ -0,0 +1,22 @@
+package message
+
+import (
+	"fmt"
+	"testing"
+)
+
+func TestBytes48(t *testing.T) {
+	ti := &Byte48{}
+	ti.UnmarshalText([]byte("0x1"))
+	if s, err := ti.MarshalText(); err == nil {
+		if len(s) != 98 {
+			panic(fmt.Sprintf("wrong str: %s", s))
+		}
+	}
+	ti.UnmarshalText([]byte("0x0"))
+	if s, err := ti.MarshalText(); err == nil {
+		if len(s) != 98 {
+			panic(fmt.Sprintf("wrong str: %s", s))
+		}
+	}
+}
```
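The `98` in these assertions is the fixed-width encoding's total length: 2 characters for the `0x` prefix plus 96 hex digits covering 48 bytes.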
common/version/version.go:

```diff
@@ -5,7 +5,7 @@ import (
 	"runtime/debug"
 )
 
-var tag = "v4.4.99"
+var tag = "v4.5.0"
 
 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {
```
coordinator/go.mod:

```diff
@@ -2,8 +2,6 @@ module scroll-tech/coordinator
 
 go 1.22
 
-toolchain go1.22.2
-
 require (
 	github.com/appleboy/gin-jwt/v2 v2.9.1
 	github.com/gin-gonic/gin v1.9.1
@@ -11,7 +9,7 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/mitchellh/mapstructure v1.5.0
 	github.com/prometheus/client_golang v1.19.0
-	github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435
+	github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493
 	github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601
 	github.com/shopspring/decimal v1.3.1
 	github.com/stretchr/testify v1.10.0
@@ -20,6 +18,8 @@ require (
 	gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
 )
 
+replace github.com/scroll-tech/da-codec => github.com/scroll-tech/da-codec v0.1.3-0.20250327153440-cd3e5728df9c
+
 require (
 	github.com/bytedance/sonic v1.10.1 // indirect
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
```
coordinator/go.sum:

```diff
@@ -177,8 +177,8 @@ github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU
 github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435 h1:X9fkvjrYBY79lGgKEPpUhuiJ4vWpWwzOVw4H8CU8L54=
-github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
+github.com/scroll-tech/da-codec v0.1.3-0.20250327153440-cd3e5728df9c h1:MCbuwFynRgxQeoyXwt/wUAPo3vfb61rMWxqADE2he4A=
+github.com/scroll-tech/da-codec v0.1.3-0.20250327153440-cd3e5728df9c/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 h1:NEsjCG6uSvLRBlsP3+x6PL1kM+Ojs3g8UGotIPgJSz8=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ=
 github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
```
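Note that the refreshed go.sum entries pin `da-codec` at `v0.1.3-0.20250327153440-cd3e5728df9c`, the version named by the `replace` directive added to go.mod above, not the `v0.1.3-0.20250401062930-9f9f53898493` version in the `require` block.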
coordinator login logic (NewLoginLogic):

```diff
@@ -9,6 +9,7 @@ import (
 	"github.com/scroll-tech/go-ethereum/log"
 	"gorm.io/gorm"
 
+	"scroll-tech/common/types/message"
 	"scroll-tech/common/version"
 
 	"scroll-tech/coordinator/internal/config"
@@ -42,9 +43,10 @@ func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *Logi
 
 	var highHardForks []string
 	highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
-	if cfg.ProverManager.Verifier.HighVersionCircuit.ForkName != "euclid" {
+	if cfg.ProverManager.Verifier.HighVersionCircuit.ForkName != message.EuclidFork && cfg.ProverManager.Verifier.HighVersionCircuit.ForkName != message.EuclidV2Fork {
 		highHardForks = append(highHardForks, cfg.ProverManager.Verifier.LowVersionCircuit.ForkName)
 	}
+	highHardForks = append(highHardForks, message.EuclidFork, message.EuclidV2Fork)
 	proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks
 
 	proverVersionHardForkMap[cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion] = []string{cfg.ProverManager.Verifier.LowVersionCircuit.ForkName}
```
coordinator batch prover task:

```diff
@@ -4,6 +4,7 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"math/big"
 	"time"
 
 	"github.com/gin-gonic/gin"
@@ -11,6 +12,7 @@
 	"github.com/prometheus/client_golang/prometheus/promauto"
 	"github.com/scroll-tech/da-codec/encoding"
 	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/common/hexutil"
 	"github.com/scroll-tech/go-ethereum/log"
 	"github.com/scroll-tech/go-ethereum/params"
 	"gorm.io/gorm"
@@ -121,7 +123,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
 	for i := 0; i < len(proverTasks); i++ {
 		if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
 			taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
-			log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight)
+			log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBatchTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
 			return nil, nil
 		}
 	}
@@ -211,17 +213,23 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
 			WithdrawRoot:     common.HexToHash(chunk.WithdrawRoot),
 			DataHash:         common.HexToHash(chunk.Hash),
 			PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
+			PostMsgQueueHash: common.HexToHash(chunk.PostL1MessageQueueHash),
 			IsPadding:        false,
 		}
-		if haloProot, ok := proof.(*message.Halo2ChunkProof); ok {
-			if haloProot.ChunkInfo != nil {
-				chunkInfo.TxBytes = haloProot.ChunkInfo.TxBytes
+		if halo2Proof, ok := proof.(*message.Halo2ChunkProof); ok {
+			if halo2Proof.ChunkInfo != nil {
+				chunkInfo.TxBytes = halo2Proof.ChunkInfo.TxBytes
 			}
 		}
+		if openvmProof, ok := proof.(*message.OpenVMChunkProof); ok {
+			chunkInfo.InitialBlockNumber = openvmProof.MetaData.ChunkInfo.InitialBlockNumber
+			chunkInfo.BlockCtxs = openvmProof.MetaData.ChunkInfo.BlockCtxs
+			chunkInfo.TxDataLength = openvmProof.MetaData.ChunkInfo.TxDataLength
+		}
 		chunkInfos = append(chunkInfos, &chunkInfo)
 	}
 
-	taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
+	taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs, hardForkName)
 	if err != nil {
 		return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
 	}
@@ -238,6 +246,9 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
 		TaskData:     string(chunkProofsBytes),
 		HardForkName: hardForkName,
 	}
+
+	log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeBatch.String(), "hard_fork_name", hardForkName, "task_data", taskMsg.TaskData)
+
 	return taskMsg, nil
 }
 
@@ -247,12 +258,18 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
 	}
 }
 
-func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []message.ChunkProof) (*message.BatchTaskDetail, error) {
+func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []message.ChunkProof, hardForkName string) (*message.BatchTaskDetail, error) {
 	taskDetail := &message.BatchTaskDetail{
 		ChunkInfos:  chunkInfos,
 		ChunkProofs: chunkProofs,
 	}
+
+	if hardForkName == message.EuclidV2Fork {
+		taskDetail.ForkName = message.EuclidV2ForkNameForProver
+	} else if hardForkName == message.EuclidFork {
+		taskDetail.ForkName = message.EuclidForkNameForProver
+	}
+
 	dbBatchCodecVersion := encoding.CodecVersion(dbBatch.CodecVersion)
 	switch dbBatchCodecVersion {
 	case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6, encoding.CodecV7:
```
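The same hard-fork-to-prover fork-name mapping recurs inline in the batch, bundle, and chunk handlers in this compare; a hypothetical shared helper, not part of the diff, could consolidate it:

```go
package main

import "fmt"

// Constants mirror those added to common/types/message in this diff.
const (
	EuclidFork   = "euclid"
	EuclidV2Fork = "euclidV2"

	EuclidForkNameForProver   = "euclidv1"
	EuclidV2ForkNameForProver = "euclidv2"
)

// proverForkName maps a coordinator hard-fork name to the fork string the
// prover expects; pre-Euclid forks keep an empty ForkName, matching the
// if/else-if chains in the handlers.
func proverForkName(hardForkName string) string {
	switch hardForkName {
	case EuclidV2Fork:
		return EuclidV2ForkNameForProver
	case EuclidFork:
		return EuclidForkNameForProver
	default:
		return ""
	}
}

func main() {
	fmt.Println(proverForkName("euclid"), proverForkName("euclidV2")) // euclidv1 euclidv2
}
```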
```diff
@@ -271,17 +288,13 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*
 	}
 	taskDetail.BatchHeader = batchHeader
 	taskDetail.BlobBytes = dbBatch.BlobBytes
-
-	if len(dbBatch.BlobDataProof) < 160 {
-		return nil, fmt.Errorf("blob data proof length is less than 160 bytes = %d, taskID: %s: %s", len(dbBatch.BlobDataProof), dbBatch.Hash, common.Bytes2Hex(dbBatch.BlobDataProof))
-	}
+	taskDetail.ChallengeDigest = common.HexToHash(dbBatch.ChallengeDigest)
 
 	// Memory layout of `BlobDataProof`: used in Codec.BlobDataProofForPointEvaluation()
 	// | z       | y       | kzg_commitment | kzg_proof |
 	// |---------|---------|----------------|-----------|
 	// | bytes32 | bytes32 | bytes48        | bytes48   |
-	taskDetail.KzgProof = dbBatch.BlobDataProof[112:160]
-	taskDetail.KzgCommitment = dbBatch.BlobDataProof[64:112]
-	taskDetail.Challenge = common.Hash(dbBatch.BlobDataProof[0:32])
+	taskDetail.KzgProof = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
+	taskDetail.KzgCommitment = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}
 	return taskDetail, nil
 }
```
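For reference, the slice offsets in this hunk follow the `BlobDataProof` memory layout documented in its comment; a hedged sketch of the unpacking (function and variable names hypothetical, not part of this diff):

```go
package main

import "fmt"

// splitBlobDataProof splits a 160-byte BlobDataProof into its fixed-width
// fields per the layout comment above:
// | z (32 bytes) | y (32 bytes) | kzg_commitment (48 bytes) | kzg_proof (48 bytes) |
func splitBlobDataProof(p []byte) (z, y, commitment, proof []byte, err error) {
	if len(p) < 160 {
		return nil, nil, nil, nil, fmt.Errorf("blob data proof too short: %d bytes", len(p))
	}
	return p[0:32], p[32:64], p[64:112], p[112:160], nil
}

func main() {
	blobDataProof := make([]byte, 160) // placeholder; real data comes from the batch row
	z, y, commitment, proof, err := splitBlobDataProof(blobDataProof)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(z), len(y), len(commitment), len(proof)) // 32 32 48 48
}
```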
coordinator bundle prover task:

```diff
@@ -9,6 +9,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
+	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/log"
 	"github.com/scroll-tech/go-ethereum/params"
 	"gorm.io/gorm"
@@ -120,7 +121,7 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
 	for i := 0; i < len(proverTasks); i++ {
 		if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
 			taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
-			log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight)
+			log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBundleTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
 			return nil, nil
 		}
 	}
@@ -194,6 +195,11 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
 		return nil, fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", task.TaskID)
 	}
 
+	parentBatch, err := bp.batchOrm.GetBatchByHash(ctx, batches[0].ParentBatchHash)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get parent batch for batch task id:%s err:%w", task.TaskID, err)
+	}
+
 	var batchProofs []message.BatchProof
 	for _, batch := range batches {
 		proof := message.NewBatchProof(hardForkName)
@@ -207,6 +213,26 @@
 		BatchProofs: batchProofs,
 	}
 
+	if hardForkName == message.EuclidV2Fork {
+		taskDetail.ForkName = message.EuclidV2ForkNameForProver
+	} else if hardForkName == message.EuclidFork {
+		taskDetail.ForkName = message.EuclidForkNameForProver
+	}
+
+	taskDetail.BundleInfo = &message.OpenVMBundleInfo{
+		ChainID:       bp.cfg.L2.ChainID,
+		PrevStateRoot: common.HexToHash(parentBatch.StateRoot),
+		PostStateRoot: common.HexToHash(batches[len(batches)-1].StateRoot),
+		WithdrawRoot:  common.HexToHash(batches[len(batches)-1].WithdrawRoot),
+		NumBatches:    uint32(len(batches)),
+		PrevBatchHash: common.HexToHash(batches[0].ParentBatchHash),
+		BatchHash:     common.HexToHash(batches[len(batches)-1].Hash),
+	}
+
+	if hardForkName == message.EuclidV2Fork {
+		taskDetail.BundleInfo.MsgQueueHash = common.HexToHash(batches[len(batches)-1].PostL1MessageQueueHash)
+	}
+
 	batchProofsBytes, err := json.Marshal(taskDetail)
 	if err != nil {
 		return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
@@ -219,6 +245,9 @@
 		TaskData:     string(batchProofsBytes),
 		HardForkName: hardForkName,
 	}
+
+	log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeBundle.String(), "hard_fork_name", hardForkName, "task_data", taskMsg.TaskData)
+
 	return taskMsg, nil
 }
 
```
coordinator chunk prover task:

```diff
@@ -9,6 +9,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
+	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/log"
 	"github.com/scroll-tech/go-ethereum/params"
 	"gorm.io/gorm"
@@ -118,7 +119,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
 	for i := 0; i < len(proverTasks); i++ {
 		if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
 			taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
-			log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight)
+			log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpChunkTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
 			return nil, nil
 		}
 	}
@@ -162,7 +163,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
 		return nil, ErrCoordinatorInternalFailure
 	}
 
-	taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, hardForkName)
+	taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, chunkTask, hardForkName)
 	if err != nil {
 		cp.recoverActiveAttempts(ctx, chunkTask)
 		log.Error("format prover task failure", "task_id", chunkTask.Hash, "err", err)
@@ -179,17 +180,27 @@
 	return taskMsg, nil
 }
 
-func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
+func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, chunk *orm.Chunk, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
 	// Get block hashes.
 	blockHashes, dbErr := cp.blockOrm.GetL2BlockHashesByChunkHash(ctx, task.TaskID)
 	if dbErr != nil || len(blockHashes) == 0 {
 		return nil, fmt.Errorf("failed to fetch block hashes of a chunk, chunk hash:%s err:%w", task.TaskID, dbErr)
 	}
 
+	var taskDetailBytes []byte
 	taskDetail := message.ChunkTaskDetail{
 		BlockHashes:      blockHashes,
+		PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
 	}
-	blockHashesBytes, err := json.Marshal(taskDetail)
+
+	if hardForkName == message.EuclidV2Fork {
+		taskDetail.ForkName = message.EuclidV2ForkNameForProver
+	} else if hardForkName == message.EuclidFork {
+		taskDetail.ForkName = message.EuclidForkNameForProver
+	}
+
+	var err error
+	taskDetailBytes, err = json.Marshal(taskDetail)
 	if err != nil {
 		return nil, fmt.Errorf("failed to marshal block hashes hash:%s, err:%w", task.TaskID, err)
 	}
@@ -198,10 +209,12 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove
 		UUID:         task.UUID.String(),
 		TaskID:       task.TaskID,
 		TaskType:     int(message.ProofTypeChunk),
-		TaskData:     string(blockHashesBytes),
+		TaskData:     string(taskDetailBytes),
 		HardForkName: hardForkName,
 	}
+
+	log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeChunk.String(), "hard_fork_name", hardForkName, "task_data", proverTaskSchema.TaskData)
+
 	return proverTaskSchema, nil
 }
 
```
coordinator base prover task (hardForkSanityCheck):

```diff
@@ -122,7 +122,7 @@ func (b *BaseProverTask) hardForkSanityCheck(ctx *gin.Context, taskCtx *proverTa
 	}
 
 	if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
-		return "", errors.New("to be assigned prover task's hard-fork name is not the same as prover")
+		return "", fmt.Errorf("to be assigned prover task's hard-fork name is not the same as prover, proverName: %s, proverVersion: %s, proverSupportHardForkNames: %s, taskHardForkName: %v", taskCtx.ProverName, taskCtx.ProverVersion, taskCtx.HardForkNames, hardForkName)
 	}
 	return hardForkName, nil
 }
```
coordinator verifier (NewVerifier):

```diff
@@ -103,11 +103,16 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
 		return nil, err
 	}
 
-	if err := v.loadOpenVMVks(cfg.HighVersionCircuit.ForkName); err != nil {
+	if err := v.loadOpenVMVks(message.EuclidFork); err != nil {
 		return nil, err
 	}
 
-	v.loadCurieVersionVKs()
+	if err := v.loadOpenVMVks(message.EuclidV2Fork); err != nil {
+		return nil, err
+	}
+
+	v.loadDarwinVKs()
+
 	return v, nil
 }
 
@@ -224,8 +229,9 @@ func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
 	return nil
 }
 
-func (v *Verifier) loadCurieVersionVKs() {
-	v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA="] = struct{}{}
+func (v *Verifier) loadDarwinVKs() {
+	v.BundleVkMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD5dsp1rEy7PSqiIFikkkOPqKokLW2mZSwCbtKdkfLQcvTxARUwHSe4iZe27PRJ5WWaLqtRV1+x6+pSVKtcPtaV4kE7v2YJRf0582hxiAF0IBaOoREdpyNfA2a9cvhWb2TMaPrUYP9EDQ7CUiW1FQzxbjGc95ua2htscnpU7d9S5stHWzKb7okkCG7bTIL9aG6qTQo2YXW7n3H3Ir47oVJB7IKrUzKGvI5Wmanh2zpZOJ9Qm4/wY24cT7cJz+Ux6wAg=="] = struct{}{}
+	v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD1DEjW4Kell67H07wazT5DdzrSh4+amh+cmosQHp9p9snFypyoBGt3UHtoJGQBZlywZWDS9ht5pnaEoGBdaKcQk+lFb+WxTiId0KOAa0mafTZTQw8yToy57Jple64qzlRu1dux30tZZGuerLN1CKzg5Xl2iOpMK+l87jCINwVp5cUtF/XrvhBbU7onKh3KBiy99iUqVyA3Y6iiIZhGKWBSuSA4bNgDYIoVkqjHpdL35aEShoRO6pNXt7rDzxFoPzH0JuPI54nE4OhVrzZXwtkAEosxVa/fszcE092FH+HhhtxZBYe/KEzwdISU9TOPdId3UF/UMYC0MiYOlqffVTgAg="] = struct{}{}
 	v.ChunkVKMap["AAAAGQAAAATyWEABRbJ6hQQ5/zLX1gTasr7349minA9rSgMS6gDeHwZKqikRiO3md+pXjjxMHnKQtmXYgMXhJSvlmZ+Ws+cheuly2X1RuNQzcZuRImaKPR9LJsVZYsXfJbuqdKX8p0Gj8G83wMJOmTzNVUyUol0w0lTU+CEiTpHOnxBsTF3EWaW3s1u4ycOgWt1c9M6s7WmaBZLYgAWYCunO5CLCLApNGbCASeck/LuSoedEri5u6HccCKU2khG6zl6W07jvYSbDVLJktbjRiHv+/HQix+K14j8boo8Z/unhpwXCsPxkQA=="] = struct{}{}
 }
```
coordinator/internal/orm/batch.go:

```diff
@@ -19,20 +19,23 @@ type Batch struct {
 	db *gorm.DB `gorm:"column:-"`
 
 	// batch
 	Index           uint64 `json:"index" gorm:"column:index"`
 	Hash            string `json:"hash" gorm:"column:hash"`
 	DataHash        string `json:"data_hash" gorm:"column:data_hash"`
 	StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
 	StartChunkHash  string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
 	EndChunkIndex   uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
 	EndChunkHash    string `json:"end_chunk_hash" gorm:"column:end_chunk_hash"`
 	StateRoot       string `json:"state_root" gorm:"column:state_root"`
 	WithdrawRoot    string `json:"withdraw_root" gorm:"column:withdraw_root"`
 	ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
 	BatchHeader     []byte `json:"batch_header" gorm:"column:batch_header"`
 	CodecVersion    int16  `json:"codec_version" gorm:"column:codec_version"`
-	EnableCompress  bool   `json:"enable_compress" gorm:"column:enable_compress"`
-	BlobBytes       []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
+	PrevL1MessageQueueHash string `json:"prev_l1_message_queue_hash" gorm:"column:prev_l1_message_queue_hash"`
+	PostL1MessageQueueHash string `json:"post_l1_message_queue_hash" gorm:"column:post_l1_message_queue_hash"`
+	EnableCompress         bool   `json:"enable_compress" gorm:"column:enable_compress"`
+	BlobBytes              []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
+	ChallengeDigest        string `json:"challenge_digest" gorm:"column:challenge_digest"`
 
 	// proof
 	ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
```
173
coordinator/internal/orm/script/main.go
Normal file
173
coordinator/internal/orm/script/main.go
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"math/big"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/scroll-tech/da-codec/encoding"
|
||||||
|
"github.com/scroll-tech/go-ethereum/common"
|
||||||
|
"github.com/scroll-tech/go-ethereum/common/hexutil"
|
||||||
|
"github.com/scroll-tech/go-ethereum/log"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
|
||||||
|
"scroll-tech/common/database"
|
||||||
|
"scroll-tech/common/types/message"
|
||||||
|
"scroll-tech/coordinator/internal/orm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat()))
|
||||||
|
glogger.Verbosity(log.LvlInfo)
|
||||||
|
log.Root().SetHandler(glogger)
|
||||||
|
|
||||||
|
if len(os.Args) < 2 {
|
||||||
|
log.Crit("no batch index range provided")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
indexRange := os.Args[1]
|
||||||
|
indices := strings.Split(indexRange, "-")
|
||||||
|
if len(indices) != 2 {
|
||||||
|
log.Crit("invalid batch index range format. Use start-end", "providedRange", indexRange)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
startIndex, err := strconv.Atoi(indices[0])
|
||||||
|
endIndex, err2 := strconv.Atoi(indices[1])
|
||||||
|
if err != nil || err2 != nil || startIndex > endIndex {
|
||||||
|
log.Crit("invalid batch index range", "start", indices[0], "end", indices[1], "err", err, "err2", err2)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := database.InitDB(&database.Config{
|
||||||
|
DriverName: "postgres",
|
||||||
|
DSN: os.Getenv("DB_DSN"),
|
||||||
|
MaxOpenNum: 200,
|
||||||
|
MaxIdleNum: 20,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Crit("failed to init db", "err", err)
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if deferErr := database.CloseDB(db); deferErr != nil {
|
||||||
|
log.Error("failed to close db", "err", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
for i := startIndex; i <= endIndex; i++ {
|
||||||
|
batchIndex := uint64(i)
|
||||||
|
resultBytes, err := getBatchTask(db, batchIndex)
|
||||||
|
if err != nil {
|
||||||
|
log.Crit("failed to get batch task", "batchIndex", batchIndex, "err", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
outputFilename := fmt.Sprintf("batch_task_%d.json", batchIndex)
|
||||||
|
if err = os.WriteFile(outputFilename, resultBytes, 0644); err != nil {
|
||||||
|
log.Crit("failed to write output file", "filename", outputFilename, "err", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getBatchTask(db *gorm.DB, batchIndex uint64) ([]byte, error) {
|
||||||
|
batch, err := orm.NewBatch(db).GetBatchByIndex(context.Background(), batchIndex)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("failed to get batch hash by index: %d err: %w ", batchIndex, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
chunks, err := orm.NewChunk(db).GetChunksByBatchHash(context.Background(), batch.Hash)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("failed to get chunk proofs for batch task id: %s err: %w ", batch.Hash, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var chunkProofs []message.ChunkProof
|
||||||
|
var chunkInfos []*message.ChunkInfo
|
||||||
|
for _, chunk := range chunks {
|
||||||
|
fmt.Println("chunk index: ", chunk.Index)
|
||||||
|
fmt.Print("chunk proof: ", chunk.Proof)
|
||||||
|
proof := message.NewChunkProof("euclid")
|
||||||
|
if encodeErr := json.Unmarshal(chunk.Proof, &proof); encodeErr != nil {
|
||||||
|
return nil, fmt.Errorf("Chunk.GetProofsByBatchHash unmarshal proof error: %w, batch hash: %v, chunk hash: %v", encodeErr, batch.Hash, chunk.Hash)
|
||||||
|
}
|
||||||
|
chunkProofs = append(chunkProofs, proof)
|
||||||
|
|
||||||
|
chunkInfo := message.ChunkInfo{
|
||||||
|
ChainID: 534351,
|
||||||
|
PrevStateRoot: common.HexToHash(chunk.ParentChunkStateRoot),
|
||||||
|
PostStateRoot: common.HexToHash(chunk.StateRoot),
|
||||||
|
WithdrawRoot: common.HexToHash(chunk.WithdrawRoot),
|
||||||
|
DataHash: common.HexToHash(chunk.Hash),
|
||||||
|
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
|
||||||
|
PostMsgQueueHash: common.HexToHash(chunk.PostL1MessageQueueHash),
|
||||||
|
IsPadding: false,
|
||||||
|
}
|
||||||
|
if openvmProof, ok := proof.(*message.OpenVMChunkProof); ok {
|
||||||
|
chunkInfo.InitialBlockNumber = openvmProof.MetaData.ChunkInfo.InitialBlockNumber
|
||||||
|
chunkInfo.BlockCtxs = openvmProof.MetaData.ChunkInfo.BlockCtxs
|
||||||
|
chunkInfo.TxDataLength = openvmProof.MetaData.ChunkInfo.TxDataLength
|
||||||
|
}
|
||||||
|
chunkInfos = append(chunkInfos, &chunkInfo)
|
||||||
|
}
|
||||||
|
|
||||||
|
taskDetail, err := getBatchTaskDetail(batch, chunkInfos, chunkProofs)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", batch.Hash, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
chunkProofsBytes, err := json.MarshalIndent(taskDetail, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to marshal chunk proofs, taskID:%s err:%w", batch.Hash, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return chunkProofsBytes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []message.ChunkProof) (*message.BatchTaskDetail, error) {
	taskDetail := &message.BatchTaskDetail{
		ChunkInfos:  chunkInfos,
		ChunkProofs: chunkProofs,
	}

	// Only batches encoded with these codec versions carry the extra header,
	// blob, and KZG fields below; any other version returns the bare task
	// detail as-is.
	dbBatchCodecVersion := encoding.CodecVersion(dbBatch.CodecVersion)
	switch dbBatchCodecVersion {
	case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6:
	default:
		return taskDetail, nil
	}

	if dbBatchCodecVersion >= encoding.CodecV7 {
		taskDetail.ForkName = message.EuclidV2ForkNameForProver
	} else {
		taskDetail.ForkName = message.EuclidForkNameForProver
	}

	codec, err := encoding.CodecFromVersion(dbBatchCodecVersion)
	if err != nil {
		return nil, fmt.Errorf("failed to get codec from version %d, err: %w", dbBatch.CodecVersion, err)
	}

	batchHeader, decodeErr := codec.NewDABatchFromBytes(dbBatch.BatchHeader)
	if decodeErr != nil {
		return nil, fmt.Errorf("failed to decode batch header version %d: %w", dbBatch.CodecVersion, decodeErr)
	}
	taskDetail.BatchHeader = batchHeader
	taskDetail.BlobBytes = dbBatch.BlobBytes

	challengeDigest, kzgCommitment, kzgProof, err := codec.BlobDataProofFromBlobBytes(dbBatch.BlobBytes)
	if err != nil {
		return nil, fmt.Errorf("failed to get challenge digest from blob bytes, taskID: %s, err: %w", dbBatch.Hash, err)
	}

	taskDetail.ChallengeDigest = challengeDigest
	// The 48-byte KZG proof and commitment are routed through big.Int so they
	// serialize as hex quantities in the task JSON.
	taskDetail.KzgProof = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(kzgProof[:]))}
	taskDetail.KzgCommitment = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(kzgCommitment[:]))}

	return taskDetail, nil
}

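A note on the two Byte48 conversions above: big.Int.SetBytes treats the 48-byte KZG proof/commitment as a big-endian integer and drops leading zero bytes, and hexutil.Big then serializes whatever width remains. Below is a self-contained sketch of that behavior (plain Go standard library; the example bytes are made up), including how big.Int.FillBytes recovers the fixed 48-byte form if it is ever needed again:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// A 48-byte KZG-style value whose first byte happens to be zero.
	var commitment [48]byte
	commitment[1] = 0xab
	commitment[47] = 0x01

	// SetBytes interprets the slice as a big-endian integer and drops
	// leading zero bytes, so the integer no longer "remembers" the width.
	n := new(big.Int).SetBytes(commitment[:])
	fmt.Println(len(n.Bytes())) // 47, not 48

	// FillBytes writes the value back into a fixed-width buffer,
	// restoring the leading zeros.
	var restored [48]byte
	n.FillBytes(restored[:])
	fmt.Println(restored == commitment) // true
}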
@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
 	cur, err := Current(pgDB)
 	assert.NoError(t, err)
 	// total number of tables.
-	assert.Equal(t, int64(25), cur)
+	assert.Equal(t, int64(26), cur)
 }
 
 func testMigrate(t *testing.T) {
 	assert.NoError(t, Migrate(pgDB))
 	cur, err := Current(pgDB)
 	assert.NoError(t, err)
-	assert.Equal(t, int64(25), cur)
+	assert.Equal(t, int64(26), cur)
 }
 
 func testRollback(t *testing.T) {
 	version, err := Current(pgDB)
 	assert.NoError(t, err)
-	assert.Equal(t, int64(25), version)
+	assert.Equal(t, int64(26), version)
 
 	assert.NoError(t, Rollback(pgDB, nil))
database/migrate/migrations/00026_add_challenge_digest.sql (new file, 15 lines)
@@ -0,0 +1,15 @@
+-- +goose Up
+-- +goose StatementBegin
+
+ALTER TABLE batch
+ADD COLUMN challenge_digest VARCHAR DEFAULT '';
+
+-- +goose StatementEnd
+
+-- +goose Down
+-- +goose StatementBegin
+
+ALTER TABLE IF EXISTS batch
+DROP COLUMN IF EXISTS challenge_digest;
+
+-- +goose StatementEnd
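As an aside on how migrations like this get exercised: the annotations are goose directives, and pressly/goose can apply them and report the resulting schema version directly (26 after this file, which is what the updated tests assert via Current(pgDB)). The sketch below is illustrative only; the DSN, the lib/pq driver choice, and the embedded migrations/ layout are assumptions, not the repo's actual wiring:

package main

import (
	"database/sql"
	"embed"
	"fmt"
	"log"

	_ "github.com/lib/pq"
	"github.com/pressly/goose/v3"
)

//go:embed migrations/*.sql
var embedMigrations embed.FS // assumes the .sql files sit next to this package

func main() {
	// Placeholder DSN; point this at the real database.
	db, err := sql.Open("postgres", "postgres://localhost:5432/scroll?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}

	// Apply every pending migration, including 00026_add_challenge_digest.sql.
	goose.SetBaseFS(embedMigrations)
	if err := goose.Up(db, "migrations"); err != nil {
		log.Fatal(err)
	}

	// goose tracks the applied version in its version table.
	version, err := goose.GetDBVersion(db)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("schema version:", version) // 26 after this change
}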
@@ -1357,13 +1357,12 @@ github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b h1:5H6V6yba
 github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b/go.mod h1:48uxaqVgpD8ulH8p+nrBtfeLHZ9tX82bVVdPNkW3rPE=
 github.com/scroll-tech/da-codec v0.1.3-0.20250227072756-a1482833595f h1:YYbhuUwjowqI4oyXtECRofck7Fyj18e1tcRjuQlZpJE=
 github.com/scroll-tech/da-codec v0.1.3-0.20250227072756-a1482833595f/go.mod h1:xECEHZLVzbdUn+tNbRJhRIjLGTOTmnFQuTgUTeVLX58=
+github.com/scroll-tech/da-codec v0.1.3-0.20250327153440-cd3e5728df9c/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250225152658-bcfdb48dd939/go.mod h1:AgU8JJxC7+nfs7R7ma35AU7dMAGW7wCw3dRZRefIKyQ=
-github.com/scroll-tech/go-ethereum v1.10.14-0.20250305084331-57148478e950 h1:qfOaRflvH1vtnFWloB7BveKlP/VqYgMqLJ6e9TlBJ/8=
-github.com/scroll-tech/go-ethereum v1.10.14-0.20250305084331-57148478e950/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ=
 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
 github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
@@ -2,8 +2,6 @@ module scroll-tech/rollup
 
 go 1.22
 
-toolchain go1.22.2
-
 require (
 	github.com/agiledragon/gomonkey/v2 v2.12.0
 	github.com/consensys/gnark-crypto v0.16.0
@@ -13,7 +11,7 @@ require (
 	github.com/holiman/uint256 v1.3.2
 	github.com/mitchellh/mapstructure v1.5.0
 	github.com/prometheus/client_golang v1.16.0
-	github.com/scroll-tech/da-codec v0.1.3-0.20250313030021-a8341d04bc4e
+	github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493
 	github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601
 	github.com/smartystreets/goconvey v1.8.0
 	github.com/spf13/viper v1.19.0
@@ -249,8 +249,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
 github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
 github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
 github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
-github.com/scroll-tech/da-codec v0.1.3-0.20250313030021-a8341d04bc4e h1:0IkSVltsMrKCprOOfQyJsLeqhPEuA0sTp41pZBpDeDk=
-github.com/scroll-tech/da-codec v0.1.3-0.20250313030021-a8341d04bc4e/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
+github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493 h1:Ioc01J0WEMxuwFvEPGJeBKXdf2KY4Yc3XbFky/IxLlI=
+github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 h1:NEsjCG6uSvLRBlsP3+x6PL1kM+Ojs3g8UGotIPgJSz8=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ=
 github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
@@ -39,6 +39,7 @@ type Batch struct {
 	PostL1MessageQueueHash string `json:"post_l1_message_queue_hash" gorm:"column:post_l1_message_queue_hash"`
 	EnableCompress         bool   `json:"enable_compress" gorm:"column:enable_compress"` // use for debug
 	BlobBytes              []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
+	ChallengeDigest        string `json:"challenge_digest" gorm:"column:challenge_digest"`
 
 	// proof
 	ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -305,6 +306,7 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecVer
 		PostL1MessageQueueHash: batch.PostL1MessageQueueHash.Hex(),
 		EnableCompress:         enableCompress,
 		BlobBytes:              batchMeta.BlobBytes,
+		ChallengeDigest:        batchMeta.ChallengeDigest.Hex(),
 		ChunkProofsStatus:      int16(types.ChunkProofsStatusPending),
 		ProvingStatus:          int16(types.ProvingTaskUnassigned),
 		RollupStatus:           int16(types.RollupPending),
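For a sense of how the new column is consumed, a hedged gorm sketch follows. The Batch model here is a trimmed, hypothetical mirror of the repo's orm.Batch, and the DSN and batch hash are placeholders:

package main

import (
	"fmt"
	"log"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

// Minimal mirror of the fields we need from the batch table; the real
// model lives in the repo's orm package.
type Batch struct {
	Hash            string `gorm:"column:hash"`
	ChallengeDigest string `gorm:"column:challenge_digest"`
}

func (Batch) TableName() string { return "batch" }

func main() {
	dsn := "host=localhost user=postgres dbname=scroll sslmode=disable" // placeholder
	db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// After migration 00026, challenge_digest is selectable like any column.
	var b Batch
	if err := db.Where("hash = ?", "0x...").First(&b).Error; err != nil {
		log.Fatal(err)
	}
	fmt.Println(b.Hash, b.ChallengeDigest)
}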
@@ -164,6 +164,7 @@ type BatchMetadata struct {
 	StartChunkHash common.Hash
 	EndChunkHash   common.Hash
 	BlobBytes      []byte
+	ChallengeDigest common.Hash
 }
 
 // GetBatchMetadata retrieves the metadata of a batch.
@@ -179,10 +180,11 @@ func GetBatchMetadata(batch *encoding.Batch, codecVersion encoding.CodecVersion)
 	}
 
 	batchMeta := &BatchMetadata{
 		BatchHash:     daBatch.Hash(),
 		BatchDataHash: daBatch.DataHash(),
 		BatchBytes:    daBatch.Encode(),
 		BlobBytes:     daBatch.BlobBytes(),
+		ChallengeDigest: daBatch.ChallengeDigest(),
 	}
 
 	batchMeta.BatchBlobDataProof, err = daBatch.BlobDataProofForPointEvaluation()
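One detail worth noting across the last two diffs: BatchMetadata holds the digest as a common.Hash, while the batch row persists it as a hex string via .Hex(). A tiny round-trip sketch of that conversion (the digest value is a made-up placeholder):

package main

import (
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"
)

func main() {
	// The digest is computed as a common.Hash and persisted as a hex string
	// (see ChallengeDigest: batchMeta.ChallengeDigest.Hex() above).
	digest := common.HexToHash("0x5c1ed5d4f3b8a8f1d7caf0dca3d67b6a2d9e12a5f0e4cbeaf2b1d0c9e8a7f6d5")
	stored := digest.Hex() // what lands in batch.challenge_digest

	// Reading it back (e.g. for a prover task) is the inverse conversion.
	recovered := common.HexToHash(stored)
	fmt.Println(stored == recovered.Hex()) // true
}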
zkvm-prover/Cargo.lock (generated, 2023 lines changed; diff suppressed because it is too large)
@@ -18,13 +18,13 @@ serde = { version = "1.0.198", features = ["derive"] }
 serde_json = "1.0.116"
 futures = "0.3.30"
 
-scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.1.0-rc.6" }
+scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.2.0", package = "scroll-zkvm-prover" }
 ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
 ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
-scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "af95d2a", features = [
+scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", branch = "main", features = [
     "openvm",
 ] }
-sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-v2", features = [
+sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = [
     "scroll",
 ] }
 base64 = "0.13.1"
@@ -45,3 +45,34 @@ clap = { version = "4.5", features = ["derive"] }
 ctor = "0.2.8"
 url = "2.5.4"
 serde_bytes = "0.11.15"
+
+[patch."https://github.com/openvm-org/stark-backend.git"]
+openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
+openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
+
+[patch."https://github.com/Plonky3/Plonky3.git"]
+p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
+    "nightly-features",
+], tag = "v0.1.0" }
+p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
+p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
+p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
@@ -23,8 +23,8 @@
         "db_path": "unique-db-path-for-prover-1"
     },
     "circuits": {
-        "euclid": {
-            "hard_fork_name": "euclid",
+        "euclidV2": {
+            "hard_fork_name": "euclidV2",
             "workspace_path": "/home/ubuntu/prover-workdir"
         }
     }
@@ -10,7 +10,7 @@ use scroll_proving_sdk::{
 };
 
 #[derive(Parser, Debug)]
-#[clap(disable_version_flag = true)]
+#[command(disable_version_flag = true)]
 struct Args {
     /// Path of config file
     #[arg(long = "config", default_value = "conf/config.json")]
@@ -1,4 +1,6 @@
-use crate::zk_circuits_handler::{euclid::EuclidHandler, CircuitsHandler};
+use crate::zk_circuits_handler::{
+    euclid::EuclidHandler, euclidV2::EuclidV2Handler, CircuitsHandler,
+};
 use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use scroll_proving_sdk::{
@@ -181,9 +183,14 @@ impl LocalProver {
         // coordinator
         let config = self.config.circuits.get(hard_fork_name).unwrap();
 
-        Arc::new(match hard_fork_name {
-            "euclid" => Arc::new(Mutex::new(EuclidHandler::new(&config.workspace_path))),
+        match hard_fork_name {
+            "euclid" => Arc::new(Arc::new(Mutex::new(EuclidHandler::new(
+                &config.workspace_path,
+            )))) as Arc<dyn CircuitsHandler>,
+            "euclidV2" => Arc::new(Arc::new(Mutex::new(EuclidV2Handler::new(
+                &config.workspace_path,
+            )))) as Arc<dyn CircuitsHandler>,
             _ => unreachable!(),
-        }) as Arc<dyn CircuitsHandler>
+        }
     }
 }
@@ -1,5 +1,8 @@
 pub mod euclid;
+
+#[allow(non_snake_case)]
+pub mod euclidV2;
 
 use anyhow::Result;
 use async_trait::async_trait;
 use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
@@ -4,37 +4,95 @@ use super::CircuitsHandler;
 use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
-use scroll_zkvm_prover::{
+use scroll_zkvm_prover_euclid::{
     task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
-    BatchProver, BundleProver, ChunkProver,
+    BatchProver, BundleProverEuclidV1, ChunkProver, ProverConfig,
 };
 use tokio::sync::Mutex;
 
 pub struct EuclidHandler {
     chunk_prover: ChunkProver,
     batch_prover: BatchProver,
-    bundle_prover: BundleProver,
+    bundle_prover: BundleProverEuclidV1,
+}
+
+#[derive(Clone, Copy)]
+pub(crate) enum Phase {
+    EuclidV1,
+    EuclidV2,
+}
+
+impl Phase {
+    pub fn as_str(&self) -> &str {
+        match self {
+            Phase::EuclidV1 => "euclidv1",
+            Phase::EuclidV2 => "euclidv2",
+        }
+    }
+
+    pub fn phase_spec_chunk(&self, workspace_path: &Path) -> ProverConfig {
+        let dir_cache = Some(workspace_path.join("cache"));
+        let path_app_exe = workspace_path.join("chunk/app.vmexe");
+        let path_app_config = workspace_path.join("chunk/openvm.toml");
+        let segment_len = Some((1 << 22) - 100);
+        ProverConfig {
+            dir_cache,
+            path_app_config,
+            path_app_exe,
+            segment_len,
+            ..Default::default()
+        }
+    }
+
+    pub fn phase_spec_batch(&self, workspace_path: &Path) -> ProverConfig {
+        let dir_cache = Some(workspace_path.join("cache"));
+        let path_app_exe = workspace_path.join("batch/app.vmexe");
+        let path_app_config = workspace_path.join("batch/openvm.toml");
+        let segment_len = Some((1 << 22) - 100);
+        ProverConfig {
+            dir_cache,
+            path_app_config,
+            path_app_exe,
+            segment_len,
+            ..Default::default()
+        }
+    }
+
+    pub fn phase_spec_bundle(&self, workspace_path: &Path) -> ProverConfig {
+        let dir_cache = Some(workspace_path.join("cache"));
+        let path_app_config = workspace_path.join("bundle/openvm.toml");
+        let segment_len = Some((1 << 22) - 100);
+        match self {
+            Phase::EuclidV1 => ProverConfig {
+                dir_cache,
+                path_app_config,
+                segment_len,
+                path_app_exe: workspace_path.join("bundle/app_euclidv1.vmexe"),
+                ..Default::default()
+            },
+            Phase::EuclidV2 => ProverConfig {
+                dir_cache,
+                path_app_config,
+                segment_len,
+                path_app_exe: workspace_path.join("bundle/app.vmexe"),
+                ..Default::default()
+            },
+        }
+    }
 }
 
 unsafe impl Send for EuclidHandler {}
 
 impl EuclidHandler {
     pub fn new(workspace_path: &str) -> Self {
+        let p = Phase::EuclidV1;
         let workspace_path = Path::new(workspace_path);
-        let cache_dir = workspace_path.join("cache");
-        let chunk_exe = workspace_path.join("chunk/app.vmexe");
-        let chunk_app_config = workspace_path.join("chunk/openvm.toml");
-        let chunk_prover = ChunkProver::setup(chunk_exe, chunk_app_config, Some(cache_dir.clone()))
+        let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
             .expect("Failed to setup chunk prover");
 
-        let batch_exe = workspace_path.join("batch/app.vmexe");
-        let batch_app_config = workspace_path.join("batch/openvm.toml");
-        let batch_prover = BatchProver::setup(batch_exe, batch_app_config, Some(cache_dir.clone()))
+        let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
            .expect("Failed to setup batch prover");
 
-        let bundle_exe = workspace_path.join("bundle/app.vmexe");
-        let bundle_app_config = workspace_path.join("bundle/openvm.toml");
-        let bundle_prover = BundleProver::setup(bundle_exe, bundle_app_config, Some(cache_dir))
+        let bundle_prover = BundleProverEuclidV1::setup(p.phase_spec_bundle(workspace_path))
             .expect("Failed to setup bundle prover");
 
         Self {
@@ -59,16 +117,8 @@ impl CircuitsHandler for Arc<Mutex<EuclidHandler>> {
     async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
         match prove_request.proof_type {
             ProofType::Chunk => {
-                let witnesses: Vec<sbv_primitives::types::BlockWitness> =
-                    serde_json::from_str(&prove_request.input)?;
-
-                let proof = self
-                    .try_lock()
-                    .unwrap()
-                    .chunk_prover
-                    .gen_proof(&ChunkProvingTask {
-                        block_witnesses: witnesses,
-                    })?;
+                let task: ChunkProvingTask = serde_json::from_str(&prove_request.input)?;
+                let proof = self.try_lock().unwrap().chunk_prover.gen_proof(&task)?;
 
                 Ok(serde_json::to_string(&proof)?)
             }
zkvm-prover/src/zk_circuits_handler/euclidV2.rs (new file, 79 lines)
@@ -0,0 +1,79 @@
+use std::{path::Path, sync::Arc};
+
+use super::{euclid::Phase, CircuitsHandler};
+use anyhow::{anyhow, Result};
+use async_trait::async_trait;
+use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
+use scroll_zkvm_prover_euclid::{
+    task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
+    BatchProver, BundleProverEuclidV2, ChunkProver,
+};
+use tokio::sync::Mutex;
+pub struct EuclidV2Handler {
+    chunk_prover: ChunkProver,
+    batch_prover: BatchProver,
+    bundle_prover: BundleProverEuclidV2,
+}
+
+unsafe impl Send for EuclidV2Handler {}
+
+impl EuclidV2Handler {
+    pub fn new(workspace_path: &str) -> Self {
+        let p = Phase::EuclidV2;
+        let workspace_path = Path::new(workspace_path);
+        let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
+            .expect("Failed to setup chunk prover");
+
+        let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
+            .expect("Failed to setup batch prover");
+
+        let bundle_prover = BundleProverEuclidV2::setup(p.phase_spec_bundle(workspace_path))
+            .expect("Failed to setup bundle prover");
+
+        Self {
+            chunk_prover,
+            batch_prover,
+            bundle_prover,
+        }
+    }
+}
+
+#[async_trait]
+impl CircuitsHandler for Arc<Mutex<EuclidV2Handler>> {
+    async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
+        Some(match task_type {
+            ProofType::Chunk => self.try_lock().unwrap().chunk_prover.get_app_vk(),
+            ProofType::Batch => self.try_lock().unwrap().batch_prover.get_app_vk(),
+            ProofType::Bundle => self.try_lock().unwrap().bundle_prover.get_app_vk(),
+            _ => unreachable!("Unsupported proof type"),
+        })
+    }
+
+    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
+        match prove_request.proof_type {
+            ProofType::Chunk => {
+                let task: ChunkProvingTask = serde_json::from_str(&prove_request.input)?;
+                let proof = self.try_lock().unwrap().chunk_prover.gen_proof(&task)?;
+
+                Ok(serde_json::to_string(&proof)?)
+            }
+            ProofType::Batch => {
+                let task: BatchProvingTask = serde_json::from_str(&prove_request.input)?;
+                let proof = self.try_lock().unwrap().batch_prover.gen_proof(&task)?;
+
+                Ok(serde_json::to_string(&proof)?)
+            }
+            ProofType::Bundle => {
+                let batch_proofs: BundleProvingTask = serde_json::from_str(&prove_request.input)?;
+                let proof = self
+                    .try_lock()
+                    .unwrap()
+                    .bundle_prover
+                    .gen_proof_evm(&batch_proofs)?;
+
+                Ok(serde_json::to_string(&proof)?)
+            }
+            _ => Err(anyhow!("Unsupported proof type")),
+        }
+    }
+}