Compare commits

...

24 Commits

Author SHA1 Message Date
colinlyguo   07213fbfea  update prover interfaces  2025-03-20 23:49:57 +08:00
colinlyguo   56cfbdb7c7  update to rc.10  2025-03-20 21:17:25 +08:00
Morty        0b246ad8be  update scroll-proving-sdk commit  2025-03-20 19:18:08 +08:00
kunxian xia  a9201dfc32  pin plonky3-gpu v0.1.0 and openvm-stark-gpu v1.0.0-rc.2  2025-03-20 18:30:32 +08:00
colinlyguo   f225274bbc  change chain id ground truth  2025-03-20 01:29:44 +08:00
colinlyguo   838e642cc5  fix comments  2025-03-19 22:48:20 +08:00
colinlyguo   b8b7ece600  make challenge digest optional  2025-03-19 22:43:11 +08:00
kunxian xia  f62dd755af  upgrade gpu crates  2025-03-19 19:22:33 +08:00
colinlyguo   a65359a5e7  remove omitempty  2025-03-19 19:11:50 +08:00
colinlyguo   d3196780f9  add the optional bundle info  2025-03-19 18:49:10 +08:00
colinlyguo   0364057a43  remove an outdated comment  2025-03-19 18:32:23 +08:00
colinlyguo   e86951ec09  update circuit to v0.1.0-rc.9  2025-03-19 18:29:06 +08:00
noelwei      819ded9389  resume dep to rc.8 and skip ChallengeDigest field (Signed-off-by: noelwei <fan@scroll.io>)  2025-03-19 08:44:56 +09:00
colinlyguo   6c101f870d  revert  2025-03-18 21:30:07 +08:00
colinlyguo   bea228c58f  update proof length  2025-03-18 21:04:37 +08:00
colinlyguo   3571c596ab  fix CI  2025-03-18 18:53:48 +08:00
colin        7e69b622a2  Merge branch 'develop' into feat-bump-zkvm-prover  2025-03-18 18:39:02 +08:00
colinlyguo   00d932ed99  migrate interfaces fixes  2025-03-18 18:38:26 +08:00
colinlyguo   2d64f5588a  update dependencies  2025-03-18 14:07:55 +08:00
xkx          445fabe98a  feat: euclid v1 GPU prover (#1608) (Co-authored-by: Morty <70688412+yiweichi@users.noreply.github.com>, colin <102356659+colinlyguo@users.noreply.github.com>)  2025-03-17 15:30:11 +08:00
noelwei      b56d35b24a  fix compalition error (Signed-off-by: noelwei <fan@scroll.io>)  2025-03-17 15:28:23 +09:00
kunxian xia  3e8f7a69da  change sbv-primitives dependency  2025-03-17 14:12:46 +08:00
kunxian xia  ad32dd6fb4  bump zkvm-prover from 0.1.0-rc.7 to 0.1.0-rc.8  2025-03-17 14:05:41 +08:00
colinlyguo   27b0dabd02  feat: bump zkvm prover  2025-03-16 22:16:31 +08:00
9 changed files with 2231 additions and 1142 deletions

File diff suppressed because it is too large.


@@ -14,8 +14,8 @@ ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }
[dependencies]
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-verifier" }
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.10", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.10", package = "scroll-zkvm-verifier" }
base64 = "0.13.0"
env_logger = "0.9.0"


@@ -4,12 +4,14 @@ import (
"encoding/json"
"errors"
"fmt"
"math/big"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
)
const (
euclidFork = "euclid"
EuclidFork = "euclid"
)
// ProofType represents the type of task.
@@ -41,36 +43,94 @@ const (
// ChunkTaskDetail is a type containing ChunkTask detail.
type ChunkTaskDetail struct {
BlockHashes []common.Hash `json:"block_hashes"`
BlockHashes []common.Hash `json:"block_hashes"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
}
// Byte48 is a hex-encoded big integer with a fixed length of 48 bytes
type Byte48 struct {
hexutil.Big
}
func (e Byte48) MarshalText() ([]byte, error) {
i := e.ToInt()
// override the big-integer encoding
if sign := i.Sign(); sign < 0 {
// sanity check
return nil, fmt.Errorf("Byte48 must be positive integer")
} else {
s := i.Text(16)
if len(s) > 96 {
return nil, fmt.Errorf("integer Exceed 384bit")
}
return []byte(fmt.Sprintf("0x%0*s", 96, s)), nil
}
}
func isString(input []byte) bool {
return len(input) >= 2 && input[0] == '"' && input[len(input)-1] == '"'
}
// hexutil.Big has a limitation of 256 bits, so we have to override it ...
func (e *Byte48) UnmarshalJSON(input []byte) error {
if !isString(input) {
return fmt.Errorf("not hex string")
}
b, err := hexutil.Decode(string(input[1 : len(input)-1]))
if err != nil {
return err
}
if len(b) != 48 {
return fmt.Errorf("not a 48 bytes hex string: %d", len(b))
}
var dec big.Int
dec.SetBytes(b)
*e = Byte48{(hexutil.Big)(dec)}
return nil
}
// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
KzgProof []byte `json:"kzg_proof"`
KzgCommitment []byte `json:"kzg_commitment"`
Challenge common.Hash `json:"challenge"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
KzgProof Byte48 `json:"kzg_proof,omitempty"`
KzgCommitment Byte48 `json:"kzg_commitment,omitempty"`
ChallengeDigest *common.Hash `json:"challenge_digest,omitempty"`
}
// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
BatchProofs []BatchProof `json:"batch_proofs"`
BatchProofs []BatchProof `json:"batch_proofs"`
BundleInfo *OpenVMBundleInfo `json:"bundle_info,omitempty"`
}
// ChunkInfo is for calculating pi_hash for chunk
type ChunkInfo struct {
ChainID uint64 `json:"chain_id"`
PrevStateRoot common.Hash `json:"prev_state_root"`
PostStateRoot common.Hash `json:"post_state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
TxBytesHash common.Hash `json:"tx_data_digest"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
ChainID uint64 `json:"chain_id"`
PrevStateRoot common.Hash `json:"prev_state_root"`
PostStateRoot common.Hash `json:"post_state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
TxBytesHash common.Hash `json:"tx_data_digest"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
TxDataLength uint64 `json:"tx_data_length"`
InitialBlockNumber uint64 `json:"initial_block_number"`
BlockCtxs []BlockContextV2 `json:"block_ctxs"`
}
// BlockContextV2 is the block context for euclid v2
type BlockContextV2 struct {
Timestamp uint64 `json:"timestamp"`
BaseFee hexutil.Big `json:"base_fee"`
GasLimit uint64 `json:"gas_limit"`
NumTxs uint16 `json:"num_txs"`
NumL1Msgs uint16 `json:"num_l1_msgs"`
}
// SubCircuitRowUsage tracing info added in v0.11.0rc8
@@ -87,7 +147,7 @@ type ChunkProof interface {
// NewChunkProof creates a new ChunkProof instance.
func NewChunkProof(hardForkName string) ChunkProof {
switch hardForkName {
case euclidFork:
case EuclidFork:
return &OpenVMChunkProof{}
default:
return &Halo2ChunkProof{}
@@ -121,7 +181,7 @@ type BatchProof interface {
// NewBatchProof creates a new BatchProof instance.
func NewBatchProof(hardForkName string) BatchProof {
switch hardForkName {
case euclidFork:
case EuclidFork:
return &OpenVMBatchProof{}
default:
return &Halo2BatchProof{}
@@ -178,7 +238,7 @@ type BundleProof interface {
// NewBundleProof creates a new BundleProof instance.
func NewBundleProof(hardForkName string) BundleProof {
switch hardForkName {
case euclidFork:
case EuclidFork:
return &OpenVMBundleProof{}
default:
return &Halo2BundleProof{}
@@ -258,12 +318,14 @@ func (p *OpenVMChunkProof) Proof() []byte {
// OpenVMBatchInfo is for calculating pi_hash for batch header
type OpenVMBatchInfo struct {
ParentBatchHash common.Hash `json:"parent_batch_hash"`
ParentStateRoot common.Hash `json:"parent_state_root"`
StateRoot common.Hash `json:"state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
BatchHash common.Hash `json:"batch_hash"`
ChainID uint64 `json:"chain_id"`
ParentBatchHash common.Hash `json:"parent_batch_hash"`
ParentStateRoot common.Hash `json:"parent_state_root"`
StateRoot common.Hash `json:"state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
BatchHash common.Hash `json:"batch_hash"`
ChainID uint64 `json:"chain_id"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
}
// BatchProof includes the proof info that are required for batch verification and rollup.
@@ -323,6 +385,7 @@ type OpenVMBundleInfo struct {
NumBatches uint32 `json:"num_batches"`
PrevBatchHash common.Hash `json:"prev_batch_hash"`
BatchHash common.Hash `json:"batch_hash"`
MsgQueueHash common.Hash `json:"msg_queue_hash"`
}
// OpenVMBundleProof includes the proof info that are required for verification of a bundle of batch proofs.
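
For context, a minimal hypothetical sketch of a Byte48 round-trip (the function name and sample bytes are illustrative, not from the diff). It assumes the Byte48 type from the diff above is in scope in the same message package. KZG commitments and proofs are 48-byte (384-bit) values, which is why plain hexutil.Big, whose unmarshalling rejects integers above 256 bits, is not enough and the type pads its text form to a fixed 96 hex characters.

package message

import (
	"encoding/json"
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common/hexutil"
)

// ExampleByte48RoundTrip is an illustrative sketch; it assumes the Byte48
// type shown in the diff above is defined in this package.
func ExampleByte48RoundTrip() {
	// A 48-byte value such as a KZG commitment: well above hexutil.Big's 256-bit unmarshal limit.
	raw := make([]byte, 48)
	raw[0], raw[47] = 0x0a, 0xff

	v := Byte48{hexutil.Big(*new(big.Int).SetBytes(raw))}

	enc, err := json.Marshal(v) // MarshalText pads the value to exactly 96 hex characters
	if err != nil {
		panic(err)
	}
	fmt.Println(len(enc)) // 100: two quotes + "0x" + 96 hex characters

	var dec Byte48
	if err := json.Unmarshal(enc, &dec); err != nil { // UnmarshalJSON rejects anything that is not 48 bytes
		panic(err)
	}
	fmt.Println(dec.ToInt().Cmp(v.ToInt()) == 0) // true: lossless round-trip
}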


@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"fmt"
"math/big"
"time"
"github.com/gin-gonic/gin"
@@ -11,6 +12,7 @@ import (
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -211,13 +213,19 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
WithdrawRoot: common.HexToHash(chunk.WithdrawRoot),
DataHash: common.HexToHash(chunk.Hash),
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
PostMsgQueueHash: common.HexToHash(chunk.PostL1MessageQueueHash),
IsPadding: false,
}
if haloProot, ok := proof.(*message.Halo2ChunkProof); ok {
if haloProot.ChunkInfo != nil {
chunkInfo.TxBytes = haloProot.ChunkInfo.TxBytes
if halo2Proof, ok := proof.(*message.Halo2ChunkProof); ok {
if halo2Proof.ChunkInfo != nil {
chunkInfo.TxBytes = halo2Proof.ChunkInfo.TxBytes
}
}
if openvmProof, ok := proof.(*message.OpenVMChunkProof); ok {
chunkInfo.InitialBlockNumber = openvmProof.MetaData.ChunkInfo.InitialBlockNumber
chunkInfo.BlockCtxs = openvmProof.MetaData.ChunkInfo.BlockCtxs
chunkInfo.TxDataLength = openvmProof.MetaData.ChunkInfo.TxDataLength
}
chunkInfos = append(chunkInfos, &chunkInfo)
}
@@ -280,8 +288,12 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*
// | z | y | kzg_commitment | kzg_proof |
// |---------|---------|----------------|-----------|
// | bytes32 | bytes32 | bytes48 | bytes48 |
taskDetail.KzgProof = dbBatch.BlobDataProof[112:160]
taskDetail.KzgCommitment = dbBatch.BlobDataProof[64:112]
taskDetail.Challenge = common.Hash(dbBatch.BlobDataProof[0:32])
taskDetail.KzgProof = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
taskDetail.KzgCommitment = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}
// FIXME: Challenge = ChallengeDigest % BLS_MODULUS; we need to recover the original ChallengeDigest.
// Simply omit the field for now to skip the sanity check on the prover side.
// Resume it later (once the FIXME above is addressed or the prover side has relaxed its check).
// taskDetail.ChallengeDigest = new(common.Hash)
// *taskDetail.ChallengeDigest = common.BytesToHash(dbBatch.BlobDataProof[0:32])
return taskDetail, nil
}
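
For context, a minimal hypothetical sketch of the 160-byte BlobDataProof layout referenced above (the helper name splitBlobDataProof is illustrative, not from the diff). It only mirrors the offsets used in getBatchTaskDetail: z and y are 32 bytes each, followed by a 48-byte KZG commitment and a 48-byte KZG proof, which the diff wraps into message.Byte48; the original challenge digest cannot be recovered from z (Challenge = ChallengeDigest % BLS_MODULUS), which is why the ChallengeDigest field is left unset for now.

package main

import (
	"fmt"
	"math/big"
)

// splitBlobDataProof is an illustrative helper mirroring the offsets used in
// getBatchTaskDetail above:
// | z (challenge) |    y    | kzg_commitment | kzg_proof |
// |    bytes32    | bytes32 |     bytes48    |  bytes48  |
func splitBlobDataProof(proof []byte) (z, y []byte, commitment, kzgProof *big.Int, err error) {
	if len(proof) != 160 {
		return nil, nil, nil, nil, fmt.Errorf("unexpected blob data proof length: %d", len(proof))
	}
	z = proof[0:32]
	y = proof[32:64]
	commitment = new(big.Int).SetBytes(proof[64:112]) // becomes taskDetail.KzgCommitment (Byte48)
	kzgProof = new(big.Int).SetBytes(proof[112:160])  // becomes taskDetail.KzgProof (Byte48)
	return z, y, commitment, kzgProof, nil
}

func main() {
	proof := make([]byte, 160) // placeholder bytes; a real proof comes from dbBatch.BlobDataProof
	z, y, c, p, err := splitBlobDataProof(proof)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(z), len(y), c.BitLen(), p.BitLen()) // 32 32 0 0 for the all-zero placeholder
}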


@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -194,6 +195,11 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
return nil, fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", task.TaskID)
}
parentBatch, err := bp.batchOrm.GetBatchByHash(ctx, batches[0].ParentBatchHash)
if err != nil {
return nil, fmt.Errorf("failed to get parent batch for batch task id:%s err:%w", task.TaskID, err)
}
var batchProofs []message.BatchProof
for _, batch := range batches {
proof := message.NewBatchProof(hardForkName)
@@ -207,6 +213,17 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
BatchProofs: batchProofs,
}
taskDetail.BundleInfo = &message.OpenVMBundleInfo{
ChainID: bp.cfg.L2.ChainID,
PrevStateRoot: common.HexToHash(parentBatch.StateRoot),
PostStateRoot: common.HexToHash(batches[len(batches)-1].StateRoot),
WithdrawRoot: common.HexToHash(batches[len(batches)-1].WithdrawRoot),
NumBatches: uint32(len(batches)),
PrevBatchHash: common.HexToHash(batches[0].ParentBatchHash),
BatchHash: common.HexToHash(batches[len(batches)-1].Hash),
// MsgQueueHash is omittable here, because it is introduced in phase-2.
}
batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)


@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -162,7 +163,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, hardForkName)
taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, chunkTask, hardForkName)
if err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("format prover task failure", "task_id", chunkTask.Hash, "err", err)
@@ -179,7 +180,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return taskMsg, nil
}
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, chunk *orm.Chunk, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
// Get block hashes.
blockHashes, dbErr := cp.blockOrm.GetL2BlockHashesByChunkHash(ctx, task.TaskID)
if dbErr != nil || len(blockHashes) == 0 {
@@ -187,7 +188,8 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove
}
taskDetail := message.ChunkTaskDetail{
BlockHashes: blockHashes,
BlockHashes: blockHashes,
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
}
blockHashesBytes, err := json.Marshal(taskDetail)
if err != nil {

zkvm-prover/Cargo.lock (generated, 2539 changed lines)

File diff suppressed because it is too large.


@@ -18,13 +18,13 @@ serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.116"
futures = "0.3.30"
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.1.0-rc.6" }
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.1.0-rc.10" }
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "af95d2a", features = [
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "02c2cac", features = [
"openvm",
] }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-v2", features = [
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = [
"scroll",
] }
base64 = "0.13.1"
@@ -45,3 +45,34 @@ clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"
url = "2.5.4"
serde_bytes = "0.11.15"
[patch."https://github.com/openvm-org/stark-backend.git"]
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", tag = "v1.0.0-rc.2", features = ["gpu"] }
openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", tag = "v1.0.0-rc.2", features = ["gpu"] }
[patch."https://github.com/Plonky3/Plonky3.git"]
p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
"nightly-features",
], tag = "v0.1.0" }
p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.0" }


@@ -6,7 +6,7 @@ use async_trait::async_trait;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover::{
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
BatchProver, BundleProver, ChunkProver,
BatchProver, BundleProver, ChunkProver, ProverConfig,
};
use tokio::sync::Mutex;
pub struct EuclidHandler {
@@ -24,18 +24,39 @@ impl EuclidHandler {
let cache_dir = workspace_path.join("cache");
let chunk_exe = workspace_path.join("chunk/app.vmexe");
let chunk_app_config = workspace_path.join("chunk/openvm.toml");
let chunk_prover = ChunkProver::setup(chunk_exe, chunk_app_config, Some(cache_dir.clone()))
.expect("Failed to setup chunk prover");
let chunk_prover_config = ProverConfig {
path_app_exe: chunk_exe,
path_app_config: chunk_app_config,
dir_cache: Some(cache_dir.clone()),
segment_len: Some((1 << 22) - 100),
..Default::default()
};
let chunk_prover =
ChunkProver::setup(chunk_prover_config).expect("Failed to setup chunk prover");
let batch_exe = workspace_path.join("batch/app.vmexe");
let batch_app_config = workspace_path.join("batch/openvm.toml");
let batch_prover = BatchProver::setup(batch_exe, batch_app_config, Some(cache_dir.clone()))
.expect("Failed to setup batch prover");
let batch_prover_config = ProverConfig {
path_app_exe: batch_exe,
path_app_config: batch_app_config,
dir_cache: Some(cache_dir.clone()),
segment_len: Some((1 << 22) - 100),
..Default::default()
};
let batch_prover =
BatchProver::setup(batch_prover_config).expect("Failed to setup batch prover");
let bundle_exe = workspace_path.join("bundle/app.vmexe");
let bundle_app_config = workspace_path.join("bundle/openvm.toml");
let bundle_prover = BundleProver::setup(bundle_exe, bundle_app_config, Some(cache_dir))
.expect("Failed to setup bundle prover");
let bundle_prover_config = ProverConfig {
path_app_exe: bundle_exe,
path_app_config: bundle_app_config,
dir_cache: Some(cache_dir.clone()),
segment_len: Some((1 << 22) - 100),
..Default::default()
};
let bundle_prover =
BundleProver::setup(bundle_prover_config).expect("Failed to setup bundle prover");
Self {
chunk_prover,
@@ -68,6 +89,7 @@ impl CircuitsHandler for Arc<Mutex<EuclidHandler>> {
.chunk_prover
.gen_proof(&ChunkProvingTask {
block_witnesses: witnesses,
prev_msg_queue_hash: Default::default(),
})?;
Ok(serde_json::to_string(&proof)?)