Compare commits

...

11 Commits

Author      SHA1        Message                                              Date
colinlyguo  28e54ac656  fix a typo and a clean up                            2025-07-03 00:43:05 +08:00
Ho          adac0404a0  fix: bundle verifier                                 2025-07-02 14:00:46 +09:00
Ho          1515cc5554  enable tracing in libzkp                             2025-07-02 08:59:36 +09:00
Ho          59b0f1134e  update config sample                                 2025-07-02 08:29:24 +09:00
Ho          9d3991b608  purge unused crates                                  2025-07-01 20:38:48 +09:00
Ho          f1d2415ef2  + enable dumping all stuffs from prover              2025-07-01 20:35:31 +09:00
                        + fix compatibility of universal task for old fork
Ho          ff7d663732  debugging                                            2025-07-01 17:33:48 +09:00
Ho          489a23b773  upgrade dumping util                                 2025-07-01 16:52:04 +09:00
Ho          ce1c58bafa  fix linking issue                                    2025-07-01 16:51:34 +09:00
Ho          0d8d2d82bf  purge unexpected file                                2025-07-01 15:02:17 +09:00
Ho          b5822ac1af  update dep                                           2025-07-01 14:43:53 +09:00
27 changed files with 1984 additions and 1191 deletions

Cargo.lock (generated, 2862 changed lines): file diff suppressed because it is too large.


@@ -17,22 +17,22 @@ repository = "https://github.com/scroll-tech/scroll"
version = "4.5.8"
[workspace.dependencies]
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "6078604", package = "scroll-zkvm-prover" }
scroll-zkvm-verifier-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "6078604", package = "scroll-zkvm-verifier" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "6078604" }
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "09e998f", package = "scroll-zkvm-prover" }
scroll-zkvm-verifier-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "09e998f", package = "scroll-zkvm-verifier" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "09e998f" }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = ["scroll"] }
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade" }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/upgrade", features = ["scroll"] }
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/upgrade" }
metrics = "0.23.0"
metrics-util = "0.17"
metrics-tracing-context = "0.16.0"
anyhow = "1.0"
alloy = { version = "0.11", default-features = false }
alloy-primitives = { version = "0.8", default-features = false }
alloy = { version = "1", default-features = false }
alloy-primitives = { version = "1.2", default-features = false, features = ["tiny-keccak"] }
# also use this to trigger "serde" feature for primitives
alloy-serde = { version = "0.8", default-features = false }
alloy-serde = { version = "1", default-features = false }
rkyv = "0.8"
serde = { version = "1", default-features = false, features = ["derive"] }
@@ -43,22 +43,25 @@ itertools = "0.14"
tiny-keccak = "2.0"
tracing = "0.1"
eyre = "0.6"
bincode_v1 = { version = "1.3", package = "bincode"}
snark-verifier-sdk = { version = "0.2.0", default-features = false, features = [
"loader_halo2",
"halo2-axiom",
"display",
] }
once_cell = "1.20"
base64 = "0.22"
#TODO: upgrade when Feynman
vm-zstd = { git = "https://github.com/scroll-tech/rust-zstd-decompressor.git", tag = "v0.1.1" }
[patch.crates-io]
alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v0.8.18-euclid-upgrade" }
ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-euclid-upgrade" }
revm = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-bytecode = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-context = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-context-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-database = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-database-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-handler = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-inspector = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-interpreter = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-precompile = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-primitives = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
revm-state = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v74" }
ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.15.0" }
alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v1.2.0" }
[profile.maxperf]
inherits = "release"


@@ -28,7 +28,7 @@ func verify(cCtx *cli.Context) error {
proofType = cCtx.Args().Get(1)
proofPath = cCtx.Args().Get(2)
}
log.Info("verify proof in: ", proofPath, "type", proofType, "forkName", forkName)
log.Info("verify proof", "in", proofPath, "type", proofType, "forkName", forkName)
// Load the content of the proof file
data, err := os.ReadFile(filepath.Clean(proofPath))
@@ -53,7 +53,7 @@ func verify(cCtx *cli.Context) error {
return fmt.Errorf("no vk loaded for fork %s", forkName)
}
if len(proof.Vk) != 0 {
if bytes.Equal(proof.Vk, vk) {
if !bytes.Equal(proof.Vk, vk) {
return fmt.Errorf("unmatch vk with expected: expected %s, get %s",
base64.StdEncoding.EncodeToString(vk),
base64.StdEncoding.EncodeToString(proof.Vk),
@@ -74,7 +74,7 @@ func verify(cCtx *cli.Context) error {
return fmt.Errorf("no vk loaded for fork %s", forkName)
}
if len(proof.Vk) != 0 {
if bytes.Equal(proof.Vk, vk) {
if !bytes.Equal(proof.Vk, vk) {
return fmt.Errorf("unmatch vk with expected: expected %s, get %s",
base64.StdEncoding.EncodeToString(vk),
base64.StdEncoding.EncodeToString(proof.Vk),
@@ -94,16 +94,7 @@ func verify(cCtx *cli.Context) error {
if !ok {
return fmt.Errorf("no vk loaded for fork %s", forkName)
}
if len(proof.Vk) != 0 {
if bytes.Equal(proof.Vk, vk) {
return fmt.Errorf("unmatch vk with expected: expected %s, get %s",
base64.StdEncoding.EncodeToString(vk),
base64.StdEncoding.EncodeToString(proof.Vk),
)
}
} else {
proof.Vk = vk
}
proof.Vk = vk
ret, err = vf.VerifyBundleProof(proof, forkName)
default:
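The substantive fix in this verify command: a vk embedded in a chunk or batch proof was previously rejected when it matched the expected key and accepted otherwise; the negated bytes.Equal makes a mismatch fail, and the bundle branch now simply overwrites proof.Vk with the expected key. A minimal, self-contained Go sketch of the corrected check (checkVk is a hypothetical helper introduced only for illustration; the real logic is inlined in verify above):

    package main

    import (
        "bytes"
        "encoding/base64"
        "fmt"
    )

    // checkVk mirrors the corrected logic: an embedded vk must match the
    // expected one, otherwise verification is rejected; an empty embedded vk
    // is filled in from the expected key.
    func checkVk(proofVk, expected []byte) ([]byte, error) {
        if len(proofVk) != 0 {
            if !bytes.Equal(proofVk, expected) { // was bytes.Equal before this fix
                return nil, fmt.Errorf("unmatch vk with expected: expected %s, get %s",
                    base64.StdEncoding.EncodeToString(expected),
                    base64.StdEncoding.EncodeToString(proofVk))
            }
            return proofVk, nil
        }
        return expected, nil
    }

    func main() {
        if _, err := checkVk([]byte("aa"), []byte("bb")); err != nil {
            fmt.Println(err) // mismatching keys are now reported as an error
        }
    }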


@@ -11,7 +11,12 @@
"verifiers": [{
"assets_path": "assets",
"fork_name": "euclidV2"
}]
}
,{
"assets_path": "assets",
"fork_name": "feynman"
}
]
}
},
"db": {


@@ -17,6 +17,10 @@ import (
"scroll-tech/common/types/message"
)
func init() {
C.init_tracing()
}
// Helper function to convert Go string to C string and handle cleanup
func goToCString(s string) *C.char {
return C.CString(s)
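With the new init(), merely importing the Go package that links libzkp installs the Rust tracing subscriber (its implementation appears further below); since the Rust side builds its filter with EnvFilter::from_env_lossy, verbosity can presumably be tuned through the usual RUST_LOG variable, with INFO as the default. A self-contained cgo sketch of the same import-time initialization pattern, with a stub C function standing in for libzkp's init_tracing:

    package main

    /*
    #include <stdio.h>
    static void init_tracing() { printf("tracing initialized\n"); }
    */
    import "C"

    // init runs when the package is loaded, so any binary that links this
    // package gets the tracing setup before main starts.
    func init() {
        C.init_tracing()
    }

    func main() {}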


@@ -8,6 +8,9 @@
#include <stddef.h> // For size_t
// Init log tracing
void init_tracing();
// Initialize the verifier with configuration
void init_verifier(char* config);


@@ -197,7 +197,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
taskMsg, metadata, err = bp.applyUniversal(taskMsg)
if err != nil {
bp.recoverActiveAttempts(ctx, batchTask)
log.Error("Generate universal prover task failure", "task_id", batchTask.Hash, "type", "batch")
log.Error("Generate universal prover task failure", "task_id", batchTask.Hash, "type", "batch", "err", err)
return nil, ErrCoordinatorInternalFailure
}
proverTask.Metadata = metadata


@@ -193,7 +193,7 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
taskMsg, metadata, err = bp.applyUniversal(taskMsg)
if err != nil {
bp.recoverActiveAttempts(ctx, bundleTask)
log.Error("Generate universal prover task failure", "task_id", bundleTask.Hash, "type", "bundle")
log.Error("Generate universal prover task failure", "task_id", bundleTask.Hash, "type", "bundle", "err", err)
return nil, ErrCoordinatorInternalFailure
}
// bundle proof require snark


@@ -191,7 +191,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
taskMsg, metadata, err = cp.applyUniversal(taskMsg)
if err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("Generate universal prover task failure", "task_id", chunkTask.Hash, "type", "chunk")
log.Error("Generate universal prover task failure", "task_id", chunkTask.Hash, "type", "chunk", "err", err)
return nil, ErrCoordinatorInternalFailure
}
proverTask.Metadata = metadata
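The batch, bundle and chunk assigners all gain the underlying error in their failure logs, and the verify command earlier drops a stray positional argument from log.Info. go-ethereum's log package expects alternating key/value pairs after the message; the standard library's log/slog follows the same convention, so a small illustrative equivalent (slog stands in here, it is not what the coordinator actually uses):

    package main

    import (
        "errors"
        "log/slog"
    )

    func main() {
        err := errors.New("universal task: unsupported fork")
        // Message first, then alternating key/value pairs; the fix adds the
        // "err" pair so the cause is no longer silently dropped.
        slog.Error("Generate universal prover task failure",
            "task_id", "0xabc123", "type", "chunk", "err", err)
    }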


@@ -117,19 +117,6 @@ func (v *Verifier) VerifyBundleProof(proof *message.OpenVMBundleProof, forkName
return libzkp.VerifyBundleProof(string(buf), forkName), nil
}
// func (v *Verifier) ReadVK(filePat string) (string, error) {
// f, err := os.Open(filepath.Clean(filePat))
// if err != nil {
// return "", err
// }
// byt, err := io.ReadAll(f)
// if err != nil {
// return "", err
// }
// return base64.StdEncoding.EncodeToString(byt), nil
// }
func (v *Verifier) loadOpenVMVks(cfg config.AssetConfig) error {
vkFileName := cfg.Vkfile


@@ -269,7 +269,7 @@ func (o *ProverTask) UpdateProverTaskProvingStatusAndFailureType(ctx context.Con
return nil
}
// UpdateProverTaskProvingStatusAndFailureType updates the proving_status of a specific ProverTask record.
// UpdateProverTaskAssignedTime updates the assigned_at time of a specific ProverTask record.
func (o *ProverTask) UpdateProverTaskAssignedTime(ctx context.Context, uuid uuid.UUID, t time.Time, dbTX ...*gorm.DB) error {
db := o.db
if len(dbTX) > 0 && dbTX[0] != nil {


@@ -76,7 +76,7 @@ impl RpcClientCore {
let client = ClientBuilder::default().layer(retry_layer).http(rpc);
Ok(Self {
provider: ProviderBuilder::<_, _, Network>::default().on_client(client),
provider: ProviderBuilder::<_, _, Network>::default().connect_client(client),
rt,
})
}
@@ -100,13 +100,13 @@ impl ChunkInterpreter for RpcClient<'_> {
block_hash: sbv_primitives::B256,
prev_witness: Option<&sbv_primitives::types::BlockWitness>,
) -> Result<sbv_primitives::types::BlockWitness> {
use alloy::network::primitives::BlockTransactionsKind;
use sbv_utils::{rpc::ProviderExt, witness::WitnessBuilder};
let chain_id = provider.get_chain_id().await?;
let block = provider
.get_block_by_hash(block_hash, BlockTransactionsKind::Full)
.get_block_by_hash(block_hash)
.full()
.await?
.ok_or_else(|| eyre::eyre!("Block not found"))?;


@@ -8,6 +8,7 @@ edition.workspace = true
scroll-zkvm-types.workspace = true
scroll-zkvm-verifier-euclid.workspace = true
alloy-primitives.workspace = true #depress the effect of "native-keccak"
sbv-primitives.workspace = true
base64.workspace = true
serde.workspace = true
@@ -19,5 +20,5 @@ eyre.workspace = true
git-version = "0.3.5"
serde_stacker = "0.1"
regex = "1.11"
c-kzg = { version = "1.0", features = ["serde"] }
c-kzg = { version = "2.0", features = ["serde"] }


@@ -214,11 +214,7 @@ impl<Metadata: ProofMetadata> PersistableProof for WrappedProof<Metadata> {
mod tests {
use base64::{prelude::BASE64_STANDARD, Engine};
use sbv_primitives::B256;
use scroll_zkvm_types::{
bundle::{BundleInfo, BundleInfoV1},
proof::EvmProof,
public_inputs::PublicInputs,
};
use scroll_zkvm_types::{bundle::BundleInfo, proof::EvmProof, public_inputs::ForkName};
use super::*;
@@ -245,7 +241,7 @@ mod tests {
fn test_dummy_proof() -> eyre::Result<()> {
// 1. Metadata
let metadata = {
let bundle_info: BundleInfoV1 = BundleInfo {
let bundle_info = BundleInfo {
chain_id: 12345,
num_batches: 12,
prev_state_root: B256::repeat_byte(1),
@@ -254,11 +250,10 @@ mod tests {
batch_hash: B256::repeat_byte(4),
withdraw_root: B256::repeat_byte(5),
msg_queue_hash: B256::repeat_byte(6),
}
.into();
let bundle_pi_hash = bundle_info.pi_hash();
};
let bundle_pi_hash = bundle_info.pi_hash(ForkName::EuclidV1);
BundleProofMetadata {
bundle_info: bundle_info.0,
bundle_info,
bundle_pi_hash,
}
};


@@ -4,8 +4,8 @@ use eyre::Result;
use sbv_primitives::{B256, U256};
use scroll_zkvm_types::{
batch::{
BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchInfo, BatchWitness, EnvelopeV6, EnvelopeV7,
PointEvalWitness, ReferenceHeader, ToArchievedWitness, N_BLOB_BYTES,
BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchInfo, BatchWitness, Envelope, EnvelopeV6,
EnvelopeV7, PointEvalWitness, ReferenceHeader, ToArchievedWitness, N_BLOB_BYTES,
},
public_inputs::ForkName,
task::ProvingTask,
@@ -117,7 +117,8 @@ impl BatchProvingTask {
"hardfork mismatch for da-codec@v6 header: found={fork_name:?}, expected={:?}",
ForkName::EuclidV1,
);
EnvelopeV6::from(self.blob_bytes.as_slice()).challenge_digest(versioned_hash)
EnvelopeV6::from_slice(self.blob_bytes.as_slice())
.challenge_digest(versioned_hash)
}
BatchHeaderV::V7(_) => {
match fork_name {
@@ -131,7 +132,8 @@ impl BatchProvingTask {
padded_blob_bytes.resize(N_BLOB_BYTES, 0);
padded_blob_bytes
};
EnvelopeV7::from(padded_blob_bytes.as_slice()).challenge_digest(versioned_hash)
EnvelopeV7::from_slice(padded_blob_bytes.as_slice())
.challenge_digest(versioned_hash)
}
};


@@ -42,7 +42,8 @@ pub mod point_eval {
/// Get the KZG commitment from an EIP-4844 blob.
pub fn blob_to_kzg_commitment(blob: &c_kzg::Blob) -> c_kzg::KzgCommitment {
c_kzg::KzgCommitment::blob_to_kzg_commitment(blob, c_kzg::ethereum_kzg_settings())
c_kzg::ethereum_kzg_settings(0)
.blob_to_kzg_commitment(blob)
.expect("blob to kzg commitment should succeed")
}
@@ -65,12 +66,9 @@ pub mod point_eval {
pub fn get_kzg_proof(blob: &c_kzg::Blob, challenge: H256) -> (c_kzg::KzgProof, U256) {
let challenge = get_x_from_challenge(challenge);
let (proof, y) = c_kzg::KzgProof::compute_kzg_proof(
blob,
&c_kzg::Bytes32::new(challenge.to_be_bytes()),
c_kzg::ethereum_kzg_settings(),
)
.expect("kzg proof should succeed");
let (proof, y) = c_kzg::ethereum_kzg_settings(0)
.compute_kzg_proof(blob, &c_kzg::Bytes32::new(challenge.to_be_bytes()))
.expect("kzg proof should succeed");
(proof, U256::from_be_slice(y.as_slice()))
}


@@ -4,7 +4,6 @@ use scroll_zkvm_types::{
bundle::{BundleInfo, BundleWitness, ToArchievedWitness},
public_inputs::ForkName,
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
};
/// Message indicating a sanity check failure.
@@ -47,6 +46,7 @@ impl BundleProvingTask {
.iter()
.map(|wrapped_proof| wrapped_proof.metadata.batch_info.clone())
.collect(),
fork_name: self.fork_name.to_lowercase().as_str().into(),
}
}
@@ -83,7 +83,7 @@ impl TryFrom<BundleProvingTask> for ProvingTask {
.into_iter()
.map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
.collect(),
serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.to_vec()],
serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
vk: Vec::new(),
})
}


@@ -4,7 +4,6 @@ use sbv_primitives::{types::BlockWitness, B256};
use scroll_zkvm_types::{
chunk::{execute, ChunkInfo, ChunkWitness, ToArchievedWitness},
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
};
/// The type aligned with coordinator's defination
@@ -72,7 +71,7 @@ impl TryFrom<ChunkProvingTask> for ProvingTask {
identifier: value.identifier(),
fork_name: value.fork_name,
aggregated_proofs: Vec::new(),
serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.to_vec()],
serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
vk: Vec::new(),
})
}
@@ -119,11 +118,11 @@ impl ChunkProvingTask {
}
fn build_guest_input(&self) -> ChunkWitness {
ChunkWitness {
blocks: self.block_witnesses.to_vec(),
prev_msg_queue_hash: self.prev_msg_queue_hash,
fork_name: self.fork_name.to_lowercase().as_str().into(),
}
ChunkWitness::new(
&self.block_witnesses,
self.prev_msg_queue_hash,
self.fork_name.to_lowercase().as_str().into(),
)
}
fn insert_state(&mut self, node: sbv_primitives::Bytes) {

File diff suppressed because one or more lines are too long


@@ -11,4 +11,5 @@ crate-type = ["cdylib"]
[dependencies]
libzkp = { path = "../libzkp" }
l2geth = { path = "../l2geth"}
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing.workspace = true


@@ -5,6 +5,37 @@ use std::ffi::{c_char, CString};
use libzkp::TaskType;
use utils::{c_char_to_str, c_char_to_vec};
use std::sync::OnceLock;
static LOG_SETTINGS: OnceLock<Result<(), String>> = OnceLock::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_tracing() {
use tracing_subscriber::filter::{EnvFilter, LevelFilter};
LOG_SETTINGS
.get_or_init(|| {
tracing_subscriber::fmt()
.with_env_filter(
EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy(),
)
.with_ansi(false)
.with_level(true)
.with_target(true)
.try_init()
.map_err(|e| format!("{e}"))?;
Ok(())
})
.clone()
.expect("Failed to initialize tracing subscriber");
tracing::info!("Tracing has been initialized normally");
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_verifier(config: *const c_char) {
@@ -104,6 +135,7 @@ pub unsafe extern "C" fn gen_universal_task(
str
}
Err(e) => {
println!("gen_universal_task failed at pre interpret step, error: {e}");
tracing::error!("gen_universal_task failed at pre interpret step, error: {e}");
return failed_handling_result();
}


@@ -5,10 +5,9 @@ mod zk_circuits_handler;
use clap::{ArgAction, Parser, Subcommand};
use prover::{LocalProver, LocalProverConfig};
use scroll_proving_sdk::{
prover::{types::ProofType, ProverBuilder},
prover::ProverBuilder,
utils::{get_version, init_tracing},
};
use std::{fs::File, path::Path};
#[derive(Parser, Debug)]
#[command(disable_version_flag = true)]
@@ -36,33 +35,11 @@ struct Args {
enum Commands {
/// Dump vk of this prover
Dump {
/// File to save the vks
file_name: String,
/// path to save the verifier's asset
asset_path: String,
},
}
fn dump_vk(file: &Path, prover: &LocalProver, fork_name: &str) -> eyre::Result<()> {
let f = File::create(file)?;
#[derive(Debug, serde::Serialize)]
struct VKDump {
pub chunk_vk: String,
pub batch_vk: String,
pub bundle_vk: String,
}
let handler = prover.new_handler(fork_name);
let dump = VKDump {
chunk_vk: handler.get_vk(ProofType::Chunk),
batch_vk: handler.get_vk(ProofType::Batch),
bundle_vk: handler.get_vk(ProofType::Bundle),
};
serde_json::to_writer(f, &dump)?;
Ok(())
}
#[tokio::main]
async fn main() -> eyre::Result<()> {
init_tracing();
@@ -80,10 +57,10 @@ async fn main() -> eyre::Result<()> {
let local_prover = LocalProver::new(cfg.clone());
match args.command {
Some(Commands::Dump { file_name }) => {
Some(Commands::Dump { asset_path }) => {
let fork_name = args.fork_name.unwrap_or(default_fork_name);
println!("dump vk for {fork_name}");
dump_vk(Path::new(&file_name), &local_prover, &fork_name)?;
println!("dump assets for {fork_name} into {asset_path}");
local_prover.dump_verifier_assets(&fork_name, asset_path.as_ref())?;
}
None => {
let prover = ProverBuilder::new(sdk_config, local_prover)


@@ -16,6 +16,7 @@ use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
fs::File,
path::Path,
sync::{Arc, OnceLock},
time::{SystemTime, UNIX_EPOCH},
};
@@ -71,7 +72,7 @@ impl ProvingService for LocalProver {
vks.push(vk.clone())
} else {
let handler = self.get_or_init_handler(hard_fork_name);
vks.push(handler.get_vk(*proof_type));
vks.push(handler.get_vk(*proof_type).await);
}
}
}
@@ -194,4 +195,51 @@ impl LocalProver {
as Arc<dyn CircuitsHandler>,
}
}
pub fn dump_verifier_assets(&self, hard_fork_name: &str, out_path: &Path) -> Result<()> {
let config = self
.config
.circuits
.get(hard_fork_name)
.ok_or_else(|| eyre::eyre!("no corresponding config for fork {hard_fork_name}"))?;
let workspace_path = &config.workspace_path;
let universal_prover = EuclidV2Handler::new(config);
let _ = universal_prover
.get_prover()
.dump_universal_verifier(Some(out_path))?;
#[derive(Debug, serde::Serialize)]
struct VKDump {
pub chunk_vk: String,
pub batch_vk: String,
pub bundle_vk: String,
}
let dump = VKDump {
chunk_vk: universal_prover.get_vk_and_cache(ProofType::Chunk),
batch_vk: universal_prover.get_vk_and_cache(ProofType::Batch),
bundle_vk: universal_prover.get_vk_and_cache(ProofType::Bundle),
};
let f = File::create(out_path.join("openVmVk.json"))?;
serde_json::to_writer(f, &dump)?;
// Copy verifier.bin from workspace bundle directory to output path
let bundle_verifier_path = Path::new(workspace_path)
.join("bundle")
.join("verifier.bin");
if bundle_verifier_path.exists() {
let dest_path = out_path.join("verifier.bin");
std::fs::copy(&bundle_verifier_path, &dest_path)
.map_err(|e| eyre::eyre!("Failed to copy verifier.bin: {}", e))?;
} else {
eprintln!(
"Warning: verifier.bin not found at {:?}",
bundle_verifier_path
);
}
Ok(())
}
}


@@ -11,7 +11,7 @@ use std::path::Path;
#[async_trait]
pub trait CircuitsHandler: Sync + Send {
fn get_vk(&self, task_type: ProofType) -> String;
async fn get_vk(&self, task_type: ProofType) -> String;
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
}


@@ -57,6 +57,12 @@ impl EuclidV2Handler {
}
}
/// get_prover get the inner prover, later we would replace chunk/batch/bundle_prover with
/// universal prover, before that, use bundle_prover as the represent one
pub fn get_prover(&self) -> &BundleProverEuclidV2 {
&self.bundle_prover
}
pub fn get_vk_and_cache(&self, task_type: ProofType) -> String {
match task_type {
ProofType::Chunk => self.cached_vks[&ProofType::Chunk]
@@ -64,7 +70,7 @@ impl EuclidV2Handler {
ProofType::Batch => self.cached_vks[&ProofType::Batch]
.get_or_init(|| BASE64_STANDARD.encode(self.batch_prover.get_app_vk())),
ProofType::Bundle => self.cached_vks[&ProofType::Bundle]
.get_or_init(|| BASE64_STANDARD.encode(self.bundle_prover.get_evm_vk())),
.get_or_init(|| BASE64_STANDARD.encode(self.bundle_prover.get_app_vk())),
_ => unreachable!("Unsupported proof type {:?}", task_type),
}
.clone()
@@ -73,10 +79,8 @@ impl EuclidV2Handler {
#[async_trait]
impl CircuitsHandler for Arc<Mutex<EuclidV2Handler>> {
fn get_vk(&self, task_type: ProofType) -> String {
self.try_lock()
.expect("get vk is on called before other entry is used")
.get_vk_and_cache(task_type)
async fn get_vk(&self, task_type: ProofType) -> String {
self.lock().await.get_vk_and_cache(task_type)
}
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {


@@ -15,7 +15,7 @@ proof_of_work_bits = 16
[app_vm_config.castf]
[app_vm_config.modular]
supported_modulus = [
supported_moduli = [
"4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787",
"52435875175126190479447740508185965837690552500527637822603658699938581184513",
]
@@ -24,11 +24,12 @@ supported_modulus = [
supported_curves = ["Bls12_381"]
[app_vm_config.sha256]
[app_vm_config.fp2]
supported_modulus = [
"4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787",
supported_moduli = [
["Bls12_381Fp2","4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787"]
]
[[app_vm_config.ecc.supported_curves]]
struct_name = "Bls12_381G1Affine"
modulus = "4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787"
scalar = "52435875175126190479447740508185965837690552500527637822603658699938581184513"
a = "0"
b = "4"
b = "4"


@@ -17,7 +17,7 @@ range_tuple_checker_sizes = [256, 8192]
range_tuple_checker_sizes = [256, 8192]
[app_vm_config.modular]
supported_modulus = [
supported_moduli = [
"21888242871839275222246405745257275088696311157297823662689037894645226208583",
"21888242871839275222246405745257275088548364400416034343698204186575808495617",
"115792089237316195423570985008687907853269984665640564039457584007908834671663",
@@ -27,7 +27,9 @@ supported_modulus = [
]
[app_vm_config.fp2]
supported_modulus = ["21888242871839275222246405745257275088696311157297823662689037894645226208583"]
supported_moduli = [
["Bn254Fp2","21888242871839275222246405745257275088696311157297823662689037894645226208583"]
]
[app_vm_config.pairing]
supported_curves = ["Bn254"]
@@ -35,19 +37,22 @@ supported_curves = ["Bn254"]
[app_vm_config.sha256]
[[app_vm_config.ecc.supported_curves]]
struct_name = "Secp256k1Point"
modulus = "115792089237316195423570985008687907853269984665640564039457584007908834671663"
scalar = "115792089237316195423570985008687907852837564279074904382605163141518161494337"
a = "0"
b = "7"
[[app_vm_config.ecc.supported_curves]]
struct_name = "P256Point"
modulus = "115792089210356248762697446949407573530086143415290314195533631308867097853951"
scalar = "115792089210356248762697446949407573529996955224135760342422259061068512044369"
a = "115792089210356248762697446949407573530086143415290314195533631308867097853948"
b = "41058363725152142129326129780047268409114441015993725554835256314039467401291"
[[app_vm_config.ecc.supported_curves]]
struct_name = "Bn254G1Affine"
modulus = "21888242871839275222246405745257275088696311157297823662689037894645226208583"
scalar = "21888242871839275222246405745257275088548364400416034343698204186575808495617"
a = "0"
b = "3"
b = "3"