Mirror of https://github.com/scroll-tech/scroll.git, synced 2026-01-12 23:48:15 -05:00.

Compare commits: develop...feat/axiom (52 commits)

Commit SHA1s:
42221c9b2a 6b4c106eb5 43f1895ecf d169990168 d7d29f52a9 b5c398a711 5dba980cea
ea275cbb8a 763d2b7d7d 1f263bb730 d3837daf3a 69a20b610a 48ecc66de8 9923856c9d
612212da4d b9156ab149 056326ee3e 54c9e278c8 7d6aea89aa 8a05300ab3 5d378a015d
88066d72e8 226d32f9bf c950ecc213 4cc4cc1064 ee8e4a39be a450fad09d 3f95ffc3d7
49c0b1a844 970f8d488e bf60d16ea8 e962e713d8 785ce615d5 b64015c54d 4e0573a820
4bc57bb95e 4bed6845c3 b9846293b2 397f327776 fe9ce35249 4062c554a3 2b38078e02
ab6490ef35 d5f9d55075 ecfdbb342d 98775e0bbb 6a17d2c715 22b8ac7204 8f3346c738
2c7117ebc3 3b3bd5f9ee 2d12839a8c
.dockerignore (new file, +1)

@@ -0,0 +1 @@
+target/
Cargo.lock (generated, 3228 changed lines): file diff suppressed because it is too large.
Cargo.toml (17 changed lines)

@@ -10,7 +10,7 @@ resolver = "2"
 
 [workspace.package]
 authors = ["Scroll developers"]
-edition = "2021"
+edition = "2024"
 homepage = "https://scroll.io"
 readme = "README.md"
 repository = "https://github.com/scroll-tech/scroll"
@@ -24,26 +24,19 @@ scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", tag =
 sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91.2", features = ["scroll", "rkyv"] }
 sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91.2" }
 sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91.2", features = ["scroll"] }
+axiom-sdk = { git = "https://github.com/axiom-crypto/axiom-api-cli.git", tag = "v1.0.9" }
 
-metrics = "0.23.0"
-metrics-util = "0.17"
-metrics-tracing-context = "0.16.0"
-
-anyhow = "1.0"
 alloy = { version = "1", default-features = false }
 alloy-primitives = { version = "1.4.1", default-features = false, features = ["tiny-keccak"] }
-# also use this to trigger "serde" feature for primitives
-alloy-serde = { version = "1", default-features = false }
 
+jiff = "0.2"
 serde = { version = "1", default-features = false, features = ["derive"] }
 serde_json = { version = "1.0" }
 serde_derive = "1.0"
-serde_with = "3"
-itertools = "0.14"
-tiny-keccak = "2.0"
+tokio = "1"
 tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 eyre = "0.6"
-once_cell = "1.20"
 base64 = "0.22"
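The edition bump from 2021 to 2024 drives most of the mechanical Rust churn later in this diff: under Rust 2024, exported-symbol attributes must be written `#[unsafe(no_mangle)]`, the body of an `unsafe fn` is no longer treated as one implicit unsafe block (the `unsafe_op_in_unsafe_fn` lint fires), and rustfmt's 2024 style edition re-sorts `use` lists. A minimal sketch of the first two rules, using a hypothetical function that is not from this repo:

```rust
use std::ffi::c_char;

/// # Safety
/// `ptr` must point to `len` readable bytes.
#[unsafe(no_mangle)] // Rust 2024 spelling; a plain #[no_mangle] is rejected
pub unsafe extern "C" fn byte_sum(ptr: *const c_char, len: usize) -> u64 {
    // Rust 2024: unsafe operations inside an `unsafe fn` need their own
    // explicit `unsafe { ... }` block.
    let bytes = unsafe { std::slice::from_raw_parts(ptr.cast::<u8>(), len) };
    bytes.iter().map(|&b| u64::from(b)).sum()
}
```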
build/common.mk (new file, +16)

@@ -0,0 +1,16 @@
+UNAME_S := $(shell uname -s)
+IS_DARWIN := $(findstring Darwin,$(UNAME_S))
+
+SHLIB_EXT := so
+ifeq ($(UNAME_S),Darwin)
+SHLIB_EXT := dylib
+endif
+
+LIB_ZKP_NAME := libzkp.$(SHLIB_EXT)
+
+define macos_codesign
+	@if [ -n "$(IS_DARWIN)" ]; then \
+		codesign --force --sign - '$(1)'; \
+		codesign --verify --deep --verbose '$(1)'; \
+	fi
+endef
coordinator/.gitignore (vendored)

@@ -1,4 +1,5 @@
 /build/bin
 .idea
 internal/logic/verifier/lib
-internal/libzkp/lib/libzkp.so
+libzkp.so
+libzkp.dylib
@@ -1,8 +1,10 @@
+include ../build/common.mk
+
 .PHONY: lint docker clean coordinator coordinator_skip_libzkp mock_coordinator libzkp
 
 IMAGE_VERSION=latest
 REPO_ROOT_DIR=./..
-LIBZKP_PATH=./internal/logic/libzkp/lib/libzkp.so
+LIBZKP_PATH=./internal/logic/libzkp/lib/$(LIB_ZKP_NAME)
 
 ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
 ZKVM_VERSION=$(shell grep -m 1 "zkvm-prover?" ../Cargo.lock | cut -d "#" -f2 | cut -c-7)
@@ -27,6 +29,7 @@ libzkp: clean_libzkp $(LIBZKP_PATH)
 
 coordinator_api: $(LIBZKP_PATH) ## Builds the Coordinator api instance.
 	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_api ./cmd/api
+	$(call macos_codesign,$(PWD)/build/bin/coordinator_api)
 
 coordinator_cron:
 	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_cron ./cmd/cron
@@ -46,6 +49,8 @@ localsetup: coordinator_api ## Local setup: build coordinator_api, copy config,
 	@echo "Setting up releases..."
 	cd $(CURDIR)/build && bash setup_releases.sh
 
+run_coordinator_api: coordinator_api
+	cd build/bin && ./coordinator_api
+
 #coordinator_api_skip_libzkp:
 #	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_api ./cmd/api
@@ -10,6 +10,8 @@ See [monorepo prerequisites](../README.md#prerequisites).
 
 ## Build
 
+Using Go version 1.22
+
 ```bash
 make clean
 make coordinator_api
@@ -64,9 +64,10 @@ for ((i=0; i<$VERIFIER_COUNT; i++)); do
 	wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/verifier.bin -O ${ASSET_DIR}/verifier.bin
 	wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/root_verifier_vk -O ${ASSET_DIR}/root_verifier_vk
 	wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/openVmVk.json -O ${ASSET_DIR}/openVmVk.json
+	wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/axiom_program_ids.json -O ${ASSET_DIR}/axiom_program_ids.json
 
 	echo "Completed downloading assets for $FORK_NAME"
 	echo "---"
 done
 
 echo "All verifier assets downloaded successfully"
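The newly downloaded `axiom_program_ids.json` plausibly feeds the vk-to-program table that `AxiomConfig` expects (see `crates/prover-bin/src/prover/axiom.rs` below), but the schema is not shown in this diff. A hypothetical reader, assuming the file is a JSON object keyed by hex-encoded vk with `program_id`/`config_id` values as in `AxiomConfig::programs`:

```rust
use std::collections::HashMap;

// Assumed shape only: mirrors the AxiomProgram struct defined in axiom.rs.
#[derive(Debug, serde::Deserialize)]
struct AxiomProgram {
    program_id: String,
    config_id: String,
}

fn main() -> eyre::Result<()> {
    let raw = std::fs::read_to_string("axiom_program_ids.json")?;
    // vk (hex string) -> Axiom program/config identifiers.
    let programs: HashMap<String, AxiomProgram> = serde_json::from_str(&raw)?;
    println!("{} axiom programs configured", programs.len());
    Ok(())
}
```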
@@ -1,16 +1,20 @@
-.PHONY: help fmt clippy test test-ci test-all
+include ../../../../build/common.mk
+
+.PHONY: help fmt clippy test test-ci test-all clean build
+
+all: build
 
 build:
 	@cargo build --release -p libzkp-c
 	@mkdir -p lib
-	@cp -f ../../../../target/release/libzkp.so lib/
+	@cp -f ../../../../target/release/$(LIB_ZKP_NAME) lib/
 
 fmt:
 	@cargo fmt --all -- --check
 
 clean:
 	@cargo clean --release -p libzkp -p libzkp-c -p l2geth
-	@rm -f lib/libzkp.so
+	@rm -f lib/$(LIB_ZKP_NAME)
 
 clippy:
 	@cargo check --release --all-features
@@ -1,7 +1,8 @@
 package libzkp
 
 /*
-#cgo LDFLAGS: -lzkp -lm -ldl -L${SRCDIR}/lib -Wl,-rpath=${SRCDIR}/lib
+#cgo linux LDFLAGS: -lzkp -lm -ldl -L${SRCDIR}/lib -Wl,-rpath=${SRCDIR}/lib
+#cgo darwin LDFLAGS: -lzkp -lm -ldl -L${SRCDIR}/lib -Wl,-rpath,${SRCDIR}/lib
 #cgo gpu LDFLAGS: -lzkp -lm -ldl -lgmp -lstdc++ -lprocps -L/usr/local/cuda/lib64/ -lcudart -L${SRCDIR}/lib/ -Wl,-rpath=${SRCDIR}/lib
 #include <stdlib.h>
 #include "libzkp.h"
@@ -5,7 +5,7 @@ use alloy::{
 };
 use eyre::Result;
 use libzkp::tasks::ChunkInterpreter;
-use sbv_primitives::types::{consensus::TxL1Message, Network};
+use sbv_primitives::types::{Network, consensus::TxL1Message};
 use serde::{Deserialize, Serialize};
 
 fn default_max_retry() -> u32 {
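This and the many similar import hunks below are rustfmt's 2024 style edition at work: `use` lists are now version-sorted, which places uppercase type names before lowercase module paths (the previous sort was case-insensitive alphabetical). The same reshuffle from a later hunk in this diff, shown in isolation:

```rust
// style edition 2021: use serde::{de::DeserializeOwned, Deserialize, Serialize};
// style edition 2024 ("cargo fmt" output):
use serde::{Deserialize, Serialize, de::DeserializeOwned};
```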
@@ -3,12 +3,15 @@ name = "libzkp"
 version.workspace = true
 edition.workspace = true
 
+[lib]
+crate-type = ["rlib", "cdylib"]
+
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
 scroll-zkvm-types = { workspace = true, features = ["scroll"] }
 scroll-zkvm-verifier.workspace = true
 
-alloy-primitives.workspace = true #depress the effect of "native-keccak"
+alloy-primitives.workspace = true # depress the effect of "native-keccak"
 sbv-primitives = {workspace = true, features = ["scroll-compress-info", "scroll"]}
 sbv-core = { workspace = true, features = ["scroll"] }
 base64.workspace = true
@@ -138,7 +138,10 @@ pub fn gen_universal_task(
     // always respect the fork_name_str (which has been normalized) being passed
     // if the fork_name wrapped in task is not match, consider it a malformed task
     if fork_name_str != task.fork_name.as_str() {
-        eyre::bail!("fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
+        eyre::bail!(
+            "fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}",
+            task.fork_name
+        );
     }
     if fork_name_str != version.fork.as_str() {
         eyre::bail!(
@@ -156,7 +159,10 @@ pub fn gen_universal_task(
     task.fork_name = task.fork_name.to_lowercase();
     let version = Version::from(task.version);
     if fork_name_str != task.fork_name.as_str() {
-        eyre::bail!("fork name in batch task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
+        eyre::bail!(
+            "fork name in batch task not match the calling arg, expected {fork_name_str}, get {}",
+            task.fork_name
+        );
     }
     if fork_name_str != version.fork.as_str() {
         eyre::bail!(
@@ -174,7 +180,10 @@ pub fn gen_universal_task(
     task.fork_name = task.fork_name.to_lowercase();
     let version = Version::from(task.version);
    if fork_name_str != task.fork_name.as_str() {
-        eyre::bail!("fork name in bundle task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
+        eyre::bail!(
+            "fork name in bundle task not match the calling arg, expected {fork_name_str}, get {}",
+            task.fork_name
+        );
     }
     if fork_name_str != version.fork.as_str() {
         eyre::bail!(
@@ -13,7 +13,7 @@ use scroll_zkvm_types::{
     utils::{serialize_vk, vec_as_base64},
     version,
 };
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{Deserialize, Serialize, de::DeserializeOwned};
 
 /// A wrapper around the actual inner proof.
 #[derive(Clone, Serialize, Deserialize)]
@@ -213,7 +213,7 @@ impl<Metadata: ProofMetadata> PersistableProof for WrappedProof<Metadata> {
 
 #[cfg(test)]
 mod tests {
-    use base64::{prelude::BASE64_STANDARD, Engine};
+    use base64::{Engine, prelude::BASE64_STANDARD};
     use sbv_primitives::B256;
     use scroll_zkvm_types::{bundle::BundleInfo, proof::EvmProof};
 
@@ -3,14 +3,14 @@ use eyre::Result;
 use sbv_primitives::{B256, U256};
 use scroll_zkvm_types::{
     batch::{
-        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderValidium,
-        BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, LegacyBatchWitness,
-        ReferenceHeader, N_BLOB_BYTES,
+        BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderValidium, BatchInfo, BatchWitness,
+        Envelope, EnvelopeV6, EnvelopeV7, LegacyBatchWitness, N_BLOB_BYTES, ReferenceHeader,
+        build_point_eval_witness,
     },
     chunk::ChunkInfo,
     public_inputs::{ForkName, MultiVersionPublicInputs, Version},
     task::ProvingTask,
-    utils::{to_rkyv_bytes, RancorError},
+    utils::{RancorError, to_rkyv_bytes},
     version::{Codec, Domain, STFVersion},
 };
 
@@ -150,18 +150,32 @@ impl BatchProvingTask {
         match &self.batch_header {
             BatchHeaderV::Validium(_) => assert!(
                 version.is_validium(),
-                "version {:?} is not match with parsed header, get validium header but version is not validium", version,
+                "version {:?} is not match with parsed header, get validium header but version is not validium",
+                version,
             ),
-            BatchHeaderV::V6(_) => assert_eq!(version.fork, ForkName::EuclidV1,
+            BatchHeaderV::V6(_) => assert_eq!(
+                version.fork,
+                ForkName::EuclidV1,
                 "hardfork mismatch for da-codec@v6 header: found={:?}, expected={:?}",
                 version.fork,
                 ForkName::EuclidV1,
             ),
             BatchHeaderV::V7_to_V10(_) => assert!(
-                matches!(version.fork, ForkName::EuclidV2 | ForkName::Feynman | ForkName::Galileo | ForkName::GalileoV2),
+                matches!(
+                    version.fork,
+                    ForkName::EuclidV2
+                        | ForkName::Feynman
+                        | ForkName::Galileo
+                        | ForkName::GalileoV2
+                ),
                 "hardfork mismatch for da-codec@v7/8/9/10 header: found={}, expected={:?}",
                 version.fork,
-                [ForkName::EuclidV2, ForkName::Feynman, ForkName::Galileo, ForkName::GalileoV2],
+                [
+                    ForkName::EuclidV2,
+                    ForkName::Feynman,
+                    ForkName::Galileo,
+                    ForkName::GalileoV2
+                ],
             ),
         }
 
@@ -17,7 +17,7 @@ pub mod base64 {
 
 pub mod point_eval {
     use c_kzg;
-    use sbv_primitives::{types::eips::eip4844::BLS_MODULUS, B256 as H256, U256};
+    use sbv_primitives::{B256 as H256, U256, types::eips::eip4844::BLS_MODULUS};
     use scroll_zkvm_types::utils::sha256_rv32;
 
     /// Given the blob-envelope, translate it to a fixed size EIP-4844 blob.
@@ -4,7 +4,7 @@ use scroll_zkvm_types::{
     bundle::{BundleInfo, BundleWitness, LegacyBundleWitness},
     public_inputs::{MultiVersionPublicInputs, Version},
     task::ProvingTask,
-    utils::{to_rkyv_bytes, RancorError},
+    utils::{RancorError, to_rkyv_bytes},
 };
 
 use crate::proofs::BatchProof;
@@ -1,11 +1,11 @@
 use eyre::Result;
 use sbv_core::BlockWitness;
-use sbv_primitives::{types::consensus::BlockHeader, B256};
+use sbv_primitives::{B256, types::consensus::BlockHeader};
 use scroll_zkvm_types::{
-    chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness, ValidiumInputs},
+    chunk::{ChunkInfo, ChunkWitness, LegacyChunkWitness, ValidiumInputs, execute},
     public_inputs::{MultiVersionPublicInputs, Version},
     task::ProvingTask,
-    utils::{to_rkyv_bytes, RancorError},
+    utils::{RancorError, to_rkyv_bytes},
 };
 
 use super::chunk_interpreter::*;
@@ -224,8 +224,8 @@ impl ChunkProvingTask {
                     attempts += 1;
                     if attempts >= MAX_FETCH_NODES_ATTEMPTS {
                         return Err(eyre!(
                             "failed to fetch nodes after {MAX_FETCH_NODES_ATTEMPTS} attempts: {e}"
                         ));
                     }
 
                     let node_hash =
@@ -1,6 +1,6 @@
 use eyre::Result;
 use sbv_core::BlockWitness;
-use sbv_primitives::{types::consensus::TxL1Message, Bytes, B256};
+use sbv_primitives::{B256, Bytes, types::consensus::TxL1Message};
 
 /// An interpreter which is cirtical in translating chunk data
 /// since we need to grep block witness and storage node data
@@ -1,12 +1,12 @@
 use std::{
-    panic::{catch_unwind, AssertUnwindSafe},
+    panic::{AssertUnwindSafe, catch_unwind},
     path::Path,
 };
 
 use git_version::git_version;
 use serde::{
-    de::{Deserialize, DeserializeOwned},
     Serialize,
+    de::{Deserialize, DeserializeOwned},
 };
 
 use eyre::Result;
@@ -11,5 +11,5 @@ crate-type = ["cdylib"]
 [dependencies]
 libzkp = { path = "../libzkp" }
 l2geth = { path = "../l2geth"}
-tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+tracing-subscriber.workspace = true
 tracing.workspace = true
@@ -1,6 +1,6 @@
 mod utils;
 
-use std::ffi::{c_char, CString};
+use std::ffi::{CString, c_char};
 
 use libzkp::TaskType;
 use utils::{c_char_to_str, c_char_to_vec};
@@ -20,7 +20,7 @@ fn enable_dump() -> bool {
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn init_tracing() {
     use tracing_subscriber::filter::{EnvFilter, LevelFilter};
 
@@ -47,14 +47,14 @@ pub unsafe extern "C" fn init_tracing() {
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn init_verifier(config: *const c_char) {
     let config_str = c_char_to_str(config);
     libzkp::verifier_init(config_str).unwrap();
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn init_l2geth(config: *const c_char) {
     let config_str = c_char_to_str(config);
     l2geth::init(config_str).unwrap();
@@ -92,7 +92,7 @@ fn verify_proof(proof: *const c_char, fork_name: *const c_char, task_type: TaskT
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn verify_chunk_proof(
     proof: *const c_char,
     fork_name: *const c_char,
@@ -101,7 +101,7 @@ pub unsafe extern "C" fn verify_chunk_proof(
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn verify_batch_proof(
     proof: *const c_char,
     fork_name: *const c_char,
@@ -110,7 +110,7 @@ pub unsafe extern "C" fn verify_batch_proof(
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn verify_bundle_proof(
     proof: *const c_char,
     fork_name: *const c_char,
@@ -119,7 +119,7 @@ pub unsafe extern "C" fn verify_bundle_proof(
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn dump_vk(fork_name: *const c_char, file: *const c_char) {
     let fork_name_str = c_char_to_str(fork_name);
     let file_str = c_char_to_str(file);
@@ -145,7 +145,7 @@ fn failed_handling_result() -> HandlingResult {
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn gen_universal_task(
     task_type: i32,
     task: *const c_char,
@@ -166,10 +166,7 @@ pub unsafe extern "C" fn gen_universal_task(
             );
             return failed_handling_result();
         }
-        Some(std::slice::from_raw_parts(
-            decryption_key,
-            decryption_key_len,
-        ))
+        Some(unsafe { std::slice::from_raw_parts(decryption_key, decryption_key_len) })
     } else {
         None
     };
@@ -185,7 +182,7 @@ pub unsafe extern "C" fn gen_universal_task(
     };
 
     let expected_vk = if expected_vk_len > 0 {
-        std::slice::from_raw_parts(expected_vk, expected_vk_len)
+        unsafe { std::slice::from_raw_parts(expected_vk, expected_vk_len) }
     } else {
         &[]
     };
@@ -224,18 +221,18 @@ pub unsafe extern "C" fn gen_universal_task(
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn release_task_result(result: HandlingResult) {
     if !result.universal_task.is_null() {
-        let _ = CString::from_raw(result.universal_task);
+        let _ = unsafe { CString::from_raw(result.universal_task) };
     }
     if !result.metadata.is_null() {
-        let _ = CString::from_raw(result.metadata);
+        let _ = unsafe { CString::from_raw(result.metadata) };
     }
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn gen_wrapped_proof(
     proof: *const c_char,
     metadata: *const c_char,
@@ -244,7 +241,7 @@ pub unsafe extern "C" fn gen_wrapped_proof(
 ) -> *mut c_char {
     let proof_str = c_char_to_str(proof);
     let metadata_str = c_char_to_str(metadata);
-    let vk_data = std::slice::from_raw_parts(vk as *const u8, vk_len);
+    let vk_data = unsafe { std::slice::from_raw_parts(vk as *const u8, vk_len) };
 
     match libzkp::gen_wrapped_proof(proof_str, metadata_str, vk_data) {
         Ok(result) => CString::new(result).unwrap().into_raw(),
@@ -256,7 +253,7 @@ pub unsafe extern "C" fn gen_wrapped_proof(
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn univ_task_compatibility_fix(task_json: *const c_char) -> *mut c_char {
     let task_json_str = c_char_to_str(task_json);
     match libzkp::univ_task_compatibility_fix(task_json_str) {
@@ -269,9 +266,9 @@ pub unsafe extern "C" fn univ_task_compatibility_fix(task_json: *const c_char) -
 }
 
 /// # Safety
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn release_string(ptr: *mut c_char) {
     if !ptr.is_null() {
-        let _ = CString::from_raw(ptr);
+        let _ = unsafe { CString::from_raw(ptr) };
     }
 }
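Beyond the edition-2024 attribute and `unsafe`-block changes, the contract of these hunks is unchanged: every `*mut c_char` this library hands out via `CString::into_raw` must come back exactly once through `release_string`/`release_task_result`, so the buffer is freed by Rust's own allocator. A minimal standalone sketch of that pairing (hypothetical function names, not from this repo):

```rust
use std::ffi::{CString, c_char};

/// Hand a Rust-allocated string across the FFI boundary.
#[unsafe(no_mangle)]
pub extern "C" fn make_greeting() -> *mut c_char {
    CString::new("hello").unwrap().into_raw()
}

/// # Safety
/// `ptr` must have been returned by `make_greeting` and must not be used
/// (or freed) again after this call.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn free_greeting(ptr: *mut c_char) {
    if !ptr.is_null() {
        // Re-own the CString; dropping it releases the allocation.
        let _ = unsafe { CString::from_raw(ptr) };
    }
}
```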
|
|||||||
@@ -6,35 +6,28 @@ edition.workspace = true
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
axiom-sdk.workspace = true
|
||||||
scroll-zkvm-types.workspace = true
|
scroll-zkvm-types.workspace = true
|
||||||
scroll-zkvm-prover.workspace = true
|
scroll-zkvm-prover.workspace = true
|
||||||
libzkp = { path = "../libzkp"}
|
libzkp = { path = "../libzkp"}
|
||||||
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "05648db" }
|
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "504e71f" }
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
serde_json.workspace = true
|
serde_json.workspace = true
|
||||||
once_cell.workspace =true
|
|
||||||
base64.workspace = true
|
|
||||||
tiny-keccak = { workspace = true, features = ["sha3", "keccak"] }
|
|
||||||
eyre.workspace = true
|
eyre.workspace = true
|
||||||
|
tracing.workspace = true
|
||||||
|
|
||||||
futures = "0.3.30"
|
|
||||||
futures-util = "0.3"
|
futures-util = "0.3"
|
||||||
|
|
||||||
reqwest = { version = "0.12.4", features = ["gzip", "stream"] }
|
reqwest = { version = "0.12", features = ["gzip", "stream"] }
|
||||||
reqwest-middleware = "0.3"
|
|
||||||
reqwest-retry = "0.5"
|
|
||||||
hex = "0.4.3"
|
hex = "0.4.3"
|
||||||
|
|
||||||
rand = "0.8.5"
|
jiff.workspace = true
|
||||||
tokio = "1.37.0"
|
tokio = { workspace = true, features = ["full"] }
|
||||||
async-trait = "0.1"
|
async-trait = "0.1"
|
||||||
sled = "0.34.7"
|
|
||||||
http = "1.1.0"
|
|
||||||
clap = { version = "4.5", features = ["derive"] }
|
clap = { version = "4.5", features = ["derive"] }
|
||||||
ctor = "0.2.8"
|
url = { version = "2.5.4", features = ["serde"] }
|
||||||
url = { version = "2.5.4", features = ["serde",] }
|
tempfile = "3.24"
|
||||||
serde_bytes = "0.11.15"
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
cuda = ["scroll-zkvm-prover/cuda"]
|
cuda = ["scroll-zkvm-prover/cuda"]
|
||||||
@@ -1,21 +1,32 @@
+#[macro_use]
+extern crate tracing;
+
 mod prover;
 mod types;
 mod zk_circuits_handler;
 
+use crate::prover::ProverKind;
 use clap::{ArgAction, Parser, Subcommand};
-use prover::{LocalProver, LocalProverConfig};
 use scroll_proving_sdk::{
-    prover::{types::ProofType, ProverBuilder},
-    utils::{get_version, init_tracing},
+    prover::{ProverBuilder, types::ProofType},
+    utils::{VERSION, init_tracing},
+};
+use std::{
+    fs::File,
+    io::BufReader,
+    path::{Path, PathBuf},
 };
-use std::{fs::File, io::BufReader, path::Path};
 
 #[derive(Parser, Debug)]
 #[command(disable_version_flag = true)]
 struct Args {
+    /// Prover kind
+    #[arg(long = "prover.kind", value_enum, default_value_t = ProverKind::Local)]
+    prover_kind: ProverKind,
+
     /// Path of config file
     #[arg(long = "config", default_value = "conf/config.json")]
-    config_file: String,
+    config_file: PathBuf,
 
     #[arg(long = "forkname")]
     fork_name: Option<String>,
@@ -42,8 +53,11 @@ enum Commands {
 
 #[derive(Debug, serde::Deserialize)]
 struct HandleSet {
+    #[serde(default)]
     chunks: Vec<String>,
+    #[serde(default)]
     batches: Vec<String>,
+    #[serde(default)]
     bundles: Vec<String>,
 }
 
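With `#[serde(default)]` on each field, a task file passed to the `Handle` command may now omit any of the three sections instead of having to list empty arrays. A quick self-contained illustration (not code from this repo):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct HandleSet {
    #[serde(default)]
    chunks: Vec<String>,
    #[serde(default)]
    batches: Vec<String>,
    #[serde(default)]
    bundles: Vec<String>,
}

fn main() {
    // "batches" and "bundles" are absent; they fall back to empty vectors
    // instead of failing deserialization.
    let hs: HandleSet = serde_json::from_str(r#"{"chunks": ["c1.json"]}"#).unwrap();
    assert!(hs.batches.is_empty() && hs.bundles.is_empty());
    println!("{hs:?}");
}
```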
@@ -54,13 +68,13 @@ async fn main() -> eyre::Result<()> {
     let args = Args::parse();
 
     if args.version {
-        println!("version is {}", get_version());
+        println!("version is {VERSION}");
         std::process::exit(0);
     }
+    info!(version = %VERSION, "Starting prover");
 
-    let cfg = LocalProverConfig::from_file(args.config_file)?;
-    let sdk_config = cfg.sdk_config.clone();
-    let local_prover = LocalProver::new(cfg.clone());
+    let (sdk_config, prover) = args.prover_kind.create_from_file(&args.config_file)?;
+    info!(prover = ?prover, "Loaded prover");
 
     match args.command {
         Some(Commands::Handle { task_path }) => {
@@ -68,37 +82,37 @@ async fn main() -> eyre::Result<()> {
             let reader = BufReader::new(file);
             let handle_set: HandleSet = serde_json::from_reader(reader)?;
 
-            let prover = ProverBuilder::new(sdk_config, local_prover)
+            let prover = ProverBuilder::new(sdk_config, prover)
                 .build()
                 .await
                 .map_err(|e| eyre::eyre!("build prover fail: {e}"))?;
 
             let prover = std::sync::Arc::new(prover);
-            println!("Handling task set 1: chunks ...");
+            info!("Handling task set 1: chunks ...");
             assert!(
                 prover
                     .clone()
                     .one_shot(&handle_set.chunks, ProofType::Chunk)
                     .await
             );
-            println!("Done! Handling task set 2: batches ...");
+            info!("Done! Handling task set 2: batches ...");
             assert!(
                 prover
                     .clone()
                     .one_shot(&handle_set.batches, ProofType::Batch)
                     .await
             );
-            println!("Done! Handling task set 3: bundles ...");
+            info!("Done! Handling task set 3: bundles ...");
             assert!(
                 prover
                     .clone()
                     .one_shot(&handle_set.bundles, ProofType::Bundle)
                     .await
             );
-            println!("All done!");
+            info!("All done!");
         }
         None => {
-            let prover = ProverBuilder::new(sdk_config, local_prover)
+            let prover = ProverBuilder::new(sdk_config, prover)
                 .build()
                 .await
                 .map_err(|e| eyre::eyre!("build prover fail: {e}"))?;
Prover module root, rewritten (@@ -1,329 +1,96 @@). The module path was not preserved by the mirror, but from its contents this is the `mod.rs` that declares the new `local` and `axiom` submodules of the prover binary. The 329 removed lines were the previous single-file `LocalProver` implementation: `AssetsLocationData` (base asset URL plus per-vk "detour" URLs, trailing-slash validation, and size-checked streaming download of `app.vmexe`/`openvm.toml`), `LocalProverConfig`/`CircuitConfig`, the `GLOBAL_ASSET_URLS` preset table parsed from `assets_url_preset.json`, and a `LocalProver` that cached one `UniversalHandler` per vk and ran a single proving task at a time on a blocking tokio handle. That code moves into `prover/local.rs` (not shown in this mirror); the new module root only selects a backend and dispatches to it:

```rust
use async_trait::async_trait;
use scroll_proving_sdk::{
    config::Config as SdkConfig,
    prover::{
        ProvingService,
        proving_service::{
            GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
            QueryTaskResponse,
        },
    },
};
use serde::{Deserialize, Serialize};
use std::path::Path;

mod local;
pub use local::{LocalProver, LocalProverConfig};

mod axiom;
pub use axiom::{AxiomProver, AxiomProverConfig};

#[derive(Debug)]
pub enum Prover {
    Local(LocalProver),
    Axiom(AxiomProver),
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, clap::ValueEnum)]
pub enum ProverKind {
    Local,
    Axiom,
}

impl ProverKind {
    pub fn create_from_file<P: AsRef<Path>>(
        &self,
        file_name: P,
    ) -> eyre::Result<(SdkConfig, Prover)> {
        match self {
            ProverKind::Local => {
                let config = LocalProverConfig::from_file(file_name)?;
                let sdk_config = config.sdk_config.clone();
                let prover = LocalProver::new(config);
                Ok((sdk_config, Prover::Local(prover)))
            }
            ProverKind::Axiom => {
                let config = AxiomProverConfig::from_file(file_name)?;
                let sdk_config = config.sdk_config.clone();
                let prover = AxiomProver::new(config);
                Ok((sdk_config, Prover::Axiom(prover)))
            }
        }
    }
}

#[async_trait]
impl ProvingService for Prover {
    fn is_local(&self) -> bool {
        match self {
            Prover::Local(p) => p.is_local(),
            Prover::Axiom(p) => p.is_local(),
        }
    }

    async fn get_vks(&self, req: GetVkRequest) -> GetVkResponse {
        match self {
            Prover::Local(p) => p.get_vks(req).await,
            Prover::Axiom(p) => p.get_vks(req).await,
        }
    }

    async fn prove(&mut self, req: ProveRequest) -> ProveResponse {
        match self {
            Prover::Local(p) => p.prove(req).await,
            Prover::Axiom(p) => p.prove(req).await,
        }
    }

    async fn query_task(&mut self, req: QueryTaskRequest) -> QueryTaskResponse {
        match self {
            Prover::Local(p) => p.query_task(req).await,
            Prover::Axiom(p) => p.query_task(req).await,
        }
    }
}

impl From<LocalProver> for Prover {
    fn from(p: LocalProver) -> Self {
        Prover::Local(p)
    }
}

impl From<AxiomProver> for Prover {
    fn from(p: AxiomProver) -> Self {
        Prover::Axiom(p)
    }
}
```
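Taken together with the `main.rs` hunks above, backend selection is now a single call on the clap-parsed enum. A sketch of the flow, using the APIs exactly as they appear in this diff (config file name assumed; not a verbatim excerpt):

```rust
use crate::prover::ProverKind;
use scroll_proving_sdk::prover::ProverBuilder;

async fn start(kind: ProverKind) -> eyre::Result<()> {
    // create_from_file reads either a LocalProverConfig or an
    // AxiomProverConfig from the same JSON file and returns the shared
    // SdkConfig alongside the chosen Prover variant.
    let (sdk_config, prover) = kind.create_from_file("conf/config.json")?;
    // ProverBuilder accepts the enum because Prover implements ProvingService.
    let prover = ProverBuilder::new(sdk_config, prover)
        .build()
        .await
        .map_err(|e| eyre::eyre!("build prover fail: {e}"))?;
    let _ = prover;
    Ok(())
}
```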
crates/prover-bin/src/prover/axiom.rs (new file, +329; @@ -0,0 +1,329 @@)

```rust
use crate::zk_circuits_handler::universal::UniversalHandler;
use async_trait::async_trait;
use axiom_sdk::{
    AxiomSdk, ProofType as AxiomProofType,
    build::BuildSdk,
    input::Input as AxiomInput,
    prove::{ProveArgs, ProveSdk},
};
use eyre::Context;
use jiff::Timestamp;
use scroll_proving_sdk::{
    config::Config as SdkConfig,
    prover::{
        ProofType, ProvingService,
        proving_service::{
            GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
            QueryTaskResponse, TaskStatus,
        },
    },
};
use scroll_zkvm_types::{
    ProvingTask,
    proof::{OpenVmEvmProof, OpenVmVersionedVmStarkProof, ProofEnum},
};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, fs::File, io::Write, path::Path};
use tempfile::NamedTempFile;
use tracing::Level;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AxiomProverConfig {
    pub axiom: AxiomConfig,
    pub sdk_config: SdkConfig,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AxiomConfig {
    pub api_key: String,
    // vk to program mapping
    pub programs: HashMap<String, AxiomProgram>,
    pub num_gpus: Option<usize>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AxiomProgram {
    pub program_id: String,
    pub config_id: String,
}

#[derive(Debug)]
pub struct AxiomProver {
    config: AxiomProverConfig,
}

impl AxiomProverConfig {
    pub fn from_reader<R>(reader: R) -> eyre::Result<Self>
    where
        R: std::io::Read,
    {
        serde_json::from_reader(reader).map_err(|e| eyre::eyre!(e))
    }

    pub fn from_file<P: AsRef<Path>>(file_name: P) -> eyre::Result<Self> {
        let file = File::open(file_name)?;
        Self::from_reader(&file)
    }
}

#[async_trait]
impl ProvingService for AxiomProver {
    fn is_local(&self) -> bool {
        false
    }

    async fn get_vks(&self, _: GetVkRequest) -> GetVkResponse {
        // get vk has been deprecated in new prover with dynamic asset loading scheme
        GetVkResponse {
            vks: vec![],
            error: None,
        }
    }

    #[instrument(skip(self), ret, level = Level::DEBUG)]
    async fn prove(&mut self, req: ProveRequest) -> ProveResponse {
        self.prove_inner(req)
            .await
            .unwrap_or_else(|e| ProveResponse {
                status: TaskStatus::Failed,
                error: Some(format!("failed to submit proof task to axiom: {}", e)),
                ..Default::default()
            })
    }

    #[instrument(skip(self), ret, level = Level::DEBUG)]
    async fn query_task(&mut self, req: QueryTaskRequest) -> QueryTaskResponse {
        let task_id = req.task_id.clone();
        self.query_task_inner(req)
            .await
            .unwrap_or_else(|e| QueryTaskResponse {
                task_id,
                status: TaskStatus::Failed,
                error: Some(format!("failed to query axiom task: {}", e)),
                ..Default::default()
            })
    }
}

impl AxiomProver {
    pub fn new(config: AxiomProverConfig) -> Self {
        Self { config }
    }

    async fn make_axiom_request<R: Send + 'static>(
        &self,
        config_id: Option<String>,
        req: impl FnOnce(AxiomSdk) -> eyre::Result<R> + Send + 'static,
    ) -> eyre::Result<R> {
        let api_key = self.config.axiom.api_key.clone();
        tokio::task::spawn_blocking(move || {
            let config = axiom_sdk::AxiomConfig {
                api_key: Some(api_key),
                config_id,
                ..Default::default()
            };
            let sdk = AxiomSdk::new(config);
            req(sdk)
        })
        .await
        .context("failed to join axiom request")
        .flatten()
    }

    #[instrument(skip_all, ret, err, level = Level::DEBUG)]
    fn get_program(&self, vk: &[u8]) -> eyre::Result<AxiomProgram> {
        let vk = hex::encode(vk);
        debug!(vk = %vk);
        self.config
            .axiom
            .programs
            .get(vk.as_str())
            .cloned()
            .ok_or_else(|| eyre::eyre!("no axiom program configured for vk: {vk}"))
    }

    #[instrument(skip_all, err, level = Level::DEBUG)]
    async fn prove_inner(&mut self, req: ProveRequest) -> eyre::Result<ProveResponse> {
        let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
        if prover_task.use_openvm_13 {
            eyre::bail!("axiom prover does not support openvm v1.3 tasks");
        }

        let prover_task: ProvingTask = prover_task.into();

        let program = self.get_program(&prover_task.vk)?;
        let num_gpus = self.config.axiom.num_gpus;

        let mut input_file = NamedTempFile::new()?;
        let input = prover_task.build_openvm_input();
        serde_json::to_writer(&mut input_file, &input)?;
        input_file.flush()?;

        let proof_type = if req.proof_type == ProofType::Bundle {
            AxiomProofType::Evm
        } else {
            AxiomProofType::Stark
        };

        let mut response = ProveResponse {
            proof_type: req.proof_type,
            created_at: Timestamp::now().as_duration().as_secs_f64(),
            status: TaskStatus::Proving,
            ..Default::default()
        };

        response.task_id = self
            .make_axiom_request(Some(program.config_id), move |sdk| {
                sdk.generate_new_proof(ProveArgs {
                    program_id: Some(program.program_id.clone()),
                    input: Some(AxiomInput::FilePath(input_file.path().to_path_buf())),
                    proof_type: Some(proof_type),
                    num_gpus,
                    priority: None,
                })
            })
            .await?;
        info!(
            proof_type = ?req.proof_type,
            identifier = %prover_task.identifier,
            task_id = %response.task_id,
            "submitted axiom proving task"
        );

        Ok(response)
    }

    #[instrument(skip_all, err, level = Level::DEBUG)]
    async fn query_task_inner(&mut self, req: QueryTaskRequest) -> eyre::Result<QueryTaskResponse> {
        let mut response = QueryTaskResponse {
            task_id: req.task_id.clone(),
            ..Default::default()
        };

        let task_id = req.task_id.clone();

        let (status, proof_type, proof) = self
            .make_axiom_request(None, move |sdk| {
                let status = sdk.get_proof_status(&task_id)?;
                debug!(status = ?status, "fetched axiom task status");

                let program_status = sdk.get_build_status(&status.program_uuid)?;
                let proof_type = match program_status.name.as_str() {
                    "chunk" => ProofType::Chunk,
                    "batch" => ProofType::Batch,
                    "bundle" => ProofType::Bundle,
                    _ => {
                        return Err(eyre::eyre!("unrecognized program in: {program_status:#?}",));
                    }
                };

                let axiom_proof_type: AxiomProofType = status.proof_type.parse()?;
                let proof = if status.state == "Succeeded" {
                    let file = NamedTempFile::new()?;
```

[The mirror truncates here; the remainder of axiom.rs is not captured.]
|
||||||
|
sdk.get_generated_proof(
|
||||||
|
&status.id,
|
||||||
|
&axiom_proof_type,
|
||||||
|
Some(file.path().to_path_buf()),
|
||||||
|
)?;
|
||||||
|
Some(file)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((status, proof_type, proof))
|
||||||
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Queued, Executing, Executed, AppProving, AppProvingDone, PostProcessing, Failed,
|
||||||
|
// Succeeded
|
||||||
|
response.status = match status.state.as_str() {
|
||||||
|
"Queued" => TaskStatus::Queued,
|
||||||
|
"Executing" | "Executed" | "AppProving" | "AppProvingDone" | "PostProcessing" => {
|
||||||
|
TaskStatus::Proving
|
||||||
|
}
|
||||||
|
"Succeeded" => TaskStatus::Success,
|
||||||
|
"Failed" => TaskStatus::Failed,
|
||||||
|
other => {
|
||||||
|
return Err(eyre::eyre!("unrecognized axiom task status: {other}"));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
debug!(status = ?response.status, "mapped axiom task status");
|
||||||
|
|
||||||
|
if response.status == TaskStatus::Failed {
|
||||||
|
response.error = Some(
|
||||||
|
status
|
||||||
|
.error_message
|
||||||
|
.unwrap_or_else(|| "unknown error".to_string()),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
response.proof_type = proof_type;
|
||||||
|
|
||||||
|
let created_at: Timestamp = status.created_at.parse()?;
|
||||||
|
response.created_at = created_at.as_duration().as_secs_f64();
|
||||||
|
if let Some(launched_at) = status.launched_at
|
||||||
|
&& !launched_at.is_empty()
|
||||||
|
{
|
||||||
|
let started_at: Timestamp = launched_at.parse()?;
|
||||||
|
let started_at = started_at.as_duration();
|
||||||
|
response.started_at = Some(started_at.as_secs_f64());
|
||||||
|
|
||||||
|
if let Some(terminated_at) = status.terminated_at
|
||||||
|
&& !terminated_at.is_empty()
|
||||||
|
{
|
||||||
|
let finished_at: Timestamp = terminated_at.parse()?;
|
||||||
|
let finished_at = finished_at.as_duration();
|
||||||
|
response.finished_at = Some(finished_at.as_secs_f64());
|
||||||
|
|
||||||
|
let duration = finished_at.checked_sub(started_at).ok_or_else(|| {
|
||||||
|
eyre::eyre!(
|
||||||
|
"invalid timestamps: started_at={:?}, finished_at={:?}",
|
||||||
|
started_at,
|
||||||
|
finished_at
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
response.compute_time_sec = Some(duration.as_secs_f64());
|
||||||
|
info!(
|
||||||
|
task_id = %req.task_id,
|
||||||
|
launched_at = %format_args!("{launched_at:#}"),
|
||||||
|
terminated_at = %format_args!("{terminated_at:#}"),
|
||||||
|
duration = %format_args!("{duration:#}"),
|
||||||
|
priority = %status.priority,
|
||||||
|
"completed"
|
||||||
|
);
|
||||||
|
info!(
|
||||||
|
task_id = %req.task_id,
|
||||||
|
cells_used = %status.cells_used,
|
||||||
|
num_gpus = %status.num_gpus,
|
||||||
|
"resource usage"
|
||||||
|
);
|
||||||
|
if let Some(num_instructions) = status.num_instructions {
|
||||||
|
let mhz = num_instructions as f64 / (duration.as_secs_f64() * 1_000_000.0);
|
||||||
|
info!(
|
||||||
|
task_id = %req.task_id,
|
||||||
|
cycles = %num_instructions,
|
||||||
|
MHz = %format_args!("{mhz:.2}"),
|
||||||
|
"performance"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(proof_file) = proof {
|
||||||
|
let proof = match proof_type {
|
||||||
|
ProofType::Bundle => {
|
||||||
|
let proof: OpenVmEvmProof = serde_json::from_reader(proof_file)?;
|
||||||
|
ProofEnum::Evm(proof.into())
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
let proof: OpenVmVersionedVmStarkProof = serde_json::from_reader(proof_file)?;
|
||||||
|
ProofEnum::Stark(proof.try_into()?)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
response.proof = Some(serde_json::to_string(&proof)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(response)
|
||||||
|
}
|
||||||
|
}
|
||||||
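For reference, a minimal sketch of the JSON shape that `AxiomProverConfig::from_file` would accept, assuming serde's default field naming for the structs above. All values are placeholders, `num_gpus` may be omitted since it is an `Option`, and `sdk_config` is elided here; note the deployed `config.template.json` later in this diff carries the same axiom values under flattened `axiom_api_key`/`axiom_programs` keys, so treat this purely as the serde shape:

```json
{
  "axiom": {
    "api_key": "<axiom api key>",
    "programs": {
      "<vk hex string>": {
        "program_id": "prg_<axiom program id>",
        "config_id": "cfg_<axiom config id>"
      }
    },
    "num_gpus": 8
  },
  "sdk_config": {}
}
```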
342 crates/prover-bin/src/prover/local.rs Normal file
@@ -0,0 +1,342 @@
use crate::zk_circuits_handler::{CircuitsHandler, universal::UniversalHandler};
use async_trait::async_trait;
use eyre::Result;
use scroll_proving_sdk::{
    config::Config as SdkConfig,
    prover::{
        ProvingService,
        proving_service::{
            GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
            QueryTaskResponse, TaskStatus,
        },
        types::ProofType,
    },
};
use scroll_zkvm_types::ProvingTask;
use serde::{Deserialize, Serialize};
use std::{
    collections::HashMap,
    fmt,
    fs::File,
    path::{Path, PathBuf},
    sync::{Arc, LazyLock},
    time::{SystemTime, UNIX_EPOCH},
};
use tokio::{runtime::Handle, sync::Mutex, task::JoinHandle};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssetsLocationData {
    /// The base url used to form a download url for an asset; MUST HAVE A TRAILING SLASH
    pub base_url: url::Url,
    /// An alternate url for a specified vk
    #[serde(default)]
    pub asset_detours: HashMap<String, url::Url>,
}

impl AssetsLocationData {
    pub fn gen_asset_url(&self, vk_as_path: &str, proof_type: ProofType) -> Result<url::Url> {
        Ok(self.base_url.join(
            match proof_type {
                ProofType::Chunk => format!("chunk/{vk_as_path}/"),
                ProofType::Batch => format!("batch/{vk_as_path}/"),
                ProofType::Bundle => format!("bundle/{vk_as_path}/"),
                t => eyre::bail!("unrecognized proof type: {}", t as u8),
            }
            .as_str(),
        )?)
    }

    pub fn validate(&self) -> Result<()> {
        if !self.base_url.path().ends_with('/') {
            eyre::bail!(
                "base_url must have a trailing slash, got: {}",
                self.base_url
            );
        }
        Ok(())
    }

    pub async fn get_asset(
        &self,
        vk: &str,
        url_base: &url::Url,
        base_path: impl AsRef<Path>,
    ) -> Result<PathBuf> {
        let download_files = ["app.vmexe", "openvm.toml"];

        // Step 1: Create a local path for storage
        let storage_path = base_path.as_ref().join(vk);
        std::fs::create_dir_all(&storage_path)?;

        // Step 2 & 3: Download each file if needed
        let client = reqwest::Client::new();

        for filename in download_files.iter() {
            let local_file_path = storage_path.join(filename);
            let download_url = url_base.join(filename)?;

            // Check if the file already exists
            if local_file_path.exists() {
                // Get file metadata to check size
                if let Ok(metadata) = std::fs::metadata(&local_file_path) {
                    // Make a HEAD request to get the remote file size
                    if let Ok(head_resp) = client.head(download_url.clone()).send().await {
                        if let Some(content_length) = head_resp.headers().get("content-length") {
                            if let Ok(remote_size) =
                                content_length.to_str().unwrap_or("0").parse::<u64>()
                            {
                                // If sizes match, skip the download
                                if metadata.len() == remote_size {
                                    println!(
                                        "File {} already exists with matching size, skipping download",
                                        filename
                                    );
                                    continue;
                                }
                            }
                        }
                    }
                }
            }

            println!("Downloading {} from {}", filename, download_url);

            let response = client.get(download_url).send().await?;
            if !response.status().is_success() {
                eyre::bail!(
                    "Failed to download {}: HTTP status {}",
                    filename,
                    response.status()
                );
            }

            // Stream the content directly to file instead of loading it into memory
            let mut file = std::fs::File::create(&local_file_path)?;
            let mut stream = response.bytes_stream();

            use futures_util::StreamExt;
            while let Some(chunk) = stream.next().await {
                std::io::Write::write_all(&mut file, &chunk?)?;
            }
        }

        // Step 4: Return the storage path
        Ok(storage_path)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LocalProverConfig {
    pub sdk_config: SdkConfig,
    pub circuits: HashMap<String, CircuitConfig>,
}

impl LocalProverConfig {
    pub fn from_reader<R>(reader: R) -> Result<Self>
    where
        R: std::io::Read,
    {
        serde_json::from_reader(reader).map_err(|e| eyre::eyre!(e))
    }

    pub fn from_file<P: AsRef<Path>>(file_name: P) -> Result<Self> {
        let file = File::open(file_name)?;
        Self::from_reader(&file)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CircuitConfig {
    /// The path to save assets for a specified hard fork phase
    pub workspace_path: String,
    /// The location data for dynamic loading
    #[serde(flatten)]
    pub location_data: AssetsLocationData,
    /// Cached vk values to save some initial cost, for debugging only
    #[serde(default)]
    pub vks: HashMap<ProofType, String>,
}

pub struct LocalProver {
    config: LocalProverConfig,
    next_task_id: u64,
    current_task: Option<JoinHandle<Result<String>>>,

    handlers: HashMap<String, Arc<dyn CircuitsHandler>>,
}

impl fmt::Debug for LocalProver {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("LocalProver")
            .field("config", &self.config)
            .field("next_task_id", &self.next_task_id)
            .finish()
    }
}

#[async_trait]
impl ProvingService for LocalProver {
    fn is_local(&self) -> bool {
        true
    }

    async fn get_vks(&self, _: GetVkRequest) -> GetVkResponse {
        // get_vk has been deprecated in the new prover, which uses the dynamic asset loading
        // scheme
        GetVkResponse {
            vks: vec![],
            error: None,
        }
    }

    async fn prove(&mut self, req: ProveRequest) -> ProveResponse {
        match self.do_prove(req).await {
            Ok(resp) => resp,
            Err(e) => ProveResponse {
                status: TaskStatus::Failed,
                error: Some(format!("failed to request proof: {}", e)),
                ..Default::default()
            },
        }
    }

    async fn query_task(&mut self, req: QueryTaskRequest) -> QueryTaskResponse {
        if let Some(handle) = &mut self.current_task {
            if handle.is_finished() {
                return match handle.await {
                    Ok(Ok(proof)) => QueryTaskResponse {
                        task_id: req.task_id,
                        status: TaskStatus::Success,
                        proof: Some(proof),
                        ..Default::default()
                    },
                    Ok(Err(e)) => QueryTaskResponse {
                        task_id: req.task_id,
                        status: TaskStatus::Failed,
                        error: Some(format!("proving task failed: {}", e)),
                        ..Default::default()
                    },
                    Err(e) => QueryTaskResponse {
                        task_id: req.task_id,
                        status: TaskStatus::Failed,
                        error: Some(format!("proving task panicked: {}", e)),
                        ..Default::default()
                    },
                };
            } else {
                return QueryTaskResponse {
                    task_id: req.task_id,
                    status: TaskStatus::Proving,
                    ..Default::default()
                };
            }
        }
        // If no handle is found
        QueryTaskResponse {
            task_id: req.task_id,
            status: TaskStatus::Failed,
            error: Some("no proving task is running".to_string()),
            ..Default::default()
        }
    }
}

static GLOBAL_ASSET_URLS: LazyLock<HashMap<String, HashMap<String, url::Url>>> =
    LazyLock::new(|| {
        const ASSETS_JSON: &str = include_str!("../../assets_url_preset.json");
        serde_json::from_str(ASSETS_JSON).expect("Failed to parse assets_url_preset.json")
    });

impl LocalProver {
    pub fn new(mut config: LocalProverConfig) -> Self {
        for (fork_name, circuit_config) in config.circuits.iter_mut() {
            // validate each base url
            circuit_config.location_data.validate().unwrap();
            let mut template_url_mapping = GLOBAL_ASSET_URLS
                .get(&fork_name.to_lowercase())
                .cloned()
                .unwrap_or_default();

            // apply the default settings from the template
            for (key, url) in circuit_config.location_data.asset_detours.drain() {
                template_url_mapping.insert(key, url);
            }

            circuit_config.location_data.asset_detours = template_url_mapping;

            // validate each detour url
            for url in circuit_config.location_data.asset_detours.values() {
                assert!(
                    url.path().ends_with('/'),
                    "url {} must end with /",
                    url.as_str()
                );
            }
        }

        Self {
            config,
            next_task_id: 0,
            current_task: None,
            handlers: HashMap::new(),
        }
    }

    async fn do_prove(&mut self, req: ProveRequest) -> Result<ProveResponse> {
        self.next_task_id += 1;
        let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
        let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;

        let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
        let is_openvm_13 = prover_task.use_openvm_13;
        let prover_task: ProvingTask = prover_task.into();
        let vk = hex::encode(&prover_task.vk);
        let handler = if let Some(handler) = self.handlers.get(&vk) {
            handler.clone()
        } else {
            let base_config = self
                .config
                .circuits
                .get(&req.hard_fork_name)
                .ok_or_else(|| {
                    eyre::eyre!(
                        "coordinator sent unexpected fork name {}",
                        req.hard_fork_name
                    )
                })?;
            let url_base = if let Some(url) = base_config.location_data.asset_detours.get(&vk) {
                url.clone()
            } else {
                base_config
                    .location_data
                    .gen_asset_url(&vk, req.proof_type)?
            };
            let asset_path = base_config
                .location_data
                .get_asset(&vk, &url_base, &base_config.workspace_path)
                .await?;
            let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(
                &asset_path,
                is_openvm_13,
            )?));
            self.handlers.insert(vk, circuits_handler.clone());
            circuits_handler
        };

        let handle = Handle::current();
        let is_evm = req.proof_type == ProofType::Bundle;
        let task_handle = tokio::task::spawn_blocking(move || {
            handle.block_on(handler.get_proof_data(&prover_task, is_evm))
        });
        self.current_task = Some(task_handle);

        Ok(ProveResponse {
            task_id: self.next_task_id.to_string(),
            proof_type: req.proof_type,
            circuit_version: req.circuit_version,
            hard_fork_name: req.hard_fork_name,
            status: TaskStatus::Proving,
            created_at,
            input: Some(req.input),
            ..Default::default()
        })
    }
}
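A side note on the trailing-slash requirement enforced by `validate` and asserted again in `LocalProver::new`: `url::Url::join` follows RFC 3986 relative resolution, so a base without a trailing slash has its final path segment replaced rather than extended. A minimal sketch of the behavior that `gen_asset_url` and `get_asset` depend on (the urls are illustrative):

```rust
fn main() -> Result<(), url::ParseError> {
    // With a trailing slash, join() extends the path, which is what
    // gen_asset_url ("chunk/{vk}/") and get_asset ("app.vmexe") rely on.
    let base = url::Url::parse("https://example.com/releases/galileo/")?;
    assert_eq!(
        base.join("chunk/0xabc/")?.join("app.vmexe")?.as_str(),
        "https://example.com/releases/galileo/chunk/0xabc/app.vmexe"
    );

    // Without one, the final segment is silently dropped during resolution.
    let bad = url::Url::parse("https://example.com/releases/galileo")?;
    assert_eq!(
        bad.join("chunk/0xabc/")?.as_str(),
        "https://example.com/releases/chunk/0xabc/"
    );
    Ok(())
}
```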
@@ -1,3 +1,5 @@
+include ../../build/common.mk
+
 .PHONY: clean setup_db test_tool all check_vars
 
 include conf/.make.env
@@ -57,6 +59,7 @@ reset_db:
 
 test_tool:
 	go build -o $(PWD)/build/bin/e2e_tool ../../rollup/tests/integration_tool
+	$(call macos_codesign,$(PWD)/build/bin/e2e_tool)
 
 build/bin/e2e_tool: test_tool
 
@@ -67,4 +70,4 @@ reimport_data: reset_db import_data
 
 coordinator_setup:
 	SCROLL_FORK_NAME=${SCROLL_FORK_NAME} $(MAKE) -C ../../coordinator localsetup
 	cp -f conf/genesis.json ../../coordinator/build/bin/conf
@@ -2,15 +2,34 @@
 
 It contains data from some blocks in a specified testnet, and helps to generate a series of chunks/batches/bundles from these blocks, filling the DB for the coordinator, so an e2e test (from chunk to bundle) can be run completely locally
 
+### Pre
+
+1. install [goose](https://github.com/pressly/goose)
+
+```bash
+go install github.com/pressly/goose/v3/cmd/goose@latest
+```
+
 Prepare:
 link the data dir as "conf" from one of the dirs with a prepared data set; currently we have the following data sets:
-+ sepolia: with blocks from scroll sepolia
-+ cloak-xen: with blocks from xen sepolia, which is a cloak network
+- sepolia: with blocks from the scroll sepolia fork, e.g. `ln -s sepolia-galileo conf`
+- galileo: with blocks from the scroll galileo fork
+- cloak-xen: with blocks from xen sepolia, which is a cloak network
 
 Steps:
 
 1. run `make all` under `tests/prover-e2e`; it launches a postgreSql db in a local docker container, ready to be used by the coordinator (including some chunks/batches/bundles waiting to be proven)
-2. setup assets by run `make coordinator_setup`
+2. setup assets by running `make coordinator_setup`; `SCROLL_ZKVM_VERSION` must be specified, and if the e2e test targets a fork other than `Galileo`, `SCROLL_FORK_NAME` is also required, for example:
+
+```bash
+SCROLL_FORK_NAME=feynman SCROLL_ZKVM_VERSION=v0.7.0 make coordinator_setup
+```
+
 3. in `coordinator/build/bin/conf`, update the necessary items in `config.template.json` and rename it as `config.json`
 4. build and launch the `coordinator_api` service locally
-5. setup the `config.json` for zkvm prover to connect with the locally launched coordinator api
+5. setup the `config.json` for the zkvm prover to connect with the locally launched coordinator api (see the sketch after this list):
+
+   - set the `sdk_config.coordinator.base_url` field to "http://localhost:8390"
+
 6. in `zkvm-prover`, launch `make test_e2e_run`, which runs the prover locally, connects to the local coordinator api service according to the `config.json`, and proves all tasks injected into the db in step 1.
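Step 5 above amounts to one field in the prover's `config.json`. A hedged sketch of the coordinator fragment, reusing the field set from the `config.template.json` hunk later in this diff and assuming the nesting implied by the README's `sdk_config.coordinator.base_url` path:

```json
{
  "coordinator": {
    "base_url": "http://localhost:8390",
    "retry_count": 10,
    "retry_wait_time_sec": 10,
    "connection_timeout_sec": 1800,
    "suppress_empty_task_error": false
  }
}
```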
@@ -1,3 +1,3 @@
 BEGIN_BLOCK?=15206785
 END_BLOCK?=15206794
 SCROLL_FORK_NAME=galileo
16 zkvm-prover/Dockerfile Normal file
@@ -0,0 +1,16 @@
FROM scrolltech/cuda-go-rust-builder:cuda-12.9.1-go-1.22.12-rust-nightly-2025-08-18 AS builder
WORKDIR /app
COPY . .
RUN cd /app/zkvm-prover && make cpu_prover

FROM debian:trixie-slim AS runtime
WORKDIR app
RUN apt-get update \
    && apt-get install -y --no-install-recommends ca-certificates \
    && update-ca-certificates \
    && rm -rf /var/lib/apt/lists/*

ENV RUST_LOG='off,scroll_proving_sdk=info,prover=info'

COPY --from=builder /app/target/release/prover ./prover
ENTRYPOINT ["./prover"]
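Since the Dockerfile copies the whole tree and builds `cpu_prover` under `/app/zkvm-prover`, the image is meant to be built from the repo root. A hedged usage sketch (the image tag and config mount are illustrative, not part of the diff):

```bash
docker build -f zkvm-prover/Dockerfile -t scroll-zkvm-prover .
docker run --rm -v "$(pwd)/config.json:/app/config.json" \
    scroll-zkvm-prover --config ./config.json
```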
@@ -35,6 +35,9 @@ ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_VERSION}
 E2E_HANDLE_SET ?= ../tests/prover-e2e/testset.json
 DUMP_DIR ?= .work
+
+cpu_prover:
+	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release -p prover
 
 prover:
 	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release --features cuda -p prover
 
@@ -60,8 +63,11 @@ test_run:
 	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json
 
 test_e2e_run: ${E2E_HANDLE_SET}
-	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json handle ${E2E_HANDLE_SET}
+	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --prover.kind local --config ./config.json handle ${E2E_HANDLE_SET}
 
 test_e2e_run_gpu: ${E2E_HANDLE_SET}
 	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release --features cuda -p prover -- --config ./config.json handle ${E2E_HANDLE_SET}
+
+test_axiom_e2e_run: ${E2E_HANDLE_SET}
+	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --prover.kind axiom --config ./config.json handle ${E2E_HANDLE_SET}
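With the new target, the axiom-backed e2e flow mirrors the local one; assuming the axiom credentials are already present in `config.json`, an invocation might look like:

```bash
cd zkvm-prover
# defaults to E2E_HANDLE_SET=../tests/prover-e2e/testset.json
make test_axiom_e2e_run
# or point it at another handle set explicitly
E2E_HANDLE_SET=../tests/prover-e2e/testset.json make test_axiom_e2e_run
```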
@@ -6,7 +6,8 @@
     "base_url": "<the url of coordinator>",
     "retry_count": 10,
     "retry_wait_time_sec": 10,
-    "connection_timeout_sec": 1800
+    "connection_timeout_sec": 1800,
+    "suppress_empty_task_error": false
   },
   "prover": {
     "supported_proof_types": [
@@ -14,7 +15,10 @@
       2,
       3
     ],
-    "circuit_version": "v0.13.1"
+    "circuit_version": "v0.13.1",
+    "n_workers": 1,
+    "poll_interval_sec": 20,
+    "randomized_delay_sec": 0
   },
   "health_listener_addr": "127.0.0.1:10080",
   "db_path": ".work/db"
@@ -31,6 +35,13 @@
     "galileoV2": {
       "base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/galileov2/",
       "workspace_path": ".work/galileo"
     }
-  }
+  },
+  "axiom_api_key": "<axiom api key>",
+  "axiom_programs": {
+    "<vk hex string>": {
+      "program_id": "prg_<axiom program id>",
+      "config_id": "cfg_<axiom config id>"
+    }
+  }
 }
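One linkage worth noting: `get_program` in axiom.rs keys the programs map by `hex::encode(vk)`, which yields lowercase hex with no `0x` prefix, so the `<vk hex string>` placeholder above must match that format exactly. A tiny sketch (the vk bytes are illustrative):

```rust
fn main() {
    // hex::encode yields lowercase hex without a 0x prefix --
    // the exact key format get_program looks up in the programs map.
    let vk: [u8; 4] = [0xde, 0xad, 0xbe, 0xef];
    assert_eq!(hex::encode(vk), "deadbeef");
}
```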