Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-13 07:57:58 -05:00)

Compare commits: develop...feat/valid
14 commits:

- 07a9f0e106
- ad13b56d7c
- 722cc5ee76
- ab8df8e4b5
- 755ed6074e
- 0d6eaf74fc
- 953ba50c07
- 9998069515
- fcda68b5b3
- f3d1b151b2
- e33d11ddc7
- 642ee2f975
- 8fcd27333f
- f640ef9377
.github/workflows/common.yml (2 changes, vendored)

@@ -29,7 +29,7 @@ jobs:
     steps:
       - uses: actions-rs/toolchain@v1
         with:
-          toolchain: nightly-2025-02-14
+          toolchain: nightly-2025-08-18
           override: true
           components: rustfmt, clippy
       - name: Install Go
.github/workflows/coordinator.yml (2 changes, vendored)

@@ -33,7 +33,7 @@ jobs:
     steps:
       - uses: actions-rs/toolchain@v1
         with:
-          toolchain: nightly-2025-02-14
+          toolchain: nightly-2025-08-18
           override: true
           components: rustfmt, clippy
       - name: Install Go
.github/workflows/intermediate-docker.yml (6 changes, vendored)

@@ -22,11 +22,9 @@ on:
         required: true
         type: choice
         options:
-          - nightly-2023-12-03
-          - nightly-2022-12-10
           - 1.86.0
-          - nightly-2025-02-14
-        default: "nightly-2023-12-03"
+          - nightly-2025-08-18
+        default: "nightly-2025-08-18"
       PYTHON_VERSION:
         description: "Python version"
         required: false
Cargo.lock (2196 changes, generated): file diff suppressed because it is too large.
Cargo.toml (43 changes)

@@ -14,15 +14,16 @@ edition = "2021"
 homepage = "https://scroll.io"
 readme = "README.md"
 repository = "https://github.com/scroll-tech/scroll"
-version = "4.5.8"
+version = "4.5.47"

 [workspace.dependencies]
-scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "ad0efe7" }
-scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "ad0efe7" }
-scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "ad0efe7" }
+scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "5c361ad" }
+scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "5c361ad" }
+scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "5c361ad" }

-sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3", features = ["scroll"] }
-sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3" }
+sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll", "rkyv"] }
+sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master" }
+sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll"] }

 metrics = "0.23.0"
 metrics-util = "0.17"

@@ -30,7 +31,8 @@ metrics-tracing-context = "0.16.0"

 anyhow = "1.0"
 alloy = { version = "1", default-features = false }
-alloy-primitives = { version = "1.2", default-features = false, features = ["tiny-keccak"] }
+alloy-primitives = { version = "1.3", default-features = false, features = ["tiny-keccak"] }
+alloy-sol-types = { version = "1.3", default-features = false }
 # also use this to trigger "serde" feature for primitives
 alloy-serde = { version = "1", default-features = false }

@@ -46,21 +48,20 @@ once_cell = "1.20"
 base64 = "0.22"

 [patch.crates-io]
-revm = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-bytecode = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-context = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-context-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-database = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-database-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-handler = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-inspector = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-interpreter = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-precompile = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-primitives = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-state = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
+revm = { git = "https://github.com/scroll-tech/revm" }
+revm-bytecode = { git = "https://github.com/scroll-tech/revm" }
+revm-context = { git = "https://github.com/scroll-tech/revm" }
+revm-context-interface = { git = "https://github.com/scroll-tech/revm" }
+revm-database = { git = "https://github.com/scroll-tech/revm" }
+revm-database-interface = { git = "https://github.com/scroll-tech/revm" }
+revm-handler = { git = "https://github.com/scroll-tech/revm" }
+revm-inspector = { git = "https://github.com/scroll-tech/revm" }
+revm-interpreter = { git = "https://github.com/scroll-tech/revm" }
+revm-precompile = { git = "https://github.com/scroll-tech/revm" }
+revm-primitives = { git = "https://github.com/scroll-tech/revm" }
+revm-state = { git = "https://github.com/scroll-tech/revm" }

-ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.15.0" }
-alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v1.2.0" }
+alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "feat/rkyv" }

 [profile.maxperf]
 inherits = "release"
@@ -66,6 +66,7 @@ type AssetConfig struct {
 // VerifierConfig load zk verifier config.
 type VerifierConfig struct {
 	MinProverVersion string        `json:"min_prover_version"`
+	Features         string        `json:"features,omitempty"`
 	Verifiers        []AssetConfig `json:"verifiers"`
 }

@@ -140,3 +140,10 @@ func DumpVk(forkName, filePath string) error {

 	return nil
 }
+
+// Set dynamic feature flags that control libzkp runtime behavior
+func SetDynamicFeature(feats string) {
+	cFeats := goToCString(feats)
+	defer freeCString(cFeats)
+	C.set_dynamic_feature(cFeats)
+}
@@ -54,4 +54,7 @@ char* gen_wrapped_proof(char* proof_json, char* metadata, char* vk, size_t vk_le
 // Release memory allocated for a string returned by gen_wrapped_proof
 void release_string(char* string_ptr);

+void set_dynamic_feature(const char* feats);
+
 #endif /* LIBZKP_H */
@@ -67,6 +67,9 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
 		return nil, err
 	}

+	if cfg.Features != "" {
+		libzkp.SetDynamicFeature(cfg.Features)
+	}
 	libzkp.InitVerifier(string(configBytes))

 	v := &Verifier{
Deleted file (GPU patch overrides for openvm, stark-backend and Plonky3):

@@ -1,45 +0,0 @@
-[patch."https://github.com/openvm-org/openvm.git"]
-openvm-build = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-continuations = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-instructions ={ git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-native-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-native-compiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-native-recursion = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-native-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-rv32im-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-openvm-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
-openvm-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
-
-[patch."https://github.com/openvm-org/stark-backend.git"]
-openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
-openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
-
-[patch."https://github.com/Plonky3/Plonky3.git"]
-p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
-    "nightly-features",
-], tag = "v0.2.1" }
-p3-koala-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
-p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
-p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
crates/gpu_override/Cargo.lock (2374 changes, generated): file diff suppressed because it is too large.
Deleted file (GPU build Makefile):

@@ -1,23 +0,0 @@
-.PHONY: build update clean
-
-ZKVM_COMMIT ?= freebuild
-PLONKY3_GPU_VERSION=$(shell ./print_plonky3gpu_version.sh | sed -n '2p')
-$(info PLONKY3_GPU_VERSION is ${PLONKY3_GPU_VERSION})
-
-GIT_REV ?= $(shell git rev-parse --short HEAD)
-GO_TAG ?= $(shell grep "var tag = " ../../common/version/version.go | cut -d "\"" -f2)
-ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_GPU_VERSION}
-$(info ZK_GPU_VERSION is ${ZK_VERSION})
-
-clean:
-	cargo clean -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock
-
-# build gpu prover, never touch lock file
-build:
-	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock
-
-version:
-	echo ${GO_TAG}-${GIT_REV}-${ZK_VERSION}
-# update Cargo.lock while override config has been updated
-#update:
-#	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock
Deleted file (print_plonky3gpu_version.sh):

@@ -1,10 +0,0 @@
-#!/bin/bash
-
-higher_plonky3_item=`grep "plonky3-gpu" ./Cargo.lock | sort | uniq | awk -F "[#=]" '{print $3" "$4}' | sort -k 1 | tail -n 1`
-
-higher_version=`echo $higher_plonky3_item | awk '{print $1}'`
-
-higher_commit=`echo $higher_plonky3_item | cut -d ' ' -f2 | cut -c-7`
-
-echo "$higher_version"
-echo "$higher_commit"
@@ -13,6 +13,7 @@ libzkp = { path = "../libzkp" }
 alloy = { workspace = true, features = ["provider-http", "transport-http", "reqwest", "reqwest-rustls-tls", "json-rpc"] }
 sbv-primitives = { workspace = true, features = ["scroll"] }
 sbv-utils = { workspace = true, features = ["scroll"] }
+sbv-core = { workspace = true, features = ["scroll"] }

 eyre.workspace = true
@@ -11,7 +11,7 @@ pub fn init(config: &str) -> eyre::Result<()> {
     Ok(())
 }

-pub fn get_client() -> rpc_client::RpcClient<'static> {
+pub fn get_client() -> impl libzkp::tasks::ChunkInterpreter {
     GLOBAL_L2GETH_CLI
         .get()
         .expect("must has been inited")
@@ -1,5 +1,5 @@
 use alloy::{
-    providers::{Provider, ProviderBuilder, RootProvider},
+    providers::{Provider, ProviderBuilder},
     rpc::client::ClientBuilder,
     transports::layers::RetryBackoffLayer,
 };

@@ -49,13 +49,13 @@ pub struct RpcConfig {
 /// so it can be run in block mode (i.e. inside dynamic library without a global entry)
 pub struct RpcClientCore {
     /// rpc prover
-    provider: RootProvider<Network>,
+    client: alloy::rpc::client::RpcClient,
     rt: tokio::runtime::Runtime,
 }

 #[derive(Clone, Copy)]
-pub struct RpcClient<'a> {
-    provider: &'a RootProvider<Network>,
+pub struct RpcClient<'a, T: Provider<Network>> {
+    provider: T,
     handle: &'a tokio::runtime::Handle,
 }

@@ -75,80 +75,78 @@ impl RpcClientCore {
         let retry_layer = RetryBackoffLayer::new(config.max_retry, config.backoff, config.cups);
         let client = ClientBuilder::default().layer(retry_layer).http(rpc);

-        Ok(Self {
-            provider: ProviderBuilder::<_, _, Network>::default().connect_client(client),
-            rt,
-        })
+        Ok(Self { client, rt })
     }

-    pub fn get_client(&self) -> RpcClient {
+    pub fn get_client(&self) -> RpcClient<'_, impl Provider<Network>> {
         RpcClient {
-            provider: &self.provider,
+            provider: ProviderBuilder::<_, _, Network>::default()
+                .connect_client(self.client.clone()),
             handle: self.rt.handle(),
         }
     }
 }

-impl ChunkInterpreter for RpcClient<'_> {
+impl<T: Provider<Network>> ChunkInterpreter for RpcClient<'_, T> {
     fn try_fetch_block_witness(
         &self,
         block_hash: sbv_primitives::B256,
-        prev_witness: Option<&sbv_primitives::types::BlockWitness>,
-    ) -> Result<sbv_primitives::types::BlockWitness> {
+        prev_witness: Option<&sbv_core::BlockWitness>,
+    ) -> Result<sbv_core::BlockWitness> {
         async fn fetch_witness_async(
-            provider: &RootProvider<Network>,
+            provider: impl Provider<Network>,
             block_hash: sbv_primitives::B256,
-            prev_witness: Option<&sbv_primitives::types::BlockWitness>,
-        ) -> Result<sbv_primitives::types::BlockWitness> {
-            use sbv_utils::{rpc::ProviderExt, witness::WitnessBuilder};
+            prev_witness: Option<&sbv_core::BlockWitness>,
+        ) -> Result<sbv_core::BlockWitness> {
+            use sbv_utils::rpc::ProviderExt;

-            let chain_id = provider.get_chain_id().await?;
-
-            let block = provider
-                .get_block_by_hash(block_hash)
-                .full()
-                .await?
-                .ok_or_else(|| eyre::eyre!("Block {block_hash} not found"))?;
-
-            let number = block.header.number;
-            let parent_hash = block.header.parent_hash;
-            if number == 0 {
-                eyre::bail!("no number in header or use block 0");
-            }
-
-            let mut witness_builder = WitnessBuilder::new()
-                .block(block)
-                .chain_id(chain_id)
-                .execution_witness(provider.debug_execution_witness(number.into()).await?);
-
-            let prev_state_root = match prev_witness {
-                Some(witness) => {
-                    if witness.header.number != number - 1 {
-                        eyre::bail!(
-                            "the ref witness is not the previous block, expected {} get {}",
-                            number - 1,
-                            witness.header.number,
-                        );
-                    }
-                    witness.header.state_root
-                }
-                None => {
-                    let parent_block = provider
-                        .get_block_by_hash(parent_hash)
-                        .await?
-                        .expect("parent block should exist");
-
-                    parent_block.header.state_root
-                }
-            };
-            witness_builder = witness_builder.prev_state_root(prev_state_root);
-
-            Ok(witness_builder.build()?)
+            let (chain_id, block_num, prev_state_root) = if let Some(w) = prev_witness {
+                (w.chain_id, w.header.number + 1, w.header.state_root)
+            } else {
+                let chain_id = provider.get_chain_id().await?;
+                let block = provider
+                    .get_block_by_hash(block_hash)
+                    .full()
+                    .await?
+                    .ok_or_else(|| eyre::eyre!("Block {block_hash} not found"))?;
+
+                let parent_block = provider
+                    .get_block_by_hash(block.header.parent_hash)
+                    .await?
+                    .ok_or_else(|| {
+                        eyre::eyre!(
+                            "parent block for block {} should exist",
+                            block.header.number
+                        )
+                    })?;
+
+                (
+                    chain_id,
+                    block.header.number,
+                    parent_block.header.state_root,
+                )
+            };
+
+            let req = provider
+                .dump_block_witness(block_num)
+                .with_chain_id(chain_id)
+                .with_prev_state_root(prev_state_root);
+
+            let witness = req
+                .send()
+                .await
+                .transpose()
+                .ok_or_else(|| eyre::eyre!("Block witness {block_num} not available"))??;
+
+            Ok(witness)
         }

         tracing::debug!("fetch witness for {block_hash}");
-        self.handle
-            .block_on(fetch_witness_async(self.provider, block_hash, prev_witness))
+        self.handle.block_on(fetch_witness_async(
+            &self.provider,
+            block_hash,
+            prev_witness,
+        ))
     }

     fn try_fetch_storage_node(

@@ -156,7 +154,7 @@ impl ChunkInterpreter for RpcClient<'_> {
         node_hash: sbv_primitives::B256,
     ) -> Result<sbv_primitives::Bytes> {
         async fn fetch_storage_node_async(
-            provider: &RootProvider<Network>,
+            provider: impl Provider<Network>,
             node_hash: sbv_primitives::B256,
         ) -> Result<sbv_primitives::Bytes> {
             let ret = provider

@@ -168,7 +166,7 @@ impl ChunkInterpreter for RpcClient<'_> {

         tracing::debug!("fetch storage node for {node_hash}");
         self.handle
-            .block_on(fetch_storage_node_async(self.provider, node_hash))
+            .block_on(fetch_storage_node_async(&self.provider, node_hash))
     }
 }

@@ -194,10 +192,10 @@ mod tests {
         let client_core = RpcClientCore::create(&config).expect("Failed to create RPC client");
         let client = client_core.get_client();

-        // latest - 1 block in 2025.6.15
+        // latest - 1 block in 2025.9.11
         let block_hash = B256::from(
             hex::const_decode_to_array(
-                b"0x9535a6970bc4db9031749331a214e35ed8c8a3f585f6f456d590a0bc780a1368",
+                b"0x093fb6bf2e556a659b35428ac447cd9f0635382fc40ffad417b5910824f9e932",
             )
             .unwrap(),
         );

@@ -207,10 +205,10 @@ mod tests {
             .try_fetch_block_witness(block_hash, None)
             .expect("should success");

-        // latest block in 2025.6.15
+        // block selected in 2025.9.11
         let block_hash = B256::from(
             hex::const_decode_to_array(
-                b"0xd47088cdb6afc68aa082e633bb7da9340d29c73841668afacfb9c1e66e557af0",
+                b"0x77cc84dd7a4dedf6fe5fb9b443aeb5a4fb0623ad088a365d3232b7b23fc848e5",
             )
             .unwrap(),
         );

@@ -220,26 +218,4 @@ mod tests {

         println!("{}", serde_json::to_string_pretty(&wit2).unwrap());
     }
-
-    #[test]
-    #[ignore = "Requires L2GETH_ENDPOINT environment variable"]
-    fn test_try_fetch_storage_node() {
-        let config = create_config_from_env();
-        let client_core = RpcClientCore::create(&config).expect("Failed to create RPC client");
-        let client = client_core.get_client();
-
-        // the root node (state root) of the block in unittest above
-        let node_hash = B256::from(
-            hex::const_decode_to_array(
-                b"0xb9e67403a2eb35afbb0475fe942918cf9a330a1d7532704c24554506be62b27c",
-            )
-            .unwrap(),
-        );
-
-        // This is expected to fail since we're using a dummy hash, but it tests the code path
-        let node = client
-            .try_fetch_storage_node(node_hash)
-            .expect("should success");
-        println!("{}", serde_json::to_string_pretty(&node).unwrap());
-    }
 }
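The rewritten fetch path above leans on an `Option`/`Result` transposition: `dump_block_witness(...).send()` apparently resolves to a `Result` wrapping an `Option`, and a missing witness is promoted into its own error before `??` unwraps both layers. A minimal self-contained sketch of that pattern, with stand-in types rather than sbv's real ones:

```rust
// Stand-in for a call that can fail (transport error) or succeed with no data.
fn fetch(block: u64) -> Result<Option<String>, String> {
    if block == 0 {
        Ok(None) // witness simply not available
    } else {
        Ok(Some(format!("witness-{block}")))
    }
}

fn must_fetch(block: u64) -> Result<String, String> {
    // Result<Option<T>, E> -> Option<Result<T, E>>, then "missing" becomes its own error.
    let witness = fetch(block)
        .transpose()
        .ok_or_else(|| format!("Block witness {block} not available"))??;
    Ok(witness)
}

fn main() {
    assert!(must_fetch(0).is_err());
    assert_eq!(must_fetch(7).unwrap(), "witness-7");
}
```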
@@ -10,6 +10,7 @@ scroll-zkvm-verifier.workspace = true

 alloy-primitives.workspace = true #depress the effect of "native-keccak"
 sbv-primitives = {workspace = true, features = ["scroll-compress-ratio", "scroll"]}
+sbv-core = { workspace = true, features = ["scroll"] }
 base64.workspace = true
 serde.workspace = true
 serde_derive.workspace = true

@@ -18,6 +19,7 @@ tracing.workspace = true
 eyre.workspace = true

 git-version = "0.3.5"
+bincode = { version = "2", features = ["serde"] }
 serde_stacker = "0.1"
 regex = "1.11"
 c-kzg = { version = "2.0", features = ["serde"] }
@@ -11,6 +11,27 @@ use serde_json::value::RawValue;
 use std::path::Path;
 use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};

+/// global features: use legacy encoding for witness
+static mut LEGACY_WITNESS_ENCODING: bool = false;
+pub(crate) fn witness_use_legacy_mode() -> bool {
+    unsafe { LEGACY_WITNESS_ENCODING }
+}
+
+pub fn set_dynamic_feature(feats: &str) {
+    for feat_s in feats.split(':') {
+        match feat_s.trim().to_lowercase().as_str() {
+            "legacy_witness" => {
+                tracing::info!("set witness encoding for legacy mode");
+                unsafe {
+                    // the function is only called while initialize step
+                    LEGACY_WITNESS_ENCODING = true;
+                }
+            }
+            s => tracing::warn!("unrecognized dynamic feature: {s}"),
+        }
+    }
+}
+
 /// Turn the coordinator's chunk task into a json string for formal chunk proving
 /// task (with full witnesses)
 pub fn checkout_chunk_task(

@@ -32,7 +53,6 @@ pub fn gen_universal_task(
     task_json: &str,
     fork_name_str: &str,
     expected_vk: &[u8],
-    interpreter: Option<impl ChunkInterpreter>,
 ) -> eyre::Result<(B256, String, String)> {
     use proofs::*;
     use tasks::*;

@@ -56,10 +76,9 @@ pub fn gen_universal_task(
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
-            let (pi_hash, metadata, u_task) = utils::panic_catch(move || {
-                gen_universal_chunk_task(task, fork_name_str.into(), interpreter)
-            })
-            .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
+            let (pi_hash, metadata, u_task) =
+                utils::panic_catch(move || gen_universal_chunk_task(task, fork_name_str.into()))
+                    .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Chunk(metadata), u_task)
         }
         x if x == TaskType::Batch as i32 => {
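The parser added above walks a colon-separated feature string once at initialization and flips a `static mut` flag. As a standalone illustration of the same parsing rule (not the crate's actual code), here is the equivalent with an `AtomicBool`, which avoids the `unsafe` blocks at the cost of an atomic load on each read:

```rust
use std::sync::atomic::{AtomicBool, Ordering};

static LEGACY_WITNESS_ENCODING: AtomicBool = AtomicBool::new(false);

fn witness_use_legacy_mode() -> bool {
    LEGACY_WITNESS_ENCODING.load(Ordering::Relaxed)
}

fn set_dynamic_feature(feats: &str) {
    // e.g. "legacy_witness:other_flag" enables legacy mode and warns on "other_flag"
    for feat in feats.split(':') {
        match feat.trim().to_lowercase().as_str() {
            "legacy_witness" => LEGACY_WITNESS_ENCODING.store(true, Ordering::Relaxed),
            other => eprintln!("unrecognized dynamic feature: {other}"),
        }
    }
}

fn main() {
    set_dynamic_feature("legacy_witness:unknown_flag");
    assert!(witness_use_legacy_mode());
}
```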
@@ -9,8 +9,8 @@ use scroll_zkvm_types::{
     chunk::ChunkInfo,
     proof::{EvmProof, OpenVmEvmProof, ProofEnum, StarkProof},
     public_inputs::{ForkName, MultiVersionPublicInputs},
-    types_agg::{AggregationInput, ProgramCommitment},
-    utils::vec_as_base64,
+    types_agg::AggregationInput,
+    utils::{serialize_vk, vec_as_base64},
 };
 use serde::{de::DeserializeOwned, Deserialize, Serialize};

@@ -172,7 +172,7 @@ impl<Metadata> From<&WrappedProof<Metadata>> for AggregationInput {
     fn from(value: &WrappedProof<Metadata>) -> Self {
         Self {
             public_values: value.proof.public_values(),
-            commitment: ProgramCommitment::deserialize(&value.vk),
+            commitment: serialize_vk::deserialize(&value.vk),
         }
     }
 }

@@ -252,6 +252,7 @@ mod tests {
             batch_hash: B256::repeat_byte(4),
             withdraw_root: B256::repeat_byte(5),
             msg_queue_hash: B256::repeat_byte(6),
+            encryption_key: None,
         };
         let bundle_pi_hash = bundle_info.pi_hash(ForkName::EuclidV1);
         BundleProofMetadata {
@@ -16,6 +16,11 @@ use crate::{
 use sbv_primitives::B256;
 use scroll_zkvm_types::public_inputs::{ForkName, MultiVersionPublicInputs};

+fn encode_task_to_witness<T: serde::Serialize>(task: &T) -> eyre::Result<Vec<u8>> {
+    let config = bincode::config::standard();
+    Ok(bincode::serde::encode_to_vec(task, config)?)
+}
+
 fn check_aggregation_proofs<Metadata>(
     proofs: &[proofs::WrappedProof<Metadata>],
     fork_name: ForkName,

@@ -25,9 +30,9 @@ where
 {
     panic_catch(|| {
         for w in proofs.windows(2) {
-            w[1].metadata
-                .pi_hash_info()
-                .validate(w[0].metadata.pi_hash_info(), fork_name);
+            // w[1].metadata
+            //     .pi_hash_info()
+            //     .validate(w[0].metadata.pi_hash_info(), fork_name);
         }
     })
     .map_err(|e| eyre::eyre!("Chunk data validation failed: {}", e))?;

@@ -37,13 +42,9 @@ where

 /// Generate required staff for chunk proving
 pub fn gen_universal_chunk_task(
-    mut task: ChunkProvingTask,
+    task: ChunkProvingTask,
     fork_name: ForkName,
-    interpreter: Option<impl ChunkInterpreter>,
 ) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
-    if let Some(interpreter) = interpreter {
-        task.prepare_task_via_interpret(interpreter)?;
-    }
     let chunk_total_gas = task.stats().total_gas_used;
     let chunk_info = task.precheck_and_build_metadata()?;
     let proving_task = task.try_into()?;
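The new `encode_task_to_witness` helper shown above is plain bincode v2 over serde, matching the `bincode = { version = "2", features = ["serde"] }` dependency added earlier. A runnable sketch with a stand-in witness type (`DemoWitness` is illustrative, not one of the real witness structs):

```rust
use serde::Serialize;

#[derive(Serialize)]
struct DemoWitness {
    version: u32,
    blob_bytes: Vec<u8>,
}

// Same shape as the helper in the diff: bincode's standard configuration
// over any serde-serializable task/witness type.
fn encode_task_to_witness<T: Serialize>(task: &T) -> Result<Vec<u8>, bincode::error::EncodeError> {
    let config = bincode::config::standard();
    bincode::serde::encode_to_vec(task, config)
}

fn main() {
    let w = DemoWitness { version: 0, blob_bytes: vec![1, 2, 3] };
    let bytes = encode_task_to_witness(&w).expect("encode");
    println!("encoded {} bytes", bytes.len());
}
```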
@@ -4,9 +4,9 @@ use eyre::Result;
 use sbv_primitives::{B256, U256};
 use scroll_zkvm_types::{
     batch::{
-        BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8, BatchInfo, BatchWitness,
-        Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8, PointEvalWitness, ReferenceHeader,
-        ToArchievedWitness, N_BLOB_BYTES,
+        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8,
+        BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8, LegacyBatchWitness,
+        ReferenceHeader, N_BLOB_BYTES,
     },
     public_inputs::ForkName,
     task::ProvingTask,

@@ -84,6 +84,12 @@ impl TryFrom<BatchProvingTask> for ProvingTask {

     fn try_from(value: BatchProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            let legacy_witness = LegacyBatchWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.batch_header.batch_hash().to_string(),

@@ -93,7 +99,7 @@ impl TryFrom<BatchProvingTask> for ProvingTask {
                 .into_iter()
                 .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
-            serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.into_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }

@@ -161,10 +167,10 @@ impl BatchProvingTask {
             assert_eq!(p, kzg_proof);
         }

-        let point_eval_witness = PointEvalWitness {
-            kzg_commitment: kzg_commitment.into_inner(),
-            kzg_proof: kzg_proof.into_inner(),
-        };
+        let point_eval_witness = Some(build_point_eval_witness(
+            kzg_commitment.into_inner(),
+            kzg_proof.into_inner(),
+        ));

         let reference_header = match fork_name {
             ForkName::EuclidV1 => ReferenceHeader::V6(*self.batch_header.must_v6_header()),

@@ -183,6 +189,7 @@ impl BatchProvingTask {
             blob_bytes: self.blob_bytes.clone(),
             reference_header,
             point_eval_witness,
+            version: 0,
         }
     }

@@ -192,12 +199,7 @@ impl BatchProvingTask {
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve batch witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved batch witness fail: {e}"))?;
-        let metadata: BatchInfo = archieved_witness.into();
+        let metadata = BatchInfo::from(&witness);

         super::check_aggregation_proofs(self.chunk_proofs.as_slice(), fork_name)?;
@@ -1,9 +1,10 @@
 use crate::proofs::BatchProof;
 use eyre::Result;
 use scroll_zkvm_types::{
-    bundle::{BundleInfo, BundleWitness, ToArchievedWitness},
+    bundle::{BundleInfo, BundleWitness},
     public_inputs::ForkName,
     task::ProvingTask,
+    utils::{to_rkyv_bytes, RancorError},
 };

 /// Message indicating a sanity check failure.

@@ -40,6 +41,7 @@ impl BundleProvingTask {

     fn build_guest_input(&self) -> BundleWitness {
         BundleWitness {
+            version: 0,
             batch_proofs: self.batch_proofs.iter().map(|proof| proof.into()).collect(),
             batch_infos: self
                 .batch_proofs

@@ -56,12 +58,7 @@ impl BundleProvingTask {
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve bundle witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved bundle witness fail: {e}"))?;
-        let metadata: BundleInfo = archieved_witness.into();
+        let metadata = BundleInfo::from(&witness);

         super::check_aggregation_proofs(self.batch_proofs.as_slice(), fork_name)?;

@@ -74,6 +71,12 @@ impl TryFrom<BundleProvingTask> for ProvingTask {

     fn try_from(value: BundleProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            //to_rkyv_bytes::<RancorError>(&witness)?.into_vec()
+            unimplemented!();
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.identifier(),

@@ -83,7 +86,7 @@ impl TryFrom<BundleProvingTask> for ProvingTask {
                 .into_iter()
                 .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
-            serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }
@@ -1,9 +1,11 @@
 use super::chunk_interpreter::*;
 use eyre::Result;
-use sbv_primitives::{types::BlockWitness, B256};
+use sbv_core::BlockWitness;
+use sbv_primitives::B256;
 use scroll_zkvm_types::{
-    chunk::{execute, ChunkInfo, ChunkWitness, ToArchievedWitness},
+    chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness},
     task::ProvingTask,
+    utils::{to_rkyv_bytes, RancorError},
 };

 /// The type aligned with coordinator's defination

@@ -66,12 +68,18 @@ impl TryFrom<ChunkProvingTask> for ProvingTask {

     fn try_from(value: ChunkProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            let legacy_witness = LegacyChunkWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.identifier(),
             fork_name: value.fork_name,
             aggregated_proofs: Vec::new(),
-            serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }

@@ -83,7 +91,7 @@ impl ChunkProvingTask {
         let num_txs = self
             .block_witnesses
             .iter()
-            .map(|b| b.transaction.len())
+            .map(|b| b.transactions.len())
             .sum::<usize>();
         let total_gas_used = self
             .block_witnesses

@@ -119,9 +127,11 @@ impl ChunkProvingTask {

     fn build_guest_input(&self) -> ChunkWitness {
         ChunkWitness::new(
+            0,
             &self.block_witnesses,
             self.prev_msg_queue_hash,
             self.fork_name.to_lowercase().as_str().into(),
+            None,
         )
     }

@@ -131,18 +141,14 @@ impl ChunkProvingTask {

     pub fn precheck_and_build_metadata(&self) -> Result<ChunkInfo> {
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve chunk witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved chunk witness fail: {e}"))?;
-
-        let ret = ChunkInfo::try_from(archieved_witness).map_err(|e| eyre::eyre!("{e}"))?;
+        let ret = ChunkInfo::try_from(witness).map_err(|e| eyre::eyre!("{e}"))?;
         Ok(ret)
     }

     /// this method check the validate of current task (there may be missing storage node)
     /// and try fixing it until everything is ok
+    #[deprecated]
     pub fn prepare_task_via_interpret(
         &mut self,
         interpreter: impl ChunkInterpreter,

@@ -166,13 +172,8 @@ impl ChunkProvingTask {
         let mut attempts = 0;
         loop {
             let witness = self.build_guest_input();
-            let archieved = ToArchievedWitness::create(&witness)
-                .map_err(|e| eyre::eyre!("archieve chunk witness fail: {e}"))?;
-            let archieved_witness = archieved
-                .access()
-                .map_err(|e| eyre::eyre!("access archieved chunk witness fail: {e}"))?;
-
-            match execute(archieved_witness) {
+            match execute(witness) {
                 Ok(_) => return Ok(()),
                 Err(e) => {
                     if let Some(caps) = err_parse_re.captures(&e) {
@@ -1,5 +1,6 @@
 use eyre::Result;
-use sbv_primitives::{types::BlockWitness, Bytes, B256};
+use sbv_core::BlockWitness;
+use sbv_primitives::{Bytes, B256};

 /// An interpreter which is cirtical in translating chunk data
 /// since we need to grep block witness and storage node data
@@ -17,10 +17,10 @@ pub struct Verifier {

 impl Verifier {
     pub fn new(assets_dir: &str, fork: ForkName) -> Self {
-        let verifier_bin = Path::new(assets_dir).join("verifier.bin");
+        let verifier_bin = Path::new(assets_dir);

         Self {
-            verifier: UniversalVerifier::setup(&verifier_bin).expect("Setting up chunk verifier"),
+            verifier: UniversalVerifier::setup(verifier_bin).expect("Setting up chunk verifier"),
             fork,
         }
     }
 }

@@ -32,12 +32,16 @@ impl ProofVerifier for Verifier {
             TaskType::Chunk => {
                 let proof = serde_json::from_slice::<ChunkProof>(proof).unwrap();
                 assert!(proof.pi_hash_check(self.fork));
-                UniversalVerifier::verify_stark_proof(proof.as_root_proof(), &proof.vk).unwrap()
+                self.verifier
+                    .verify_stark_proof(proof.as_root_proof(), &proof.vk)
+                    .unwrap()
             }
             TaskType::Batch => {
                 let proof = serde_json::from_slice::<BatchProof>(proof).unwrap();
                 assert!(proof.pi_hash_check(self.fork));
-                UniversalVerifier::verify_stark_proof(proof.as_root_proof(), &proof.vk).unwrap()
+                self.verifier
+                    .verify_stark_proof(proof.as_root_proof(), &proof.vk)
+                    .unwrap()
             }
             TaskType::Bundle => {
                 let proof = serde_json::from_slice::<BundleProof>(proof).unwrap();
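Note the shift from the static call `UniversalVerifier::verify_stark_proof(...)` to the instance method `self.verifier.verify_stark_proof(...)`: verification now runs against whatever state `setup()` loaded from the assets directory. A toy illustration of the shape of that change (not the real `UniversalVerifier` API):

```rust
struct UniversalVerifier {
    // state loaded once by setup(), e.g. verifying keys from the assets dir
    ready: bool,
}

impl UniversalVerifier {
    fn setup() -> Self {
        Self { ready: true }
    }

    // instance method: can consult state held by `self`
    fn verify_stark_proof(&self, proof: &[u8], vk: &[u8]) -> bool {
        self.ready && !proof.is_empty() && !vk.is_empty()
    }
}

fn main() {
    let verifier = UniversalVerifier::setup();
    assert!(verifier.verify_stark_proof(b"proof", b"vk"));
}
```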
@@ -153,17 +153,12 @@ pub unsafe extern "C" fn gen_universal_task(
     expected_vk: *const u8,
     expected_vk_len: usize,
 ) -> HandlingResult {
-    let mut interpreter = None;
     let task_json = if task_type == TaskType::Chunk as i32 {
         let pre_task_str = c_char_to_str(task);
         let cli = l2geth::get_client();
         match libzkp::checkout_chunk_task(pre_task_str, cli) {
-            Ok(str) => {
-                interpreter.replace(cli);
-                str
-            }
+            Ok(str) => str,
             Err(e) => {
-                println!("gen_universal_task failed at pre interpret step, error: {e}");
                 tracing::error!("gen_universal_task failed at pre interpret step, error: {e}");
                 return failed_handling_result();
             }

@@ -178,13 +173,8 @@ pub unsafe extern "C" fn gen_universal_task(
         &[]
     };

-    let ret = libzkp::gen_universal_task(
-        task_type,
-        &task_json,
-        c_char_to_str(fork_name),
-        expected_vk,
-        interpreter,
-    );
+    let ret =
+        libzkp::gen_universal_task(task_type, &task_json, c_char_to_str(fork_name), expected_vk);

     if let Ok((pi_hash, meta_json, task_json)) = ret {
         let expected_pi_hash = pi_hash.0.map(|byte| byte as c_char);

@@ -255,3 +245,10 @@ pub unsafe extern "C" fn release_string(ptr: *mut c_char) {
         let _ = CString::from_raw(ptr);
     }
 }
+
+/// # Safety
+#[no_mangle]
+pub unsafe extern "C" fn set_dynamic_feature(feats: *const c_char) {
+    let feats_str = c_char_to_str(feats);
+    libzkp::set_dynamic_feature(feats_str);
+}
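The exported `set_dynamic_feature` receives a NUL-terminated C string; `c_char_to_str` in the diff presumably wraps `CStr::from_ptr` roughly as below. A hedged sketch of that FFI boundary (the helper name mirrors the diff, but its body here is an assumption, and the exported symbol is renamed `_demo` to avoid suggesting it is the real one):

```rust
use std::ffi::{c_char, CStr};

/// # Safety
/// `ptr` must point to a valid, NUL-terminated UTF-8 string.
unsafe fn c_char_to_str<'a>(ptr: *const c_char) -> &'a str {
    CStr::from_ptr(ptr).to_str().expect("valid utf-8")
}

/// # Safety
/// Same contract as `c_char_to_str`; intended to be called once at initialization.
#[no_mangle]
pub unsafe extern "C" fn set_dynamic_feature_demo(feats: *const c_char) {
    let feats_str = c_char_to_str(feats);
    println!("would enable features: {feats_str}");
}
```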
@@ -33,3 +33,7 @@ clap = { version = "4.5", features = ["derive"] }
 ctor = "0.2.8"
 url = { version = "2.5.4", features = ["serde",] }
 serde_bytes = "0.11.15"
+
+[features]
+default = []
+cuda = ["scroll-zkvm-prover/cuda"]
@@ -1,3 +1,5 @@
+#![allow(dead_code)]
+
 use serde::{Deserialize, Deserializer, Serialize, Serializer};

 #[derive(Serialize, Deserialize, Default)]
@@ -2,7 +2,6 @@ use std::path::Path;

 use super::CircuitsHandler;
 use async_trait::async_trait;
-use base64::{prelude::BASE64_STANDARD, Engine};
 use eyre::Result;
 use scroll_proving_sdk::prover::ProofType;
 use scroll_zkvm_prover::{Prover, ProverConfig};

@@ -12,10 +11,12 @@ pub struct UniversalHandler {
     prover: Prover,
 }

+/// Safe for current usage as `CircuitsHandler` trait (protected inside of Mutex and NEVER extract
+/// the instance out by `into_inner`)
 unsafe impl Send for UniversalHandler {}

 impl UniversalHandler {
-    pub fn new(workspace_path: impl AsRef<Path>, proof_type: ProofType) -> Result<Self> {
+    pub fn new(workspace_path: impl AsRef<Path>, _proof_type: ProofType) -> Result<Self> {
         let path_app_exe = workspace_path.as_ref().join("app.vmexe");
         let path_app_config = workspace_path.as_ref().join("openvm.toml");
         let segment_len = Some((1 << 22) - 100);

@@ -25,16 +26,14 @@ impl UniversalHandler {
             segment_len,
         };

-        let use_evm = proof_type == ProofType::Bundle;
-
-        let prover = Prover::setup(config, use_evm, None)?;
+        let prover = Prover::setup(config, None)?;
         Ok(Self { prover })
     }

     /// get_prover get the inner prover, later we would replace chunk/batch/bundle_prover with
     /// universal prover, before that, use bundle_prover as the represent one
-    pub fn get_prover(&self) -> &Prover {
-        &self.prover
+    pub fn get_prover(&mut self) -> &mut Prover {
+        &mut self.prover
     }

     pub fn get_task_from_input(input: &str) -> Result<ProvingTask> {

@@ -45,14 +44,7 @@ impl UniversalHandler {
 #[async_trait]
 impl CircuitsHandler for Mutex<UniversalHandler> {
     async fn get_proof_data(&self, u_task: &ProvingTask, need_snark: bool) -> Result<String> {
-        let handler_self = self.lock().await;
-
-        if need_snark && handler_self.prover.evm_prover.is_none() {
-            eyre::bail!(
-                "do not init prover for evm (vk: {})",
-                BASE64_STANDARD.encode(handler_self.get_prover().get_app_vk())
-            )
-        }
+        let mut handler_self = self.lock().await;

         let proof = handler_self
             .get_prover()
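With `get_prover` now returning `&mut Prover`, mutable access simply flows through the lock guard (`let mut handler_self = self.lock().await;`), so the handler no longer needs the EVM-prover precheck shown above or any interior-mutability workaround. A minimal sketch of the locking pattern, assuming tokio's async `Mutex` with the `sync`, `macros`, and `rt` features enabled (types are stand-ins):

```rust
use tokio::sync::Mutex;

struct Prover;

impl Prover {
    fn prove(&mut self) -> String {
        "proof".to_string()
    }
}

struct Handler {
    prover: Prover,
}

impl Handler {
    // mirrors the diff: hand out a mutable borrow of the inner prover
    fn get_prover(&mut self) -> &mut Prover {
        &mut self.prover
    }
}

#[tokio::main]
async fn main() {
    let handler = Mutex::new(Handler { prover: Prover });
    let mut guard = handler.lock().await; // mutable access via the guard
    let proof = guard.get_prover().prove();
    println!("{proof}");
}
```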
@@ -1,3 +1,4 @@
 [toolchain]
-channel = "nightly-2025-02-14"
+channel = "nightly-2025-08-18"
 targets = ["riscv32im-unknown-none-elf", "x86_64-unknown-linux-gnu"]
+components = ["llvm-tools", "rustc-dev"]
@@ -1,4 +1,4 @@
-.PHONY: prover lint tests_binary
+.PHONY: prover prover_cpu lint tests_binary test_e2e_run test_run

 RUST_MIN_STACK ?= 16777216
 export RUST_MIN_STACK

@@ -36,14 +36,16 @@ E2E_HANDLE_SET ?= ../tests/prover-e2e/testset.json
 DUMP_DIR ?= .work

 prover:
-	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZKVM_COMMIT=${ZKVM_COMMIT} $(MAKE) -C ../crates/gpu_override build
+	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release -Z unstable-options --lockfile-path ../crates/gpu_override/Cargo.lock -p prover

 version:
-	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZKVM_COMMIT=${ZKVM_COMMIT} $(MAKE) -C ../crates/gpu_override version
+	echo ${GO_TAG}-${GIT_REV}-${ZK_VERSION}

 prover_cpu:
 	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release -p prover

+clean:
+	cargo clean -Z unstable-options --release -p prover --lockfile-path ../crates/gpu_override/Cargo.lock
+
 tests_binary:
 	cargo clean && cargo test --release --no-run