Mirror of https://github.com/scroll-tech/scroll.git, synced 2026-01-11 15:08:09 -05:00
Compare commits: test/code ... refactor/p (68 commits)

c7ce3205da, d85888a54e, ca90b1ff01, e88336dcf9, 3086a4c672, 9a919a6d45, f77a008fc1, 5ac5bec13b, ec1d063e58, 094f8583c3, 47e477b42d, f28ba157cb, 967fb653a2, 5585a5434d, 40b4b743eb, 2f8532cb72, 99b4603f77, ca3e7e45d7, 4d9d014df8, c6d2c22bce, 971e6c5aa7, 694d155437, 0dd1bb3343, 76429e524c, 92200d1bd4, 372911b772, dc5b74e903, 533da0047f, 66453af811, 5acb2af74d, 094908404e, 74f3623ede, 5ad63aed29, 43505d4780, 1b20456928, 277b75bc69, e5e68f5e28, e661c26743, 282d6e996b, 2371edc617, 0532653d34, f37556fdd5, 32e95fb370, d8f8880ab9, 81533c16dd, f170ae5fe3, ee2a3956e6, e988cbb15d, 46f5849ae0, 45d8f66864, 94e1ea3a08, 62c1f00d3b, 038d7a5bbf, 112e9ac42b, 728266ebad, 7b8f30d230, 69ca648c83, 00a07a8258, f87e5b5ca7, 7b848f971b, 49166ec8d0, 2d0c36eb5a, 445a8d592a, eadc51d33b, 254a7faf58, 173cbc4dc4, 94bd5917ba, 107aa5792b
.gitignore (vendored, 2 changes)
@@ -20,3 +20,5 @@ coverage.txt
# misc
sftp-config.json
*~

target
prover_rust/Cargo.lock (generated, new file, 5678 lines)
File diff suppressed because it is too large.
prover_rust/Cargo.toml (new file, 47 lines)
@@ -0,0 +1,47 @@
[package]
name = "prover_rust"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


[patch.crates-io]
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0.1.0" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "main" }
[patch."https://github.com/privacy-scaling-explorations/bls12_381"]
bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/impl_scalar_field" }


[dependencies]
anyhow = "1.0"
log = "0.4"
env_logger = "0.11.3"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.116"
futures = "0.3.30"

ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "v0.10", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.0rc8-hotfix1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
reqwest-retry = "0.5"
once_cell = "1.19.0"
hex = "0.4.3"
tiny-keccak = { version = "2.0.0", features = ["sha3", "keccak"] }
rand = "0.8.5"
eth-keystore = "0.5.0"
rlp = "0.5.2"
tokio = "1.37.0"
sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }
prover_rust/Makefile (new file, 43 lines)
@@ -0,0 +1,43 @@
.PHONY: prover

ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
$(info ************ First ************)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
$(info ************ Second ************)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "\#" -f2 | cut -c-7)
endif

ZKEVM_VERSION=$(shell ./print_high_zkevm_version.sh)
ifeq (${ZKEVM_VERSION},)
$(error ZKEVM_VERSION not set)
else
$(info ZKEVM_VERSION is ${ZKEVM_VERSION})
endif

ZKEVM_COMMIT=$(shell echo ${ZKEVM_VERSION} | cut -d " " -f2)
$(info ZKEVM_COMMIT is ${ZKEVM_COMMIT})

HALO2_GPU_VERSION=$(shell ./print_halo2gpu_version.sh | sed -n '2p')

GIT_REV=$(shell git rev-parse --short HEAD)
GO_TAG=$(shell grep "var tag = " ../common/version/version.go | cut -d "\"" -f2)

ifeq (${GO_TAG},)
$(error GO_TAG not set)
else
$(info GO_TAG is ${GO_TAG})
endif

ifeq (${HALO2_GPU_VERSION},)
# use halo2_proofs with CPU
ZK_VERSION=${ZKEVM_COMMIT}-${HALO2_VERSION}
else
# use halo2_gpu
ZK_VERSION=${ZKEVM_COMMIT}-${HALO2_GPU_VERSION}
endif

prover:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
	rm -rf ./lib && mkdir ./lib
	find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib
prover_rust/config.json (new file, 26 lines)
@@ -0,0 +1,26 @@
{
    "prover_name": "prover-1",
    "keystore_path": "keystore.json",
    "keystore_password": "prover-pwd",
    "db_path": "unique-db-path-for-prover-1",
    "proof_type": 2,
    "low_version_circuit": {
        "hard_fork_name": "bernoulli",
        "params_path": "params",
        "assets_path": "assets"
    },
    "high_version_circuit": {
        "hard_fork_name": "curie",
        "params_path": "params",
        "assets_path": "assets"
    },
    "coordinator": {
        "base_url": "http://localhost:8555",
        "retry_count": 10,
        "retry_wait_time_sec": 10,
        "connection_timeout_sec": 30
    },
    "l2geth": {
        "endpoint": "http://localhost:9999"
    }
}
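The fields above correspond one-to-one to the `Config` struct added in `prover_rust/src/config.rs` later in this diff. A minimal, hedged sketch of loading this file from inside the crate (the literal path and the assertions are illustrative assumptions; `main.rs` actually takes the path from its `--config` flag):

```rust
use anyhow::Result;

use crate::config::Config;   // struct defined in src/config.rs below
use crate::types::ProofType; // integer-tagged enum defined in src/types.rs below

fn load_config_example() -> Result<()> {
    // Deserialize config.json into the typed Config struct via serde_json.
    let cfg = Config::from_file("config.json".to_string())?;

    assert_eq!(cfg.prover_name, "prover-1");
    assert_eq!(cfg.coordinator.base_url, "http://localhost:8555");
    // "proof_type": 2 maps to ProofType::ProofTypeBatch (1 would be chunk proving).
    assert_eq!(cfg.proof_type, ProofType::ProofTypeBatch);
    Ok(())
}
```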
prover_rust/print_halo2gpu_version.sh (new executable file, 21 lines)
@@ -0,0 +1,21 @@
#!/bin/bash

config_file="$HOME/.cargo/config"

if [ ! -e "$config_file" ]; then
    exit 0
fi

if [[ $(head -n 1 "$config_file") == "#"* ]]; then
    exit 0
fi

halo2gpu_path=$(grep -Po '(?<=paths = \[")([^"]*)' $config_file)

pushd $halo2gpu_path

commit_hash=$(git log --pretty=format:%h -n 1)
echo "${commit_hash:0:7}"

popd
prover_rust/print_high_zkevm_version.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@


higher_zkevm_item=`grep "zkevm-circuits.git" ./Cargo.lock | sort | uniq | awk -F "[#=]" '{print $3" "$4}' | sort -k 1 | tail -n 1`

higher_version=`echo $higher_zkevm_item | awk '{print $1}'`

higher_commit=`echo $higher_zkevm_item | cut -d ' ' -f2 | cut -c-7`

echo "$higher_version $higher_commit"
prover_rust/rust-toolchain (new file, 1 line)
@@ -0,0 +1 @@
nightly-2023-12-03
prover_rust/rustfmt.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
edition = "2021"

comment_width = 100
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
# normalize_comments = true
reorder_imports = true
wrap_comments = true
prover_rust/src/config.rs (new file, 89 lines)
@@ -0,0 +1,89 @@
use serde::{Deserialize, Serialize};
use std::fs::File;
use anyhow::{bail, Result};

use crate::types::ProofType;

#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
    pub hard_fork_name: String,
    pub params_path: String,
    pub assets_path: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CoordinatorConfig {
    pub base_url: String,
    pub retry_count: u32,
    pub retry_wait_time_sec: u64,
    pub connection_timeout_sec: u64,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct L2GethConfig {
    pub endpoint: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Config {
    pub prover_name: String,
    pub keystore_path: String,
    pub keystore_password: String,
    pub db_path: String,
    #[serde(default)]
    pub proof_type: ProofType,
    pub low_version_circuit: CircuitConfig,
    pub high_version_circuit: CircuitConfig,
    pub coordinator: CoordinatorConfig,
    pub l2geth: Option<L2GethConfig>,
}

impl Config {
    pub fn from_reader<R>(reader: R) -> Result<Self>
    where
        R: std::io::Read,
    {
        serde_json::from_reader(reader).map_err(|e| anyhow::anyhow!(e))
    }

    pub fn from_file(file_name: String) -> Result<Self> {
        let file = File::open(file_name)?;
        Config::from_reader(&file)
    }
}

static SCROLL_PROVER_ASSETS_DIR_ENV_NAME: &str = "SCROLL_PROVER_ASSETS_DIR";
static mut SCROLL_PROVER_ASSETS_DIRS: Vec<String> = vec![];

#[derive(Debug)]
pub struct AssetsDirEnvConfig {
}

impl AssetsDirEnvConfig {
    pub fn init() -> Result<()> {
        let value = std::env::var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME)?;
        let dirs: Vec<&str> = value.split(',').collect();
        if dirs.len() != 2 {
            bail!("env variable SCROLL_PROVER_ASSETS_DIR value must be 2 parts separated by comma.")
        }
        unsafe {
            SCROLL_PROVER_ASSETS_DIRS = dirs.into_iter().map(|s| s.to_string()).collect();
            log::info!("init SCROLL_PROVER_ASSETS_DIRS: {:?}", SCROLL_PROVER_ASSETS_DIRS);
        }
        Ok(())
    }

    pub fn enable_first() {
        unsafe {
            log::info!("set env {SCROLL_PROVER_ASSETS_DIR_ENV_NAME} to {}", &SCROLL_PROVER_ASSETS_DIRS[0]);
            std::env::set_var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME, &SCROLL_PROVER_ASSETS_DIRS[0]);
        }
    }

    pub fn enable_second() {
        unsafe {
            log::info!("set env {SCROLL_PROVER_ASSETS_DIR_ENV_NAME} to {}", &SCROLL_PROVER_ASSETS_DIRS[1]);
            std::env::set_var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME, &SCROLL_PROVER_ASSETS_DIRS[1]);
        }
    }
}
prover_rust/src/coordinator_client.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
mod api;
mod errors;
pub mod listener;
pub mod types;

use anyhow::{bail, Context, Ok, Result};
use std::rc::Rc;

use api::API;
use errors::*;
use listener::Listener;
use log;
use tokio::runtime::Runtime;
use types::*;

use crate::key_signer::KeySigner;
use crate::config::Config;

pub struct CoordinatorClient<'a> {
    api: API,
    token: Option<String>,
    config: &'a Config,
    key_signer: Rc<KeySigner>,
    rt: Runtime,
    listener: Box<dyn Listener>,
}

impl<'a> CoordinatorClient<'a> {
    pub fn new(
        config: &'a Config,
        key_signer: Rc<KeySigner>,
        listener: Box<dyn Listener>,
    ) -> Result<Self> {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;

        let api = API::new(&config.coordinator.base_url,
            core::time::Duration::from_secs(config.coordinator.connection_timeout_sec),
            config.coordinator.retry_count,
            config.coordinator.retry_wait_time_sec
        )?;
        let mut client = Self {
            api,
            token: None,
            config,
            key_signer,
            rt,
            listener,
        };
        client.login()?;
        Ok(client)
    }

    fn login(&mut self) -> Result<()> {
        let api = &self.api;
        let challenge_response = self.rt.block_on(api.challenge())?;
        if challenge_response.errcode != ErrorCode::Success {
            bail!("challenge failed: {}", challenge_response.errmsg)
        }
        let mut token: String;
        if let Some(r) = challenge_response.data {
            token = r.token;
        } else {
            bail!("challenge failed: got empty token")
        }

        let login_message = LoginMessage {
            challenge: token.clone(),
            prover_name: self.config.prover_name.clone(),
            prover_version: crate::version::get_version(),
        };

        let buffer = login_message.rlp();
        let signature = self.key_signer.sign_buffer(&buffer)?;
        let login_request = LoginRequest {
            message: login_message,
            signature,
        };
        let login_response = self.rt.block_on(api.login(&login_request, &token))?;
        if login_response.errcode != ErrorCode::Success {
            bail!("login failed: {}", login_response.errmsg)
        }
        if let Some(r) = login_response.data {
            token = r.token;
        } else {
            bail!("login failed: got empty token")
        }
        self.token = Some(token);
        Ok(())
    }

    fn action_with_re_login<T, F, R>(&mut self, req: &R, mut f: F) -> Result<Response<T>>
    where
        F: FnMut(&mut Self, &R) -> Result<Response<T>>,
    {
        let response = f(self, req)?;
        if response.errcode == ErrorCode::ErrJWTTokenExpired {
            log::info!("JWT expired, attempting to re-login");
            self.login().context("JWT expired, re-login failed")?;
            log::info!("re-login success");
            return self.action_with_re_login(req, f);
        } else if response.errcode != ErrorCode::Success {
            bail!("action failed: {}", response.errmsg)
        }
        Ok(response)
    }

    fn do_get_task(&mut self, req: &GetTaskRequest) -> Result<Response<GetTaskResponseData>> {
        self.rt
            .block_on(self.api.get_task(req, self.token.as_ref().unwrap()))
    }

    pub fn get_task(&mut self, req: &GetTaskRequest) -> Result<Response<GetTaskResponseData>> {
        self.action_with_re_login(req, |s, req| s.do_get_task(req))
    }

    fn do_submit_proof(
        &mut self,
        req: &SubmitProofRequest,
    ) -> Result<Response<SubmitProofResponseData>> {
        let response = self
            .rt
            .block_on(self.api.submit_proof(req, &self.token.as_ref().unwrap()))?;
        self.listener.on_proof_submitted(req);
        Ok(response)
    }

    pub fn submit_proof(
        &mut self,
        req: &SubmitProofRequest,
    ) -> Result<Response<SubmitProofResponseData>> {
        self.action_with_re_login(req, |s, req| s.do_submit_proof(req))
    }
}
prover_rust/src/coordinator_client/api.rs (new file, 121 lines)
@@ -0,0 +1,121 @@
use super::types::*;
use anyhow::{bail, Result};
use reqwest::{header::CONTENT_TYPE, Url};
use serde::Serialize;
use core::time::Duration;
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};

pub struct API {
    url_base: Url,
    send_timeout: Duration,
    pub client: ClientWithMiddleware,
}

impl API {
    pub fn new(url_base: &String, send_timeout: Duration, retry_count: u32, retry_wait_time_sec: u64) -> Result<Self> {
        let retry_wait_duration = core::time::Duration::from_secs(retry_wait_time_sec);
        let retry_policy = ExponentialBackoff::builder()
            .retry_bounds(retry_wait_duration / 2, retry_wait_duration)
            .build_with_max_retries(retry_count);

        let client = ClientBuilder::new(reqwest::Client::new())
            .with(RetryTransientMiddleware::new_with_policy(retry_policy))
            .build();

        Ok(Self {
            url_base: Url::parse(&url_base)?,
            send_timeout,
            client,
        })
    }

    pub async fn challenge(&self) -> Result<Response<ChallengeResponseData>> {
        let method = "/coordinator/v1/challenge";
        let url = self.build_url(method)?;

        let response = self
            .client
            .get(url)
            .header(CONTENT_TYPE, "application/json")
            .timeout(self.send_timeout)
            .send()
            .await?;

        let response_body = response.text().await?;

        serde_json::from_str(&response_body).map_err(|e| anyhow::anyhow!(e))
    }

    pub async fn login(
        &self,
        req: &LoginRequest,
        token: &String,
    ) -> Result<Response<LoginResponseData>> {
        let method = "/coordinator/v1/login";
        self.post_with_token(&method, req, token).await
    }

    pub async fn get_task(
        &self,
        req: &GetTaskRequest,
        token: &String,
    ) -> Result<Response<GetTaskResponseData>> {
        let method = "/coordinator/v1/get_task";
        self.post_with_token(&method, req, token).await
    }

    pub async fn submit_proof(
        &self,
        req: &SubmitProofRequest,
        token: &String,
    ) -> Result<Response<SubmitProofResponseData>> {
        let method = "/coordinator/v1/submit_proof";
        self.post_with_token(&method, req, token).await
    }

    async fn post_with_token<Req, Resp>(
        &self,
        method: &str,
        req: &Req,
        token: &String,
    ) -> Result<Resp>
    where
        Req: ?Sized + Serialize,
        Resp: serde::de::DeserializeOwned,
    {
        let url = self.build_url(method)?;
        let request_body = serde_json::to_string(req)?;

        log::info!("[coordinator client], {method}, request: {request_body}");
        let response = self
            .client
            .post(url)
            .header(CONTENT_TYPE, "application/json")
            .bearer_auth(token)
            .body(request_body)
            .timeout(self.send_timeout)
            .send()
            .await?;

        if response.status() != http::status::StatusCode::OK {
            log::error!(
                "[coordinator client], {method}, status not ok: {}",
                response.status()
            );
            bail!(
                "[coordinator client], {method}, status not ok: {}",
                response.status()
            )
        }

        let response_body = response.text().await?;

        log::info!("[coordinator client], {method}, response: {response_body}");
        serde_json::from_str(&response_body).map_err(|e| anyhow::anyhow!(e))
    }

    fn build_url(&self, method: &str) -> Result<Url> {
        self.url_base.join(method).map_err(|e| anyhow::anyhow!(e))
    }
}
prover_rust/src/coordinator_client/errors.rs (new file, 54 lines)
@@ -0,0 +1,54 @@
use serde::{Deserialize, Deserializer};
use log;

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode {
    Success,
    InternalServerError,

    ErrProverStatsAPIParameterInvalidNo,
    ErrProverStatsAPIProverTaskFailure,
    ErrProverStatsAPIProverTotalRewardFailure,

    ErrCoordinatorParameterInvalidNo,
    ErrCoordinatorGetTaskFailure,
    ErrCoordinatorHandleZkProofFailure,
    ErrCoordinatorEmptyProofData,

    ErrJWTCommonErr,
    ErrJWTTokenExpired,

    Undefined(i32),
}

impl ErrorCode {
    fn from_i32(v: i32) -> Self {
        match v {
            0 => ErrorCode::Success,
            500 => ErrorCode::InternalServerError,
            10001 => ErrorCode::ErrProverStatsAPIParameterInvalidNo,
            10002 => ErrorCode::ErrProverStatsAPIProverTaskFailure,
            10003 => ErrorCode::ErrProverStatsAPIProverTotalRewardFailure,
            20001 => ErrorCode::ErrCoordinatorParameterInvalidNo,
            20002 => ErrorCode::ErrCoordinatorGetTaskFailure,
            20003 => ErrorCode::ErrCoordinatorHandleZkProofFailure,
            20004 => ErrorCode::ErrCoordinatorEmptyProofData,
            50000 => ErrorCode::ErrJWTCommonErr,
            50001 => ErrorCode::ErrJWTTokenExpired,
            _ => {
                log::error!("get unexpected error code from coordinator: {v}");
                ErrorCode::Undefined(v)
            }
        }
    }
}

impl<'de> Deserialize<'de> for ErrorCode {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: i32 = i32::deserialize(deserializer)?;
        Ok(ErrorCode::from_i32(v))
    }
}
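These numeric codes arrive as bare integers in the coordinator's JSON responses; the hand-written `Deserialize` impl above maps them onto the enum, and unknown values are kept as `Undefined` rather than failing. A hedged, test-style sketch of that mapping (the test module itself is not part of the diff):

```rust
#[cfg(test)]
mod tests {
    use super::ErrorCode;

    #[test]
    fn decodes_coordinator_error_codes() {
        // 50001 is the code the re-login logic in coordinator_client.rs watches for.
        let code: ErrorCode = serde_json::from_str("50001").unwrap();
        assert_eq!(code, ErrorCode::ErrJWTTokenExpired);

        // Unrecognized codes are preserved instead of being rejected.
        let other: ErrorCode = serde_json::from_str("12345").unwrap();
        assert_eq!(other, ErrorCode::Undefined(12345));
    }
}
```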
prover_rust/src/coordinator_client/listener.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
use super::SubmitProofRequest;

pub trait Listener {
    fn on_proof_submitted(&self, req: &SubmitProofRequest);
}
prover_rust/src/coordinator_client/types.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
use crate::types::{ProofFailureType, ProofStatus};
use rlp::RlpStream;
use serde::{Deserialize, Serialize};
use super::errors::ErrorCode;

#[derive(Deserialize)]
pub struct Response<T> {
    pub errcode: ErrorCode,
    pub errmsg: String,
    pub data: Option<T>,
}

#[derive(Serialize, Deserialize)]
pub struct LoginMessage {
    pub challenge: String,
    pub prover_name: String,
    pub prover_version: String,
}

impl LoginMessage {
    pub fn rlp(&self) -> Vec<u8> {
        let mut rlp = RlpStream::new();
        let num_fields = 3;
        rlp.begin_list(num_fields);
        rlp.append(&self.prover_name);
        rlp.append(&self.prover_version);
        rlp.append(&self.challenge);
        rlp.out().freeze().into()
    }
}

#[derive(Serialize, Deserialize)]
pub struct LoginRequest {
    pub message: LoginMessage,
    pub signature: String,
}

#[derive(Serialize, Deserialize)]
pub struct LoginResponseData {
    pub time: String,
    pub token: String,
}

pub type ChallengeResponseData = LoginResponseData;

#[derive(Default, Serialize, Deserialize)]
pub struct GetTaskRequest {
    pub task_type: crate::types::ProofType,
    pub prover_height: Option<u64>,
    pub vks: Vec<String>,
}

#[derive(Serialize, Deserialize)]
pub struct GetTaskResponseData {
    pub uuid: String,
    pub task_id: String,
    pub task_type: crate::types::ProofType,
    pub task_data: String,
    pub hard_fork_name: String,
}

#[derive(Serialize, Deserialize, Default)]
pub struct SubmitProofRequest {
    pub uuid: String,
    pub task_id: String,
    pub task_type: crate::types::ProofType,
    pub status: ProofStatus,
    pub proof: String,
    pub failure_type: Option<ProofFailureType>,
    pub failure_msg: Option<String>,
    pub hard_fork_name: String,
}

#[derive(Serialize, Deserialize)]
pub struct SubmitProofResponseData {}
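`LoginMessage::rlp` above produces the exact byte string that gets signed during the login handshake (see `CoordinatorClient::login` in `coordinator_client.rs`). A hedged sketch of assembling the signed request; the helper function and the hard-coded prover name are assumptions for illustration:

```rust
use anyhow::Result;

use crate::coordinator_client::types::{LoginMessage, LoginRequest};
use crate::key_signer::KeySigner;

fn build_login_request(challenge_token: String, signer: &KeySigner) -> Result<LoginRequest> {
    // The challenge token comes from GET /coordinator/v1/challenge.
    let message = LoginMessage {
        challenge: challenge_token,
        prover_name: "prover-1".to_string(), // normally taken from config.prover_name
        prover_version: crate::version::get_version(),
    };

    // RLP-encode [prover_name, prover_version, challenge], keccak256 it, and sign.
    let signature = signer.sign_buffer(&message.rlp())?;
    Ok(LoginRequest { message, signature })
}
```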
prover_rust/src/geth_client.rs (new file, 124 lines)
@@ -0,0 +1,124 @@
use crate::types::CommonHash;
use anyhow::Result;
use ethers_core::types::BlockNumber;
use tokio::runtime::Runtime;

use ethers_core::types::{H256, U64};
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use std::fmt::Debug;

use ethers_providers::{Http, Provider};

// ======================= types ============================

/// L2 block full trace, tracking the Go version of the type.
///
/// The inner block_trace is a generic type whose real implementations
/// live in the two versions of the zkevm-circuits library.
///
/// The inner block_trace is missing some fields compared to the Go version.
/// These fields are defined here for clarity although not used.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct BlockTrace<T> {
    #[serde(flatten)]
    pub block_trace: T,

    pub version: String,

    pub withdraw_trie_root: Option<CommonHash>,

    #[serde(rename = "mptwitness", default)]
    pub mpt_witness: Vec<u8>,
}

pub type TxHash = H256;

/// This struct tracks https://github.com/scroll-tech/go-ethereum/blob/0f0cd99f7a2e/core/types/block.go#Header.
/// Its fields are not 100% identical to ethers_core::types::Block, so this may need to change
/// at some point; currently only the `number` field is required.
#[derive(Debug, Deserialize, Serialize, Default)]
pub struct Header {
    #[serde(flatten)]
    block: ethers_core::types::Block<TxHash>,
}

impl Header {
    pub fn get_number(&self) -> Option<U64> {
        self.block.number
    }
}

/// Serialize a type.
///
/// # Panics
///
/// If the type returns an error during serialization.
pub fn serialize<T: serde::Serialize>(t: &T) -> serde_json::Value {
    serde_json::to_value(t).expect("Types never fail to serialize.")
}

// ======================= geth client ============================

pub struct GethClient {
    id: String,
    provider: Provider<Http>,
    rt: Runtime,
}

impl GethClient {
    pub fn new(id: &str, api_url: &str) -> Result<Self> {
        let provider = Provider::<Http>::try_from(api_url)?;
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;

        Ok(Self {
            id: id.to_string(),
            provider,
            rt,
        })
    }

    pub fn get_block_trace_by_hash<T>(&mut self, hash: &CommonHash) -> Result<BlockTrace<T>>
    where T: Serialize + DeserializeOwned + Debug + Send {
        log::info!(
            "{}: calling get_block_trace_by_hash, hash: {:#?}",
            self.id,
            hash
        );

        let trace_future = self
            .provider
            .request("scroll_getBlockTraceByNumberOrHash", [format!("{hash:#x}")]);

        let trace = self.rt.block_on(trace_future)?;
        Ok(trace)
    }

    pub fn header_by_number(&mut self, block_number: &BlockNumber) -> Result<Header> {
        log::info!(
            "{}: calling header_by_number, hash: {:#?}",
            self.id,
            block_number
        );

        let hash = serialize(block_number);
        let include_txs = serialize(&false);

        let trace_future = self
            .provider
            .request("eth_getBlockByNumber", [hash, include_txs]);

        let trace = self.rt.block_on(trace_future)?;
        Ok(trace)
    }

    pub fn block_number(&mut self) -> Result<BlockNumber> {
        log::info!("{}: calling block_number", self.id);

        let trace_future = self.provider.request("eth_blockNumber", ());

        let trace = self.rt.block_on(trace_future)?;
        Ok(trace)
    }
}
prover_rust/src/key_signer.rs (new file, 103 lines)
@@ -0,0 +1,103 @@
use std::path::Path;

use anyhow::Result;
use ethers_core::{
    k256::{
        ecdsa::{signature::hazmat::PrehashSigner, RecoveryId, Signature, SigningKey},
        elliptic_curve::{sec1::ToEncodedPoint, FieldBytes},
        PublicKey, Secp256k1, SecretKey,
    },
    types::Signature as EthSignature,
};

use ethers_core::types::{H256, U256};
use hex::ToHex;
use tiny_keccak::{Hasher, Keccak};

pub struct KeySigner {
    public_key: PublicKey,
    signer: SigningKey,
}

impl KeySigner {
    pub fn new(key_path: &str, passwd: &str) -> Result<Self> {
        let p = Path::new(key_path);

        let secret = if !p.exists() {
            log::info!("[key_signer] key_path not exists, create one");
            let dir = p.parent().unwrap();
            let name = p.file_name().and_then(|s| s.to_str());
            let mut rng = rand::thread_rng();
            let (secret, _) = eth_keystore::new(dir, &mut rng, passwd, name)?;
            secret
        } else {
            log::info!("[key_signer] key_path already exists, load it");
            eth_keystore::decrypt_key(key_path, passwd).map_err(|e| anyhow::anyhow!(e))?
        };

        let secret_key = SecretKey::from_bytes(secret.as_slice().into())?;

        let signer = SigningKey::from(secret_key.clone());

        Ok(Self {
            public_key: secret_key.public_key(),
            signer,
        })
    }

    pub fn get_public_key(&self) -> String {
        let v: Vec<u8> = Vec::from(self.public_key.to_encoded_point(true).as_bytes());
        buffer_to_hex(&v, false)
    }

    /// Signs the provided hash.
    pub fn sign_hash(&self, hash: H256) -> Result<EthSignature> {
        let signer = &self.signer as &dyn PrehashSigner<(Signature, RecoveryId)>;
        let (recoverable_sig, recovery_id) = signer.sign_prehash(hash.as_ref())?;

        let v = u8::from(recovery_id) as u64;

        let r_bytes: FieldBytes<Secp256k1> = recoverable_sig.r().into();
        let s_bytes: FieldBytes<Secp256k1> = recoverable_sig.s().into();
        let r = U256::from_big_endian(r_bytes.as_slice());
        let s = U256::from_big_endian(s_bytes.as_slice());

        Ok(EthSignature { r, s, v })
    }

    pub fn sign_buffer<T>(&self, buffer: &T) -> Result<String>
    where
        T: AsRef<[u8]>,
    {
        let pre_hash = keccak256(buffer);

        let hash = H256::from(pre_hash);
        let sig = self.sign_hash(hash)?;

        Ok(buffer_to_hex(&sig.to_vec(), true))
    }
}

fn buffer_to_hex<T>(buffer: &T, has_prefix: bool) -> String
where
    T: AsRef<[u8]>,
{
    if has_prefix {
        format!("0x{}", buffer.encode_hex::<String>())
    } else {
        buffer.encode_hex::<String>()
    }
}

/// Compute the Keccak-256 hash of input bytes.
///
/// Note that strings are interpreted as UTF-8 bytes.
pub fn keccak256<T: AsRef<[u8]>>(bytes: T) -> [u8; 32] {
    let mut output = [0u8; 32];

    let mut hasher = Keccak::v256();
    hasher.update(bytes.as_ref());
    hasher.finalize(&mut output);

    output
}
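A short, hedged usage sketch of `KeySigner` as the rest of the diff uses it: if the keystore file does not exist yet it is created and encrypted with the configured password, and the compressed public key is what `main.rs` logs at startup (the literal path and password below are placeholders taken from `config.json`):

```rust
use anyhow::Result;

use crate::key_signer::KeySigner;

fn key_signer_example() -> Result<()> {
    // Creates keystore.json on first run, or decrypts the existing one.
    let signer = KeySigner::new("keystore.json", "prover-pwd")?;

    // Hex-encoded compressed secp256k1 public key (no 0x prefix).
    println!("prover public key: {}", signer.get_public_key());

    // Signatures are keccak256-then-ECDSA, returned as 0x-prefixed hex.
    let signature = signer.sign_buffer(&b"example payload".to_vec())?;
    println!("signature: {signature}");
    Ok(())
}
```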
prover_rust/src/main.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
mod config;
mod coordinator_client;
mod geth_client;
mod key_signer;
mod prover;
mod task_cache;
mod task_processor;
mod types;
mod utils;
mod version;
mod zk_circuits_handler;

use clap::{Parser, ArgAction};
use anyhow::Result;
use config::{Config, AssetsDirEnvConfig};
use prover::Prover;
use std::rc::Rc;
use task_cache::{ClearCacheCoordinatorListener, TaskCache};
use task_processor::TaskProcessor;
use log;

/// Command-line arguments of the prover
#[derive(Parser, Debug)]
#[clap(disable_version_flag = true)]
struct Args {
    /// Path of config file
    #[arg(long = "config", default_value = "conf/config.json")]
    config_file: String,

    /// Version of this prover
    #[arg(short, long, action = ArgAction::SetTrue)]
    version: bool,

    /// Path of log file
    #[arg(long = "log.file")]
    log_file: Option<String>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::parse();

    if args.version {
        println!("version is {}", version::get_version());
        std::process::exit(0);
    }

    utils::log_init(args.log_file);

    let config: Config = Config::from_file(args.config_file)?;

    if let Err(e) = AssetsDirEnvConfig::init() {
        log::error!("AssetsDirEnvConfig init failed: {:#}", e);
        std::process::exit(-2);
    }

    let task_cache = Rc::new(TaskCache::new(&config.db_path)?);

    let coordinator_listener = Box::new(ClearCacheCoordinatorListener {
        task_cache: task_cache.clone(),
    });

    let prover = Prover::new(&config, coordinator_listener)?;

    log::info!("prover start successfully. name: {}, type: {:?}, publickey: {}, version: {}",
        config.prover_name,
        config.proof_type,
        prover.get_public_key(),
        version::get_version(),
    );

    let task_processor = TaskProcessor::new(&prover, task_cache);

    task_processor.start();

    Ok(())
}
prover_rust/src/prover.rs (new file, 177 lines)
@@ -0,0 +1,177 @@
use anyhow::{bail, Context, Error, Ok, Result};
use ethers_core::types::U64;

use std::{cell::RefCell, rc::Rc};
use log;

use crate::{
    config::Config,
    coordinator_client::{
        listener::Listener, types::*, CoordinatorClient,
    },
    geth_client::GethClient,
    key_signer::KeySigner,
    types::{ProofFailureType, ProofStatus, ProofType},
    zk_circuits_handler::{CircuitsHandler, CircuitsHandlerProvider},
};

use super::types::{ProofDetail, Task};

pub struct Prover<'a> {
    config: &'a Config,
    key_signer: Rc<KeySigner>,
    circuits_handler_provider: RefCell<CircuitsHandlerProvider<'a>>,
    coordinator_client: RefCell<CoordinatorClient<'a>>,
    geth_client: Option<Rc<RefCell<GethClient>>>,
}

impl<'a> Prover<'a> {
    pub fn new(config: &'a Config, coordinator_listener: Box<dyn Listener>) -> Result<Self> {
        let proof_type = config.proof_type;
        let keystore_path = &config.keystore_path;
        let keystore_password = &config.keystore_password;

        let key_signer = Rc::new(KeySigner::new(&keystore_path, &keystore_password)?);
        let coordinator_client = CoordinatorClient::new(
            config,
            Rc::clone(&key_signer),
            coordinator_listener,
        ).context("failed to create coordinator_client")?;

        let geth_client = if config.proof_type == ProofType::ProofTypeChunk {
            Some(Rc::new(RefCell::new(GethClient::new(
                &config.prover_name,
                &config.l2geth.as_ref().unwrap().endpoint,
            ).context("failed to create l2 geth_client")?)))
        } else {
            None
        };

        let provider = CircuitsHandlerProvider::new(
            proof_type,
            config,
            geth_client.clone(),
        ).context("failed to create circuits handler provider")?;

        let prover = Prover {
            config,
            key_signer: Rc::clone(&key_signer),
            circuits_handler_provider: RefCell::new(provider),
            coordinator_client: RefCell::new(coordinator_client),
            geth_client,
        };

        Ok(prover)
    }

    pub fn get_proof_type(&self) -> ProofType {
        self.config.proof_type
    }

    pub fn get_public_key(&self) -> String {
        self.key_signer.get_public_key()
    }

    pub fn fetch_task(&self) -> Result<Task> {
        log::info!("[prover] start to fetch_task");
        let mut req = GetTaskRequest {
            task_type: self.get_proof_type(),
            prover_height: None,
            vks: self.circuits_handler_provider.borrow().get_vks(),
        };

        if self.get_proof_type() == ProofType::ProofTypeChunk {
            let latest_block_number = self.get_latest_block_number_value()?;
            if let Some(v) = latest_block_number {
                if v.as_u64() == 0 {
                    bail!("omit to prove task of the genesis block")
                }
                req.prover_height = Some(v.as_u64());
            } else {
                log::error!("[prover] failed to fetch latest confirmed block number, got None");
                bail!("failed to fetch latest confirmed block number, got None")
            }
        }
        let resp = self.coordinator_client.borrow_mut().get_task(&req)?;

        match resp.data {
            Some(d) => Ok(Task::from(d)),
            None => {
                bail!("data of get_task empty, while error_code is success. there may be something wrong in response data or inner logic.")
            }
        }
    }

    pub fn prove_task(&self, task: &Task) -> Result<ProofDetail> {
        log::info!("[prover] start to prove_task, task id: {}", task.id);
        self.circuits_handler_provider.borrow_mut().clear_circuits_handler();
        if let Some(handler) = self.circuits_handler_provider.borrow_mut().get_circuits_handler(&task.hard_fork_name) {
            self.do_prove(task, handler)
        } else {
            log::error!("failed to get a circuit handler");
            bail!("failed to get a circuit handler")
        }
    }

    fn do_prove(&self, task: &Task, handler: &Box<dyn CircuitsHandler>) -> Result<ProofDetail> {
        let mut proof_detail = ProofDetail {
            id: task.id.clone(),
            proof_type: task.task_type,
            ..Default::default()
        };

        proof_detail.proof_data = handler.get_proof_data(task.task_type, task)?;
        Ok(proof_detail)
    }

    pub fn submit_proof(&self, proof_detail: ProofDetail, task: &Task) -> Result<()> {
        log::info!("[prover] start to submit_proof, task id: {}", proof_detail.id);

        let request = SubmitProofRequest {
            uuid: task.uuid.clone(),
            task_id: proof_detail.id,
            task_type: proof_detail.proof_type,
            status: ProofStatus::Ok,
            proof: proof_detail.proof_data,
            hard_fork_name: task.hard_fork_name.clone(),
            ..Default::default()
        };

        self.do_submit(&request)
    }

    pub fn submit_error(
        &self,
        task: &Task,
        failure_type: ProofFailureType,
        error: Error,
    ) -> Result<()> {
        log::info!("[prover] start to submit_error, task id: {}", task.id);
        let request = SubmitProofRequest {
            uuid: task.uuid.clone(),
            task_id: task.id.clone(),
            task_type: task.task_type,
            status: ProofStatus::Error,
            failure_type: Some(failure_type),
            failure_msg: Some(error.to_string()),
            hard_fork_name: task.hard_fork_name.clone(),
            ..Default::default()
        };
        self.do_submit(&request)
    }

    fn do_submit(&self, request: &SubmitProofRequest) -> Result<()> {
        self.coordinator_client.borrow_mut().submit_proof(request)?;
        Ok(())
    }

    fn get_latest_block_number_value(&self) -> Result<Option<U64>> {
        let number = self
            .geth_client
            .as_ref()
            .unwrap()
            .borrow_mut()
            .block_number()?;
        Ok(number.as_number())
    }
}
prover_rust/src/task_cache.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
use anyhow::{Ok, Result};

use super::coordinator_client::{listener::Listener, types::SubmitProofRequest};
use crate::types::TaskWrapper;
use sled::{Config, Db};
use std::rc::Rc;
use log;

pub struct TaskCache {
    db: Db,
}

impl TaskCache {
    pub fn new(db_path: &String) -> Result<Self> {
        let config = Config::new().path(db_path);
        let db = config.open()?;
        log::info!("[task_cache] initiate successfully to {db_path}");
        Ok(Self { db })
    }

    pub fn put_task(&self, task_wrapper: &TaskWrapper) -> Result<()> {
        let k = task_wrapper.task.id.clone().into_bytes();
        let v = serde_json::to_vec(task_wrapper)?;
        self.db.insert(k, v)?;
        log::info!("[task_cache] put_task with task_id: {}", task_wrapper.task.id);
        Ok(())
    }

    pub fn get_last_task(&self) -> Result<Option<TaskWrapper>> {
        let last = self.db.last()?;
        if let Some((k, v)) = last {
            let kk = std::str::from_utf8(k.as_ref())?;
            let task_wrapper: TaskWrapper = serde_json::from_slice(v.as_ref())?;
            log::info!("[task_cache] get_last_task with task_id: {kk}, count: {}", task_wrapper.get_count());
            return Ok(Some(task_wrapper));
        }
        Ok(None)
    }

    pub fn delete_task(&self, task_id: String) -> Result<()> {
        let k = task_id.clone().into_bytes();
        self.db.remove(k)?;
        log::info!("[task cache] delete_task with task_id: {task_id}");
        Ok(())
    }
}

// ========================= listener ===========================

pub struct ClearCacheCoordinatorListener {
    pub task_cache: Rc<TaskCache>,
}

impl Listener for ClearCacheCoordinatorListener {
    fn on_proof_submitted(&self, req: &SubmitProofRequest) {
        let result = self.task_cache.delete_task(req.task_id.clone());
        if let Err(e) = result {
            log::error!("delete task from embed db failed, {:#}", e);
        }
    }
}
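`TaskCache` is the sled-backed store that `task_processor.rs` uses to survive proving crashes: a task is persisted (with its attempt count) before proving starts and deleted once its proof is submitted, which is how attempts are capped at three. A hedged sketch of that round trip; the database path is a placeholder:

```rust
use anyhow::Result;

use crate::task_cache::TaskCache;
use crate::types::{Task, TaskWrapper};

fn cache_round_trip(task: Task) -> Result<()> {
    let cache = TaskCache::new(&"unique-db-path-for-prover-1".to_string())?;

    // Persist the task before proving, bumping its attempt count first.
    let mut wrapper: TaskWrapper = task.into();
    wrapper.increment_count();
    cache.put_task(&wrapper)?;

    // After a crash and restart, the last unfinished task is picked up again.
    if let Some(pending) = cache.get_last_task()? {
        println!("task {} tried {} time(s)", pending.task.id, pending.get_count());
        // On successful submission, ClearCacheCoordinatorListener removes the entry.
        cache.delete_task(pending.task.id.clone())?;
    }
    Ok(())
}
```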
prover_rust/src/task_processor.rs (new file, 81 lines)
@@ -0,0 +1,81 @@
use super::{prover::Prover, task_cache::TaskCache};
use anyhow::{Context, Result};
use log;
use std::rc::Rc;

pub struct TaskProcessor<'a> {
    prover: &'a Prover<'a>,
    task_cache: Rc<TaskCache>,
}

impl<'a> TaskProcessor<'a> {
    pub fn new(prover: &'a Prover<'a>, task_cache: Rc<TaskCache>) -> Self {
        TaskProcessor { prover, task_cache }
    }

    pub fn start(&self) {
        loop {
            log::info!("start a new round.");
            if let Err(err) = self.prove_and_submit() {
                log::error!("encounter error: {:#}", err);
            } else {
                log::info!("prove & submit succeed.");
            }
        }
    }

    fn prove_and_submit(&self) -> Result<()> {
        let task_from_cache = self
            .task_cache
            .get_last_task()
            .context("failed to peek from stack")?;

        let mut task_wrapper = match task_from_cache {
            Some(t) => t,
            None => {
                let fetch_result = self.prover.fetch_task();
                if let Err(err) = fetch_result {
                    std::thread::sleep(core::time::Duration::from_secs(10));
                    return Err(err).context("failed to fetch task from coordinator");
                }
                fetch_result.unwrap().into()
            }
        };

        if task_wrapper.get_count() <= 2 {
            task_wrapper.increment_count();
            self.task_cache
                .put_task(&task_wrapper)
                .context("failed to push task into stack, updating count")?;

            log::info!(
                "start to prove task, task_type: {:?}, task_id: {}",
                task_wrapper.task.task_type,
                task_wrapper.task.id
            );
            let result = match self.prover.prove_task(&task_wrapper.task) {
                Ok(proof_detail) => self
                    .prover
                    .submit_proof(proof_detail, &task_wrapper.task),
                Err(error) => self.prover.submit_error(
                    &task_wrapper.task,
                    super::types::ProofFailureType::NoPanic,
                    error,
                ),
            };
            return result;
        }

        // if tried times >= 3, it's probably due to circuit proving panic
        log::error!(
            "zk proving panic for task, task_type: {:?}, task_id: {}",
            task_wrapper.task.task_type,
            task_wrapper.task.id
        );
        self.prover.submit_error(
            &task_wrapper.task,
            super::types::ProofFailureType::Panic,
            anyhow::anyhow!("zk proving panic for task"),
        )
    }
}
prover_rust/src/types.rs (new file, 195 lines)
@@ -0,0 +1,195 @@
use ethers_core::types::H256;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

use crate::coordinator_client::types::GetTaskResponseData;

pub type CommonHash = H256;

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofType {
    ProofTypeUndefined,
    ProofTypeChunk,
    ProofTypeBatch,
}

impl ProofType {
    fn from_u8(v: u8) -> Self {
        match v {
            1 => ProofType::ProofTypeChunk,
            2 => ProofType::ProofTypeBatch,
            _ => ProofType::ProofTypeUndefined,
        }
    }
}

impl Serialize for ProofType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProofType::ProofTypeUndefined => serializer.serialize_i8(0),
            ProofType::ProofTypeChunk => serializer.serialize_i8(1),
            ProofType::ProofTypeBatch => serializer.serialize_i8(2),
        }
    }
}

impl<'de> Deserialize<'de> for ProofType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: u8 = u8::deserialize(deserializer)?;
        Ok(ProofType::from_u8(v))
    }
}

impl Default for ProofType {
    fn default() -> Self {
        Self::ProofTypeUndefined
    }
}

#[derive(Serialize, Deserialize, Default)]
pub struct Task {
    pub uuid: String,
    pub id: String,
    #[serde(rename = "type", default)]
    pub task_type: ProofType,
    pub task_data: String,
    #[serde(default)]
    pub hard_fork_name: String,
}

impl From<GetTaskResponseData> for Task {
    fn from(value: GetTaskResponseData) -> Self {
        Self {
            uuid: value.uuid,
            id: value.task_id,
            task_type: value.task_type,
            task_data: value.task_data,
            hard_fork_name: value.hard_fork_name,
        }
    }
}

#[derive(Serialize, Deserialize, Default)]
pub struct TaskWrapper {
    pub task: Task,
    count: usize,
}

impl TaskWrapper {
    pub fn increment_count(&mut self) {
        self.count += 1;
    }

    pub fn get_count(&self) -> usize {
        self.count
    }
}

impl From<Task> for TaskWrapper {
    fn from(task: Task) -> Self {
        TaskWrapper { task, count: 0 }
    }
}

#[derive(Serialize, Deserialize, Default)]
pub struct ProofDetail {
    pub id: String,
    #[serde(rename = "type", default)]
    pub proof_type: ProofType,
    pub proof_data: String,
    pub error: String,
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofFailureType {
    Undefined,
    Panic,
    NoPanic,
}

impl ProofFailureType {
    fn from_u8(v: u8) -> Self {
        match v {
            1 => ProofFailureType::Panic,
            2 => ProofFailureType::NoPanic,
            _ => ProofFailureType::Undefined,
        }
    }
}

impl Serialize for ProofFailureType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProofFailureType::Undefined => serializer.serialize_u8(0),
            ProofFailureType::Panic => serializer.serialize_u8(1),
            ProofFailureType::NoPanic => serializer.serialize_u8(2),
        }
    }
}

impl<'de> Deserialize<'de> for ProofFailureType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: u8 = u8::deserialize(deserializer)?;
        Ok(ProofFailureType::from_u8(v))
    }
}

impl Default for ProofFailureType {
    fn default() -> Self {
        Self::Undefined
    }
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofStatus {
    Ok,
    Error,
}

impl ProofStatus {
    fn from_u8(v: u8) -> Self {
        match v {
            0 => ProofStatus::Ok,
            _ => ProofStatus::Error,
        }
    }
}

impl Serialize for ProofStatus {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProofStatus::Ok => serializer.serialize_u8(0),
            ProofStatus::Error => serializer.serialize_u8(1),
        }
    }
}

impl<'de> Deserialize<'de> for ProofStatus {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: u8 = u8::deserialize(deserializer)?;
        Ok(ProofStatus::from_u8(v))
    }
}

impl Default for ProofStatus {
    fn default() -> Self {
        Self::Ok
    }
}
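The hand-written `Serialize`/`Deserialize` impls above exist so these enums travel over the wire as the same small integers the Go coordinator uses (`type`: 1 for chunk, 2 for batch; status and failure type likewise). A hedged round-trip sketch of that wire format:

```rust
use crate::types::{ProofType, Task};

fn wire_format_example() -> serde_json::Result<()> {
    // Integer tags are what the coordinator sends, e.g. inside a get_task response.
    let json = r#"{"uuid":"u-1","id":"t-1","type":1,"task_data":"{}","hard_fork_name":"curie"}"#;
    let task: Task = serde_json::from_str(json)?;
    assert_eq!(task.task_type, ProofType::ProofTypeChunk);

    // Serialization goes back to the numeric form.
    assert_eq!(serde_json::to_string(&ProofType::ProofTypeBatch)?, "2");
    Ok(())
}
```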
prover_rust/src/utils.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
use env_logger::Env;
use std::sync::Once;
use std::fs::OpenOptions;

static LOG_INIT: Once = Once::new();

/// Initialize log
pub fn log_init(log_file: Option<String>) {
    LOG_INIT.call_once(|| {
        let mut builder = env_logger::Builder::from_env(Env::default().default_filter_or("info"));
        if let Some(file_path) = log_file {
            let target = Box::new(
                OpenOptions::new()
                    .write(true)
                    .create(true)
                    .truncate(false)
                    .open(file_path)
                    .expect("Can't create log file")
            );
            builder.target(env_logger::Target::Pipe(target));
        }
        builder.init();
    });
}
prover_rust/src/version.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
use std::cell::OnceCell;

static DEFAULT_COMMIT: &str = "unknown";
static mut VERSION: OnceCell<String> = OnceCell::new();

pub const TAG: &str = "v0.0.0";
pub const DEFAULT_ZK_VERSION: &str = "000000-000000";

fn init_version() -> String {
    let commit = option_env!("GIT_REV").unwrap_or(DEFAULT_COMMIT);
    let tag = option_env!("GO_TAG").unwrap_or(TAG);
    let zk_version = option_env!("ZK_VERSION").unwrap_or(DEFAULT_ZK_VERSION);
    format!("{tag}-{commit}-{zk_version}")
}

pub fn get_version() -> String {
    unsafe { VERSION.get_or_init(init_version).clone() }
}
prover_rust/src/zk_circuits_handler.rs (new file, 125 lines)
@@ -0,0 +1,125 @@
mod bernoulli;
mod curie;

use anyhow::Result;
use bernoulli::BaseCircuitsHandler;
use curie::NextCircuitsHandler;
use std::collections::HashMap;
use std::{cell::RefCell, rc::Rc};
use super::geth_client::GethClient;
use crate::{config::{Config, AssetsDirEnvConfig}, types::{ProofType, Task}};

type HardForkName = String;

pub mod utils {
    pub fn encode_vk(vk: Vec<u8>) -> String {
        base64::encode(vk)
    }
}

pub trait CircuitsHandler {
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>>;

    fn get_proof_data(&self, task_type: ProofType, task: &Task) -> Result<String>;
}

type CircuitsHandlerBuilder = fn(proof_type: ProofType, config: &Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Box<dyn CircuitsHandler>>;

pub struct CircuitsHandlerProvider<'a> {
    proof_type: ProofType,
    config: &'a Config,
    geth_client: Option<Rc<RefCell<GethClient>>>,
    circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,

    current_hard_fork_name: Option<HardForkName>,
    current_circuit: Option<Box<dyn CircuitsHandler>>,
    vks: Vec<String>,
}

impl<'a> CircuitsHandlerProvider<'a> {
    pub fn new(proof_type: ProofType, config: &'a Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
        let mut m: HashMap<HardForkName, CircuitsHandlerBuilder> = HashMap::new();

        fn handler_builder(proof_type: ProofType, config: &Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Box<dyn CircuitsHandler>> {
            log::info!("now init zk circuits handler, hard_fork_name: {}", &config.low_version_circuit.hard_fork_name);
            AssetsDirEnvConfig::enable_first();
            BaseCircuitsHandler::new(proof_type,
                &config.low_version_circuit.params_path,
                &config.low_version_circuit.assets_path,
                geth_client
            ).map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
        }
        m.insert(config.low_version_circuit.hard_fork_name.clone(), handler_builder);

        fn next_handler_builder(proof_type: ProofType, config: &Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Box<dyn CircuitsHandler>> {
            log::info!("now init zk circuits handler, hard_fork_name: {}", &config.high_version_circuit.hard_fork_name);
            AssetsDirEnvConfig::enable_second();
            NextCircuitsHandler::new(proof_type,
                &config.high_version_circuit.params_path,
                &config.high_version_circuit.assets_path,
                geth_client
            ).map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
        }

        m.insert(config.high_version_circuit.hard_fork_name.clone(), next_handler_builder);

        let vks = CircuitsHandlerProvider::init_vks(proof_type, config, &m, geth_client.clone());

        let provider = CircuitsHandlerProvider {
            proof_type,
            config,
            geth_client,
            circuits_handler_builder_map: m,
            current_hard_fork_name: None,
            current_circuit: None,
            vks,
        };

        Ok(provider)
    }

    pub fn get_circuits_handler(&mut self, hard_fork_name: &String) -> Option<&Box<dyn CircuitsHandler>> {
        match &self.current_hard_fork_name {
            Some(name) if name == hard_fork_name => {
                log::info!("get circuits handler from cache");
                (&self.current_circuit).as_ref()
            },
            _ => {
                log::info!("failed to get circuits handler from cache, create a new one: {hard_fork_name}");
                let builder = self.circuits_handler_builder_map.get(hard_fork_name);
                builder.and_then(|build| {
                    log::info!("building circuits handler for {hard_fork_name}");
                    let handler = build(self.proof_type, &self.config, self.geth_client.clone()).expect("failed to build circuits handler");
                    self.current_hard_fork_name = Some(hard_fork_name.clone());
                    self.current_circuit = Some(handler);
                    (&self.current_circuit).as_ref()
                } )
            }
        }
    }

    pub fn clear_circuits_handler(&mut self) {
        log::info!("clear circuits handler to prevent cache, just for test");
        self.current_hard_fork_name = None;
        self.current_circuit = None;
    }

    fn init_vks(proof_type: ProofType, config: &'a Config,
        circuits_handler_builder_map: &HashMap<HardForkName, CircuitsHandlerBuilder>,
        geth_client: Option<Rc<RefCell<GethClient>>>) -> Vec<String> {
        circuits_handler_builder_map
            .iter()
            .map(|(hard_fork_name, build)| {
                let handler = build(proof_type, config, geth_client.clone()).expect("failed to build circuits handler");
                let vk = handler.get_vk(proof_type)
                    .map_or("".to_string(), |vk| utils::encode_vk(vk));
                log::info!("vk for {hard_fork_name} is {vk}");
                vk
            })
            .collect::<Vec<String>>()
    }

    pub fn get_vks(&self) -> Vec<String> {
        self.vks.clone()
    }
}
194
prover_rust/src/zk_circuits_handler/bernoulli.rs
Normal file
194
prover_rust/src/zk_circuits_handler/bernoulli.rs
Normal file
@@ -0,0 +1,194 @@
use super::CircuitsHandler;
use once_cell::sync::Lazy;
use crate::{geth_client::GethClient, types::ProofType};
use anyhow::{bail, Ok, Result};
use serde::Deserialize;

use crate::types::{Task, CommonHash};
use std::{cell::RefCell, env, cmp::Ordering, rc::Rc};
use prover::{aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver};
use prover::{BlockTrace, ChunkHash, ChunkProof};

// Only used for debugging.
pub(crate) static OUTPUT_DIR: Lazy<Option<String>> =
    Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());

#[derive(Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkHash>,
    pub chunk_proofs: Vec<ChunkProof>,
}

#[derive(Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    block_trace.header.number.map(|n| n.as_u64())
}

pub struct BaseCircuitsHandler {
    chunk_prover: Option<RefCell<ChunkProver>>,
    batch_prover: Option<RefCell<BatchProver>>,

    geth_client: Option<Rc<RefCell<GethClient>>>,
}

impl BaseCircuitsHandler {
    pub fn new(proof_type: ProofType, params_dir: &str, assets_dir: &str, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
        match proof_type {
            ProofType::ProofTypeChunk => Ok(Self {
                chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
                batch_prover: None,
                geth_client,
            }),
            ProofType::ProofTypeBatch => Ok(Self {
                batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
                chunk_prover: None,
                geth_client,
            }),
            _ => bail!("proof type invalid"),
        }
    }

    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk_proof = prover
                .borrow_mut()
                .gen_chunk_proof(chunk_trace, None, None, self.get_output_dir())?;

            return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
            self.gen_chunk_hashes_proofs(task)?;
        let chunk_proofs: Vec<ChunkProof> =
            chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

        if let Some(prover) = self.batch_prover.as_ref() {
            let is_valid = prover.borrow_mut().check_chunk_proofs(&chunk_proofs);
            if !is_valid {
                bail!("non-match chunk protocol, task-id: {}", &task.id)
            }

            let batch_proof = prover
                .borrow_mut()
                .gen_agg_evm_proof(chunk_hashes_proofs, None, self.get_output_dir())?;

            return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }

    fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
        let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
        self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
    }

    fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkHash, ChunkProof)>> {
        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;

        Ok(batch_task_detail
            .chunk_infos
            .clone()
            .into_iter()
            .zip(batch_task_detail.chunk_proofs.clone())
            .collect())
    }

    fn get_sorted_traces_by_hashes(
        &self,
        block_hashes: &Vec<CommonHash>,
    ) -> Result<Vec<BlockTrace>> {
        if block_hashes.is_empty() {
            log::error!("[prover] failed to get sorted traces: block_hashes are empty");
            bail!("block_hashes are empty")
        }

        let mut block_traces = Vec::new();
        for hash in block_hashes.iter() {
            let trace = self
                .geth_client
                .as_ref()
                .unwrap()
                .borrow_mut()
                .get_block_trace_by_hash(hash)?;
            block_traces.push(trace.block_trace);
        }

        block_traces.sort_by(|a, b| {
            if get_block_number(a).is_none() {
                Ordering::Less
            } else if get_block_number(b).is_none() {
                Ordering::Greater
            } else {
                get_block_number(a)
                    .unwrap()
                    .cmp(&get_block_number(b).unwrap())
            }
        });

        let block_numbers: Vec<u64> = block_traces
            .iter()
            .map(|trace| get_block_number(trace).unwrap_or(0))
            .collect();
        let mut i = 0;
        while i < block_numbers.len() - 1 {
            if block_numbers[i] + 1 != block_numbers[i + 1] {
                log::error!(
                    "[prover] block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                );
                bail!(
                    "block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                )
            }
            i += 1;
        }

        Ok(block_traces)
    }
}

impl CircuitsHandler for BaseCircuitsHandler {
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
        match task_type {
            ProofType::ProofTypeChunk => self
                .chunk_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            ProofType::ProofTypeBatch => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            _ => unreachable!(),
        }
    }

    fn get_proof_data(&self, task_type: ProofType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            ProofType::ProofTypeChunk => self.gen_chunk_proof(task),
            ProofType::ProofTypeBatch => self.gen_batch_proof(task),
            _ => unreachable!(),
        }
    }
}
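The sorting and continuity validation in get_sorted_traces_by_hashes can be read in isolation from the prover types. The sketch below mirrors that logic over plain Option<u64> block numbers; check_continuous is a hypothetical helper added here for illustration only, not code from the file above.

// Standalone illustration of the sort-then-check-continuity step:
// traces with an unknown number sort first, missing numbers default to 0,
// and every consecutive pair must differ by exactly one.
use std::cmp::Ordering;

fn check_continuous(mut numbers: Vec<Option<u64>>) -> Result<Vec<u64>, String> {
    // Mirrors the Ordering::Less / Ordering::Greater branches above.
    numbers.sort_by(|a, b| match (a, b) {
        (None, _) => Ordering::Less,
        (_, None) => Ordering::Greater,
        (Some(x), Some(y)) => x.cmp(y),
    });

    // Missing block numbers default to 0, as in the handler.
    let numbers: Vec<u64> = numbers.into_iter().map(|n| n.unwrap_or(0)).collect();

    // Reject any gap or reordering in the sequence.
    for w in numbers.windows(2) {
        if w[0] + 1 != w[1] {
            return Err(format!(
                "block numbers are not continuous, got {} and {}",
                w[0], w[1]
            ));
        }
    }
    Ok(numbers)
}

fn main() {
    assert!(check_continuous(vec![Some(101), Some(100), Some(102)]).is_ok());
    assert!(check_continuous(vec![Some(100), Some(102)]).is_err());
    println!("continuity checks behaved as expected");
}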
198
prover_rust/src/zk_circuits_handler/curie.rs
Normal file
@@ -0,0 +1,198 @@
use super::CircuitsHandler;
use crate::{geth_client::GethClient, types::ProofType};
use anyhow::{bail, Context, Ok, Result};
use serde::Deserialize;

use crate::types::{Task, CommonHash};
use std::{cell::RefCell, cmp::Ordering, rc::Rc};

use prover_next::{BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask, BatchProvingTask};
use prover_next::{aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver};

use super::bernoulli::OUTPUT_DIR;

#[derive(Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    pub chunk_proofs: Vec<ChunkProof>,
}

#[derive(Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    block_trace.header.number.map(|n| n.as_u64())
}

#[derive(Default)]
pub struct NextCircuitsHandler {
    chunk_prover: Option<RefCell<ChunkProver>>,
    batch_prover: Option<RefCell<BatchProver>>,

    geth_client: Option<Rc<RefCell<GethClient>>>,
}

impl NextCircuitsHandler {
    pub fn new(proof_type: ProofType, params_dir: &str, assets_dir: &str, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
        match proof_type {
            ProofType::ProofTypeChunk => Ok(Self {
                chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
                batch_prover: None,
                geth_client,
            }),
            ProofType::ProofTypeBatch => Ok(Self {
                batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
                chunk_prover: None,
                geth_client,
            }),
            _ => bail!("proof type invalid"),
        }
    }

    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk = ChunkProvingTask::from(chunk_trace);

            let chunk_proof = prover
                .borrow_mut()
                .gen_chunk_proof(chunk, None, None, self.get_output_dir())?;

            return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
                self.gen_chunk_hashes_proofs(task)?;
            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);
            if !is_valid {
                bail!("non-match chunk protocol, task-id: {}", &task.id)
            }

            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch = BatchProvingTask { chunk_proofs };
            let batch_proof = prover
                .borrow_mut()
                .gen_agg_evm_proof(batch, None, self.get_output_dir())?;

            return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }

    fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
        let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
        self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
    }

    fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkInfo, ChunkProof)>> {
        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;

        Ok(batch_task_detail
            .chunk_infos
            .clone()
            .into_iter()
            .zip(batch_task_detail.chunk_proofs.clone())
            .collect())
    }

    fn get_sorted_traces_by_hashes(
        &self,
        block_hashes: &Vec<CommonHash>,
    ) -> Result<Vec<BlockTrace>> {
        if block_hashes.is_empty() {
            log::error!("[prover] failed to get sorted traces: block_hashes are empty");
            bail!("block_hashes are empty")
        }

        let mut block_traces = Vec::new();
        for hash in block_hashes.iter() {
            let trace = self
                .geth_client
                .as_ref()
                .unwrap()
                .borrow_mut()
                .get_block_trace_by_hash(hash)?;
            block_traces.push(trace.block_trace);
        }

        block_traces.sort_by(|a, b| {
            if get_block_number(a).is_none() {
                Ordering::Less
            } else if get_block_number(b).is_none() {
                Ordering::Greater
            } else {
                get_block_number(a)
                    .unwrap()
                    .cmp(&get_block_number(b).unwrap())
            }
        });

        let block_numbers: Vec<u64> = block_traces
            .iter()
            .map(|trace| get_block_number(trace).unwrap_or(0))
            .collect();
        let mut i = 0;
        while i < block_numbers.len() - 1 {
            if block_numbers[i] + 1 != block_numbers[i + 1] {
                log::error!(
                    "[prover] block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                );
                bail!(
                    "block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                )
            }
            i += 1;
        }

        Ok(block_traces)
    }
}

impl CircuitsHandler for NextCircuitsHandler {
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
        match task_type {
            ProofType::ProofTypeChunk => self
                .chunk_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            ProofType::ProofTypeBatch => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            _ => unreachable!(),
        }
    }

    fn get_proof_data(&self, task_type: ProofType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            ProofType::ProofTypeChunk => self.gen_chunk_proof(task),
            ProofType::ProofTypeBatch => self.gen_batch_proof(task),
            _ => unreachable!(),
        }
    }
}
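Both BaseCircuitsHandler (bernoulli, built on the prover crate) and NextCircuitsHandler (curie, built on prover_next) implement the same CircuitsHandler trait from the parent module, which is not part of this diff. Judging only from the two impl blocks, its shape is presumably close to the sketch below; the ProofType and Task stubs and the MockHandler are assumptions added for illustration, not code from the repository.

// Assumed shape of the shared CircuitsHandler trait, inferred from the impls
// above. The real definition lives in zk_circuits_handler/mod.rs and may
// differ; the types below are illustrative stand-ins.
use anyhow::Result;

#[derive(Clone, Copy)]
pub enum ProofType {
    ProofTypeChunk,
    ProofTypeBatch,
}

pub struct Task {
    pub id: String,
    pub task_data: String,
}

pub trait CircuitsHandler {
    // Verifying key for the requested proof kind, if this handler supports it.
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>>;
    // JSON-encoded proof for the given task.
    fn get_proof_data(&self, task_type: ProofType, task: &Task) -> Result<String>;
}

// A trivial mock handler, e.g. for wiring tests without real provers.
pub struct MockHandler;

impl CircuitsHandler for MockHandler {
    fn get_vk(&self, _task_type: ProofType) -> Option<Vec<u8>> {
        Some(vec![0u8; 32])
    }

    fn get_proof_data(&self, _task_type: ProofType, task: &Task) -> Result<String> {
        Ok(format!("{{\"task_id\":\"{}\",\"proof\":\"mock\"}}", task.id))
    }
}

fn main() -> Result<()> {
    // Callers only ever see a Box<dyn CircuitsHandler>, so hard-fork-specific
    // prover crates stay hidden behind the trait object.
    let handler: Box<dyn CircuitsHandler> = Box::new(MockHandler);
    let task = Task { id: "task-1".to_string(), task_data: "{}".to_string() };
    println!("{}", handler.get_proof_data(ProofType::ProofTypeChunk, &task)?);
    Ok(())
}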