Compare commits

...

68 Commits

Author SHA1 Message Date
Mengran Lan
c7ce3205da revert rc9 to rc8-hotfix 2024-06-11 19:13:56 +08:00
Mengran Lan
d85888a54e clear cache before prove task 2024-06-11 19:12:10 +08:00
Mengran Lan
ca90b1ff01 update Cargo.lock 2024-06-11 19:08:02 +08:00
Mengran Lan
e88336dcf9 upgrade circuits to rc9 2024-06-11 18:08:37 +08:00
Mengran Lan
3086a4c672 print full hash value 2024-06-11 15:42:53 +08:00
Mengran Lan
9a919a6d45 error log add detail info 2024-06-07 16:12:51 +08:00
Mengran Lan
f77a008fc1 add comment 2024-06-07 10:38:35 +08:00
Mengran Lan
5ac5bec13b add logs 2024-06-07 10:33:54 +08:00
Mengran Lan
ec1d063e58 remove empty line 2024-06-07 10:31:07 +08:00
Mengran Lan
094f8583c3 add double check in prover.rs 2024-06-07 10:30:23 +08:00
Mengran Lan
47e477b42d fix bug in coordinator_client 2024-06-07 10:22:03 +08:00
colin
f28ba157cb Merge branch 'develop' into refactor/prover 2024-06-06 17:57:25 +08:00
Zhuo Zhang
967fb653a2 upgrade libzkp to v0.11.0rc8-hotfix1 2024-06-06 09:54:43 +00:00
Mengran Lan
5585a5434d remove redundant put_task 2024-06-06 15:38:59 +08:00
Zhang Zhuo
40b4b743eb upgrade prover old branch to fix incompatible chunk proof (#1364) 2024-06-06 11:11:23 +08:00
Mengran Lan
2f8532cb72 set default tag to v0.0.0 2024-06-05 22:18:55 +08:00
Mengran Lan
99b4603f77 trivial change 2024-06-05 22:17:55 +08:00
Mengran Lan
ca3e7e45d7 add vk&hard_fork_name pair log 2024-06-05 22:16:25 +08:00
Mengran Lan
4d9d014df8 fix typo 2024-06-05 22:10:53 +08:00
Mengran Lan
c6d2c22bce deal with env variable SCROLL_PROVER_ASSETS_DIR for two circuits (#1361) 2024-06-05 19:30:11 +08:00
colin
971e6c5aa7 Merge branch 'develop' into refactor/prover 2024-06-05 14:57:37 +08:00
Mengran Lan
694d155437 code utilize 2024-06-05 14:28:12 +08:00
colin
0dd1bb3343 Merge branch 'develop' into refactor/prover 2024-06-05 14:25:52 +08:00
Mengran Lan
76429e524c refactor zkevm traits to support rc8 (#1359) 2024-06-05 14:22:31 +08:00
Mengran Lan
92200d1bd4 support dynamic load circuits handler (#1357) 2024-06-04 14:20:54 +08:00
Mengran Lan
372911b772 remove vk field && add hard_fork_name when submitting proof 2024-06-04 00:54:31 +08:00
Mengran Lan
dc5b74e903 Apply suggestions from code review
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
2024-06-04 00:48:37 +08:00
Mengran Lan
533da0047f add logs 2024-05-31 19:04:28 +08:00
Mengran Lan
66453af811 add retry & timeout logic to coordinator client 2024-05-31 15:52:28 +08:00
Mengran Lan
5acb2af74d upgrade to v0.11.0rc4 2024-05-31 12:20:42 +08:00
Mengran Lan
094908404e refactor coordinator client errors to enums 2024-05-31 12:11:22 +08:00
Mengran Lan
74f3623ede no change, remove a comment 2024-05-31 11:04:47 +08:00
Mengran Lan
5ad63aed29 sleep 10s when failed to fetch task from coordinator 2024-05-31 10:41:45 +08:00
Mengran Lan
43505d4780 add args 2024-05-30 23:37:58 +08:00
Mengran Lan
1b20456928 Revert "upgrade circuits from 0.11.0rc2 to 0.11.0rc4"
This reverts commit 277b75bc69.
2024-05-30 23:30:27 +08:00
Mengran Lan
277b75bc69 upgrade circuits from 0.11.0rc2 to 0.11.0rc4 2024-05-30 23:12:32 +08:00
Mengran Lan
e5e68f5e28 enable gzip when sending http request 2024-05-30 14:43:14 +08:00
Mengran Lan
e661c26743 remove dependency of eth_types from zkevm-circuits 2024-05-29 18:51:05 +08:00
Mengran Lan
282d6e996b update makefile 2024-05-29 18:23:44 +08:00
Mengran Lan
2371edc617 complete interacte with coordinator && version improvement 2024-05-29 18:09:05 +08:00
Mengran Lan
0532653d34 upgrade circuits versions 2024-05-29 15:32:56 +08:00
Mengran Lan
f37556fdd5 remove l2geth confirmations config 2024-05-29 15:21:16 +08:00
Mengran Lan
32e95fb370 format code after changing layout 2024-05-27 23:35:33 +08:00
Mengran Lan
d8f8880ab9 change the layout 2024-05-27 23:35:13 +08:00
Mengran Lan
81533c16dd update denpendencies 2024-05-27 19:03:49 +08:00
Mengran Lan
f170ae5fe3 fix bug 2024-05-27 17:40:13 +08:00
Mengran Lan
ee2a3956e6 test multi circuits 2024-05-27 17:36:49 +08:00
Mengran Lan
e988cbb15d Revert "comment types for next"
This reverts commit 46f5849ae0.
2024-05-27 17:32:47 +08:00
Mengran Lan
46f5849ae0 comment types for next 2024-05-27 17:24:31 +08:00
Mengran Lan
45d8f66864 update cargo depends 2024-05-27 17:19:02 +08:00
Mengran Lan
94e1ea3a08 change prover_next version to fit the e2e test 2024-05-27 16:53:44 +08:00
Mengran Lan
62c1f00d3b copy libzktrie.so to lib dir 2024-05-27 15:54:49 +08:00
Mengran Lan
038d7a5bbf tmp commit, test next handler wrapper logic (set next handler as default) 2024-05-26 23:13:11 +08:00
Mengran Lan
112e9ac42b add task_cache logic 2024-05-24 13:07:10 +08:00
Mengran Lan
728266ebad add info logs for circuits handler 2024-05-23 11:31:09 +08:00
Mengran Lan
7b8f30d230 add second zkevm-handler && add proof_check when proving batch 2024-05-22 18:47:06 +08:00
Mengran Lan
69ca648c83 utilize proof_status logic 2024-05-22 15:50:06 +08:00
Mengran Lan
00a07a8258 build using --rlease && fix bug in proof status 2024-05-22 11:41:28 +08:00
Mengran Lan
f87e5b5ca7 fix bug, action not taken if re-login to coordinator 2024-05-21 23:48:29 +08:00
Mengran Lan
7b848f971b fmt code 2024-05-21 12:08:31 +08:00
Mengran Lan
49166ec8d0 change l2geth config to option 2024-05-21 12:06:55 +08:00
Mengran Lan
2d0c36eb5a geth client add tokio runtime 2024-05-20 22:47:46 +08:00
Mengran Lan
445a8d592a unify coordinator client api, add logs 2024-05-20 19:03:18 +08:00
Mengran Lan
eadc51d33b set vk in get task request 2024-05-20 16:18:00 +08:00
Mengran Lan
254a7faf58 init the log; add tokio runtime 2024-05-20 16:15:06 +08:00
Mengran Lan
173cbc4dc4 first compile-ready version 2024-05-16 11:17:16 +08:00
Mengran Lan
94bd5917ba finish most logic, leaving some rust-style compiler issue to be solved 2024-05-15 14:28:01 +08:00
Mengran Lan
107aa5792b tmp save 2024-05-13 15:59:06 +08:00
27 changed files with 7691 additions and 0 deletions

2
.gitignore vendored
View File

@@ -20,3 +20,5 @@ coverage.txt
# misc
sftp-config.json
*~
target

5678
prover_rust/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

47
prover_rust/Cargo.toml Normal file
View File

@@ -0,0 +1,47 @@
[package]
name = "prover_rust"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

# Redirect upstream crates to scroll-tech forks so the entire dependency graph
# resolves to the same patched revisions used by the zkevm circuits.
[patch.crates-io]
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0.1.0" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "main" }
[patch."https://github.com/privacy-scaling-explorations/bls12_381"]
bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/impl_scalar_field" }

[dependencies]
anyhow = "1.0"
log = "0.4"
env_logger = "0.11.3"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.116"
futures = "0.3.30"
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
# Two prover versions are linked side by side: `prover` handles the old fork
# (bernoulli) and `prover_next` the new one (curie); see the circuits handlers.
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "v0.10", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.0rc8-hotfix1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
reqwest-retry = "0.5"
once_cell = "1.19.0"
hex = "0.4.3"
tiny-keccak = { version = "2.0.0", features = ["sha3", "keccak"] }
rand = "0.8.5"
eth-keystore = "0.5.0"
rlp = "0.5.2"
tokio = "1.37.0"
sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }

43
prover_rust/Makefile Normal file
View File

@@ -0,0 +1,43 @@
.PHONY: prover

# GNU make >= 4.3 changed how "#" is escaped inside $(shell ...); pick the
# matching grep invocation for the running make version.
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
$(info ************ First ************)
# Short (7-char) commit hash of the pinned halo2 revision from Cargo.lock.
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
$(info ************ Second ************)
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "\#" -f2 | cut -c-7)
endif

# Highest zkevm-circuits tag+commit pinned in Cargo.lock (see helper script).
ZKEVM_VERSION=$(shell ./print_high_zkevm_version.sh)
ifeq (${ZKEVM_VERSION},)
$(error ZKEVM_VERSION not set)
else
$(info ZKEVM_VERSION is ${ZKEVM_VERSION})
endif
ZKEVM_COMMIT=$(shell echo ${ZKEVM_VERSION} | cut -d " " -f2)
$(info ZKEVM_COMMIT is ${ZKEVM_COMMIT})

# Empty when no local halo2_gpu override is configured in ~/.cargo/config.
HALO2_GPU_VERSION=$(shell ./print_halo2gpu_version.sh | sed -n '2p')
GIT_REV=$(shell git rev-parse --short HEAD)
# Version tag shared with the Go services in this repo.
GO_TAG=$(shell grep "var tag = " ../common/version/version.go | cut -d "\"" -f2)
ifeq (${GO_TAG},)
$(error GO_TAG not set)
else
$(info GO_TAG is ${GO_TAG})
endif

ifeq (${HALO2_GPU_VERSION},)
# use halo2_proofs with CPU
ZK_VERSION=${ZKEVM_COMMIT}-${HALO2_VERSION}
else
# use halo2_gpu
ZK_VERSION=${ZKEVM_COMMIT}-${HALO2_GPU_VERSION}
endif

prover:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
	rm -rf ./lib && mkdir ./lib
	find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib

26
prover_rust/config.json Normal file
View File

@@ -0,0 +1,26 @@
{
"prover_name": "prover-1",
"keystore_path": "keystore.json",
"keystore_password": "prover-pwd",
"db_path": "unique-db-path-for-prover-1",
"proof_type": 2,
"low_version_circuit": {
"hard_fork_name": "bernoulli",
"params_path": "params",
"assets_path": "assets"
},
"high_version_circuit": {
"hard_fork_name": "curie",
"params_path": "params",
"assets_path": "assets"
},
"coordinator": {
"base_url": "http://localhost:8555",
"retry_count": 10,
"retry_wait_time_sec": 10,
"connection_timeout_sec": 30
},
"l2geth": {
"endpoint": "http://localhost:9999"
}
}

View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Print the short commit hash of a local halo2_gpu checkout, if the cargo
# config redirects halo2 to one via a [paths] override; print nothing otherwise.
# NOTE(review): the Makefile reads line 2 of this script's output (`sed -n 2p`),
# relying on `pushd` printing the directory as line 1 — confirm before changing.

config_file="$HOME/.cargo/config"

# No cargo config at all -> no GPU override.
if [ ! -e "$config_file" ]; then
exit 0
fi

# A leading comment line marks the override as disabled.
if [[ $(head -n 1 "$config_file") == "#"* ]]; then
exit 0
fi

# Extract the first path from `paths = ["..."]` and read its HEAD commit.
halo2gpu_path=$(grep -Po '(?<=paths = \[")([^"]*)' $config_file)

pushd $halo2gpu_path
commit_hash=$(git log --pretty=format:%h -n 1)
# Short hash, truncated to 7 characters for consistency with Cargo.lock pins.
echo "${commit_hash:0:7}"
popd

View File

@@ -0,0 +1,9 @@
# Print "<tag> <short-commit>" of the highest zkevm-circuits revision pinned in
# Cargo.lock (two versions are linked; this picks the lexicographically higher).
higher_zkevm_item=`grep "zkevm-circuits.git" ./Cargo.lock | sort | uniq | awk -F "[#=]" '{print $3" "$4}' | sort -k 1 | tail -n 1`
higher_version=`echo $higher_zkevm_item | awk '{print $1}'`
# Truncate the full commit hash to the conventional 7 characters.
higher_commit=`echo $higher_zkevm_item | cut -d ' ' -f2 | cut -c-7`
echo "$higher_version $higher_commit"

View File

@@ -0,0 +1 @@
nightly-2023-12-03

9
prover_rust/rustfmt.toml Normal file
View File

@@ -0,0 +1,9 @@
edition = "2021"
comment_width = 100
# Nightly-only option (toolchain is pinned to nightly in rust-toolchain).
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
# normalize_comments = true
reorder_imports = true
wrap_comments = true

89
prover_rust/src/config.rs Normal file
View File

@@ -0,0 +1,89 @@
use serde::{Deserialize, Serialize};
use std::fs::File;
use anyhow::{bail, Result};
use crate::types::ProofType;
/// Per-circuit-version configuration: the hard fork it proves and where its
/// KZG params and verification assets live on disk.
#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
    pub hard_fork_name: String,
    pub params_path: String,
    pub assets_path: String,
}
/// Connection settings for the coordinator service (see `coordinator_client`).
#[derive(Debug, Serialize, Deserialize)]
pub struct CoordinatorConfig {
    pub base_url: String,
    /// Max transient-failure retries per request.
    pub retry_count: u32,
    /// Upper bound of the retry backoff, in seconds.
    pub retry_wait_time_sec: u64,
    /// Per-request send timeout, in seconds.
    pub connection_timeout_sec: u64,
}
/// Endpoint of the l2geth node; only required for chunk provers.
#[derive(Debug, Serialize, Deserialize)]
pub struct L2GethConfig {
    pub endpoint: String,
}
/// Top-level prover configuration, deserialized from a JSON file
/// (see `config.json` for an example).
#[derive(Debug, Serialize, Deserialize)]
pub struct Config {
    pub prover_name: String,
    pub keystore_path: String,
    pub keystore_password: String,
    /// sled database path for the local task cache; must be unique per prover.
    pub db_path: String,
    // Falls back to ProofType::default() when absent from the JSON.
    #[serde(default)]
    pub proof_type: ProofType,
    /// Old-fork circuit (e.g. bernoulli).
    pub low_version_circuit: CircuitConfig,
    /// New-fork circuit (e.g. curie).
    pub high_version_circuit: CircuitConfig,
    pub coordinator: CoordinatorConfig,
    /// Optional: only needed when proving chunks.
    pub l2geth: Option<L2GethConfig>,
}
impl Config {
    /// Deserialize a `Config` from any JSON source.
    ///
    /// # Errors
    /// Returns an error when the stream is not valid JSON for this schema.
    pub fn from_reader<R>(reader: R) -> Result<Self>
    where
        R: std::io::Read,
    {
        serde_json::from_reader(reader).map_err(|e| anyhow::anyhow!(e))
    }

    /// Load a `Config` from a JSON file on disk.
    ///
    /// # Errors
    /// Fails when the file cannot be opened or parsed.
    pub fn from_file(file_name: String) -> Result<Self> {
        Self::from_reader(&File::open(file_name)?)
    }
}
static SCROLL_PROVER_ASSETS_DIR_ENV_NAME: &str = "SCROLL_PROVER_ASSETS_DIR";
static mut SCROLL_PROVER_ASSETS_DIRS: Vec<String> = vec![];
#[derive(Debug)]
pub struct AssetsDirEnvConfig {
}
impl AssetsDirEnvConfig {
pub fn init() -> Result<()> {
let value = std::env::var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME)?;
let dirs: Vec<&str> = value.split(',').collect();
if dirs.len() != 2 {
bail!("env variable SCROLL_PROVER_ASSETS_DIR value must be 2 parts seperated by comma.")
}
unsafe {
SCROLL_PROVER_ASSETS_DIRS = dirs.into_iter().map(|s| s.to_string()).collect();
log::info!("init SCROLL_PROVER_ASSETS_DIRS: {:?}", SCROLL_PROVER_ASSETS_DIRS);
}
Ok(())
}
pub fn enable_first() {
unsafe {
log::info!("set env {SCROLL_PROVER_ASSETS_DIR_ENV_NAME} to {}", &SCROLL_PROVER_ASSETS_DIRS[0]);
std::env::set_var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME, &SCROLL_PROVER_ASSETS_DIRS[0]);
}
}
pub fn enable_second() {
unsafe {
log::info!("set env {SCROLL_PROVER_ASSETS_DIR_ENV_NAME} to {}", &SCROLL_PROVER_ASSETS_DIRS[1]);
std::env::set_var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME, &SCROLL_PROVER_ASSETS_DIRS[1]);
}
}
}

View File

@@ -0,0 +1,135 @@
mod api;
mod errors;
pub mod listener;
pub mod types;
use anyhow::{bail, Context, Ok, Result};
use std::rc::Rc;
use api::API;
use errors::*;
use listener::Listener;
use log;
use tokio::runtime::Runtime;
use types::*;
use crate::key_signer::KeySigner;
use crate::config::Config;
/// Blocking client wrapper around the async coordinator HTTP `API`: owns its
/// own single-threaded tokio runtime, the JWT token, and the signing key used
/// for the login challenge.
pub struct CoordinatorClient<'a> {
    api: API,
    // JWT obtained at login; always Some after `new` succeeds.
    token: Option<String>,
    config: &'a Config,
    key_signer: Rc<KeySigner>,
    // Current-thread runtime used to block on the async API calls.
    rt: Runtime,
    // Notified after each successful proof submission (e.g. to clear caches).
    listener: Box<dyn Listener>,
}
impl<'a> CoordinatorClient<'a> {
    /// Build the client and immediately perform the challenge/login handshake;
    /// fails if the coordinator is unreachable or rejects the login.
    pub fn new(
        config: &'a Config,
        key_signer: Rc<KeySigner>,
        listener: Box<dyn Listener>,
    ) -> Result<Self> {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        let api = API::new(&config.coordinator.base_url,
            core::time::Duration::from_secs(config.coordinator.connection_timeout_sec),
            config.coordinator.retry_count,
            config.coordinator.retry_wait_time_sec
        )?;
        let mut client = Self {
            api,
            token: None,
            config,
            key_signer,
            rt,
            listener,
        };
        client.login()?;
        Ok(client)
    }

    /// Two-step auth: fetch a challenge token, sign it (RLP-encoded with the
    /// prover name/version) and exchange the signature for a session JWT,
    /// which is stored in `self.token`.
    fn login(&mut self) -> Result<()> {
        let api = &self.api;
        let challenge_response = self.rt.block_on(api.challenge())?;
        if challenge_response.errcode != ErrorCode::Success {
            bail!("challenge failed: {}", challenge_response.errmsg)
        }
        let mut token: String;
        if let Some(r) = challenge_response.data {
            token = r.token;
        } else {
            bail!("challenge failed: got empty token")
        }
        let login_message = LoginMessage {
            challenge: token.clone(),
            prover_name: self.config.prover_name.clone(),
            prover_version: crate::version::get_version(),
        };
        let buffer = login_message.rlp();
        let signature = self.key_signer.sign_buffer(&buffer)?;
        let login_request = LoginRequest {
            message: login_message,
            signature,
        };
        // The challenge token authenticates the login call itself; it is then
        // replaced by the session token from the login response.
        let login_response = self.rt.block_on(api.login(&login_request, &token))?;
        if login_response.errcode != ErrorCode::Success {
            bail!("login failed: {}", login_response.errmsg)
        }
        if let Some(r) = login_response.data {
            token = r.token;
        } else {
            bail!("login failed: got empty token")
        }
        self.token = Some(token);
        Ok(())
    }

    /// Run `f`, transparently re-logging-in and retrying when the coordinator
    /// reports an expired JWT; any other non-success errcode is an error.
    // NOTE(review): recursion only terminates when re-login yields a token the
    // coordinator accepts; a coordinator that always returns ErrJWTTokenExpired
    // would recurse indefinitely — confirm acceptable.
    fn action_with_re_login<T, F, R>(&mut self, req: &R, mut f: F) -> Result<Response<T>>
    where
        F: FnMut(&mut Self, &R) -> Result<Response<T>>,
    {
        let response = f(self, req)?;
        if response.errcode == ErrorCode::ErrJWTTokenExpired {
            log::info!("JWT expired, attempting to re-login");
            self.login().context("JWT expired, re-login failed")?;
            log::info!("re-login success");
            return self.action_with_re_login(req, f);
        } else if response.errcode != ErrorCode::Success {
            bail!("action failed: {}", response.errmsg)
        }
        Ok(response)
    }

    // Raw get_task call; token is Some after construction, so unwrap is safe.
    fn do_get_task(&mut self, req: &GetTaskRequest) -> Result<Response<GetTaskResponseData>> {
        self.rt
            .block_on(self.api.get_task(req, self.token.as_ref().unwrap()))
    }

    /// Request a proving task, re-logging-in on JWT expiry.
    pub fn get_task(&mut self, req: &GetTaskRequest) -> Result<Response<GetTaskResponseData>> {
        self.action_with_re_login(req, |s, req| s.do_get_task(req))
    }

    // Raw submit_proof call; notifies the listener after a successful send.
    fn do_submit_proof(
        &mut self,
        req: &SubmitProofRequest,
    ) -> Result<Response<SubmitProofResponseData>> {
        let response = self
            .rt
            .block_on(self.api.submit_proof(req, &self.token.as_ref().unwrap()))?;
        self.listener.on_proof_submitted(req);
        Ok(response)
    }

    /// Submit a finished (or failed) proof, re-logging-in on JWT expiry.
    pub fn submit_proof(
        &mut self,
        req: &SubmitProofRequest,
    ) -> Result<Response<SubmitProofResponseData>> {
        self.action_with_re_login(req, |s, req| s.do_submit_proof(req))
    }
}

View File

@@ -0,0 +1,121 @@
use super::types::*;
use anyhow::{bail, Result};
use reqwest::{header::CONTENT_TYPE, Url};
use serde::Serialize;
use core::time::Duration;
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};
/// Thin async HTTP client for the coordinator's REST endpoints, with
/// retry middleware baked into the underlying reqwest client.
pub struct API {
    url_base: Url,
    // Applied per request via `.timeout(...)`.
    send_timeout: Duration,
    pub client: ClientWithMiddleware,
}
impl API {
    /// Build a coordinator API client with retry middleware.
    ///
    /// Transient failures are retried up to `retry_count` times with an
    /// exponential backoff bounded between `retry_wait_time_sec / 2` and
    /// `retry_wait_time_sec`; `send_timeout` bounds each individual request.
    ///
    /// # Errors
    /// Fails when `url_base` is not a valid URL.
    pub fn new(
        url_base: &str,
        send_timeout: Duration,
        retry_count: u32,
        retry_wait_time_sec: u64,
    ) -> Result<Self> {
        let retry_wait_duration = Duration::from_secs(retry_wait_time_sec);
        let retry_policy = ExponentialBackoff::builder()
            .retry_bounds(retry_wait_duration / 2, retry_wait_duration)
            .build_with_max_retries(retry_count);
        let client = ClientBuilder::new(reqwest::Client::new())
            .with(RetryTransientMiddleware::new_with_policy(retry_policy))
            .build();
        Ok(Self {
            url_base: Url::parse(url_base)?,
            send_timeout,
            client,
        })
    }

    /// GET /coordinator/v1/challenge — fetch a login challenge token.
    pub async fn challenge(&self) -> Result<Response<ChallengeResponseData>> {
        let method = "/coordinator/v1/challenge";
        let url = self.build_url(method)?;
        let response = self
            .client
            .get(url)
            .header(CONTENT_TYPE, "application/json")
            .timeout(self.send_timeout)
            .send()
            .await?;
        let response_body = response.text().await?;
        serde_json::from_str(&response_body).map_err(|e| anyhow::anyhow!(e))
    }

    /// POST /coordinator/v1/login — exchange a signed challenge for a JWT.
    pub async fn login(
        &self,
        req: &LoginRequest,
        token: &str,
    ) -> Result<Response<LoginResponseData>> {
        self.post_with_token("/coordinator/v1/login", req, token).await
    }

    /// POST /coordinator/v1/get_task — request a proving task.
    pub async fn get_task(
        &self,
        req: &GetTaskRequest,
        token: &str,
    ) -> Result<Response<GetTaskResponseData>> {
        self.post_with_token("/coordinator/v1/get_task", req, token).await
    }

    /// POST /coordinator/v1/submit_proof — submit a finished (or failed) proof.
    pub async fn submit_proof(
        &self,
        req: &SubmitProofRequest,
        token: &str,
    ) -> Result<Response<SubmitProofResponseData>> {
        self.post_with_token("/coordinator/v1/submit_proof", req, token).await
    }

    /// Shared POST helper: serializes `req` as JSON, attaches the bearer
    /// `token`, and deserializes the JSON response body into `Resp`.
    ///
    /// # Errors
    /// Fails on network errors, a non-200 status, or an undeserializable body.
    async fn post_with_token<Req, Resp>(&self, method: &str, req: &Req, token: &str) -> Result<Resp>
    where
        Req: ?Sized + Serialize,
        Resp: serde::de::DeserializeOwned,
    {
        let url = self.build_url(method)?;
        let request_body = serde_json::to_string(req)?;
        log::info!("[coordinator client], {method}, request: {request_body}");
        let response = self
            .client
            .post(url)
            .header(CONTENT_TYPE, "application/json")
            .bearer_auth(token)
            .body(request_body)
            .timeout(self.send_timeout)
            .send()
            .await?;
        if response.status() != http::status::StatusCode::OK {
            log::error!(
                "[coordinator client], {method}, status not ok: {}",
                response.status()
            );
            bail!(
                "[coordinator client], {method}, status not ok: {}",
                response.status()
            )
        }
        let response_body = response.text().await?;
        log::info!("[coordinator client], {method}, response: {response_body}");
        serde_json::from_str(&response_body).map_err(|e| anyhow::anyhow!(e))
    }

    /// Join an endpoint path onto the configured base URL.
    fn build_url(&self, method: &str) -> Result<Url> {
        self.url_base.join(method).map_err(|e| anyhow::anyhow!(e))
    }
}

View File

@@ -0,0 +1,54 @@
use serde::{Deserialize, Deserializer};
use log;
/// Error codes returned in the coordinator API response envelope.
// NOTE(review): values mirror the coordinator's Go-side error table (see
// `from_i32`); keep the two in sync when codes are added.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode {
    Success,
    InternalServerError,
    ErrProverStatsAPIParameterInvalidNo,
    ErrProverStatsAPIProverTaskFailure,
    ErrProverStatsAPIProverTotalRewardFailure,
    ErrCoordinatorParameterInvalidNo,
    ErrCoordinatorGetTaskFailure,
    ErrCoordinatorHandleZkProofFailure,
    ErrCoordinatorEmptyProofData,
    ErrJWTCommonErr,
    // Triggers a transparent re-login in CoordinatorClient.
    ErrJWTTokenExpired,
    /// Any code not in the table above; carries the raw value.
    Undefined(i32),
}
impl ErrorCode {
fn from_i32(v: i32) -> Self {
match v {
0 => ErrorCode::Success,
500 => ErrorCode::InternalServerError,
10001 => ErrorCode::ErrProverStatsAPIParameterInvalidNo,
10002 => ErrorCode::ErrProverStatsAPIProverTaskFailure,
10003 => ErrorCode::ErrProverStatsAPIProverTotalRewardFailure,
20001 => ErrorCode::ErrCoordinatorParameterInvalidNo,
20002 => ErrorCode::ErrCoordinatorGetTaskFailure,
20003 => ErrorCode::ErrCoordinatorHandleZkProofFailure,
20004 => ErrorCode::ErrCoordinatorEmptyProofData,
50000 => ErrorCode::ErrJWTCommonErr,
50001 => ErrorCode::ErrJWTTokenExpired,
_ => {
log::error!("get unexpected error code from coordinator: {v}");
ErrorCode::Undefined(v)
}
}
}
}
// Manual Deserialize: the wire format is a bare integer, which is converted
// through `from_i32` so unknown codes become `Undefined` instead of errors.
impl<'de> Deserialize<'de> for ErrorCode {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: i32 = i32::deserialize(deserializer)?;
        Ok(ErrorCode::from_i32(v))
    }
}

View File

@@ -0,0 +1,5 @@
use super::SubmitProofRequest;
/// Hook invoked by `CoordinatorClient` after a proof submission request has
/// been sent successfully (used e.g. to clear the local task cache).
pub trait Listener {
    fn on_proof_submitted(&self, req: &SubmitProofRequest);
}

View File

@@ -0,0 +1,75 @@
use crate::types::{ProofFailureType, ProofStatus};
use rlp::RlpStream;
use serde::{Deserialize, Serialize};
use super::errors::ErrorCode;
/// Generic envelope wrapping every coordinator API response.
#[derive(Deserialize)]
pub struct Response<T> {
    pub errcode: ErrorCode,
    pub errmsg: String,
    /// Payload; may be `None` even on success, so callers must check.
    pub data: Option<T>,
}
/// The message signed by the prover's key during login.
#[derive(Serialize, Deserialize)]
pub struct LoginMessage {
    /// Challenge token previously issued by the coordinator.
    pub challenge: String,
    pub prover_name: String,
    pub prover_version: String,
}

impl LoginMessage {
    /// RLP-encode the message for signing.
    // NOTE(review): field order (prover_name, prover_version, challenge) must
    // match the coordinator's verification side — confirm before changing.
    pub fn rlp(&self) -> Vec<u8> {
        let mut rlp = RlpStream::new();
        let num_fields = 3;
        rlp.begin_list(num_fields);
        rlp.append(&self.prover_name);
        rlp.append(&self.prover_version);
        rlp.append(&self.challenge);
        rlp.out().freeze().into()
    }
}
/// Login payload: the message plus its hex-encoded secp256k1 signature.
#[derive(Serialize, Deserialize)]
pub struct LoginRequest {
    pub message: LoginMessage,
    pub signature: String,
}

/// Token response shared by the challenge and login endpoints.
#[derive(Serialize, Deserialize)]
pub struct LoginResponseData {
    pub time: String,
    pub token: String,
}

// The challenge endpoint returns the same shape as login.
pub type ChallengeResponseData = LoginResponseData;
/// Task request sent by the prover.
#[derive(Default, Serialize, Deserialize)]
pub struct GetTaskRequest {
    pub task_type: crate::types::ProofType,
    /// Latest synced L2 block height; set only for chunk provers.
    pub prover_height: Option<u64>,
    /// Verification keys of the circuit versions this prover supports.
    pub vks: Vec<String>,
}

/// Task assigned by the coordinator.
#[derive(Serialize, Deserialize)]
pub struct GetTaskResponseData {
    pub uuid: String,
    pub task_id: String,
    pub task_type: crate::types::ProofType,
    /// Opaque task payload; interpretation depends on the task type.
    pub task_data: String,
    /// Selects which circuits handler proves this task.
    pub hard_fork_name: String,
}
/// Proof submission payload; echoes the task identity plus the result.
#[derive(Serialize, Deserialize, Default)]
pub struct SubmitProofRequest {
    pub uuid: String,
    pub task_id: String,
    pub task_type: crate::types::ProofType,
    pub status: ProofStatus,
    pub proof: String,
    /// Set only when `status` reports a failure.
    pub failure_type: Option<ProofFailureType>,
    pub failure_msg: Option<String>,
    pub hard_fork_name: String,
}

/// The submit_proof endpoint returns no payload.
#[derive(Serialize, Deserialize)]
pub struct SubmitProofResponseData {}

View File

@@ -0,0 +1,124 @@
use crate::types::CommonHash;
use anyhow::Result;
use ethers_core::types::BlockNumber;
use tokio::runtime::Runtime;
use ethers_core::types::{H256, U64};
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use std::fmt::Debug;
use ethers_providers::{Http, Provider};
// ======================= types ============================
/// L2 block full trace which tracks to the version in golang.
///
/// The inner block_trace is a generic type, whose real implementation
/// lies in two version's zkevm-circuits library.
///
/// The inner block_trace missed some fields compared to the go version.
/// These fields are defined here for clarity although not used.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct BlockTrace<T> {
    // Version-specific trace type, flattened into the same JSON object.
    #[serde(flatten)]
    pub block_trace: T,
    pub version: String,
    pub withdraw_trie_root: Option<CommonHash>,
    #[serde(rename = "mptwitness", default)]
    pub mpt_witness: Vec<u8>,
}
pub type TxHash = H256;

/// this struct is tracked to https://github.com/scroll-tech/go-ethereum/blob/0f0cd99f7a2e/core/types/block.go#Header
/// the detail fields of struct are not 100% same as ethers_core::types::Block so this needs to be changed in
/// some time currently only the `number` field is required
#[derive(Debug, Deserialize, Serialize, Default)]
pub struct Header {
    #[serde(flatten)]
    block: ethers_core::types::Block<TxHash>,
}

impl Header {
    /// Block number; `None` for a pending block per ethers' Block type.
    pub fn get_number(&self) -> Option<U64> {
        self.block.number
    }
}
/// Serialize a type into a `serde_json::Value` (used to build RPC params).
///
/// # Panics
///
/// If the type returns an error during serialization.
pub fn serialize<T: serde::Serialize>(t: &T) -> serde_json::Value {
    serde_json::to_value(t).expect("Types never fail to serialize.")
}
// ======================= geth client ============================
/// Blocking JSON-RPC client for an l2geth node; wraps an async ethers
/// provider behind its own current-thread tokio runtime.
pub struct GethClient {
    // Identifier used only to prefix log lines (the prover name).
    id: String,
    provider: Provider<Http>,
    rt: Runtime,
}

impl GethClient {
    /// Create a client for the node at `api_url`; fails on an invalid URL or
    /// if the runtime cannot be built.
    pub fn new(id: &str, api_url: &str) -> Result<Self> {
        let provider = Provider::<Http>::try_from(api_url)?;
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        Ok(Self {
            id: id.to_string(),
            provider,
            rt,
        })
    }

    /// Fetch the full block trace for `hash` via the custom
    /// `scroll_getBlockTraceByNumberOrHash` RPC.
    pub fn get_block_trace_by_hash<T>(&mut self, hash: &CommonHash) -> Result<BlockTrace<T>>
    where
        T: Serialize + DeserializeOwned + Debug + Send,
    {
        log::info!(
            "{}: calling get_block_trace_by_hash, hash: {:#?}",
            self.id,
            hash
        );
        let trace_future = self
            .provider
            .request("scroll_getBlockTraceByNumberOrHash", [format!("{hash:#x}")]);
        let trace = self.rt.block_on(trace_future)?;
        Ok(trace)
    }

    /// Fetch a block header (without transaction bodies) by block number.
    pub fn header_by_number(&mut self, block_number: &BlockNumber) -> Result<Header> {
        // NOTE(review): log label says "hash" but the value is a block number.
        log::info!(
            "{}: calling header_by_number, hash: {:#?}",
            self.id,
            block_number
        );
        let hash = serialize(block_number);
        let include_txs = serialize(&false);
        let trace_future = self
            .provider
            .request("eth_getBlockByNumber", [hash, include_txs]);
        let trace = self.rt.block_on(trace_future)?;
        Ok(trace)
    }

    /// Fetch the node's latest block number via `eth_blockNumber`.
    pub fn block_number(&mut self) -> Result<BlockNumber> {
        log::info!("{}: calling block_number", self.id);
        let trace_future = self.provider.request("eth_blockNumber", ());
        let trace = self.rt.block_on(trace_future)?;
        Ok(trace)
    }
}

View File

@@ -0,0 +1,103 @@
use std::path::Path;
use anyhow::Result;
use ethers_core::{
k256::{
ecdsa::{signature::hazmat::PrehashSigner, RecoveryId, Signature, SigningKey},
elliptic_curve::{sec1::ToEncodedPoint, FieldBytes},
PublicKey, Secp256k1, SecretKey,
},
types::Signature as EthSignature,
};
use ethers_core::types::{H256, U256};
use hex::ToHex;
use tiny_keccak::{Hasher, Keccak};
/// secp256k1 signer backed by an encrypted keystore file on disk; signs the
/// coordinator login challenge and exposes the compressed public key.
pub struct KeySigner {
    public_key: PublicKey,
    signer: SigningKey,
}
impl KeySigner {
    /// Load the key at `key_path`, creating a fresh keystore (encrypted with
    /// `passwd`) when the file does not yet exist.
    pub fn new(key_path: &str, passwd: &str) -> Result<Self> {
        let p = Path::new(key_path);
        let secret = if !p.exists() {
            log::info!("[key_signer] key_path not exists, create one");
            // eth_keystore writes the new keystore into the parent directory
            // under the given file name.
            let dir = p.parent().unwrap();
            let name = p.file_name().and_then(|s| s.to_str());
            let mut rng = rand::thread_rng();
            let (secret, _) = eth_keystore::new(dir, &mut rng, passwd, name)?;
            secret
        } else {
            log::info!("[key_signer] key_path already exists, load it");
            eth_keystore::decrypt_key(key_path, passwd).map_err(|e| anyhow::anyhow!(e))?
        };
        let secret_key = SecretKey::from_bytes(secret.as_slice().into())?;
        let signer = SigningKey::from(secret_key.clone());
        Ok(Self {
            public_key: secret_key.public_key(),
            signer,
        })
    }

    /// Compressed (SEC1) public key as a hex string without "0x" prefix.
    pub fn get_public_key(&self) -> String {
        let v: Vec<u8> = Vec::from(self.public_key.to_encoded_point(true).as_bytes());
        buffer_to_hex(&v, false)
    }

    /// Signs the provided hash.
    pub fn sign_hash(&self, hash: H256) -> Result<EthSignature> {
        let signer = &self.signer as &dyn PrehashSigner<(Signature, RecoveryId)>;
        let (recoverable_sig, recovery_id) = signer.sign_prehash(hash.as_ref())?;
        // Ethereum-style signature: recovery id carried in `v`, r/s as U256.
        let v = u8::from(recovery_id) as u64;
        let r_bytes: FieldBytes<Secp256k1> = recoverable_sig.r().into();
        let s_bytes: FieldBytes<Secp256k1> = recoverable_sig.s().into();
        let r = U256::from_big_endian(r_bytes.as_slice());
        let s = U256::from_big_endian(s_bytes.as_slice());
        Ok(EthSignature { r, s, v })
    }

    /// Keccak-256 the buffer, sign the digest, and return the signature as a
    /// "0x"-prefixed hex string.
    pub fn sign_buffer<T>(&self, buffer: &T) -> Result<String>
    where
        T: AsRef<[u8]>,
    {
        let pre_hash = keccak256(buffer);
        let hash = H256::from(pre_hash);
        let sig = self.sign_hash(hash)?;
        Ok(buffer_to_hex(&sig.to_vec(), true))
    }
}
fn buffer_to_hex<T>(buffer: &T, has_prefix: bool) -> String
where
T: AsRef<[u8]>,
{
if has_prefix {
format!("0x{}", buffer.encode_hex::<String>())
} else {
buffer.encode_hex::<String>()
}
}
/// Compute the Keccak-256 hash of input bytes.
///
/// Note that strings are interpreted as UTF-8 bytes,
pub fn keccak256<T: AsRef<[u8]>>(bytes: T) -> [u8; 32] {
    let mut hasher = Keccak::v256();
    hasher.update(bytes.as_ref());
    let mut digest = [0u8; 32];
    hasher.finalize(&mut digest);
    digest
}

76
prover_rust/src/main.rs Normal file
View File

@@ -0,0 +1,76 @@
mod config;
mod coordinator_client;
mod geth_client;
mod key_signer;
mod prover;
mod task_cache;
mod task_processor;
mod types;
mod utils;
mod version;
mod zk_circuits_handler;
use clap::{Parser, ArgAction};
use anyhow::Result;
use config::{Config, AssetsDirEnvConfig};
use prover::Prover;
use std::rc::Rc;
use task_cache::{ClearCacheCoordinatorListener, TaskCache};
use task_processor::TaskProcessor;
use log;
/// Command-line arguments for the prover binary.
// (Replaces the stale clap-template doc "Simple program to greet a person".)
#[derive(Parser, Debug)]
#[clap(disable_version_flag = true)]
struct Args {
    /// Path of config file
    #[arg(long = "config", default_value = "conf/config.json")]
    config_file: String,
    /// Version of this prover
    #[arg(short, long, action = ArgAction::SetTrue)]
    version: bool,
    /// Path of log file
    #[arg(long = "log.file")]
    log_file: Option<String>,
}
/// Entry point: parse args, load config, wire up the task cache, coordinator
/// listener and prover, then run the task-processing loop forever.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::parse();
    // --version short-circuits before any config is read.
    if args.version {
        println!("version is {}", version::get_version());
        std::process::exit(0);
    }
    utils::log_init(args.log_file);
    let config: Config = Config::from_file(args.config_file)?;
    // A misconfigured SCROLL_PROVER_ASSETS_DIR is fatal: the two circuit
    // versions cannot be selected without it.
    if let Err(e) = AssetsDirEnvConfig::init() {
        log::error!("AssetsDirEnvConfig init failed: {:#}", e);
        std::process::exit(-2);
    }
    let task_cache = Rc::new(TaskCache::new(&config.db_path)?);
    // Listener clears the local task cache after each successful submission.
    let coordinator_listener = Box::new(ClearCacheCoordinatorListener {
        task_cache: task_cache.clone(),
    });
    let prover = Prover::new(&config, coordinator_listener)?;
    log::info!(
        "prover start successfully. name: {}, type: {:?}, publickey: {}, version: {}",
        config.prover_name,
        config.proof_type,
        prover.get_public_key(),
        version::get_version(),
    );
    let task_processor = TaskProcessor::new(&prover, task_cache);
    // Blocks indefinitely; Ok(()) below is reached only if start() returns.
    task_processor.start();
    Ok(())
}

177
prover_rust/src/prover.rs Normal file
View File

@@ -0,0 +1,177 @@
use anyhow::{bail, Context, Error, Ok, Result};
use ethers_core::types::U64;
use std::{cell::RefCell, rc::Rc};
use log;
use crate::{
config::Config,
coordinator_client::{
listener::Listener, types::*, CoordinatorClient,
},
geth_client::GethClient,
key_signer::KeySigner,
types::{ProofFailureType, ProofStatus, ProofType},
zk_circuits_handler::{CircuitsHandler, CircuitsHandlerProvider},
};
use super::types::{ProofDetail, Task};
/// Core prover object: fetches tasks from the coordinator, dispatches them to
/// the right circuits handler by hard-fork name, and submits results.
pub struct Prover<'a> {
    config: &'a Config,
    key_signer: Rc<KeySigner>,
    // RefCell: handlers are swapped per hard fork from &self methods.
    circuits_handler_provider: RefCell<CircuitsHandlerProvider<'a>>,
    coordinator_client: RefCell<CoordinatorClient<'a>>,
    /// Present only for chunk provers (needs the l2geth block height).
    geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl<'a> Prover<'a> {
/// Build a prover from config: loads the signing key, logs in to the
/// coordinator, and (for chunk provers) connects to l2geth.
pub fn new(config: &'a Config, coordinator_listener: Box<dyn Listener>) -> Result<Self> {
    let proof_type = config.proof_type;
    let keystore_path = &config.keystore_path;
    let keystore_password = &config.keystore_password;
    let key_signer = Rc::new(KeySigner::new(&keystore_path, &keystore_password)?);
    let coordinator_client = CoordinatorClient::new(
        config,
        Rc::clone(&key_signer),
        coordinator_listener,
    ).context("failed to create coordinator_client")?;
    // l2geth is only needed for chunk proving, where the prover reports its
    // latest synced block height when requesting tasks.
    // NOTE(review): panics if proof_type is chunk but `l2geth` is absent from
    // the config — consider a bail with a clear message.
    let geth_client = if config.proof_type == ProofType::ProofTypeChunk {
        Some(Rc::new(RefCell::new(GethClient::new(
            &config.prover_name,
            &config.l2geth.as_ref().unwrap().endpoint,
        ).context("failed to create l2 geth_client")?)))
    } else {
        None
    };
    let provider = CircuitsHandlerProvider::new(
        proof_type,
        config,
        geth_client.clone(),
    ).context("failed to create circuits handler provider")?;
    let prover = Prover {
        config,
        key_signer: Rc::clone(&key_signer),
        circuits_handler_provider: RefCell::new(provider),
        coordinator_client: RefCell::new(coordinator_client),
        geth_client,
    };
    Ok(prover)
}
pub fn get_proof_type(&self) -> ProofType {
self.config.proof_type
}
pub fn get_public_key(&self) -> String {
self.key_signer.get_public_key()
}
pub fn fetch_task(&self) -> Result<Task> {
log::info!("[prover] start to fetch_task");
let mut req = GetTaskRequest {
task_type: self.get_proof_type(),
prover_height: None,
vks: self.circuits_handler_provider.borrow().get_vks(),
};
if self.get_proof_type() == ProofType::ProofTypeChunk {
let latest_block_number = self.get_latest_block_number_value()?;
if let Some(v) = latest_block_number {
if v.as_u64() == 0 {
bail!("omit to prove task of the genesis block")
}
req.prover_height = Some(v.as_u64());
} else {
log::error!("[prover] failed to fetch latest confirmed block number, got None");
bail!("failed to fetch latest confirmed block number, got None")
}
}
let resp = self.coordinator_client.borrow_mut().get_task(&req)?;
match resp.data {
Some(d) => Ok(Task::from(d)),
None => {
bail!("data of get_task empty, while error_code is success. there may be something wrong in response data or inner logic.")
}
}
}
pub fn prove_task(&self, task: &Task) -> Result<ProofDetail> {
log::info!("[prover] start to prove_task, task id: {}", task.id);
self.circuits_handler_provider.borrow_mut().clear_circuits_handler();
if let Some(handler) = self.circuits_handler_provider.borrow_mut().get_circuits_handler(&task.hard_fork_name) {
self.do_prove(task, handler)
} else {
log::error!("failed to get a circuit handler");
bail!("failed to get a circuit handler")
}
}
fn do_prove(&self, task: &Task, handler: &Box<dyn CircuitsHandler>) -> Result<ProofDetail> {
let mut proof_detail = ProofDetail {
id: task.id.clone(),
proof_type: task.task_type,
..Default::default()
};
proof_detail.proof_data = handler.get_proof_data(task.task_type, task)?;
Ok(proof_detail)
}
pub fn submit_proof(&self, proof_detail: ProofDetail, task: &Task) -> Result<()> {
log::info!("[prover] start to submit_proof, task id: {}", proof_detail.id);
let request = SubmitProofRequest {
uuid: task.uuid.clone(),
task_id: proof_detail.id,
task_type: proof_detail.proof_type,
status: ProofStatus::Ok,
proof: proof_detail.proof_data,
hard_fork_name: task.hard_fork_name.clone(),
..Default::default()
};
self.do_submit(&request)
}
pub fn submit_error(
&self,
task: &Task,
failure_type: ProofFailureType,
error: Error,
) -> Result<()> {
log::info!("[prover] start to submit_error, task id: {}", task.id);
let request = SubmitProofRequest {
uuid: task.uuid.clone(),
task_id: task.id.clone(),
task_type: task.task_type,
status: ProofStatus::Error,
failure_type: Some(failure_type),
failure_msg: Some(error.to_string()),
hard_fork_name: task.hard_fork_name.clone(),
..Default::default()
};
self.do_submit(&request)
}
fn do_submit(&self, request: &SubmitProofRequest) -> Result<()> {
self.coordinator_client.borrow_mut().submit_proof(request)?;
Ok(())
}
fn get_latest_block_number_value(&self) -> Result<Option<U64>> {
let number = self
.geth_client
.as_ref()
.unwrap()
.borrow_mut()
.block_number()?;
Ok(number.as_number())
}
}

View File

@@ -0,0 +1,61 @@
use anyhow::{Ok, Result};
use super::coordinator_client::{listener::Listener, types::SubmitProofRequest};
use crate::types::TaskWrapper;
use sled::{Config, Db};
use std::rc::Rc;
use log;
/// Persistent (sled-backed) store of in-flight tasks, so work in progress
/// survives a prover restart.
pub struct TaskCache {
    db: Db,
}
impl TaskCache {
    /// Opens (or creates) the sled database at `db_path`.
    pub fn new(db_path: &str) -> Result<Self> {
        let db = Config::new().path(db_path).open()?;
        log::info!("[task_cache] initiate successfully to {db_path}");
        Ok(Self { db })
    }

    /// Stores (or overwrites) a task, keyed by its task id.
    pub fn put_task(&self, task_wrapper: &TaskWrapper) -> Result<()> {
        let value = serde_json::to_vec(task_wrapper)?;
        // Key directly off the id bytes -- the former `clone().into_bytes()`
        // was a redundant allocation per write.
        self.db.insert(task_wrapper.task.id.as_bytes(), value)?;
        log::info!("[task_cache] put_task with task_id: {}", task_wrapper.task.id);
        Ok(())
    }

    /// Returns the entry with the greatest key, if any (sled keeps keys in
    /// lexicographic order, so `last` picks the largest task id).
    pub fn get_last_task(&self) -> Result<Option<TaskWrapper>> {
        if let Some((k, v)) = self.db.last()? {
            let kk = std::str::from_utf8(k.as_ref())?;
            let task_wrapper: TaskWrapper = serde_json::from_slice(v.as_ref())?;
            log::info!(
                "[task_cache] get_last_task with task_id: {kk}, count: {}",
                task_wrapper.get_count()
            );
            return Ok(Some(task_wrapper));
        }
        Ok(None)
    }

    /// Removes a task entry; removing a missing key is not an error in sled.
    pub fn delete_task(&self, task_id: String) -> Result<()> {
        // Borrow the bytes for the key instead of cloning the whole String.
        self.db.remove(task_id.as_bytes())?;
        log::info!("[task cache] delete_task with task_id: {task_id}");
        Ok(())
    }
}
// ========================= listener ===========================
/// Coordinator-client listener that removes a task from the local cache as
/// soon as its proof (or error report) has been submitted.
pub struct ClearCacheCoordinatorListener {
    pub task_cache: Rc<TaskCache>,
}
impl Listener for ClearCacheCoordinatorListener {
    /// Once a proof is submitted the task must not be retried, so drop it
    /// from the cache; a failed delete is only logged, never fatal.
    fn on_proof_submitted(&self, req: &SubmitProofRequest) {
        if let Err(e) = self.task_cache.delete_task(req.task_id.clone()) {
            log::error!("delete task from embed db failed, {:#}", e);
        }
    }
}

View File

@@ -0,0 +1,81 @@
use super::{prover::Prover, task_cache::TaskCache};
use anyhow::{Context, Result};
use log;
use std::rc::Rc;
/// Endless prove-and-submit loop driving a single `Prover`, with the task
/// cache used for crash recovery and retry accounting.
pub struct TaskProcessor<'a> {
    prover: &'a Prover<'a>,
    task_cache: Rc<TaskCache>,
}
impl<'a> TaskProcessor<'a> {
pub fn new(prover: &'a Prover<'a>, task_cache: Rc<TaskCache>) -> Self {
TaskProcessor { prover, task_cache }
}
pub fn start(&self) {
loop {
log::info!("start a new round.");
if let Err(err) = self.prove_and_submit() {
log::error!("encounter error: {:#}", err);
} else {
log::info!("prove & submit succeed.");
}
}
}
fn prove_and_submit(&self) -> Result<()> {
let task_from_cache = self
.task_cache
.get_last_task()
.context("failed to peek from stack")?;
let mut task_wrapper = match task_from_cache {
Some(t) => t,
None => {
let fetch_result = self.prover.fetch_task();
if let Err(err) = fetch_result {
std::thread::sleep(core::time::Duration::from_secs(10));
return Err(err).context("failed to fetch task from coordinator");
}
fetch_result.unwrap().into()
}
};
if task_wrapper.get_count() <= 2 {
task_wrapper.increment_count();
self.task_cache
.put_task(&task_wrapper)
.context("failed to push task into stack, updating count")?;
log::info!(
"start to prove task, task_type: {:?}, task_id: {}",
task_wrapper.task.task_type,
task_wrapper.task.id
);
let result = match self.prover.prove_task(&task_wrapper.task) {
Ok(proof_detail) => self
.prover
.submit_proof(proof_detail, &task_wrapper.task),
Err(error) => self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
),
};
return result;
}
// if tried times >= 3, it's probably due to circuit proving panic
log::error!(
"zk proving panic for task, task_type: {:?}, task_id: {}",
task_wrapper.task.task_type,
task_wrapper.task.id
);
self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::Panic,
anyhow::anyhow!("zk proving panic for task"),
)
}
}

195
prover_rust/src/types.rs Normal file
View File

@@ -0,0 +1,195 @@
use ethers_core::types::H256;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::coordinator_client::types::GetTaskResponseData;
pub type CommonHash = H256;
/// Kind of proving work a task asks for, as exchanged with the coordinator.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofType {
    ProofTypeUndefined,
    ProofTypeChunk,
    ProofTypeBatch,
}

impl ProofType {
    /// Decode the numeric wire value; anything unknown maps to Undefined.
    fn from_u8(v: u8) -> Self {
        match v {
            1 => Self::ProofTypeChunk,
            2 => Self::ProofTypeBatch,
            _ => Self::ProofTypeUndefined,
        }
    }
}
impl Serialize for ProofType {
    /// Serializes as the numeric wire value (0/1/2).
    ///
    /// Uses `serialize_u8` so the type round-trips with its own `Deserialize`
    /// impl (which reads a `u8`) and matches `ProofFailureType`/`ProofStatus`;
    /// the previous `serialize_i8` was inconsistent. JSON output is unchanged.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProofType::ProofTypeUndefined => serializer.serialize_u8(0),
            ProofType::ProofTypeChunk => serializer.serialize_u8(1),
            ProofType::ProofTypeBatch => serializer.serialize_u8(2),
        }
    }
}
impl<'de> Deserialize<'de> for ProofType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Decode the numeric wire value through the shared helper.
        u8::deserialize(deserializer).map(ProofType::from_u8)
    }
}
impl Default for ProofType {
    // Tasks carrying no explicit type deserialize to Undefined.
    fn default() -> Self {
        Self::ProofTypeUndefined
    }
}
/// A proving task handed out by the coordinator.
#[derive(Serialize, Deserialize, Default)]
pub struct Task {
    pub uuid: String,
    pub id: String,
    // Wire field is named "type"; missing values fall back to Undefined.
    #[serde(rename = "type", default)]
    pub task_type: ProofType,
    // JSON payload whose schema depends on task_type (block hashes for
    // chunk tasks; chunk infos + proofs for batch tasks).
    pub task_data: String,
    // Selects the circuits handler; defaults to empty when absent.
    #[serde(default)]
    pub hard_fork_name: String,
}
impl From<GetTaskResponseData> for Task {
fn from(value: GetTaskResponseData) -> Self {
Self {
uuid: value.uuid,
id: value.task_id,
task_type: value.task_type,
task_data: value.task_data,
hard_fork_name: value.hard_fork_name,
}
}
}
/// A task plus the number of local proving attempts; serialized into the
/// task cache so the retry count survives restarts.
#[derive(Serialize, Deserialize, Default)]
pub struct TaskWrapper {
    pub task: Task,
    // Attempts made so far; compared against the retry limit in TaskProcessor.
    count: usize,
}
impl TaskWrapper {
    // Record one more proving attempt.
    pub fn increment_count(&mut self) {
        self.count += 1;
    }
    // Number of proving attempts recorded so far.
    pub fn get_count(&self) -> usize {
        self.count
    }
}
impl From<Task> for TaskWrapper {
    // A freshly fetched task starts with zero attempts.
    fn from(task: Task) -> Self {
        TaskWrapper { task, count: 0 }
    }
}
/// Result payload of a single proving run, submitted to the coordinator.
#[derive(Serialize, Deserialize, Default)]
pub struct ProofDetail {
    pub id: String,
    #[serde(rename = "type", default)]
    pub proof_type: ProofType,
    // JSON-serialized chunk or batch proof.
    pub proof_data: String,
    pub error: String,
}
/// Classifies a failed proving attempt: a panic (do not retry) versus an
/// ordinary, reportable error.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofFailureType {
    Undefined,
    Panic,
    NoPanic,
}

impl ProofFailureType {
    /// Decode the numeric wire value; unknown values become Undefined.
    fn from_u8(v: u8) -> Self {
        match v {
            1 => Self::Panic,
            2 => Self::NoPanic,
            _ => Self::Undefined,
        }
    }
}
impl Serialize for ProofFailureType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Declaration order fixes the discriminants:
        // Undefined = 0, Panic = 1, NoPanic = 2 -- same mapping as before.
        serializer.serialize_u8(*self as u8)
    }
}
impl<'de> Deserialize<'de> for ProofFailureType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Decode the numeric wire value through the shared helper.
        u8::deserialize(deserializer).map(ProofFailureType::from_u8)
    }
}
impl Default for ProofFailureType {
    // Absent failure types deserialize to Undefined.
    fn default() -> Self {
        Self::Undefined
    }
}
/// Outcome flag reported to the coordinator with a proof submission.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofStatus {
    Ok,
    Error,
}

impl ProofStatus {
    /// Decode the numeric wire value: 0 is success, anything else an error.
    fn from_u8(v: u8) -> Self {
        if v == 0 {
            Self::Ok
        } else {
            Self::Error
        }
    }
}
impl Serialize for ProofStatus {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Declaration order fixes the discriminants: Ok = 0, Error = 1 --
        // the same mapping the explicit match encoded.
        serializer.serialize_u8(*self as u8)
    }
}
impl<'de> Deserialize<'de> for ProofStatus {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Decode the numeric wire value through the shared helper.
        u8::deserialize(deserializer).map(ProofStatus::from_u8)
    }
}
impl Default for ProofStatus {
    // Submissions default to the success status.
    fn default() -> Self {
        Self::Ok
    }
}

24
prover_rust/src/utils.rs Normal file
View File

@@ -0,0 +1,24 @@
use env_logger::Env;
use std::sync::Once;
use std::fs::OpenOptions;
// Guards against double initialization of the global logger.
static LOG_INIT: Once = Once::new();

/// Initialize the global logger exactly once.
///
/// Default level is `info` (overridable via `RUST_LOG`). When `log_file` is
/// given, output is redirected to that file instead of stderr.
pub fn log_init(log_file: Option<String>) {
    LOG_INIT.call_once(|| {
        let mut builder = env_logger::Builder::from_env(Env::default().default_filter_or("info"));
        if let Some(file_path) = log_file {
            // Open in append mode: the previous write(true)+truncate(false)
            // combination rewound to offset 0 and overwrote the head of an
            // existing log while leaving its stale tail in place.
            let target = Box::new(
                OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(file_path)
                    .expect("Can't create log file"),
            );
            builder.target(env_logger::Target::Pipe(target));
        }
        builder.init();
    });
}

View File

@@ -0,0 +1,18 @@
use std::sync::OnceLock;

static DEFAULT_COMMIT: &str = "unknown";
// Thread-safe, lazily computed version string. Replaces the previous
// `static mut OnceCell`, whose unsynchronized `unsafe` access was a data
// race if get_version() were ever called from two threads.
static VERSION: OnceLock<String> = OnceLock::new();
pub const TAG: &str = "v0.0.0";
pub const DEFAULT_ZK_VERSION: &str = "000000-000000";

/// Compose `<tag>-<commit>-<zk_version>` from compile-time env vars,
/// falling back to the defaults above when a variable is unset.
fn init_version() -> String {
    let commit = option_env!("GIT_REV").unwrap_or(DEFAULT_COMMIT);
    let tag = option_env!("GO_TAG").unwrap_or(TAG);
    let zk_version = option_env!("ZK_VERSION").unwrap_or(DEFAULT_ZK_VERSION);
    format!("{tag}-{commit}-{zk_version}")
}

/// Returns the cached version string, computing it on first use.
pub fn get_version() -> String {
    VERSION.get_or_init(init_version).clone()
}

View File

@@ -0,0 +1,125 @@
mod bernoulli;
mod curie;
use anyhow::Result;
use bernoulli::BaseCircuitsHandler;
use curie::NextCircuitsHandler;
use std::collections::HashMap;
use std::{cell::RefCell, rc::Rc};
use super::geth_client::GethClient;
use crate::{config::{Config, AssetsDirEnvConfig}, types::{ProofType, Task}};
// Key under which a circuits-handler builder is registered: the hard-fork
// name from config / the coordinator.
type HardForkName = String;
pub mod utils {
    /// Base64-encode a raw verification key for transport to the coordinator.
    pub fn encode_vk(vk: Vec<u8>) -> String {
        base64::encode(vk)
    }
}
/// Common interface implemented by every circuit version handler.
pub trait CircuitsHandler {
    // Verification key for the given task type, if this handler has one.
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>>;
    // Run proving for `task` and return the serialized proof.
    fn get_proof_data(&self, task_type: ProofType, task: &Task) -> Result<String>;
}
// Factory registered per hard fork; builds a boxed handler on demand.
type CircuitsHandlerBuilder = fn(
    proof_type: ProofType,
    config: &Config,
    geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Box<dyn CircuitsHandler>>;
/// Holds one builder per supported hard fork and caches at most one live
/// circuits handler, switching it when the requested fork changes.
pub struct CircuitsHandlerProvider<'a> {
    proof_type: ProofType,
    config: &'a Config,
    geth_client: Option<Rc<RefCell<GethClient>>>,
    circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
    current_hard_fork_name: Option<HardForkName>,
    current_circuit: Option<Box<dyn CircuitsHandler>>,
    // Verification keys of all registered handlers, computed once at startup.
    vks: Vec<String>,
}
impl<'a> CircuitsHandlerProvider<'a> {
pub fn new(proof_type: ProofType, config: &'a Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
let mut m: HashMap<HardForkName, CircuitsHandlerBuilder> = HashMap::new();
fn handler_builder(proof_type: ProofType, config: &Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Box<dyn CircuitsHandler>> {
log::info!("now init zk circuits handler, hard_fork_name: {}", &config.low_version_circuit.hard_fork_name);
AssetsDirEnvConfig::enable_first();
BaseCircuitsHandler::new(proof_type,
&config.low_version_circuit.params_path,
&config.low_version_circuit.assets_path,
geth_client
).map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
}
m.insert(config.low_version_circuit.hard_fork_name.clone(), handler_builder);
fn next_handler_builder(proof_type: ProofType, config: &Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Box<dyn CircuitsHandler>> {
log::info!("now init zk circuits handler, hard_fork_name: {}", &config.high_version_circuit.hard_fork_name);
AssetsDirEnvConfig::enable_second();
NextCircuitsHandler::new(proof_type,
&config.high_version_circuit.params_path,
&config.high_version_circuit.assets_path,
geth_client
).map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
}
m.insert(config.high_version_circuit.hard_fork_name.clone(), next_handler_builder);
let vks = CircuitsHandlerProvider::init_vks(proof_type, config, &m, geth_client.clone());
let provider = CircuitsHandlerProvider {
proof_type,
config,
geth_client,
circuits_handler_builder_map: m,
current_hard_fork_name: None,
current_circuit: None,
vks,
};
Ok(provider)
}
pub fn get_circuits_handler(&mut self, hard_fork_name: &String) -> Option<&Box<dyn CircuitsHandler>> {
match &self.current_hard_fork_name {
Some(name) if name == hard_fork_name => {
log::info!("get circuits handler from cache");
(&self.current_circuit).as_ref()
},
_ => {
log::info!("failed to get circuits handler from cache, create a new one: {hard_fork_name}");
let builder = self.circuits_handler_builder_map.get(hard_fork_name);
builder.and_then(|build| {
log::info!("building circuits handler for {hard_fork_name}");
let handler = build(self.proof_type, &self.config, self.geth_client.clone()).expect("failed to build circuits handler");
self.current_hard_fork_name = Some(hard_fork_name.clone());
self.current_circuit = Some(handler);
(&self.current_circuit).as_ref()
} )
}
}
}
pub fn clear_circuits_handler(&mut self) {
log::info!("clear circuits handler to prevent cache, just for test");
self.current_hard_fork_name = None;
self.current_circuit = None;
}
fn init_vks(proof_type: ProofType, config: &'a Config,
circuits_handler_builder_map: &HashMap<HardForkName, CircuitsHandlerBuilder>,
geth_client: Option<Rc<RefCell<GethClient>>>) -> Vec<String> {
circuits_handler_builder_map
.iter()
.map(|(hard_fork_name, build)| {
let handler = build(proof_type, config, geth_client.clone()).expect("failed to build circuits handler");
let vk = handler.get_vk(proof_type)
.map_or("".to_string(), |vk| utils::encode_vk(vk));
log::info!("vk for {hard_fork_name} is {vk}");
vk
})
.collect::<Vec<String>>()
}
pub fn get_vks(&self) -> Vec<String> {
self.vks.clone()
}
}

View File

@@ -0,0 +1,194 @@
use super::CircuitsHandler;
use once_cell::sync::Lazy;
use crate::{geth_client::GethClient, types::ProofType};
use anyhow::{bail, Ok, Result};
use serde::Deserialize;
use crate::types::{Task, CommonHash};
use std::{cell::RefCell, env, cmp::Ordering, rc::Rc};
use prover::{aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver};
use prover::{BlockTrace, ChunkHash, ChunkProof};
// Only used for debugging.
// Optional proof-dump directory, read once from the PROVER_OUTPUT_DIR env var.
pub(crate) static OUTPUT_DIR: Lazy<Option<String>> =
    Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
/// Payload of a batch task (parsed from `Task::task_data`): chunk metadata
/// plus the chunk proofs to aggregate.
#[derive(Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkHash>,
    pub chunk_proofs: Vec<ChunkProof>,
}
/// Payload of a chunk task: the hashes of the blocks to trace and prove.
#[derive(Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}
/// Block number from a trace header, when the header carries one.
fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    let number = block_trace.header.number?;
    Some(number.as_u64())
}
/// Circuits handler for the low-version ("bernoulli") circuit; exactly one
/// of the two provers is populated, chosen by the configured proof type.
pub struct BaseCircuitsHandler {
    chunk_prover: Option<RefCell<ChunkProver>>,
    batch_prover: Option<RefCell<BatchProver>>,
    // Needed only on the chunk path, to fetch block traces.
    geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl BaseCircuitsHandler {
pub fn new(proof_type: ProofType, params_dir: &str, assets_dir: &str, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
match proof_type {
ProofType::ProofTypeChunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProofType::ProofTypeBatch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
_ => bail!("proof type invalid"),
}
}
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
if let Some(prover) = self.chunk_prover.as_ref() {
let chunk_proof = prover
.borrow_mut()
.gen_chunk_proof(chunk_trace, None, None, self.get_output_dir())?;
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
if let Some(prover) = self.batch_prover.as_ref() {
let is_valid = prover
.borrow_mut()
.check_chunk_proofs(&chunk_proofs);
if !is_valid {
bail!("non-match chunk protocol, task-id: {}", &task.id)
}
let batch_proof = prover
.borrow_mut()
.gen_agg_evm_proof(
chunk_hashes_proofs,
None,
self.get_output_dir(),
)?;
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
}
fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkHash, ChunkProof)>> {
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
Ok(batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.chunk_proofs.clone())
.collect())
}
fn get_sorted_traces_by_hashes(
&self,
block_hashes: &Vec<CommonHash>,
) -> Result<Vec<BlockTrace>> {
if block_hashes.len() == 0 {
log::error!("[prover] failed to get sorted traces: block_hashes are empty");
bail!("block_hashes are empty")
}
let mut block_traces = Vec::new();
for (_, hash) in block_hashes.into_iter().enumerate() {
let trace = self
.geth_client
.as_ref()
.unwrap()
.borrow_mut()
.get_block_trace_by_hash(hash)?;
block_traces.push(trace.block_trace);
}
block_traces.sort_by(|a, b| {
if get_block_number(a) == None {
Ordering::Less
} else if get_block_number(b) == None {
Ordering::Greater
} else {
get_block_number(a)
.unwrap()
.cmp(&get_block_number(b).unwrap())
}
});
let block_numbers: Vec<u64> = block_traces
.iter()
.map(|trace| match get_block_number(trace) {
Some(v) => v,
None => 0,
})
.collect();
let mut i = 0;
while i < block_numbers.len() - 1 {
if block_numbers[i] + 1 != block_numbers[i + 1] {
log::error!("[prover] block numbers are not continuous, got {} and {}", block_numbers[i], block_numbers[i + 1]);
bail!(
"block numbers are not continuous, got {} and {}",
block_numbers[i],
block_numbers[i + 1]
)
}
i += 1;
}
Ok(block_traces)
}
}
impl CircuitsHandler for BaseCircuitsHandler {
    /// Verifying key of the prover matching `task_type`, if that prover was
    /// constructed and exposes one.
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
        match task_type {
            ProofType::ProofTypeChunk => self.chunk_prover.as_ref()?.borrow().get_vk(),
            ProofType::ProofTypeBatch => self.batch_prover.as_ref()?.borrow().get_vk(),
            _ => unreachable!(),
        }
    }

    /// Dispatches proving to the chunk or batch path.
    fn get_proof_data(&self, task_type: ProofType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            ProofType::ProofTypeChunk => self.gen_chunk_proof(task),
            ProofType::ProofTypeBatch => self.gen_batch_proof(task),
            _ => unreachable!(),
        }
    }
}

View File

@@ -0,0 +1,198 @@
use super::CircuitsHandler;
use crate::{geth_client::GethClient, types::ProofType};
use anyhow::{bail, Context, Ok, Result};
use serde::Deserialize;
use crate::types::{Task, CommonHash};
use std::{cell::RefCell, cmp::Ordering, rc::Rc};
use prover_next::{BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask, BatchProvingTask};
use prover_next::{aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver};
use super::bernoulli::OUTPUT_DIR;
/// Payload of a batch task (parsed from `Task::task_data`): chunk metadata
/// plus the chunk proofs to aggregate.
#[derive(Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    pub chunk_proofs: Vec<ChunkProof>,
}
/// Payload of a chunk task: the hashes of the blocks to trace and prove.
#[derive(Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}
/// Block number from a trace header, when the header carries one.
fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    let number = block_trace.header.number?;
    Some(number.as_u64())
}
/// Circuits handler for the high-version ("curie") circuit; exactly one of
/// the two provers is populated, chosen by the configured proof type.
#[derive(Default)]
pub struct NextCircuitsHandler {
    chunk_prover: Option<RefCell<ChunkProver>>,
    batch_prover: Option<RefCell<BatchProver>>,
    // Needed only on the chunk path, to fetch block traces.
    geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl NextCircuitsHandler {
pub fn new(proof_type: ProofType, params_dir: &str, assets_dir: &str, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
match proof_type {
ProofType::ProofTypeChunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProofType::ProofTypeBatch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
_ => bail!("proof type invalid"),
}
}
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
let chunk_trace = self.gen_chunk_traces(task)?;
if let Some(prover) = self.chunk_prover.as_ref() {
let chunk = ChunkProvingTask::from(chunk_trace);
let chunk_proof = prover
.borrow_mut()
.gen_chunk_proof(chunk, None, None, self.get_output_dir())?;
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
let is_valid = prover
.borrow_mut()
.check_protocol_of_chunks(&chunk_proofs);
if !is_valid {
bail!("non-match chunk protocol, task-id: {}", &task.id)
}
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch = BatchProvingTask {
chunk_proofs
};
let batch_proof = prover
.borrow_mut()
.gen_agg_evm_proof(
batch,
None,
self.get_output_dir(),
)?;
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
}
unreachable!("please check errors in proof_type logic")
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
}
fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkInfo, ChunkProof)>> {
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
Ok(batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.chunk_proofs.clone())
.collect())
}
fn get_sorted_traces_by_hashes(
&self,
block_hashes: &Vec<CommonHash>,
) -> Result<Vec<BlockTrace>> {
if block_hashes.len() == 0 {
log::error!("[prover] failed to get sorted traces: block_hashes are empty");
bail!("block_hashes are empty")
}
let mut block_traces = Vec::new();
for (_, hash) in block_hashes.into_iter().enumerate() {
let trace = self
.geth_client
.as_ref()
.unwrap()
.borrow_mut()
.get_block_trace_by_hash(hash)?;
block_traces.push(trace.block_trace);
}
block_traces.sort_by(|a, b| {
if get_block_number(a) == None {
Ordering::Less
} else if get_block_number(b) == None {
Ordering::Greater
} else {
get_block_number(a)
.unwrap()
.cmp(&get_block_number(b).unwrap())
}
});
let block_numbers: Vec<u64> = block_traces
.iter()
.map(|trace| match get_block_number(trace) {
Some(v) => v,
None => 0,
})
.collect();
let mut i = 0;
while i < block_numbers.len() - 1 {
if block_numbers[i] + 1 != block_numbers[i + 1] {
log::error!("[prover] block numbers are not continuous, got {} and {}", block_numbers[i], block_numbers[i + 1]);
bail!(
"block numbers are not continuous, got {} and {}",
block_numbers[i],
block_numbers[i + 1]
)
}
i += 1;
}
Ok(block_traces)
}
}
impl CircuitsHandler for NextCircuitsHandler {
    /// Verifying key of the prover matching `task_type`, if that prover was
    /// constructed and exposes one.
    fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
        match task_type {
            ProofType::ProofTypeChunk => self.chunk_prover.as_ref()?.borrow().get_vk(),
            ProofType::ProofTypeBatch => self.batch_prover.as_ref()?.borrow().get_vk(),
            _ => unreachable!(),
        }
    }

    /// Dispatches proving to the chunk or batch path.
    fn get_proof_data(&self, task_type: ProofType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            ProofType::ProofTypeChunk => self.gen_chunk_proof(task),
            ProofType::ProofTypeBatch => self.gen_batch_proof(task),
            _ => unreachable!(),
        }
    }
}