Merge pull request #837 from powdr-labs/clean-up-main-rs

Refactor `main.rs`
This commit is contained in:
Leo
2023-12-12 19:55:19 +00:00
committed by GitHub
9 changed files with 437 additions and 430 deletions

View File

@@ -1,6 +1,7 @@
use std::{
fmt::Display,
fs,
io::{BufWriter, Read, Write},
path::{Path, PathBuf},
time::Instant,
};
@@ -20,11 +21,11 @@ use executor::{
};
use log::Level;
use mktemp::Temp;
use number::FieldElement;
use number::{write_polys_csv_file, write_polys_file, CsvRenderMode, FieldElement};
use crate::{
inputs_to_query_callback,
verify::{write_commits_to_fs, write_constants_to_fs, write_constraints_to_fs},
util::{read_poly_set, FixedPolySet, WitnessPolySet},
};
pub struct GeneratedWitness<T: FieldElement> {
@@ -101,11 +102,23 @@ enum Artifact<T: FieldElement> {
Proof(ProofResult<T>),
}
/// Optional Arguments for various stages of the pipeline.
#[derive(Default)]
struct Arguments<T: FieldElement> {
/// Externally computed witness values for witness generation.
external_witness_values: Vec<(String, Vec<T>)>,
/// Callback for queries for witness generation.
query_callback: Option<Box<dyn QueryCallback<T>>>,
/// Backend to use for proving. If None, proving will fail.
backend: Option<BackendType>,
/// CSV render mode for witness generation.
csv_render_mode: CsvRenderMode,
/// Whether to export the witness as a CSV file.
export_witness_csv: bool,
/// The optional setup file to use for proving.
setup_file: Option<PathBuf>,
/// The optional existing proof file to use for aggregation.
existing_proof_file: Option<PathBuf>,
}
pub struct Pipeline<T: FieldElement> {
@@ -127,6 +140,7 @@ pub struct Pipeline<T: FieldElement> {
// Note that there is some redundancy with `output_dir`, but the Temp
// object has to live for the lifetime of the pipeline, so we keep it here.
tmp_dir: Option<Temp>,
/// Optional arguments for various stages of the pipeline.
arguments: Arguments<T>,
}
@@ -180,7 +194,7 @@ where
/// .with_backend(BackendType::PilStarkCli);
///
/// // Advance to some stage (which might have side effects)
/// pipeline.advance_to(Stage::Proof).unwrap();
/// pipeline.advance_to(Stage::OptimizedPil).unwrap();
///
/// // Get the result
/// let proof = pipeline.proof().unwrap();
@@ -204,47 +218,48 @@ impl<T: FieldElement> Pipeline<T> {
}
pub fn with_external_witness_values(
self,
external_witness_values: Vec<(&str, Vec<T>)>,
mut self,
external_witness_values: Vec<(String, Vec<T>)>,
) -> Self {
Pipeline {
arguments: Arguments {
external_witness_values: external_witness_values
.into_iter()
.map(|(name, values)| (name.to_string(), values))
.collect(),
..self.arguments
},
..self
}
self.arguments.external_witness_values = external_witness_values;
self
}
/// Configures CSV export of the generated columns: whether a CSV file is
/// written at all (`export_witness_csv`) and how field elements are rendered
/// in it (`csv_render_mode`).
pub fn with_witness_csv_settings(
    mut self,
    export_witness_csv: bool,
    csv_render_mode: CsvRenderMode,
) -> Self {
    let args = &mut self.arguments;
    args.csv_render_mode = csv_render_mode;
    args.export_witness_csv = export_witness_csv;
    self
}
pub fn add_query_callback(self, query_callback: Box<dyn QueryCallback<T>>) -> Self {
pub fn add_query_callback(mut self, query_callback: Box<dyn QueryCallback<T>>) -> Self {
let query_callback = match self.arguments.query_callback {
Some(old_callback) => Box::new(chain_callbacks(old_callback, query_callback)),
None => query_callback,
};
Pipeline {
arguments: Arguments {
query_callback: Some(query_callback),
..self.arguments
},
..self
}
self.arguments.query_callback = Some(query_callback);
self
}
/// Convenience helper: registers a query callback that serves prover queries
/// from the given input values (via `inputs_to_query_callback`).
/// Chains with any previously registered callback (see `add_query_callback`).
pub fn with_prover_inputs(self, inputs: Vec<T>) -> Self {
self.add_query_callback(Box::new(inputs_to_query_callback(inputs)))
}
pub fn with_backend(self, backend: BackendType) -> Self {
Pipeline {
arguments: Arguments {
backend: Some(backend),
..self.arguments
},
..self
}
pub fn with_backend(mut self, backend: BackendType) -> Self {
self.arguments.backend = Some(backend);
self
}
/// Sets the optional setup/params file used when creating the proving
/// backend. If `None`, the backend is created from the PIL degree alone.
pub fn with_setup_file(mut self, setup_file: Option<PathBuf>) -> Self {
self.arguments.setup_file = setup_file;
self
}
/// Sets an existing proof file whose bytes are read and passed to the
/// backend's `prove` call, used for proof aggregation. When set, the
/// resulting proof is written as `proof_aggr.bin` instead of `proof.bin`.
pub fn with_existing_proof_file(mut self, existing_proof_file: Option<PathBuf>) -> Self {
self.arguments.existing_proof_file = existing_proof_file;
self
}
pub fn from_file(self, asm_file: PathBuf) -> Self {
@@ -289,6 +304,30 @@ impl<T: FieldElement> Pipeline<T> {
}
}
/// Reads a previously generated witness from the provided directory and
/// advances the pipeline to the `GeneratedWitness` stage.
///
/// Reads the fixed and witness polynomial sets (as previously written by
/// this pipeline) from `directory` via `read_poly_set`.
///
/// # Panics
/// Panics if advancing to the optimized-PIL stage fails, or if the fixed
/// and witness polynomials were generated for different degrees.
pub fn read_generated_witness(mut self, directory: &Path) -> Self {
    self.advance_to(Stage::OptimizedPil)
        .expect("failed to advance pipeline to the OptimizedPil stage");
    // `take()` moves the artifact out while leaving `self` intact, so we can
    // reassign it below without a functional-record-update on a partially
    // moved value. NOTE: `OptimzedPil` (sic) is the variant's actual spelling.
    let pil = match self.artifact.take().unwrap() {
        Artifact::OptimzedPil(pil) => pil,
        _ => panic!("expected optimized PIL artifact after advancing to Stage::OptimizedPil"),
    };
    let (fixed, degree_fixed) = read_poly_set::<FixedPolySet, T>(&pil, directory);
    let (witness, degree_witness) = read_poly_set::<WitnessPolySet, T>(&pil, directory);
    assert_eq!(
        degree_fixed, degree_witness,
        "fixed and witness polynomials must have the same degree"
    );
    self.artifact = Some(Artifact::GeneratedWitness(GeneratedWitness {
        pil,
        constants: fixed,
        witness: Some(witness),
    }));
    self
}
/// Derives the pipeline name from a file path: the file name without its
/// extension (e.g. `foo/bar.asm` -> `bar`). Unwraps assume the path has a
/// file name that is valid UTF-8.
fn name_from_path(path: &Path) -> String {
    let stem = path.file_stem().unwrap();
    let name = stem.to_str().unwrap();
    name.to_owned()
}
@@ -372,7 +411,8 @@ impl<T: FieldElement> Pipeline<T> {
let constants = constants
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect();
.collect::<Vec<_>>();
self.maybe_write_constants(&constants)?;
self.log(&format!("Took {}", start.elapsed().as_secs_f32()));
Artifact::PilWithConstants(PilWithConstants { pil, constants })
}
@@ -398,6 +438,8 @@ impl<T: FieldElement> Pipeline<T> {
.map(|(name, c)| (name.to_string(), c))
.collect::<Vec<_>>()
});
self.maybe_write_witness(&constants, &witness)?;
Artifact::GeneratedWitness(GeneratedWitness {
pil,
constants,
@@ -412,10 +454,20 @@ impl<T: FieldElement> Pipeline<T> {
let backend = self
.arguments
.backend
.take()
.expect("backend must be set before calling proving!");
let factory = backend.factory::<T>();
let backend = factory.create(pil.degree());
let backend = if let Some(path) = self.arguments.setup_file.as_ref() {
let mut file = fs::File::open(path).unwrap();
factory.create_from_setup(&mut file).unwrap()
} else {
factory.create(pil.degree())
};
let existing_proof = self.arguments.existing_proof_file.as_ref().map(|path| {
let mut buf = Vec::new();
fs::File::open(path).unwrap().read_to_end(&mut buf).unwrap();
buf
});
// Even if we don't have all constants and witnesses, some backends will
// still output the constraint serialization.
@@ -423,53 +475,123 @@ impl<T: FieldElement> Pipeline<T> {
&pil,
&constants,
witness.as_deref().unwrap_or_default(),
None,
existing_proof,
);
if let Some(output_dir) = &self.output_dir {
write_constants_to_fs(&constants, output_dir);
if let Some(witness) = &witness {
write_commits_to_fs(witness, output_dir);
}
if let Some(constraints_serialization) = &constraints_serialization {
write_constraints_to_fs(constraints_serialization, output_dir);
}
};
Artifact::Proof(ProofResult {
let proof_result = ProofResult {
constants,
witness,
proof,
constraints_serialization,
})
};
self.maybe_wite_proof(&proof_result)?;
Artifact::Proof(proof_result)
}
Artifact::Proof(_) => panic!("Last pipeline step!"),
});
Ok(())
}
/// Returns the path to the output file if the output directory is set.
/// Fails if the file already exists and `force_overwrite` is false.
/// Returns the path to the output file if the output directory is set.
/// Fails if the file already exists and `force_overwrite` is false.
fn path_if_should_write<F: FnOnce(&str) -> String>(
    &self,
    file_name_from_pipeline_name: F,
) -> Result<Option<PathBuf>, Vec<String>> {
    // No output directory configured -> nothing to write.
    let output_dir = match &self.output_dir {
        Some(dir) => dir,
        None => return Ok(None),
    };
    let name = self
        .name
        .as_ref()
        .expect("name must be set if output_dir is set");
    let path = output_dir.join(file_name_from_pipeline_name(name));
    if path.exists() && !self.force_overwrite {
        return Err(vec![format!(
            "{} already exists! Use --force to overwrite.",
            path.to_str().unwrap()
        )]);
    }
    log::info!("Writing {}.", path.to_str().unwrap());
    Ok(Some(path))
}
fn maybe_write_pil<C: Display>(&self, content: &C, suffix: &str) -> Result<(), Vec<String>> {
if let Some(output_dir) = &self.output_dir {
let name = self
.name
.as_ref()
.expect("name must be set if output_dir is set");
let output_file = output_dir.join(format!("{name}{suffix}.pil"));
if !output_file.exists() || self.force_overwrite {
fs::write(&output_file, format!("{content}")).map_err(|e| {
vec![format!(
"Error writing {}: {e}",
output_file.to_str().unwrap()
)]
})?;
self.log(&format!("Wrote {}.", output_file.to_str().unwrap()));
} else {
return Err(vec![format!(
"{} already exists! Use --force to overwrite.",
output_file.to_str().unwrap()
)]);
if let Some(path) = self.path_if_should_write(|name| format!("{name}{suffix}.pil"))? {
fs::write(&path, format!("{content}"))
.map_err(|e| vec![format!("Error writing {}: {e}", path.to_str().unwrap())])?;
}
Ok(())
}
fn maybe_write_constants(&self, constants: &[(String, Vec<T>)]) -> Result<(), Vec<String>> {
if let Some(path) = self.path_if_should_write(|_| "constants.bin".to_string())? {
write_polys_file(
&mut BufWriter::new(&mut fs::File::create(path).unwrap()),
constants,
);
}
Ok(())
}
/// Writes the witness columns to `<output_dir>/commits.bin` and, if CSV
/// export is enabled, all fixed and witness columns to
/// `<output_dir>/columns.csv`. Both writes are skipped when no output
/// directory is configured.
fn maybe_write_witness(
    &self,
    fixed: &[(String, Vec<T>)],
    witness: &Option<Vec<(String, Vec<T>)>>,
) -> Result<(), Vec<String>> {
    if let Some(witness) = witness.as_ref() {
        if let Some(path) = self.path_if_should_write(|_| "commits.bin".to_string())? {
            // Surface file-creation failures as pipeline errors instead of
            // panicking, consistent with `maybe_write_pil`.
            let mut file = fs::File::create(&path)
                .map_err(|e| vec![format!("Error writing {}: {e}", path.to_str().unwrap())])?;
            write_polys_file(&mut BufWriter::new(&mut file), witness);
        }
    }
    if self.arguments.export_witness_csv {
        if let Some(path) = self.path_if_should_write(|_| "columns.csv".to_string())? {
            // Fixed columns first, then witness columns (if any).
            let columns = fixed
                .iter()
                .chain(witness.iter().flatten())
                .collect::<Vec<_>>();
            let mut csv_file = fs::File::create(path).map_err(|e| vec![format!("{}", e)])?;
            let mut csv_writer = BufWriter::new(&mut csv_file);
            write_polys_csv_file(&mut csv_writer, self.arguments.csv_render_mode, &columns);
        }
    }
    Ok(())
}
/// Writes proof artifacts if an output directory is configured:
/// `constraints.json` when the backend produced a constraint serialization,
/// and the proof bytes as `proof.bin` — or `proof_aggr.bin` when this run
/// aggregated an existing proof.
// NOTE(review): "wite" is a typo for "write"; kept as-is since callers use this name.
fn maybe_wite_proof(&self, proof_result: &ProofResult<T>) -> Result<(), Vec<String>> {
if let Some(constraints_serialization) = &proof_result.constraints_serialization {
if let Some(path) = self.path_if_should_write(|_| "constraints.json".to_string())? {
let mut file = fs::File::create(path).unwrap();
file.write_all(constraints_serialization.as_bytes())
.unwrap();
}
}
if let Some(proof) = &proof_result.proof {
// An existing proof file input means we just produced an aggregation proof.
let fname = if self.arguments.existing_proof_file.is_some() {
"proof_aggr.bin"
} else {
"proof.bin"
};
if let Some(path) = self.path_if_should_write(|_| fname.to_string())? {
let mut proof_file = fs::File::create(path).unwrap();
proof_file.write_all(proof).unwrap();
}
}
Ok(())
}
@@ -500,6 +622,14 @@ impl<T: FieldElement> Pipeline<T> {
Ok(())
}
/// Advances the pipeline to the `AsmString` stage and returns the resolved
/// ASM source as a string, consuming the pipeline.
pub fn asm_string(mut self) -> Result<String, Vec<String>> {
    self.advance_to(Stage::AsmString)?;
    // let-else with an explanatory panic, consistent with `analyzed_asm`.
    let Artifact::AsmString(_, asm_string) = self.artifact.unwrap() else {
        panic!("expected AsmString artifact after advancing to Stage::AsmString");
    };
    Ok(asm_string)
}
pub fn analyzed_asm(mut self) -> Result<AnalysisASMFile<T>, Vec<String>> {
self.advance_to(Stage::AnalyzedAsm)?;
let Artifact::AnalyzedAsm(analyzed_asm) = self.artifact.unwrap() else {

View File

@@ -16,7 +16,7 @@ pub fn resolve_test_file(file_name: &str) -> PathBuf {
pub fn verify_test_file<T: FieldElement>(
file_name: &str,
inputs: Vec<T>,
external_witness_values: Vec<(&str, Vec<T>)>,
external_witness_values: Vec<(String, Vec<T>)>,
) {
let pipeline = Pipeline::default().from_file(resolve_test_file(file_name));
verify_pipeline(pipeline, inputs, external_witness_values)
@@ -26,7 +26,7 @@ pub fn verify_asm_string<T: FieldElement>(
file_name: &str,
contents: &str,
inputs: Vec<T>,
external_witness_values: Vec<(&str, Vec<T>)>,
external_witness_values: Vec<(String, Vec<T>)>,
) {
let pipeline =
Pipeline::default().from_asm_string(contents.to_string(), Some(PathBuf::from(file_name)));
@@ -36,7 +36,7 @@ pub fn verify_asm_string<T: FieldElement>(
pub fn verify_pipeline<T: FieldElement>(
pipeline: Pipeline<T>,
inputs: Vec<T>,
external_witness_values: Vec<(&str, Vec<T>)>,
external_witness_values: Vec<(String, Vec<T>)>,
) {
let mut pipeline = pipeline
.with_tmp_output()

View File

@@ -1,33 +1,4 @@
use number::write_polys_file;
use number::FieldElement;
use std::{
fs,
io::{BufWriter, Write},
path::Path,
process::Command,
};
pub fn write_constants_to_fs<T: FieldElement>(constants: &[(String, Vec<T>)], output_dir: &Path) {
let to_write = output_dir.join("constants.bin");
write_polys_file(
&mut BufWriter::new(&mut fs::File::create(to_write).unwrap()),
constants,
);
}
pub fn write_commits_to_fs<T: FieldElement>(commits: &[(String, Vec<T>)], output_dir: &Path) {
let to_write = output_dir.join("commits.bin");
write_polys_file(
&mut BufWriter::new(&mut fs::File::create(to_write).unwrap()),
commits,
);
}
pub fn write_constraints_to_fs(constraints: &String, output_dir: &Path) {
let to_write = output_dir.join("constraints.json");
let mut file = fs::File::create(to_write).unwrap();
file.write_all(constraints.as_bytes()).unwrap();
}
use std::{path::Path, process::Command};
pub fn verify(temp_dir: &Path) {
let pilcom = std::env::var("PILCOM")

View File

@@ -42,7 +42,7 @@ fn mem_write_once_external_write() {
mem[17] = GoldilocksField::from(42);
mem[62] = GoldilocksField::from(123);
mem[255] = GoldilocksField::from(-1);
verify_test_file::<GoldilocksField>(f, vec![], vec![("main.v", mem)]);
verify_test_file::<GoldilocksField>(f, vec![], vec![("main.v".to_string(), mem)]);
}
#[test]

View File

@@ -41,14 +41,14 @@ fn test_external_witgen_fails_if_none_provided() {
#[test]
fn test_external_witgen_a_provided() {
let f = "pil/external_witgen.pil";
let external_witness = vec![("main.a", vec![GoldilocksField::from(3); 16])];
let external_witness = vec![("main.a".to_string(), vec![GoldilocksField::from(3); 16])];
verify_test_file(f, vec![], external_witness);
}
#[test]
fn test_external_witgen_b_provided() {
let f = "pil/external_witgen.pil";
let external_witness = vec![("main.b", vec![GoldilocksField::from(4); 16])];
let external_witness = vec![("main.b".to_string(), vec![GoldilocksField::from(4); 16])];
verify_test_file(f, vec![], external_witness);
}
@@ -56,8 +56,8 @@ fn test_external_witgen_b_provided() {
fn test_external_witgen_both_provided() {
let f = "pil/external_witgen.pil";
let external_witness = vec![
("main.a", vec![GoldilocksField::from(3); 16]),
("main.b", vec![GoldilocksField::from(4); 16]),
("main.a".to_string(), vec![GoldilocksField::from(3); 16]),
("main.b".to_string(), vec![GoldilocksField::from(4); 16]),
];
verify_test_file(f, vec![], external_witness);
}
@@ -67,9 +67,9 @@ fn test_external_witgen_both_provided() {
fn test_external_witgen_fails_on_conflicting_external_witness() {
let f = "pil/external_witgen.pil";
let external_witness = vec![
("main.a", vec![GoldilocksField::from(3); 16]),
("main.a".to_string(), vec![GoldilocksField::from(3); 16]),
// Does not satisfy b = a + 1
("main.b", vec![GoldilocksField::from(3); 16]),
("main.b".to_string(), vec![GoldilocksField::from(3); 16]),
];
verify_test_file(f, vec![], external_witness);
}

View File

@@ -11,12 +11,18 @@ pub enum CsvRenderMode {
Hex,
}
// `Hex` is the default render mode; this matches the CLI default
// (`#[arg(default_value_t = CsvRenderModeCLI::Hex)]`).
impl Default for CsvRenderMode {
fn default() -> Self {
Self::Hex
}
}
const ROW_NAME: &str = "Row";
pub fn write_polys_csv_file<T: FieldElement>(
file: &mut impl Write,
render_mode: CsvRenderMode,
polys: &[(String, Vec<T>)],
polys: &[&(String, Vec<T>)],
) {
let mut writer = Writer::from_writer(file);
@@ -172,6 +178,7 @@ mod tests {
.into_iter()
.map(|(name, values)| (name.to_string(), values))
.collect::<Vec<_>>();
let polys_ref = polys.iter().collect::<Vec<_>>();
for render_mode in &[
CsvRenderMode::SignedBase10,
@@ -179,7 +186,7 @@ mod tests {
CsvRenderMode::Hex,
] {
let mut buf: Vec<u8> = vec![];
write_polys_csv_file(&mut buf, *render_mode, &polys);
write_polys_csv_file(&mut buf, *render_mode, &polys_ref);
let read_polys = read_polys_csv_file::<Bn254Field>(&mut Cursor::new(buf));
assert_eq!(read_polys, polys);

View File

@@ -2,24 +2,60 @@
mod util;
use backend::{Backend, BackendType, Proof};
use backend::{Backend, BackendType};
use clap::{CommandFactory, Parser, Subcommand};
use compiler::pipeline::{Pipeline, ProofResult, Stage};
use compiler::util::{read_poly_set, FixedPolySet, WitnessPolySet};
use compiler::pipeline::{Pipeline, Stage};
use env_logger::fmt::Color;
use env_logger::{Builder, Target};
use log::LevelFilter;
use number::write_polys_file;
use number::{read_polys_csv_file, write_polys_csv_file, CsvRenderMode};
use number::{read_polys_csv_file, CsvRenderMode};
use number::{Bn254Field, FieldElement, GoldilocksField};
use riscv::continuations::{rust_continuations, rust_continuations_dry_run};
use riscv::{compile_riscv_asm, compile_rust};
use std::collections::HashMap;
use std::io::{self, BufReader, BufWriter, Read};
use std::io::{self, BufReader, BufWriter};
use std::path::PathBuf;
use std::{borrow::Cow, fs, io::Write, path::Path};
use strum::{Display, EnumString, EnumVariantNames};
/// Transforms a pipeline factory into a pipeline factory that binds CLI arguments like
/// the output directory and the CSV export settings to the pipeline.
///
/// `witness_values`, if given, is the path of a CSV file with externally
/// computed witness columns; it is read once, up front, so the returned
/// factory only has to clone the parsed columns for each pipeline it builds.
#[allow(clippy::too_many_arguments)]
fn bind_cli_args<F: FieldElement>(
    pipeline_factory: impl Fn() -> Pipeline<F>,
    inputs: Vec<F>,
    output_dir: PathBuf,
    force_overwrite: bool,
    witness_values: Option<String>,
    export_csv: bool,
    csv_mode: CsvRenderModeCLI,
) -> impl Fn() -> Pipeline<F> {
    let witness_values = witness_values
        .map(|csv_path| {
            // Panic with context: this is a user-supplied CLI path.
            let csv_file = fs::File::open(&csv_path)
                .unwrap_or_else(|e| panic!("could not open witness CSV {csv_path}: {e}"));
            let mut csv_reader = BufReader::new(&csv_file);
            read_polys_csv_file::<F>(&mut csv_reader)
        })
        .unwrap_or_default();
    // Map the CLI enum to the `number` crate's render mode.
    let csv_mode = match csv_mode {
        CsvRenderModeCLI::SignedBase10 => CsvRenderMode::SignedBase10,
        CsvRenderModeCLI::UnsignedBase10 => CsvRenderMode::UnsignedBase10,
        CsvRenderModeCLI::Hex => CsvRenderMode::Hex,
    };
    move || {
        // Note that we can't just take an existing pipeline here instead of
        // a factory, as Pipeline doesn't currently implement Clone, so we need to
        // create a new one each time.
        pipeline_factory()
            .with_output(output_dir.clone(), force_overwrite)
            .with_external_witness_values(witness_values.clone())
            .with_witness_csv_settings(export_csv, csv_mode)
            .with_prover_inputs(inputs.clone())
    }
}
#[derive(Clone, EnumString, EnumVariantNames, Display)]
pub enum FieldArgument {
#[strum(serialize = "gl")]
@@ -28,7 +64,7 @@ pub enum FieldArgument {
Bn254,
}
#[derive(Clone, EnumString, EnumVariantNames, Display)]
#[derive(Clone, Copy, EnumString, EnumVariantNames, Display)]
pub enum CsvRenderModeCLI {
#[strum(serialize = "i")]
SignedBase10,
@@ -142,6 +178,17 @@ enum Commands {
#[arg(value_parser = clap_enum_variants!(BackendType))]
prove_with: Option<BackendType>,
/// Generate a CSV file containing the fixed and witness column values. Useful for debugging purposes.
#[arg(long)]
#[arg(default_value_t = false)]
export_csv: bool,
/// How to render field elements in the csv file
#[arg(long)]
#[arg(default_value_t = CsvRenderModeCLI::Hex)]
#[arg(value_parser = clap_enum_variants!(CsvRenderModeCLI))]
csv_mode: CsvRenderModeCLI,
/// Comma-separated list of coprocessors.
#[arg(long)]
coprocessors: Option<String>,
@@ -190,6 +237,17 @@ enum Commands {
#[arg(value_parser = clap_enum_variants!(BackendType))]
prove_with: Option<BackendType>,
/// Generate a CSV file containing the fixed and witness column values. Useful for debugging purposes.
#[arg(long)]
#[arg(default_value_t = false)]
export_csv: bool,
/// How to render field elements in the csv file
#[arg(long)]
#[arg(default_value_t = CsvRenderModeCLI::Hex)]
#[arg(value_parser = clap_enum_variants!(CsvRenderModeCLI))]
csv_mode: CsvRenderModeCLI,
/// Comma-separated list of coprocessors.
#[arg(long)]
coprocessors: Option<String>,
@@ -325,7 +383,7 @@ fn main() -> Result<(), io::Error> {
#[allow(clippy::print_stderr)]
fn run_command(command: Commands) {
match command {
let result = match command {
Commands::Rust {
file,
field,
@@ -333,6 +391,8 @@ fn run_command(command: Commands) {
output_directory,
force,
prove_with,
export_csv,
csv_mode,
coprocessors,
just_execute,
continuations,
@@ -343,21 +403,18 @@ fn run_command(command: Commands) {
}
None => riscv::CoProcessors::base(),
};
if let Err(errors) = call_with_field!(run_rust::<field>(
call_with_field!(run_rust::<field>(
&file,
split_inputs(&inputs),
Path::new(&output_directory),
force,
prove_with,
export_csv,
csv_mode,
coprocessors,
just_execute,
continuations
)) {
eprintln!("Errors:");
for e in errors {
eprintln!("{e}");
}
};
))
}
Commands::RiscvAsm {
files,
@@ -366,6 +423,8 @@ fn run_command(command: Commands) {
output_directory,
force,
prove_with,
export_csv,
csv_mode,
coprocessors,
just_execute,
continuations,
@@ -383,32 +442,31 @@ fn run_command(command: Commands) {
}
None => riscv::CoProcessors::base(),
};
if let Err(errors) = call_with_field!(run_riscv_asm::<field>(
call_with_field!(run_riscv_asm::<field>(
&name,
files.into_iter(),
split_inputs(&inputs),
Path::new(&output_directory),
force,
prove_with,
export_csv,
csv_mode,
coprocessors,
just_execute,
continuations
)) {
eprintln!("Errors:");
for e in errors {
eprintln!("{e}");
}
};
))
}
Commands::Reformat { file } => {
let contents = fs::read_to_string(&file).unwrap();
match parser::parse::<GoldilocksField>(Some(&file), &contents) {
Ok(ast) => println!("{ast}"),
Err(err) => err.output_to_stderr(),
}
};
Ok(())
}
Commands::OptimizePIL { file, field } => {
call_with_field!(optimize_and_output::<field>(&file))
call_with_field!(optimize_and_output::<field>(&file));
Ok(())
}
Commands::Pil {
file,
@@ -422,69 +480,20 @@ fn run_command(command: Commands) {
csv_mode,
just_execute,
continuations,
} => match (just_execute, continuations) {
(true, true) => {
assert!(matches!(field, FieldArgument::Gl));
let inputs = split_inputs::<GoldilocksField>(&inputs);
rust_continuations_dry_run(
Pipeline::default().from_asm_file(PathBuf::from(file)),
inputs,
);
}
(true, false) => {
let contents = fs::read_to_string(&file).unwrap();
let inputs = split_inputs::<GoldilocksField>(&inputs);
let inputs: HashMap<GoldilocksField, Vec<GoldilocksField>> =
vec![(GoldilocksField::from(0), inputs)]
.into_iter()
.collect();
riscv_executor::execute::<GoldilocksField>(
&contents,
&inputs,
&[],
riscv_executor::ExecMode::Fast,
);
}
(false, true) => {
assert!(matches!(field, FieldArgument::Gl));
let inputs = split_inputs::<GoldilocksField>(&inputs);
let pipeline_factory = || {
Pipeline::default()
.from_asm_file(PathBuf::from(&file))
.with_prover_inputs(vec![])
};
let pipeline_callback =
|mut pipeline: Pipeline<GoldilocksField>| -> Result<(), Vec<String>> {
pipeline.advance_to(Stage::GeneratedWitness)?;
if let Some(backend) = prove_with {
pipeline.with_backend(backend).proof()?;
}
Ok(())
};
rust_continuations(pipeline_factory, pipeline_callback, inputs.clone()).unwrap();
}
(false, false) => {
match call_with_field!(compile_with_csv_export::<field>(
file,
output_directory,
witness_values,
inputs,
force,
prove_with,
export_csv,
csv_mode
)) {
Ok(()) => {}
Err(errors) => {
eprintln!("Errors:");
for e in errors {
eprintln!("{e}");
}
}
};
}
},
} => {
call_with_field!(run_pil::<field>(
file,
output_directory,
witness_values,
inputs,
force,
prove_with,
export_csv,
csv_mode,
just_execute,
continuations
))
}
Commands::Prove {
file,
dir,
@@ -495,7 +504,7 @@ fn run_command(command: Commands) {
} => {
let pil = Path::new(&file);
let dir = Path::new(&dir);
call_with_field!(read_and_prove::<field>(pil, dir, &backend, proof, params));
call_with_field!(read_and_prove::<field>(pil, dir, &backend, proof, params))
}
Commands::Setup {
size,
@@ -504,8 +513,15 @@ fn run_command(command: Commands) {
backend,
} => {
call_with_field!(setup::<field>(size, dir, backend));
Ok(())
}
};
if let Err(errors) = result {
for error in errors {
eprintln!("{}", error);
}
std::process::exit(1);
}
}
fn setup<F: FieldElement>(size: u64, dir: String, backend_type: BackendType) {
@@ -530,6 +546,8 @@ fn run_rust<F: FieldElement>(
output_dir: &Path,
force_overwrite: bool,
prove_with: Option<BackendType>,
export_csv: bool,
csv_mode: CsvRenderModeCLI,
coprocessors: riscv::CoProcessors,
just_execute: bool,
continuations: bool,
@@ -543,12 +561,25 @@ fn run_rust<F: FieldElement>(
)
.ok_or_else(|| vec!["could not compile rust".to_string()])?;
handle_riscv_asm(
asm_file_path.to_str().unwrap(),
&asm_contents,
inputs,
output_dir,
let pipeline_factory = || {
Pipeline::<F>::default().from_asm_string(
asm_contents.clone(),
Some(PathBuf::from(asm_file_path.to_str().unwrap())),
)
};
let pipeline_factory = bind_cli_args(
pipeline_factory,
inputs.clone(),
output_dir.to_path_buf(),
force_overwrite,
None,
export_csv,
csv_mode,
);
run(
pipeline_factory,
inputs,
prove_with,
just_execute,
continuations,
@@ -564,6 +595,8 @@ fn run_riscv_asm<F: FieldElement>(
output_dir: &Path,
force_overwrite: bool,
prove_with: Option<BackendType>,
export_csv: bool,
csv_mode: CsvRenderModeCLI,
coprocessors: riscv::CoProcessors,
just_execute: bool,
continuations: bool,
@@ -578,12 +611,25 @@ fn run_riscv_asm<F: FieldElement>(
)
.ok_or_else(|| vec!["could not compile RISC-V assembly".to_string()])?;
handle_riscv_asm(
asm_file_path.to_str().unwrap(),
&asm_contents,
inputs,
output_dir,
let pipeline_factory = || {
Pipeline::<F>::default().from_asm_string(
asm_contents.clone(),
Some(PathBuf::from(asm_file_path.to_str().unwrap())),
)
};
let pipeline_factory = bind_cli_args(
pipeline_factory,
inputs.clone(),
output_dir.to_path_buf(),
force_overwrite,
None,
export_csv,
csv_mode,
);
run(
pipeline_factory,
inputs,
prove_with,
just_execute,
continuations,
@@ -592,69 +638,7 @@ fn run_riscv_asm<F: FieldElement>(
}
#[allow(clippy::too_many_arguments)]
fn handle_riscv_asm<F: FieldElement>(
file_name: &str,
contents: &str,
inputs: Vec<F>,
output_dir: &Path,
force_overwrite: bool,
prove_with: Option<BackendType>,
just_execute: bool,
continuations: bool,
) -> Result<(), Vec<String>> {
match (just_execute, continuations) {
(true, true) => {
rust_continuations_dry_run(
Pipeline::default()
.from_asm_string(contents.to_string(), Some(PathBuf::from(file_name))),
inputs,
);
}
(true, false) => {
let mut inputs_hash: HashMap<F, Vec<F>> = HashMap::default();
inputs_hash.insert(0u32.into(), inputs);
riscv_executor::execute::<F>(
contents,
&inputs_hash,
&[],
riscv_executor::ExecMode::Fast,
);
}
(false, true) => {
let pipeline_factory = || {
Pipeline::default()
.with_output(output_dir.to_path_buf(), force_overwrite)
.from_asm_string(contents.to_string(), Some(PathBuf::from(file_name)))
.with_prover_inputs(inputs.clone())
};
let pipeline_callback = |mut pipeline: Pipeline<F>| -> Result<(), Vec<String>> {
pipeline.advance_to(Stage::GeneratedWitness)?;
if let Some(backend) = prove_with {
pipeline.with_backend(backend).proof()?;
}
Ok(())
};
rust_continuations(pipeline_factory, pipeline_callback, inputs.clone())?;
}
(false, false) => {
let mut pipeline = Pipeline::default()
.with_output(output_dir.to_path_buf(), force_overwrite)
.from_asm_string(contents.to_string(), Some(PathBuf::from(file_name)))
.with_prover_inputs(inputs)
.with_backend(BackendType::PilStarkCli);
pipeline.advance_to(Stage::GeneratedWitness).unwrap();
if let Some(backend) = prove_with {
pipeline = pipeline.with_backend(backend);
pipeline.proof().unwrap();
}
}
}
Ok(())
}
#[allow(clippy::too_many_arguments)]
fn compile_with_csv_export<T: FieldElement>(
fn run_pil<F: FieldElement>(
file: String,
output_directory: String,
witness_values: Option<String>,
@@ -663,80 +647,75 @@ fn compile_with_csv_export<T: FieldElement>(
prove_with: Option<BackendType>,
export_csv: bool,
csv_mode: CsvRenderModeCLI,
just_execute: bool,
continuations: bool,
) -> Result<(), Vec<String>> {
let external_witness_values = witness_values
.map(|csv_path| {
let csv_file = fs::File::open(csv_path).unwrap();
let mut csv_writer = BufReader::new(&csv_file);
read_polys_csv_file::<T>(&mut csv_writer)
})
.unwrap_or(vec![]);
let inputs = split_inputs::<F>(&inputs);
// Convert Vec<(String, Vec<T>)> to Vec<(&str, Vec<T>)>
let (strings, values): (Vec<_>, Vec<_>) = external_witness_values.into_iter().unzip();
let external_witness_values = strings.iter().map(AsRef::as_ref).zip(values).collect();
let output_dir = Path::new(&output_directory);
let mut pipeline = Pipeline::default()
.with_output(output_dir.to_path_buf(), force)
.from_file(PathBuf::from(file))
.with_external_witness_values(external_witness_values)
.with_prover_inputs(split_inputs(&inputs));
pipeline.advance_to(Stage::GeneratedWitness).unwrap();
let result = prove_with.map(|backend| pipeline.with_backend(backend).proof().unwrap());
if let Some(ref compilation_result) = result {
serialize_result_witness(output_dir, compilation_result);
if let Some(_backend) = &prove_with {
write_proving_results_to_fs(
false,
&compilation_result.proof,
&compilation_result.constraints_serialization,
output_dir,
);
}
}
if export_csv {
// Compilation result is None if the ASM file has not been compiled
// (e.g. it has been compiled before and the force flag is not set)
if let Some(compilation_result) = result {
let csv_path = Path::new(&output_directory).join("columns.csv");
export_columns_to_csv::<T>(
compilation_result.constants,
compilation_result.witness,
&csv_path,
csv_mode,
);
}
}
let pipeline_factory = bind_cli_args(
|| Pipeline::<F>::default().from_file(PathBuf::from(&file)),
inputs.clone(),
PathBuf::from(output_directory),
force,
witness_values,
export_csv,
csv_mode,
);
run(
pipeline_factory,
inputs,
prove_with,
just_execute,
continuations,
)?;
Ok(())
}
fn export_columns_to_csv<T: FieldElement>(
fixed: Vec<(String, Vec<T>)>,
witness: Option<Vec<(String, Vec<T>)>>,
csv_path: &Path,
render_mode: CsvRenderModeCLI,
) {
let columns = fixed
.into_iter()
.chain(witness.unwrap_or(vec![]))
.collect::<Vec<_>>();
let mut csv_file = fs::File::create(csv_path).unwrap();
let mut csv_writer = BufWriter::new(&mut csv_file);
let render_mode = match render_mode {
CsvRenderModeCLI::SignedBase10 => CsvRenderMode::SignedBase10,
CsvRenderModeCLI::UnsignedBase10 => CsvRenderMode::UnsignedBase10,
CsvRenderModeCLI::Hex => CsvRenderMode::Hex,
fn run<F: FieldElement>(
pipeline_factory: impl Fn() -> Pipeline<F>,
inputs: Vec<F>,
prove_with: Option<BackendType>,
just_execute: bool,
continuations: bool,
) -> Result<(), Vec<String>> {
let bootloader_inputs = if continuations {
rust_continuations_dry_run(pipeline_factory(), inputs.clone())
} else {
vec![]
};
write_polys_csv_file(&mut csv_writer, render_mode, &columns);
let generate_witness_and_prove_maybe = |mut pipeline: Pipeline<F>| -> Result<(), Vec<String>> {
pipeline.advance_to(Stage::GeneratedWitness)?;
prove_with.map(|backend| pipeline.with_backend(backend).proof().unwrap());
Ok(())
};
match (just_execute, continuations) {
(true, true) => {
// Already ran when computing bootloader inputs, nothing else to do.
}
(true, false) => {
let mut inputs_hash: HashMap<F, Vec<F>> = HashMap::default();
inputs_hash.insert(0u32.into(), inputs);
riscv_executor::execute::<F>(
&pipeline_factory().asm_string().unwrap(),
&inputs_hash,
&[],
riscv_executor::ExecMode::Fast,
);
}
(false, true) => {
rust_continuations(
pipeline_factory,
generate_witness_and_prove_maybe,
bootloader_inputs,
)?;
}
(false, false) => {
generate_witness_and_prove_maybe(pipeline_factory())?;
}
}
Ok(())
}
fn read_and_prove<T: FieldElement>(
@@ -745,37 +724,15 @@ fn read_and_prove<T: FieldElement>(
backend_type: &BackendType,
proof_path: Option<String>,
params: Option<String>,
) {
let pil = Pipeline::default()
) -> Result<(), Vec<String>> {
Pipeline::<T>::default()
.from_file(file.to_path_buf())
.optimized_pil()
.unwrap();
let fixed = read_poly_set::<FixedPolySet, T>(&pil, dir);
let witness = read_poly_set::<WitnessPolySet, T>(&pil, dir);
assert_eq!(fixed.1, witness.1);
let builder = backend_type.factory::<T>();
let backend = if let Some(filename) = params {
let mut file = fs::File::open(dir.join(filename)).unwrap();
builder.create_from_setup(&mut file).unwrap()
} else {
builder.create(fixed.1)
};
let proof = proof_path.map(|filename| {
let mut buf = Vec::new();
fs::File::open(dir.join(filename))
.unwrap()
.read_to_end(&mut buf)
.unwrap();
buf
});
let is_aggr = proof.is_some();
let (proof, constraints_serialization) = backend.prove(&pil, &fixed.0, &witness.0, proof);
write_proving_results_to_fs(is_aggr, &proof, &constraints_serialization, dir);
.read_generated_witness(dir)
.with_setup_file(params.map(PathBuf::from))
.with_existing_proof_file(proof_path.map(PathBuf::from))
.with_backend(*backend_type)
.proof()?;
Ok(())
}
#[allow(clippy::print_stdout)]
@@ -789,65 +746,6 @@ fn optimize_and_output<T: FieldElement>(file: &str) {
);
}
fn serialize_result_witness<T: FieldElement>(output_dir: &Path, results: &ProofResult<T>) {
write_constants_to_fs(&results.constants, output_dir);
let witness = results.witness.as_ref().unwrap();
write_commits_to_fs(witness, output_dir);
}
fn write_constants_to_fs<T: FieldElement>(constants: &[(String, Vec<T>)], output_dir: &Path) {
let to_write = output_dir.join("constants.bin");
write_polys_file(
&mut BufWriter::new(&mut fs::File::create(&to_write).unwrap()),
constants,
);
log::info!("Wrote {}.", to_write.display());
}
fn write_commits_to_fs<T: FieldElement>(commits: &[(String, Vec<T>)], output_dir: &Path) {
let to_write = output_dir.join("commits.bin");
write_polys_file(
&mut BufWriter::new(&mut fs::File::create(&to_write).unwrap()),
commits,
);
log::info!("Wrote {}.", to_write.display());
}
fn write_proving_results_to_fs(
is_aggregation: bool,
proof: &Option<Proof>,
constraints_serialization: &Option<String>,
output_dir: &Path,
) {
match proof {
Some(proof) => {
let fname = if is_aggregation {
"proof_aggr.bin"
} else {
"proof.bin"
};
// No need to bufferize the writing, because we write the whole
// proof in one call.
let to_write = output_dir.join(fname);
let mut proof_file = fs::File::create(&to_write).unwrap();
proof_file.write_all(proof).unwrap();
log::info!("Wrote {}.", to_write.display());
}
None => log::warn!("No proof was generated"),
}
match constraints_serialization {
Some(json) => {
let to_write = output_dir.join("constraints.json");
let mut file = fs::File::create(&to_write).unwrap();
file.write_all(json.as_bytes()).unwrap();
log::info!("Wrote {}.", to_write.display());
}
None => log::warn!("Constraints were not JSON serialized"),
}
}
#[cfg(test)]
mod test {
use crate::{run_command, Commands, CsvRenderModeCLI, FieldArgument};

View File

@@ -49,15 +49,12 @@ fn transposed_trace<F: FieldElement>(trace: &ExecutionTrace) -> HashMap<String,
pub fn rust_continuations<F: FieldElement, PipelineFactory, PipelineCallback, E>(
pipeline_factory: PipelineFactory,
pipeline_callback: PipelineCallback,
inputs: Vec<F>,
bootloader_inputs: Vec<Vec<F>>,
) -> Result<(), E>
where
PipelineFactory: Fn() -> Pipeline<F>,
PipelineCallback: Fn(Pipeline<F>) -> Result<(), E>,
{
log::info!("Dry running execution to collect bootloader inputs...");
let pipeline = pipeline_factory();
let bootloader_inputs = rust_continuations_dry_run(pipeline, inputs.clone());
let num_chunks = bootloader_inputs.len();
bootloader_inputs
@@ -65,6 +62,9 @@ where
.enumerate()
.map(|(i, bootloader_inputs)| -> Result<(), E> {
log::info!("Running chunk {} / {}...", i + 1, num_chunks);
// TODO(#840): If Pipeline implemented Clone, we could advance it once to
// the OptimizedPil stage and clone it here instead of creating and
// running a fresh pipeline for each chunk.
let pipeline = pipeline_factory();
let pipeline = add_bootloader_inputs(pipeline, bootloader_inputs);
pipeline_callback(pipeline)?;

View File

@@ -186,7 +186,8 @@ fn test_many_chunks() {
verify_pipeline(pipeline, vec![], vec![]);
Ok(())
};
rust_continuations(pipeline_factory, pipeline_callback, vec![]).unwrap();
let bootloader_inputs = rust_continuations_dry_run(pipeline_factory(), vec![]);
rust_continuations(pipeline_factory, pipeline_callback, bootloader_inputs).unwrap();
}
fn verify_file(case: &str, inputs: Vec<GoldilocksField>, coprocessors: &CoProcessors) {