fuzz: Add fuzz harness for zkas compilation

Create a fuzz harness to test the ./zkas binary compilation process. The
whole pipeline is tested: Lexer, Parser, Compiler, Analyzer. This is
performed by copying the relevant code from bin/zkas/src/main.rs.

Testing the entire pipeline like this is not very efficient in terms of
fuzzing cycles but on the other hand it is a quick-and-dirty way to find
results. It also benefits from testing the actual inputs to the binary
in the way it's expecting, rather than checking each of the components
in a piecemeal way using interfaces that aren't expected to be exposed
anyway.
This commit is contained in:
y
2023-09-16 18:36:13 -04:00
committed by parazyd
parent 94d101a8ae
commit 9d97aebf50
2 changed files with 90 additions and 0 deletions

View File

@@ -46,3 +46,9 @@ name = "varint-differential"
path = "src/varint_differential.rs"
test = false
doc = false
[[bin]]
name = "zkas-compile"
path = "src/zkas_compile.rs"
test = false
doc = false

View File

@@ -0,0 +1,84 @@
/* This file is part of DarkFi (https://dark.fi)
*
* Copyright (C) 2020-2023 Dyne.org foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
// extern crate darkfi_serial;
use honggfuzz::fuzz;
use darkfi::zkas::{Lexer, Parser, Compiler, Analyzer};
use std::str;
/// Fuzz harness exercising the entire zkas compilation pipeline
/// (Lexer -> Parser -> Analyzer -> Compiler), mirroring the flow of
/// bin/zkas/src/main.rs so fuzz inputs are processed exactly the way
/// the real binary would process them.
fn main() {
    loop {
        fuzz!(|data: &[u8]| {
            // The zkas binary consumes its input as UTF-8 text iterated
            // via .chars(), so silently skip inputs that are not valid
            // UTF-8 — they could never reach the pipeline anyway.
            let source = match str::from_utf8(data) {
                Ok(v) => v.chars(),
                Err(_) => return,
            };

            // No real file backs the fuzz input; the filename is only
            // used by the pipeline for error reporting.
            let filename = "/dev/null";

            // Stage 1: lexing. Errors are expected for malformed input;
            // we are hunting for panics/crashes, not lex failures.
            let lexer = Lexer::new(filename, source.clone());
            let tokens = match lexer.lex() {
                Ok(v) => v,
                Err(_) => return,
            };

            // Stage 2: parsing. Builds the initial AST, enforcing syntax
            // and general structure without caring about semantics yet.
            let parser = Parser::new(filename, source.clone(), tokens);
            let (namespace, k, constants, witnesses, statements) = match parser.parse() {
                Ok(v) => v,
                Err(_) => return,
            };

            // Stage 3: analysis. Converts return and variable types to
            // their correct forms and verifies the ZK script's semantics.
            let mut analyzer =
                Analyzer::new(filename, source.clone(), constants, witnesses, statements);
            if analyzer.analyze_types().is_err() {
                return
            }
            if analyzer.analyze_semantic().is_err() {
                return
            }

            // Stage 4: compilation to the zkas binary format.
            let compiler = Compiler::new(
                filename,
                source.clone(),
                namespace,
                k,
                analyzer.constants,
                analyzer.witnesses,
                analyzer.statements,
                analyzer.literals,
                false, // NOTE(review): presumably the debug-info flag — confirm against bin/zkas/src/main.rs
            );

            // The compiled artifact itself is irrelevant to the fuzzer;
            // only a panic/crash during compilation is interesting.
            let _ = compiler.compile();
        });
    }
}