new dir structure

themighty1
2022-08-19 10:48:54 +03:00
parent b9561ee927
commit 8f888bca07
20 changed files with 168 additions and 1077 deletions

Cargo.toml (changed, -17 +5)

@@ -1,17 +1,5 @@
-[package]
-name = "label_sum"
-version = "0.1.0"
-edition = "2021"
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[lib]
-name = "label_sum"
-[dependencies]
-num = { version = "0.4" }
-rand = "0.8.5"
-json = "0.12.4"
-aes = { version = "0.7.5", features = [] }
-cipher = "0.3"
-uuid = { version = "0.8.1", features = ["serde", "v4"] }
+[workspace]
+members = [
+    "labelsum",
+    "derive-macro",
+]
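The new layout introduced by this commit (a sketch; only the files visible in this diff are listed, and the src/ paths follow the standard Cargo convention):

.
├── Cargo.toml          (workspace manifest, see above)
├── derive-macro/
│   ├── Cargo.toml
│   ├── Cargo.lock
│   └── src/lib.rs
└── labelsum/
    ├── Cargo.toml
    └── circuit.r1cs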

derive-macro/Cargo.lock (generated, new file, +78)

@@ -0,0 +1,78 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "my_derive"
version = "0.1.0"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

derive-macro/Cargo.toml (new file, +15)

@@ -0,0 +1,15 @@
[package]
name = "derive-macro"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
proc-macro = true
[dependencies]
syn = "1.0"
quote = "1.0"
proc-macro2 = { version = "1", default-features = false }
proc-macro-error = "1.0"

derive-macro/src/lib.rs (new file, +51)

@@ -0,0 +1,51 @@
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::{Ident, Span};
use quote::quote;
use syn::{self, DataStruct, DeriveInput, Field};
// This is a convenience macro which implements accessors for each
// field of the struct. This assumes that the definition of "trait ProverGetSet"
// is already present in the code.
#[proc_macro_derive(ProverGetSetM)]
pub fn prover_get_set_m_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
// Build the impl
let gen = produce(&ast);
// Return the generated impl
gen.into()
}
fn produce(ast: &DeriveInput) -> TokenStream2 {
let name = &ast.ident;
// Is it a struct?
if let syn::Data::Struct(DataStruct { ref fields, .. }) = ast.data {
let generated = fields.iter().map(|f| implement(f));
quote! {
impl ProverGetSet for #name {
#(#generated)*
}
}
} else {
// Nope. This is an Enum. We are not supporting these!
panic!("can only handle structs, not enums!");
}
}
// Implements accessors for a struct field
fn implement(field: &Field) -> TokenStream2 {
let field_name = field.clone().ident.unwrap();
let ty = field.ty.clone();
let get_name = field_name.clone();
let set_name = Ident::new(&format!("{}{}", "set_", field_name), Span::call_site());
return quote! {
fn #get_name(&self) -> &#ty {
&self.#field_name
}
fn #set_name(&mut self, new: #ty) {
self.#field_name = new;
}
};
}
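For illustration, a minimal usage sketch of the derive (the Example struct and its u32 field are hypothetical; as the comment above notes, the matching ProverGetSet trait must be declared by hand):

use derive_macro::ProverGetSetM;

// the trait must declare exactly the accessors the derive generates:
// a getter named after the field and a setter prefixed with "set_"
trait ProverGetSet {
    fn foo(&self) -> &u32;
    fn set_foo(&mut self, new: u32);
}

#[derive(ProverGetSetM)]
struct Example {
    foo: u32,
}

fn main() {
    let mut e = Example { foo: 1 };
    e.set_foo(42);
    assert_eq!(*e.foo(), 42);
}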

labelsum/Cargo.toml (new file, +19)

@@ -0,0 +1,19 @@
[package]
name = "labelsum"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "labelsum"
[dependencies]
num = { version = "0.4"}
rand = "0.8.5"
json = "0.12.4"
aes = { version = "0.7.5", features = [] }
cipher = "0.3"
uuid = { version = "0.8.1", features = ["serde", "v4"] }
getset = "0.0.1"
derive-macro = { path = "../derive-macro"}

labelsum/circuit.r1cs (binary, new file, not shown)

poseidon.mjs (deleted file, -50)

@@ -1,50 +0,0 @@
import * as circomlibjs from "circomlibjs";
async function main(){
// field elements
const fe = JSON.parse(process.argv[2]);
const poseidonReference = await circomlibjs.buildPoseidonReference();
const res = poseidonReference(fe);
// convert to BE
let buff = new Uint8Array(32);
poseidonReference.F.toRprBE(buff, 0, res);
const rv = bufToBn(buff);
// print to stdout; this is how the Rust caller reads the output
console.log(rv.toString());
}
main().then(() => {
process.exit(0);
});
function bufToBn(buf) {
var hex = [];
const u8 = Uint8Array.from(buf);
u8.forEach(function (i) {
var h = i.toString(16);
if (h.length % 2) { h = '0' + h; }
hex.push(h);
});
return BigInt('0x' + hex.join(''));
}
function bigToUint8Array(big) {
let hex = big.toString(16)
if (hex.length % 2) {
hex = '0' + hex
}
const len = hex.length / 2
const u8 = new Uint8Array(len)
var i = 0
var j = 0
while (i < len) {
u8[i] = parseInt(hex.slice(j, j + 2), 16)
i += 1
j += 2
}
return u8
}
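For reference, the decimal string printed above is parsed back into a BigUint on the Rust side (see prover.rs below). Byte-wise, bufToBn is just a big-endian decode; a one-line Rust equivalent using the num crate already in the manifest:

use num::BigUint;

// bufToBn(buf) in the script above == BigUint::from_bytes_be(buf):
// both interpret the 32-byte Poseidon digest as one big-endian integer
fn buf_to_bn(buf: &[u8]) -> BigUint {
    BigUint::from_bytes_be(buf)
}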

lib.rs (deleted file, -148)

@@ -1,148 +0,0 @@
use num::{BigUint, FromPrimitive, ToPrimitive, Zero};
use prover::{ProverCore, ProverError};
use verifier::VerifierError;
pub mod onetimesetup;
pub mod prover;
pub mod verifier;
// bn254 prime 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
// in decimal 21888242871839275222246405745257275088548364400416034343698204186575808495617
const BN254_PRIME: &str =
"21888242871839275222246405745257275088548364400416034343698204186575808495617";
// ProverVirtual describes virtual methods which must be implemented by the
// nodejs and wasm implementors
pub trait ProverVirtual {
fn set_proving_key(&mut self, key: Vec<u8>) -> Result<(), ProverError>;
fn poseidon(&mut self, inputs: Vec<BigUint>) -> BigUint;
fn prove(&mut self, input: String) -> Result<Vec<u8>, ProverError>;
}
// Provides default implementations for methods which pass through to the
// ProverCore. Only get_core() must be implemented.
pub trait ProverPassthrough {
fn setup(&mut self, plaintext: Vec<u8>) -> Vec<BigUint> {
self.get_core().setup(plaintext)
}
fn compute_label_sum(
&mut self,
ciphertexts: &Vec<[Vec<u8>; 2]>,
labels: &Vec<u128>,
) -> Vec<BigUint> {
self.get_core().compute_label_sum(ciphertexts, labels)
}
fn create_zk_proof(
&mut self,
zero_sum: Vec<BigUint>,
mut deltas: Vec<BigUint>,
) -> Result<Vec<Vec<u8>>, ProverError> {
self.get_core().create_zk_proof(zero_sum, deltas)
}
fn get_core(&mut self) -> &mut ProverCore<Box<Self>>;
}
pub trait VerifierCore {
fn get_proving_key(&mut self) -> Result<Vec<u8>, VerifierError>;
fn verify(
&mut self,
proof: Vec<u8>,
deltas: Vec<String>,
plaintext_hash: BigUint,
labelsum_hash: BigUint,
zero_sum: BigUint,
) -> Result<bool, VerifierError>;
}
#[cfg(test)]
mod tests {
use crate::{
onetimesetup::OneTimeSetup,
prover::{boolvec_to_u8vec, u8vec_to_boolvec},
};
use super::*;
use num::{BigUint, FromPrimitive};
use prover::Prover;
use rand::{thread_rng, Rng, RngCore};
use verifier::LsumVerifier;
fn random_bigint(bitsize: usize) -> BigUint {
assert!(bitsize <= 128);
let r: [u8; 16] = thread_rng().gen();
// take only those bits which we need
BigUint::from_bytes_be(&boolvec_to_u8vec(&u8vec_to_boolvec(&r)[0..bitsize]))
}
/// Unzips a slice of pairs, returning items corresponding to choice
fn choose<T: Clone>(items: &[[T; 2]], choice: &[bool]) -> Vec<T> {
assert!(items.len() == choice.len(), "arrays are different length");
items
.iter()
.zip(choice)
.map(|(items, choice)| items[*choice as usize].clone())
.collect()
}
#[test]
fn e2e_test() {
let prime = String::from(BN254_PRIME).parse::<BigUint>().unwrap();
let mut rng = thread_rng();
// OneTimeSetup is a no-op if the setup has been run before
let mut ots = OneTimeSetup::new();
ots.setup().unwrap();
// Our Poseidon is 16-width, so one permutation processes
// 16 * 253 - 128 (salt) == 3920 bits == 490 bytes. This is the size of the chunk.
// generate random plaintext of random size in range (0, 2000)
let mut plaintext = vec![0u8; rng.gen_range(0..2000)];
rng.fill_bytes(&mut plaintext);
// Normally, the Prover is expected to obtain her binary labels by
// evaluating the garbled circuit.
// To keep this test simple, we don't evaluate the gc, but we generate
// all labels of the Verifier and give the Prover her active labels.
let bit_size = plaintext.len() * 8;
let mut all_binary_labels: Vec<[u128; 2]> = Vec::with_capacity(bit_size);
let mut delta: u128 = rng.gen();
// set the last bit (the point-and-permute bit), so that the two labels
// of a pair always differ in their LSB
delta |= 1;
for _ in 0..bit_size {
let label_zero: u128 = rng.gen();
all_binary_labels.push([label_zero, label_zero ^ delta]);
}
let prover_labels = choose(&all_binary_labels, &u8vec_to_boolvec(&plaintext));
let mut verifier = LsumVerifier::new(true);
// passing the proving key to the Prover (if she needs one)
let proving_key = verifier.get_proving_key().unwrap();
// produce ciphertexts which are sent to Prover for decryption
verifier.setup(&all_binary_labels);
let mut prover = Prover::new(prime);
prover.set_proving_key(proving_key).unwrap();
let plaintext_hash = prover.setup(plaintext.to_vec());
// Commitment to the plaintext is sent to the Verifier
let ciphertexts = verifier.receive_pt_hashes(plaintext_hash);
// Verifier sends back encrypted arithm. labels.
let label_sum_hashes = prover.compute_label_sum(&ciphertexts, &prover_labels);
// Hash commitment to the label_sum is sent to the Notary
let (deltas, zero_sums) = verifier.receive_labelsum_hash(label_sum_hashes);
// Notary sends zero_sum and all deltas
// Prover constructs input to snarkjs
let proofs = prover.create_zk_proof(zero_sums, deltas).unwrap();
// Verifier verifies the proof
assert!(verifier.verify_many(proofs).unwrap());
}
}
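The label generation in the test above follows the free-XOR convention with a point-and-permute bit. A self-contained sketch of the property the comments rely on (rand 0.8 assumed):

use rand::{thread_rng, Rng};

fn gen_label_pairs(bit_count: usize) -> Vec<[u128; 2]> {
    let mut rng = thread_rng();
    // setting delta's last bit guarantees that the two labels of every
    // pair differ in their LSB (the point-and-permute bit)
    let delta: u128 = rng.gen::<u128>() | 1;
    (0..bit_count)
        .map(|_| {
            let zero: u128 = rng.gen();
            [zero, zero ^ delta]
        })
        .collect()
}

fn main() {
    for pair in gen_label_pairs(8) {
        // opposite LSBs tell the evaluator which ciphertext to decrypt
        assert_ne!(pair[0] & 1, pair[1] & 1);
    }
}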

onetimesetup.rs (deleted file, -118)

@@ -1,118 +0,0 @@
use rand::{distributions::Alphanumeric, Rng};
use std::path::Path;
use std::process::{Command, Output};
#[derive(Debug)]
pub enum Error {
FileDoesNotExist,
SnarkjsError,
}
pub struct OneTimeSetup {}
// OneTimeSetup should be run when the Notary starts. It checks that all files needed
// by snarkjs are in place. If not, the files are generated.
// The files need to be generated only once *ever* for all future instantiations
// of the Notary.
impl OneTimeSetup {
pub fn new() -> Self {
Self {}
}
fn check_output(&mut self, output: Result<Output, std::io::Error>) -> Result<(), Error> {
if output.is_err() {
return Err(Error::SnarkjsError);
}
if !output.unwrap().status.success() {
return Err(Error::SnarkjsError);
}
Ok(())
}
fn generate_entropy(&mut self) -> String {
let entropy: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(500)
.map(char::from)
.collect();
assert!(entropy.len() == 500);
entropy
}
pub fn setup(&mut self) -> Result<(), Error> {
// check if files which we ship are present
if !Path::new("powersOfTau28_hez_final_14.ptau").exists()
|| !Path::new("circuit.r1cs").exists()
{
return Err(Error::FileDoesNotExist);
}
// check if any of the files hasn't been generated. If so, regenerate
// all files.
if !Path::new("circuit_0000.zkey").exists()
|| !Path::new("circuit_final.zkey.prover").exists()
|| !Path::new("verification_key.json").exists()
{
let entropy = self.generate_entropy();
//return self.regenerate1(entropy);
return self.regenerate2(entropy);
}
Ok(())
}
// this will work only if snarkjs is in the PATH
fn regenerate1(&mut self, entropy: String) -> Result<(), Error> {
let output = Command::new("snarkjs")
.args([
"groth16",
"setup",
"circuit.r1cs",
"powersOfTau28_hez_final_14.ptau",
"circuit_0000.zkey",
])
.output();
self.check_output(output)?;
let output = Command::new("snarkjs")
.args([
"zkey",
"contribute",
"circuit_0000.zkey",
"circuit_final.zkey",
&format!("-e=\"{}\"", entropy),
])
.output();
self.check_output(output)?;
let output = Command::new("snarkjs")
.args([
"zkey",
"export",
"verificationkey",
"circuit_final.zkey",
"verification_key.json",
])
.output();
self.check_output(output)?;
Ok(())
}
// call a js wrapper which does what regenerate1() above does
fn regenerate2(&mut self, entropy: String) -> Result<(), Error> {
let output = Command::new("node")
.args(["onetimesetup.mjs", &entropy])
.output();
self.check_output(output)?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test() {
let mut ots = OneTimeSetup::new();
ots.setup().unwrap();
}
}
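For orientation, the snarkjs pipeline that regenerate1() drives, where each step consumes the previous step's output:

circuit.r1cs + powersOfTau28_hez_final_14.ptau --(groth16 setup)--> circuit_0000.zkey
circuit_0000.zkey + entropy --(zkey contribute)--> circuit_final.zkey
circuit_final.zkey --(zkey export verificationkey)--> verification_key.json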

prover.rs (deleted file, -458)

@@ -1,458 +0,0 @@
use aes::{Aes128, BlockDecrypt, NewBlockCipher};
use cipher::{consts::U16, generic_array::GenericArray, BlockCipher, BlockEncrypt};
use json::{object, stringify_pretty};
use num::{BigUint, FromPrimitive, ToPrimitive, Zero};
use rand::{thread_rng, Rng};
use std::env::temp_dir;
use std::fs;
use std::process::{Command, Output};
use uuid::Uuid;
#[derive(Debug)]
pub enum ProverError {
ProvingKeyNotFound,
FileSystemError,
FileDoesNotExist,
SnarkjsError,
}
use crate::{ProverPassthrough, ProverVirtual};
use super::BN254_PRIME;
fn check_output(output: Result<Output, std::io::Error>) -> Result<(), ProverError> {
if output.is_err() {
return Err(ProverError::SnarkjsError);
}
if !output.unwrap().status.success() {
return Err(ProverError::SnarkjsError);
}
Ok(())
}
pub struct Prover {
core: ProverCore<Box<Prover>>,
}
impl Prover {
pub fn new(field_prime: BigUint) -> Self {
let core = ProverCore::new(field_prime, None);
Self { core }
}
pub fn setup(&mut self, plaintext: Vec<u8>) -> Vec<BigUint> {
self.core.setup(plaintext)
}
}
impl ProverPassthrough for Prover {
fn get_core(&mut self) -> &mut ProverCore<Box<Prover>> {
&mut self.core
}
}
impl ProverVirtual for Prover {
fn set_proving_key(&mut self, key: Vec<u8>) -> Result<(), ProverError> {
let res = fs::write("circuit_final.zkey.verifier", key);
if res.is_err() {
return Err(ProverError::FileSystemError);
}
Ok(())
}
// hash the inputs with circomlibjs's Poseidon
fn poseidon(&mut self, inputs: Vec<BigUint>) -> BigUint {
// convert field elements into escaped strings
let strchunks: Vec<String> = inputs
.iter()
.map(|fe| String::from("\"") + &fe.to_string() + &String::from("\""))
.collect();
// convert to JSON array
let json = String::from("[") + &strchunks.join(", ") + &String::from("]");
println!("json {:?}", json);
let output = Command::new("node")
.args(["poseidon.mjs", &json])
.output()
.unwrap();
println!("{:?}", output);
// drop the trailing newline
let output = &output.stdout[0..output.stdout.len() - 1];
let s = String::from_utf8(output.to_vec()).unwrap();
let bi = s.parse::<BigUint>().unwrap();
//println!("poseidon output {:?}", bi);
bi
}
fn prove(&mut self, input: String) -> Result<Vec<u8>, ProverError> {
let mut path1 = temp_dir();
let mut path2 = temp_dir();
path1.push(format!("input.json.{}", Uuid::new_v4()));
path2.push(format!("proof.json.{}", Uuid::new_v4()));
fs::write(path1.clone(), input).expect("Unable to write file");
let output = Command::new("node")
.args([
"prove.mjs",
path1.to_str().unwrap(),
path2.to_str().unwrap(),
])
.output();
fs::remove_file(path1).expect("Unable to remove file");
check_output(output)?;
let proof = fs::read(path2.clone()).unwrap();
fs::remove_file(path2).expect("Unable to remove file");
Ok(proof)
}
}
// implementation of the Prover in the "label_sum" protocol (aka the User).
pub struct ProverCore<T> {
// bytes of the plaintext which was obtained from the garbled circuit
plaintext: Option<Vec<u8>>,
// the prime of the field in which Poseidon hash will be computed.
field_prime: BigUint,
// how many bits to pack into one field element
useful_bits: Option<usize>,
// the size of one chunk == useful_bits * Poseidon_width - 128 (salt size)
chunk_size: Option<usize>,
// We will compute a separate Poseidon hash on each chunk of the plaintext.
// Each chunk contains 16 field elements.
chunks: Option<Vec<[BigUint; 16]>>,
// Poseidon hashes of each chunk
hashes_of_chunks: Option<Vec<BigUint>>,
// each chunk's last 128 bits are used for the salt. Without the salt, hashes
// of plaintext with low entropy could be brute-forced.
salts: Option<Vec<BigUint>>,
// hashes of the sums of our arithmetic labels, one hash per chunk
label_sum_hashes: Option<Vec<BigUint>>,
caller: Option<Box<T>>,
}
impl<T> ProverCore<T> {
pub fn new(field_prime: BigUint, caller: Option<Box<T>>) -> Self {
if field_prime.bits() < 129 {
// last field element must be large enough to contain the 128-bit
// salt. In the future, if we need to support fields < 129 bits,
// we can put the salt into multiple field elements.
panic!("Error: expected a prime >= 129 bits");
}
Self {
plaintext: None,
field_prime,
useful_bits: None,
chunks: None,
salts: None,
hashes_of_chunks: None,
label_sum_hashes: None,
chunk_size: None,
caller,
}
}
// Returns hash digests which are the Prover's commitment to the plaintext
pub fn setup(&mut self, plaintext: Vec<u8>) -> Vec<BigUint> {
self.plaintext = Some(plaintext.clone());
let useful_bits = calculate_useful_bits(self.field_prime.clone());
self.useful_bits = Some(useful_bits);
let (chunk_size, chunks, salts) = self.plaintext_to_chunks(useful_bits, plaintext);
self.chunks = Some(chunks.clone());
self.salts = Some(salts);
self.chunk_size = Some(chunk_size);
let hashes = self.hash_chunks(chunks);
self.hashes_of_chunks = Some(hashes.clone());
hashes
}
// decrypt each encrypted arithm. label based on the p&p bit of our active
// binary label. Return the hash of the sum of all arithm. labels. Note
// that we compute a separate label sum for each chunk of plaintext.
pub fn compute_label_sum(
&mut self,
ciphertexts: &Vec<[Vec<u8>; 2]>,
labels: &Vec<u128>,
) -> Vec<BigUint> {
// if binary label's p&p bit is 0, decrypt the 1st ciphertext,
// otherwise decrypt the 2nd one.
assert!(ciphertexts.len() == labels.len());
assert!(self.plaintext.as_ref().unwrap().len() * 8 == ciphertexts.len());
let mut label_sum_hashes: Vec<BigUint> =
Vec::with_capacity(self.chunks.as_ref().unwrap().len());
let ct_iter = ciphertexts.chunks(self.chunk_size.unwrap());
let lb_iter = labels.chunks(self.chunk_size.unwrap());
// process a pair (chunk of ciphertexts, chunk of corresponding labels) at a time
for (chunk_ct, chunk_lb) in ct_iter.zip(lb_iter) {
// accumulate the label sum here
let mut label_sum = BigUint::from_u8(0).unwrap();
for (ct_pair, label) in chunk_ct.iter().zip(chunk_lb) {
let key = Aes128::new_from_slice(&label.to_be_bytes()).unwrap();
// choose which ciphertext to decrypt based on the point-and-permute bit
let mut ct = [0u8; 16];
if label & 1 == 0 {
ct.copy_from_slice(&ct_pair[0]);
} else {
ct.copy_from_slice(&ct_pair[1]);
}
let mut ct: GenericArray<u8, U16> = GenericArray::from(ct);
key.decrypt_block(&mut ct);
// add the decrypted arithmetic label to the sum
label_sum += BigUint::from_bytes_be(&ct);
}
println!("{:?} label_sum", label_sum);
label_sum_hashes.push(self.poseidon(vec![label_sum]));
}
self.label_sum_hashes = Some(label_sum_hashes.clone());
label_sum_hashes
}
pub fn create_zk_proof(
&mut self,
zero_sum: Vec<BigUint>,
mut deltas: Vec<BigUint>,
) -> Result<Vec<Vec<u8>>, ProverError> {
let label_sum_hashes = self.label_sum_hashes.as_ref().unwrap().clone();
// the last chunk was padded with zero plaintext, so we must also pad
// the deltas of the last chunk
let useful_bits = self.useful_bits.unwrap();
// the size of a chunk of plaintext not counting the salt
let chunk_size = useful_bits * 16 - 128;
let chunk_count = self.chunks.as_ref().unwrap().len();
// pad deltas with 0 values to make their count a multiple of a chunk size
let delta_pad_count = chunk_size * chunk_count - deltas.len();
deltas.extend(vec![BigUint::from_u8(0).unwrap(); delta_pad_count]);
// we will have as many proofs as there are chunks of plaintext
let mut proofs: Vec<Vec<u8>> = Vec::with_capacity(chunk_count);
let deltas_chunks: Vec<&[BigUint]> = deltas.chunks(chunk_size).collect();
for count in 0..chunk_count {
// convert plaintext to string
let pt_str: Vec<String> = self.chunks.as_ref().unwrap()[count]
.to_vec()
.iter()
.map(|bigint| bigint.to_string())
.collect();
// convert all deltas to strings
let deltas_str: Vec<String> =
deltas_chunks[count].iter().map(|v| v.to_string()).collect();
// split deltas into 16 groups corresponding to 16 field elements
let deltas_fes: Vec<&[String]> = deltas_str.chunks(useful_bits).collect();
// prepare input.json
let mut data = object! {
plaintext_hash: self.hashes_of_chunks.as_ref().unwrap()[count].to_string(),
label_sum_hash: label_sum_hashes[count].to_string(),
sum_of_zero_labels: zero_sum[count].to_string(),
plaintext: pt_str,
// first 15 fes form a separate input
delta: deltas_fes[0..15],
delta_last: deltas_fes[15]
};
let s = stringify_pretty(data, 4);
proofs.push(self.prove(s).unwrap());
}
Ok(proofs)
}
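// For BN254 (useful_bits == 253), each input.json produced above has this
// shape (all values are decimal strings; counts are per chunk):
// {
//     "plaintext_hash":     one string,
//     "label_sum_hash":     one string,
//     "sum_of_zero_labels": one string,
//     "plaintext":          16 strings, one per field element,
//     "delta":              15 arrays of 253 strings each,
//     "delta_last":         125 strings (253 minus the 128 salt bits)
// }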
// create chunks of plaintext where each chunk consists of 16 field elements.
// The last element's last 128 bits are reserved for the salt of the hash.
// If there is not enough plaintext to fill the whole chunk, we fill the gap
// with zero bits.
pub fn plaintext_to_chunks(
&mut self,
useful_bits: usize,
plaintext: Vec<u8>,
) -> (usize, Vec<[BigUint; 16]>, Vec<BigUint>) {
// the size of a chunk of plaintext not counting the salt
let chunk_size = useful_bits * 16 - 128;
// plaintext converted into bits
let mut bits = u8vec_to_boolvec(&plaintext);
// chunk count (rounded up)
let chunk_count = (bits.len() + (chunk_size - 1)) / chunk_size;
// extend bits with zeroes to fill the last chunk
bits.extend(vec![false; chunk_count * chunk_size - bits.len()]);
let mut chunks: Vec<[BigUint; 16]> = Vec::with_capacity(chunk_count);
let mut salts: Vec<BigUint> = Vec::with_capacity(chunk_count);
let mut rng = thread_rng();
for chunk_of_bits in bits.chunks(chunk_size) {
// [BigUint::default(); 16] won't work since BigUint doesn't implement
// the Copy trait; build the array element-by-element instead
let mut chunk: [BigUint; 16] = std::array::from_fn(|_| BigUint::default());
// split chunk into 16 field elements
for (i, fe_bits) in chunk_of_bits.chunks(useful_bits).enumerate() {
if i < 15 {
chunk[i] = BigUint::from_bytes_be(&boolvec_to_u8vec(&fe_bits));
} else {
// last field element's last 128 bits are for the salt
let salt = rng.gen::<[u8; 16]>();
salts.push(BigUint::from_bytes_be(&salt));
let mut bits_and_salt = fe_bits.to_vec();
bits_and_salt.extend(u8vec_to_boolvec(&salt).iter());
chunk[15] = BigUint::from_bytes_be(&boolvec_to_u8vec(&bits_and_salt));
};
}
chunks.push(chunk);
}
(chunk_size, chunks, salts)
}
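// Worked example for BN254 (useful_bits == 253):
//   chunk_size = 253 * 16 - 128 = 3920 bits (490 bytes) of plaintext;
//   a 1000-byte (8000-bit) plaintext spans ceil(8000 / 3920) = 3 chunks,
//   and the last chunk gets 3 * 3920 - 8000 = 3760 zero bits of padding.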
// hashes each chunk with Poseidon and returns a digest for each chunk
fn hash_chunks(&mut self, chunks: Vec<[BigUint; 16]>) -> Vec<BigUint> {
let res: Vec<BigUint> = chunks
.iter()
.map(|chunk| self.poseidon(chunk.to_vec()))
.collect();
res
}
}
/// Calculates how many bits of plaintext we will pack into one field element.
/// Essentially, this is field_prime bit length minus 1.
fn calculate_useful_bits(field_prime: BigUint) -> usize {
(field_prime.bits() - 1) as usize
}
#[test]
fn test_calculate_useful_bits() {
assert_eq!(calculate_useful_bits(BigUint::from_u16(13).unwrap()), 3);
assert_eq!(calculate_useful_bits(BigUint::from_u16(255).unwrap()), 7);
assert_eq!(
calculate_useful_bits(String::from(BN254_PRIME).parse::<BigUint>().unwrap()),
253
);
}
#[inline]
pub fn u8vec_to_boolvec(v: &[u8]) -> Vec<bool> {
let mut bv = Vec::with_capacity(v.len() * 8);
for byte in v.iter() {
for i in 0..8 {
bv.push(((byte >> (7 - i)) & 1) != 0);
}
}
bv
}
// Convert bits into bytes. The bits will be left-padded with zeroes to a
// multiple of 8.
#[inline]
pub fn boolvec_to_u8vec(bv: &[bool]) -> Vec<u8> {
let rem = bv.len() % 8;
let first_byte_bitsize = if rem == 0 { 8 } else { rem };
let offset = if rem == 0 { 0 } else { 1 };
let mut v = vec![0u8; bv.len() / 8 + offset];
// implicitly left-pad the first byte with zeroes
for (i, b) in bv[0..first_byte_bitsize].iter().enumerate() {
v[i / 8] |= (*b as u8) << (first_byte_bitsize - 1 - i);
}
for (i, b) in bv[first_byte_bitsize..].iter().enumerate() {
v[1 + i / 8] |= (*b as u8) << (7 - (i % 8));
}
v
}
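// Example: boolvec_to_u8vec(&[true, false, true]) == vec![5u8];
// the 3 bits are right-aligned into a single byte as 0b0000_0101.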
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_plaintext_to_chunks() {
// 137-bit prime. Plaintext will be packed into 136 bits (17 bytes).
let mut prime = vec![false; 137];
prime[0] = true;
let prime = boolvec_to_u8vec(&prime);
let prime = BigUint::from_bytes_be(&prime);
// plaintext will span 2 chunks
let mut plaintext = vec![0u8; 17 * 15 + 1 + 17 * 5];
// first chunk's field elements
for i in 0..15 {
// make the last byte of each field element unique
plaintext[i * 17 + 16] = i as u8;
}
// first chunk's last field element's plaintext is 1 zero byte. The
// rest of the field element will be filled with salt
plaintext[15 * 17] = 0u8;
// second chunk's field elements
for i in 0..5 {
// make the last byte of each field element unique
plaintext[(15 * 17 + 1) + i * 17 + 16] = (i + 16) as u8;
}
let mut prover = ProverCore::<()>::new(prime, None);
prover.setup(plaintext);
// Check chunk1 correctness
let chunk1: Vec<u128> = prover.chunks.clone().unwrap()[0][0..15]
.iter()
.map(|bigint| bigint.to_u128().unwrap())
.collect();
assert_eq!(chunk1, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]);
// the last field element must be random salt. We just check that the
// salt has been set, i.e. that it is not equal to 0
assert!(!prover.chunks.clone().unwrap()[0][15].eq(&BigUint::from_u8(0).unwrap()));
// Check chunk2 correctness
let chunk2: Vec<u128> = prover.chunks.clone().unwrap()[1][0..15]
.iter()
.map(|bigint| bigint.to_u128().unwrap())
.collect();
assert_eq!(chunk2, [16, 17, 18, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// the last field element must be random salt. We just check that the
// salt has been set, i.e. that it is not equal to 0
assert!(!prover.chunks.clone().unwrap()[1][15].eq(&BigUint::from_u8(0).unwrap()));
}
#[test]
fn test_hash_chunks() {
// 137-bit prime. Plaintext will be packed into 136 bits (17 bytes).
let mut prime = vec![false; 137];
prime[0] = true;
let prime = boolvec_to_u8vec(&prime);
let prime = BigUint::from_bytes_be(&prime);
// plaintext will span 2 chunks
let mut plaintext = vec![0u8; 17 * 15 + 1 + 17 * 5];
let mut prover = ProverCore::<()>::new(prime, None);
//LsumProver::hash_chunks(&mut prover);
}
#[test]
fn test_json() {
let mut prime = vec![false; 137];
prime[0] = true;
let prime = boolvec_to_u8vec(&prime);
let prime = BigUint::from_bytes_be(&prime);
let v = vec![BigUint::from_u8(0).unwrap(), BigUint::from_u8(1).unwrap()];
let v_str: Vec<String> = v.iter().map(|bigint| bigint.to_string()).collect();
let mut data = object! {
foo:v_str
};
println!("{:?}", data.dump());
}
}

verifier.rs (deleted file, -286)

@@ -1,286 +0,0 @@
use aes::{Aes128, NewBlockCipher};
use cipher::{consts::U16, generic_array::GenericArray, BlockCipher, BlockEncrypt};
use json::{array, object, stringify, stringify_pretty, JsonValue};
use num::{BigUint, FromPrimitive, ToPrimitive, Zero};
use rand::{thread_rng, Rng};
use std::env::temp_dir;
use std::fs;
use std::path::Path;
use std::process::{Command, Output};
use uuid::Uuid;
use crate::VerifierCore;
#[derive(Debug)]
pub enum VerifierError {
ProvingKeyNotFound,
FileSystemError,
FileDoesNotExist,
SnarkjsError,
}
fn check_output(output: &Result<Output, std::io::Error>) -> Result<(), VerifierError> {
if output.is_err() {
return Err(VerifierError::SnarkjsError);
}
if !output.as_ref().unwrap().status.success() {
return Err(VerifierError::SnarkjsError);
}
Ok(())
}
// implementation of the Verifier in the "label sum" protocol (aka the Notary).
pub struct LsumVerifier {
// hashes for each chunk of Prover's plaintext
plaintext_hashes: Option<Vec<BigUint>>,
labelsum_hashes: Option<Vec<BigUint>>,
// if set to true, then we must send the proving key to the Prover
// before this protocol begins. Otherwise, it is assumed that the Prover
// already has the proving key from a previous interaction with us.
proving_key_needed: bool,
deltas: Option<Vec<BigUint>>,
zero_sums: Option<Vec<BigUint>>,
ciphertexts: Option<Vec<[Vec<u8>; 2]>>,
useful_bits: usize,
}
impl VerifierCore for LsumVerifier {
fn get_proving_key(&mut self) -> Result<Vec<u8>, VerifierError> {
if !Path::new("circuit_final.zkey.prover").exists() {
return Err(VerifierError::ProvingKeyNotFound);
}
let res = fs::read("circuit_final.zkey.prover");
if res.is_err() {
return Err(VerifierError::FileSystemError);
}
Ok(res.unwrap())
}
fn verify(
&mut self,
proof: Vec<u8>,
deltas: Vec<String>,
plaintext_hash: BigUint,
labelsum_hash: BigUint,
zero_sum: BigUint,
) -> Result<bool, VerifierError> {
// public.json is a flat array
let mut public_json: Vec<String> = Vec::new();
public_json.push(plaintext_hash.to_string());
public_json.push(labelsum_hash.to_string());
public_json.extend::<Vec<String>>(deltas);
public_json.push(zero_sum.to_string());
let s = stringify(JsonValue::from(public_json.clone()));
// write into temp files and delete the files after verification
let mut path1 = temp_dir();
let mut path2 = temp_dir();
path1.push(format!("public.json.{}", Uuid::new_v4()));
path2.push(format!("proof.json.{}", Uuid::new_v4()));
fs::write(path1.clone(), s).expect("Unable to write file");
fs::write(path2.clone(), proof).expect("Unable to write file");
let output = Command::new("node")
.args([
"verify.mjs",
path1.to_str().unwrap(),
path2.to_str().unwrap(),
])
.output();
fs::remove_file(path1).expect("Unable to remove file");
fs::remove_file(path2).expect("Unable to remove file");
check_output(&output)?;
// a non-zero exit status was already turned into an error by check_output
Ok(true)
}
}
impl LsumVerifier {
pub fn new(proving_key_needed: bool) -> Self {
Self {
plaintext_hashes: None,
labelsum_hashes: None,
proving_key_needed,
deltas: None,
zero_sums: None,
ciphertexts: None,
useful_bits: 253,
}
}
// Convert binary labels into encrypted arithmetic labels, and prepare the
// deltas and zero sums which will later go into public.json for verification.
pub fn setup(&mut self, labels: &Vec<[u128; 2]>) {
// generate as many 128-bit arithm. label pairs as there are plaintext bits.
// The 128-bit size is for convenience, to be able to encrypt a label with
// one call to AES.
// To keep the handling simple, we want to avoid a negative delta; that's
// why W_0 and delta must be 127-bit values and W_1 is set to W_0 + delta.
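// (Sketch of the relation the ZK circuit will later check, per chunk:
//  sum(active labels) == sum(W_0) + sum(bit_i * delta_i)
//                     == zero_sum + <plaintext bits, deltas>,
//  so once zero_sum and the deltas are revealed, proving the label sum
//  in ZK amounts to proving knowledge of the plaintext bits.)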
let bitsize = labels.len();
let chunk_size = 253 * 16 - 128;
// count of chunks rounded up
let chunk_count = (bitsize + (chunk_size - 1)) / chunk_size;
let mut zero_sums: Vec<BigUint> = Vec::with_capacity(chunk_count);
let mut deltas: Vec<BigUint> = Vec::with_capacity(bitsize);
let mut all_arithm_labels: Vec<[BigUint; 2]> = Vec::with_capacity(bitsize);
for count in 0..chunk_count {
// calculate zero_sum for each chunk of plaintext separately
let mut zero_sum = BigUint::from_u8(0).unwrap();
// end of range is different for the last chunk
let end = if count < chunk_count - 1 {
(count + 1) * chunk_size
} else {
// compute the size of the gap at the end of the last chunk
let last_size = bitsize % chunk_size;
let gap_size = if last_size == 0 {
0
} else {
chunk_size - last_size
};
(count + 1) * chunk_size - gap_size
};
all_arithm_labels.append(
&mut (count * chunk_size..end)
.map(|_| {
let zero_label = random_bigint(127);
let delta = random_bigint(127);
let one_label = zero_label.clone() + delta.clone();
zero_sum += zero_label.clone();
deltas.push(delta);
[zero_label, one_label]
})
.collect(),
);
zero_sums.push(zero_sum);
}
self.zero_sums = Some(zero_sums);
self.deltas = Some(deltas);
// encrypt each arithmetic label using a corresponding binary label as a key
// place ciphertexts in an order based on binary label's p&p bit
let ciphertexts: Vec<[Vec<u8>; 2]> = labels
.iter()
.zip(all_arithm_labels)
.map(|(bin_pair, arithm_pair)| {
let zero_key = Aes128::new_from_slice(&bin_pair[0].to_be_bytes()).unwrap();
let one_key = Aes128::new_from_slice(&bin_pair[1].to_be_bytes()).unwrap();
let mut label0 = [0u8; 16];
let mut label1 = [0u8; 16];
let ap0 = arithm_pair[0].to_bytes_be();
let ap1 = arithm_pair[1].to_bytes_be();
// need to zero-pad on the left
label0[16 - ap0.len()..].copy_from_slice(&ap0);
label1[16 - ap1.len()..].copy_from_slice(&ap1);
let mut label0: GenericArray<u8, U16> = GenericArray::from(label0);
let mut label1: GenericArray<u8, U16> = GenericArray::from(label1);
zero_key.encrypt_block(&mut label0);
one_key.encrypt_block(&mut label1);
// ciphertext 0 and ciphertext 1
let ct0 = label0.to_vec();
let ct1 = label1.to_vec();
// place ar. labels based on the point and permute bit of bin. label 0
if (bin_pair[0] & 1) == 0 {
[ct0, ct1]
} else {
[ct1, ct0]
}
})
.collect();
self.ciphertexts = Some(ciphertexts);
}
// receive hashes of plaintext and reveal the encrypted arithmetic labels
pub fn receive_pt_hashes(&mut self, hashes: Vec<BigUint>) -> Vec<[Vec<u8>; 2]> {
self.plaintext_hashes = Some(hashes);
self.ciphertexts.as_ref().unwrap().clone()
}
// receive the hash commitment to the Prover's sum of labels and reveal all
// deltas and zero_sums.
pub fn receive_labelsum_hash(&mut self, hashes: Vec<BigUint>) -> (Vec<BigUint>, Vec<BigUint>) {
self.labelsum_hashes = Some(hashes);
(
self.deltas.as_ref().unwrap().clone(),
self.zero_sums.as_ref().unwrap().clone(),
)
}
pub fn verify_many(&mut self, proofs: Vec<Vec<u8>>) -> Result<bool, VerifierError> {
// The last chunk was padded with zero plaintext, so we must also pad the
// deltas of the last chunk. (The elements of public.json must be written
// in the exact order snarkjs expects them in; see verify().)
let useful_bits = self.useful_bits;
// the size of a chunk of plaintext not counting the salt
let chunk_size = useful_bits * 16 - 128;
let chunk_count = (self.deltas.as_ref().unwrap().len() + (chunk_size - 1)) / chunk_size;
assert!(proofs.len() == chunk_count);
let mut deltas = self.deltas.as_ref().unwrap().clone();
// pad deltas with 0 values to make their count a multiple of a chunk size
let delta_pad_count = chunk_size * chunk_count - self.deltas.as_ref().unwrap().len();
deltas.extend(vec![BigUint::from_u8(0).unwrap(); delta_pad_count]);
let deltas_chunks: Vec<&[BigUint]> = deltas.chunks(chunk_size).collect();
for count in 0..chunk_count {
// There are as many deltas as there are bits in the chunk of the
// plaintext (not counting the salt)
let delta_str: Vec<String> =
deltas_chunks[count].iter().map(|v| v.to_string()).collect();
let res = self.verify(
proofs[count].clone(),
delta_str,
self.plaintext_hashes.as_ref().unwrap()[count].clone(),
self.labelsum_hashes.as_ref().unwrap()[count].clone(),
self.zero_sums.as_ref().unwrap()[count].clone(),
);
if res.is_err() {
return Ok(false);
}
}
Ok(true)
}
}
fn random_bigint(bitsize: usize) -> BigUint {
assert!(bitsize <= 128);
let r: [u8; 16] = thread_rng().gen();
// take only those bits which we need
BigUint::from_bytes_be(&boolvec_to_u8vec(&u8vec_to_boolvec(&r)[0..bitsize]))
}
#[inline]
pub fn u8vec_to_boolvec(v: &[u8]) -> Vec<bool> {
let mut bv = Vec::with_capacity(v.len() * 8);
for byte in v.iter() {
for i in 0..8 {
bv.push(((byte >> (7 - i)) & 1) != 0);
}
}
bv
}
// Convert bits into bytes. The bits will be left-padded with zeroes to a
// multiple of 8.
#[inline]
pub fn boolvec_to_u8vec(bv: &[bool]) -> Vec<u8> {
let rem = bv.len() % 8;
let first_byte_bitsize = if rem == 0 { 8 } else { rem };
let offset = if rem == 0 { 0 } else { 1 };
let mut v = vec![0u8; bv.len() / 8 + offset];
// implicitly left-pad the first byte with zeroes
for (i, b) in bv[0..first_byte_bitsize].iter().enumerate() {
v[i / 8] |= (*b as u8) << (first_byte_bitsize - 1 - i);
}
for (i, b) in bv[first_byte_bitsize..].iter().enumerate() {
v[1 + i / 8] |= (*b as u8) << (7 - (i % 8));
}
v
}