fixed some paths, added an error-troubleshooting section to the readme

Divide-By-0
2023-01-12 17:52:00 +07:00
parent bfe478cd36
commit 8e334184ae
4 changed files with 35 additions and 13 deletions

View File: README.md

@@ -209,7 +209,7 @@ RSA + SHA + Regex + Masking with up to 1024 byte message lengths is 1,392,219 co
The full email header circuit above with the 7-byte packing into signals is 1,408,571 constraints, with 163 public signals, and the verifier script fits in the 24 KB contract limit.
The full email header and body check circuit, with 7-byte packing and final public output compression, is **3,115,057 constraints**, with 21 public signals. zkey size was originally 1.75 GB, and with tar.gz compression it is now 982 MB.
The full email header and body check circuit, with 7-byte packing and final public output compression, is **3,115,057 constraints**, with 21 public signals. zkey size was originally 1.75 GB, and with tar.gz compression it is now 982 MB. However, decompression doesn't work in the browser.
In the browser, on a 2019 Intel Mac on Chrome, proving uses 7.3/8 cores. zk-gen takes 384 s, groth16 prove takes 375 s, and witness calculation takes 9 s.
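The compression step can be reproduced with Python's tarfile, the same approach the upload script later in this commit uses (a minimal sketch; the build/email/email.zkey path and filename are assumptions, not confirmed by this repo):
```
import os
import tarfile

# NOTE: assumed example path for the zkey; substitute the real one.
zkey_path = "build/email/email.zkey"
archive_path = "email.zkey.tar.gz"

# Compress with gzip, the same 'w:gz' mode the upload script uses.
with tarfile.open(archive_path, "w:gz") as tar:
    tar.add(zkey_path, arcname=os.path.basename(zkey_path))

# Compare raw vs. compressed size (the 1.75 GB -> 982 MB above).
raw_mb = os.path.getsize(zkey_path) / 1e6
gz_mb = os.path.getsize(archive_path) / 1e6
print(f"raw: {raw_mb:.0f} MB, tar.gz: {gz_mb:.0f} MB")
```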
@@ -226,6 +226,20 @@ ls
git push --set-upstream origin main --force
```
## Possible Errors
### No available storage method found.
If, when using snarkjs, you see an error like this:
```
[ERROR] snarkJS: Error: No available storage method found. [full path]
/node_modules/localforage/dist/localforage.js:2762:25
```
Fix it by installing this pinned snarkjs fork, then rerun:
`yarn add snarkjs@git+https://github.com/vb7401/snarkjs.git#24981febe8826b6ab76ae4d76cf7f9142919d2b8`
## To-Do
- Make the frontend Solidity calls work

View File

@@ -4,19 +4,24 @@ import tarfile
import time
# Set up the client for the AWS S3 service
s3 = boto3.client('s3')  # Ask Aayush for the access key and secret access key
# Set the local build directories and the name of the AWS bucket
zkey_dir = '../build/email/'
wasm_dir = '../build/email/email_js/'
bucket_name = 'zkemail-zkey-chunks' # us-east-1
source = '~/Documents/projects/zk-email-verify'
source = os.path.expanduser('~/zk-email-verify-old')  # expanduser: Python does not expand '~' on its own
zkey_dir = source + '/build/email/'
wasm_dir = source + '/build/email/email_js/'
bucket_name = 'zkemail-zkey-chunks'  # us-east-1
def upload_to_s3(filename, dir=""):
    with open(dir + filename, 'rb') as file:
        print("Starting upload...")
        s3.upload_fileobj(file, bucket_name, filename, ExtraArgs={'ACL': 'public-read', 'ContentType': 'binary/octet-stream'})
        s3.upload_fileobj(file, bucket_name, filename, ExtraArgs={
            'ACL': 'public-read', 'ContentType': 'binary/octet-stream'})
        print("Done uploading!")
# Loop through the files in the local build directories
for dir in [zkey_dir, wasm_dir]:
    for file in os.listdir(dir):
@@ -27,8 +32,10 @@ for dir in [zkey_dir, wasm_dir]:
        # Create a tar.gz archive of the file
        tar_file_name = file + '.tar.gz'
        with tarfile.open(tar_file_name, 'w:gz') as tar_file:
            print("Compressing: ", dir + file)
            tar_file.add(dir + file)
            source_file_path = dir + file
            print("Compressing: ", source_file_path)
            tar_file.add(source_file_path,
                         arcname=os.path.basename(source_file_path))
        # Upload the tar.gz archive to the AWS bucket, overwriting any existing file with the same name
        upload_to_s3(tar_file_name)
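For completeness, a hypothetical download-side counterpart, sketched under the same assumptions (the email.zkey.tar.gz artifact name is illustrative; the bucket name and public-read ACL come from the script above):
```
import tarfile

import boto3

s3 = boto3.client('s3')
bucket_name = 'zkemail-zkey-chunks'  # us-east-1, same bucket as above
file_name = 'email.zkey.tar.gz'      # assumed artifact name

# Objects were uploaded with a public-read ACL, so they can also be
# fetched with a plain HTTPS GET; boto3 is used here for symmetry.
s3.download_file(bucket_name, file_name, file_name)

# Files were added with arcname=os.path.basename(...), so they extract
# flat instead of recreating the uploader's full source path.
with tarfile.open(file_name, 'r:gz') as tar:
    tar.extractall(path='.')
```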

View File: package.json

@@ -57,7 +57,7 @@
"test": "react-scripts test",
"eject": "react-scripts eject",
"compile": "node circuits/scripts/compile.js",
"compile-all": "npx tsx circuits/scripts/generate_input.ts && yarn compile email true"
"compile-all": "npx tsx src/scripts/generate_input.ts && yarn compile email true"
},
"eslintConfig": {
"extends": [

View File: src/scripts/generate_input.ts

@@ -7,6 +7,7 @@ import * as fs from "fs";
var Cryo = require("cryo");
const pki = require("node-forge").pki;
const email_file = "kaylee_phone_number_email_twitter.eml"; // "./test_email.txt", "./twitter_msg.eml"
export interface ICircuitInputs {
  modulus?: string[];
  signature?: string[];
@@ -76,6 +77,7 @@ async function sha256Pad(prehash_prepad_m: Uint8Array, maxShaBytes: number): Pro
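  // Zero-fill in 32-bit words until the message reaches maxShaBytes, the fixed input length the circuit expects.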
  while (prehash_prepad_m.length < maxShaBytes) {
    prehash_prepad_m = mergeUInt8Arrays(prehash_prepad_m, int32toBytes(0));
  }
  // console.log(prehash_prepad_m.length, maxShaBytes);
  assert(prehash_prepad_m.length === maxShaBytes, "Padding to max length did not complete properly!");
  return [prehash_prepad_m, messageLen];
@@ -250,7 +252,7 @@ export async function generate_inputs(email: Buffer, eth_address: string): Promi
}
async function do_generate() {
const email = fs.readFileSync("./twitter_msg.eml");
const email = fs.readFileSync(email_file);
console.log(email);
const gen_inputs = await generate_inputs(email, "0x0000000000000000000000000000000000000000");
console.log(JSON.stringify(gen_inputs));
@@ -276,8 +278,7 @@ export async function insert13Before10(a: Uint8Array): Promise<Uint8Array> {
}
async function debug_file() {
  // const email = fs.readFileSync("./test_email.txt");
  const email = fs.readFileSync("./twitter_msg.eml");
  const email = fs.readFileSync(email_file);
  console.log(Uint8Array.from(email));
  // Key difference: file load has 13 10 (CR LF), web version has just 10 (LF)
}
@@ -287,6 +288,6 @@ if (typeof require !== "undefined" && require.main === module) {
  // debug_file();
  const circuitInputs = do_generate();
  console.log("Writing to file...");
  fs.writeFileSync(`./circuits/inputs/input_twitter.json`, JSON.stringify(circuitInputs), { flag: "w" });
  circuitInputs.then((inputs) => fs.writeFileSync(`./circuits/inputs/input_twitter.json`, JSON.stringify(inputs), { flag: "w" }));
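  // Note: do_generate() is async; serializing circuitInputs directly would JSON.stringify the pending Promise as "{}", hence the .then(...) above.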
  // gen_test();
}