merkle tree inclusion proof in /contracts and /circuits

This commit is contained in:
yssf-io
2024-01-03 12:12:43 +01:00
parent d0d9ecf9a6
commit ec450fd2e9
26 changed files with 136174 additions and 197 deletions

View File

@@ -0,0 +1,5 @@
pragma circom 2.1.5;
// Standalone entry point exposing only the Merkle inclusion gadget,
// so the tree can be proven/verified in isolation (see the only_tree build script).
include "./tree.circom";
// 16 levels => capacity of 2^16 = 65536 leaves.
component main = MerkleTreeInclusionProof(16);

View File

@@ -0,0 +1,40 @@
pragma circom 2.1.5;
include "../../node_modules/circomlib/circuits/poseidon.circom";
include "../../node_modules/circomlib/circuits/mux1.circom";
// Recomputes the Merkle root implied by `leaf` and its authentication path.
// At each level a MultiMux1 orders (current hash, sibling) according to the
// path bit, and Poseidon(2) hashes the ordered pair. The caller is expected
// to constrain `root` against a known commitment.
template MerkleTreeInclusionProof(nLevels) {
signal input leaf;
// pathIndices[i]: position bit at level i — 0 means the running hash is the
// left child, 1 means it is the right child. Constrained to boolean below.
signal input pathIndices[nLevels];
// siblings[i]: the other child node at level i.
signal input siblings[nLevels];
signal output root;
component poseidons[nLevels];
component mux[nLevels];
// hashes[0] is the leaf; hashes[i + 1] is the node computed at level i.
signal hashes[nLevels + 1];
hashes[0] <== leaf;
for (var i = 0; i < nLevels; i++) {
// Force each path index to 0 or 1 so a malicious prover cannot pick other values.
pathIndices[i] * (1 - pathIndices[i]) === 0;
poseidons[i] = Poseidon(2);
mux[i] = MultiMux1(2);
// MultiMux1 selects column s: s = 0 -> out = (hashes[i], siblings[i]),
// s = 1 -> out = (siblings[i], hashes[i]).
mux[i].c[0][0] <== hashes[i];
mux[i].c[0][1] <== siblings[i];
mux[i].c[1][0] <== siblings[i];
mux[i].c[1][1] <== hashes[i];
mux[i].s <== pathIndices[i];
poseidons[i].inputs[0] <== mux[i].out[0];
poseidons[i].inputs[1] <== mux[i].out[1];
hashes[i + 1] <== poseidons[i].out;
}
root <== hashes[nLevels];
}

View File

@@ -3,17 +3,60 @@ pragma circom 2.1.5;
include "../node_modules/circomlib/circuits/poseidon.circom";
include "./helpers/extract.circom";
include "./passport_verifier.circom";
include "./merkle_tree/tree.circom";
template ProofOfPassport(n, k) {
template ProofOfPassport(n, k, nLevels, pubkeySize) {
signal input mrz[93]; // formatted mrz (5 + 88) chars
signal input dataHashes[297];
signal input eContentBytes[104];
signal input pubkey[k];
signal input signature[k];
signal input signatureAlgorithm;
signal input pubkey[pubkeySize];
signal input pathIndices[nLevels];
signal input siblings[nLevels];
signal input root;
signal input reveal_bitmap[88];
signal input address;
// Converting pub_key (modulus) into 11 chunks of 192 bits, assuming original n, k are 64 and 32.
// This is because Poseidon circuit only supports an array of 16 elements.
var k3_chunked_size = 11; // Since ceil(32 / 3) in integer division is 11
signal pubkey_hash_input[k3_chunked_size];
for(var i = 0; i < k3_chunked_size; i++) {
if(i == k3_chunked_size - 1) {
if(k % 3 == 1) {
pubkey_hash_input[i] <== pubkey[3*i];
} else if(k % 3 == 2) {
pubkey_hash_input[i] <== pubkey[3*i] + (1<<n) * pubkey[3*i + 1];
} else {
pubkey_hash_input[i] <== pubkey[3*i] + (1<<n) * pubkey[3*i + 1] + (1<<(2*n)) * pubkey[3*i + 2];
}
} else {
pubkey_hash_input[i] <== pubkey[3*i] + (1<<n) * pubkey[3*i + 1] + (1<<(2*n)) * pubkey[3*i + 2];
}
}
// leaf is poseidon(signatureAlgorithm, pubkey[pubkeySize])
signal leaf_hash_input[1 + k3_chunked_size];
leaf_hash_input[0] <== signatureAlgorithm;
for (var i = 0; i < k3_chunked_size; i++) {
leaf_hash_input[i+1] <== pubkey_hash_input[i];
}
signal leaf <== Poseidon(1 + k3_chunked_size)(leaf_hash_input);
// log("Leaf in circuit:", leaf);
// Verify inclusion in merkle tree
signal computedRoot <== MerkleTreeInclusionProof(nLevels)(leaf, pathIndices, siblings);
root === computedRoot;
// sha256WithRSAEncryption_65537 is the only sigAlg supported right now
signatureAlgorithm === 1;
pubkeySize === k;
// Verify passport
component PV = PassportVerifier(n, k);
PV.mrz <== mrz;
@@ -32,25 +75,18 @@ template ProofOfPassport(n, k) {
// make nullifier public;
// we take nullifier = signature[0, 1] which is 64 + 64 bits long, so chance of collision is 2^-128
signal output nullifier <== signature[0] * 2**64 + signature[1];
// we don't do Poseidon hash cuz it makes arkworks crash for obscure reasons
// we output the pubkey as 11 field elements. 9 is doable also cuz ceil(254/31) = 9
signal output pubkey_packed[11];
for (var i = 0; i < 11; i++) {
if (i < 10) {
pubkey_packed[i] <== pubkey[3*i] * 64 * 64 + pubkey[3*i + 1] * 64 + pubkey[3*i + 2];
} else {
pubkey_packed[i] <== pubkey[3*i] * 64 * 64;
}
}
}
component main { public [ address ] } = ProofOfPassport(64, 32);
component main { public [ address, root ] } = ProofOfPassport(64, 32, 16, 32);
// Us:
// 1 + 1 + 3 + 1
// pubkey_hash + nullifier + reveal_packed + address
// 1 + 3 + 1 + 1
// nullifier + reveal_packed + address + root
// Them:
// 1 + 3 + 1
// pubkey_hash + reveal_twitter_packed + address
// pubkey_hash + reveal_twitter_packed + address

View File

@@ -10,15 +10,20 @@
"@types/chai-as-promised": "^7.1.6",
"@types/node": "^20.6.3",
"@types/node-forge": "^1.3.5",
"@zk-kit/imt": "^2.0.0-beta",
"chai-as-promised": "^7.1.1",
"circom_tester": "^0.0.20",
"circomlib": "^2.0.5",
"circomlibjs": "^0.1.7",
"js-sha256": "^0.10.1",
"node-forge": "^1.3.1",
"poseidon-lite": "^0.2.0",
"snarkjs": "^0.7.1",
"typescript": "^5.2.2"
},
"devDependencies": {
"@types/chai": "^4.3.6",
"@types/circomlibjs": "^0.1.6",
"@types/mocha": "^10.0.1",
"chai": "^4.3.8",
"mocha": "^10.2.0",

View File

@@ -40,9 +40,9 @@ yarn snarkjs groth16 setup build/proof_of_passport.r1cs build/powersOfTau28_hez_
echo "building vkey"
echo "test random" | yarn snarkjs zkey contribute build/proof_of_passport.zkey build/proof_of_passport_final.zkey
yarn snarkjs zkey export verificationkey build/proof_of_passport_final.zkey build/verification_key.json
yarn snarkjs zkey export verificationkey build/proof_of_passport_final.zkey build/proof_of_passport_vkey.json
yarn snarkjs zkey export solidityverifier build/proof_of_passport_final.zkey build/Verifier.sol
cp build/Verifier.sol ../contracts/contracts/Verifier.sol
# fix: "co" is not a command — "cp" was intended (copy the final zkey to the mobile app assets)
cp build/proof_of_passport_final.zkey ../app/ark-circom-passport/passport/
echo "copied Verifier.sol to contracts and proof_of_passport_final.zkey to ark-circom-passport"
echo "copied Verifier.sol to contracts and proof_of_passport_final.zkey to ark-circom-passport"

View File

@@ -0,0 +1,21 @@
# Compile the standalone Merkle-tree circuit and run the groth16 setup.
# Abort on the first failing command so a broken circom compile or a failed
# ptau download is never silently fed into the later snarkjs steps.
set -e

mkdir -p build
cd build
# The powers-of-tau file is ~large; only download it once.
if [ ! -f powersOfTau28_hez_final_20.ptau ]; then
    echo "Download power of tau...."
    wget https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau
    echo "Finished download!"
else
    echo "Powers of tau file already downloaded... Skip download action!"
fi
cd ..

echo "compiling circuit"
circom circuits/merkle_tree/only_tree.circom --r1cs --sym --wasm --output build

echo "building zkey"
yarn snarkjs groth16 setup build/only_tree.r1cs build/powersOfTau28_hez_final_20.ptau build/only_tree.zkey

echo "building vkey"
# Single throwaway contribution; fine for a dev/test circuit, NOT for production.
echo "test random" | yarn snarkjs zkey contribute build/only_tree.zkey build/only_tree_final.zkey
yarn snarkjs zkey export verificationkey build/only_tree_final.zkey build/only_tree_verification_key.json

View File

@@ -1,12 +1,17 @@
import { describe } from 'mocha'
import chai, { assert, expect } from 'chai'
import chaiAsPromised from 'chai-as-promised'
import { hash, toUnsignedByte, arraysAreEqual, bytesToBigDecimal, formatAndConcatenateDataHashes, formatMrz, splitToWords } from '../../common/src/utils/utils'
import { hash, toUnsignedByte, bytesToBigDecimal, formatAndConcatenateDataHashes, formatMrz, splitToWords, formatSigAlg, bigIntToChunkedBytes } from '../../common/src/utils/utils'
import { groth16 } from 'snarkjs'
import { DataHash } from '../../common/src/utils/types'
import { getPassportData } from '../../common/src/utils/passportData'
import { attributeToPosition } from '../../common/src/constants/constants'
const fs = require('fs');
import { DataHash, PassportData } from '../../common/src/utils/types'
import { genSampleData } from '../../common/src/utils/passportData'
import { buildPubkeyTree } from '../../common/src/utils/pubkeyTree'
import { attributeToPosition, SignatureAlgorithm } from '../../common/src/constants/constants'
import { poseidon12 } from "poseidon-lite"
import { IMT } from '@zk-kit/imt'
import fs from 'fs'
import path from 'path'
const wasm_tester = require("../node_modules/circom_tester").wasm;
chai.use(chaiAsPromised)
@@ -16,9 +21,24 @@ describe('Circuit tests', function () {
this.timeout(0)
let inputs: any;
let tree: IMT;
let pubkeys: any[];
let passportData: PassportData;
this.beforeAll(async () => {
const passportData = getPassportData();
pubkeys = JSON.parse(fs.readFileSync("../common/pubkeys/publicKeysParsed.json") as unknown as string)
passportData = genSampleData();
// for testing purposes
pubkeys = pubkeys.slice(0, 100);
pubkeys.push({
signatureAlgorithm: passportData.signatureAlgorithm,
issuer: 'C = TS, O = Government of Syldavia, OU = Ministry of tests, CN = CSCA-TEST',
modulus: passportData.pubKey.modulus,
exponent: passportData.pubKey.exponent
})
tree = buildPubkeyTree(pubkeys);
const formattedMrz = formatMrz(passportData.mrz);
const mrzHash = hash(formatMrz(passportData.mrz));
@@ -27,51 +47,79 @@ describe('Circuit tests', function () {
passportData.dataGroupHashes as DataHash[],
);
const concatenatedDataHashesHashDigest = hash(concatenatedDataHashes);
assert(
arraysAreEqual(passportData.eContent.slice(72, 72 + 32), concatenatedDataHashesHashDigest),
'concatenatedDataHashesHashDigest is at the right place in passportData.eContent'
)
const reveal_bitmap = Array(88).fill('1');
const sigAlgFormatted = formatSigAlg(passportData.signatureAlgorithm, passportData.pubKey.exponent)
const pubkeyChunked = bigIntToChunkedBytes(BigInt(passportData.pubKey.modulus as string), 192, 11);
const leaf = poseidon12([SignatureAlgorithm[sigAlgFormatted], ...pubkeyChunked])
// console.log('leaf', leaf)
const index = tree.indexOf(leaf)
// console.log('index', index)
const proof = tree.createProof(index)
// console.log("proof", proof)
// console.log("verifyProof", tree.verifyProof(proof))
inputs = {
mrz: formattedMrz.map(byte => String(byte)),
reveal_bitmap: reveal_bitmap.map(byte => String(byte)),
dataHashes: concatenatedDataHashes.map(toUnsignedByte).map(byte => String(byte)),
eContentBytes: passportData.eContent.map(toUnsignedByte).map(byte => String(byte)),
pubkey: splitToWords(
BigInt(passportData.pubKey.modulus),
BigInt(64),
BigInt(32)
),
signature: splitToWords(
BigInt(bytesToBigDecimal(passportData.encryptedDigest)),
BigInt(64),
BigInt(32)
),
address: "0x70997970c51812dc3a010c7d01b50e0d17dc79c8", // sample address
signatureAlgorithm: SignatureAlgorithm[sigAlgFormatted],
pubkey: splitToWords(
BigInt(passportData.pubKey.modulus as string),
BigInt(64),
BigInt(32)
),
pathIndices: proof.pathIndices,
siblings: proof.siblings.flat(),
root: tree.root,
reveal_bitmap: reveal_bitmap.map(byte => String(byte)),
address: "0x70997970c51812dc3a010c7d01b50e0d17dc79c8",
}
console.log('inputs', inputs)
})
describe('Proof', function() {
it('should prove and verify with valid inputs', async function () {
const { proof, publicSignals } = await groth16.fullProve(
// testing with wasm_tester
const circuit = await wasm_tester(
path.join(__dirname, "../circuits/proof_of_passport.circom"),
{ include: ["node_modules"] },
);
const w = await circuit.calculateWitness(inputs);
console.log("witness calculated");
await circuit.checkConstraints(w);
console.log("finished checking constraints");
// proving
const { proof: zk_proof, publicSignals } = await groth16.fullProve(
inputs,
"build/proof_of_passport_js/proof_of_passport.wasm",
"build/proof_of_passport_final.zkey"
)
const vKey = JSON.parse(fs.readFileSync("build/verification_key.json"));
// console.log('proof done');
console.log('zk_proof', zk_proof);
console.log('publicSignals', publicSignals);
const vKey = JSON.parse(fs.readFileSync("build/proof_of_passport_vkey.json") as unknown as string);
const verified = await groth16.verify(
vKey,
publicSignals,
proof
zk_proof
)
assert(verified == true, 'Should verifiable')
assert(verified == true, 'Should verify')
console.log('verified', verified)
})
it('should fail to prove with invalid mrz', async function () {
@@ -122,7 +170,7 @@ describe('Circuit tests', function () {
publicSignals[publicSignals.length - 1] = BigInt("0xC5B4F2A7Ea7F675Fca6EF724d6E06FFB40dFC93F").toString();
const vKey = JSON.parse(fs.readFileSync("build/verification_key.json"));
const vKey = JSON.parse(fs.readFileSync("build/proof_of_passport_vkey.json").toString());
return expect(await groth16.verify(
vKey,
publicSignals,
@@ -167,7 +215,7 @@ describe('Circuit tests', function () {
console.log('proof done');
const vKey = JSON.parse(fs.readFileSync("build/verification_key.json"));
const vKey = JSON.parse(fs.readFileSync("build/proof_of_passport_vkey.json").toString());
const verified = await groth16.verify(
vKey,
publicSignals,
@@ -207,9 +255,5 @@ describe('Circuit tests', function () {
}
});
});
})
})
})

165
circuits/test/tree.test.ts Normal file
View File

@@ -0,0 +1,165 @@
import { describe } from 'mocha'
import chai, { assert } from 'chai'
import chaiAsPromised from 'chai-as-promised'
import { groth16 } from 'snarkjs'
import fs from 'fs'
import { IMT } from "@zk-kit/imt"
import { poseidon12, poseidon2, poseidon8 } from "poseidon-lite"
import { genSampleData } from '../../common/src/utils/passportData'
import { bigIntToChunkedBytes, formatSigAlg } from '../../common/src/utils/utils'
chai.use(chaiAsPromised)
// DEV: shrink the pubkey registry to 100 entries (plus the sample passport key)
// so tree construction stays fast during local testing.
const DEV = true
// Must match the nLevels the circuit was compiled with (MerkleTreeInclusionProof(16)).
const depth = 16
// Value used for empty IMT nodes.
const zeroValue = 0
// Numeric tag for each supported signature algorithm; it is hashed into the
// leaf together with the pubkey so the circuit can later constrain it.
// NOTE(review): this appears to duplicate SignatureAlgorithm in
// common/src/constants/constants (imported by proof.test.ts) — confirm and
// consider importing from there to avoid the two drifting apart.
enum SignatureAlgorithm {
sha256WithRSAEncryption_65537 = 1,
sha256WithRSAEncryption_3 = 2,
sha1WithRSAEncryption_65537 = 3,
rsassaPss_65537 = 4,
rsassaPss_3 = 5,
ecdsa_with_SHA384 = 6,
ecdsa_with_SHA1 = 7,
ecdsa_with_SHA256 = 8,
ecdsa_with_SHA512 = 9,
sha512WithRSAEncryption_65537 = 10
}
// Builds an incremental Merkle tree (IMT) over the pubkey registry, then proves
// and verifies inclusion of the sample passport's pubkey with the only_tree
// circuit artifacts (built by the only_tree build script).
describe('Merkle tree tests', function () {
  this.timeout(0)
  let tree: IMT;
  // Registry of CSCA public keys; in DEV it is trimmed and the sample
  // passport's own pubkey is appended so its inclusion proof can succeed.
  let pubkeys = JSON.parse(fs.readFileSync("../common/pubkeys/publicKeysParsed.json") as unknown as string)
  const passportData = genSampleData();

  this.beforeAll(async () => {
    // log niche exponents
    // for(let i = 0; i < pubkeys.length; i++) {
    //   if (pubkeys[i].exponent && pubkeys[i].exponent !== '65537') {
    //     console.log('i:', i, pubkeys[i].signatureAlgorithm, pubkeys[i].exponent);
    //   }
    // }
    // log ecdsa pubkeys
    // for(let i = 0; i < pubkeys.length; i++) {
    //   if (!pubkeys[i].exponent) {
    //     console.log('i:', i, pubkeys[i]);
    //   }
    // }
    if (DEV) {
      pubkeys = pubkeys.slice(0, 100);
      pubkeys.push({
        signatureAlgorithm: passportData.signatureAlgorithm,
        issuer: 'C = TS, O = Government of Syldavia, OU = Ministry of tests, CN = CSCA-TEST',
        modulus: passportData.pubKey.modulus,
        exponent: passportData.pubKey.exponent
      })
    }

    tree = new IMT(poseidon2, depth, zeroValue)
    for (let i = 0; i < pubkeys.length; i++) {
      const pubkey = pubkeys[i]
      const sigAlgFormatted = formatSigAlg(pubkey.signatureAlgorithm, pubkey.exponent)
      let leaf: bigint | undefined;
      // Progress indicator for large registries.
      if (i % 3000 === 0 && i !== 0) {
        console.log('Processing pubkey number', i, "over", pubkeys.length);
      }
      if (
        sigAlgFormatted === "sha256WithRSAEncryption_65537"
        || sigAlgFormatted === "sha256WithRSAEncryption_3"
        || sigAlgFormatted === "sha1WithRSAEncryption_65537"
        || sigAlgFormatted === "rsassaPss_65537"
        || sigAlgFormatted === "rsassaPss_3"
        || sigAlgFormatted === "sha512WithRSAEncryption_65537"
      ) {
        // Converting pubkey.modulus into 11 chunks of 192 bits, assuming it is originally 2048 bits.
        // This is because Poseidon circuit only supports an array of 16 elements, and field size is 254.
        const pubkeyChunked = bigIntToChunkedBytes(BigInt(pubkey.modulus), 192, 11);
        try {
          // leaf is poseidon(signatureAlgorithm, ...pubkey)
          leaf = poseidon12([SignatureAlgorithm[sigAlgFormatted], ...pubkeyChunked])
        } catch (err) {
          console.log('err', err, i, sigAlgFormatted, pubkey)
        }
      } else if (
        sigAlgFormatted === "ecdsa_with_SHA1"
        || sigAlgFormatted === "ecdsa_with_SHA384"
        || sigAlgFormatted === "ecdsa_with_SHA256"
        || sigAlgFormatted === "ecdsa_with_SHA512"
      ) {
        try {
          leaf = poseidon8([SignatureAlgorithm[sigAlgFormatted], pubkey.pub, pubkey.prime, pubkey.a, pubkey.b, pubkey.generator, pubkey.order, pubkey.cofactor])
        } catch (err) {
          console.log('err', err, i, sigAlgFormatted, pubkey)
        }
      } else {
        console.log('no leaf for this weird signature:', i, sigAlgFormatted)
        continue
      }
      // BUGFIX: if the hash above threw, `leaf` is still undefined here, and
      // the original code inserted `undefined` into the tree, corrupting it.
      // Skip this pubkey instead.
      if (leaf === undefined) {
        continue
      }
      tree.insert(leaf)
    }
  })

  describe('Tree only', function () {
    it('should prove and verify with valid inputs', async function () {
      // Rebuild the sample passport's leaf exactly as the tree builder did.
      const sigAlgFormatted = formatSigAlg(passportData.signatureAlgorithm, passportData.pubKey.exponent)
      const pubkeyChunked = bigIntToChunkedBytes(BigInt(passportData.pubKey.modulus as string), 192, 11);
      const leaf = poseidon12([SignatureAlgorithm[sigAlgFormatted], ...pubkeyChunked])
      console.log('leaf', leaf)
      const index = tree.indexOf(leaf)
      console.log('index', index)
      const proof = tree.createProof(index)
      console.log("proof", proof)
      console.log("verifyProof", tree.verifyProof(proof))

      const inputs = {
        leaf: proof.leaf,
        pathIndices: proof.pathIndices,
        siblings: proof.siblings.flat(),
      }
      console.log('inputs', inputs)

      const { proof: zk_proof, publicSignals } = await groth16.fullProve(
        inputs,
        "build/only_tree_js/only_tree.wasm",
        "build/only_tree_final.zkey"
      )
      // console.log('proof done');
      console.log('zk_proof', zk_proof);
      console.log('publicSignals', publicSignals);
      const vKey = JSON.parse(fs.readFileSync("build/only_tree_verification_key.json") as unknown as string);
      const verified = await groth16.verify(
        vKey,
        publicSignals,
        zk_proof
      )
      // Fixed assertion messages: they previously read 'Should verifiable'
      // (grammar; proof.test.ts already says 'Should verify') and
      // 'Should be 125' (a debugging leftover that described nothing).
      assert(verified == true, 'Should verify')
      assert(publicSignals[0] == tree.root, 'Circuit output should equal the tree root')
      console.log('verified', verified)
      console.log('publicSignals[0]', publicSignals[0])
      console.log('tree.root', tree.root)
    })
  })
})

View File

@@ -2,7 +2,7 @@
"compilerOptions": {
"resolveJsonModule": true,
"esModuleInterop": true,
"target": "ES2015",
"target": "ES2020",
"moduleResolution": "node"
}
}

File diff suppressed because it is too large Load Diff