Mirror of https://github.com/tlsnotary/tlsn.git (synced 2026-01-12 08:08:29 -05:00)

Compare commits: dev...feat/integ (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | c1ee91e63b | |
| | 57f85dcdf2 | |
| | 727dff3ac9 | |
| | 830d0ab0d5 | |
.github/workflows/ci.yml (vendored, 2 lines changed)
@@ -21,7 +21,7 @@ env:
   # - https://github.com/privacy-ethereum/mpz/issues/178
   # 32 seems to be big enough for the foreseeable future
   RAYON_NUM_THREADS: 32
-  RUST_VERSION: 1.92.0
+  RUST_VERSION: 1.91.1
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

 jobs:
Cargo.lock (generated, 244 lines changed)
@@ -174,9 +174,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"

 [[package]]
 name = "alloy-consensus"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b6440213a22df93a87ed512d2f668e7dc1d62a05642d107f82d61edc9e12370"
+checksum = "2e318e25fb719e747a7e8db1654170fc185024f3ed5b10f86c08d448a912f6e2"
 dependencies = [
  "alloy-eips",
  "alloy-primitives",
@@ -201,9 +201,9 @@ dependencies = [

 [[package]]
 name = "alloy-consensus-any"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15d0bea09287942405c4f9d2a4f22d1e07611c2dbd9d5bf94b75366340f9e6e0"
+checksum = "364380a845193a317bcb7a5398fc86cdb66c47ebe010771dde05f6869bf9e64a"
 dependencies = [
  "alloy-consensus",
  "alloy-eips",
@@ -253,9 +253,9 @@ dependencies = [

 [[package]]
 name = "alloy-eips"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bd2c7ae05abcab4483ce821f12f285e01c0b33804e6883dd9ca1569a87ee2be"
+checksum = "a4c4d7c5839d9f3a467900c625416b24328450c65702eb3d8caff8813e4d1d33"
 dependencies = [
  "alloy-eip2124",
  "alloy-eip2930",
@@ -288,9 +288,9 @@ dependencies = [

 [[package]]
 name = "alloy-json-rpc"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "003f46c54f22854a32b9cc7972660a476968008ad505427eabab49225309ec40"
+checksum = "f72cf87cda808e593381fb9f005ffa4d2475552b7a6c5ac33d087bf77d82abd0"
 dependencies = [
  "alloy-primitives",
  "alloy-sol-types",
@@ -303,9 +303,9 @@ dependencies = [

 [[package]]
 name = "alloy-network"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f4029954d9406a40979f3a3b46950928a0fdcfe3ea8a9b0c17490d57e8aa0e3"
+checksum = "12aeb37b6f2e61b93b1c3d34d01ee720207c76fe447e2a2c217e433ac75b17f5"
 dependencies = [
  "alloy-consensus",
  "alloy-consensus-any",
@@ -329,9 +329,9 @@ dependencies = [

 [[package]]
 name = "alloy-network-primitives"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7805124ad69e57bbae7731c9c344571700b2a18d351bda9e0eba521c991d1bcb"
+checksum = "abd29ace62872083e30929cd9b282d82723196d196db589f3ceda67edcc05552"
 dependencies = [
  "alloy-consensus",
  "alloy-eips",
@@ -391,9 +391,9 @@ dependencies = [

 [[package]]
 name = "alloy-rpc-types-any"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b43c1622aac2508d528743fd4cfdac1dea92d5a8fa894038488ff7edd0af0b32"
+checksum = "6a63fb40ed24e4c92505f488f9dd256e2afaed17faa1b7a221086ebba74f4122"
 dependencies = [
  "alloy-consensus-any",
  "alloy-rpc-types-eth",
@@ -402,9 +402,9 @@ dependencies = [

 [[package]]
 name = "alloy-rpc-types-eth"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed5fafb741c19b3cca4cdd04fa215c89413491f9695a3e928dee2ae5657f607e"
+checksum = "9eae0c7c40da20684548cbc8577b6b7447f7bf4ddbac363df95e3da220e41e72"
 dependencies = [
  "alloy-consensus",
  "alloy-consensus-any",
@@ -423,9 +423,9 @@ dependencies = [

 [[package]]
 name = "alloy-serde"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6f180c399ca7c1e2fe17ea58343910cad0090878a696ff5a50241aee12fc529"
+checksum = "c0df1987ed0ff2d0159d76b52e7ddfc4e4fbddacc54d2fbee765e0d14d7c01b5"
 dependencies = [
  "alloy-primitives",
  "serde",
@@ -434,9 +434,9 @@ dependencies = [

 [[package]]
 name = "alloy-signer"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecc39ad2c0a3d2da8891f4081565780703a593f090f768f884049aa3aa929cbc"
+checksum = "6ff69deedee7232d7ce5330259025b868c5e6a52fa8dffda2c861fb3a5889b24"
 dependencies = [
  "alloy-primitives",
  "async-trait",
@@ -449,9 +449,9 @@ dependencies = [

 [[package]]
 name = "alloy-signer-local"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "930e17cb1e46446a193a593a3bfff8d0ecee4e510b802575ebe300ae2e43ef75"
+checksum = "72cfe0be3ec5a8c1a46b2e5a7047ed41121d360d97f4405bb7c1c784880c86cb"
 dependencies = [
  "alloy-consensus",
  "alloy-network",
@@ -551,9 +551,9 @@ dependencies = [

 [[package]]
 name = "alloy-tx-macros"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae109e33814b49fc0a62f2528993aa8a2dd346c26959b151f05441dc0b9da292"
+checksum = "333544408503f42d7d3792bfc0f7218b643d968a03d2c0ed383ae558fb4a76d0"
 dependencies = [
  "darling 0.21.3",
  "proc-macro2",
@@ -1783,9 +1783,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"

 [[package]]
 name = "cc"
-version = "1.2.48"
+version = "1.2.49"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a"
+checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215"
 dependencies = [
  "find-msvc-tools",
  "shlex",
@@ -2026,7 +2026,7 @@ checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
 [[package]]
 name = "clmul"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "bytemuck",
  "cfg-if",
@@ -2051,7 +2051,7 @@ checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
 dependencies = [
  "serde",
  "termcolor",
- "unicode-width 0.1.14",
+ "unicode-width",
 ]

 [[package]]
@@ -2096,19 +2096,6 @@ dependencies = [
  "crossbeam-utils",
 ]

-[[package]]
-name = "console"
-version = "0.15.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
-dependencies = [
- "encode_unicode",
- "libc",
- "once_cell",
- "unicode-width 0.2.2",
- "windows-sys 0.59.0",
-]
-
 [[package]]
 name = "console_error_panic_hook"
 version = "0.1.7"
@@ -2913,12 +2900,6 @@ dependencies = [
  "zeroize",
 ]

-[[package]]
-name = "encode_unicode"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
-
 [[package]]
 name = "enum-ordinalize"
 version = "4.3.2"
@@ -3766,9 +3747,9 @@ checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"

 [[package]]
 name = "icu_properties"
-version = "2.1.1"
+version = "2.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
+checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
 dependencies = [
  "icu_collections",
  "icu_locale_core",
@@ -3780,9 +3761,9 @@ dependencies = [

 [[package]]
 name = "icu_properties_data"
-version = "2.1.1"
+version = "2.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
+checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"

 [[package]]
 name = "icu_provider"
@@ -3896,19 +3877,6 @@ dependencies = [
  "serde_core",
 ]

-[[package]]
-name = "indicatif"
-version = "0.17.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
-dependencies = [
- "console",
- "number_prefix",
- "portable-atomic",
- "unicode-width 0.2.2",
- "web-time 1.1.0",
-]
-
 [[package]]
 name = "inout"
 version = "0.1.4"
@@ -4257,7 +4225,7 @@ checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
 [[package]]
 name = "matrix-transpose"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "thiserror 1.0.69",
 ]
@@ -4302,9 +4270,9 @@ dependencies = [

 [[package]]
 name = "mio"
-version = "1.1.0"
+version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873"
+checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
 dependencies = [
  "libc",
  "wasi",
@@ -4314,7 +4282,7 @@ dependencies = [
 [[package]]
 name = "mpz-circuits"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "mpz-circuits-core",
  "mpz-circuits-data",
@@ -4323,7 +4291,7 @@ dependencies = [
 [[package]]
 name = "mpz-circuits-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "bincode 1.3.3",
  "itybity 0.3.1",
@@ -4338,7 +4306,7 @@ dependencies = [
 [[package]]
 name = "mpz-circuits-data"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "bincode 1.3.3",
  "mpz-circuits-core",
@@ -4348,7 +4316,7 @@ dependencies = [
 [[package]]
 name = "mpz-cointoss"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "futures",
  "mpz-cointoss-core",
@@ -4361,7 +4329,7 @@ dependencies = [
 [[package]]
 name = "mpz-cointoss-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "mpz-core",
  "opaque-debug",
@@ -4372,7 +4340,7 @@ dependencies = [
 [[package]]
 name = "mpz-common"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "bytes",
@@ -4392,7 +4360,7 @@ dependencies = [
 [[package]]
 name = "mpz-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "aes 0.9.0-rc.2",
  "bcs",
@@ -4418,7 +4386,7 @@ dependencies = [
 [[package]]
 name = "mpz-fields"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "ark-ff 0.4.2",
  "ark-secp256r1",
@@ -4438,7 +4406,7 @@ dependencies = [
 [[package]]
 name = "mpz-garble"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "derive_builder 0.11.2",
@@ -4453,7 +4421,7 @@ dependencies = [
  "mpz-vm-core",
  "opaque-debug",
  "rand 0.9.2",
- "rangeset 0.2.0",
+ "rangeset",
  "serde",
  "serio",
  "thiserror 1.0.69",
@@ -4464,7 +4432,7 @@ dependencies = [
 [[package]]
 name = "mpz-garble-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "aes 0.9.0-rc.2",
  "bitvec",
@@ -4484,7 +4452,7 @@ dependencies = [
  "rand 0.9.2",
  "rand_chacha 0.9.0",
  "rand_core 0.9.3",
- "rangeset 0.2.0",
+ "rangeset",
  "rayon",
  "serde",
  "serde_arrays",
@@ -4495,7 +4463,7 @@ dependencies = [
 [[package]]
 name = "mpz-hash"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "blake3",
  "itybity 0.3.1",
@@ -4508,7 +4476,7 @@ dependencies = [
 [[package]]
 name = "mpz-ideal-vm"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "futures",
@@ -4516,7 +4484,7 @@ dependencies = [
  "mpz-core",
  "mpz-memory-core",
  "mpz-vm-core",
- "rangeset 0.2.0",
+ "rangeset",
  "serde",
  "serio",
  "thiserror 1.0.69",
@@ -4525,14 +4493,14 @@ dependencies = [
 [[package]]
 name = "mpz-memory-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "blake3",
  "futures",
  "itybity 0.3.1",
  "mpz-core",
  "rand 0.9.2",
- "rangeset 0.2.0",
+ "rangeset",
  "serde",
  "thiserror 1.0.69",
 ]
@@ -4540,7 +4508,7 @@ dependencies = [
 [[package]]
 name = "mpz-ole"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "futures",
@@ -4558,7 +4526,7 @@ dependencies = [
 [[package]]
 name = "mpz-ole-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "hybrid-array",
  "itybity 0.3.1",
@@ -4574,7 +4542,7 @@ dependencies = [
 [[package]]
 name = "mpz-ot"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "cfg-if",
@@ -4597,7 +4565,7 @@ dependencies = [
 [[package]]
 name = "mpz-ot-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "aes 0.9.0-rc.2",
  "blake3",
@@ -4625,10 +4593,26 @@ dependencies = [
  "zerocopy",
 ]

+[[package]]
+name = "mpz-predicate"
+version = "0.1.0-alpha.14-pre"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
+dependencies = [
+ "bytes",
+ "mpz-circuits",
+ "rand 0.9.2",
+ "rand_chacha 0.9.0",
+ "rand_core 0.9.3",
+ "rangeset",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+]
+
 [[package]]
 name = "mpz-share-conversion"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "mpz-common",
@@ -4644,7 +4628,7 @@ dependencies = [
 [[package]]
 name = "mpz-share-conversion-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "mpz-common",
  "mpz-core",
@@ -4658,7 +4642,7 @@ dependencies = [
 [[package]]
 name = "mpz-vm-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "futures",
@@ -4671,7 +4655,7 @@ dependencies = [
 [[package]]
 name = "mpz-zk"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "async-trait",
  "blake3",
@@ -4689,7 +4673,7 @@ dependencies = [
 [[package]]
 name = "mpz-zk-core"
 version = "0.1.0-alpha.4"
-source = "git+https://github.com/privacy-ethereum/mpz?rev=9c343f8#9c343f86d386bc1360d6ac0a37eb1af65f48216a"
+source = "git+https://github.com/privacy-ethereum/mpz?branch=feat%2Fmpz-bool-type#9405caab5d65be1c7e7dd39c6026a9efb57a598d"
 dependencies = [
  "blake3",
  "cfg-if",
@@ -4698,7 +4682,7 @@ dependencies = [
  "mpz-core",
  "mpz-memory-core",
  "mpz-vm-core",
- "rangeset 0.2.0",
+ "rangeset",
  "rayon",
  "serde",
  "thiserror 1.0.69",
@@ -5043,12 +5027,6 @@ dependencies = [
  "syn 2.0.111",
 ]

-[[package]]
-name = "number_prefix"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
-
 [[package]]
 name = "nybbles"
 version = "0.4.6"
@@ -5409,12 +5387,6 @@ dependencies = [
  "universal-hash 0.5.1",
 ]

-[[package]]
-name = "portable-atomic"
-version = "1.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f59e70c4aef1e55797c2e8fd94a4f2a973fc972cfde0e0b05f683667b0cd39dd"
-
 [[package]]
 name = "potential_utf"
 version = "0.1.4"
@@ -5486,7 +5458,7 @@ version = "3.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
 dependencies = [
- "toml_edit 0.23.7",
+ "toml_edit 0.23.9",
 ]

 [[package]]
@@ -5803,15 +5775,6 @@ version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "acbbbbea733ec66275512d0b9694f34102e7d5406fdbe2ad8d21b28dce92887c"

-[[package]]
-name = "rangeset"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc7af00a06ad692080d87495a904677592c662610edb82b4fc8782f4ed2f01f"
-dependencies = [
- "serde",
-]
-
 [[package]]
 name = "rangeset"
 version = "0.4.0"
@@ -5901,9 +5864,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"

 [[package]]
 name = "reqwest"
-version = "0.12.24"
+version = "0.12.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
+checksum = "b6eff9328d40131d43bd911d42d79eb6a47312002a4daefc9e37f17e74a7701a"
 dependencies = [
  "base64 0.22.1",
  "bytes",
@@ -5930,7 +5893,7 @@ dependencies = [
  "tokio",
  "tokio-rustls",
  "tower",
- "tower-http 0.6.7",
+ "tower-http 0.6.8",
  "tower-service",
  "url",
  "wasm-bindgen",
@@ -6788,9 +6751,9 @@ dependencies = [

 [[package]]
 name = "simd-adler32"
-version = "0.3.7"
+version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
+checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"

 [[package]]
 name = "sized-chunks"
@@ -6854,9 +6817,9 @@ checksum = "bceb57dc07c92cdae60f5b27b3fa92ecaaa42fe36c55e22dbfb0b44893e0b1f7"

 [[package]]
 name = "sourcemap"
-version = "9.2.2"
+version = "9.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e22afbcb92ce02d23815b9795523c005cb9d3c214f8b7a66318541c240ea7935"
+checksum = "37ccaaa78a0ca68b20f8f711eaa2522a00131c48a3de5b892ca5c36cec1ce9bb"
 dependencies = [
  "base64-simd",
  "bitvec",
@@ -6879,7 +6842,7 @@ dependencies = [
  "httparse",
  "pest",
  "pest_derive",
- "rangeset 0.4.0",
+ "rangeset",
  "serde",
  "thiserror 1.0.69",
 ]
@@ -7032,9 +6995,9 @@ dependencies = [

 [[package]]
 name = "sys_traits"
-version = "0.1.19"
+version = "0.1.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1495a604cd38eeb30c408724966cd31ca1b68b5a97e3afc474c0d719bfeec5a"
+checksum = "6b61f4a25d0baba25511bed00c39c199d9a19cfd8107f4472724b72a84f530b1"
 dependencies = [
  "sys_traits_macros",
 ]
@@ -7268,12 +7231,13 @@ dependencies = [
  "mpz-memory-core",
  "mpz-ole",
  "mpz-ot",
+ "mpz-predicate",
  "mpz-vm-core",
  "mpz-zk",
  "once_cell",
  "opaque-debug",
  "rand 0.9.2",
- "rangeset 0.4.0",
+ "rangeset",
  "rstest",
  "rustls-pki-types",
  "rustls-webpki 0.103.8",
@@ -7314,7 +7278,6 @@ dependencies = [
  "p256",
  "rand 0.9.2",
  "rand06-compat",
- "rangeset 0.4.0",
  "rstest",
  "serde",
  "thiserror 1.0.69",
@@ -7353,11 +7316,12 @@ dependencies = [
  "generic-array",
  "hex",
  "itybity 0.2.1",
+ "mpz-predicate",
  "opaque-debug",
  "rand 0.9.2",
  "rand_chacha 0.9.0",
  "rand_core 0.9.3",
- "rangeset 0.4.0",
+ "rangeset",
  "rs_merkle",
  "rstest",
  "rustls-pki-types",
@@ -7392,7 +7356,7 @@ dependencies = [
  "mpz-core",
  "mpz-ideal-vm",
  "mpz-vm-core",
- "rangeset 0.4.0",
+ "rangeset",
  "serde",
  "serio",
  "thiserror 1.0.69",
@@ -7413,12 +7377,15 @@ dependencies = [
  "hyper",
  "hyper-util",
  "k256",
+ "mpz-predicate",
  "noir",
+ "rangeset",
  "serde",
  "serde_json",
+ "spansy",
  "tls-server-fixture",
  "tlsn",
  "tlsn-core",
  "tlsn-formats",
  "tlsn-server-fixture",
  "tlsn-server-fixture-certs",
@@ -7504,7 +7471,6 @@ dependencies = [
  "csv",
  "duct",
  "futures",
- "indicatif",
  "ipnet",
  "serde_json",
  "serio",
@@ -7636,7 +7602,7 @@ dependencies = [
  "tokio",
  "tokio-util",
  "tower",
- "tower-http 0.6.7",
+ "tower-http 0.6.8",
  "tower-service",
  "tracing",
  "tracing-subscriber",
@@ -7897,9 +7863,9 @@ dependencies = [

 [[package]]
 name = "toml_edit"
-version = "0.23.7"
+version = "0.23.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d"
+checksum = "5d7cbc3b4b49633d57a0509303158ca50de80ae32c265093b24c414705807832"
 dependencies = [
  "indexmap 2.12.1",
  "toml_datetime 0.7.3",
@@ -7965,9 +7931,9 @@ dependencies = [

 [[package]]
 name = "tower-http"
-version = "0.6.7"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456"
+checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
 dependencies = [
  "bitflags",
  "bytes",
@@ -8206,12 +8172,6 @@ version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"

-[[package]]
-name = "unicode-width"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
-
 [[package]]
 name = "unicode-xid"
 version = "0.2.6"
Cargo.toml (30 lines changed)
@@ -66,21 +66,21 @@ tlsn-harness-runner = { path = "crates/harness/runner" }
 tlsn-wasm = { path = "crates/wasm" }
 tlsn = { path = "crates/tlsn" }

-mpz-circuits = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-circuits-data = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-memory-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-common = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-vm-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-garble = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-garble-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-ole = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-ot = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-share-conversion = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-fields = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-zk = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-hash = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
-mpz-ideal-vm = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
+mpz-circuits = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-common = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-core = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-fields = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-garble = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-garble-core = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-hash = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-ideal-vm = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-memory-core = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-ole = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-ot = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-predicate = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-share-conversion = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-vm-core = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }
+mpz-zk = { git = "https://github.com/privacy-ethereum/mpz", branch = "feat/mpz-bool-type" }

 rangeset = { version = "0.4" }
 serio = { version = "0.2" }
@@ -27,7 +27,6 @@ alloy-primitives = { version = "1.3.1", default-features = false }
 alloy-signer = { version = "1.0", default-features = false }
 alloy-signer-local = { version = "1.0", default-features = false }
 rand06-compat = { workspace = true }
-rangeset = { workspace = true }
 rstest = { workspace = true }
 tlsn-core = { workspace = true, features = ["fixtures"] }
 tlsn-data-fixtures = { workspace = true }
@@ -5,7 +5,7 @@ use rand::{Rng, rng};
 use tlsn_core::{
     connection::{ConnectionInfo, ServerEphemKey},
     hash::HashAlgId,
-    transcript::TranscriptCommitment,
+    transcript::{TranscriptCommitment, encoding::EncoderSecret},
 };

 use crate::{
@@ -25,6 +25,7 @@ pub struct Sign {
     connection_info: Option<ConnectionInfo>,
     server_ephemeral_key: Option<ServerEphemKey>,
     cert_commitment: ServerCertCommitment,
+    encoder_secret: Option<EncoderSecret>,
     extensions: Vec<Extension>,
     transcript_commitments: Vec<TranscriptCommitment>,
 }
@@ -86,6 +87,7 @@ impl<'a> AttestationBuilder<'a, Accept> {
                 connection_info: None,
                 server_ephemeral_key: None,
                 cert_commitment,
+                encoder_secret: None,
                 transcript_commitments: Vec::new(),
                 extensions,
             },
@@ -106,6 +108,12 @@ impl AttestationBuilder<'_, Sign> {
         self
     }

+    /// Sets the secret for encoding commitments.
+    pub fn encoder_secret(&mut self, secret: EncoderSecret) -> &mut Self {
+        self.state.encoder_secret = Some(secret);
+        self
+    }
+
     /// Adds an extension to the attestation.
     pub fn extension(&mut self, extension: Extension) -> &mut Self {
         self.state.extensions.push(extension);
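The setter added above slots into the Notary-side build flow alongside `connection_info` and `server_ephemeral_key`. A minimal sketch of the intended call pattern, mirroring the updated `tests/api.rs` later in this diff (`encoder_secret()` is the `tlsn_core::fixtures` helper; a production Notary would supply the secret from its own encoder):

    let mut attestation_builder = Attestation::builder(attestation_config)
        .accept_request(request)
        .unwrap();

    attestation_builder
        .connection_info(connection_info)
        .server_ephemeral_key(server_ephemeral_key)
        // New in this change: commit the encoder secret into the attestation body.
        .encoder_secret(encoder_secret())
        .transcript_commitments(vec![TranscriptCommitment::Encoding(encoding_commitment)]);

    let attestation = attestation_builder.build(&provider).unwrap();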
@@ -129,6 +137,7 @@ impl AttestationBuilder<'_, Sign> {
             connection_info,
             server_ephemeral_key,
             cert_commitment,
+            encoder_secret,
             extensions,
             transcript_commitments,
         } = self.state;
@@ -159,6 +168,7 @@ impl AttestationBuilder<'_, Sign> {
                 AttestationBuilderError::new(ErrorKind::Field, "handshake data was not set")
             })?),
             cert_commitment: field_id.next(cert_commitment),
+            encoder_secret: encoder_secret.map(|secret| field_id.next(secret)),
             extensions: extensions
                 .into_iter()
                 .map(|extension| field_id.next(extension))
@@ -243,7 +253,8 @@ mod test {
     use rstest::{fixture, rstest};
     use tlsn_core::{
         connection::{CertBinding, CertBindingV1_2},
-        fixtures::ConnectionFixture,
+        fixtures::{ConnectionFixture, encoding_provider},
+        hash::Blake3,
         transcript::Transcript,
     };
     use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
@@ -274,7 +285,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } = request_fixture(transcript, connection, Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection,
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation_config = AttestationConfig::builder()
             .supported_signature_algs([SignatureAlgId::SECP256R1])
@@ -293,7 +310,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } = request_fixture(transcript, connection, Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection,
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation_config = AttestationConfig::builder()
             .supported_signature_algs([SignatureAlgId::SECP256K1])
@@ -313,7 +336,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } = request_fixture(transcript, connection, Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection,
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation_builder = Attestation::builder(attestation_config)
             .accept_request(request)
@@ -334,8 +363,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let mut attestation_builder = Attestation::builder(attestation_config)
             .accept_request(request)
@@ -359,8 +393,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let mut attestation_builder = Attestation::builder(attestation_config)
             .accept_request(request)
@@ -393,7 +432,9 @@ mod test {

         let RequestFixture { request, .. } = request_fixture(
             transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
             connection.clone(),
+            Blake3::default(),
             vec![Extension {
                 id: b"foo".to_vec(),
                 value: b"bar".to_vec(),
@@ -420,7 +461,9 @@ mod test {

         let RequestFixture { request, .. } = request_fixture(
             transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
             connection.clone(),
+            Blake3::default(),
             vec![Extension {
                 id: b"foo".to_vec(),
                 value: b"bar".to_vec(),
@@ -2,7 +2,11 @@
 use tlsn_core::{
     connection::{CertBinding, CertBindingV1_2},
     fixtures::ConnectionFixture,
-    transcript::{Transcript, TranscriptCommitConfigBuilder, TranscriptCommitment},
+    hash::HashAlgorithm,
+    transcript::{
+        Transcript, TranscriptCommitConfigBuilder, TranscriptCommitment,
+        encoding::{EncodingProvider, EncodingTree},
+    },
 };

 use crate::{
@@ -17,13 +21,16 @@ use crate::{
 /// A Request fixture used for testing.
 #[allow(missing_docs)]
 pub struct RequestFixture {
+    pub encoding_tree: EncodingTree,
     pub request: Request,
 }

 /// Returns a request fixture for testing.
 pub fn request_fixture(
     transcript: Transcript,
+    encodings_provider: impl EncodingProvider,
     connection: ConnectionFixture,
+    encoding_hasher: impl HashAlgorithm,
     extensions: Vec<Extension>,
 ) -> RequestFixture {
     let provider = CryptoProvider::default();
@@ -43,9 +50,15 @@ pub fn request_fixture(
         .unwrap();
     let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();

-    let mut builder = RequestConfig::builder();
+    // Prover constructs encoding tree.
+    let encoding_tree = EncodingTree::new(
+        &encoding_hasher,
+        transcripts_commitment_config.iter_encoding(),
+        &encodings_provider,
+    )
+    .unwrap();

-    builder.transcript_commit(transcripts_commitment_config);
+    let mut builder = RequestConfig::builder();

     for extension in extensions {
         builder.extension(extension);
@@ -61,7 +74,10 @@ pub fn request_fixture(

     let (request, _) = request_builder.build(&provider).unwrap();

-    RequestFixture { request }
+    RequestFixture {
+        encoding_tree,
+        request,
+    }
 }

 /// Returns an attestation fixture for testing.
@@ -79,6 +79,8 @@
 //!
 //! // Specify all the transcript commitments we want to make.
 //! builder
+//!     // Use BLAKE3 for encoding commitments.
+//!     .encoding_hash_alg(HashAlgId::BLAKE3)
 //!     // Commit to all sent data.
 //!     .commit_sent(&(0..sent_len))?
 //!     // Commit to the first 10 bytes of sent data.
@@ -127,7 +129,7 @@
 //!
 //! ```no_run
 //! # use tlsn_attestation::{Attestation, CryptoProvider, Secrets, presentation::Presentation};
-//! # use tlsn_core::transcript::Direction;
+//! # use tlsn_core::transcript::{TranscriptCommitmentKind, Direction};
 //! # fn main() -> Result<(), Box<dyn std::error::Error>> {
 //! # let attestation: Attestation = unimplemented!();
 //! # let secrets: Secrets = unimplemented!();
@@ -138,6 +140,8 @@
 //! let mut builder = secrets.transcript_proof_builder();
 //!
 //! builder
+//!     // Use transcript encoding commitments.
+//!     .commitment_kinds(&[TranscriptCommitmentKind::Encoding])
 //!     // Disclose the first 10 bytes of the sent data.
 //!     .reveal(&(0..10), Direction::Sent)?
 //!     // Disclose all of the received data.
@@ -215,7 +219,7 @@ use tlsn_core::{
     connection::{ConnectionInfo, ServerEphemKey},
     hash::{Hash, HashAlgorithm, TypedHash},
     merkle::MerkleTree,
-    transcript::TranscriptCommitment,
+    transcript::{TranscriptCommitment, encoding::EncoderSecret},
 };

 use crate::{
@@ -297,6 +301,8 @@ pub enum FieldKind {
     ServerEphemKey = 0x02,
     /// Server identity commitment.
     ServerIdentityCommitment = 0x03,
+    /// Encoding commitment.
+    EncodingCommitment = 0x04,
     /// Plaintext hash commitment.
     PlaintextHash = 0x05,
 }
@@ -321,6 +327,7 @@ pub struct Body {
     connection_info: Field<ConnectionInfo>,
     server_ephemeral_key: Field<ServerEphemKey>,
     cert_commitment: Field<ServerCertCommitment>,
+    encoder_secret: Option<Field<EncoderSecret>>,
     extensions: Vec<Field<Extension>>,
     transcript_commitments: Vec<Field<TranscriptCommitment>>,
 }
@@ -366,6 +373,7 @@ impl Body {
             connection_info: conn_info,
             server_ephemeral_key,
             cert_commitment,
+            encoder_secret,
             extensions,
             transcript_commitments,
         } = self;
@@ -383,6 +391,13 @@ impl Body {
             ),
         ];

+        if let Some(encoder_secret) = encoder_secret {
+            fields.push((
+                encoder_secret.id,
+                hasher.hash_separated(&encoder_secret.data),
+            ));
+        }
+
         for field in extensions.iter() {
             fields.push((field.id, hasher.hash_separated(&field.data)));
         }
@@ -91,6 +91,11 @@ impl Presentation {
         transcript.verify_with_provider(
             &provider.hash,
             &attestation.body.connection_info().transcript_length,
+            attestation
+                .body
+                .encoder_secret
+                .as_ref()
+                .map(|field| &field.data),
             attestation.body.transcript_commitments(),
         )
     })
@@ -144,7 +144,9 @@ impl std::fmt::Display for ErrorKind {
 #[cfg(test)]
 mod test {
     use tlsn_core::{
-        connection::TranscriptLength, fixtures::ConnectionFixture, hash::HashAlgId,
+        connection::TranscriptLength,
+        fixtures::{ConnectionFixture, encoding_provider},
+        hash::{Blake3, HashAlgId},
         transcript::Transcript,
     };
     use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
@@ -162,8 +164,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation =
             attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -178,8 +185,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { mut request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { mut request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation =
             attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -197,8 +209,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { mut request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { mut request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation =
             attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -216,8 +233,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { mut request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { mut request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation =
             attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -243,8 +265,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let mut attestation =
             attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -262,8 +289,13 @@ mod test {
         let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
         let connection = ConnectionFixture::tlsnotary(transcript.length());

-        let RequestFixture { request, .. } =
-            request_fixture(transcript, connection.clone(), Vec::new());
+        let RequestFixture { request, .. } = request_fixture(
+            transcript,
+            encoding_provider(GET_WITH_HEADER, OK_JSON),
+            connection.clone(),
+            Blake3::default(),
+            Vec::new(),
+        );

         let attestation =
             attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -49,4 +49,6 @@ impl_domain_separator!(tlsn_core::connection::ConnectionInfo);
 impl_domain_separator!(tlsn_core::connection::CertBinding);
 impl_domain_separator!(tlsn_core::transcript::TranscriptCommitment);
 impl_domain_separator!(tlsn_core::transcript::TranscriptSecret);
+impl_domain_separator!(tlsn_core::transcript::encoding::EncoderSecret);
+impl_domain_separator!(tlsn_core::transcript::encoding::EncodingCommitment);
 impl_domain_separator!(tlsn_core::transcript::hash::PlaintextHash);
@@ -1,5 +1,3 @@
-use rand::{Rng, SeedableRng, rngs::StdRng};
-use rangeset::set::RangeSet;
 use tlsn_attestation::{
     Attestation, AttestationConfig, CryptoProvider,
     presentation::PresentationOutput,
@@ -8,11 +6,12 @@ use tlsn_attestation::{
 };
 use tlsn_core::{
     connection::{CertBinding, CertBindingV1_2},
-    fixtures::ConnectionFixture,
-    hash::{Blake3, Blinder, HashAlgId},
+    fixtures::{self, ConnectionFixture, encoder_secret},
+    hash::Blake3,
     transcript::{
-        Direction, Transcript, TranscriptCommitment, TranscriptSecret,
-        hash::{PlaintextHash, PlaintextHashSecret, hash_plaintext},
+        Direction, Transcript, TranscriptCommitConfigBuilder, TranscriptCommitment,
+        TranscriptSecret,
+        encoding::{EncodingCommitment, EncodingTree},
     },
 };
 use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
@@ -20,7 +19,6 @@ use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
 /// Tests that the attestation protocol and verification work end-to-end
 #[test]
 fn test_api() {
-    let mut rng = StdRng::seed_from_u64(0);
     let mut provider = CryptoProvider::default();

     // Configure signer for Notary
@@ -28,6 +26,8 @@ fn test_api() {

     let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
     let (sent_len, recv_len) = transcript.len();
+    // Plaintext encodings which the Prover obtained from GC evaluation
+    let encodings_provider = fixtures::encoding_provider(GET_WITH_HEADER, OK_JSON);

     // At the end of the TLS connection the Prover holds the:
     let ConnectionFixture {
@@ -44,38 +44,26 @@ fn test_api() {
         unreachable!()
     };

-    // Create hash commitments
-    let hasher = Blake3::default();
-    let sent_blinder: Blinder = rng.random();
-    let recv_blinder: Blinder = rng.random();
+    // Prover specifies the ranges it wants to commit to.
+    let mut transcript_commitment_builder = TranscriptCommitConfigBuilder::new(&transcript);
+    transcript_commitment_builder
+        .commit_sent(&(0..sent_len))
+        .unwrap()
+        .commit_recv(&(0..recv_len))
+        .unwrap();

-    let sent_idx = RangeSet::from(0..sent_len);
-    let recv_idx = RangeSet::from(0..recv_len);
+    let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();

-    let sent_hash_commitment = PlaintextHash {
-        direction: Direction::Sent,
-        idx: sent_idx.clone(),
-        hash: hash_plaintext(&hasher, transcript.sent(), &sent_blinder),
-    };
+    // Prover constructs encoding tree.
+    let encoding_tree = EncodingTree::new(
+        &Blake3::default(),
+        transcripts_commitment_config.iter_encoding(),
+        &encodings_provider,
+    )
+    .unwrap();

-    let recv_hash_commitment = PlaintextHash {
-        direction: Direction::Received,
-        idx: recv_idx.clone(),
-        hash: hash_plaintext(&hasher, transcript.received(), &recv_blinder),
-    };
-
-    let sent_hash_secret = PlaintextHashSecret {
-        direction: Direction::Sent,
-        idx: sent_idx,
-        alg: HashAlgId::BLAKE3,
-        blinder: sent_blinder,
-    };
-
-    let recv_hash_secret = PlaintextHashSecret {
-        direction: Direction::Received,
-        idx: recv_idx,
-        alg: HashAlgId::BLAKE3,
-        blinder: recv_blinder,
-    };
+    let encoding_commitment = EncodingCommitment {
+        root: encoding_tree.root(),
+    };

     let request_config = RequestConfig::default();
@@ -86,14 +74,8 @@ fn test_api() {
         .handshake_data(server_cert_data)
         .transcript(transcript)
         .transcript_commitments(
-            vec![
-                TranscriptSecret::Hash(sent_hash_secret),
-                TranscriptSecret::Hash(recv_hash_secret),
-            ],
-            vec![
-                TranscriptCommitment::Hash(sent_hash_commitment.clone()),
-                TranscriptCommitment::Hash(recv_hash_commitment.clone()),
-            ],
+            vec![TranscriptSecret::Encoding(encoding_tree)],
+            vec![TranscriptCommitment::Encoding(encoding_commitment.clone())],
         );

     let (request, secrets) = request_builder.build(&provider).unwrap();
@@ -113,10 +95,8 @@ fn test_api() {
         .connection_info(connection_info.clone())
         // Server key Notary received during handshake
         .server_ephemeral_key(server_ephemeral_key)
-        .transcript_commitments(vec![
-            TranscriptCommitment::Hash(sent_hash_commitment),
-            TranscriptCommitment::Hash(recv_hash_commitment),
-        ]);
+        .encoder_secret(encoder_secret())
+        .transcript_commitments(vec![TranscriptCommitment::Encoding(encoding_commitment)]);

     let attestation = attestation_builder.build(&provider).unwrap();
@@ -27,6 +27,7 @@ tlsn-data-fixtures = { workspace = true, optional = true }
 tlsn-tls-core = { workspace = true, features = ["serde"] }
 tlsn-utils = { workspace = true }
 rangeset = { workspace = true, features = ["serde"] }
+mpz-predicate = { workspace = true }

 aead = { workspace = true, features = ["alloc"], optional = true }
 aes-gcm = { workspace = true, optional = true }
@@ -1,16 +1,119 @@
|
||||
//! Proving configuration.
|
||||
|
||||
use mpz_predicate::Pred;
|
||||
use rangeset::set::{RangeSet, ToRangeSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::transcript::{Direction, Transcript, TranscriptCommitConfig, TranscriptCommitRequest};
|
||||
|
||||
/// Configuration to prove information to the verifier.
|
||||
/// Configuration for a predicate to prove over transcript data.
|
||||
///
|
||||
/// A predicate is a boolean constraint that operates on transcript bytes.
|
||||
/// The prover proves in ZK that the predicate evaluates to true.
|
||||
///
|
||||
/// The predicate itself encodes which byte indices it operates on via its
|
||||
/// atomic comparisons (e.g., `gte(42, threshold)` operates on byte index 42).
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PredicateConfig {
|
||||
/// Human-readable name for the predicate (sent to verifier as sanity
|
||||
/// check).
|
||||
name: String,
|
||||
/// Direction of transcript data the predicate operates on.
|
||||
direction: Direction,
|
||||
/// The predicate to prove.
|
||||
predicate: Pred,
|
||||
}
|
||||
|
||||
impl PredicateConfig {
|
||||
/// Creates a new predicate configuration.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `name` - Human-readable name for the predicate.
|
||||
/// * `direction` - Whether the predicate operates on sent or received data.
|
||||
/// * `predicate` - The predicate to prove.
|
||||
pub fn new(name: impl Into<String>, direction: Direction, predicate: Pred) -> Self {
|
||||
Self {
|
||||
name: name.into(),
|
||||
direction,
|
||||
predicate,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the predicate name.
|
||||
pub fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
|
||||
/// Returns the direction of transcript data.
|
||||
pub fn direction(&self) -> Direction {
|
||||
self.direction
|
||||
}
|
||||
|
||||
/// Returns the predicate.
|
||||
pub fn predicate(&self) -> &Pred {
|
||||
&self.predicate
|
||||
}
|
||||
|
||||
/// Returns the transcript byte indices this predicate operates on.
|
||||
pub fn indices(&self) -> Vec<usize> {
|
||||
self.predicate.indices()
|
||||
}
|
||||
|
||||
/// Converts to a request (wire format).
|
||||
pub fn to_request(&self) -> PredicateRequest {
|
||||
let indices: RangeSet<usize> = self
|
||||
.predicate
|
||||
.indices()
|
||||
.into_iter()
|
||||
.map(|idx| idx..idx + 1)
|
||||
.collect();
|
||||
PredicateRequest {
|
||||
name: self.name.clone(),
|
||||
direction: self.direction,
|
||||
indices,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
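// Illustrative sketch (not part of the diff): constructing a predicate config
// and converting it to the wire format. `Pred::gte` is an assumed constructor
// name, following the `gte(42, threshold)` notation used in the docs above.
//
//     // Prove in ZK that received byte 42 is >= 100 without revealing it.
//     let predicate = Pred::gte(42, 100);
//     let config = PredicateConfig::new("price-check", Direction::Received, predicate);
//     assert_eq!(config.indices(), vec![42]);
//
//     // The request carries only the name and the indices; the verifier is
//     // expected to know the predicate itself from out-of-band agreement.
//     let request = config.to_request();
//     assert_eq!(request.name(), "price-check");
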
/// Wire format for predicate proving request.
///
/// Contains only the predicate name and indices - the verifier is expected
/// to know which predicate corresponds to the name from out-of-band agreement.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredicateRequest {
    /// Human-readable name for the predicate.
    name: String,
    /// Direction of transcript data the predicate operates on.
    direction: Direction,
    /// Transcript byte indices the predicate operates on.
    indices: RangeSet<usize>,
}

impl PredicateRequest {
    /// Returns the predicate name.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Returns the direction of transcript data.
    pub fn direction(&self) -> Direction {
        self.direction
    }

    /// Returns the transcript byte indices as a RangeSet.
    pub fn indices(&self) -> &RangeSet<usize> {
        &self.indices
    }
}

/// Configuration to prove information to the verifier.
#[derive(Debug, Clone)]
pub struct ProveConfig {
    server_identity: bool,
    reveal: Option<(RangeSet<usize>, RangeSet<usize>)>,
    transcript_commit: Option<TranscriptCommitConfig>,
    predicates: Vec<PredicateConfig>,
}

impl ProveConfig {
@@ -35,6 +138,11 @@ impl ProveConfig {
        self.transcript_commit.as_ref()
    }

    /// Returns the predicate configurations.
    pub fn predicates(&self) -> &[PredicateConfig] {
        &self.predicates
    }

    /// Returns a request.
    pub fn to_request(&self) -> ProveRequest {
        ProveRequest {
@@ -44,6 +152,7 @@ impl ProveConfig {
                .transcript_commit
                .clone()
                .map(|config| config.to_request()),
            predicates: self.predicates.iter().map(|p| p.to_request()).collect(),
        }
    }
}
@@ -55,6 +164,7 @@ pub struct ProveConfigBuilder<'a> {
    server_identity: bool,
    reveal: Option<(RangeSet<usize>, RangeSet<usize>)>,
    transcript_commit: Option<TranscriptCommitConfig>,
    predicates: Vec<PredicateConfig>,
}

impl<'a> ProveConfigBuilder<'a> {
@@ -65,6 +175,7 @@ impl<'a> ProveConfigBuilder<'a> {
            server_identity: false,
            reveal: None,
            transcript_commit: None,
            predicates: Vec::new(),
        }
    }

@@ -137,12 +248,52 @@ impl<'a> ProveConfigBuilder<'a> {
        Ok(self)
    }

    /// Adds a predicate to prove over transcript data.
    ///
    /// The predicate encodes which byte indices it operates on via its atomic
    /// comparisons (e.g., `gte(42, threshold)` operates on byte index 42).
    ///
    /// # Arguments
    ///
    /// * `name` - Human-readable name for the predicate (sent to verifier as
    ///   sanity check).
    /// * `direction` - Whether the predicate operates on sent or received data.
    /// * `predicate` - The predicate to prove.
    pub fn predicate(
        &mut self,
        name: impl Into<String>,
        direction: Direction,
        predicate: Pred,
    ) -> Result<&mut Self, ProveConfigError> {
        let indices = predicate.indices();

        // Predicate must reference at least one transcript byte.
        let last_idx = *indices
            .last()
            .ok_or(ProveConfigError(ErrorRepr::EmptyPredicate))?;

        // Since indices are sorted, only check the last one for bounds.
        let transcript_len = self.transcript.len_of_direction(direction);
        if last_idx >= transcript_len {
            return Err(ProveConfigError(ErrorRepr::IndexOutOfBounds {
                direction,
                actual: last_idx,
                len: transcript_len,
            }));
        }

        self.predicates
            .push(PredicateConfig::new(name, direction, predicate));
        Ok(self)
    }

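// Illustrative sketch (not part of the diff): the bounds check above rejects a
// predicate referencing an index past the end of the transcript at config
// time. Assumes a `ProveConfig::builder(&transcript)` constructor and the
// `Pred::gte` name from the docs above.
//
//     let transcript = Transcript::new(b"hello", b"world");
//     let mut builder = ProveConfig::builder(&transcript);
//
//     // The received transcript is 5 bytes long, so index 42 is out of bounds
//     // and `predicate` returns ErrorRepr::IndexOutOfBounds.
//     assert!(builder
//         .predicate("oob", Direction::Received, Pred::gte(42, 100))
//         .is_err());
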
    /// Builds the configuration.
    pub fn build(self) -> Result<ProveConfig, ProveConfigError> {
        Ok(ProveConfig {
            server_identity: self.server_identity,
            reveal: self.reveal,
            transcript_commit: self.transcript_commit,
            predicates: self.predicates,
        })
    }
}
@@ -153,6 +304,7 @@ pub struct ProveRequest {
    server_identity: bool,
    reveal: Option<(RangeSet<usize>, RangeSet<usize>)>,
    transcript_commit: Option<TranscriptCommitRequest>,
    predicates: Vec<PredicateRequest>,
}

impl ProveRequest {
@@ -171,6 +323,11 @@ impl ProveRequest {
    pub fn transcript_commit(&self) -> Option<&TranscriptCommitRequest> {
        self.transcript_commit.as_ref()
    }

    /// Returns the predicate requests.
    pub fn predicates(&self) -> &[PredicateRequest] {
        &self.predicates
    }
}

/// Error for [`ProveConfig`].
@@ -180,10 +337,12 @@ pub struct ProveConfigError(#[from] ErrorRepr);

#[derive(Debug, thiserror::Error)]
enum ErrorRepr {
    #[error("range is out of bounds of the transcript ({direction}): {actual} > {len}")]
    #[error("index out of bounds for {direction} transcript: {actual} >= {len}")]
    IndexOutOfBounds {
        direction: Direction,
        actual: usize,
        len: usize,
    },
    #[error("predicate must reference at least one transcript byte")]
    EmptyPredicate,
}

@@ -1,7 +1,10 @@
//! Fixtures for testing

mod provider;
pub mod transcript;

pub use provider::FixtureEncodingProvider;

use hex::FromHex;

use crate::{
@@ -10,6 +13,10 @@ use crate::{
        ServerEphemKey, ServerName, ServerSignature, SignatureAlgorithm, TlsVersion,
        TranscriptLength,
    },
    transcript::{
        encoding::{EncoderSecret, EncodingProvider},
        Transcript,
    },
    webpki::CertificateDer,
};

@@ -122,3 +129,27 @@ impl ConnectionFixture {
        server_ephemeral_key
    }
}

/// Returns an encoding provider fixture.
pub fn encoding_provider(tx: &[u8], rx: &[u8]) -> impl EncodingProvider {
    let secret = encoder_secret();
    FixtureEncodingProvider::new(&secret, Transcript::new(tx, rx))
}

/// Seed fixture.
const SEED: [u8; 32] = [0; 32];

/// Delta fixture.
const DELTA: [u8; 16] = [1; 16];

/// Returns an encoder secret fixture.
pub fn encoder_secret() -> EncoderSecret {
    EncoderSecret::new(SEED, DELTA)
}

/// Returns a tampered encoder secret fixture.
pub fn encoder_secret_tampered_seed() -> EncoderSecret {
    let mut seed = SEED;
    seed[0] += 1;
    EncoderSecret::new(seed, DELTA)
}

41
crates/core/src/fixtures/provider.rs
Normal file
@@ -0,0 +1,41 @@
use std::ops::Range;

use crate::transcript::{
    encoding::{new_encoder, Encoder, EncoderSecret, EncodingProvider, EncodingProviderError},
    Direction, Transcript,
};

/// An encoding provider fixture.
pub struct FixtureEncodingProvider {
    encoder: Box<dyn Encoder>,
    transcript: Transcript,
}

impl FixtureEncodingProvider {
    /// Creates a new encoding provider fixture.
    pub(crate) fn new(secret: &EncoderSecret, transcript: Transcript) -> Self {
        Self {
            encoder: Box::new(new_encoder(secret)),
            transcript,
        }
    }
}

impl EncodingProvider for FixtureEncodingProvider {
    fn provide_encoding(
        &self,
        direction: Direction,
        range: Range<usize>,
        dest: &mut Vec<u8>,
    ) -> Result<(), EncodingProviderError> {
        let transcript = match direction {
            Direction::Sent => &self.transcript.sent(),
            Direction::Received => &self.transcript.received(),
        };

        let data = transcript.get(range.clone()).ok_or(EncodingProviderError)?;
        self.encoder.encode_data(direction, range, data, dest);

        Ok(())
    }
}
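
// Illustrative sketch (not part of the diff): the fixture re-derives encodings
// on demand from the fixed encoder secret. Each transcript byte encodes to
// BYTE_ENCODING_SIZE (128) bytes.
//
//     let provider = encoding_provider(b"sent data", b"recv data");
//     let mut dest = Vec::new();
//     provider
//         .provide_encoding(Direction::Sent, 0..4, &mut dest)
//         .unwrap();
//     assert_eq!(dest.len(), 4 * 128);
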
@@ -19,7 +19,9 @@ use serde::{Deserialize, Serialize};

use crate::{
    connection::ServerName,
    transcript::{PartialTranscript, TranscriptCommitment, TranscriptSecret},
    transcript::{
        encoding::EncoderSecret, PartialTranscript, TranscriptCommitment, TranscriptSecret,
    },
};

/// Prover output.
@@ -40,6 +42,8 @@ pub struct VerifierOutput {
    pub server_name: Option<ServerName>,
    /// Transcript data.
    pub transcript: Option<PartialTranscript>,
    /// Encoding commitment secret.
    pub encoder_secret: Option<EncoderSecret>,
    /// Transcript commitments.
    pub transcript_commitments: Vec<TranscriptCommitment>,
}

@@ -63,6 +63,11 @@ impl MerkleProof {

        Ok(())
    }

    /// Returns the leaf count of the Merkle tree associated with the proof.
    pub(crate) fn leaf_count(&self) -> usize {
        self.leaf_count
    }
}

#[derive(Clone)]

@@ -19,6 +19,7 @@
//! withheld.

mod commit;
pub mod encoding;
pub mod hash;
mod proof;
mod tls;
@@ -109,14 +110,8 @@ impl Transcript {
        }

        Some(
            Subsequence::new(
                idx.clone(),
                data.index(idx).fold(Vec::new(), |mut acc, s| {
                    acc.extend_from_slice(s);
                    acc
                }),
            )
            .expect("data is same length as index"),
            Subsequence::new(idx.clone(), data.index(idx).flatten().copied().collect())
                .expect("data is same length as index"),
        )
    }

@@ -195,20 +190,18 @@ pub struct CompressedPartialTranscript {
impl From<PartialTranscript> for CompressedPartialTranscript {
    fn from(uncompressed: PartialTranscript) -> Self {
        Self {
            sent_authed: uncompressed.sent.index(&uncompressed.sent_authed_idx).fold(
                Vec::new(),
                |mut acc, s| {
                    acc.extend_from_slice(s);
                    acc
                },
            ),
            sent_authed: uncompressed
                .sent
                .index(&uncompressed.sent_authed_idx)
                .flatten()
                .copied()
                .collect(),
            received_authed: uncompressed
                .received
                .index(&uncompressed.received_authed_idx)
                .fold(Vec::new(), |mut acc, s| {
                    acc.extend_from_slice(s);
                    acc
                }),
                .flatten()
                .copied()
                .collect(),
            sent_idx: uncompressed.sent_authed_idx,
            recv_idx: uncompressed.received_authed_idx,
            sent_total: uncompressed.sent.len(),

@@ -8,15 +8,27 @@ use serde::{Deserialize, Serialize};
use crate::{
    hash::HashAlgId,
    transcript::{
        encoding::{EncodingCommitment, EncodingTree},
        hash::{PlaintextHash, PlaintextHashSecret},
        Direction, RangeSet, Transcript,
    },
};

/// The maximum allowed total bytelength of committed data for a single
/// commitment kind. Used to prevent DoS during verification. (May cause the
/// verifier to hash up to a max of 1GB * 128 = 128GB of data for certain kinds
/// of encoding commitments.)
///
/// This value must not exceed bcs's MAX_SEQUENCE_LENGTH limit (which is (1 <<
/// 31) - 1 by default)
pub(crate) const MAX_TOTAL_COMMITTED_DATA: usize = 1_000_000_000;

/// Kind of transcript commitment.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum TranscriptCommitmentKind {
    /// A commitment to encodings of the transcript.
    Encoding,
    /// A hash commitment to plaintext in the transcript.
    Hash {
        /// The hash algorithm used.
@@ -27,6 +39,7 @@ pub enum TranscriptCommitmentKind {
impl fmt::Display for TranscriptCommitmentKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Encoding => f.write_str("encoding"),
            Self::Hash { alg } => write!(f, "hash ({alg})"),
        }
    }
@@ -36,6 +49,8 @@ impl fmt::Display for TranscriptCommitmentKind {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[non_exhaustive]
pub enum TranscriptCommitment {
    /// Encoding commitment.
    Encoding(EncodingCommitment),
    /// Plaintext hash commitment.
    Hash(PlaintextHash),
}
@@ -44,6 +59,8 @@ pub enum TranscriptCommitment {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[non_exhaustive]
pub enum TranscriptSecret {
    /// Encoding tree.
    Encoding(EncodingTree),
    /// Plaintext hash secret.
    Hash(PlaintextHashSecret),
}
@@ -51,6 +68,9 @@ pub enum TranscriptSecret {
/// Configuration for transcript commitments.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscriptCommitConfig {
    encoding_hash_alg: HashAlgId,
    has_encoding: bool,
    has_hash: bool,
    commits: Vec<((Direction, RangeSet<usize>), TranscriptCommitmentKind)>,
}

@@ -60,23 +80,53 @@ impl TranscriptCommitConfig {
        TranscriptCommitConfigBuilder::new(transcript)
    }

    /// Returns the hash algorithm to use for encoding commitments.
    pub fn encoding_hash_alg(&self) -> &HashAlgId {
        &self.encoding_hash_alg
    }

    /// Returns `true` if the configuration has any encoding commitments.
    pub fn has_encoding(&self) -> bool {
        self.has_encoding
    }

    /// Returns `true` if the configuration has any hash commitments.
    pub fn has_hash(&self) -> bool {
        self.commits
            .iter()
            .any(|(_, kind)| matches!(kind, TranscriptCommitmentKind::Hash { .. }))
        self.has_hash
    }

    /// Returns an iterator over the encoding commitment indices.
    pub fn iter_encoding(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
        self.commits.iter().filter_map(|(idx, kind)| match kind {
            TranscriptCommitmentKind::Encoding => Some(idx),
            _ => None,
        })
    }

    /// Returns an iterator over the hash commitment indices.
    pub fn iter_hash(&self) -> impl Iterator<Item = (&(Direction, RangeSet<usize>), &HashAlgId)> {
        self.commits.iter().map(|(idx, kind)| match kind {
            TranscriptCommitmentKind::Hash { alg } => (idx, alg),
        self.commits.iter().filter_map(|(idx, kind)| match kind {
            TranscriptCommitmentKind::Hash { alg } => Some((idx, alg)),
            _ => None,
        })
    }

    /// Returns a request for the transcript commitments.
    pub fn to_request(&self) -> TranscriptCommitRequest {
        TranscriptCommitRequest {
            encoding: self.has_encoding.then(|| {
                let mut sent = RangeSet::default();
                let mut recv = RangeSet::default();

                for (dir, idx) in self.iter_encoding() {
                    match dir {
                        Direction::Sent => sent.union_mut(idx),
                        Direction::Received => recv.union_mut(idx),
                    }
                }

                (sent, recv)
            }),
            hash: self
                .iter_hash()
                .map(|((dir, idx), alg)| (*dir, idx.clone(), *alg))
@@ -86,9 +136,15 @@ impl TranscriptCommitConfig {
}

/// A builder for [`TranscriptCommitConfig`].
///
/// The default hash algorithm is [`HashAlgId::BLAKE3`] and the default kind
/// is [`TranscriptCommitmentKind::Encoding`].
#[derive(Debug)]
pub struct TranscriptCommitConfigBuilder<'a> {
    transcript: &'a Transcript,
    encoding_hash_alg: HashAlgId,
    has_encoding: bool,
    has_hash: bool,
    default_kind: TranscriptCommitmentKind,
    commits: HashSet<((Direction, RangeSet<usize>), TranscriptCommitmentKind)>,
}
@@ -98,13 +154,20 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
    pub fn new(transcript: &'a Transcript) -> Self {
        Self {
            transcript,
            default_kind: TranscriptCommitmentKind::Hash {
                alg: HashAlgId::BLAKE3,
            },
            encoding_hash_alg: HashAlgId::BLAKE3,
            has_encoding: false,
            has_hash: false,
            default_kind: TranscriptCommitmentKind::Encoding,
            commits: HashSet::default(),
        }
    }

    /// Sets the hash algorithm to use for encoding commitments.
    pub fn encoding_hash_alg(&mut self, alg: HashAlgId) -> &mut Self {
        self.encoding_hash_alg = alg;
        self
    }

    /// Sets the default kind of commitment to use.
    pub fn default_kind(&mut self, default_kind: TranscriptCommitmentKind) -> &mut Self {
        self.default_kind = default_kind;
@@ -138,6 +201,11 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
            ));
        }

        match kind {
            TranscriptCommitmentKind::Encoding => self.has_encoding = true,
            TranscriptCommitmentKind::Hash { .. } => self.has_hash = true,
        }

        self.commits.insert(((direction, idx), kind));

        Ok(self)
@@ -184,6 +252,9 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
    /// Builds the configuration.
    pub fn build(self) -> Result<TranscriptCommitConfig, TranscriptCommitConfigBuilderError> {
        Ok(TranscriptCommitConfig {
            encoding_hash_alg: self.encoding_hash_alg,
            has_encoding: self.has_encoding,
            has_hash: self.has_hash,
            commits: Vec::from_iter(self.commits),
        })
    }
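
// Illustrative sketch (not part of the diff): committing to ranges in both
// directions and producing the wire-format request. The `&(0..4)` range
// arguments assume `commit_sent`/`commit_recv` accept anything convertible to
// a range set, as the tests further below suggest.
//
//     let transcript = Transcript::new(b"GET / HTTP/1.1", b"HTTP/1.1 200 OK");
//     let mut builder = TranscriptCommitConfig::builder(&transcript);
//
//     // Default kind is Encoding; switch to a hash commitment for the rest.
//     builder.commit_sent(&(0..4)).unwrap();
//     builder.default_kind(TranscriptCommitmentKind::Hash { alg: HashAlgId::BLAKE3 });
//     builder.commit_recv(&(0..8)).unwrap();
//
//     let config = builder.build().unwrap();
//     assert!(config.has_encoding() && config.has_hash());
//     let request = config.to_request();
//     assert!(request.has_encoding() && request.has_hash());
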
@@ -230,10 +301,16 @@ impl fmt::Display for TranscriptCommitConfigBuilderError {
/// Request to compute transcript commitments.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscriptCommitRequest {
    encoding: Option<(RangeSet<usize>, RangeSet<usize>)>,
    hash: Vec<(Direction, RangeSet<usize>, HashAlgId)>,
}

impl TranscriptCommitRequest {
    /// Returns `true` if an encoding commitment is requested.
    pub fn has_encoding(&self) -> bool {
        self.encoding.is_some()
    }

    /// Returns `true` if a hash commitment is requested.
    pub fn has_hash(&self) -> bool {
        !self.hash.is_empty()
@@ -243,6 +320,11 @@ impl TranscriptCommitRequest {
    pub fn iter_hash(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>, HashAlgId)> {
        self.hash.iter()
    }

    /// Returns the ranges of the encoding commitments.
    pub fn encoding(&self) -> Option<&(RangeSet<usize>, RangeSet<usize>)> {
        self.encoding.as_ref()
    }
}

#[cfg(test)]

22
crates/core/src/transcript/encoding.rs
Normal file
@@ -0,0 +1,22 @@
//! Transcript encoding commitments and proofs.

mod encoder;
mod proof;
mod provider;
mod tree;

pub use encoder::{new_encoder, Encoder, EncoderSecret};
pub use proof::{EncodingProof, EncodingProofError};
pub use provider::{EncodingProvider, EncodingProviderError};
pub use tree::{EncodingTree, EncodingTreeError};

use serde::{Deserialize, Serialize};

use crate::hash::TypedHash;

/// Transcript encoding commitment.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct EncodingCommitment {
    /// Merkle root of the encoding commitments.
    pub root: TypedHash,
}

137
crates/core/src/transcript/encoding/encoder.rs
Normal file
@@ -0,0 +1,137 @@
use std::ops::Range;

use crate::transcript::Direction;
use itybity::ToBits;
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaCha12Rng;
use serde::{Deserialize, Serialize};

/// The size of the encoding for 1 bit, in bytes.
const BIT_ENCODING_SIZE: usize = 16;
/// The size of the encoding for 1 byte, in bytes.
const BYTE_ENCODING_SIZE: usize = 128;

/// Secret used by an encoder to generate encodings.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct EncoderSecret {
    seed: [u8; 32],
    delta: [u8; BIT_ENCODING_SIZE],
}

opaque_debug::implement!(EncoderSecret);

impl EncoderSecret {
    /// Creates a new secret.
    ///
    /// # Arguments
    ///
    /// * `seed` - The seed for the PRG.
    /// * `delta` - Delta for deriving the one-encodings.
    pub fn new(seed: [u8; 32], delta: [u8; 16]) -> Self {
        Self { seed, delta }
    }

    /// Returns the seed.
    pub fn seed(&self) -> &[u8; 32] {
        &self.seed
    }

    /// Returns the delta.
    pub fn delta(&self) -> &[u8; 16] {
        &self.delta
    }
}

/// Creates a new encoder.
pub fn new_encoder(secret: &EncoderSecret) -> impl Encoder {
    ChaChaEncoder::new(secret)
}

pub(crate) struct ChaChaEncoder {
    seed: [u8; 32],
    delta: [u8; 16],
}

impl ChaChaEncoder {
    pub(crate) fn new(secret: &EncoderSecret) -> Self {
        let seed = *secret.seed();
        let delta = *secret.delta();

        Self { seed, delta }
    }

    pub(crate) fn new_prg(&self, stream_id: u64) -> ChaCha12Rng {
        let mut prg = ChaCha12Rng::from_seed(self.seed);
        prg.set_stream(stream_id);
        prg.set_word_pos(0);
        prg
    }
}

/// A transcript encoder.
///
/// This is an internal implementation detail that should not be exposed to the
/// public API.
pub trait Encoder {
    /// Writes the zero encoding for the given range of the transcript into the
    /// destination buffer.
    fn encode_range(&self, direction: Direction, range: Range<usize>, dest: &mut Vec<u8>);

    /// Writes the encoding for the given data into the destination buffer.
    fn encode_data(
        &self,
        direction: Direction,
        range: Range<usize>,
        data: &[u8],
        dest: &mut Vec<u8>,
    );
}

impl Encoder for ChaChaEncoder {
    fn encode_range(&self, direction: Direction, range: Range<usize>, dest: &mut Vec<u8>) {
        // ChaCha encoder works with 32-bit words. Each encoded bit is 128 bits long.
        const WORDS_PER_BYTE: u128 = 8 * 128 / 32;

        let stream_id: u64 = match direction {
            Direction::Sent => 0,
            Direction::Received => 1,
        };

        let mut prg = self.new_prg(stream_id);
        let len = range.len() * BYTE_ENCODING_SIZE;
        let pos = dest.len();

        // Write 0s to the destination buffer.
        dest.resize(pos + len, 0);

        // Fill the destination buffer with the PRG.
        prg.set_word_pos(range.start as u128 * WORDS_PER_BYTE);
        prg.fill_bytes(&mut dest[pos..pos + len]);
    }

    fn encode_data(
        &self,
        direction: Direction,
        range: Range<usize>,
        data: &[u8],
        dest: &mut Vec<u8>,
    ) {
        const ZERO: [u8; 16] = [0; BIT_ENCODING_SIZE];

        let pos = dest.len();

        // Write the zero encoding for the given range.
        self.encode_range(direction, range, dest);
        let dest = &mut dest[pos..];

        for (pos, bit) in data.iter_lsb0().enumerate() {
            // Add the delta to the encoding whenever the encoded bit is 1,
            // otherwise add a zero.
            let summand = if bit { &self.delta } else { &ZERO };
            dest[pos * BIT_ENCODING_SIZE..(pos + 1) * BIT_ENCODING_SIZE]
                .iter_mut()
                .zip(summand)
                .for_each(|(a, b)| *a ^= *b);
        }
    }
}
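
// Illustrative sketch (not part of the diff): the loop above is plain XOR over
// 16-byte blocks, so an encoding of data differs from the zero encoding by
// `delta` in exactly the blocks of the bits that are set. A self-contained
// check of that property using the fixture values seed = [0; 32],
// delta = [1; 16]:
//
//     let secret = EncoderSecret::new([0; 32], [1; 16]);
//     let encoder = new_encoder(&secret);
//
//     let (mut zero, mut one) = (Vec::new(), Vec::new());
//     encoder.encode_data(Direction::Sent, 0..1, &[0x00], &mut zero);
//     encoder.encode_data(Direction::Sent, 0..1, &[0x01], &mut one);
//
//     // Only the first bit's 16-byte block differs, by exactly delta.
//     assert_eq!(&zero[16..], &one[16..]);
//     assert!(zero[..16].iter().zip(&one[..16]).all(|(a, b)| a ^ b == 1));
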
361
crates/core/src/transcript/encoding/proof.rs
Normal file
@@ -0,0 +1,361 @@
use std::{collections::HashMap, fmt};

use rangeset::set::RangeSet;
use serde::{Deserialize, Serialize};

use crate::{
    hash::{Blinder, HashProvider, HashProviderError},
    merkle::{MerkleError, MerkleProof},
    transcript::{
        commit::MAX_TOTAL_COMMITTED_DATA,
        encoding::{new_encoder, Encoder, EncoderSecret, EncodingCommitment},
        Direction,
    },
};

/// An opening of a leaf in the encoding tree.
#[derive(Clone, Serialize, Deserialize)]
pub(super) struct Opening {
    pub(super) direction: Direction,
    pub(super) idx: RangeSet<usize>,
    pub(super) blinder: Blinder,
}

opaque_debug::implement!(Opening);

/// An encoding commitment proof.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(try_from = "validation::EncodingProofUnchecked")]
pub struct EncodingProof {
    /// The proof of inclusion of the commitment(s) in the Merkle tree of
    /// commitments.
    pub(super) inclusion_proof: MerkleProof,
    pub(super) openings: HashMap<usize, Opening>,
}

impl EncodingProof {
    /// Verifies the proof against the commitment.
    ///
    /// Returns the authenticated indices of the sent and received data,
    /// respectively.
    ///
    /// # Arguments
    ///
    /// * `provider` - Hash provider.
    /// * `secret` - Encoder secret used to re-derive the encodings.
    /// * `commitment` - Encoding commitment to verify against.
    /// * `sent` - Sent data to authenticate.
    /// * `recv` - Received data to authenticate.
    pub fn verify_with_provider(
        &self,
        provider: &HashProvider,
        secret: &EncoderSecret,
        commitment: &EncodingCommitment,
        sent: &[u8],
        recv: &[u8],
    ) -> Result<(RangeSet<usize>, RangeSet<usize>), EncodingProofError> {
        let hasher = provider.get(&commitment.root.alg)?;

        let encoder = new_encoder(secret);
        let Self {
            inclusion_proof,
            openings,
        } = self;

        let mut leaves = Vec::with_capacity(openings.len());
        let mut expected_leaf = Vec::default();
        let mut total_opened = 0u128;
        let mut auth_sent = RangeSet::default();
        let mut auth_recv = RangeSet::default();
        for (
            id,
            Opening {
                direction,
                idx,
                blinder,
            },
        ) in openings
        {
            // Make sure the amount of data being proved is bounded.
            total_opened += idx.len() as u128;
            if total_opened > MAX_TOTAL_COMMITTED_DATA as u128 {
                return Err(EncodingProofError::new(
                    ErrorKind::Proof,
                    "exceeded maximum allowed data",
                ))?;
            }

            let (data, auth) = match direction {
                Direction::Sent => (sent, &mut auth_sent),
                Direction::Received => (recv, &mut auth_recv),
            };

            // Make sure the ranges are within the bounds of the transcript.
            if idx.end().unwrap_or(0) > data.len() {
                return Err(EncodingProofError::new(
                    ErrorKind::Proof,
                    format!(
                        "index out of bounds of the transcript ({}): {} > {}",
                        direction,
                        idx.end().unwrap_or(0),
                        data.len()
                    ),
                ));
            }

            expected_leaf.clear();
            for range in idx.iter() {
                encoder.encode_data(*direction, range.clone(), &data[range], &mut expected_leaf);
            }
            expected_leaf.extend_from_slice(blinder.as_bytes());

            // Compute the expected hash of the commitment to make sure it is
            // present in the merkle tree.
            leaves.push((*id, hasher.hash(&expected_leaf)));

            auth.union_mut(idx);
        }

        // Verify that the expected hashes are present in the merkle tree.
        //
        // This proves the Prover committed to the purported data prior to the encoder
        // seed being revealed. Ergo, if the encodings are authentic then the purported
        // data is authentic.
        inclusion_proof.verify(hasher, &commitment.root, leaves)?;

        Ok((auth_sent, auth_recv))
    }
}

/// Error for [`EncodingProof`].
#[derive(Debug, thiserror::Error)]
pub struct EncodingProofError {
    kind: ErrorKind,
    source: Option<Box<dyn std::error::Error + Send + Sync>>,
}

impl EncodingProofError {
    fn new<E>(kind: ErrorKind, source: E) -> Self
    where
        E: Into<Box<dyn std::error::Error + Send + Sync>>,
    {
        Self {
            kind,
            source: Some(source.into()),
        }
    }
}

#[derive(Debug)]
enum ErrorKind {
    Provider,
    Proof,
}

impl fmt::Display for EncodingProofError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("encoding proof error: ")?;

        match self.kind {
            ErrorKind::Provider => f.write_str("provider error")?,
            ErrorKind::Proof => f.write_str("proof error")?,
        }

        if let Some(source) = &self.source {
            write!(f, " caused by: {source}")?;
        }

        Ok(())
    }
}

impl From<HashProviderError> for EncodingProofError {
    fn from(error: HashProviderError) -> Self {
        Self::new(ErrorKind::Provider, error)
    }
}

impl From<MerkleError> for EncodingProofError {
    fn from(error: MerkleError) -> Self {
        Self::new(ErrorKind::Proof, error)
    }
}

/// Invalid encoding proof error.
#[derive(Debug, thiserror::Error)]
#[error("invalid encoding proof: {0}")]
pub struct InvalidEncodingProof(&'static str);

mod validation {
    use super::*;

    /// The maximum allowed height of the Merkle tree of encoding commitments.
    ///
    /// The statistical security parameter (SSP) of the encoding commitment
    /// protocol is calculated as "the number of uniformly random bits in a
    /// single bit's encoding minus `MAX_HEIGHT`".
    ///
    /// For example, a bit encoding used in garbled circuits typically has 127
    /// uniformly random bits, hence when using it in the encoding
    /// commitment protocol, the SSP is 127 - 30 = 97 bits.
    ///
    /// Leaving this validation here as a fail-safe in case we ever start
    /// using shorter encodings.
    const MAX_HEIGHT: usize = 30;

    #[derive(Debug, Deserialize)]
    pub(super) struct EncodingProofUnchecked {
        inclusion_proof: MerkleProof,
        openings: HashMap<usize, Opening>,
    }

    impl TryFrom<EncodingProofUnchecked> for EncodingProof {
        type Error = InvalidEncodingProof;

        fn try_from(unchecked: EncodingProofUnchecked) -> Result<Self, Self::Error> {
            if unchecked.inclusion_proof.leaf_count() > 1 << MAX_HEIGHT {
                return Err(InvalidEncodingProof(
                    "the height of the tree exceeds the maximum allowed",
                ));
            }

            Ok(Self {
                inclusion_proof: unchecked.inclusion_proof,
                openings: unchecked.openings,
            })
        }
    }
}

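// Illustrative note (not part of the diff): with 127 uniformly random bits per
// bit encoding, the leaf-count bound enforced above caps the Merkle tree at
// 2^30 leaves, giving a statistical security parameter of
//
//     SSP = 127 - MAX_HEIGHT = 127 - 30 = 97 bits.
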
#[cfg(test)]
mod test {
    use tlsn_data_fixtures::http::{request::POST_JSON, response::OK_JSON};

    use crate::{
        fixtures::{encoder_secret, encoder_secret_tampered_seed, encoding_provider},
        hash::Blake3,
        transcript::{encoding::EncodingTree, Transcript},
    };

    use super::*;

    struct EncodingFixture {
        transcript: Transcript,
        proof: EncodingProof,
        commitment: EncodingCommitment,
    }

    fn new_encoding_fixture() -> EncodingFixture {
        let transcript = Transcript::new(POST_JSON, OK_JSON);

        let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
        let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len()));

        let provider = encoding_provider(transcript.sent(), transcript.received());
        let tree = EncodingTree::new(&Blake3::default(), [&idx_0, &idx_1], &provider).unwrap();

        let proof = tree.proof([&idx_0, &idx_1].into_iter()).unwrap();

        let commitment = EncodingCommitment { root: tree.root() };

        EncodingFixture {
            transcript,
            proof,
            commitment,
        }
    }

    #[test]
    fn test_verify_encoding_proof_tampered_seed() {
        let EncodingFixture {
            transcript,
            proof,
            commitment,
        } = new_encoding_fixture();

        let err = proof
            .verify_with_provider(
                &HashProvider::default(),
                &encoder_secret_tampered_seed(),
                &commitment,
                transcript.sent(),
                transcript.received(),
            )
            .unwrap_err();

        assert!(matches!(err.kind, ErrorKind::Proof));
    }

    #[test]
    fn test_verify_encoding_proof_out_of_range() {
        let EncodingFixture {
            transcript,
            proof,
            commitment,
        } = new_encoding_fixture();

        let sent = &transcript.sent()[transcript.sent().len() - 1..];
        let recv = &transcript.received()[transcript.received().len() - 2..];

        let err = proof
            .verify_with_provider(
                &HashProvider::default(),
                &encoder_secret(),
                &commitment,
                sent,
                recv,
            )
            .unwrap_err();

        assert!(matches!(err.kind, ErrorKind::Proof));
    }

    #[test]
    fn test_verify_encoding_proof_tampered_idx() {
        let EncodingFixture {
            transcript,
            mut proof,
            commitment,
        } = new_encoding_fixture();

        let Opening { idx, .. } = proof.openings.values_mut().next().unwrap();

        *idx = RangeSet::from([0..3, 13..15]);

        let err = proof
            .verify_with_provider(
                &HashProvider::default(),
                &encoder_secret(),
                &commitment,
                transcript.sent(),
                transcript.received(),
            )
            .unwrap_err();

        assert!(matches!(err.kind, ErrorKind::Proof));
    }

    #[test]
    fn test_verify_encoding_proof_tampered_encoding_blinder() {
        let EncodingFixture {
            transcript,
            mut proof,
            commitment,
        } = new_encoding_fixture();

        let Opening { blinder, .. } = proof.openings.values_mut().next().unwrap();

        *blinder = rand::random();

        let err = proof
            .verify_with_provider(
                &HashProvider::default(),
                &encoder_secret(),
                &commitment,
                transcript.sent(),
                transcript.received(),
            )
            .unwrap_err();

        assert!(matches!(err.kind, ErrorKind::Proof));
    }
}

19
crates/core/src/transcript/encoding/provider.rs
Normal file
@@ -0,0 +1,19 @@
use std::ops::Range;

use crate::transcript::Direction;

/// A provider of plaintext encodings.
pub trait EncodingProvider {
    /// Writes the encoding of the given range into the destination buffer.
    fn provide_encoding(
        &self,
        direction: Direction,
        range: Range<usize>,
        dest: &mut Vec<u8>,
    ) -> Result<(), EncodingProviderError>;
}

/// Error for [`EncodingProvider`].
#[derive(Debug, thiserror::Error)]
#[error("failed to provide encoding")]
pub struct EncodingProviderError;

327
crates/core/src/transcript/encoding/tree.rs
Normal file
@@ -0,0 +1,327 @@
use std::collections::HashMap;

use bimap::BiMap;
use rangeset::set::RangeSet;
use serde::{Deserialize, Serialize};

use crate::{
    hash::{Blinder, HashAlgId, HashAlgorithm, TypedHash},
    merkle::MerkleTree,
    transcript::{
        encoding::{
            proof::{EncodingProof, Opening},
            EncodingProvider,
        },
        Direction,
    },
};

/// Encoding tree builder error.
#[derive(Debug, thiserror::Error)]
pub enum EncodingTreeError {
    /// Index is out of bounds of the transcript.
    #[error("index is out of bounds of the transcript")]
    OutOfBounds {
        /// The index.
        index: RangeSet<usize>,
        /// The transcript length.
        transcript_length: usize,
    },
    /// Encoding provider is missing an encoding for an index.
    #[error("encoding provider is missing an encoding for an index")]
    MissingEncoding {
        /// The index which is missing.
        index: RangeSet<usize>,
    },
    /// Index is missing from the tree.
    #[error("index is missing from the tree")]
    MissingLeaf {
        /// The index which is missing.
        index: RangeSet<usize>,
    },
}

/// A merkle tree of transcript encodings.
#[derive(Clone, Serialize, Deserialize)]
pub struct EncodingTree {
    /// Merkle tree of the commitments.
    tree: MerkleTree,
    /// Nonces used to blind the hashes.
    blinders: Vec<Blinder>,
    /// Mapping between the index of a leaf and the transcript index it
    /// corresponds to.
    idxs: BiMap<usize, (Direction, RangeSet<usize>)>,
    /// Union of all transcript indices in the sent direction.
    sent_idx: RangeSet<usize>,
    /// Union of all transcript indices in the received direction.
    received_idx: RangeSet<usize>,
}

opaque_debug::implement!(EncodingTree);

impl EncodingTree {
    /// Creates a new encoding tree.
    ///
    /// # Arguments
    ///
    /// * `hasher` - The hash algorithm to use.
    /// * `idxs` - The subsequence indices to commit to.
    /// * `provider` - The encoding provider.
    pub fn new<'idx>(
        hasher: &dyn HashAlgorithm,
        idxs: impl IntoIterator<Item = &'idx (Direction, RangeSet<usize>)>,
        provider: &dyn EncodingProvider,
    ) -> Result<Self, EncodingTreeError> {
        let mut this = Self {
            tree: MerkleTree::new(hasher.id()),
            blinders: Vec::new(),
            idxs: BiMap::new(),
            sent_idx: RangeSet::default(),
            received_idx: RangeSet::default(),
        };

        let mut leaves = Vec::new();
        let mut encoding = Vec::new();
        for dir_idx in idxs {
            let direction = dir_idx.0;
            let idx = &dir_idx.1;

            // Ignore empty indices.
            if idx.is_empty() {
                continue;
            }

            if this.idxs.contains_right(dir_idx) {
                // The subsequence is already in the tree.
                continue;
            }

            let blinder: Blinder = rand::random();

            encoding.clear();
            for range in idx.iter() {
                provider
                    .provide_encoding(direction, range, &mut encoding)
                    .map_err(|_| EncodingTreeError::MissingEncoding { index: idx.clone() })?;
            }
            encoding.extend_from_slice(blinder.as_bytes());

            let leaf = hasher.hash(&encoding);

            leaves.push(leaf);
            this.blinders.push(blinder);
            this.idxs.insert(this.idxs.len(), dir_idx.clone());
            match direction {
                Direction::Sent => this.sent_idx.union_mut(idx),
                Direction::Received => this.received_idx.union_mut(idx),
            }
        }

        this.tree.insert(hasher, leaves);

        Ok(this)
    }

    /// Returns the root of the tree.
    pub fn root(&self) -> TypedHash {
        self.tree.root()
    }

    /// Returns the hash algorithm of the tree.
    pub fn algorithm(&self) -> HashAlgId {
        self.tree.algorithm()
    }

    /// Generates a proof for the given indices.
    ///
    /// # Arguments
    ///
    /// * `idxs` - The transcript indices to prove.
    pub fn proof<'idx>(
        &self,
        idxs: impl Iterator<Item = &'idx (Direction, RangeSet<usize>)>,
    ) -> Result<EncodingProof, EncodingTreeError> {
        let mut openings = HashMap::new();
        for dir_idx in idxs {
            let direction = dir_idx.0;
            let idx = &dir_idx.1;

            let leaf_idx = *self
                .idxs
                .get_by_right(dir_idx)
                .ok_or_else(|| EncodingTreeError::MissingLeaf { index: idx.clone() })?;
            let blinder = self.blinders[leaf_idx].clone();

            openings.insert(
                leaf_idx,
                Opening {
                    direction,
                    idx: idx.clone(),
                    blinder,
                },
            );
        }

        let mut indices = openings.keys().copied().collect::<Vec<_>>();
        indices.sort();

        Ok(EncodingProof {
            inclusion_proof: self.tree.proof(&indices),
            openings,
        })
    }

    /// Returns whether the tree contains the given transcript index.
    pub fn contains(&self, idx: &(Direction, RangeSet<usize>)) -> bool {
        self.idxs.contains_right(idx)
    }

    pub(crate) fn idx(&self, direction: Direction) -> &RangeSet<usize> {
        match direction {
            Direction::Sent => &self.sent_idx,
            Direction::Received => &self.received_idx,
        }
    }

    /// Returns the committed transcript indices.
    pub(crate) fn transcript_indices(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
        self.idxs.right_values()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        fixtures::{encoder_secret, encoding_provider},
        hash::{Blake3, HashProvider},
        transcript::{encoding::EncodingCommitment, Transcript},
    };
    use tlsn_data_fixtures::http::{request::POST_JSON, response::OK_JSON};

    fn new_tree<'seq>(
        transcript: &Transcript,
        idxs: impl Iterator<Item = &'seq (Direction, RangeSet<usize>)>,
    ) -> Result<EncodingTree, EncodingTreeError> {
        let provider = encoding_provider(transcript.sent(), transcript.received());

        EncodingTree::new(&Blake3::default(), idxs, &provider)
    }

    #[test]
    fn test_encoding_tree() {
        let transcript = Transcript::new(POST_JSON, OK_JSON);

        let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
        let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len()));

        let tree = new_tree(&transcript, [&idx_0, &idx_1].into_iter()).unwrap();

        assert!(tree.contains(&idx_0));
        assert!(tree.contains(&idx_1));

        let proof = tree.proof([&idx_0, &idx_1].into_iter()).unwrap();

        let commitment = EncodingCommitment { root: tree.root() };

        let (auth_sent, auth_recv) = proof
            .verify_with_provider(
                &HashProvider::default(),
                &encoder_secret(),
                &commitment,
                transcript.sent(),
                transcript.received(),
            )
            .unwrap();

        assert_eq!(auth_sent, idx_0.1);
        assert_eq!(auth_recv, idx_1.1);
    }

    #[test]
    fn test_encoding_tree_multiple_ranges() {
        let transcript = Transcript::new(POST_JSON, OK_JSON);

        let idx_0 = (Direction::Sent, RangeSet::from(0..1));
        let idx_1 = (Direction::Sent, RangeSet::from(1..POST_JSON.len()));
        let idx_2 = (Direction::Received, RangeSet::from(0..1));
        let idx_3 = (Direction::Received, RangeSet::from(1..OK_JSON.len()));

        let tree = new_tree(&transcript, [&idx_0, &idx_1, &idx_2, &idx_3].into_iter()).unwrap();

        assert!(tree.contains(&idx_0));
        assert!(tree.contains(&idx_1));
        assert!(tree.contains(&idx_2));
        assert!(tree.contains(&idx_3));

        let proof = tree
            .proof([&idx_0, &idx_1, &idx_2, &idx_3].into_iter())
            .unwrap();

        let commitment = EncodingCommitment { root: tree.root() };

        let (auth_sent, auth_recv) = proof
            .verify_with_provider(
                &HashProvider::default(),
                &encoder_secret(),
                &commitment,
                transcript.sent(),
                transcript.received(),
            )
            .unwrap();

        let mut expected_auth_sent = RangeSet::default();
        expected_auth_sent.union_mut(&idx_0.1);
        expected_auth_sent.union_mut(&idx_1.1);

        let mut expected_auth_recv = RangeSet::default();
        expected_auth_recv.union_mut(&idx_2.1);
        expected_auth_recv.union_mut(&idx_3.1);

        assert_eq!(auth_sent, expected_auth_sent);
        assert_eq!(auth_recv, expected_auth_recv);
    }

    #[test]
    fn test_encoding_tree_proof_missing_leaf() {
        let transcript = Transcript::new(POST_JSON, OK_JSON);

        let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
        let idx_1 = (Direction::Received, RangeSet::from(0..4));
        let idx_2 = (Direction::Received, RangeSet::from(4..OK_JSON.len()));

        let tree = new_tree(&transcript, [&idx_0, &idx_1].into_iter()).unwrap();

        let result = tree
            .proof([&idx_0, &idx_1, &idx_2].into_iter())
            .unwrap_err();
        assert!(matches!(result, EncodingTreeError::MissingLeaf { .. }));
    }

    #[test]
    fn test_encoding_tree_out_of_bounds() {
        let transcript = Transcript::new(POST_JSON, OK_JSON);

        let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len() + 1));
        let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len() + 1));

        let result = new_tree(&transcript, [&idx_0].into_iter()).unwrap_err();
        assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));

        let result = new_tree(&transcript, [&idx_1].into_iter()).unwrap_err();
        assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));
    }

    #[test]
    fn test_encoding_tree_missing_encoding() {
        let provider = encoding_provider(&[], &[]);

        let result = EncodingTree::new(
            &Blake3::default(),
            [(Direction::Sent, RangeSet::from(0..8))].iter(),
            &provider,
        )
        .unwrap_err();
        assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));
    }
}
@@ -14,6 +14,7 @@ use crate::{
|
||||
hash::{HashAlgId, HashProvider},
|
||||
transcript::{
|
||||
commit::{TranscriptCommitment, TranscriptCommitmentKind},
|
||||
encoding::{EncoderSecret, EncodingProof, EncodingProofError, EncodingTree},
|
||||
hash::{hash_plaintext, PlaintextHash, PlaintextHashSecret},
|
||||
Direction, PartialTranscript, RangeSet, Transcript, TranscriptSecret,
|
||||
},
|
||||
@@ -31,12 +32,14 @@ const DEFAULT_COMMITMENT_KINDS: &[TranscriptCommitmentKind] = &[
|
||||
TranscriptCommitmentKind::Hash {
|
||||
alg: HashAlgId::KECCAK256,
|
||||
},
|
||||
TranscriptCommitmentKind::Encoding,
|
||||
];
|
||||
|
||||
/// Proof of the contents of a transcript.
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct TranscriptProof {
|
||||
transcript: PartialTranscript,
|
||||
encoding_proof: Option<EncodingProof>,
|
||||
hash_secrets: Vec<PlaintextHashSecret>,
|
||||
}
|
||||
|
||||
@@ -50,18 +53,27 @@ impl TranscriptProof {
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `provider` - The hash provider to use for verification.
|
||||
/// * `length` - The transcript length.
|
||||
/// * `commitments` - The commitments to verify against.
|
||||
/// * `attestation_body` - The attestation body to verify against.
|
||||
pub fn verify_with_provider<'a>(
|
||||
self,
|
||||
provider: &HashProvider,
|
||||
length: &TranscriptLength,
|
||||
encoder_secret: Option<&EncoderSecret>,
|
||||
commitments: impl IntoIterator<Item = &'a TranscriptCommitment>,
|
||||
) -> Result<PartialTranscript, TranscriptProofError> {
|
||||
let mut encoding_commitment = None;
|
||||
let mut hash_commitments = HashSet::new();
|
||||
// Index commitments.
|
||||
for commitment in commitments {
|
||||
match commitment {
|
||||
TranscriptCommitment::Encoding(commitment) => {
|
||||
if encoding_commitment.replace(commitment).is_some() {
|
||||
return Err(TranscriptProofError::new(
|
||||
ErrorKind::Encoding,
|
||||
"multiple encoding commitments are present.",
|
||||
));
|
||||
}
|
||||
}
|
||||
TranscriptCommitment::Hash(plaintext_hash) => {
|
||||
hash_commitments.insert(plaintext_hash);
|
||||
}
|
||||
@@ -80,6 +92,34 @@ impl TranscriptProof {
|
||||
let mut total_auth_sent = RangeSet::default();
|
||||
let mut total_auth_recv = RangeSet::default();
|
||||
|
||||
// Verify encoding proof.
|
||||
if let Some(proof) = self.encoding_proof {
|
||||
let secret = encoder_secret.ok_or_else(|| {
|
||||
TranscriptProofError::new(
|
||||
ErrorKind::Encoding,
|
||||
"contains an encoding proof but missing encoder secret",
|
||||
)
|
||||
})?;
|
||||
|
||||
let commitment = encoding_commitment.ok_or_else(|| {
|
||||
TranscriptProofError::new(
|
||||
ErrorKind::Encoding,
|
||||
"contains an encoding proof but missing encoding commitment",
|
||||
)
|
||||
})?;
|
||||
|
||||
let (auth_sent, auth_recv) = proof.verify_with_provider(
|
||||
provider,
|
||||
secret,
|
||||
commitment,
|
||||
self.transcript.sent_unsafe(),
|
||||
self.transcript.received_unsafe(),
|
||||
)?;
|
||||
|
||||
total_auth_sent.union_mut(&auth_sent);
|
||||
total_auth_recv.union_mut(&auth_recv);
|
||||
}
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
for PlaintextHashSecret {
|
||||
direction,
|
||||
@@ -163,6 +203,7 @@ impl TranscriptProofError {
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ErrorKind {
|
||||
Encoding,
|
||||
Hash,
|
||||
Proof,
|
||||
}
|
||||
@@ -172,6 +213,7 @@ impl fmt::Display for TranscriptProofError {
|
||||
f.write_str("transcript proof error: ")?;
|
||||
|
||||
match self.kind {
|
||||
ErrorKind::Encoding => f.write_str("encoding error")?,
|
||||
ErrorKind::Hash => f.write_str("hash error")?,
|
||||
ErrorKind::Proof => f.write_str("proof error")?,
|
||||
}
|
||||
@@ -184,6 +226,12 @@ impl fmt::Display for TranscriptProofError {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<EncodingProofError> for TranscriptProofError {
|
||||
fn from(e: EncodingProofError) -> Self {
|
||||
TranscriptProofError::new(ErrorKind::Encoding, e)
|
||||
}
|
||||
}
|
||||
|
||||
/// Union of ranges to reveal.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct QueryIdx {
|
||||
@@ -228,6 +276,7 @@ pub struct TranscriptProofBuilder<'a> {
|
||||
/// Commitment kinds in order of preference for building transcript proofs.
|
||||
commitment_kinds: Vec<TranscriptCommitmentKind>,
|
||||
transcript: &'a Transcript,
|
||||
encoding_tree: Option<&'a EncodingTree>,
|
||||
hash_secrets: Vec<&'a PlaintextHashSecret>,
|
||||
committed_sent: RangeSet<usize>,
|
||||
committed_recv: RangeSet<usize>,
|
||||
@@ -243,9 +292,15 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
let mut committed_sent = RangeSet::default();
|
||||
let mut committed_recv = RangeSet::default();
|
||||
|
||||
let mut encoding_tree = None;
|
||||
let mut hash_secrets = Vec::new();
|
||||
for secret in secrets {
|
||||
match secret {
|
||||
TranscriptSecret::Encoding(tree) => {
|
||||
committed_sent.union_mut(tree.idx(Direction::Sent));
|
||||
committed_recv.union_mut(tree.idx(Direction::Received));
|
||||
encoding_tree = Some(tree);
|
||||
}
|
||||
TranscriptSecret::Hash(hash) => {
|
||||
match hash.direction {
|
||||
Direction::Sent => committed_sent.union_mut(&hash.idx),
|
||||
@@ -259,6 +314,7 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
Self {
|
||||
commitment_kinds: DEFAULT_COMMITMENT_KINDS.to_vec(),
|
||||
transcript,
|
||||
encoding_tree,
|
||||
hash_secrets,
|
||||
committed_sent,
|
||||
committed_recv,
|
||||
@@ -356,6 +412,7 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
transcript: self
|
||||
.transcript
|
||||
.to_partial(self.query_idx.sent.clone(), self.query_idx.recv.clone()),
|
||||
encoding_proof: None,
|
||||
hash_secrets: Vec::new(),
|
||||
};
|
||||
let mut uncovered_query_idx = self.query_idx.clone();
|
||||
@@ -367,6 +424,46 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
// self.commitment_kinds.
|
||||
if let Some(kind) = commitment_kinds_iter.next() {
|
||||
match kind {
|
||||
TranscriptCommitmentKind::Encoding => {
|
||||
let Some(encoding_tree) = self.encoding_tree else {
|
||||
// Proceeds to the next preferred commitment kind if encoding tree is
|
||||
// not available.
|
||||
continue;
|
||||
};
|
||||
|
||||
let (sent_dir_idxs, sent_uncovered) = uncovered_query_idx.sent.cover_by(
|
||||
encoding_tree
|
||||
.transcript_indices()
|
||||
.filter(|(dir, _)| *dir == Direction::Sent),
|
||||
|(_, idx)| idx,
|
||||
);
|
||||
// Uncovered ranges will be checked with ranges of the next
|
||||
// preferred commitment kind.
|
||||
uncovered_query_idx.sent = sent_uncovered;
|
||||
|
||||
let (recv_dir_idxs, recv_uncovered) = uncovered_query_idx.recv.cover_by(
|
||||
encoding_tree
|
||||
.transcript_indices()
|
||||
.filter(|(dir, _)| *dir == Direction::Received),
|
||||
|(_, idx)| idx,
|
||||
);
|
||||
uncovered_query_idx.recv = recv_uncovered;
|
||||
|
||||
let dir_idxs = sent_dir_idxs
|
||||
.into_iter()
|
||||
.chain(recv_dir_idxs)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Skip proof generation if there are no committed ranges that can cover the
|
||||
// query ranges.
|
||||
if !dir_idxs.is_empty() {
|
||||
transcript_proof.encoding_proof = Some(
|
||||
encoding_tree
|
||||
.proof(dir_idxs.into_iter())
|
||||
.expect("subsequences were checked to be in tree"),
|
||||
);
|
||||
}
|
||||
}
|
||||
TranscriptCommitmentKind::Hash { alg } => {
|
||||
let (sent_hashes, sent_uncovered) = uncovered_query_idx.sent.cover_by(
|
||||
self.hash_secrets.iter().filter(|hash| {
|
||||
@@ -493,10 +590,46 @@ mod tests {
use rstest::rstest;
use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};

use crate::hash::{Blinder, HashAlgId};
use crate::{
fixtures::{encoder_secret, encoding_provider},
hash::{Blake3, Blinder, HashAlgId},
transcript::TranscriptCommitConfigBuilder,
};

use super::*;

#[rstest]
fn test_verify_missing_encoding_commitment_root() {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let idxs = vec![(Direction::Received, RangeSet::from(0..transcript.len().1))];
let encoding_tree = EncodingTree::new(
&Blake3::default(),
&idxs,
&encoding_provider(transcript.sent(), transcript.received()),
)
.unwrap();

let secrets = vec![TranscriptSecret::Encoding(encoding_tree)];
let mut builder = TranscriptProofBuilder::new(&transcript, &secrets);

builder.reveal_recv(&(0..transcript.len().1)).unwrap();

let transcript_proof = builder.build().unwrap();

let provider = HashProvider::default();
let err = transcript_proof
.verify_with_provider(
&provider,
&transcript.length(),
Some(&encoder_secret()),
&[],
)
.err()
.unwrap();

assert!(matches!(err.kind, ErrorKind::Encoding));
}

#[rstest]
fn test_reveal_range_out_of_bounds() {
let transcript = Transcript::new(
@@ -516,7 +649,7 @@ mod tests {
}

#[rstest]
fn test_reveal_missing_commitment() {
fn test_reveal_missing_encoding_tree() {
let transcript = Transcript::new(
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
@@ -565,6 +698,7 @@ mod tests {
.verify_with_provider(
&provider,
&transcript.length(),
None,
&[TranscriptCommitment::Hash(commitment)],
)
.unwrap();
@@ -614,6 +748,7 @@ mod tests {
.verify_with_provider(
&provider,
&transcript.length(),
None,
&[TranscriptCommitment::Hash(commitment)],
)
.unwrap_err();
@@ -629,19 +764,24 @@ mod tests {
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
TranscriptCommitmentKind::Encoding,
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
TranscriptCommitmentKind::Encoding,
]);

assert_eq!(
builder.commitment_kinds,
vec![TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256
},]
vec![
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256
},
TranscriptCommitmentKind::Encoding
]
);
}

@@ -651,7 +791,7 @@ mod tests {
RangeSet::from([0..10, 12..30]),
true,
)]
#[case::reveal_all_rangesets_with_single_superset_range(
#[case::reveal_all_rangesets_with_superset_ranges(
vec![RangeSet::from([0..1]), RangeSet::from([1..2, 8..9]), RangeSet::from([2..4, 6..8]), RangeSet::from([2..3, 6..7]), RangeSet::from([9..12])],
RangeSet::from([0..4, 6..9]),
true,
@@ -682,30 +822,29 @@ mod tests {
false,
)]
#[allow(clippy::single_range_in_vec_init)]
fn test_reveal_multiple_rangesets_with_one_rangeset(
fn test_reveal_mutliple_rangesets_with_one_rangeset(
#[case] commit_recv_rangesets: Vec<RangeSet<usize>>,
#[case] reveal_recv_rangeset: RangeSet<usize>,
#[case] success: bool,
) {
use rand::{Rng, SeedableRng};

let mut rng = rand::rngs::StdRng::seed_from_u64(0);
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);

// Create hash commitments for each rangeset
let mut secrets = Vec::new();
// Encoding commitment kind
let mut transcript_commitment_builder = TranscriptCommitConfigBuilder::new(&transcript);
for rangeset in commit_recv_rangesets.iter() {
let blinder: crate::hash::Blinder = rng.random();

let secret = PlaintextHashSecret {
direction: Direction::Received,
idx: rangeset.clone(),
alg: HashAlgId::BLAKE3,
blinder,
};
secrets.push(TranscriptSecret::Hash(secret));
transcript_commitment_builder.commit_recv(rangeset).unwrap();
}

let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();

let encoding_tree = EncodingTree::new(
&Blake3::default(),
transcripts_commitment_config.iter_encoding(),
&encoding_provider(GET_WITH_HEADER, OK_JSON),
)
.unwrap();

let secrets = vec![TranscriptSecret::Encoding(encoding_tree)];
let mut builder = TranscriptProofBuilder::new(&transcript, &secrets);

if success {
@@ -758,34 +897,27 @@ mod tests {
#[case] uncovered_sent_rangeset: RangeSet<usize>,
#[case] uncovered_recv_rangeset: RangeSet<usize>,
) {
use rand::{Rng, SeedableRng};

let mut rng = rand::rngs::StdRng::seed_from_u64(0);
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);

// Create hash commitments for each rangeset
let mut secrets = Vec::new();
// Encoding commitment kind
let mut transcript_commitment_builder = TranscriptCommitConfigBuilder::new(&transcript);
for rangeset in commit_sent_rangesets.iter() {
let blinder: crate::hash::Blinder = rng.random();
let secret = PlaintextHashSecret {
direction: Direction::Sent,
idx: rangeset.clone(),
alg: HashAlgId::BLAKE3,
blinder,
};
secrets.push(TranscriptSecret::Hash(secret));
transcript_commitment_builder.commit_sent(rangeset).unwrap();
}
for rangeset in commit_recv_rangesets.iter() {
let blinder: crate::hash::Blinder = rng.random();
let secret = PlaintextHashSecret {
direction: Direction::Received,
idx: rangeset.clone(),
alg: HashAlgId::BLAKE3,
blinder,
};
secrets.push(TranscriptSecret::Hash(secret));
transcript_commitment_builder.commit_recv(rangeset).unwrap();
}

let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();

let encoding_tree = EncodingTree::new(
&Blake3::default(),
transcripts_commitment_config.iter_encoding(),
&encoding_provider(GET_WITH_HEADER, OK_JSON),
)
.unwrap();

let secrets = vec![TranscriptSecret::Encoding(encoding_tree)];
let mut builder = TranscriptProofBuilder::new(&transcript, &secrets);
builder.reveal_sent(&reveal_sent_rangeset).unwrap();
builder.reveal_recv(&reveal_recv_rangeset).unwrap();

@@ -10,10 +10,13 @@ workspace = true
[dependencies]
tlsn = { workspace = true }
tlsn-formats = { workspace = true }
tlsn-core = { workspace = true }
tls-server-fixture = { workspace = true }
tlsn-server-fixture = { workspace = true }
tlsn-server-fixture-certs = { workspace = true }
spansy = { workspace = true }
mpz-predicate = { workspace = true }
rangeset = { workspace = true }

anyhow = { workspace = true }
bincode = { workspace = true }
@@ -61,3 +64,7 @@ path = "attestation/present.rs"
[[example]]
name = "attestation_verify"
path = "attestation/verify.rs"

[[example]]
name = "interactive_predicate"
path = "interactive_predicate/interactive_predicate.rs"

@@ -5,6 +5,7 @@ This folder contains examples demonstrating how to use the TLSNotary protocol.
* [Interactive](./interactive/README.md): Interactive Prover and Verifier session without a trusted notary.
* [Attestation](./attestation/README.md): Performing a simple notarization with a trusted notary.
* [Interactive_zk](./interactive_zk/README.md): Interactive Prover and Verifier session demonstrating zero-knowledge age verification using Noir.
* [Interactive_predicate](./interactive_predicate/README.md): Interactive session demonstrating predicate proving over transcript data (e.g., proving a JSON field is a valid string without revealing it).

Refer to <https://tlsnotary.org/docs/quick_start> for a quick start guide to using TLSNotary with these examples.
@@ -332,6 +332,7 @@ async fn notary<S: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
let (
VerifierOutput {
transcript_commitments,
encoder_secret,
..
},
verifier,
@@ -392,6 +393,10 @@ async fn notary<S: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
.server_ephemeral_key(tls_transcript.server_ephemeral_key().clone())
.transcript_commitments(transcript_commitments);

if let Some(encoder_secret) = encoder_secret {
builder.encoder_secret(encoder_secret);
}

let attestation = builder.build(&provider)?;

// Send attestation to prover.

29
crates/examples/interactive_predicate/README.md
Normal file
@@ -0,0 +1,29 @@
## Interactive Predicate: Proving Predicates over Transcript Data

This example demonstrates how to use TLSNotary to prove predicates (boolean constraints) over transcript bytes in zero knowledge, without revealing the actual data.

In this example:
- The server returns JSON data containing a "name" field with a string value
- The Prover proves that the name value is a valid JSON string without revealing it
- The Verifier learns that the string is valid JSON, but not the actual content

This uses `mpz_predicate` to build predicates that operate on transcript bytes. The predicate is compiled to a circuit and executed in the ZK VM to prove satisfaction.
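A condensed sketch of the two sides, using only the APIs that appear in the example code below (`validate_string`, `Pred`, `RangeSet`); the surrounding prover/verifier setup is elided:

```rust
use mpz_predicate::{json::validate_string, Pred};
use rangeset::prelude::RangeSet;

/// Predicate name both parties agree on out-of-band.
const JSON_STRING_PREDICATE: &str = "valid_json_string";

/// Both sides derive the *same* predicate from the byte indices of the
/// hidden value within the received transcript.
fn build_json_string_predicate(indices: &RangeSet<usize>) -> Pred {
    validate_string(indices.clone())
}

/// Verifier-side resolver: maps an agreed name (plus the indices from the
/// prover's request) to a predicate, rejecting anything unknown.
fn resolve(name: &str, indices: &RangeSet<usize>) -> Option<Pred> {
    match name {
        JSON_STRING_PREDICATE => Some(build_json_string_predicate(indices)),
        _ => None,
    }
}
```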

### Running the Example

First, start the test server from the root of this repository:
```shell
RUST_LOG=info PORT=4000 cargo run --bin tlsn-server-fixture
```

Next, run the interactive predicate example:
```shell
SERVER_PORT=4000 cargo run --release --example interactive_predicate
```

To view more detailed debug information:
```shell
RUST_LOG=debug,yamux=info,uid_mux=info SERVER_PORT=4000 cargo run --release --example interactive_predicate
```

> Note: In this example, the Prover and Verifier run on the same machine. In real-world scenarios, they would typically operate on separate machines.
368
crates/examples/interactive_predicate/interactive_predicate.rs
Normal file
@@ -0,0 +1,368 @@
//! Example demonstrating predicate proving over transcript data.
//!
//! This example shows how a prover can prove a predicate (boolean constraint)
//! over transcript bytes in zero knowledge, without revealing the actual data.
//!
//! In this example:
//! - The server returns JSON data containing a "name" field with a string value
//! - The prover proves that the name value is a valid JSON string without
//!   revealing it
//! - The verifier learns that the string is valid JSON, but not the actual
//!   content

use std::{
env,
net::{IpAddr, SocketAddr},
};

use anyhow::Result;
use http_body_util::Empty;
use hyper::{body::Bytes, Request, StatusCode, Uri};
use hyper_util::rt::TokioIo;
use mpz_predicate::{json::validate_string, Pred};
use rangeset::prelude::RangeSet;
use tokio::io::{AsyncRead, AsyncWrite};
use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
use tracing::instrument;

use tlsn::{
config::{
prove::ProveConfig,
prover::ProverConfig,
tls::TlsClientConfig,
tls_commit::{mpc::MpcTlsConfig, TlsCommitConfig, TlsCommitProtocolConfig},
verifier::VerifierConfig,
},
connection::ServerName,
prover::Prover,
transcript::Direction,
verifier::{Verifier, VerifierOutput},
webpki::{CertificateDer, RootCertStore},
};
use tlsn_server_fixture::DEFAULT_FIXTURE_PORT;
use tlsn_server_fixture_certs::{CA_CERT_DER, SERVER_DOMAIN};

/// Predicate name for JSON string validation (both parties agree on this
/// out-of-band).
const JSON_STRING_PREDICATE: &str = "valid_json_string";

// Maximum number of bytes that can be sent from prover to server.
const MAX_SENT_DATA: usize = 1 << 12;
// Maximum number of bytes that can be received by prover from server.
const MAX_RECV_DATA: usize = 1 << 14;

/// Builds a predicate that validates a JSON string at the given indices.
///
/// Uses mpz_predicate's `validate_string` to ensure the bytes form a valid
/// JSON string (proper escaping, valid UTF-8, no control characters, etc.).
fn build_json_string_predicate(indices: &RangeSet<usize>) -> Pred {
validate_string(indices.clone())
}

#[tokio::main]
async fn main() {
tracing_subscriber::fmt::init();

let server_host: String = env::var("SERVER_HOST").unwrap_or("127.0.0.1".into());
let server_port: u16 = env::var("SERVER_PORT")
.map(|port| port.parse().expect("port should be valid integer"))
.unwrap_or(DEFAULT_FIXTURE_PORT);

// Use the JSON endpoint that returns data.
let uri = format!("https://{SERVER_DOMAIN}:{server_port}/formats/json");
let server_ip: IpAddr = server_host.parse().expect("Invalid IP address");
let server_addr = SocketAddr::from((server_ip, server_port));

// Connect prover and verifier.
let (prover_socket, verifier_socket) = tokio::io::duplex(1 << 23);
let prover = prover(prover_socket, &server_addr, &uri);
let verifier = verifier(verifier_socket);

match tokio::try_join!(prover, verifier) {
Ok(_) => println!("\nSuccess! The prover proved that a JSON field contains a valid string without revealing it."),
Err(e) => eprintln!("Error: {e}"),
}
}

/// Finds the value of a JSON field in the response body.
/// Returns (start_index, end_index) of the value (excluding quotes for
/// strings).
fn find_json_string_value(data: &[u8], field_name: &str) -> Option<(usize, usize)> {
let search_pattern = format!("\"{}\":", field_name);
let pattern_bytes = search_pattern.as_bytes();

// Find the field name
let field_pos = data
.windows(pattern_bytes.len())
.position(|w| w == pattern_bytes)?;

// Skip past the field name and colon
let mut pos = field_pos + pattern_bytes.len();

// Skip whitespace
while pos < data.len() && (data[pos] == b' ' || data[pos] == b'\n' || data[pos] == b'\r') {
pos += 1;
}

// Check if it's a string (starts with quote)
if pos >= data.len() || data[pos] != b'"' {
return None;
}

// Skip opening quote
let start = pos + 1;

// Find closing quote (handling escapes)
let mut end = start;
while end < data.len() {
if data[end] == b'\\' {
// Skip escaped character
end += 2;
} else if data[end] == b'"' {
break;
} else {
end += 1;
}
}

Some((start, end))
}
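
// Illustrative check of `find_json_string_value` (hypothetical snippet, not
// part of the example): the returned range excludes the surrounding quotes.
//
//     let body = br#"{"name": "Alice", "age": 30}"#;
//     let (start, end) = find_json_string_value(body, "name").unwrap();
//     assert_eq!(&body[start..end], b"Alice");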

#[instrument(skip(verifier_socket))]
async fn prover<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(
verifier_socket: T,
server_addr: &SocketAddr,
uri: &str,
) -> Result<()> {
let uri = uri.parse::<Uri>().unwrap();
assert_eq!(uri.scheme().unwrap().as_str(), "https");
let server_domain = uri.authority().unwrap().host();

// Create a new prover and perform necessary setup.
let prover = Prover::new(ProverConfig::builder().build()?)
.commit(
TlsCommitConfig::builder()
.protocol(
MpcTlsConfig::builder()
.max_sent_data(tlsn_examples::MAX_SENT_DATA)
.max_recv_data(tlsn_examples::MAX_RECV_DATA)
.build()?,
)
.build()?,
verifier_socket.compat(),
)
.await?;

// Open a TCP connection to the server.
let client_socket = tokio::net::TcpStream::connect(server_addr).await?;

// Bind the prover to the server connection.
let (tls_connection, prover_fut) = prover
.connect(
TlsClientConfig::builder()
.server_name(ServerName::Dns(SERVER_DOMAIN.try_into()?))
.root_store(RootCertStore {
roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
})
.build()?,
client_socket.compat(),
)
.await?;
let tls_connection = TokioIo::new(tls_connection.compat());

// Spawn the Prover to run in the background.
let prover_task = tokio::spawn(prover_fut);

// MPC-TLS Handshake.
let (mut request_sender, connection) =
hyper::client::conn::http1::handshake(tls_connection).await?;

// Spawn the connection to run in the background.
tokio::spawn(connection);

// Send request for JSON data.
let request = Request::builder()
.uri(uri.clone())
.header("Host", server_domain)
.header("Connection", "close")
.method("GET")
.body(Empty::<Bytes>::new())?;
let response = request_sender.send_request(request).await?;

assert!(response.status() == StatusCode::OK);

// Create proof for the Verifier.
let mut prover = prover_task.await??;

// Find the "name" field value in the JSON response
let received = prover.transcript().received();

// Find the HTTP body (after \r\n\r\n)
let body_start = received
.windows(4)
.position(|w| w == b"\r\n\r\n")
.map(|p| p + 4)
.unwrap_or(0);

// Find the "name" field's string value
let (value_start, value_end) =
find_json_string_value(&received[body_start..], "name").expect("should find name field");

// Adjust to absolute positions in transcript
let value_start = body_start + value_start;
let value_end = body_start + value_end;

let value_bytes = &received[value_start..value_end];
println!(
"Prover: Found 'name' field value: \"{}\" at positions {}..{}",
String::from_utf8_lossy(value_bytes),
value_start,
value_end
);
println!("Prover: Will prove this is a valid JSON string without revealing the actual content");

// Build indices for the predicate as a RangeSet
let indices: RangeSet<usize> = (value_start..value_end).into();

// Build the predicate using mpz_predicate
let predicate = build_json_string_predicate(&indices);

let mut builder = ProveConfig::builder(prover.transcript());

// Reveal the server identity.
builder.server_identity();

// Reveal the sent data (the request).
builder.reveal_sent(&(0..prover.transcript().sent().len()))?;

// Reveal everything EXCEPT the string value we're proving the predicate over.
if value_start > 0 {
builder.reveal_recv(&(0..value_start))?;
}
if value_end < prover.transcript().received().len() {
builder.reveal_recv(&(value_end..prover.transcript().received().len()))?;
}

// Add the predicate to prove the string is valid JSON without revealing the
// value.
builder.predicate(JSON_STRING_PREDICATE, Direction::Received, predicate)?;

let config = builder.build()?;

prover.prove(&config).await?;
prover.close().await?;

println!("Prover: Successfully proved the predicate!");

Ok(())
}

#[instrument(skip(socket))]
async fn verifier<T: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
socket: T,
) -> Result<()> {
let verifier_config = VerifierConfig::builder()
.root_store(RootCertStore {
roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
})
.build()?;
let verifier = Verifier::new(verifier_config);

// Validate the proposed configuration and run the TLS commitment protocol.
let verifier = verifier.commit(socket.compat()).await?;

// Validate configuration.
let reject = if let TlsCommitProtocolConfig::Mpc(mpc_tls_config) = verifier.request().protocol()
{
if mpc_tls_config.max_sent_data() > MAX_SENT_DATA {
Some("max_sent_data is too large")
} else if mpc_tls_config.max_recv_data() > MAX_RECV_DATA {
Some("max_recv_data is too large")
} else {
None
}
} else {
Some("expecting to use MPC-TLS")
};

if reject.is_some() {
verifier.reject(reject).await?;
return Err(anyhow::anyhow!("protocol configuration rejected"));
}

// Run the TLS commitment protocol.
let verifier = verifier.accept().await?.run().await?;

// Validate the proving request.
let verifier = verifier.verify().await?;

// Check that server identity is being proven.
if !verifier.request().server_identity() {
let verifier = verifier
.reject(Some("expecting to verify the server name"))
.await?;
verifier.close().await?;
return Err(anyhow::anyhow!("prover did not reveal the server name"));
}

// Check if predicates are requested and validate them.
let predicates = verifier.request().predicates();
if !predicates.is_empty() {
println!(
"Verifier: Prover requested {} predicate(s):",
predicates.len()
);
for pred in predicates {
println!(
" - '{}' on {:?} at {} indices",
pred.name(),
pred.direction(),
pred.indices().len()
);
}
}

// Define the predicate resolver - this maps predicate names to predicates.
// The resolver receives the predicate name and the indices from the prover's
// request.
let predicate_resolver = |name: &str, indices: &RangeSet<usize>| -> Option<Pred> {
match name {
JSON_STRING_PREDICATE => {
// Build the JSON string validation predicate with the provided indices
Some(build_json_string_predicate(indices))
}
_ => None,
}
};

// Accept with predicate verification.
let (
VerifierOutput {
server_name,
transcript,
..
},
verifier,
) = verifier
.accept_with_predicates(Some(&predicate_resolver))
.await?;

verifier.close().await?;

let server_name = server_name.expect("prover should have revealed server name");
let transcript = transcript.expect("prover should have revealed transcript data");

// Verify server name.
let ServerName::Dns(server_name) = server_name;
assert_eq!(server_name.as_str(), SERVER_DOMAIN);

// The verifier can see the response but with the predicated string redacted.
let received = transcript.received_unsafe();
let redacted = String::from_utf8_lossy(received).replace('\0', "[REDACTED]");
println!("Verifier: Received data (string value redacted):\n{redacted}");

println!("Verifier: Predicate verified successfully!");
println!("Verifier: The hidden value is proven to be a valid JSON string");

Ok(())
}
@@ -1,59 +1,51 @@
#### Default Representative Benchmarks ####
#
# This benchmark measures TLSNotary performance on three representative network scenarios.
# Each scenario is run multiple times to produce statistical metrics (median, std dev, etc.)
# rather than plots. Use this for quick performance checks and CI regression testing.
#
# Payload sizes:
# - upload-size: 1KB (typical HTTP request)
# - download-size: 2KB (typical HTTP response/API data)
#
# Network scenarios are chosen to represent real-world user conditions where
# TLSNotary is primarily bottlenecked by upload bandwidth.

#### Cable/DSL Home Internet ####
# Most common residential internet connection
# - Asymmetric: high download, limited upload (typical bottleneck)
# - Upload bandwidth: 20 Mbps (realistic cable/DSL upload speed)
# - Latency: 20ms (typical ISP latency)
#### Latency ####

[[group]]
name = "cable"
bandwidth = 20
protocol_latency = 20
upload-size = 1024
download-size = 2048
name = "latency"
bandwidth = 1000

[[bench]]
group = "cable"

#### Mobile 5G ####
# Modern mobile connection with good coverage
# - Upload bandwidth: 30 Mbps (typical 5G upload in good conditions)
# - Latency: 30ms (higher than wired due to mobile tower hops)

[[group]]
name = "mobile_5g"
bandwidth = 30
protocol_latency = 30
upload-size = 1024
download-size = 2048
group = "latency"
protocol_latency = 10

[[bench]]
group = "mobile_5g"
group = "latency"
protocol_latency = 25

#### Fiber Home Internet ####
# High-end residential connection (best case scenario)
# - Symmetric: equal upload/download bandwidth
# - Upload bandwidth: 100 Mbps (typical fiber upload)
# - Latency: 15ms (lower latency than cable)
[[bench]]
group = "latency"
protocol_latency = 50

[[bench]]
group = "latency"
protocol_latency = 100

[[bench]]
group = "latency"
protocol_latency = 200

#### Bandwidth ####

[[group]]
name = "fiber"
name = "bandwidth"
protocol_latency = 25

[[bench]]
group = "bandwidth"
bandwidth = 10

[[bench]]
group = "bandwidth"
bandwidth = 50

[[bench]]
group = "bandwidth"
bandwidth = 100
protocol_latency = 15
upload-size = 1024
download-size = 2048

[[bench]]
group = "fiber"
group = "bandwidth"
bandwidth = 250

[[bench]]
group = "bandwidth"
bandwidth = 1000

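The restructured config appears to rely on group-level defaults that individual `[[bench]]` entries override. For instance, a hypothetical extra latency point (not part of this diff) would only need the field under test:

```toml
# Hypothetical extra data point: inherits bandwidth = 1000 from the
# "latency" group and overrides only the latency under test.
[[bench]]
group = "latency"
protocol_latency = 400
```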
@@ -1,52 +0,0 @@
#### Bandwidth Sweep Benchmark ####
#
# Measures how network bandwidth affects TLSNotary runtime.
# Keeps latency and payload sizes fixed while varying upload bandwidth.
#
# Fixed parameters:
# - Latency: 25ms (typical internet latency)
# - Upload: 1KB (typical request)
# - Download: 2KB (typical response)
#
# Variable: Bandwidth from 5 Mbps to 1000 Mbps
#
# Use this to plot "Bandwidth vs Runtime" and understand bandwidth sensitivity.
# Focus on upload bandwidth as TLSNotary is primarily upload-bottlenecked

[[group]]
name = "bandwidth_sweep"
protocol_latency = 25
upload-size = 1024
download-size = 2048

[[bench]]
group = "bandwidth_sweep"
bandwidth = 5

[[bench]]
group = "bandwidth_sweep"
bandwidth = 10

[[bench]]
group = "bandwidth_sweep"
bandwidth = 20

[[bench]]
group = "bandwidth_sweep"
bandwidth = 50

[[bench]]
group = "bandwidth_sweep"
bandwidth = 100

[[bench]]
group = "bandwidth_sweep"
bandwidth = 250

[[bench]]
group = "bandwidth_sweep"
bandwidth = 500

[[bench]]
group = "bandwidth_sweep"
bandwidth = 1000
@@ -1,53 +0,0 @@
#### Download Size Sweep Benchmark ####
#
# Measures how download payload size affects TLSNotary runtime.
# Keeps network conditions fixed while varying the response size.
#
# Fixed parameters:
# - Bandwidth: 100 Mbps (typical good connection)
# - Latency: 25ms (typical internet latency)
# - Upload: 1KB (typical request size)
#
# Variable: Download size from 1KB to 100KB
#
# Use this to plot "Download Size vs Runtime" and understand how much data
# TLSNotary can efficiently notarize. Useful for determining optimal
# chunking strategies for large responses.

[[group]]
name = "download_sweep"
bandwidth = 100
protocol_latency = 25
upload-size = 1024

[[bench]]
group = "download_sweep"
download-size = 1024

[[bench]]
group = "download_sweep"
download-size = 2048

[[bench]]
group = "download_sweep"
download-size = 5120

[[bench]]
group = "download_sweep"
download-size = 10240

[[bench]]
group = "download_sweep"
download-size = 20480

[[bench]]
group = "download_sweep"
download-size = 30720

[[bench]]
group = "download_sweep"
download-size = 40960

[[bench]]
group = "download_sweep"
download-size = 51200
@@ -1,47 +0,0 @@
#### Latency Sweep Benchmark ####
#
# Measures how network latency affects TLSNotary runtime.
# Keeps bandwidth and payload sizes fixed while varying protocol latency.
#
# Fixed parameters:
# - Bandwidth: 100 Mbps (typical good connection)
# - Upload: 1KB (typical request)
# - Download: 2KB (typical response)
#
# Variable: Protocol latency from 10ms to 200ms
#
# Use this to plot "Latency vs Runtime" and understand latency sensitivity.

[[group]]
name = "latency_sweep"
bandwidth = 100
upload-size = 1024
download-size = 2048

[[bench]]
group = "latency_sweep"
protocol_latency = 10

[[bench]]
group = "latency_sweep"
protocol_latency = 25

[[bench]]
group = "latency_sweep"
protocol_latency = 50

[[bench]]
group = "latency_sweep"
protocol_latency = 75

[[bench]]
group = "latency_sweep"
protocol_latency = 100

[[bench]]
group = "latency_sweep"
protocol_latency = 150

[[bench]]
group = "latency_sweep"
protocol_latency = 200
@@ -22,10 +22,7 @@ pub enum CmdOutput {
GetTests(Vec<String>),
Test(TestOutput),
Bench(BenchOutput),
#[cfg(target_arch = "wasm32")]
Fail {
reason: Option<String>,
},
Fail { reason: Option<String> },
}

#[derive(Debug, Clone, Serialize, Deserialize)]

@@ -22,7 +22,6 @@ clap = { workspace = true, features = ["derive", "env"] }
csv = { version = "1.3" }
duct = { version = "1" }
futures = { workspace = true }
indicatif = { version = "0.17" }
ipnet = { workspace = true }
serio = { workspace = true }
serde_json = { workspace = true }

@@ -16,10 +16,6 @@ pub struct Cli {
/// Subnet to assign harness network interfaces.
#[arg(long, default_value = "10.250.0.0/24", env = "SUBNET")]
pub subnet: Ipv4Net,
/// Run browser in headed mode (visible window) for debugging.
/// Works with both X11 and Wayland.
#[arg(long)]
pub headed: bool,
}

#[derive(Subcommand)]
@@ -35,13 +31,10 @@ pub enum Command {
},
/// runs benchmarks.
Bench {
/// Configuration path. Defaults to bench.toml which contains
/// representative scenarios (cable, 5G, fiber) for quick performance
/// checks. Use bench_*_sweep.toml files for parametric
/// analysis.
/// Configuration path.
#[arg(short, long, default_value = "bench.toml")]
config: PathBuf,
/// Output CSV file path for detailed metrics and post-processing.
/// Output file path.
#[arg(short, long, default_value = "metrics.csv")]
output: PathBuf,
/// Number of samples to measure per benchmark. This is overridden by
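With those defaults, a bench run could be invoked roughly as follows (a hedged sketch: the harness package name is not shown in this diff, so adjust it to the workspace):

```shell
# Hypothetical invocation; the subcommand and flags mirror the clap
# definitions above, but the package name is assumed.
cargo run --release -p harness -- bench --config bench.toml --output metrics.csv
```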
@@ -28,9 +28,6 @@ pub struct Executor {
ns: Namespace,
config: ExecutorConfig,
target: Target,
/// Display environment variables for headed mode (X11/Wayland).
/// Empty means headless mode.
display_env: Vec<String>,
state: State,
}

@@ -52,17 +49,11 @@ impl State {
}

impl Executor {
pub fn new(
ns: Namespace,
config: ExecutorConfig,
target: Target,
display_env: Vec<String>,
) -> Self {
pub fn new(ns: Namespace, config: ExecutorConfig, target: Target) -> Self {
Self {
ns,
config,
target,
display_env,
state: State::Init,
}
}
@@ -129,49 +120,23 @@ impl Executor {
let tmp = duct::cmd!("mktemp", "-d").read()?;
let tmp = tmp.trim();

let headed = !self.display_env.is_empty();

// Build command args based on headed/headless mode
let mut args: Vec<String> = vec![
"ip".into(),
"netns".into(),
"exec".into(),
self.ns.name().into(),
];

if headed {
// For headed mode: drop back to the current user and pass display env vars
// This allows the browser to connect to X11/Wayland while in the namespace
let user =
std::env::var("USER").context("USER environment variable not set")?;
args.extend(["sudo".into(), "-E".into(), "-u".into(), user, "env".into()]);
args.extend(self.display_env.clone());
}

args.push(chrome_path.to_string_lossy().into());
args.push(format!("--remote-debugging-port={PORT_BROWSER}"));

if headed {
// Headed mode: no headless, add flags to suppress first-run dialogs
args.extend(["--no-first-run".into(), "--no-default-browser-check".into()]);
} else {
// Headless mode: original flags
args.extend([
"--headless".into(),
"--disable-dev-shm-usage".into(),
"--disable-gpu".into(),
"--disable-cache".into(),
"--disable-application-cache".into(),
]);
}

args.extend([
"--no-sandbox".into(),
let process = duct::cmd!(
"sudo",
"ip",
"netns",
"exec",
self.ns.name(),
chrome_path,
format!("--remote-debugging-port={PORT_BROWSER}"),
"--headless",
"--disable-dev-shm-usage",
"--disable-gpu",
"--disable-cache",
"--disable-application-cache",
"--no-sandbox",
format!("--user-data-dir={tmp}"),
"--allowed-ips=10.250.0.1".into(),
]);

let process = duct::cmd("sudo", &args);
format!("--allowed-ips=10.250.0.1"),
);

let process = if !cfg!(feature = "debug") {
process.stderr_capture().stdout_capture().start()?

@@ -9,7 +9,7 @@ mod ws_proxy;
#[cfg(feature = "debug")]
mod debug_prelude;

use std::{collections::HashMap, time::Duration};
use std::time::Duration;

use anyhow::Result;
use clap::Parser;
@@ -22,7 +22,6 @@ use harness_core::{
rpc::{BenchCmd, TestCmd},
test::TestStatus,
};
use indicatif::{ProgressBar, ProgressStyle};

use cli::{Cli, Command};
use executor::Executor;
@@ -33,60 +32,6 @@ use crate::debug_prelude::*;

use crate::{cli::Route, network::Network, wasm_server::WasmServer, ws_proxy::WsProxy};

/// Statistics for a benchmark configuration
#[derive(Debug, Clone)]
struct BenchStats {
group: Option<String>,
bandwidth: usize,
latency: usize,
upload_size: usize,
download_size: usize,
times: Vec<u64>,
}

impl BenchStats {
fn median(&self) -> f64 {
let mut sorted = self.times.clone();
sorted.sort();
let len = sorted.len();
if len == 0 {
return 0.0;
}
if len.is_multiple_of(2) {
(sorted[len / 2 - 1] + sorted[len / 2]) as f64 / 2.0
} else {
sorted[len / 2] as f64
}
}
}
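// Worked example (illustrative): for times = [4, 1, 3, 2] the sorted list is
// [1, 2, 3, 4], the length is even, and median() = (2 + 3) / 2 = 2.5; for
// [3, 1, 2] it is the middle element, 2.0.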

/// Print summary table of benchmark results
fn print_bench_summary(stats: &[BenchStats]) {
if stats.is_empty() {
println!("\nNo benchmark results to display (only warmup was run).");
return;
}

println!("\n{}", "=".repeat(80));
println!("TLSNotary Benchmark Results");
println!("{}", "=".repeat(80));
println!();

for stat in stats {
let group_name = stat.group.as_deref().unwrap_or("unnamed");
println!(
"{} ({} Mbps, {}ms latency, {}KB↑ {}KB↓):",
group_name,
stat.bandwidth,
stat.latency,
stat.upload_size / 1024,
stat.download_size / 1024
);
println!(" Median: {:.2}s", stat.median() / 1000.0);
println!();
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum, Default)]
pub enum Target {
#[default]
@@ -105,46 +50,14 @@ struct Runner {
started: bool,
}

/// Collects display-related environment variables for headed browser mode.
/// Works with both X11 and Wayland by collecting whichever vars are present.
fn collect_display_env_vars() -> Vec<String> {
const DISPLAY_VARS: &[&str] = &[
"DISPLAY", // X11
"XAUTHORITY", // X11 auth
"WAYLAND_DISPLAY", // Wayland
"XDG_RUNTIME_DIR", // Wayland runtime dir
];

DISPLAY_VARS
.iter()
.filter_map(|&var| {
std::env::var(var)
.ok()
.map(|val| format!("{}={}", var, val))
})
.collect()
}

impl Runner {
fn new(cli: &Cli) -> Result<Self> {
let Cli {
target,
subnet,
headed,
..
} = cli;
let Cli { target, subnet, .. } = cli;
let current_path = std::env::current_exe().unwrap();
let fixture_path = current_path.parent().unwrap().join("server-fixture");
let network_config = NetworkConfig::new(*subnet);
let network = Network::new(network_config.clone())?;

// Collect display env vars once if headed mode is enabled
let display_env = if *headed {
collect_display_env_vars()
} else {
Vec::new()
};

let server_fixture =
ServerFixture::new(fixture_path, network.ns_app().clone(), network_config.app);
let wasm_server = WasmServer::new(
@@ -162,7 +75,6 @@ impl Runner {
.network_config(network_config.clone())
.build(),
*target,
display_env.clone(),
);
let exec_v = Executor::new(
network.ns_1().clone(),
@@ -172,7 +84,6 @@ impl Runner {
.network_config(network_config.clone())
.build(),
Target::Native,
Vec::new(), // Verifier doesn't need display env
);

Ok(Self {
@@ -207,12 +118,6 @@ pub async fn main() -> Result<()> {
tracing_subscriber::fmt::init();

let cli = Cli::parse();

// Validate --headed requires --target browser
if cli.headed && cli.target != Target::Browser {
anyhow::bail!("--headed can only be used with --target browser");
}

let mut runner = Runner::new(&cli)?;

let mut exit_code = 0;
@@ -301,12 +206,6 @@ pub async fn main() -> Result<()> {
samples_override,
skip_warmup,
} => {
// Print configuration info
println!("TLSNotary Benchmark Harness");
println!("Running benchmarks from: {}", config.display());
println!("Output will be written to: {}", output.display());
println!();

let items: BenchItems = toml::from_str(&std::fs::read_to_string(config)?)?;
let output_file = std::fs::File::create(output)?;
let mut writer = WriterBuilder::new().from_writer(output_file);
@@ -321,34 +220,7 @@ pub async fn main() -> Result<()> {
runner.exec_p.start().await?;
runner.exec_v.start().await?;

// Create progress bar
let pb = ProgressBar::new(benches.len() as u64);
pb.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] {bar:40.cyan/blue} {pos}/{len} {msg}")
.expect("valid template")
.progress_chars("█▓▒░ "),
);

// Collect measurements for stats
let mut measurements_by_config: HashMap<String, Vec<u64>> = HashMap::new();

let warmup_count = if skip_warmup { 0 } else { 3 };

for (idx, config) in benches.iter().enumerate() {
let is_warmup = idx < warmup_count;

let group_name = if is_warmup {
format!("Warmup {}/{}", idx + 1, warmup_count)
} else {
config.group.as_deref().unwrap_or("unnamed").to_string()
};

pb.set_message(format!(
"{} ({} Mbps, {}ms)",
group_name, config.bandwidth, config.protocol_latency
));

for config in benches {
runner
.network
.set_proto_config(config.bandwidth, config.protocol_latency.div_ceil(2))?;
@@ -377,73 +249,11 @@ pub async fn main() -> Result<()> {
panic!("expected prover output");
};

// Collect metrics for stats (skip warmup benches)
if !is_warmup {
let config_key = format!(
"{:?}|{}|{}|{}|{}",
config.group,
config.bandwidth,
config.protocol_latency,
config.upload_size,
config.download_size
);
measurements_by_config
.entry(config_key)
.or_default()
.push(metrics.time_total);
}

let measurement = Measurement::new(config.clone(), metrics);
let measurement = Measurement::new(config, metrics);

writer.serialize(measurement)?;
writer.flush()?;

pb.inc(1);
}

pb.finish_with_message("Benchmarks complete");

// Compute and print statistics
let mut all_stats: Vec<BenchStats> = Vec::new();
for (key, times) in measurements_by_config {
// Parse back the config from the key
let parts: Vec<&str> = key.split('|').collect();
if parts.len() >= 5 {
let group = if parts[0] == "None" {
None
} else {
Some(
parts[0]
.trim_start_matches("Some(\"")
.trim_end_matches("\")")
.to_string(),
)
};
let bandwidth: usize = parts[1].parse().unwrap_or(0);
let latency: usize = parts[2].parse().unwrap_or(0);
let upload_size: usize = parts[3].parse().unwrap_or(0);
let download_size: usize = parts[4].parse().unwrap_or(0);

all_stats.push(BenchStats {
group,
bandwidth,
latency,
upload_size,
download_size,
times,
});
}
}

// Sort stats by group name for consistent output
all_stats.sort_by(|a, b| {
a.group
.cmp(&b.group)
.then(a.latency.cmp(&b.latency))
.then(a.bandwidth.cmp(&b.bandwidth))
});

print_bench_summary(&all_stats);
}
Command::Serve {} => {
runner.start_services().await?;

@@ -24,7 +24,7 @@ use std::{
};

#[cfg(feature = "tracing")]
use tracing::{debug, debug_span, trace, warn, Instrument};
use tracing::{debug, debug_span, error, trace, warn, Instrument};

use tls_client::ClientConnection;


@@ -33,6 +33,7 @@ web-spawn = { workspace = true, optional = true }
mpz-circuits = { workspace = true, features = ["aes"] }
mpz-common = { workspace = true }
mpz-core = { workspace = true }
mpz-predicate = { workspace = true }
mpz-garble = { workspace = true }
mpz-garble-core = { workspace = true }
mpz-hash = { workspace = true }

@@ -21,6 +21,20 @@ impl<T> RangeMap<T>
where
T: Item,
{
pub(crate) fn new(map: Vec<(usize, T)>) -> Self {
let mut pos = 0;
for (idx, item) in &map {
assert!(
*idx >= pos,
"items must be sorted by index and non-overlapping"
);

pos = *idx + item.length();
}

Self { map }
}
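// Illustration of the invariant enforced above (hypothetical `Item` whose
// `length()` is its byte count): `RangeMap::new(vec![(0, a), (4, b)])` is
// accepted when `a.length() == 4`, because each entry must start at or after
// the end of the previous one, while `vec![(0, a), (2, b)]` would trip the
// assert.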

/// Returns `true` if the map is empty.
pub(crate) fn is_empty(&self) -> bool {
self.map.is_empty()
@@ -33,6 +47,11 @@ where
.map(|(idx, item)| *idx..*idx + item.length())
}

/// Returns the length of the map.
pub(crate) fn len(&self) -> usize {
self.map.iter().map(|(_, item)| item.length()).sum()
}

pub(crate) fn iter(&self) -> impl Iterator<Item = (Range<usize>, &T)> {
self.map
.iter()

@@ -6,6 +6,11 @@ use mpz_core::Block;
#[cfg(not(tlsn_insecure))]
use mpz_garble::protocol::semihonest::{Evaluator, Garbler};
use mpz_garble_core::Delta;
use mpz_memory_core::{
Vector,
binary::U8,
correlated::{Key, Mac},
};
#[cfg(not(tlsn_insecure))]
use mpz_ot::cot::{DerandCOTReceiver, DerandCOTSender};
use mpz_ot::{
@@ -19,6 +24,8 @@ use tlsn_core::config::tls_commit::mpc::{MpcTlsConfig, NetworkSetting};
use tlsn_deap::Deap;
use tokio::sync::Mutex;

use crate::transcript_internal::commit::encoding::{KeyStore, MacStore};

#[cfg(not(tlsn_insecure))]
pub(crate) type ProverMpc =
Garbler<DerandCOTSender<SharedRCOTSender<kos::Sender<co::Receiver>, Block>>>;
@@ -186,3 +193,41 @@ pub(crate) fn translate_keys<Mpc, Zk>(keys: &mut SessionKeys, vm: &Deap<Mpc, Zk>
.translate(keys.server_write_mac_key)
.expect("VM memory should be consistent");
}

impl<T> KeyStore for Verifier<T> {
fn delta(&self) -> &Delta {
self.delta()
}

fn get_keys(&self, data: Vector<U8>) -> Option<&[Key]> {
self.get_keys(data).ok()
}
}

impl<T> MacStore for Prover<T> {
fn get_macs(&self, data: Vector<U8>) -> Option<&[Mac]> {
self.get_macs(data).ok()
}
}

#[cfg(tlsn_insecure)]
mod insecure {
use super::*;
use mpz_ideal_vm::IdealVm;

impl KeyStore for IdealVm {
fn delta(&self) -> &Delta {
unimplemented!("encodings not supported in insecure mode")
}

fn get_keys(&self, _data: Vector<U8>) -> Option<&[Key]> {
unimplemented!("encodings not supported in insecure mode")
}
}

impl MacStore for IdealVm {
fn get_macs(&self, _data: Vector<U8>) -> Option<&[Mac]> {
unimplemented!("encodings not supported in insecure mode")
}
}
}

@@ -2,6 +2,8 @@ use std::{error::Error, fmt};

use mpc_tls::MpcTlsError;

use crate::transcript_internal::commit::encoding::EncodingError;

/// Error for [`Prover`](crate::prover::Prover).
#[derive(Debug, thiserror::Error)]
pub struct ProverError {
@@ -47,6 +49,13 @@ impl ProverError {
{
Self::new(ErrorKind::Commit, source)
}

pub(crate) fn predicate<E>(source: E) -> Self
where
E: Into<Box<dyn Error + Send + Sync + 'static>>,
{
Self::new(ErrorKind::Predicate, source)
}
}

#[derive(Debug)]
@@ -56,6 +65,7 @@ enum ErrorKind {
Zk,
Config,
Commit,
Predicate,
}

impl fmt::Display for ProverError {
@@ -68,6 +78,7 @@ impl fmt::Display for ProverError {
ErrorKind::Zk => f.write_str("zk error")?,
ErrorKind::Config => f.write_str("config error")?,
ErrorKind::Commit => f.write_str("commit error")?,
ErrorKind::Predicate => f.write_str("predicate error")?,
}

if let Some(source) = &self.source {
@@ -107,3 +118,9 @@ impl From<MpcTlsError> for ProverError {
Self::new(ErrorKind::Mpc, e)
}
}

impl From<EncodingError> for ProverError {
fn from(e: EncodingError) -> Self {
Self::new(ErrorKind::Commit, e)
}
}

@@ -13,10 +13,18 @@ use tlsn_core::{

use crate::{
prover::ProverError,
transcript_internal::{TranscriptRefs, auth::prove_plaintext, commit::hash::prove_hash},
transcript_internal::{
TranscriptRefs,
auth::prove_plaintext,
commit::{
encoding::{self, MacStore},
hash::prove_hash,
},
predicate::prove_predicates,
},
};

pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
pub(crate) async fn prove<T: Vm<Binary> + MacStore + Send + Sync>(
ctx: &mut Context,
vm: &mut T,
keys: &SessionKeys,
@@ -38,6 +46,27 @@ pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
Direction::Sent => commit_sent.union_mut(idx),
Direction::Received => commit_recv.union_mut(idx),
});

commit_config
.iter_encoding()
.for_each(|(direction, idx)| match direction {
Direction::Sent => commit_sent.union_mut(idx),
Direction::Received => commit_recv.union_mut(idx),
});
}

// Build predicate ranges from config
let (mut predicate_sent, mut predicate_recv) = (RangeSet::default(), RangeSet::default());
for predicate in config.predicates() {
let indices: RangeSet<usize> = predicate
.indices()
.into_iter()
.map(|idx| idx..idx + 1)
.collect();
match predicate.direction() {
Direction::Sent => predicate_sent.union_mut(&indices),
Direction::Received => predicate_recv.union_mut(&indices),
}
}

let transcript_refs = TranscriptRefs {
@@ -52,6 +81,7 @@ pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
.filter(|record| record.typ == ContentType::ApplicationData),
&reveal_sent,
&commit_sent,
&predicate_sent,
)
.map_err(ProverError::commit)?,
recv: prove_plaintext(
@@ -65,6 +95,7 @@ pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
.filter(|record| record.typ == ContentType::ApplicationData),
&reveal_recv,
&commit_recv,
&predicate_recv,
)
.map_err(ProverError::commit)?,
};
@@ -86,8 +117,53 @@ pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
None
};

// Prove predicates over transcript data
if !config.predicates().is_empty() {
prove_predicates(vm, &transcript_refs, config.predicates())
.map_err(ProverError::predicate)?;
}

vm.execute_all(ctx).await.map_err(ProverError::zk)?;

if let Some(commit_config) = config.transcript_commit()
&& commit_config.has_encoding()
{
let mut sent_ranges = RangeSet::default();
let mut recv_ranges = RangeSet::default();
for (dir, idx) in commit_config.iter_encoding() {
match dir {
Direction::Sent => sent_ranges.union_mut(idx),
Direction::Received => recv_ranges.union_mut(idx),
}
}

let sent_map = transcript_refs
.sent
.index(&sent_ranges)
.expect("indices are valid");
let recv_map = transcript_refs
.recv
.index(&recv_ranges)
.expect("indices are valid");

let (commitment, tree) = encoding::receive(
ctx,
vm,
*commit_config.encoding_hash_alg(),
&sent_map,
&recv_map,
commit_config.iter_encoding(),
)
.await?;

output
.transcript_commitments
.push(TranscriptCommitment::Encoding(commitment));
output
.transcript_secrets
.push(TranscriptSecret::Encoding(tree));
}

if let Some((hash_fut, hash_secrets)) = hash_commitments {
let hash_commitments = hash_fut.try_recv().map_err(ProverError::commit)?;
for (commitment, secret) in hash_commitments.into_iter().zip(hash_secrets) {

@@ -1,5 +1,6 @@
pub(crate) mod auth;
pub(crate) mod commit;
pub(crate) mod predicate;

use mpz_memory_core::{Vector, binary::U8};

@@ -25,14 +25,15 @@ pub(crate) fn prove_plaintext<'a>(
records: impl IntoIterator<Item = &'a Record>,
reveal: &RangeSet<usize>,
commit: &RangeSet<usize>,
predicate: &RangeSet<usize>,
) -> Result<ReferenceMap, PlaintextAuthError> {
let is_reveal_all = reveal == (0..plaintext.len());
let is_reveal_all = reveal == (0..plaintext.len()) && predicate.is_empty();

let alloc_ranges = if is_reveal_all {
commit.clone()
} else {
// The plaintext is only partially revealed, so we need to authenticate in ZK.
commit.union(reveal).into_set()
commit.union(reveal).union(predicate).into_set()
};

let plaintext_refs = alloc_plaintext(vm, &alloc_ranges)?;
@@ -49,7 +50,8 @@ pub(crate) fn prove_plaintext<'a>(
vm.commit(*slice).map_err(PlaintextAuthError::vm)?;
}
} else {
let private = commit.difference(reveal).into_set();
// Private ranges: committed but not revealed, plus predicate ranges
let private = commit.difference(reveal).union(predicate).into_set();
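// Illustration of the set algebra (not in the source): with
// commit = {0..10}, reveal = {0..4} and predicate = {12..14}, the private
// ranges are {4..10} plus {12..14}: committed-but-unrevealed bytes and all
// predicate bytes stay hidden from the verifier.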
for (_, slice) in plaintext_refs
.index(&private)
.expect("all ranges are allocated")
@@ -91,14 +93,15 @@ pub(crate) fn verify_plaintext<'a>(
records: impl IntoIterator<Item = &'a Record>,
reveal: &RangeSet<usize>,
commit: &RangeSet<usize>,
predicate: &RangeSet<usize>,
) -> Result<(ReferenceMap, PlaintextProof<'a>), PlaintextAuthError> {
let is_reveal_all = reveal == (0..plaintext.len());
let is_reveal_all = reveal == (0..plaintext.len()) && predicate.is_empty();

let alloc_ranges = if is_reveal_all {
commit.clone()
} else {
// The plaintext is only partially revealed, so we need to authenticate in ZK.
commit.union(reveal).into_set()
commit.union(reveal).union(predicate).into_set()
};

let plaintext_refs = alloc_plaintext(vm, &alloc_ranges)?;
@@ -123,9 +126,10 @@ pub(crate) fn verify_plaintext<'a>(
ciphertext,
})
} else {
let private = commit.difference(reveal).into_set();
// Blind ranges: committed but not revealed, plus predicate ranges
let blind = commit.difference(reveal).union(predicate).into_set();
for (_, slice) in plaintext_refs
.index(&private)
.index(&blind)
.expect("all ranges are allocated")
.iter()
{

@@ -1,3 +1,4 @@
//! Plaintext commitment and proof of encryption.

pub(crate) mod encoding;
pub(crate) mod hash;

267
crates/tlsn/src/transcript_internal/commit/encoding.rs
Normal file
@@ -0,0 +1,267 @@
//! Encoding commitment protocol.

use std::ops::Range;

use mpz_common::Context;
use mpz_memory_core::{
Vector,
binary::U8,
correlated::{Delta, Key, Mac},
};
use rand::Rng;
use rangeset::set::RangeSet;
use serde::{Deserialize, Serialize};
use serio::{SinkExt, stream::IoStreamExt};
use tlsn_core::{
hash::{Blake3, HashAlgId, HashAlgorithm, Keccak256, Sha256},
transcript::{
Direction,
encoding::{
Encoder, EncoderSecret, EncodingCommitment, EncodingProvider, EncodingProviderError,
EncodingTree, EncodingTreeError, new_encoder,
},
},
};

use crate::{
map::{Item, RangeMap},
transcript_internal::ReferenceMap,
};

/// Bytes of encoding, per byte.
const ENCODING_SIZE: usize = 128;
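// The 128 comes from one 16-byte block per bit of plaintext: 8 bits/byte
// * 16 bytes/block = 128, assuming mpz's 128-bit `Block` (as used for the
// correlated keys and MACs below).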
|
||||
#[derive(Debug, Serialize, Deserialize)]
struct Encodings {
    sent: Vec<u8>,
    recv: Vec<u8>,
}

/// Transfers encodings for the provided plaintext ranges.
pub(crate) async fn transfer<K: KeyStore>(
    ctx: &mut Context,
    store: &K,
    sent: &ReferenceMap,
    recv: &ReferenceMap,
) -> Result<(EncoderSecret, EncodingCommitment), EncodingError> {
    let secret = EncoderSecret::new(rand::rng().random(), store.delta().as_block().to_bytes());
    let encoder = new_encoder(&secret);

    // Collects the encodings for the provided plaintext ranges.
    fn collect_encodings(
        encoder: &impl Encoder,
        store: &impl KeyStore,
        direction: Direction,
        map: &ReferenceMap,
    ) -> Vec<u8> {
        let mut encodings = Vec::with_capacity(map.len() * ENCODING_SIZE);
        for (range, chunk) in map.iter() {
            let start = encodings.len();
            encoder.encode_range(direction, range, &mut encodings);
            let keys = store
                .get_keys(*chunk)
                .expect("keys are present for provided plaintext ranges");
            encodings[start..]
                .iter_mut()
                .zip(keys.iter().flat_map(|key| key.as_block().as_bytes()))
                .for_each(|(encoding, key)| {
                    *encoding ^= *key;
                });
        }
        encodings
    }

    let encodings = Encodings {
        sent: collect_encodings(&encoder, store, Direction::Sent, sent),
        recv: collect_encodings(&encoder, store, Direction::Received, recv),
    };

    let frame_limit = ctx
        .io()
        .limit()
        .saturating_add(encodings.sent.len() + encodings.recv.len());
    ctx.io_mut().with_limit(frame_limit).send(encodings).await?;

    let root = ctx.io_mut().expect_next().await?;

    Ok((secret, EncodingCommitment { root }))
}

/// Receives and commits to the encodings for the provided plaintext ranges.
pub(crate) async fn receive<M: MacStore>(
    ctx: &mut Context,
    store: &M,
    hash_alg: HashAlgId,
    sent: &ReferenceMap,
    recv: &ReferenceMap,
    idxs: impl IntoIterator<Item = &(Direction, RangeSet<usize>)>,
) -> Result<(EncodingCommitment, EncodingTree), EncodingError> {
    let hasher: &(dyn HashAlgorithm + Send + Sync) = match hash_alg {
        HashAlgId::SHA256 => &Sha256::default(),
        HashAlgId::KECCAK256 => &Keccak256::default(),
        HashAlgId::BLAKE3 => &Blake3::default(),
        alg => {
            return Err(ErrorRepr::UnsupportedHashAlgorithm(alg).into());
        }
    };

    let (sent_len, recv_len) = (sent.len(), recv.len());
    let frame_limit = ctx
        .io()
        .limit()
        .saturating_add(ENCODING_SIZE * (sent_len + recv_len));
    let encodings: Encodings = ctx.io_mut().with_limit(frame_limit).expect_next().await?;

    if encodings.sent.len() != sent_len * ENCODING_SIZE {
        return Err(ErrorRepr::IncorrectMacCount {
            direction: Direction::Sent,
            expected: sent_len,
            got: encodings.sent.len() / ENCODING_SIZE,
        }
        .into());
    }

    if encodings.recv.len() != recv_len * ENCODING_SIZE {
        return Err(ErrorRepr::IncorrectMacCount {
            direction: Direction::Received,
            expected: recv_len,
            got: encodings.recv.len() / ENCODING_SIZE,
        }
        .into());
    }

    // Collects a map of plaintext ranges to their encodings.
    fn collect_map(
        store: &impl MacStore,
        mut encodings: Vec<u8>,
        map: &ReferenceMap,
    ) -> RangeMap<EncodingSlice> {
        let mut encoding_map = Vec::new();
        let mut pos = 0;
        for (range, chunk) in map.iter() {
            let macs = store
                .get_macs(*chunk)
                .expect("MACs are present for provided plaintext ranges");
            let encoding = &mut encodings[pos..pos + range.len() * ENCODING_SIZE];
            encoding
                .iter_mut()
                .zip(macs.iter().flat_map(|mac| mac.as_bytes()))
                .for_each(|(encoding, mac)| {
                    *encoding ^= *mac;
                });

            encoding_map.push((range.start, EncodingSlice::from(&(*encoding))));
            pos += range.len() * ENCODING_SIZE;
        }
        RangeMap::new(encoding_map)
    }

    let provider = Provider {
        sent: collect_map(store, encodings.sent, sent),
        recv: collect_map(store, encodings.recv, recv),
    };

    let tree = EncodingTree::new(hasher, idxs, &provider)?;
    let root = tree.root();

    ctx.io_mut().send(root.clone()).await?;

    let commitment = EncodingCommitment { root };

    Ok((commitment, tree))
}

pub(crate) trait KeyStore {
    fn delta(&self) -> Delta;

    fn get_keys(&self, data: Vector<U8>) -> Option<&[Key]>;
}

pub(crate) trait MacStore {
    fn get_macs(&self, data: Vector<U8>) -> Option<&[Mac]>;
}

#[derive(Debug)]
struct Provider {
    sent: RangeMap<EncodingSlice>,
    recv: RangeMap<EncodingSlice>,
}

impl EncodingProvider for Provider {
    fn provide_encoding(
        &self,
        direction: Direction,
        range: Range<usize>,
        dest: &mut Vec<u8>,
    ) -> Result<(), EncodingProviderError> {
        let encodings = match direction {
            Direction::Sent => &self.sent,
            Direction::Received => &self.recv,
        };

        let encoding = encodings.get(range).ok_or(EncodingProviderError)?;

        dest.extend_from_slice(encoding);

        Ok(())
    }
}

#[derive(Debug)]
struct EncodingSlice(Vec<u8>);

impl From<&[u8]> for EncodingSlice {
    fn from(value: &[u8]) -> Self {
        Self(value.to_vec())
    }
}

impl Item for EncodingSlice {
    type Slice<'a>
        = &'a [u8]
    where
        Self: 'a;

    fn length(&self) -> usize {
        self.0.len() / ENCODING_SIZE
    }

    fn slice<'a>(&'a self, range: Range<usize>) -> Option<Self::Slice<'a>> {
        self.0
            .get(range.start * ENCODING_SIZE..range.end * ENCODING_SIZE)
    }
}

/// Encoding protocol error.
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct EncodingError(#[from] ErrorRepr);

#[derive(Debug, thiserror::Error)]
#[error("encoding protocol error: {0}")]
enum ErrorRepr {
    #[error("I/O error: {0}")]
    Io(std::io::Error),
    #[error("incorrect MAC count for {direction}: expected {expected}, got {got}")]
    IncorrectMacCount {
        direction: Direction,
        expected: usize,
        got: usize,
    },
    #[error("encoding tree error: {0}")]
    EncodingTree(EncodingTreeError),
    #[error("unsupported hash algorithm: {0}")]
    UnsupportedHashAlgorithm(HashAlgId),
}

impl From<std::io::Error> for EncodingError {
    fn from(value: std::io::Error) -> Self {
        Self(ErrorRepr::Io(value))
    }
}

impl From<EncodingTreeError> for EncodingError {
    fn from(value: EncodingTreeError) -> Self {
        Self(ErrorRepr::EncodingTree(value))
    }
}
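Why the XOR in `collect_map` commits the prover to its actual plaintext: keys and MACs are correlated as mac = key ^ (bit * delta), and the encoder above is deliberately seeded with the same delta (`EncoderSecret::new(..., store.delta()...)`). The verifier sends the zero-encodings masked with its keys; unmasking with the MACs then yields exactly the encoding of the prover's bit, while neither delta nor the keys leak. A minimal round-trip sketch, with u128 standing in for the 128-bit blocks behind `Key` and `Mac` (an illustrative assumption, not the real block type):

fn main() {
    let delta: u128 = 0x5aa5_5aa5_5aa5_5aa5_5aa5_5aa5_5aa5_5aa5;
    // Encoder output for bit value 0 at some position.
    let zero_encoding: u128 = 0x1234_5678_9abc_def0_1234_5678_9abc_def0;
    // Verifier's key for this bit.
    let key: u128 = 0x0f0f_0f0f_0f0f_0f0f_0f0f_0f0f_0f0f_0f0f;

    for bit in [0u128, 1u128] {
        // Correlated OT invariant: mac = key ^ (bit * delta).
        let mac = key ^ (bit * delta);
        // transfer(): the verifier masks the zero-encoding with its key.
        let masked = zero_encoding ^ key;
        // receive(): the prover unmasks with its MAC...
        let recovered = masked ^ mac;
        // ...and obtains the encoding of its actual bit value.
        assert_eq!(recovered, zero_encoding ^ (bit * delta));
    }
}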
175
crates/tlsn/src/transcript_internal/predicate.rs
Normal file
@@ -0,0 +1,175 @@
//! Predicate proving and verification over transcript data.

use std::sync::Arc;

use mpz_circuits::Circuit;
use mpz_core::bitvec::BitVec;
use mpz_memory_core::{
    DecodeFutureTyped, MemoryExt,
    binary::{Binary, Bool},
};
use mpz_predicate::{Pred, compiler::Compiler};
use mpz_vm_core::{Call, CallableExt, Vm};
use rangeset::set::RangeSet;
use tlsn_core::{config::prove::PredicateConfig, transcript::Direction};

use super::{ReferenceMap, TranscriptRefs};

/// Error during predicate proving/verification.
#[derive(Debug, thiserror::Error)]
pub(crate) enum PredicateError {
    /// Indices not found in transcript references.
    #[error("predicate indices {0:?} not found in transcript references")]
    IndicesNotFound(RangeSet<usize>),
    /// VM error.
    #[error("VM error: {0}")]
    Vm(#[from] mpz_vm_core::VmError),
    /// Circuit call error.
    #[error("circuit call error: {0}")]
    Call(#[from] mpz_vm_core::CallError),
    /// Decode error.
    #[error("decode error: {0}")]
    Decode(#[from] mpz_memory_core::DecodeError),
    /// Missing decoding.
    #[error("missing decoding")]
    MissingDecoding,
    /// Predicate not satisfied.
    #[error("predicate evaluated to false")]
    PredicateNotSatisfied,
}

/// Converts a slice of indices to a RangeSet (each index becomes a single-byte
/// range).
fn indices_to_rangeset(indices: &[usize]) -> RangeSet<usize> {
    indices.iter().map(|&idx| idx..idx + 1).collect()
}

/// Proves predicates over transcript data (prover side).
///
/// Each predicate is compiled to a circuit and executed with the corresponding
/// transcript bytes as input. The circuit outputs a single bit that must be
/// true.
pub(crate) fn prove_predicates<T: Vm<Binary>>(
    vm: &mut T,
    transcript_refs: &TranscriptRefs,
    predicates: &[PredicateConfig],
) -> Result<(), PredicateError> {
    let mut compiler = Compiler::new();

    for predicate in predicates {
        let refs = match predicate.direction() {
            Direction::Sent => &transcript_refs.sent,
            Direction::Received => &transcript_refs.recv,
        };

        // Compile predicate to circuit
        let circuit = compiler.compile(predicate.predicate());

        // Get indices from the predicate and convert to RangeSet
        let indices = indices_to_rangeset(&predicate.indices());

        // The prover doesn't need to verify the output: they know their data
        // satisfies the predicate.
        let _ = execute_predicate(vm, refs, &indices, &circuit)?;
    }

    Ok(())
}

/// Proof that predicates were satisfied.
///
/// Must be verified after `vm.execute_all()` completes.
#[must_use]
pub(crate) struct PredicateProof {
    /// Decode futures for each predicate output.
    outputs: Vec<DecodeFutureTyped<BitVec, bool>>,
}

impl PredicateProof {
    /// Verifies that all predicates evaluated to true.
    ///
    /// Must be called after `vm.execute_all()` completes.
    pub(crate) fn verify(self) -> Result<(), PredicateError> {
        for mut output in self.outputs {
            let result = output
                .try_recv()
                .map_err(PredicateError::Decode)?
                .ok_or(PredicateError::MissingDecoding)?;

            if !result {
                return Err(PredicateError::PredicateNotSatisfied);
            }
        }
        Ok(())
    }
}

/// Verifies predicates over transcript data (verifier side).
///
/// The verifier must provide the same predicates that the prover used,
/// looked up by predicate name from out-of-band agreement.
///
/// Returns a [`PredicateProof`] that must be verified after `vm.execute_all()`.
///
/// # Arguments
///
/// * `vm` - The zkVM.
/// * `transcript_refs` - References to transcript data in the VM.
/// * `predicates` - Iterator of (direction, indices, predicate) tuples.
pub(crate) fn verify_predicates<T: Vm<Binary>>(
    vm: &mut T,
    transcript_refs: &TranscriptRefs,
    predicates: impl IntoIterator<Item = (Direction, RangeSet<usize>, Pred)>,
) -> Result<PredicateProof, PredicateError> {
    let mut compiler = Compiler::new();
    let mut outputs = Vec::new();

    for (direction, indices, predicate) in predicates {
        let refs = match direction {
            Direction::Sent => &transcript_refs.sent,
            Direction::Received => &transcript_refs.recv,
        };

        // Compile predicate to circuit
        let circuit = compiler.compile(&predicate);

        let output_fut = execute_predicate(vm, refs, &indices, &circuit)?;
        outputs.push(output_fut);
    }

    Ok(PredicateProof { outputs })
}

/// Executes a predicate circuit with transcript bytes as input.
///
/// Returns a decode future for the circuit output.
fn execute_predicate<T: Vm<Binary>>(
    vm: &mut T,
    refs: &ReferenceMap,
    indices: &RangeSet<usize>,
    circuit: &Circuit,
) -> Result<DecodeFutureTyped<BitVec, bool>, PredicateError> {
    // Get the transcript bytes for the predicate indices
    let indexed_refs = refs
        .index(indices)
        .ok_or_else(|| PredicateError::IndicesNotFound(indices.clone()))?;

    // Build the circuit call with transcript bytes as inputs
    let circuit = Arc::new(circuit.clone());
    let mut call_builder = Call::builder(circuit);

    // Add each byte in the range as an input to the circuit. The predicate
    // circuit expects bytes in order, so we iterate through the indexed refs,
    // which maintain ordering.
    for (_range, vector) in indexed_refs.iter() {
        call_builder = call_builder.arg(*vector);
    }

    let call = call_builder.build()?;

    // Execute the circuit - the output is a single bit (true/false).
    // Both parties must call decode() on the output to reveal it.
    let output: Bool = vm.call(call)?;

    // Return the decode future - the caller must verify output == true after
    // execute_all.
    Ok(vm.decode(output)?)
}
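The `#[must_use]` on `PredicateProof` encodes a two-phase contract: `vm.decode` only hands back a future, so the boolean outputs exist only after `vm.execute_all()` runs, and `verify` must be called afterwards. A toy model of that deferred check, with `Option<bool>` standing in for the decode futures (illustrative only):

struct ToyProof {
    // None models a decode future that never resolved (missing decoding).
    outputs: Vec<Option<bool>>,
}

impl ToyProof {
    fn verify(self) -> Result<(), &'static str> {
        for output in self.outputs {
            match output {
                Some(true) => {}
                Some(false) => return Err("predicate evaluated to false"),
                None => return Err("missing decoding"),
            }
        }
        Ok(())
    }
}

fn main() {
    assert!(ToyProof { outputs: vec![Some(true), Some(true)] }.verify().is_ok());
    assert!(ToyProof { outputs: vec![Some(true), Some(false)] }.verify().is_err());
    assert!(ToyProof { outputs: vec![None] }.verify().is_err());
}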
@@ -8,6 +8,7 @@ use std::sync::Arc;

pub use error::VerifierError;
pub use tlsn_core::{VerifierOutput, webpki::ServerCertVerifier};
pub use verify::PredicateResolver;

use crate::{
    Role,
@@ -323,8 +324,24 @@ impl Verifier<state::Verify> {
    }

    /// Accepts the proving request.
    ///
    /// Note: If the prover requests predicate verification, use
    /// [`accept_with_predicates`](Self::accept_with_predicates) instead.
    pub async fn accept(
        self,
    ) -> Result<(VerifierOutput, Verifier<state::Committed>), VerifierError> {
        self.accept_with_predicates(None).await
    }

    /// Accepts the proving request with predicate verification support.
    ///
    /// # Arguments
    ///
    /// * `predicate_resolver` - A function that resolves predicate names to
    ///   predicates. Required if the prover requests any predicates.
    pub async fn accept_with_predicates(
        self,
        predicate_resolver: Option<&verify::PredicateResolver>,
    ) -> Result<(VerifierOutput, Verifier<state::Committed>), VerifierError> {
        let state::Verify {
            mux_ctrl,
@@ -353,6 +370,7 @@ impl Verifier<state::Verify> {
            request,
            handshake,
            transcript,
            predicate_resolver,
        ))
        .await?;

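For callers, a resolver is any `Send + Sync` function matching the `PredicateResolver` signature defined in the verify module below. A minimal sketch; the name and `eq(10, b'/')` mirror the integration test at the end of this diff, and a production resolver should also validate `indices` before building the predicate:

use mpz_predicate::{Pred, eq};
use rangeset::set::RangeSet;

fn resolver(name: &str, _indices: &RangeSet<usize>) -> Option<Pred> {
    match name {
        // Hypothetical name agreed out of band with the prover.
        "test_first_byte" => Some(eq(10, b'/')),
        _ => None,
    }
}

Passing `Some(&resolver)` to `accept_with_predicates` then lets the verifier look each requested predicate up by name.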
@@ -2,6 +2,8 @@ use std::{error::Error, fmt};

use mpc_tls::MpcTlsError;

use crate::transcript_internal::commit::encoding::EncodingError;

/// Error for [`Verifier`](crate::verifier::Verifier).
#[derive(Debug, thiserror::Error)]
pub struct VerifierError {
@@ -47,6 +49,13 @@ impl VerifierError {
    {
        Self::new(ErrorKind::Verify, source)
    }

    pub(crate) fn predicate<E>(source: E) -> Self
    where
        E: Into<Box<dyn Error + Send + Sync + 'static>>,
    {
        Self::new(ErrorKind::Predicate, source)
    }
}

#[derive(Debug)]
@@ -55,7 +64,9 @@ enum ErrorKind {
    Config,
    Mpc,
    Zk,
    Commit,
    Verify,
    Predicate,
}

impl fmt::Display for VerifierError {
@@ -67,7 +78,9 @@ impl fmt::Display for VerifierError {
            ErrorKind::Config => f.write_str("config error")?,
            ErrorKind::Mpc => f.write_str("mpc error")?,
            ErrorKind::Zk => f.write_str("zk error")?,
            ErrorKind::Commit => f.write_str("commit error")?,
            ErrorKind::Verify => f.write_str("verification error")?,
            ErrorKind::Predicate => f.write_str("predicate error")?,
        }

        if let Some(source) = &self.source {
@@ -101,3 +114,9 @@ impl From<MpcTlsError> for VerifierError {
        Self::new(ErrorKind::Mpc, e)
    }
}

impl From<EncodingError> for VerifierError {
    fn from(e: EncodingError) -> Self {
        Self::new(ErrorKind::Commit, e)
    }
}

@@ -1,6 +1,7 @@
use mpc_tls::SessionKeys;
use mpz_common::Context;
use mpz_memory_core::binary::Binary;
use mpz_predicate::Pred;
use mpz_vm_core::Vm;
use rangeset::set::RangeSet;
use tlsn_core::{
@@ -14,12 +15,33 @@ use tlsn_core::{
};

use crate::{
    transcript_internal::{TranscriptRefs, auth::verify_plaintext, commit::hash::verify_hash},
    transcript_internal::{
        TranscriptRefs,
        auth::verify_plaintext,
        commit::{
            encoding::{self, KeyStore},
            hash::verify_hash,
        },
        predicate::verify_predicates,
    },
    verifier::VerifierError,
};

/// A function that resolves predicate names to predicates.
///
/// The verifier must provide this to look up predicates by name,
/// based on out-of-band agreement with the prover.
///
/// The function receives:
/// - The predicate name
/// - The byte indices the predicate operates on (from the prover's request)
///
/// The verifier should validate that the indices make sense for the predicate
/// and return the appropriate predicate built with those indices.
pub type PredicateResolver = dyn Fn(&str, &RangeSet<usize>) -> Option<Pred> + Send + Sync;

#[allow(clippy::too_many_arguments)]
pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
pub(crate) async fn verify<T: Vm<Binary> + KeyStore + Send + Sync>(
    ctx: &mut Context,
    vm: &mut T,
    keys: &SessionKeys,
@@ -28,6 +50,7 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
    request: ProveRequest,
    handshake: Option<(ServerName, HandshakeData)>,
    transcript: Option<PartialTranscript>,
    predicate_resolver: Option<&PredicateResolver>,
) -> Result<VerifierOutput, VerifierError> {
    let ciphertext_sent = collect_ciphertext(tls_transcript.sent());
    let ciphertext_recv = collect_ciphertext(tls_transcript.recv());
@@ -87,6 +110,20 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
            Direction::Sent => commit_sent.union_mut(idx),
            Direction::Received => commit_recv.union_mut(idx),
        });

        if let Some((sent, recv)) = commit_config.encoding() {
            commit_sent.union_mut(sent);
            commit_recv.union_mut(recv);
        }
    }

    // Build predicate ranges from request
    let (mut predicate_sent, mut predicate_recv) = (RangeSet::default(), RangeSet::default());
    for predicate_req in request.predicates() {
        match predicate_req.direction() {
            Direction::Sent => predicate_sent.union_mut(predicate_req.indices()),
            Direction::Received => predicate_recv.union_mut(predicate_req.indices()),
        }
    }

    let (sent_refs, sent_proof) = verify_plaintext(
@@ -101,6 +138,7 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
            .filter(|record| record.typ == ContentType::ApplicationData),
        transcript.sent_authed(),
        &commit_sent,
        &predicate_sent,
    )
    .map_err(VerifierError::zk)?;
    let (recv_refs, recv_proof) = verify_plaintext(
@@ -115,6 +153,7 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
            .filter(|record| record.typ == ContentType::ApplicationData),
        transcript.received_authed(),
        &commit_recv,
        &predicate_recv,
    )
    .map_err(VerifierError::zk)?;

@@ -134,11 +173,56 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
        );
    }

    // Verify predicates if any were requested
    let predicate_proof = if !request.predicates().is_empty() {
        let resolver = predicate_resolver.ok_or_else(|| {
            VerifierError::predicate("predicates requested but no resolver provided")
        })?;

        let predicates = request
            .predicates()
            .iter()
            .map(|req| {
                let predicate = resolver(req.name(), req.indices()).ok_or_else(|| {
                    VerifierError::predicate(format!("unknown predicate: {}", req.name()))
                })?;
                Ok((req.direction(), req.indices().clone(), predicate))
            })
            .collect::<Result<Vec<_>, VerifierError>>()?;

        Some(verify_predicates(vm, &transcript_refs, predicates).map_err(VerifierError::predicate)?)
    } else {
        None
    };

    vm.execute_all(ctx).await.map_err(VerifierError::zk)?;

    sent_proof.verify().map_err(VerifierError::verify)?;
    recv_proof.verify().map_err(VerifierError::verify)?;

    // Verify predicate outputs after ZK execution
    if let Some(proof) = predicate_proof {
        proof.verify().map_err(VerifierError::predicate)?;
    }

    let mut encoder_secret = None;
    if let Some(commit_config) = request.transcript_commit()
        && let Some((sent, recv)) = commit_config.encoding()
    {
        let sent_map = transcript_refs
            .sent
            .index(sent)
            .expect("ranges were authenticated");
        let recv_map = transcript_refs
            .recv
            .index(recv)
            .expect("ranges were authenticated");

        let (secret, commitment) = encoding::transfer(ctx, vm, &sent_map, &recv_map).await?;
        encoder_secret = Some(secret);
        transcript_commitments.push(TranscriptCommitment::Encoding(commitment));
    }

    if let Some(hash_commitments) = hash_commitments {
        for commitment in hash_commitments.try_recv().map_err(VerifierError::verify)? {
            transcript_commitments.push(TranscriptCommitment::Hash(commitment));
@@ -148,6 +232,7 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
    Ok(VerifierOutput {
        server_name,
        transcript: request.reveal().is_some().then_some(transcript),
        encoder_secret,
        transcript_commitments,
    })
}

@@ -1,4 +1,6 @@
use futures::{AsyncReadExt, AsyncWriteExt};
use mpz_predicate::{Pred, eq};
use rangeset::set::RangeSet;
use tlsn::{
    config::{
        prove::ProveConfig,
@@ -8,9 +10,12 @@ use tlsn::{
        verifier::VerifierConfig,
    },
    connection::ServerName,
    hash::HashAlgId,
    hash::{HashAlgId, HashProvider},
    prover::Prover,
    transcript::{Direction, Transcript, TranscriptCommitConfig, TranscriptCommitmentKind},
    transcript::{
        Direction, Transcript, TranscriptCommitConfig, TranscriptCommitment,
        TranscriptCommitmentKind, TranscriptSecret,
    },
    verifier::{Verifier, VerifierOutput},
    webpki::{CertificateDer, RootCertStore},
};
@@ -38,7 +43,7 @@ async fn test() {

    let (socket_0, socket_1) = tokio::io::duplex(2 << 23);

    let ((_full_transcript, _prover_output), verifier_output) =
    let ((full_transcript, prover_output), verifier_output) =
        tokio::join!(prover(socket_0), verifier(socket_1));

    let partial_transcript = verifier_output.transcript.unwrap();
@@ -54,6 +59,50 @@ async fn test() {
        partial_transcript.received_authed().iter().next().unwrap(),
        0..10
    );

    let encoding_tree = prover_output
        .transcript_secrets
        .iter()
        .find_map(|secret| {
            if let TranscriptSecret::Encoding(tree) = secret {
                Some(tree)
            } else {
                None
            }
        })
        .unwrap();

    let encoding_commitment = prover_output
        .transcript_commitments
        .iter()
        .find_map(|commitment| {
            if let TranscriptCommitment::Encoding(commitment) = commitment {
                Some(commitment)
            } else {
                None
            }
        })
        .unwrap();

    let prove_sent = RangeSet::from(1..full_transcript.sent().len() - 1);
    let prove_recv = RangeSet::from(1..full_transcript.received().len() - 1);
    let idxs = [
        (Direction::Sent, prove_sent.clone()),
        (Direction::Received, prove_recv.clone()),
    ];
    let proof = encoding_tree.proof(idxs.iter()).unwrap();
    let (auth_sent, auth_recv) = proof
        .verify_with_provider(
            &HashProvider::default(),
            &verifier_output.encoder_secret.unwrap(),
            encoding_commitment,
            full_transcript.sent(),
            full_transcript.received(),
        )
        .unwrap();

    assert_eq!(auth_sent, prove_sent);
    assert_eq!(auth_recv, prove_recv);
}

#[instrument(skip(verifier_socket))]
@@ -115,21 +164,25 @@ async fn prover<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(

    let mut builder = TranscriptCommitConfig::builder(prover.transcript());

    let kind = TranscriptCommitmentKind::Hash {
        alg: HashAlgId::SHA256,
    };
    builder
        .commit_with_kind(&(0..sent_tx_len), Direction::Sent, kind)
        .unwrap();
    builder
        .commit_with_kind(&(0..recv_tx_len), Direction::Received, kind)
        .unwrap();
    builder
        .commit_with_kind(&(1..sent_tx_len - 1), Direction::Sent, kind)
        .unwrap();
    builder
        .commit_with_kind(&(1..recv_tx_len - 1), Direction::Received, kind)
        .unwrap();
    for kind in [
        TranscriptCommitmentKind::Encoding,
        TranscriptCommitmentKind::Hash {
            alg: HashAlgId::SHA256,
        },
    ] {
        builder
            .commit_with_kind(&(0..sent_tx_len), Direction::Sent, kind)
            .unwrap();
        builder
            .commit_with_kind(&(0..recv_tx_len), Direction::Received, kind)
            .unwrap();
        builder
            .commit_with_kind(&(1..sent_tx_len - 1), Direction::Sent, kind)
            .unwrap();
        builder
            .commit_with_kind(&(1..recv_tx_len - 1), Direction::Received, kind)
            .unwrap();
    }

    let transcript_commit = builder.build().unwrap();

@@ -179,3 +232,184 @@ async fn verifier<T: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(

    output
}

// Predicate name for testing
const TEST_PREDICATE: &str = "test_first_byte";

/// Test that a correct predicate passes verification.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore]
async fn test_predicate_passes() {
    let (socket_0, socket_1) = tokio::io::duplex(2 << 23);

    // Request is "GET / HTTP/1.1\r\n..." - index 10 is '/' (in "HTTP/1.1").
    // Using index 10 to avoid overlap with the revealed range (0..10).
    // The verifier uses the same predicate - should pass.
    let prover_predicate = eq(10, b'/');

    let (prover_result, verifier_result) = tokio::join!(
        prover_with_predicate(socket_0, prover_predicate),
        verifier_with_predicate(socket_1, || eq(10, b'/'))
    );

    prover_result.expect("prover should succeed");
    verifier_result.expect("verifier should succeed with correct predicate");
}

/// Test that a wrong predicate is rejected by the verifier.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore]
async fn test_wrong_predicate_rejected() {
    let (socket_0, socket_1) = tokio::io::duplex(2 << 23);

    // Request is "GET / HTTP/1.1\r\n..." - index 10 is '/'.
    // The verifier uses a DIFFERENT predicate that checks for 'X' - should fail.
    let prover_predicate = eq(10, b'/');

    let (prover_result, verifier_result) = tokio::join!(
        prover_with_predicate(socket_0, prover_predicate),
        verifier_with_predicate(socket_1, || eq(10, b'X'))
    );

    // The prover may succeed or fail depending on when the verifier rejects.
    let _ = prover_result;

    // The verifier should fail because the predicate evaluates to false.
    assert!(
        verifier_result.is_err(),
        "verifier should reject wrong predicate"
    );
}

/// Test that the prover can't prove a predicate their data doesn't satisfy.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore]
async fn test_unsatisfied_predicate_rejected() {
    let (socket_0, socket_1) = tokio::io::duplex(2 << 23);

    // Request is "GET / HTTP/1.1\r\n..." - index 10 is '/'.
    // Both parties use eq(10, b'X') but the prover's data has '/' at index 10.
    // This tests that a prover can't cheat - the predicate must actually be
    // satisfied.
    let prover_predicate = eq(10, b'X');

    let (prover_result, verifier_result) = tokio::join!(
        prover_with_predicate(socket_0, prover_predicate),
        verifier_with_predicate(socket_1, || eq(10, b'X'))
    );

    // The prover may succeed or fail depending on when the verifier rejects.
    let _ = prover_result;

    // The verifier should fail because the prover's data doesn't satisfy the
    // predicate.
    assert!(
        verifier_result.is_err(),
        "verifier should reject unsatisfied predicate"
    );
}

#[instrument(skip(verifier_socket, predicate))]
async fn prover_with_predicate<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(
    verifier_socket: T,
    predicate: Pred,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let (client_socket, server_socket) = tokio::io::duplex(2 << 16);

    let server_task = tokio::spawn(bind(server_socket.compat()));

    let prover = Prover::new(ProverConfig::builder().build()?)
        .commit(
            TlsCommitConfig::builder()
                .protocol(
                    MpcTlsConfig::builder()
                        .max_sent_data(MAX_SENT_DATA)
                        .max_sent_records(MAX_SENT_RECORDS)
                        .max_recv_data(MAX_RECV_DATA)
                        .max_recv_records_online(MAX_RECV_RECORDS)
                        .build()?,
                )
                .build()?,
            verifier_socket.compat(),
        )
        .await?;

    let (mut tls_connection, prover_fut) = prover
        .connect(
            TlsClientConfig::builder()
                .server_name(ServerName::Dns(SERVER_DOMAIN.try_into()?))
                .root_store(RootCertStore {
                    roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
                })
                .build()?,
            client_socket.compat(),
        )
        .await?;
    let prover_task = tokio::spawn(prover_fut);

    tls_connection
        .write_all(b"GET / HTTP/1.1\r\nConnection: close\r\n\r\n")
        .await?;
    tls_connection.close().await?;

    let mut response = vec![0u8; 1024];
    tls_connection.read_to_end(&mut response).await?;

    let _ = server_task.await?;

    let mut prover = prover_task.await??;

    let mut builder = ProveConfig::builder(prover.transcript());
    builder.server_identity();
    builder.reveal_sent(&(0..10))?;
    builder.reveal_recv(&(0..10))?;
    builder.predicate(TEST_PREDICATE, Direction::Sent, predicate)?;

    let config = builder.build()?;
    prover.prove(&config).await?;
    prover.close().await?;

    Ok(())
}

async fn verifier_with_predicate<T, F>(
    socket: T,
    make_predicate: F,
) -> Result<VerifierOutput, Box<dyn std::error::Error + Send + Sync>>
where
    T: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static,
    F: Fn() -> Pred + Send + Sync + 'static,
{
    let verifier = Verifier::new(
        VerifierConfig::builder()
            .root_store(RootCertStore {
                roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
            })
            .build()?,
    );

    let verifier = verifier
        .commit(socket.compat())
        .await?
        .accept()
        .await?
        .run()
        .await?;

    let verifier = verifier.verify().await?;

    // Resolver that builds the predicate fresh (Pred uses Rc, so it can't be
    // shared across calls).
    let predicate_resolver = move |name: &str, _indices: &RangeSet<usize>| -> Option<Pred> {
        if name == TEST_PREDICATE {
            Some(make_predicate())
        } else {
            None
        }
    };

    let (output, verifier) = verifier
        .accept_with_predicates(Some(&predicate_resolver))
        .await?;

    verifier.close().await?;

    Ok(output)
}