Mirror of https://github.com/tlsnotary/tlsn.git (synced 2026-01-11 15:47:58 -05:00)

Compare commits: 24 commits, feat/sdk...refactor/c
Commits (SHA1):
a56f2dd02b
38a1ec3f72
1501bc661f
0e2c2cb045
a662fb7511
be2e1ab95a
d34d135bfe
091d26bb63
f031fe9a8d
12c9a5eb34
b4380f021e
8a823d18ec
7bcfc56bd8
2909d5ebaa
7918494ccc
92dd47b376
5474a748ce
92da5adc24
e0ce1ad31a
3b76877920
783355772a
e5c59da90b
f059c53c2d
a1367b5428
Cargo.lock (generated): 1095 changed lines
File diff suppressed because it is too large.
Cargo.toml: 35 changed lines
@@ -39,6 +39,8 @@ opt-level = 1
[profile.wasm]
inherits = "release"
lto = true
panic = "abort"
codegen-units = 1

[workspace.dependencies]
tls-server-fixture = { path = "crates/tls/server-fixture" }
@@ -64,19 +66,19 @@ tlsn-harness-runner = { path = "crates/harness/runner" }
tlsn-wasm = { path = "crates/wasm" }
tlsn = { path = "crates/tlsn" }

mpz-circuits = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-memory-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-common = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-vm-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-garble = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-garble-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-ole = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-ot = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-share-conversion = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-fields = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-zk = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-hash = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "1b00912" }
mpz-circuits = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-memory-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-common = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-vm-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-garble = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-garble-core = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-ole = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-ot = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-share-conversion = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-fields = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-zk = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }
mpz-hash = { git = "https://github.com/privacy-scaling-explorations/mpz", rev = "3d90b6c" }

rangeset = { version = "0.2" }
serio = { version = "0.2" }
@@ -84,6 +86,7 @@ spansy = { git = "https://github.com/tlsnotary/tlsn-utils", rev = "6168663" }
uid-mux = { version = "0.2" }
websocket-relay = { git = "https://github.com/tlsnotary/tlsn-utils", rev = "6168663" }

aead = { version = "0.4" }
aes = { version = "0.8" }
aes-gcm = { version = "0.9" }
anyhow = { version = "1.0" }
@@ -97,7 +100,7 @@ bytes = { version = "1.4" }
cfg-if = { version = "1" }
chromiumoxide = { version = "0.7" }
chrono = { version = "0.4" }
cipher = { version = "0.4" }
cipher-crypto = { package = "cipher", version = "0.4" }
clap = { version = "4.5" }
criterion = { version = "0.5" }
ctr = { version = "0.9" }
@@ -120,6 +123,7 @@ inventory = { version = "0.3" }
itybity = { version = "0.2" }
js-sys = { version = "0.3" }
k256 = { version = "0.13" }
lipsum = { version = "0.9" }
log = { version = "0.4" }
once_cell = { version = "1.19" }
opaque-debug = { version = "0.3" }
@@ -161,6 +165,5 @@ web-spawn = { version = "0.2" }
web-time = { version = "0.2" }
webpki-roots = { version = "1.0" }
webpki-root-certs = { version = "1.0" }
# Use the patched ws_stream_wasm to fix the issue https://github.com/najamelan/ws_stream_wasm/issues/12#issuecomment-1711902958
ws_stream_wasm = { git = "https://github.com/tlsnotary/ws_stream_wasm", rev = "2ed12aad9f0236e5321f577672f309920b2aef51" }
ws_stream_wasm = { version = "0.7.5" }
zeroize = { version = "1.8" }

@@ -31,4 +31,4 @@ mpz-ot = { workspace = true }
tokio = { version = "1", features = ["macros", "rt", "rt-multi-thread"] }
rand = { workspace = true }
ctr = { workspace = true }
cipher = { workspace = true }
cipher-crypto = { workspace = true }
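The workspace now pulls the RustCrypto `cipher` crate in under the alias `cipher-crypto` (`package = "cipher"`), likely so it no longer clashes with the workspace's own cipher crate; downstream code then imports the traits through the alias, as the test hunks below show. A minimal sketch of such an import, with placeholder key/IV/message values that are not taken from the repository:

```rust
// Sketch only: AES-128-CTR through the `cipher-crypto` alias (RustCrypto `cipher` 0.4).
// The key, IV, and message bytes passed in are arbitrary placeholders.
use aes::Aes128;
use cipher_crypto::{KeyIvInit, StreamCipher};
use ctr::Ctr32BE;

fn encrypt_ctr(key: [u8; 16], iv: [u8; 16], msg: &mut [u8]) {
    // `Ctr32BE<Aes128>` matches the cipher exercised by the keystream tests below.
    let mut cipher = Ctr32BE::<Aes128>::new(&key.into(), &iv.into());
    cipher.apply_keystream(msg);
}
```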
@@ -344,8 +344,8 @@ mod tests {
start_ctr: usize,
msg: Vec<u8>,
) -> Vec<u8> {
use ::cipher::{KeyIvInit, StreamCipher, StreamCipherSeek};
use aes::Aes128;
use cipher_crypto::{KeyIvInit, StreamCipher, StreamCipherSeek};
use ctr::Ctr32BE;

let mut full_iv = [0u8; 16];
@@ -365,7 +365,7 @@ mod tests {

fn aes128(key: [u8; 16], msg: [u8; 16]) -> [u8; 16] {
use ::aes::Aes128 as TestAes128;
use ::cipher::{BlockEncrypt, KeyInit};
use cipher_crypto::{BlockEncrypt, KeyInit};

let mut msg = msg.into();
let cipher = TestAes128::new(&key.into());

@@ -391,7 +391,7 @@ mod tests {
memory::{binary::U8, correlated::Delta, Array},
prelude::*,
};
use mpz_zk::{Prover, Verifier};
use mpz_zk::{Prover, ProverConfig, Verifier, VerifierConfig};
use rand::{rngs::StdRng, SeedableRng};

use super::*;
@@ -408,8 +408,8 @@ mod tests {

let gb = Garbler::new(cot_send, [0u8; 16], delta_mpc);
let ev = Evaluator::new(cot_recv);
let prover = Prover::new(rcot_recv);
let verifier = Verifier::new(delta_zk, rcot_send);
let prover = Prover::new(ProverConfig::default(), rcot_recv);
let verifier = Verifier::new(VerifierConfig::default(), delta_zk, rcot_send);

let mut leader = Deap::new(Role::Leader, gb, prover);
let mut follower = Deap::new(Role::Follower, ev, verifier);
@@ -488,8 +488,8 @@ mod tests {

let gb = Garbler::new(cot_send, [0u8; 16], delta_mpc);
let ev = Evaluator::new(cot_recv);
let prover = Prover::new(rcot_recv);
let verifier = Verifier::new(delta_zk, rcot_send);
let prover = Prover::new(ProverConfig::default(), rcot_recv);
let verifier = Verifier::new(VerifierConfig::default(), delta_zk, rcot_send);

let mut leader = Deap::new(Role::Leader, gb, prover);
let mut follower = Deap::new(Role::Follower, ev, verifier);
@@ -574,8 +574,8 @@ mod tests {

let gb = Garbler::new(cot_send, [1u8; 16], delta_mpc);
let ev = Evaluator::new(cot_recv);
let prover = Prover::new(rcot_recv);
let verifier = Verifier::new(delta_zk, rcot_send);
let prover = Prover::new(ProverConfig::default(), rcot_recv);
let verifier = Verifier::new(VerifierConfig::default(), delta_zk, rcot_send);

let mut leader = Deap::new(Role::Leader, gb, prover);
let mut follower = Deap::new(Role::Follower, ev, verifier);
@@ -13,7 +13,13 @@ workspace = true

[features]
default = []
fixtures = ["dep:hex", "dep:tlsn-data-fixtures"]
fixtures = [
"dep:hex",
"dep:tlsn-data-fixtures",
"dep:aead",
"dep:aes-gcm",
"dep:generic-array",
]

[dependencies]
tlsn-data-fixtures = { workspace = true, optional = true }
@@ -21,6 +27,9 @@ tlsn-tls-core = { workspace = true, features = ["serde"] }
tlsn-utils = { workspace = true }
rangeset = { workspace = true, features = ["serde"] }

aead = { workspace = true, features = ["alloc"], optional = true }
aes-gcm = { workspace = true, optional = true }
generic-array = { workspace = true, optional = true }
bimap = { version = "0.6", features = ["serde"] }
blake3 = { workspace = true }
hex = { workspace = true, optional = true }
@@ -39,9 +48,12 @@ webpki-roots = { workspace = true }
rustls-webpki = { workspace = true, features = ["ring"] }
rustls-pki-types = { workspace = true }
itybity = { workspace = true }
zeroize = { workspace = true }
zeroize = { workspace = true, features = ["zeroize_derive"] }

[dev-dependencies]
aead = { workspace = true, features = ["alloc"] }
aes-gcm = { workspace = true }
generic-array = { workspace = true }
bincode = { workspace = true }
hex = { workspace = true }
rstest = { workspace = true }
@@ -6,7 +6,10 @@ use rustls_pki_types as webpki_types;
use serde::{Deserialize, Serialize};
use tls_core::msgs::{codec::Codec, enums::NamedGroup, handshake::ServerECDHParams};

use crate::webpki::{CertificateDer, ServerCertVerifier, ServerCertVerifierError};
use crate::{
transcript::TlsTranscript,
webpki::{CertificateDer, ServerCertVerifier, ServerCertVerifierError},
};

/// TLS version.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -312,6 +315,25 @@ pub struct HandshakeData {
}

impl HandshakeData {
/// Creates a new instance.
///
/// # Arguments
///
/// * `transcript` - The TLS transcript.
pub fn new(transcript: &TlsTranscript) -> Self {
Self {
certs: transcript
.server_cert_chain()
.expect("server cert chain is present")
.to_vec(),
sig: transcript
.server_signature()
.expect("server signature is present")
.clone(),
binding: transcript.certificate_binding().clone(),
}
}

/// Verifies the handshake data.
///
/// # Arguments
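The new constructor builds `HandshakeData` directly from a `TlsTranscript`, so callers no longer assemble the certificate chain, signature, and binding by hand. A hedged, crate-internal usage sketch (the transcript is assumed to carry the server certificate chain and signature, otherwise the `expect` calls above panic):

```rust
// Sketch: deriving handshake data from a completed TLS transcript.
use crate::{connection::HandshakeData, transcript::TlsTranscript};

fn handshake_from(transcript: &TlsTranscript) -> HandshakeData {
    // Panics if the server certificate chain or signature is absent,
    // mirroring the `expect`s inside `HandshakeData::new`.
    HandshakeData::new(transcript)
}
```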
crates/core/src/display.rs (new file): 16 lines
@@ -0,0 +1,16 @@
use rangeset::RangeSet;

pub(crate) struct FmtRangeSet<'a>(pub &'a RangeSet<usize>);

impl<'a> std::fmt::Display for FmtRangeSet<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("{")?;
        for range in self.0.iter_ranges() {
            write!(f, "{}..{}", range.start, range.end)?;
            if range.end < self.0.end().unwrap_or(0) {
                f.write_str(", ")?;
            }
        }
        f.write_str("}")
    }
}
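`FmtRangeSet` is a crate-private helper that prints a `RangeSet<usize>` as a brace-delimited list of ranges, replacing what the removed `Idx` Display impl used to do. A small sketch of what it produces:

```rust
// Sketch (crate-internal): formatting a two-range set with FmtRangeSet.
use rangeset::RangeSet;

fn example() {
    let set = RangeSet::from([0..4, 7..10]);
    // Prints "{0..4, 7..10}".
    println!("{}", FmtRangeSet(&set));
}
```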
@@ -1,6 +1,7 @@
//! Fixtures for testing

mod provider;
pub mod transcript;

pub use provider::FixtureEncodingProvider;
crates/core/src/fixtures/transcript.rs (new file): 199 lines
@@ -0,0 +1,199 @@
//! Transcript fixtures for testing.

use aead::Payload as AeadPayload;
use aes_gcm::{aead::Aead, Aes128Gcm, NewAead};
use generic_array::GenericArray;
use rand::{rngs::StdRng, Rng, SeedableRng};
use tls_core::msgs::{
    base::Payload,
    codec::Codec,
    enums::{ContentType, HandshakeType, ProtocolVersion},
    handshake::{HandshakeMessagePayload, HandshakePayload},
    message::{OpaqueMessage, PlainMessage},
};

use crate::{
    connection::{TranscriptLength, VerifyData},
    fixtures::ConnectionFixture,
    transcript::{Record, TlsTranscript},
};

/// The key used for encryption of the sent and received transcript.
pub const KEY: [u8; 16] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];

/// The iv used for encryption of the sent and received transcript.
pub const IV: [u8; 4] = [1, 3, 3, 7];

/// The record size in bytes.
pub const RECORD_SIZE: usize = 512;

/// Creates a transcript fixture for testing.
pub fn transcript_fixture(sent: &[u8], recv: &[u8]) -> TlsTranscript {
    TranscriptGenerator::new(KEY, IV).generate(sent, recv)
}

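A usage sketch for the fixture entry point above; the HTTP bytes are arbitrary placeholders, and the plaintext is split into `RECORD_SIZE` (512-byte) records encrypted under the fixed `KEY`/`IV`:

```rust
// Sketch: building a TlsTranscript fixture from arbitrary application data.
fn example_fixture() -> TlsTranscript {
    let sent = b"GET /info HTTP/1.1\r\nHost: example.test\r\n\r\n";
    let recv = b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n";
    transcript_fixture(sent, recv)
}
```
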
struct TranscriptGenerator {
|
||||
key: [u8; 16],
|
||||
iv: [u8; 4],
|
||||
}
|
||||
|
||||
impl TranscriptGenerator {
|
||||
fn new(key: [u8; 16], iv: [u8; 4]) -> Self {
|
||||
Self { key, iv }
|
||||
}
|
||||
|
||||
fn generate(&self, sent: &[u8], recv: &[u8]) -> TlsTranscript {
|
||||
let mut rng = StdRng::from_seed([1; 32]);
|
||||
|
||||
let transcript_len = TranscriptLength {
|
||||
sent: sent.len() as u32,
|
||||
received: recv.len() as u32,
|
||||
};
|
||||
let tlsn = ConnectionFixture::tlsnotary(transcript_len);
|
||||
|
||||
let time = tlsn.connection_info.time;
|
||||
let version = tlsn.connection_info.version;
|
||||
let server_cert_chain = tlsn.server_cert_data.certs;
|
||||
let server_signature = tlsn.server_cert_data.sig;
|
||||
let cert_binding = tlsn.server_cert_data.binding;
|
||||
|
||||
let cf_vd: [u8; 12] = rng.random();
|
||||
let sf_vd: [u8; 12] = rng.random();
|
||||
|
||||
let verify_data = VerifyData {
|
||||
client_finished: cf_vd.to_vec(),
|
||||
server_finished: sf_vd.to_vec(),
|
||||
};
|
||||
|
||||
let sent = self.gen_records(cf_vd, sent);
|
||||
let recv = self.gen_records(sf_vd, recv);
|
||||
|
||||
TlsTranscript::new(
|
||||
time,
|
||||
version,
|
||||
Some(server_cert_chain),
|
||||
Some(server_signature),
|
||||
cert_binding,
|
||||
verify_data,
|
||||
sent,
|
||||
recv,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn gen_records(&self, vd: [u8; 12], plaintext: &[u8]) -> Vec<Record> {
|
||||
let mut records = Vec::new();
|
||||
|
||||
let handshake = self.gen_handshake(vd);
|
||||
records.push(handshake);
|
||||
|
||||
for (seq, msg) in (1_u64..).zip(plaintext.chunks(RECORD_SIZE)) {
|
||||
let record = self.gen_app_data(seq, msg);
|
||||
records.push(record);
|
||||
}
|
||||
|
||||
records
|
||||
}
|
||||
|
||||
fn gen_app_data(&self, seq: u64, plaintext: &[u8]) -> Record {
|
||||
assert!(
|
||||
plaintext.len() <= 1 << 14,
|
||||
"plaintext len per record must be smaller than 2^14 bytes"
|
||||
);
|
||||
|
||||
let explicit_nonce: [u8; 8] = seq.to_be_bytes();
|
||||
let msg = PlainMessage {
|
||||
typ: ContentType::ApplicationData,
|
||||
version: ProtocolVersion::TLSv1_2,
|
||||
payload: Payload::new(plaintext),
|
||||
};
|
||||
let opaque = aes_gcm_encrypt(self.key, self.iv, seq, explicit_nonce, &msg);
|
||||
|
||||
let mut payload = opaque.payload.0;
|
||||
let mut ciphertext = payload.split_off(8);
|
||||
let tag = ciphertext.split_off(ciphertext.len() - 16);
|
||||
|
||||
Record {
|
||||
seq,
|
||||
typ: ContentType::ApplicationData,
|
||||
plaintext: Some(plaintext.to_vec()),
|
||||
explicit_nonce: explicit_nonce.to_vec(),
|
||||
ciphertext,
|
||||
tag: Some(tag),
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_handshake(&self, vd: [u8; 12]) -> Record {
|
||||
let seq = 0_u64;
|
||||
let explicit_nonce = seq.to_be_bytes();
|
||||
|
||||
let mut plaintext = Vec::new();
|
||||
|
||||
let payload = Payload(vd.to_vec());
|
||||
let hs_payload = HandshakePayload::Finished(payload);
|
||||
let handshake_message = HandshakeMessagePayload {
|
||||
typ: HandshakeType::Finished,
|
||||
payload: hs_payload,
|
||||
};
|
||||
handshake_message.encode(&mut plaintext);
|
||||
|
||||
let msg = PlainMessage {
|
||||
typ: ContentType::Handshake,
|
||||
version: ProtocolVersion::TLSv1_2,
|
||||
payload: Payload::new(plaintext.clone()),
|
||||
};
|
||||
|
||||
let opaque = aes_gcm_encrypt(self.key, self.iv, seq, explicit_nonce, &msg);
|
||||
let mut payload = opaque.payload.0;
|
||||
let mut ciphertext = payload.split_off(8);
|
||||
let tag = ciphertext.split_off(ciphertext.len() - 16);
|
||||
|
||||
Record {
|
||||
seq,
|
||||
typ: ContentType::Handshake,
|
||||
plaintext: Some(plaintext),
|
||||
explicit_nonce: explicit_nonce.to_vec(),
|
||||
ciphertext,
|
||||
tag: Some(tag),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn aes_gcm_encrypt(
|
||||
key: [u8; 16],
|
||||
iv: [u8; 4],
|
||||
seq: u64,
|
||||
explicit_nonce: [u8; 8],
|
||||
msg: &PlainMessage,
|
||||
) -> OpaqueMessage {
|
||||
let mut aad = [0u8; 13];
|
||||
|
||||
aad[..8].copy_from_slice(&seq.to_be_bytes());
|
||||
aad[8] = msg.typ.get_u8();
|
||||
aad[9..11].copy_from_slice(&msg.version.get_u16().to_be_bytes());
|
||||
aad[11..13].copy_from_slice(&(msg.payload.0.len() as u16).to_be_bytes());
|
||||
let payload = AeadPayload {
|
||||
msg: &msg.payload.0,
|
||||
aad: &aad,
|
||||
};
|
||||
|
||||
let mut nonce = [0u8; 12];
|
||||
nonce[..4].copy_from_slice(&iv);
|
||||
nonce[4..].copy_from_slice(&explicit_nonce);
|
||||
let nonce = GenericArray::from_slice(&nonce);
|
||||
let cipher = Aes128Gcm::new_from_slice(&key).unwrap();
|
||||
|
||||
// ciphertext will have the MAC appended
|
||||
let ciphertext = cipher.encrypt(nonce, payload).unwrap();
|
||||
|
||||
// prepend the explicit nonce
|
||||
let mut nonce_ct_mac = vec![0u8; 0];
|
||||
nonce_ct_mac.extend(explicit_nonce.iter());
|
||||
nonce_ct_mac.extend(ciphertext.iter());
|
||||
|
||||
OpaqueMessage {
|
||||
typ: msg.typ,
|
||||
version: msg.version,
|
||||
payload: Payload::new(nonce_ct_mac),
|
||||
}
|
||||
}
|
||||
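The generator above relies on the TLS 1.2 AEAD record layout: the encrypted payload is the 8-byte explicit nonce, followed by the ciphertext, followed by the 16-byte GCM tag. The two `split_off` calls in `gen_app_data` and `gen_handshake` carve the payload accordingly; a standalone sketch of that arithmetic:

```rust
// Sketch: splitting an AES-GCM TLS 1.2 record payload into its parts.
// Layout: 8-byte explicit nonce || ciphertext || 16-byte tag.
fn split_record(mut payload: Vec<u8>) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
    let mut ciphertext = payload.split_off(8); // everything after the explicit nonce
    let tag = ciphertext.split_off(ciphertext.len() - 16); // trailing GCM tag
    let explicit_nonce = payload; // the first 8 bytes remain here
    (explicit_nonce, ciphertext, tag)
}
```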
@@ -95,7 +95,7 @@ impl Display for HashAlgId {
|
||||
}
|
||||
|
||||
/// A typed hash value.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct TypedHash {
|
||||
/// The algorithm of the hash.
|
||||
pub alg: HashAlgId,
|
||||
|
||||
@@ -11,15 +11,17 @@ pub mod hash;
|
||||
pub mod merkle;
|
||||
pub mod transcript;
|
||||
pub mod webpki;
|
||||
pub use rangeset;
|
||||
pub(crate) mod display;
|
||||
|
||||
use rangeset::ToRangeSet;
|
||||
use rangeset::{RangeSet, ToRangeSet, UnionMut};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
connection::{HandshakeData, ServerName},
|
||||
transcript::{
|
||||
Direction, Idx, PartialTranscript, Transcript, TranscriptCommitConfig,
|
||||
TranscriptCommitRequest, TranscriptCommitment, TranscriptSecret,
|
||||
Direction, PartialTranscript, Transcript, TranscriptCommitConfig, TranscriptCommitRequest,
|
||||
TranscriptCommitment, TranscriptSecret,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -27,7 +29,7 @@ use crate::{
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProveConfig {
|
||||
server_identity: bool,
|
||||
transcript: Option<PartialTranscript>,
|
||||
reveal: Option<(RangeSet<usize>, RangeSet<usize>)>,
|
||||
transcript_commit: Option<TranscriptCommitConfig>,
|
||||
}
|
||||
|
||||
@@ -42,9 +44,9 @@ impl ProveConfig {
|
||||
self.server_identity
|
||||
}
|
||||
|
||||
/// Returns the transcript to be proven.
|
||||
pub fn transcript(&self) -> Option<&PartialTranscript> {
|
||||
self.transcript.as_ref()
|
||||
/// Returns the ranges of the transcript to be revealed.
|
||||
pub fn reveal(&self) -> Option<&(RangeSet<usize>, RangeSet<usize>)> {
|
||||
self.reveal.as_ref()
|
||||
}
|
||||
|
||||
/// Returns the transcript commitment configuration.
|
||||
@@ -58,8 +60,7 @@ impl ProveConfig {
|
||||
pub struct ProveConfigBuilder<'a> {
|
||||
transcript: &'a Transcript,
|
||||
server_identity: bool,
|
||||
reveal_sent: Idx,
|
||||
reveal_recv: Idx,
|
||||
reveal: Option<(RangeSet<usize>, RangeSet<usize>)>,
|
||||
transcript_commit: Option<TranscriptCommitConfig>,
|
||||
}
|
||||
|
||||
@@ -69,8 +70,7 @@ impl<'a> ProveConfigBuilder<'a> {
|
||||
Self {
|
||||
transcript,
|
||||
server_identity: false,
|
||||
reveal_sent: Idx::default(),
|
||||
reveal_recv: Idx::default(),
|
||||
reveal: None,
|
||||
transcript_commit: None,
|
||||
}
|
||||
}
|
||||
@@ -93,22 +93,24 @@ impl<'a> ProveConfigBuilder<'a> {
|
||||
direction: Direction,
|
||||
ranges: &dyn ToRangeSet<usize>,
|
||||
) -> Result<&mut Self, ProveConfigBuilderError> {
|
||||
let idx = Idx::new(ranges.to_range_set());
|
||||
let idx = ranges.to_range_set();
|
||||
|
||||
if idx.end() > self.transcript.len_of_direction(direction) {
|
||||
if idx.end().unwrap_or(0) > self.transcript.len_of_direction(direction) {
|
||||
return Err(ProveConfigBuilderError(
|
||||
ProveConfigBuilderErrorRepr::IndexOutOfBounds {
|
||||
direction,
|
||||
actual: idx.end(),
|
||||
actual: idx.end().unwrap_or(0),
|
||||
len: self.transcript.len_of_direction(direction),
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
let (sent, recv) = self.reveal.get_or_insert_default();
|
||||
match direction {
|
||||
Direction::Sent => self.reveal_sent.union_mut(&idx),
|
||||
Direction::Received => self.reveal_recv.union_mut(&idx),
|
||||
Direction::Sent => sent.union_mut(&idx),
|
||||
Direction::Received => recv.union_mut(&idx),
|
||||
}
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
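With this change the builder accumulates revealed ranges directly into a `(sent, received)` pair of `RangeSet`s instead of `Idx` values, bounds-checking each range against the transcript length for its direction. A hedged usage sketch, assuming the builder is constructed with `ProveConfigBuilder::new(&transcript)` as in the surrounding hunks:

```rust
// Sketch: accumulating reveal ranges with the reworked builder.
fn build_prove_config(transcript: &Transcript) -> Result<ProveConfig, ProveConfigBuilderError> {
    let mut builder = ProveConfigBuilder::new(transcript);
    // Ranges for each direction are unioned into the builder's RangeSet pair.
    builder.reveal(Direction::Sent, &(0..10))?;
    builder.reveal(Direction::Received, &(2..8))?;
    builder.build()
}
```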
@@ -128,20 +130,20 @@ impl<'a> ProveConfigBuilder<'a> {
|
||||
self.reveal(Direction::Received, ranges)
|
||||
}
|
||||
|
||||
/// Reveals the full transcript range for a given direction.
|
||||
pub fn reveal_all(
|
||||
&mut self,
|
||||
direction: Direction,
|
||||
) -> Result<&mut Self, ProveConfigBuilderError> {
|
||||
let len = self.transcript.len_of_direction(direction);
|
||||
self.reveal(direction, &(0..len))
|
||||
}
|
||||
|
||||
/// Builds the configuration.
|
||||
pub fn build(self) -> Result<ProveConfig, ProveConfigBuilderError> {
|
||||
let transcript = if !self.reveal_sent.is_empty() || !self.reveal_recv.is_empty() {
|
||||
Some(
|
||||
self.transcript
|
||||
.to_partial(self.reveal_sent, self.reveal_recv),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(ProveConfig {
|
||||
server_identity: self.server_identity,
|
||||
transcript,
|
||||
reveal: self.reveal,
|
||||
transcript_commit: self.transcript_commit,
|
||||
})
|
||||
}
|
||||
@@ -197,10 +199,10 @@ pub struct VerifyConfigBuilderError(#[from] VerifyConfigBuilderErrorRepr);
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
enum VerifyConfigBuilderErrorRepr {}
|
||||
|
||||
/// Payload sent to the verifier.
|
||||
/// Request to prove statements about the connection.
|
||||
#[doc(hidden)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ProvePayload {
|
||||
pub struct ProveRequest {
|
||||
/// Handshake data.
|
||||
pub handshake: Option<(ServerName, HandshakeData)>,
|
||||
/// Transcript data.
|
||||
@@ -209,6 +211,29 @@ pub struct ProvePayload {
|
||||
pub transcript_commit: Option<TranscriptCommitRequest>,
|
||||
}
|
||||
|
||||
impl ProveRequest {
|
||||
/// Creates a new prove payload.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `config` - The prove config.
|
||||
/// * `transcript` - The partial transcript.
|
||||
/// * `handshake` - The server name and handshake data.
|
||||
pub fn new(
|
||||
config: &ProveConfig,
|
||||
transcript: Option<PartialTranscript>,
|
||||
handshake: Option<(ServerName, HandshakeData)>,
|
||||
) -> Self {
|
||||
let transcript_commit = config.transcript_commit().map(|config| config.to_request());
|
||||
|
||||
Self {
|
||||
handshake,
|
||||
transcript,
|
||||
transcript_commit,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Prover output.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct ProverOutput {
|
||||
|
||||
@@ -26,7 +26,7 @@ mod tls;
|
||||
|
||||
use std::{fmt, ops::Range};
|
||||
|
||||
use rangeset::{Difference, IndexRanges, RangeSet, Subset, ToRangeSet, Union, UnionMut};
|
||||
use rangeset::{Difference, IndexRanges, RangeSet, Union};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::connection::TranscriptLength;
|
||||
@@ -39,6 +39,7 @@ pub use proof::{
|
||||
TranscriptProof, TranscriptProofBuilder, TranscriptProofBuilderError, TranscriptProofError,
|
||||
};
|
||||
pub use tls::{Record, TlsTranscript};
|
||||
pub use tls_core::msgs::enums::ContentType;
|
||||
|
||||
/// A transcript contains the plaintext of all application data communicated
|
||||
/// between the Prover and the Server.
|
||||
@@ -95,18 +96,18 @@ impl Transcript {
|
||||
|
||||
/// Returns the subsequence of the transcript with the provided index,
|
||||
/// returning `None` if the index is out of bounds.
|
||||
pub fn get(&self, direction: Direction, idx: &Idx) -> Option<Subsequence> {
|
||||
pub fn get(&self, direction: Direction, idx: &RangeSet<usize>) -> Option<Subsequence> {
|
||||
let data = match direction {
|
||||
Direction::Sent => &self.sent,
|
||||
Direction::Received => &self.received,
|
||||
};
|
||||
|
||||
if idx.end() > data.len() {
|
||||
if idx.end().unwrap_or(0) > data.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
Subsequence::new(idx.clone(), data.index_ranges(&idx.0))
|
||||
Subsequence::new(idx.clone(), data.index_ranges(idx))
|
||||
.expect("data is same length as index"),
|
||||
)
|
||||
}
|
||||
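`Transcript::get` now takes a plain `RangeSet<usize>` and returns `None` when the requested ranges extend past the data for that direction. A short usage sketch based on the accessors shown in this diff:

```rust
// Sketch: fetching a discontiguous subsequence of the received data.
fn example(transcript: &Transcript) {
    let idx = RangeSet::from([0..4, 7..10]);
    if let Some(subseq) = transcript.get(Direction::Received, &idx) {
        // The subsequence carries exactly the requested index.
        assert_eq!(subseq.index(), &idx);
    }
}
```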
@@ -121,7 +122,11 @@ impl Transcript {
|
||||
///
|
||||
/// * `sent_idx` - The indices of the sent data to include.
|
||||
/// * `recv_idx` - The indices of the received data to include.
|
||||
pub fn to_partial(&self, sent_idx: Idx, recv_idx: Idx) -> PartialTranscript {
|
||||
pub fn to_partial(
|
||||
&self,
|
||||
sent_idx: RangeSet<usize>,
|
||||
recv_idx: RangeSet<usize>,
|
||||
) -> PartialTranscript {
|
||||
let mut sent = vec![0; self.sent.len()];
|
||||
let mut received = vec![0; self.received.len()];
|
||||
|
||||
@@ -156,9 +161,9 @@ pub struct PartialTranscript {
|
||||
/// Data received by the Prover from the Server.
|
||||
received: Vec<u8>,
|
||||
/// Index of `sent` which have been authenticated.
|
||||
sent_authed_idx: Idx,
|
||||
sent_authed_idx: RangeSet<usize>,
|
||||
/// Index of `received` which have been authenticated.
|
||||
received_authed_idx: Idx,
|
||||
received_authed_idx: RangeSet<usize>,
|
||||
}
|
||||
|
||||
/// `PartialTranscript` in a compressed form.
|
||||
@@ -170,9 +175,9 @@ pub struct CompressedPartialTranscript {
|
||||
/// Received data which has been authenticated.
|
||||
received_authed: Vec<u8>,
|
||||
/// Index of `sent_authed`.
|
||||
sent_idx: Idx,
|
||||
sent_idx: RangeSet<usize>,
|
||||
/// Index of `received_authed`.
|
||||
recv_idx: Idx,
|
||||
recv_idx: RangeSet<usize>,
|
||||
/// Total bytelength of sent data in the original partial transcript.
|
||||
sent_total: usize,
|
||||
/// Total bytelength of received data in the original partial transcript.
|
||||
@@ -184,10 +189,10 @@ impl From<PartialTranscript> for CompressedPartialTranscript {
|
||||
Self {
|
||||
sent_authed: uncompressed
|
||||
.sent
|
||||
.index_ranges(&uncompressed.sent_authed_idx.0),
|
||||
.index_ranges(&uncompressed.sent_authed_idx),
|
||||
received_authed: uncompressed
|
||||
.received
|
||||
.index_ranges(&uncompressed.received_authed_idx.0),
|
||||
.index_ranges(&uncompressed.received_authed_idx),
|
||||
sent_idx: uncompressed.sent_authed_idx,
|
||||
recv_idx: uncompressed.received_authed_idx,
|
||||
sent_total: uncompressed.sent.len(),
|
||||
@@ -237,8 +242,8 @@ impl PartialTranscript {
|
||||
Self {
|
||||
sent: vec![0; sent_len],
|
||||
received: vec![0; received_len],
|
||||
sent_authed_idx: Idx::default(),
|
||||
received_authed_idx: Idx::default(),
|
||||
sent_authed_idx: RangeSet::default(),
|
||||
received_authed_idx: RangeSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -259,10 +264,10 @@ impl PartialTranscript {
|
||||
}
|
||||
|
||||
/// Returns whether the index is in bounds of the transcript.
|
||||
pub fn contains(&self, direction: Direction, idx: &Idx) -> bool {
|
||||
pub fn contains(&self, direction: Direction, idx: &RangeSet<usize>) -> bool {
|
||||
match direction {
|
||||
Direction::Sent => idx.end() <= self.sent.len(),
|
||||
Direction::Received => idx.end() <= self.received.len(),
|
||||
Direction::Sent => idx.end().unwrap_or(0) <= self.sent.len(),
|
||||
Direction::Received => idx.end().unwrap_or(0) <= self.received.len(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -289,23 +294,23 @@ impl PartialTranscript {
|
||||
}
|
||||
|
||||
/// Returns the index of sent data which have been authenticated.
|
||||
pub fn sent_authed(&self) -> &Idx {
|
||||
pub fn sent_authed(&self) -> &RangeSet<usize> {
|
||||
&self.sent_authed_idx
|
||||
}
|
||||
|
||||
/// Returns the index of received data which have been authenticated.
|
||||
pub fn received_authed(&self) -> &Idx {
|
||||
pub fn received_authed(&self) -> &RangeSet<usize> {
|
||||
&self.received_authed_idx
|
||||
}
|
||||
|
||||
/// Returns the index of sent data which haven't been authenticated.
|
||||
pub fn sent_unauthed(&self) -> Idx {
|
||||
Idx(RangeSet::from(0..self.sent.len()).difference(&self.sent_authed_idx.0))
|
||||
pub fn sent_unauthed(&self) -> RangeSet<usize> {
|
||||
(0..self.sent.len()).difference(&self.sent_authed_idx)
|
||||
}
|
||||
|
||||
/// Returns the index of received data which haven't been authenticated.
|
||||
pub fn received_unauthed(&self) -> Idx {
|
||||
Idx(RangeSet::from(0..self.received.len()).difference(&self.received_authed_idx.0))
|
||||
pub fn received_unauthed(&self) -> RangeSet<usize> {
|
||||
(0..self.received.len()).difference(&self.received_authed_idx)
|
||||
}
|
||||
|
||||
/// Returns an iterator over the authenticated data in the transcript.
|
||||
@@ -315,7 +320,7 @@ impl PartialTranscript {
|
||||
Direction::Received => (&self.received, &self.received_authed_idx),
|
||||
};
|
||||
|
||||
authed.0.iter().map(|i| data[i])
|
||||
authed.iter().map(|i| data[i])
|
||||
}
|
||||
|
||||
/// Unions the authenticated data of this transcript with another.
|
||||
@@ -337,8 +342,7 @@ impl PartialTranscript {
|
||||
|
||||
for range in other
|
||||
.sent_authed_idx
|
||||
.0
|
||||
.difference(&self.sent_authed_idx.0)
|
||||
.difference(&self.sent_authed_idx)
|
||||
.iter_ranges()
|
||||
{
|
||||
self.sent[range.clone()].copy_from_slice(&other.sent[range]);
|
||||
@@ -346,8 +350,7 @@ impl PartialTranscript {
|
||||
|
||||
for range in other
|
||||
.received_authed_idx
|
||||
.0
|
||||
.difference(&self.received_authed_idx.0)
|
||||
.difference(&self.received_authed_idx)
|
||||
.iter_ranges()
|
||||
{
|
||||
self.received[range.clone()].copy_from_slice(&other.received[range]);
|
||||
@@ -399,12 +402,12 @@ impl PartialTranscript {
|
||||
pub fn set_unauthed_range(&mut self, value: u8, direction: Direction, range: Range<usize>) {
|
||||
match direction {
|
||||
Direction::Sent => {
|
||||
for range in range.difference(&self.sent_authed_idx.0).iter_ranges() {
|
||||
for range in range.difference(&self.sent_authed_idx).iter_ranges() {
|
||||
self.sent[range].fill(value);
|
||||
}
|
||||
}
|
||||
Direction::Received => {
|
||||
for range in range.difference(&self.received_authed_idx.0).iter_ranges() {
|
||||
for range in range.difference(&self.received_authed_idx).iter_ranges() {
|
||||
self.received[range].fill(value);
|
||||
}
|
||||
}
|
||||
@@ -433,130 +436,19 @@ impl fmt::Display for Direction {
|
||||
}
|
||||
}
|
||||
|
||||
/// Transcript index.
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct Idx(RangeSet<usize>);
|
||||
|
||||
impl Idx {
|
||||
/// Creates a new index builder.
|
||||
pub fn builder() -> IdxBuilder {
|
||||
IdxBuilder::default()
|
||||
}
|
||||
|
||||
/// Creates an empty index.
|
||||
pub fn empty() -> Self {
|
||||
Self(RangeSet::default())
|
||||
}
|
||||
|
||||
/// Creates a new transcript index.
|
||||
pub fn new(ranges: impl Into<RangeSet<usize>>) -> Self {
|
||||
Self(ranges.into())
|
||||
}
|
||||
|
||||
/// Returns the start of the index.
|
||||
pub fn start(&self) -> usize {
|
||||
self.0.min().unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Returns the end of the index, non-inclusive.
|
||||
pub fn end(&self) -> usize {
|
||||
self.0.end().unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the values in the index.
|
||||
pub fn iter(&self) -> impl Iterator<Item = usize> + '_ {
|
||||
self.0.iter()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the ranges of the index.
|
||||
pub fn iter_ranges(&self) -> impl Iterator<Item = Range<usize>> + '_ {
|
||||
self.0.iter_ranges()
|
||||
}
|
||||
|
||||
/// Returns the number of values in the index.
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
/// Returns whether the index is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
/// Returns the number of disjoint ranges in the index.
|
||||
pub fn count(&self) -> usize {
|
||||
self.0.len_ranges()
|
||||
}
|
||||
|
||||
pub(crate) fn as_range_set(&self) -> &RangeSet<usize> {
|
||||
&self.0
|
||||
}
|
||||
|
||||
/// Returns the union of this index with another.
|
||||
pub(crate) fn union(&self, other: &Idx) -> Idx {
|
||||
Idx(self.0.union(&other.0))
|
||||
}
|
||||
|
||||
/// Unions this index with another.
|
||||
pub(crate) fn union_mut(&mut self, other: &Idx) {
|
||||
self.0.union_mut(&other.0);
|
||||
}
|
||||
|
||||
/// Returns the difference between `self` and `other`.
|
||||
pub(crate) fn difference(&self, other: &Idx) -> Idx {
|
||||
Idx(self.0.difference(&other.0))
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is a subset of `other`.
|
||||
pub(crate) fn is_subset(&self, other: &Idx) -> bool {
|
||||
self.0.is_subset(&other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Idx {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("Idx([")?;
|
||||
let count = self.0.len_ranges();
|
||||
for (i, range) in self.0.iter_ranges().enumerate() {
|
||||
write!(f, "{}..{}", range.start, range.end)?;
|
||||
if i < count - 1 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
}
|
||||
f.write_str("])")?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Builder for [`Idx`].
|
||||
#[derive(Debug, Default)]
|
||||
pub struct IdxBuilder(RangeSet<usize>);
|
||||
|
||||
impl IdxBuilder {
|
||||
/// Unions ranges.
|
||||
pub fn union(self, ranges: &dyn ToRangeSet<usize>) -> Self {
|
||||
IdxBuilder(self.0.union(&ranges.to_range_set()))
|
||||
}
|
||||
|
||||
/// Builds the index.
|
||||
pub fn build(self) -> Idx {
|
||||
Idx(self.0)
|
||||
}
|
||||
}
|
||||
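The `Idx` newtype and its `IdxBuilder` are deleted outright; call sites now use `rangeset::RangeSet<usize>` directly, which is why `idx.end()` (which returned 0 for an empty `Idx`) becomes `idx.end().unwrap_or(0)` throughout this diff. A before/after sketch of the migration:

```rust
// Sketch: typical call-site migration from the removed Idx wrapper to RangeSet.
use rangeset::RangeSet;

fn old_vs_new() {
    // Before: let idx = Idx::new([1..4, 6..9]);
    let idx: RangeSet<usize> = RangeSet::from([1..4, 6..9]);

    // Before: idx.end() returned 0 for an empty index.
    let end = idx.end().unwrap_or(0);
    assert_eq!(end, 9);
}
```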
|
||||
/// Transcript subsequence.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(try_from = "validation::SubsequenceUnchecked")]
|
||||
pub struct Subsequence {
|
||||
/// Index of the subsequence.
|
||||
idx: Idx,
|
||||
idx: RangeSet<usize>,
|
||||
/// Data of the subsequence.
|
||||
data: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Subsequence {
|
||||
/// Creates a new subsequence.
|
||||
pub fn new(idx: Idx, data: Vec<u8>) -> Result<Self, InvalidSubsequence> {
|
||||
pub fn new(idx: RangeSet<usize>, data: Vec<u8>) -> Result<Self, InvalidSubsequence> {
|
||||
if idx.len() != data.len() {
|
||||
return Err(InvalidSubsequence(
|
||||
"index length does not match data length",
|
||||
@@ -567,7 +459,7 @@ impl Subsequence {
|
||||
}
|
||||
|
||||
/// Returns the index of the subsequence.
|
||||
pub fn index(&self) -> &Idx {
|
||||
pub fn index(&self) -> &RangeSet<usize> {
|
||||
&self.idx
|
||||
}
|
||||
|
||||
@@ -583,7 +475,7 @@ impl Subsequence {
|
||||
}
|
||||
|
||||
/// Returns the inner parts of the subsequence.
|
||||
pub fn into_parts(self) -> (Idx, Vec<u8>) {
|
||||
pub fn into_parts(self) -> (RangeSet<usize>, Vec<u8>) {
|
||||
(self.idx, self.data)
|
||||
}
|
||||
|
||||
@@ -611,7 +503,7 @@ mod validation {
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct SubsequenceUnchecked {
|
||||
idx: Idx,
|
||||
idx: RangeSet<usize>,
|
||||
data: Vec<u8>,
|
||||
}
|
||||
|
||||
@@ -633,8 +525,8 @@ mod validation {
|
||||
pub(super) struct CompressedPartialTranscriptUnchecked {
|
||||
sent_authed: Vec<u8>,
|
||||
received_authed: Vec<u8>,
|
||||
sent_idx: Idx,
|
||||
recv_idx: Idx,
|
||||
sent_idx: RangeSet<usize>,
|
||||
recv_idx: RangeSet<usize>,
|
||||
sent_total: usize,
|
||||
recv_total: usize,
|
||||
}
|
||||
@@ -651,8 +543,8 @@ mod validation {
|
||||
));
|
||||
}
|
||||
|
||||
if unchecked.sent_idx.end() > unchecked.sent_total
|
||||
|| unchecked.recv_idx.end() > unchecked.recv_total
|
||||
if unchecked.sent_idx.end().unwrap_or(0) > unchecked.sent_total
|
||||
|| unchecked.recv_idx.end().unwrap_or(0) > unchecked.recv_total
|
||||
{
|
||||
return Err(InvalidCompressedPartialTranscript(
|
||||
"ranges are not in bounds of the data",
|
||||
@@ -681,8 +573,8 @@ mod validation {
|
||||
CompressedPartialTranscriptUnchecked {
|
||||
received_authed: vec![1, 2, 3, 11, 12, 13],
|
||||
sent_authed: vec![4, 5, 6, 14, 15, 16],
|
||||
recv_idx: Idx(RangeSet::new(&[1..4, 11..14])),
|
||||
sent_idx: Idx(RangeSet::new(&[4..7, 14..17])),
|
||||
recv_idx: RangeSet::from([1..4, 11..14]),
|
||||
sent_idx: RangeSet::from([4..7, 14..17]),
|
||||
sent_total: 20,
|
||||
recv_total: 20,
|
||||
}
|
||||
@@ -721,7 +613,6 @@ mod validation {
|
||||
// Change the total to be less than the last range's end bound.
|
||||
let end = partial_transcript
|
||||
.sent_idx
|
||||
.0
|
||||
.iter_ranges()
|
||||
.next_back()
|
||||
.unwrap()
|
||||
@@ -753,31 +644,25 @@ mod tests {
|
||||
|
||||
#[fixture]
|
||||
fn partial_transcript() -> PartialTranscript {
|
||||
transcript().to_partial(
|
||||
Idx::new(RangeSet::new(&[1..4, 6..9])),
|
||||
Idx::new(RangeSet::new(&[2..5, 7..10])),
|
||||
)
|
||||
transcript().to_partial(RangeSet::from([1..4, 6..9]), RangeSet::from([2..5, 7..10]))
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_transcript_get_subsequence(transcript: Transcript) {
|
||||
let subseq = transcript
|
||||
.get(Direction::Received, &Idx(RangeSet::from([0..4, 7..10])))
|
||||
.get(Direction::Received, &RangeSet::from([0..4, 7..10]))
|
||||
.unwrap();
|
||||
assert_eq!(subseq.data, vec![0, 1, 2, 3, 7, 8, 9]);
|
||||
|
||||
let subseq = transcript
|
||||
.get(Direction::Sent, &Idx(RangeSet::from([0..4, 9..12])))
|
||||
.get(Direction::Sent, &RangeSet::from([0..4, 9..12]))
|
||||
.unwrap();
|
||||
assert_eq!(subseq.data, vec![0, 1, 2, 3, 9, 10, 11]);
|
||||
|
||||
let subseq = transcript.get(
|
||||
Direction::Received,
|
||||
&Idx(RangeSet::from([0..4, 7..10, 11..13])),
|
||||
);
|
||||
let subseq = transcript.get(Direction::Received, &RangeSet::from([0..4, 7..10, 11..13]));
|
||||
assert_eq!(subseq, None);
|
||||
|
||||
let subseq = transcript.get(Direction::Sent, &Idx(RangeSet::from([0..4, 7..10, 11..13])));
|
||||
let subseq = transcript.get(Direction::Sent, &RangeSet::from([0..4, 7..10, 11..13]));
|
||||
assert_eq!(subseq, None);
|
||||
}
|
||||
|
||||
@@ -790,7 +675,7 @@ mod tests {
|
||||
|
||||
#[rstest]
|
||||
fn test_transcript_to_partial_success(transcript: Transcript) {
|
||||
let partial = transcript.to_partial(Idx::new(0..2), Idx::new(3..7));
|
||||
let partial = transcript.to_partial(RangeSet::from(0..2), RangeSet::from(3..7));
|
||||
assert_eq!(partial.sent_unsafe(), [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
|
||||
assert_eq!(
|
||||
partial.received_unsafe(),
|
||||
@@ -801,29 +686,30 @@ mod tests {
|
||||
#[rstest]
|
||||
#[should_panic]
|
||||
fn test_transcript_to_partial_failure(transcript: Transcript) {
|
||||
let _ = transcript.to_partial(Idx::new(0..14), Idx::new(3..7));
|
||||
let _ = transcript.to_partial(RangeSet::from(0..14), RangeSet::from(3..7));
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_partial_transcript_contains(transcript: Transcript) {
|
||||
let partial = transcript.to_partial(Idx::new(0..2), Idx::new(3..7));
|
||||
assert!(partial.contains(Direction::Sent, &Idx::new([0..5, 7..10])));
|
||||
assert!(!partial.contains(Direction::Received, &Idx::new([4..6, 7..13])))
|
||||
let partial = transcript.to_partial(RangeSet::from(0..2), RangeSet::from(3..7));
|
||||
assert!(partial.contains(Direction::Sent, &RangeSet::from([0..5, 7..10])));
|
||||
assert!(!partial.contains(Direction::Received, &RangeSet::from([4..6, 7..13])))
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_partial_transcript_unauthed(transcript: Transcript) {
|
||||
let partial = transcript.to_partial(Idx::new(0..2), Idx::new(3..7));
|
||||
assert_eq!(partial.sent_unauthed(), Idx::new(2..12));
|
||||
assert_eq!(partial.received_unauthed(), Idx::new([0..3, 7..12]));
|
||||
let partial = transcript.to_partial(RangeSet::from(0..2), RangeSet::from(3..7));
|
||||
assert_eq!(partial.sent_unauthed(), RangeSet::from(2..12));
|
||||
assert_eq!(partial.received_unauthed(), RangeSet::from([0..3, 7..12]));
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_partial_transcript_union_success(transcript: Transcript) {
|
||||
// Non overlapping ranges.
|
||||
let mut simple_partial = transcript.to_partial(Idx::new(0..2), Idx::new(3..7));
|
||||
let mut simple_partial = transcript.to_partial(RangeSet::from(0..2), RangeSet::from(3..7));
|
||||
|
||||
let other_simple_partial = transcript.to_partial(Idx::new(3..5), Idx::new(1..2));
|
||||
let other_simple_partial =
|
||||
transcript.to_partial(RangeSet::from(3..5), RangeSet::from(1..2));
|
||||
|
||||
simple_partial.union_transcript(&other_simple_partial);
|
||||
|
||||
@@ -835,12 +721,16 @@ mod tests {
|
||||
simple_partial.received_unsafe(),
|
||||
[0, 1, 0, 3, 4, 5, 6, 0, 0, 0, 0, 0]
|
||||
);
|
||||
assert_eq!(simple_partial.sent_authed(), &Idx::new([0..2, 3..5]));
|
||||
assert_eq!(simple_partial.received_authed(), &Idx::new([1..2, 3..7]));
|
||||
assert_eq!(simple_partial.sent_authed(), &RangeSet::from([0..2, 3..5]));
|
||||
assert_eq!(
|
||||
simple_partial.received_authed(),
|
||||
&RangeSet::from([1..2, 3..7])
|
||||
);
|
||||
|
||||
// Overwrite with another partial transcript.
|
||||
|
||||
let another_simple_partial = transcript.to_partial(Idx::new(1..4), Idx::new(6..9));
|
||||
let another_simple_partial =
|
||||
transcript.to_partial(RangeSet::from(1..4), RangeSet::from(6..9));
|
||||
|
||||
simple_partial.union_transcript(&another_simple_partial);
|
||||
|
||||
@@ -852,13 +742,17 @@ mod tests {
|
||||
simple_partial.received_unsafe(),
|
||||
[0, 1, 0, 3, 4, 5, 6, 7, 8, 0, 0, 0]
|
||||
);
|
||||
assert_eq!(simple_partial.sent_authed(), &Idx::new(0..5));
|
||||
assert_eq!(simple_partial.received_authed(), &Idx::new([1..2, 3..9]));
|
||||
assert_eq!(simple_partial.sent_authed(), &RangeSet::from(0..5));
|
||||
assert_eq!(
|
||||
simple_partial.received_authed(),
|
||||
&RangeSet::from([1..2, 3..9])
|
||||
);
|
||||
|
||||
// Overlapping ranges.
|
||||
let mut overlap_partial = transcript.to_partial(Idx::new(4..6), Idx::new(3..7));
|
||||
let mut overlap_partial = transcript.to_partial(RangeSet::from(4..6), RangeSet::from(3..7));
|
||||
|
||||
let other_overlap_partial = transcript.to_partial(Idx::new(3..5), Idx::new(5..9));
|
||||
let other_overlap_partial =
|
||||
transcript.to_partial(RangeSet::from(3..5), RangeSet::from(5..9));
|
||||
|
||||
overlap_partial.union_transcript(&other_overlap_partial);
|
||||
|
||||
@@ -870,13 +764,16 @@ mod tests {
|
||||
overlap_partial.received_unsafe(),
|
||||
[0, 0, 0, 3, 4, 5, 6, 7, 8, 0, 0, 0]
|
||||
);
|
||||
assert_eq!(overlap_partial.sent_authed(), &Idx::new([3..5, 4..6]));
|
||||
assert_eq!(overlap_partial.received_authed(), &Idx::new([3..7, 5..9]));
|
||||
assert_eq!(overlap_partial.sent_authed(), &RangeSet::from([3..5, 4..6]));
|
||||
assert_eq!(
|
||||
overlap_partial.received_authed(),
|
||||
&RangeSet::from([3..7, 5..9])
|
||||
);
|
||||
|
||||
// Equal ranges.
|
||||
let mut equal_partial = transcript.to_partial(Idx::new(4..6), Idx::new(3..7));
|
||||
let mut equal_partial = transcript.to_partial(RangeSet::from(4..6), RangeSet::from(3..7));
|
||||
|
||||
let other_equal_partial = transcript.to_partial(Idx::new(4..6), Idx::new(3..7));
|
||||
let other_equal_partial = transcript.to_partial(RangeSet::from(4..6), RangeSet::from(3..7));
|
||||
|
||||
equal_partial.union_transcript(&other_equal_partial);
|
||||
|
||||
@@ -888,13 +785,15 @@ mod tests {
|
||||
equal_partial.received_unsafe(),
|
||||
[0, 0, 0, 3, 4, 5, 6, 0, 0, 0, 0, 0]
|
||||
);
|
||||
assert_eq!(equal_partial.sent_authed(), &Idx::new(4..6));
|
||||
assert_eq!(equal_partial.received_authed(), &Idx::new(3..7));
|
||||
assert_eq!(equal_partial.sent_authed(), &RangeSet::from(4..6));
|
||||
assert_eq!(equal_partial.received_authed(), &RangeSet::from(3..7));
|
||||
|
||||
// Subset ranges.
|
||||
let mut subset_partial = transcript.to_partial(Idx::new(4..10), Idx::new(3..11));
|
||||
let mut subset_partial =
|
||||
transcript.to_partial(RangeSet::from(4..10), RangeSet::from(3..11));
|
||||
|
||||
let other_subset_partial = transcript.to_partial(Idx::new(6..9), Idx::new(5..6));
|
||||
let other_subset_partial =
|
||||
transcript.to_partial(RangeSet::from(6..9), RangeSet::from(5..6));
|
||||
|
||||
subset_partial.union_transcript(&other_subset_partial);
|
||||
|
||||
@@ -906,30 +805,32 @@ mod tests {
|
||||
subset_partial.received_unsafe(),
|
||||
[0, 0, 0, 3, 4, 5, 6, 7, 8, 9, 10, 0]
|
||||
);
|
||||
assert_eq!(subset_partial.sent_authed(), &Idx::new(4..10));
|
||||
assert_eq!(subset_partial.received_authed(), &Idx::new(3..11));
|
||||
assert_eq!(subset_partial.sent_authed(), &RangeSet::from(4..10));
|
||||
assert_eq!(subset_partial.received_authed(), &RangeSet::from(3..11));
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[should_panic]
|
||||
fn test_partial_transcript_union_failure(transcript: Transcript) {
|
||||
let mut partial = transcript.to_partial(Idx::new(4..10), Idx::new(3..11));
|
||||
let mut partial = transcript.to_partial(RangeSet::from(4..10), RangeSet::from(3..11));
|
||||
|
||||
let other_transcript = Transcript::new(
|
||||
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
|
||||
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
|
||||
);
|
||||
|
||||
let other_partial = other_transcript.to_partial(Idx::new(6..9), Idx::new(5..6));
|
||||
let other_partial = other_transcript.to_partial(RangeSet::from(6..9), RangeSet::from(5..6));
|
||||
|
||||
partial.union_transcript(&other_partial);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_partial_transcript_union_subseq_success(transcript: Transcript) {
|
||||
let mut partial = transcript.to_partial(Idx::new(4..10), Idx::new(3..11));
|
||||
let sent_seq = Subsequence::new(Idx::new([0..3, 5..7]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
let recv_seq = Subsequence::new(Idx::new([0..4, 5..7]), [0, 1, 2, 3, 5, 6].into()).unwrap();
|
||||
let mut partial = transcript.to_partial(RangeSet::from(4..10), RangeSet::from(3..11));
|
||||
let sent_seq =
|
||||
Subsequence::new(RangeSet::from([0..3, 5..7]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
let recv_seq =
|
||||
Subsequence::new(RangeSet::from([0..4, 5..7]), [0, 1, 2, 3, 5, 6].into()).unwrap();
|
||||
|
||||
partial.union_subsequence(Direction::Sent, &sent_seq);
|
||||
partial.union_subsequence(Direction::Received, &recv_seq);
|
||||
@@ -939,30 +840,31 @@ mod tests {
|
||||
partial.received_unsafe(),
|
||||
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0]
|
||||
);
|
||||
assert_eq!(partial.sent_authed(), &Idx::new([0..3, 4..10]));
|
||||
assert_eq!(partial.received_authed(), &Idx::new(0..11));
|
||||
assert_eq!(partial.sent_authed(), &RangeSet::from([0..3, 4..10]));
|
||||
assert_eq!(partial.received_authed(), &RangeSet::from(0..11));
|
||||
|
||||
// Overwrite with another subseq.
|
||||
let other_sent_seq = Subsequence::new(Idx::new(0..3), [3, 2, 1].into()).unwrap();
|
||||
let other_sent_seq = Subsequence::new(RangeSet::from(0..3), [3, 2, 1].into()).unwrap();
|
||||
|
||||
partial.union_subsequence(Direction::Sent, &other_sent_seq);
|
||||
assert_eq!(partial.sent_unsafe(), [3, 2, 1, 0, 4, 5, 6, 7, 8, 9, 0, 0]);
|
||||
assert_eq!(partial.sent_authed(), &Idx::new([0..3, 4..10]));
|
||||
assert_eq!(partial.sent_authed(), &RangeSet::from([0..3, 4..10]));
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[should_panic]
|
||||
fn test_partial_transcript_union_subseq_failure(transcript: Transcript) {
|
||||
let mut partial = transcript.to_partial(Idx::new(4..10), Idx::new(3..11));
|
||||
let mut partial = transcript.to_partial(RangeSet::from(4..10), RangeSet::from(3..11));
|
||||
|
||||
let sent_seq = Subsequence::new(Idx::new([0..3, 13..15]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
let sent_seq =
|
||||
Subsequence::new(RangeSet::from([0..3, 13..15]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
|
||||
partial.union_subsequence(Direction::Sent, &sent_seq);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_partial_transcript_set_unauthed_range(transcript: Transcript) {
|
||||
let mut partial = transcript.to_partial(Idx::new(4..10), Idx::new(3..7));
|
||||
let mut partial = transcript.to_partial(RangeSet::from(4..10), RangeSet::from(3..7));
|
||||
|
||||
partial.set_unauthed_range(7, Direction::Sent, 2..5);
|
||||
partial.set_unauthed_range(5, Direction::Sent, 0..2);
|
||||
@@ -979,13 +881,13 @@ mod tests {
|
||||
#[rstest]
|
||||
#[should_panic]
|
||||
fn test_subsequence_new_invalid_len() {
|
||||
let _ = Subsequence::new(Idx::new([0..3, 5..8]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
let _ = Subsequence::new(RangeSet::from([0..3, 5..8]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[should_panic]
|
||||
fn test_subsequence_copy_to_invalid_len() {
|
||||
let seq = Subsequence::new(Idx::new([0..3, 5..7]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
let seq = Subsequence::new(RangeSet::from([0..3, 5..7]), [0, 1, 2, 5, 6].into()).unwrap();
|
||||
|
||||
let mut data: [u8; 3] = [0, 1, 2];
|
||||
seq.copy_to(&mut data);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Transcript commitments.
|
||||
|
||||
use std::{collections::HashSet, fmt};
|
||||
use std::fmt;
|
||||
|
||||
use rangeset::ToRangeSet;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -10,7 +10,7 @@ use crate::{
|
||||
transcript::{
|
||||
encoding::{EncodingCommitment, EncodingTree},
|
||||
hash::{PlaintextHash, PlaintextHashSecret},
|
||||
Direction, Idx, Transcript,
|
||||
Direction, RangeSet, Transcript,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -69,9 +69,7 @@ pub enum TranscriptSecret {
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TranscriptCommitConfig {
|
||||
encoding_hash_alg: HashAlgId,
|
||||
has_encoding: bool,
|
||||
has_hash: bool,
|
||||
commits: Vec<((Direction, Idx), TranscriptCommitmentKind)>,
|
||||
commits: Vec<((Direction, RangeSet<usize>), TranscriptCommitmentKind)>,
|
||||
}
|
||||
|
||||
impl TranscriptCommitConfig {
|
||||
@@ -85,18 +83,8 @@ impl TranscriptCommitConfig {
|
||||
&self.encoding_hash_alg
|
||||
}
|
||||
|
||||
/// Returns `true` if the configuration has any encoding commitments.
|
||||
pub fn has_encoding(&self) -> bool {
|
||||
self.has_encoding
|
||||
}
|
||||
|
||||
/// Returns `true` if the configuration has any hash commitments.
|
||||
pub fn has_hash(&self) -> bool {
|
||||
self.has_hash
|
||||
}
|
||||
|
||||
/// Returns an iterator over the encoding commitment indices.
|
||||
pub fn iter_encoding(&self) -> impl Iterator<Item = &(Direction, Idx)> {
|
||||
pub fn iter_encoding(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
|
||||
self.commits.iter().filter_map(|(idx, kind)| match kind {
|
||||
TranscriptCommitmentKind::Encoding => Some(idx),
|
||||
_ => None,
|
||||
@@ -104,7 +92,7 @@ impl TranscriptCommitConfig {
|
||||
}
|
||||
|
||||
/// Returns an iterator over the hash commitment indices.
|
||||
pub fn iter_hash(&self) -> impl Iterator<Item = (&(Direction, Idx), &HashAlgId)> {
|
||||
pub fn iter_hash(&self) -> impl Iterator<Item = (&(Direction, RangeSet<usize>), &HashAlgId)> {
|
||||
self.commits.iter().filter_map(|(idx, kind)| match kind {
|
||||
TranscriptCommitmentKind::Hash { alg } => Some((idx, alg)),
|
||||
_ => None,
|
||||
@@ -114,7 +102,10 @@ impl TranscriptCommitConfig {
|
||||
/// Returns a request for the transcript commitments.
|
||||
pub fn to_request(&self) -> TranscriptCommitRequest {
|
||||
TranscriptCommitRequest {
|
||||
encoding: self.has_encoding,
|
||||
encoding: self
|
||||
.iter_encoding()
|
||||
.map(|(dir, idx)| (*dir, idx.clone()))
|
||||
.collect(),
|
||||
hash: self
|
||||
.iter_hash()
|
||||
.map(|((dir, idx), alg)| (*dir, idx.clone(), *alg))
|
||||
@@ -131,10 +122,8 @@ impl TranscriptCommitConfig {
|
||||
pub struct TranscriptCommitConfigBuilder<'a> {
|
||||
transcript: &'a Transcript,
|
||||
encoding_hash_alg: HashAlgId,
|
||||
has_encoding: bool,
|
||||
has_hash: bool,
|
||||
default_kind: TranscriptCommitmentKind,
|
||||
commits: HashSet<((Direction, Idx), TranscriptCommitmentKind)>,
|
||||
commits: Vec<((Direction, RangeSet<usize>), TranscriptCommitmentKind)>,
|
||||
}
|
||||
|
||||
impl<'a> TranscriptCommitConfigBuilder<'a> {
|
||||
@@ -143,10 +132,8 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
|
||||
Self {
|
||||
transcript,
|
||||
encoding_hash_alg: HashAlgId::BLAKE3,
|
||||
has_encoding: false,
|
||||
has_hash: false,
|
||||
default_kind: TranscriptCommitmentKind::Encoding,
|
||||
commits: HashSet::default(),
|
||||
commits: Vec::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -175,27 +162,25 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
|
||||
direction: Direction,
|
||||
kind: TranscriptCommitmentKind,
|
||||
) -> Result<&mut Self, TranscriptCommitConfigBuilderError> {
|
||||
let idx = Idx::new(ranges.to_range_set());
|
||||
let idx = ranges.to_range_set();
|
||||
|
||||
if idx.end() > self.transcript.len_of_direction(direction) {
|
||||
if idx.end().unwrap_or(0) > self.transcript.len_of_direction(direction) {
|
||||
return Err(TranscriptCommitConfigBuilderError::new(
|
||||
ErrorKind::Index,
|
||||
format!(
|
||||
"range is out of bounds of the transcript ({}): {} > {}",
|
||||
direction,
|
||||
idx.end(),
|
||||
idx.end().unwrap_or(0),
|
||||
self.transcript.len_of_direction(direction)
|
||||
),
|
||||
));
|
||||
}
|
||||
let value = ((direction, idx), kind);
|
||||
|
||||
match kind {
|
||||
TranscriptCommitmentKind::Encoding => self.has_encoding = true,
|
||||
TranscriptCommitmentKind::Hash { .. } => self.has_hash = true,
|
||||
if !self.commits.contains(&value) {
|
||||
self.commits.push(value);
|
||||
}
|
||||
|
||||
self.commits.insert(((direction, idx), kind));
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
@@ -241,8 +226,6 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
|
||||
pub fn build(self) -> Result<TranscriptCommitConfig, TranscriptCommitConfigBuilderError> {
|
||||
Ok(TranscriptCommitConfig {
|
||||
encoding_hash_alg: self.encoding_hash_alg,
|
||||
has_encoding: self.has_encoding,
|
||||
has_hash: self.has_hash,
|
||||
commits: Vec::from_iter(self.commits),
|
||||
})
|
||||
}
|
||||
@@ -289,23 +272,18 @@ impl fmt::Display for TranscriptCommitConfigBuilderError {
|
||||
/// Request to compute transcript commitments.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TranscriptCommitRequest {
|
||||
encoding: bool,
|
||||
hash: Vec<(Direction, Idx, HashAlgId)>,
|
||||
encoding: Vec<(Direction, RangeSet<usize>)>,
|
||||
hash: Vec<(Direction, RangeSet<usize>, HashAlgId)>,
|
||||
}
|
||||
|
||||
impl TranscriptCommitRequest {
|
||||
/// Returns `true` if an encoding commitment is requested.
|
||||
pub fn encoding(&self) -> bool {
|
||||
self.encoding
|
||||
}
|
||||
|
||||
/// Returns `true` if a hash commitment is requested.
|
||||
pub fn has_hash(&self) -> bool {
|
||||
!self.hash.is_empty()
|
||||
/// Returns an iterator over the encoding commitments.
|
||||
pub fn iter_encoding(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
|
||||
self.encoding.iter()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the hash commitments.
|
||||
pub fn iter_hash(&self) -> impl Iterator<Item = &(Direction, Idx, HashAlgId)> {
|
||||
pub fn iter_hash(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>, HashAlgId)> {
|
||||
self.hash.iter()
|
||||
}
|
||||
}
|
||||
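`TranscriptCommitRequest` now carries the committed ranges themselves rather than a single `encoding: bool` flag, so a verifier can walk the requested commitments directly. A sketch of consuming a request through the iterators above:

```rust
// Sketch: iterating the per-range commitments carried by a request.
fn inspect(request: &TranscriptCommitRequest) {
    for (direction, idx) in request.iter_encoding() {
        println!("encoding commitment: {direction} {idx:?}");
    }
    for (direction, idx, alg) in request.iter_hash() {
        println!("hash commitment ({alg}): {direction} {idx:?}");
    }
}
```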
|
||||
@@ -12,7 +12,7 @@ const BIT_ENCODING_SIZE: usize = 16;
|
||||
const BYTE_ENCODING_SIZE: usize = 128;
|
||||
|
||||
/// Secret used by an encoder to generate encodings.
|
||||
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct EncoderSecret {
|
||||
seed: [u8; 32],
|
||||
delta: [u8; BIT_ENCODING_SIZE],
|
||||
|
||||
@@ -9,7 +9,7 @@ use crate::{
|
||||
transcript::{
|
||||
commit::MAX_TOTAL_COMMITTED_DATA,
|
||||
encoding::{new_encoder, Encoder, EncodingCommitment},
|
||||
Direction, Idx,
|
||||
Direction,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -17,7 +17,7 @@ use crate::{
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub(super) struct Opening {
|
||||
pub(super) direction: Direction,
|
||||
pub(super) idx: Idx,
|
||||
pub(super) idx: RangeSet<usize>,
|
||||
pub(super) blinder: Blinder,
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@ impl EncodingProof {
|
||||
commitment: &EncodingCommitment,
|
||||
sent: &[u8],
|
||||
recv: &[u8],
|
||||
) -> Result<(Idx, Idx), EncodingProofError> {
|
||||
) -> Result<(RangeSet<usize>, RangeSet<usize>), EncodingProofError> {
|
||||
let hasher = provider.get(&commitment.root.alg)?;
|
||||
|
||||
let encoder = new_encoder(&commitment.secret);
|
||||
@@ -89,13 +89,13 @@ impl EncodingProof {
|
||||
};
|
||||
|
||||
// Make sure the ranges are within the bounds of the transcript.
|
||||
if idx.end() > data.len() {
|
||||
if idx.end().unwrap_or(0) > data.len() {
|
||||
return Err(EncodingProofError::new(
|
||||
ErrorKind::Proof,
|
||||
format!(
|
||||
"index out of bounds of the transcript ({}): {} > {}",
|
||||
direction,
|
||||
idx.end(),
|
||||
idx.end().unwrap_or(0),
|
||||
data.len()
|
||||
),
|
||||
));
|
||||
@@ -111,7 +111,7 @@ impl EncodingProof {
|
||||
// present in the merkle tree.
|
||||
leaves.push((*id, hasher.hash(&expected_leaf)));
|
||||
|
||||
auth.union_mut(idx.as_range_set());
|
||||
auth.union_mut(idx);
|
||||
}
|
||||
|
||||
// Verify that the expected hashes are present in the merkle tree.
|
||||
@@ -121,7 +121,7 @@ impl EncodingProof {
|
||||
// data is authentic.
|
||||
inclusion_proof.verify(hasher, &commitment.root, leaves)?;
|
||||
|
||||
Ok((Idx(auth_sent), Idx(auth_recv)))
|
||||
Ok((auth_sent, auth_recv))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -234,7 +234,7 @@ mod test {
|
||||
hash::Blake3,
|
||||
transcript::{
|
||||
encoding::{EncoderSecret, EncodingTree},
|
||||
Idx, Transcript,
|
||||
Transcript,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -249,8 +249,8 @@ mod test {
|
||||
fn new_encoding_fixture(secret: EncoderSecret) -> EncodingFixture {
|
||||
let transcript = Transcript::new(POST_JSON, OK_JSON);
|
||||
|
||||
let idx_0 = (Direction::Sent, Idx::new(0..POST_JSON.len()));
|
||||
let idx_1 = (Direction::Received, Idx::new(0..OK_JSON.len()));
|
||||
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
|
||||
let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len()));
|
||||
|
||||
let provider = encoding_provider(transcript.sent(), transcript.received());
|
||||
let tree = EncodingTree::new(&Blake3::default(), [&idx_0, &idx_1], &provider).unwrap();
|
||||
@@ -317,7 +317,7 @@ mod test {
|
||||
|
||||
let Opening { idx, .. } = proof.openings.values_mut().next().unwrap();
|
||||
|
||||
*idx = Idx::new([0..3, 13..15]);
|
||||
*idx = RangeSet::from([0..3, 13..15]);
|
||||
|
||||
let err = proof
|
||||
.verify_with_provider(
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use bimap::BiMap;
|
||||
use rangeset::{RangeSet, UnionMut};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
@@ -11,7 +12,7 @@ use crate::{
|
||||
proof::{EncodingProof, Opening},
|
||||
EncodingProvider,
|
||||
},
|
||||
Direction, Idx,
|
||||
Direction,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -22,7 +23,7 @@ pub enum EncodingTreeError {
|
||||
#[error("index is out of bounds of the transcript")]
|
||||
OutOfBounds {
|
||||
/// The index.
|
||||
index: Idx,
|
||||
index: RangeSet<usize>,
|
||||
/// The transcript length.
|
||||
transcript_length: usize,
|
||||
},
|
||||
@@ -30,13 +31,13 @@ pub enum EncodingTreeError {
|
||||
#[error("encoding provider is missing an encoding for an index")]
|
||||
MissingEncoding {
|
||||
/// The index which is missing.
|
||||
index: Idx,
|
||||
index: RangeSet<usize>,
|
||||
},
|
||||
/// Index is missing from the tree.
|
||||
#[error("index is missing from the tree")]
|
||||
MissingLeaf {
|
||||
/// The index which is missing.
|
||||
index: Idx,
|
||||
index: RangeSet<usize>,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -49,11 +50,11 @@ pub struct EncodingTree {
|
||||
blinders: Vec<Blinder>,
|
||||
/// Mapping between the index of a leaf and the transcript index it
|
||||
/// corresponds to.
|
||||
idxs: BiMap<usize, (Direction, Idx)>,
|
||||
idxs: BiMap<usize, (Direction, RangeSet<usize>)>,
|
||||
/// Union of all transcript indices in the sent direction.
|
||||
sent_idx: Idx,
|
||||
sent_idx: RangeSet<usize>,
|
||||
/// Union of all transcript indices in the received direction.
|
||||
received_idx: Idx,
|
||||
received_idx: RangeSet<usize>,
|
||||
}
|
||||
|
||||
opaque_debug::implement!(EncodingTree);
|
||||
@@ -68,15 +69,15 @@ impl EncodingTree {
|
||||
/// * `provider` - The encoding provider.
|
||||
pub fn new<'idx>(
|
||||
hasher: &dyn HashAlgorithm,
|
||||
idxs: impl IntoIterator<Item = &'idx (Direction, Idx)>,
|
||||
idxs: impl IntoIterator<Item = &'idx (Direction, RangeSet<usize>)>,
|
||||
provider: &dyn EncodingProvider,
|
||||
) -> Result<Self, EncodingTreeError> {
|
||||
let mut this = Self {
|
||||
tree: MerkleTree::new(hasher.id()),
|
||||
blinders: Vec::new(),
|
||||
idxs: BiMap::new(),
|
||||
sent_idx: Idx::empty(),
|
||||
received_idx: Idx::empty(),
|
||||
sent_idx: RangeSet::default(),
|
||||
received_idx: RangeSet::default(),
|
||||
};
|
||||
|
||||
let mut leaves = Vec::new();
|
||||
@@ -138,7 +139,7 @@ impl EncodingTree {
|
||||
/// * `idxs` - The transcript indices to prove.
|
||||
pub fn proof<'idx>(
|
||||
&self,
|
||||
idxs: impl Iterator<Item = &'idx (Direction, Idx)>,
|
||||
idxs: impl Iterator<Item = &'idx (Direction, RangeSet<usize>)>,
|
||||
) -> Result<EncodingProof, EncodingTreeError> {
|
||||
let mut openings = HashMap::new();
|
||||
for dir_idx in idxs {
|
||||
@@ -171,11 +172,11 @@ impl EncodingTree {
|
||||
}
|
||||
|
||||
/// Returns whether the tree contains the given transcript index.
|
||||
pub fn contains(&self, idx: &(Direction, Idx)) -> bool {
|
||||
pub fn contains(&self, idx: &(Direction, RangeSet<usize>)) -> bool {
|
||||
self.idxs.contains_right(idx)
|
||||
}
|
||||
|
||||
pub(crate) fn idx(&self, direction: Direction) -> &Idx {
|
||||
pub(crate) fn idx(&self, direction: Direction) -> &RangeSet<usize> {
|
||||
match direction {
|
||||
Direction::Sent => &self.sent_idx,
|
||||
Direction::Received => &self.received_idx,
|
||||
@@ -183,7 +184,7 @@ impl EncodingTree {
|
||||
}
|
||||
|
||||
/// Returns the committed transcript indices.
|
||||
pub(crate) fn transcript_indices(&self) -> impl Iterator<Item = &(Direction, Idx)> {
|
||||
pub(crate) fn transcript_indices(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
|
||||
self.idxs.right_values()
|
||||
}
|
||||
}
|
||||
@@ -200,7 +201,7 @@ mod tests {
|
||||
|
||||
fn new_tree<'seq>(
|
||||
transcript: &Transcript,
|
||||
idxs: impl Iterator<Item = &'seq (Direction, Idx)>,
|
||||
idxs: impl Iterator<Item = &'seq (Direction, RangeSet<usize>)>,
|
||||
) -> Result<EncodingTree, EncodingTreeError> {
|
||||
let provider = encoding_provider(transcript.sent(), transcript.received());
|
||||
|
||||
@@ -211,8 +212,8 @@ mod tests {
|
||||
fn test_encoding_tree() {
|
||||
let transcript = Transcript::new(POST_JSON, OK_JSON);
|
||||
|
||||
let idx_0 = (Direction::Sent, Idx::new(0..POST_JSON.len()));
|
||||
let idx_1 = (Direction::Received, Idx::new(0..OK_JSON.len()));
|
||||
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
|
||||
let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len()));
|
||||
|
||||
let tree = new_tree(&transcript, [&idx_0, &idx_1].into_iter()).unwrap();
|
||||
|
||||
@@ -243,10 +244,10 @@ mod tests {
|
||||
fn test_encoding_tree_multiple_ranges() {
|
||||
let transcript = Transcript::new(POST_JSON, OK_JSON);
|
||||
|
||||
let idx_0 = (Direction::Sent, Idx::new(0..1));
|
||||
let idx_1 = (Direction::Sent, Idx::new(1..POST_JSON.len()));
|
||||
let idx_2 = (Direction::Received, Idx::new(0..1));
|
||||
let idx_3 = (Direction::Received, Idx::new(1..OK_JSON.len()));
|
||||
let idx_0 = (Direction::Sent, RangeSet::from(0..1));
|
||||
let idx_1 = (Direction::Sent, RangeSet::from(1..POST_JSON.len()));
|
||||
let idx_2 = (Direction::Received, RangeSet::from(0..1));
|
||||
let idx_3 = (Direction::Received, RangeSet::from(1..OK_JSON.len()));
|
||||
|
||||
let tree = new_tree(&transcript, [&idx_0, &idx_1, &idx_2, &idx_3].into_iter()).unwrap();
|
||||
|
||||
@@ -273,11 +274,11 @@ mod tests {
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let mut expected_auth_sent = Idx::default();
|
||||
let mut expected_auth_sent = RangeSet::default();
|
||||
expected_auth_sent.union_mut(&idx_0.1);
|
||||
expected_auth_sent.union_mut(&idx_1.1);
|
||||
|
||||
let mut expected_auth_recv = Idx::default();
|
||||
let mut expected_auth_recv = RangeSet::default();
|
||||
expected_auth_recv.union_mut(&idx_2.1);
|
||||
expected_auth_recv.union_mut(&idx_3.1);
|
||||
|
||||
@@ -289,9 +290,9 @@ mod tests {
|
||||
fn test_encoding_tree_proof_missing_leaf() {
|
||||
let transcript = Transcript::new(POST_JSON, OK_JSON);
|
||||
|
||||
let idx_0 = (Direction::Sent, Idx::new(0..POST_JSON.len()));
|
||||
let idx_1 = (Direction::Received, Idx::new(0..4));
|
||||
let idx_2 = (Direction::Received, Idx::new(4..OK_JSON.len()));
|
||||
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
|
||||
let idx_1 = (Direction::Received, RangeSet::from(0..4));
|
||||
let idx_2 = (Direction::Received, RangeSet::from(4..OK_JSON.len()));
|
||||
|
||||
let tree = new_tree(&transcript, [&idx_0, &idx_1].into_iter()).unwrap();
|
||||
|
||||
@@ -305,8 +306,8 @@ mod tests {
|
||||
fn test_encoding_tree_out_of_bounds() {
|
||||
let transcript = Transcript::new(POST_JSON, OK_JSON);
|
||||
|
||||
let idx_0 = (Direction::Sent, Idx::new(0..POST_JSON.len() + 1));
|
||||
let idx_1 = (Direction::Received, Idx::new(0..OK_JSON.len() + 1));
|
||||
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len() + 1));
|
||||
let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len() + 1));
|
||||
|
||||
let result = new_tree(&transcript, [&idx_0].into_iter()).unwrap_err();
|
||||
assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));
|
||||
@@ -321,7 +322,7 @@ mod tests {
|
||||
|
||||
let result = EncodingTree::new(
|
||||
&Blake3::default(),
|
||||
[(Direction::Sent, Idx::new(0..8))].iter(),
|
||||
[(Direction::Sent, RangeSet::from(0..8))].iter(),
|
||||
&provider,
|
||||
)
|
||||
.unwrap_err();
|
||||
|
||||
@@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
hash::{Blinder, HashAlgId, HashAlgorithm, TypedHash},
|
||||
transcript::{Direction, Idx},
|
||||
transcript::{Direction, RangeSet},
|
||||
};
|
||||
|
||||
/// Hashes plaintext with a blinder.
|
||||
@@ -23,7 +23,7 @@ pub struct PlaintextHash {
|
||||
/// Direction of the plaintext.
|
||||
pub direction: Direction,
|
||||
/// Index of plaintext.
|
||||
pub idx: Idx,
|
||||
pub idx: RangeSet<usize>,
|
||||
/// The hash of the data.
|
||||
pub hash: TypedHash,
|
||||
}
|
||||
@@ -34,7 +34,7 @@ pub struct PlaintextHashSecret {
|
||||
/// Direction of the plaintext.
|
||||
pub direction: Direction,
|
||||
/// Index of plaintext.
|
||||
pub idx: Idx,
|
||||
pub idx: RangeSet<usize>,
|
||||
/// The algorithm of the hash.
|
||||
pub alg: HashAlgId,
|
||||
/// Blinder for the hash.
|
||||
|
||||
@@ -1,17 +1,18 @@
|
||||
//! Transcript proofs.
|
||||
|
||||
use rangeset::{Cover, ToRangeSet};
|
||||
use rangeset::{Cover, Difference, Subset, ToRangeSet, UnionMut};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{collections::HashSet, fmt};
|
||||
|
||||
use crate::{
|
||||
connection::TranscriptLength,
|
||||
display::FmtRangeSet,
|
||||
hash::{HashAlgId, HashProvider},
|
||||
transcript::{
|
||||
commit::{TranscriptCommitment, TranscriptCommitmentKind},
|
||||
encoding::{EncodingProof, EncodingProofError, EncodingTree},
|
||||
hash::{hash_plaintext, PlaintextHash, PlaintextHashSecret},
|
||||
Direction, Idx, PartialTranscript, Transcript, TranscriptSecret,
|
||||
Direction, PartialTranscript, RangeSet, Transcript, TranscriptSecret,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -77,8 +78,8 @@ impl TranscriptProof {
|
||||
));
|
||||
}
|
||||
|
||||
let mut total_auth_sent = Idx::default();
|
||||
let mut total_auth_recv = Idx::default();
|
||||
let mut total_auth_sent = RangeSet::default();
|
||||
let mut total_auth_recv = RangeSet::default();
|
||||
|
||||
// Verify encoding proof.
|
||||
if let Some(proof) = self.encoding_proof {
|
||||
@@ -120,7 +121,7 @@ impl TranscriptProof {
|
||||
Direction::Received => (self.transcript.received_unsafe(), &mut total_auth_recv),
|
||||
};
|
||||
|
||||
if idx.end() > plaintext.len() {
|
||||
if idx.end().unwrap_or(0) > plaintext.len() {
|
||||
return Err(TranscriptProofError::new(
|
||||
ErrorKind::Hash,
|
||||
"hash opening index is out of bounds",
|
||||
@@ -215,15 +216,15 @@ impl From<EncodingProofError> for TranscriptProofError {
|
||||
/// Union of ranges to reveal.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct QueryIdx {
|
||||
sent: Idx,
|
||||
recv: Idx,
|
||||
sent: RangeSet<usize>,
|
||||
recv: RangeSet<usize>,
|
||||
}
|
||||
|
||||
impl QueryIdx {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
sent: Idx::empty(),
|
||||
recv: Idx::empty(),
|
||||
sent: RangeSet::default(),
|
||||
recv: RangeSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -231,7 +232,7 @@ impl QueryIdx {
|
||||
self.sent.is_empty() && self.recv.is_empty()
|
||||
}
|
||||
|
||||
fn union(&mut self, direction: &Direction, other: &Idx) {
|
||||
fn union(&mut self, direction: &Direction, other: &RangeSet<usize>) {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.union_mut(other),
|
||||
Direction::Received => self.recv.union_mut(other),
|
||||
@@ -241,7 +242,12 @@ impl QueryIdx {
|
||||
|
||||
impl std::fmt::Display for QueryIdx {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "sent: {}, received: {}", self.sent, self.recv)
|
||||
write!(
|
||||
f,
|
||||
"sent: {}, received: {}",
|
||||
FmtRangeSet(&self.sent),
|
||||
FmtRangeSet(&self.recv)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -253,8 +259,8 @@ pub struct TranscriptProofBuilder<'a> {
|
||||
transcript: &'a Transcript,
|
||||
encoding_tree: Option<&'a EncodingTree>,
|
||||
hash_secrets: Vec<&'a PlaintextHashSecret>,
|
||||
committed_sent: Idx,
|
||||
committed_recv: Idx,
|
||||
committed_sent: RangeSet<usize>,
|
||||
committed_recv: RangeSet<usize>,
|
||||
query_idx: QueryIdx,
|
||||
}
|
||||
|
||||
@@ -264,8 +270,8 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
transcript: &'a Transcript,
|
||||
secrets: impl IntoIterator<Item = &'a TranscriptSecret>,
|
||||
) -> Self {
|
||||
let mut committed_sent = Idx::empty();
|
||||
let mut committed_recv = Idx::empty();
|
||||
let mut committed_sent = RangeSet::default();
|
||||
let mut committed_recv = RangeSet::default();
|
||||
|
||||
let mut encoding_tree = None;
|
||||
let mut hash_secrets = Vec::new();
|
||||
@@ -323,15 +329,15 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
ranges: &dyn ToRangeSet<usize>,
|
||||
direction: Direction,
|
||||
) -> Result<&mut Self, TranscriptProofBuilderError> {
|
||||
let idx = Idx::new(ranges.to_range_set());
|
||||
let idx = ranges.to_range_set();
|
||||
|
||||
if idx.end() > self.transcript.len_of_direction(direction) {
|
||||
if idx.end().unwrap_or(0) > self.transcript.len_of_direction(direction) {
|
||||
return Err(TranscriptProofBuilderError::new(
|
||||
BuilderErrorKind::Index,
|
||||
format!(
|
||||
"range is out of bounds of the transcript ({}): {} > {}",
|
||||
direction,
|
||||
idx.end(),
|
||||
idx.end().unwrap_or(0),
|
||||
self.transcript.len_of_direction(direction)
|
||||
),
|
||||
));
|
||||
@@ -348,7 +354,10 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
let missing = idx.difference(committed);
|
||||
return Err(TranscriptProofBuilderError::new(
|
||||
BuilderErrorKind::MissingCommitment,
|
||||
format!("commitment is missing for ranges in {direction} transcript: {missing}"),
|
||||
format!(
|
||||
"commitment is missing for ranges in {direction} transcript: {}",
|
||||
FmtRangeSet(&missing)
|
||||
),
|
||||
));
|
||||
}
|
||||
Ok(self)
|
||||
@@ -403,25 +412,23 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
continue;
|
||||
};
|
||||
|
||||
let (sent_dir_idxs, sent_uncovered) =
|
||||
uncovered_query_idx.sent.as_range_set().cover_by(
|
||||
encoding_tree
|
||||
.transcript_indices()
|
||||
.filter(|(dir, _)| *dir == Direction::Sent),
|
||||
|(_, idx)| &idx.0,
|
||||
);
|
||||
let (sent_dir_idxs, sent_uncovered) = uncovered_query_idx.sent.cover_by(
|
||||
encoding_tree
|
||||
.transcript_indices()
|
||||
.filter(|(dir, _)| *dir == Direction::Sent),
|
||||
|(_, idx)| idx,
|
||||
);
|
||||
// Uncovered ranges will be checked with ranges of the next
|
||||
// preferred commitment kind.
|
||||
uncovered_query_idx.sent = Idx(sent_uncovered);
|
||||
uncovered_query_idx.sent = sent_uncovered;
|
||||
|
||||
let (recv_dir_idxs, recv_uncovered) =
|
||||
uncovered_query_idx.recv.as_range_set().cover_by(
|
||||
encoding_tree
|
||||
.transcript_indices()
|
||||
.filter(|(dir, _)| *dir == Direction::Received),
|
||||
|(_, idx)| &idx.0,
|
||||
);
|
||||
uncovered_query_idx.recv = Idx(recv_uncovered);
|
||||
let (recv_dir_idxs, recv_uncovered) = uncovered_query_idx.recv.cover_by(
|
||||
encoding_tree
|
||||
.transcript_indices()
|
||||
.filter(|(dir, _)| *dir == Direction::Received),
|
||||
|(_, idx)| idx,
|
||||
);
|
||||
uncovered_query_idx.recv = recv_uncovered;
|
||||
|
||||
let dir_idxs = sent_dir_idxs
|
||||
.into_iter()
|
||||
@@ -439,25 +446,23 @@ impl<'a> TranscriptProofBuilder<'a> {
|
||||
}
|
||||
}
|
||||
TranscriptCommitmentKind::Hash { alg } => {
|
||||
let (sent_hashes, sent_uncovered) =
|
||||
uncovered_query_idx.sent.as_range_set().cover_by(
|
||||
self.hash_secrets.iter().filter(|hash| {
|
||||
hash.direction == Direction::Sent && &hash.alg == alg
|
||||
}),
|
||||
|hash| &hash.idx.0,
|
||||
);
|
||||
let (sent_hashes, sent_uncovered) = uncovered_query_idx.sent.cover_by(
|
||||
self.hash_secrets.iter().filter(|hash| {
|
||||
hash.direction == Direction::Sent && &hash.alg == alg
|
||||
}),
|
||||
|hash| &hash.idx,
|
||||
);
|
||||
// Uncovered ranges will be checked with ranges of the next
|
||||
// preferred commitment kind.
|
||||
uncovered_query_idx.sent = Idx(sent_uncovered);
|
||||
uncovered_query_idx.sent = sent_uncovered;
|
||||
|
||||
let (recv_hashes, recv_uncovered) =
|
||||
uncovered_query_idx.recv.as_range_set().cover_by(
|
||||
self.hash_secrets.iter().filter(|hash| {
|
||||
hash.direction == Direction::Received && &hash.alg == alg
|
||||
}),
|
||||
|hash| &hash.idx.0,
|
||||
);
|
||||
uncovered_query_idx.recv = Idx(recv_uncovered);
|
||||
let (recv_hashes, recv_uncovered) = uncovered_query_idx.recv.cover_by(
|
||||
self.hash_secrets.iter().filter(|hash| {
|
||||
hash.direction == Direction::Received && &hash.alg == alg
|
||||
}),
|
||||
|hash| &hash.idx,
|
||||
);
|
||||
uncovered_query_idx.recv = recv_uncovered;
|
||||
|
||||
transcript_proof.hash_secrets.extend(
|
||||
sent_hashes
|
||||
@@ -577,7 +582,7 @@ mod tests {
|
||||
#[rstest]
|
||||
fn test_verify_missing_encoding_commitment_root() {
|
||||
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
|
||||
let idxs = vec![(Direction::Received, Idx::new(0..transcript.len().1))];
|
||||
let idxs = vec![(Direction::Received, RangeSet::from(0..transcript.len().1))];
|
||||
let encoding_tree = EncodingTree::new(
|
||||
&Blake3::default(),
|
||||
&idxs,
|
||||
@@ -638,7 +643,7 @@ mod tests {
|
||||
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
|
||||
|
||||
let direction = Direction::Sent;
|
||||
let idx = Idx::new(0..10);
|
||||
let idx = RangeSet::from(0..10);
|
||||
let blinder: Blinder = rng.random();
|
||||
let alg = HashAlgId::SHA256;
|
||||
let hasher = provider.get(&alg).unwrap();
|
||||
@@ -684,7 +689,7 @@ mod tests {
|
||||
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
|
||||
|
||||
let direction = Direction::Sent;
|
||||
let idx = Idx::new(0..10);
|
||||
let idx = RangeSet::from(0..10);
|
||||
let blinder: Blinder = rng.random();
|
||||
let alg = HashAlgId::SHA256;
|
||||
let hasher = provider.get(&alg).unwrap();
|
||||
@@ -894,10 +899,10 @@ mod tests {
|
||||
match kind {
|
||||
BuilderErrorKind::Cover { uncovered, .. } => {
|
||||
if !uncovered_sent_rangeset.is_empty() {
|
||||
assert_eq!(uncovered.sent, Idx(uncovered_sent_rangeset));
|
||||
assert_eq!(uncovered.sent, uncovered_sent_rangeset);
|
||||
}
|
||||
if !uncovered_recv_rangeset.is_empty() {
|
||||
assert_eq!(uncovered.recv, Idx(uncovered_recv_rangeset));
|
||||
assert_eq!(uncovered.recv, uncovered_recv_rangeset);
|
||||
}
|
||||
}
|
||||
_ => panic!("unexpected error kind: {kind:?}"),
|
||||
|
||||
@@ -18,7 +18,6 @@ spansy = { workspace = true }
|
||||
bincode = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
dotenv = { version = "0.15.0" }
|
||||
futures = { workspace = true }
|
||||
http-body-util = { workspace = true }
|
||||
hex = { workspace = true }
|
||||
@@ -41,3 +40,15 @@ tracing-subscriber = { workspace = true }
|
||||
[[example]]
|
||||
name = "interactive"
|
||||
path = "interactive/interactive.rs"
|
||||
|
||||
[[example]]
|
||||
name = "attestation_prove"
|
||||
path = "attestation/prove.rs"
|
||||
|
||||
[[example]]
|
||||
name = "attestation_present"
|
||||
path = "attestation/present.rs"
|
||||
|
||||
[[example]]
|
||||
name = "attestation_verify"
|
||||
path = "attestation/verify.rs"
|
||||
|
||||
@@ -5,4 +5,4 @@ This folder contains examples demonstrating how to use the TLSNotary protocol.
|
||||
* [Interactive](./interactive/README.md): Interactive Prover and Verifier session without a trusted notary.
|
||||
* [Attestation](./attestation/README.md): Performing a simple notarization with a trusted notary.
|
||||
|
||||
Refer to <https://docs.tlsnotary.org/quick_start/index.html> for a quick start guide to using TLSNotary with these examples.
|
||||
Refer to <https://tlsnotary.org/docs/quick_start> for a quick start guide to using TLSNotary with these examples.
|
||||
164
crates/examples/attestation/README.md
Normal file
@@ -0,0 +1,164 @@
|
||||
# Attestation Example
|
||||
|
||||
|
||||
This example demonstrates a **TLSNotary attestation workflow**: notarizing data from a server with a trusted third party (Notary), then creating verifiable presentations with selective disclosure of sensitive information to a Verifier.
|
||||
|
||||
## 🔍 How It Works
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant P as Prover
|
||||
participant N as MPC-TLS<br/>Verifier
|
||||
participant S as Server<br/>Fixture
|
||||
participant V as Attestation<br/>Verifier
|
||||
|
||||
Note over P,S: 1. Notarization Phase
|
||||
P->>N: Establish MPC-TLS connection
|
||||
P->>S: Request (MPC-TLS)
|
||||
S->>P: Response (MPC-TLS)
|
||||
N->>P: Issue signed attestation
|
||||
|
||||
Note over P: 2. Presentation Phase
|
||||
P->>P: Create redacted presentation
|
||||
|
||||
Note over P,V: 3. Verification Phase
|
||||
P->>V: Share presentation
|
||||
V->>V: Verify attestation signature
|
||||
```
|
||||
|
||||
### The Three-Step Process
|
||||
|
||||
1. **🔐 Notarize**: The Prover collaborates with the Notary to create an authenticated TLS session and obtain a signed attestation
2. **✂️ Present**: The Prover creates a selective presentation, choosing which data to reveal or redact
3. **✅ Verify**: Anyone can verify the presentation's authenticity using the Notary's public key
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### Step 1: Notarize Data
|
||||
|
||||
**Start the test server** (from repository root):
|
||||
```bash
|
||||
RUST_LOG=info PORT=4000 cargo run --bin tlsn-server-fixture
|
||||
```
|
||||
|
||||
**Run the notarization** (in a new terminal):
|
||||
```bash
|
||||
RUST_LOG=info SERVER_PORT=4000 cargo run --release --example attestation_prove
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Notarization completed successfully!
|
||||
The attestation has been written to `example-json.attestation.tlsn` and the corresponding secrets to `example-json.secrets.tlsn`.
|
||||
```
|
||||
|
||||
### Step 2: Create Verifiable Presentation
|
||||
|
||||
**Generate a redacted presentation:**
|
||||
```bash
|
||||
cargo run --release --example attestation_present
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Presentation built successfully!
|
||||
The presentation has been written to `example-json.presentation.tlsn`.
|
||||
```
|
||||
|
||||
> 💡 **Tip**: You can create multiple presentations from the same attestation, each with different redactions!
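For instance, a second, more heavily redacted presentation can be built from the same `*.attestation.tlsn` and `*.secrets.tlsn` files. The sketch below is illustrative: it reuses the same builder calls as `present.rs`, assumes the default JSON fixture, and the function name is hypothetical. Whether a given reveal succeeds depends on the commitments made during notarization in `prove.rs`.

```rust
use tlsn::attestation::{presentation::Presentation, Attestation, CryptoProvider, Secrets};
use tlsn_formats::http::{BodyContent, HttpTranscript};

// Sketch: build a second, minimal presentation from the same attestation and secrets.
fn build_minimal_presentation(
    attestation: &Attestation,
    secrets: &Secrets,
) -> Result<Presentation, Box<dyn std::error::Error>> {
    let transcript = HttpTranscript::parse(secrets.transcript())?;
    let mut proof_builder = secrets.transcript_proof_builder();

    // Reveal only the `id` field of the JSON response; everything else stays redacted.
    let response = &transcript.responses[0];
    if let BodyContent::Json(json) = &response.body.as_ref().unwrap().content {
        proof_builder.reveal_recv(json.get("id").unwrap())?;
    }
    let transcript_proof = proof_builder.build()?;

    // Bind the transcript proof to the attestation.
    let provider = CryptoProvider::default();
    let mut builder = attestation.presentation_builder(&provider);
    builder
        .identity_proof(secrets.identity_proof())
        .transcript_proof(transcript_proof);

    Ok(builder.build()?)
}
```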
|
||||
|
||||
### Step 3: Verify the Presentation
|
||||
|
||||
**Verify the presentation:**
|
||||
```bash
|
||||
cargo run --release --example attestation_verify
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Verifying presentation with {key algorithm} key: { hex encoded key }
|
||||
|
||||
**Ask yourself, do you trust this key?**
|
||||
|
||||
-------------------------------------------------------------------
|
||||
Successfully verified that the data below came from a session with test-server.io at { time }.
|
||||
Note that the data which the Prover chose not to disclose are shown as X.
|
||||
|
||||
Data sent:
|
||||
|
||||
GET /formats/json HTTP/1.1
|
||||
host: test-server.io
|
||||
accept: */*
|
||||
accept-encoding: identity
|
||||
connection: close
|
||||
user-agent: XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
|
||||
|
||||
Data received:
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
content-type: application/json
|
||||
content-length: 722
|
||||
connection: close
|
||||
date: Mon, 08 Sep 2025 09:18:29 GMT
|
||||
|
||||
XXXXXX1234567890XXXXXXXXXXXXXXXXXXXXXXXXJohn DoeXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX1.2XX
|
||||
```
|
||||
|
||||
## 🎯 Use Cases & Examples
|
||||
|
||||
### JSON Data (Default)
|
||||
Perfect for API responses, configuration data, or structured information:
|
||||
```bash
|
||||
# All three steps use JSON by default
|
||||
SERVER_PORT=4000 cargo run --release --example attestation_prove
|
||||
cargo run --release --example attestation_present
|
||||
cargo run --release --example attestation_verify
|
||||
```
|
||||
|
||||
### HTML Content
|
||||
Ideal for web pages, forms, or any HTML-based data:
|
||||
```bash
|
||||
# Notarize HTML content
|
||||
SERVER_PORT=4000 cargo run --release --example attestation_prove -- html
|
||||
cargo run --release --example attestation_present -- html
|
||||
cargo run --release --example attestation_verify -- html
|
||||
```
|
||||
|
||||
### Authenticated/Private Data
|
||||
For APIs requiring authentication tokens, cookies, or private access:
|
||||
```bash
|
||||
# Notarize private data with authentication
|
||||
SERVER_PORT=4000 cargo run --release --example attestation_prove -- authenticated
|
||||
cargo run --release --example attestation_present -- authenticated
|
||||
cargo run --release --example attestation_verify -- authenticated
|
||||
```
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For detailed logging and troubleshooting:
|
||||
```bash
|
||||
RUST_LOG=debug,yamux=info,uid_mux=info SERVER_PORT=4000 cargo run --release --example attestation_prove
|
||||
```
|
||||
|
||||
### Generated Files
|
||||
|
||||
After running the examples, you'll find:
- **`*.attestation.tlsn`**: The cryptographically signed attestation from the Notary
- **`*.secrets.tlsn`**: Cryptographic secrets needed to create presentations
- **`*.presentation.tlsn`**: The verifiable presentation with your chosen redactions
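These are plain bincode-serialized files, so they can be loaded back in Rust the same way the examples do. A minimal sketch, assuming the `tlsn` types used in `present.rs` and `verify.rs` and the `example-json` file prefix from the default run:

```rust
use tlsn::attestation::{presentation::Presentation, Attestation, Secrets};

// Sketch: read the generated artifacts back from disk.
fn load_artifacts() -> Result<(Attestation, Secrets, Presentation), Box<dyn std::error::Error>> {
    // File names follow the `<example>.<kind>.tlsn` pattern used by the examples.
    let attestation: Attestation =
        bincode::deserialize(&std::fs::read("example-json.attestation.tlsn")?)?;
    let secrets: Secrets =
        bincode::deserialize(&std::fs::read("example-json.secrets.tlsn")?)?;
    let presentation: Presentation =
        bincode::deserialize(&std::fs::read("example-json.presentation.tlsn")?)?;
    Ok((attestation, secrets, presentation))
}
```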
|
||||
|
||||
## 🔐 Security Considerations
|
||||
|
||||
### Trust Model
|
||||
- ✅ **Notary Key**: The presentation includes the Notary's verifying key; the Verifier must trust this key (see the key-check sketch after this list)
- ✅ **Data Authenticity**: Cryptographically guaranteed that data came from the specified server
- ✅ **Tamper Evidence**: Any modification to the presentation will fail verification
- ⚠️ **Notary Trust**: The verifier must trust the Notary not to collude with the Prover
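In practice, checking the Notary key means comparing the key embedded in the presentation against one you already trust before calling `verify`. A minimal sketch based on the `verifying_key()` accessor used in `verify.rs`; the `TRUSTED_NOTARY_KEY_HEX` constant is a placeholder you would supply yourself:

```rust
use tlsn::attestation::{presentation::Presentation, signing::VerifyingKey, CryptoProvider};

// Hex-encoded public key of the Notary you trust (placeholder value).
const TRUSTED_NOTARY_KEY_HEX: &str = "02deadbeef...";

fn verify_trusted(presentation: Presentation) -> Result<(), Box<dyn std::error::Error>> {
    let VerifyingKey { alg, data } = presentation.verifying_key();

    // Refuse to proceed if the attestation was signed by an unknown key.
    if hex::encode(data) != TRUSTED_NOTARY_KEY_HEX {
        return Err(format!("untrusted {alg} notary key").into());
    }

    // Only now verify the presentation itself.
    let provider = CryptoProvider::default();
    let _output = presentation.verify(&provider)?;
    Ok(())
}
```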
|
||||
|
||||
### Production Deployment
|
||||
- 🏭 **Independent Notary**: Use a trusted third-party Notary service (not a local one)
|
||||
- 🔒 **Key Management**: Implement proper Notary key distribution and verification
|
||||
- 📋 **Audit Trail**: Maintain logs of notarization and verification events
|
||||
- 🔄 **Key Rotation**: Plan for Notary key updates and migration
|
||||
|
||||
> ⚠️ **Demo Notice**: This example uses a local test server and local Notary for demonstration. In production, use trusted third-party Notary services and real server endpoints.
|
||||
117
crates/examples/attestation/present.rs
Normal file
@@ -0,0 +1,117 @@
|
||||
// This example demonstrates how to build a verifiable presentation from an
|
||||
// attestation and the corresponding connection secrets. See the `prove.rs`
|
||||
// example to learn how to acquire an attestation from a Notary.
|
||||
|
||||
use clap::Parser;
|
||||
use hyper::header;
|
||||
|
||||
use tlsn::attestation::{presentation::Presentation, Attestation, CryptoProvider, Secrets};
|
||||
use tlsn_examples::ExampleType;
|
||||
use tlsn_formats::http::HttpTranscript;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Args {
|
||||
/// What data to notarize
|
||||
#[clap(default_value_t, value_enum)]
|
||||
example_type: ExampleType,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = Args::parse();
|
||||
|
||||
create_presentation(&args.example_type).await
|
||||
}
|
||||
|
||||
async fn create_presentation(example_type: &ExampleType) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let attestation_path = tlsn_examples::get_file_path(example_type, "attestation");
|
||||
let secrets_path = tlsn_examples::get_file_path(example_type, "secrets");
|
||||
|
||||
// Read attestation from disk.
|
||||
let attestation: Attestation = bincode::deserialize(&std::fs::read(attestation_path)?)?;
|
||||
|
||||
// Read secrets from disk.
|
||||
let secrets: Secrets = bincode::deserialize(&std::fs::read(secrets_path)?)?;
|
||||
|
||||
// Parse the HTTP transcript.
|
||||
let transcript = HttpTranscript::parse(secrets.transcript())?;
|
||||
|
||||
// Build a transcript proof.
|
||||
let mut builder = secrets.transcript_proof_builder();
|
||||
|
||||
// Here is where we reveal all or some of the parts we committed in `prove.rs`
|
||||
// previously.
|
||||
let request = &transcript.requests[0];
|
||||
// Reveal the structure of the request without the headers or body.
|
||||
builder.reveal_sent(&request.without_data())?;
|
||||
// Reveal the request target.
|
||||
builder.reveal_sent(&request.request.target)?;
|
||||
// Reveal all request headers except the values of User-Agent and Authorization.
|
||||
for header in &request.headers {
|
||||
if !(header
|
||||
.name
|
||||
.as_str()
|
||||
.eq_ignore_ascii_case(header::USER_AGENT.as_str())
|
||||
|| header
|
||||
.name
|
||||
.as_str()
|
||||
.eq_ignore_ascii_case(header::AUTHORIZATION.as_str()))
|
||||
{
|
||||
builder.reveal_sent(header)?;
|
||||
} else {
|
||||
builder.reveal_sent(&header.without_value())?;
|
||||
}
|
||||
}
|
||||
|
||||
// Reveal only parts of the response.
|
||||
let response = &transcript.responses[0];
|
||||
// Reveal the structure of the response without the headers or body.
|
||||
builder.reveal_recv(&response.without_data())?;
|
||||
// Reveal all response headers.
|
||||
for header in &response.headers {
|
||||
builder.reveal_recv(header)?;
|
||||
}
|
||||
|
||||
let content = &response.body.as_ref().unwrap().content;
|
||||
match content {
|
||||
tlsn_formats::http::BodyContent::Json(json) => {
|
||||
// For experimentation, reveal the entire response or just a selection.
|
||||
let reveal_all = false;
|
||||
if reveal_all {
|
||||
builder.reveal_recv(response)?;
|
||||
} else {
|
||||
builder.reveal_recv(json.get("id").unwrap())?;
|
||||
builder.reveal_recv(json.get("information.name").unwrap())?;
|
||||
builder.reveal_recv(json.get("meta.version").unwrap())?;
|
||||
}
|
||||
}
|
||||
tlsn_formats::http::BodyContent::Unknown(span) => {
|
||||
builder.reveal_recv(span)?;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let transcript_proof = builder.build()?;
|
||||
|
||||
// Use default crypto provider to build the presentation.
|
||||
let provider = CryptoProvider::default();
|
||||
|
||||
let mut builder = attestation.presentation_builder(&provider);
|
||||
|
||||
builder
|
||||
.identity_proof(secrets.identity_proof())
|
||||
.transcript_proof(transcript_proof);
|
||||
|
||||
let presentation: Presentation = builder.build()?;
|
||||
|
||||
let presentation_path = tlsn_examples::get_file_path(example_type, "presentation");
|
||||
|
||||
// Write the presentation to disk.
|
||||
std::fs::write(&presentation_path, bincode::serialize(&presentation)?)?;
|
||||
|
||||
println!("Presentation built successfully!");
|
||||
println!("The presentation has been written to `{presentation_path}`.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
396
crates/examples/attestation/prove.rs
Normal file
@@ -0,0 +1,396 @@
|
||||
// This example demonstrates how to use the Prover to acquire an attestation for
|
||||
// an HTTP request sent to a server fixture. The attestation and secrets are
|
||||
// saved to disk.
|
||||
|
||||
use std::env;
|
||||
|
||||
use clap::Parser;
|
||||
use http_body_util::Empty;
|
||||
use hyper::{body::Bytes, Request, StatusCode};
|
||||
use hyper_util::rt::TokioIo;
|
||||
use spansy::Spanned;
|
||||
use tokio::{
|
||||
io::{AsyncRead, AsyncWrite},
|
||||
sync::oneshot::{self, Receiver, Sender},
|
||||
};
|
||||
use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
|
||||
use tracing::info;
|
||||
|
||||
use tlsn::{
|
||||
attestation::{
|
||||
request::{Request as AttestationRequest, RequestConfig},
|
||||
signing::Secp256k1Signer,
|
||||
Attestation, AttestationConfig, CryptoProvider, Secrets,
|
||||
},
|
||||
config::{
|
||||
CertificateDer, PrivateKeyDer, ProtocolConfig, ProtocolConfigValidator, RootCertStore,
|
||||
},
|
||||
connection::{ConnectionInfo, HandshakeData, ServerName, TranscriptLength},
|
||||
prover::{state::Committed, ProveConfig, Prover, ProverConfig, ProverOutput, TlsConfig},
|
||||
transcript::{ContentType, TranscriptCommitConfig},
|
||||
verifier::{Verifier, VerifierConfig, VerifierOutput, VerifyConfig},
|
||||
};
|
||||
use tlsn_examples::ExampleType;
|
||||
use tlsn_formats::http::{DefaultHttpCommitter, HttpCommit, HttpTranscript};
|
||||
use tlsn_server_fixture::DEFAULT_FIXTURE_PORT;
|
||||
use tlsn_server_fixture_certs::{CA_CERT_DER, CLIENT_CERT_DER, CLIENT_KEY_DER, SERVER_DOMAIN};
|
||||
|
||||
// Setting of the application server.
|
||||
const USER_AGENT: &str = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36";
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Args {
|
||||
/// What data to notarize.
|
||||
#[clap(default_value_t, value_enum)]
|
||||
example_type: ExampleType,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
tracing_subscriber::fmt::init();
|
||||
|
||||
let args = Args::parse();
|
||||
let (uri, extra_headers) = match args.example_type {
|
||||
ExampleType::Json => ("/formats/json", vec![]),
|
||||
ExampleType::Html => ("/formats/html", vec![]),
|
||||
ExampleType::Authenticated => ("/protected", vec![("Authorization", "random_auth_token")]),
|
||||
};
|
||||
|
||||
let (notary_socket, prover_socket) = tokio::io::duplex(1 << 23);
|
||||
let (request_tx, request_rx) = oneshot::channel();
|
||||
let (attestation_tx, attestation_rx) = oneshot::channel();
|
||||
|
||||
tokio::spawn(async move {
|
||||
notary(notary_socket, request_rx, attestation_tx)
|
||||
.await
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
prover(
|
||||
prover_socket,
|
||||
request_tx,
|
||||
attestation_rx,
|
||||
uri,
|
||||
extra_headers,
|
||||
&args.example_type,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn prover<S: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
|
||||
socket: S,
|
||||
req_tx: Sender<AttestationRequest>,
|
||||
resp_rx: Receiver<Attestation>,
|
||||
uri: &str,
|
||||
extra_headers: Vec<(&str, &str)>,
|
||||
example_type: &ExampleType,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let server_host: String = env::var("SERVER_HOST").unwrap_or("127.0.0.1".into());
|
||||
let server_port: u16 = env::var("SERVER_PORT")
|
||||
.map(|port| port.parse().expect("port should be valid integer"))
|
||||
.unwrap_or(DEFAULT_FIXTURE_PORT);
|
||||
|
||||
// Create a root certificate store with the server-fixture's self-signed
|
||||
// certificate. This is only required for offline testing with the
|
||||
// server-fixture.
|
||||
let mut tls_config_builder = TlsConfig::builder();
|
||||
tls_config_builder
|
||||
.root_store(RootCertStore {
|
||||
roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
|
||||
})
|
||||
// (Optional) Set up TLS client authentication if required by the server.
|
||||
.client_auth((
|
||||
vec![CertificateDer(CLIENT_CERT_DER.to_vec())],
|
||||
PrivateKeyDer(CLIENT_KEY_DER.to_vec()),
|
||||
));
|
||||
|
||||
let tls_config = tls_config_builder.build().unwrap();
|
||||
|
||||
// Set up protocol configuration for prover.
|
||||
let mut prover_config_builder = ProverConfig::builder();
|
||||
prover_config_builder
|
||||
.server_name(ServerName::Dns(SERVER_DOMAIN.try_into().unwrap()))
|
||||
.tls_config(tls_config)
|
||||
.protocol_config(
|
||||
ProtocolConfig::builder()
|
||||
// We must configure the amount of data we expect to exchange beforehand, which will
|
||||
// be preprocessed prior to the connection. Reducing these limits will improve
|
||||
// performance.
|
||||
.max_sent_data(tlsn_examples::MAX_SENT_DATA)
|
||||
.max_recv_data(tlsn_examples::MAX_RECV_DATA)
|
||||
.build()?,
|
||||
);
|
||||
|
||||
let prover_config = prover_config_builder.build()?;
|
||||
|
||||
// Create a new prover and perform necessary setup.
|
||||
let prover = Prover::new(prover_config).setup(socket.compat()).await?;
|
||||
|
||||
// Open a TCP connection to the server.
|
||||
let client_socket = tokio::net::TcpStream::connect((server_host, server_port)).await?;
|
||||
|
||||
// Bind the prover to the server connection.
|
||||
// The returned `mpc_tls_connection` is an MPC TLS connection to the server: all
|
||||
// data written to/read from it will be encrypted/decrypted using MPC with
|
||||
// the notary.
|
||||
let (mpc_tls_connection, prover_fut) = prover.connect(client_socket.compat()).await?;
|
||||
let mpc_tls_connection = TokioIo::new(mpc_tls_connection.compat());
|
||||
|
||||
// Spawn the prover task to be run concurrently in the background.
|
||||
let prover_task = tokio::spawn(prover_fut);
|
||||
|
||||
// Attach the hyper HTTP client to the connection.
|
||||
let (mut request_sender, connection) =
|
||||
hyper::client::conn::http1::handshake(mpc_tls_connection).await?;
|
||||
|
||||
// Spawn the HTTP task to be run concurrently in the background.
|
||||
tokio::spawn(connection);
|
||||
|
||||
// Build a simple HTTP request with common headers.
|
||||
let request_builder = Request::builder()
|
||||
.uri(uri)
|
||||
.header("Host", SERVER_DOMAIN)
|
||||
.header("Accept", "*/*")
|
||||
// Using "identity" instructs the Server not to use compression for its HTTP response.
|
||||
// TLSNotary tooling does not support compression.
|
||||
.header("Accept-Encoding", "identity")
|
||||
.header("Connection", "close")
|
||||
.header("User-Agent", USER_AGENT);
|
||||
let mut request_builder = request_builder;
|
||||
for (key, value) in extra_headers {
|
||||
request_builder = request_builder.header(key, value);
|
||||
}
|
||||
let request = request_builder.body(Empty::<Bytes>::new())?;
|
||||
|
||||
info!("Starting an MPC TLS connection with the server");
|
||||
|
||||
// Send the request to the server and wait for the response.
|
||||
let response = request_sender.send_request(request).await?;
|
||||
|
||||
info!("Got a response from the server: {}", response.status());
|
||||
|
||||
assert!(response.status() == StatusCode::OK);
|
||||
|
||||
// The prover task should be done now, so we can await it.
|
||||
let mut prover = prover_task.await??;
|
||||
|
||||
// Parse the HTTP transcript.
|
||||
let transcript = HttpTranscript::parse(prover.transcript())?;
|
||||
|
||||
let body_content = &transcript.responses[0].body.as_ref().unwrap().content;
|
||||
let body = String::from_utf8_lossy(body_content.span().as_bytes());
|
||||
|
||||
match body_content {
|
||||
tlsn_formats::http::BodyContent::Json(_json) => {
|
||||
let parsed = serde_json::from_str::<serde_json::Value>(&body)?;
|
||||
info!("{}", serde_json::to_string_pretty(&parsed)?);
|
||||
}
|
||||
tlsn_formats::http::BodyContent::Unknown(_span) => {
|
||||
info!("{}", &body);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Commit to the transcript.
|
||||
let mut builder = TranscriptCommitConfig::builder(prover.transcript());
|
||||
|
||||
// This commits to various parts of the transcript separately (e.g. request
|
||||
// headers, response headers, response body and more). See https://docs.tlsnotary.org//protocol/commit_strategy.html
|
||||
// for other strategies that can be used to generate commitments.
|
||||
DefaultHttpCommitter::default().commit_transcript(&mut builder, &transcript)?;
|
||||
|
||||
let transcript_commit = builder.build()?;
|
||||
|
||||
// Build an attestation request.
|
||||
let mut builder = RequestConfig::builder();
|
||||
|
||||
builder.transcript_commit(transcript_commit);
|
||||
|
||||
// Optionally, add an extension to the attestation if the notary supports it.
|
||||
// builder.extension(Extension {
|
||||
// id: b"example.name".to_vec(),
|
||||
// value: b"Bobert".to_vec(),
|
||||
// });
|
||||
|
||||
let request_config = builder.build()?;
|
||||
|
||||
let (attestation, secrets) = notarize(&mut prover, &request_config, req_tx, resp_rx).await?;
|
||||
|
||||
// Write the attestation to disk.
|
||||
let attestation_path = tlsn_examples::get_file_path(example_type, "attestation");
|
||||
let secrets_path = tlsn_examples::get_file_path(example_type, "secrets");
|
||||
|
||||
tokio::fs::write(&attestation_path, bincode::serialize(&attestation)?).await?;
|
||||
|
||||
// Write the secrets to disk.
|
||||
tokio::fs::write(&secrets_path, bincode::serialize(&secrets)?).await?;
|
||||
|
||||
println!("Notarization completed successfully!");
|
||||
println!(
|
||||
"The attestation has been written to `{attestation_path}` and the \
|
||||
corresponding secrets to `{secrets_path}`."
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn notarize(
|
||||
prover: &mut Prover<Committed>,
|
||||
config: &RequestConfig,
|
||||
request_tx: Sender<AttestationRequest>,
|
||||
attestation_rx: Receiver<Attestation>,
|
||||
) -> Result<(Attestation, Secrets), Box<dyn std::error::Error>> {
|
||||
let mut builder = ProveConfig::builder(prover.transcript());
|
||||
|
||||
if let Some(config) = config.transcript_commit() {
|
||||
builder.transcript_commit(config.clone());
|
||||
}
|
||||
|
||||
let disclosure_config = builder.build()?;
|
||||
|
||||
let ProverOutput {
|
||||
transcript_commitments,
|
||||
transcript_secrets,
|
||||
..
|
||||
} = prover.prove(disclosure_config).await?;
|
||||
|
||||
// Build an attestation request.
|
||||
let mut builder = AttestationRequest::builder(config);
|
||||
|
||||
builder
|
||||
.server_name(ServerName::Dns(SERVER_DOMAIN.try_into().unwrap()))
|
||||
.handshake_data(HandshakeData {
|
||||
certs: prover
|
||||
.tls_transcript()
|
||||
.server_cert_chain()
|
||||
.expect("server cert chain is present")
|
||||
.to_vec(),
|
||||
sig: prover
|
||||
.tls_transcript()
|
||||
.server_signature()
|
||||
.expect("server signature is present")
|
||||
.clone(),
|
||||
binding: prover.tls_transcript().certificate_binding().clone(),
|
||||
})
|
||||
.transcript(prover.transcript().clone())
|
||||
.transcript_commitments(transcript_secrets, transcript_commitments);
|
||||
|
||||
let (request, secrets) = builder.build(&CryptoProvider::default())?;
|
||||
|
||||
// Send attestation request to notary.
|
||||
request_tx
|
||||
.send(request.clone())
|
||||
.map_err(|_| "notary is not receiving attestation request".to_string())?;
|
||||
|
||||
// Receive attestation from notary.
|
||||
let attestation = attestation_rx
|
||||
.await
|
||||
.map_err(|err| format!("notary did not respond with attestation: {err}"))?;
|
||||
|
||||
// Check the attestation is consistent with the Prover's view.
|
||||
request.validate(&attestation)?;
|
||||
|
||||
Ok((attestation, secrets))
|
||||
}
|
||||
|
||||
async fn notary<S: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
|
||||
socket: S,
|
||||
request_rx: Receiver<AttestationRequest>,
|
||||
attestation_tx: Sender<Attestation>,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Set up Verifier.
|
||||
let config_validator = ProtocolConfigValidator::builder()
|
||||
.max_sent_data(tlsn_examples::MAX_SENT_DATA)
|
||||
.max_recv_data(tlsn_examples::MAX_RECV_DATA)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
// Create a root certificate store with the server-fixture's self-signed
|
||||
// certificate. This is only required for offline testing with the
|
||||
// server-fixture.
|
||||
let verifier_config = VerifierConfig::builder()
|
||||
.root_store(RootCertStore {
|
||||
roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
|
||||
})
|
||||
.protocol_config_validator(config_validator)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let mut verifier = Verifier::new(verifier_config)
|
||||
.setup(socket.compat())
|
||||
.await?
|
||||
.run()
|
||||
.await?;
|
||||
|
||||
let VerifierOutput {
|
||||
transcript_commitments,
|
||||
..
|
||||
} = verifier.verify(&VerifyConfig::default()).await?;
|
||||
|
||||
let tls_transcript = verifier.tls_transcript().clone();
|
||||
|
||||
verifier.close().await?;
|
||||
|
||||
let sent_len = tls_transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if let ContentType::ApplicationData = record.typ {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum::<usize>();
|
||||
|
||||
let recv_len = tls_transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if let ContentType::ApplicationData = record.typ {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum::<usize>();
|
||||
|
||||
// Receive attestation request from prover.
|
||||
let request = request_rx.await?;
|
||||
|
||||
// Load a dummy signing key.
|
||||
let signing_key = k256::ecdsa::SigningKey::from_bytes(&[1u8; 32].into())?;
|
||||
let signer = Box::new(Secp256k1Signer::new(&signing_key.to_bytes())?);
|
||||
let mut provider = CryptoProvider::default();
|
||||
provider.signer.set_signer(signer);
|
||||
|
||||
// Build an attestation.
|
||||
let mut att_config_builder = AttestationConfig::builder();
|
||||
att_config_builder.supported_signature_algs(Vec::from_iter(provider.signer.supported_algs()));
|
||||
let att_config = att_config_builder.build()?;
|
||||
|
||||
let mut builder = Attestation::builder(&att_config).accept_request(request)?;
|
||||
builder
|
||||
.connection_info(ConnectionInfo {
|
||||
time: tls_transcript.time(),
|
||||
version: (*tls_transcript.version()),
|
||||
transcript_length: TranscriptLength {
|
||||
sent: sent_len as u32,
|
||||
received: recv_len as u32,
|
||||
},
|
||||
})
|
||||
.server_ephemeral_key(tls_transcript.server_ephemeral_key().clone())
|
||||
.transcript_commitments(transcript_commitments);
|
||||
|
||||
let attestation = builder.build(&provider)?;
|
||||
|
||||
// Send attestation to prover.
|
||||
attestation_tx
|
||||
.send(attestation)
|
||||
.map_err(|_| "prover is not receiving attestation".to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
96
crates/examples/attestation/verify.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
// This example demonstrates how to verify a presentation. See `present.rs` for
|
||||
// an example of how to build a presentation from an attestation and connection
|
||||
// secrets.
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
use tlsn::{
|
||||
attestation::{
|
||||
presentation::{Presentation, PresentationOutput},
|
||||
signing::VerifyingKey,
|
||||
CryptoProvider,
|
||||
},
|
||||
config::{CertificateDer, RootCertStore},
|
||||
verifier::ServerCertVerifier,
|
||||
};
|
||||
use tlsn_examples::ExampleType;
|
||||
use tlsn_server_fixture_certs::CA_CERT_DER;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(version, about, long_about = None)]
|
||||
struct Args {
|
||||
/// What data to notarize.
|
||||
#[clap(default_value_t, value_enum)]
|
||||
example_type: ExampleType,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = Args::parse();
|
||||
|
||||
verify_presentation(&args.example_type).await
|
||||
}
|
||||
|
||||
async fn verify_presentation(example_type: &ExampleType) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Read the presentation from disk.
|
||||
let presentation_path = tlsn_examples::get_file_path(example_type, "presentation");
|
||||
|
||||
let presentation: Presentation = bincode::deserialize(&std::fs::read(presentation_path)?)?;
|
||||
|
||||
// Create a crypto provider accepting the server-fixture's self-signed
|
||||
// root certificate.
|
||||
//
|
||||
// This is only required for offline testing with the server-fixture. In
|
||||
// production, use `CryptoProvider::default()` instead.
|
||||
let root_cert_store = RootCertStore {
|
||||
roots: vec![CertificateDer(CA_CERT_DER.to_vec())],
|
||||
};
|
||||
let crypto_provider = CryptoProvider {
|
||||
cert: ServerCertVerifier::new(&root_cert_store)?,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let VerifyingKey {
|
||||
alg,
|
||||
data: key_data,
|
||||
} = presentation.verifying_key();
|
||||
|
||||
println!(
|
||||
"Verifying presentation with {alg} key: {}\n\n**Ask yourself, do you trust this key?**\n",
|
||||
hex::encode(key_data)
|
||||
);
|
||||
|
||||
// Verify the presentation.
|
||||
let PresentationOutput {
|
||||
server_name,
|
||||
connection_info,
|
||||
transcript,
|
||||
// extensions, // Optionally, verify any custom extensions from prover/notary.
|
||||
..
|
||||
} = presentation.verify(&crypto_provider).unwrap();
|
||||
|
||||
// The time at which the connection was started.
|
||||
let time = chrono::DateTime::UNIX_EPOCH + Duration::from_secs(connection_info.time);
|
||||
let server_name = server_name.unwrap();
|
||||
let mut partial_transcript = transcript.unwrap();
|
||||
// Set the unauthenticated bytes so they are distinguishable.
|
||||
partial_transcript.set_unauthed(b'X');
|
||||
|
||||
let sent = String::from_utf8_lossy(partial_transcript.sent_unsafe());
|
||||
let recv = String::from_utf8_lossy(partial_transcript.received_unsafe());
|
||||
|
||||
println!("-------------------------------------------------------------------");
|
||||
println!(
|
||||
"Successfully verified that the data below came from a session with {server_name} at {time}.",
|
||||
);
|
||||
println!("Note that the data which the Prover chose not to disclose are shown as X.\n");
|
||||
println!("Data sent:\n");
|
||||
println!("{sent}\n");
|
||||
println!("Data received:\n");
|
||||
println!("{recv}\n");
|
||||
println!("-------------------------------------------------------------------");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -10,7 +10,6 @@ use tokio::io::{AsyncRead, AsyncWrite};
|
||||
use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
|
||||
use tracing::instrument;
|
||||
|
||||
use tls_server_fixture::CA_CERT_DER;
|
||||
use tlsn::{
|
||||
config::{CertificateDer, ProtocolConfig, ProtocolConfigValidator, RootCertStore},
|
||||
connection::ServerName,
|
||||
@@ -19,7 +18,7 @@ use tlsn::{
|
||||
verifier::{Verifier, VerifierConfig, VerifierOutput, VerifyConfig},
|
||||
};
|
||||
use tlsn_server_fixture::DEFAULT_FIXTURE_PORT;
|
||||
use tlsn_server_fixture_certs::SERVER_DOMAIN;
|
||||
use tlsn_server_fixture_certs::{CA_CERT_DER, SERVER_DOMAIN};
|
||||
|
||||
const SECRET: &str = "TLSNotary's private key 🤡";
|
||||
|
||||
@@ -175,7 +174,7 @@ async fn prover<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(
|
||||
|
||||
let config = builder.build().unwrap();
|
||||
|
||||
prover.prove(&config).await.unwrap();
|
||||
prover.prove(config).await.unwrap();
|
||||
prover.close().await.unwrap();
|
||||
}
|
||||
|
||||
|
||||
@@ -7,12 +7,12 @@ docker build --pull -t tlsn-bench . -f ./crates/harness/harness.Dockerfile
|
||||
|
||||
Next run the benches with:
|
||||
```
|
||||
docker run -it --privileged -v ./crates/harness/:/benches tlsn-bench bash -c "runner setup; runner bench"
|
||||
docker run -it --privileged -v $(pwd)/crates/harness/:/benches tlsn-bench bash -c "runner setup; runner bench"
|
||||
```
|
||||
The `--privileged` parameter is required because this test bench needs permission to create networks with certain parameters.
|
||||
|
||||
To run the benches in a browser run:
|
||||
```
|
||||
docker run -it --privileged -v ./crates/harness/:/benches tlsn-bench bash -c "cd /; runner setup; runner --target browser bench"
|
||||
docker run -it --privileged -v $(pwd)/crates/harness/:/benches tlsn-bench bash -c "runner setup; runner --target browser bench"
|
||||
```
|
||||
|
||||
|
||||
@@ -8,9 +8,6 @@ publish = false
|
||||
name = "harness_executor"
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[package.metadata.wasm-pack.profile.custom]
|
||||
wasm-opt = ["-O3"]
|
||||
|
||||
[dependencies]
|
||||
tlsn-harness-core = { workspace = true }
|
||||
tlsn = { workspace = true }
|
||||
|
||||
@@ -93,7 +93,7 @@ pub async fn bench_prover(provider: &IoProvider, config: &Bench) -> Result<Prove
|
||||
|
||||
let config = builder.build()?;
|
||||
|
||||
prover.prove(&config).await?;
|
||||
prover.prove(config).await?;
|
||||
prover.close().await?;
|
||||
|
||||
let time_total = time_start.elapsed().as_millis();
|
||||
|
||||
@@ -107,7 +107,7 @@ async fn prover(provider: &IoProvider) {
|
||||
|
||||
let config = builder.build().unwrap();
|
||||
|
||||
prover.prove(&config).await.unwrap();
|
||||
prover.prove(config).await.unwrap();
|
||||
prover.close().await.unwrap();
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ pub static SERVER_CERT_DER: &[u8] = include_bytes!("tls/test_server_cert.der");
|
||||
pub static SERVER_KEY_DER: &[u8] = include_bytes!("tls/test_server_private_key.der");
|
||||
/// The domain name bound to the server certificate.
|
||||
pub static SERVER_DOMAIN: &str = "test-server.io";
|
||||
/// A client certificate fixture PEM-encoded.
|
||||
pub static CLIENT_CERT: &[u8] = include_bytes!("tls/client_cert.pem");
|
||||
/// A client private key fixture PEM-encoded.
|
||||
pub static CLIENT_KEY: &[u8] = include_bytes!("tls/client_cert.key");
|
||||
/// A client certificate fixture.
|
||||
pub static CLIENT_CERT_DER: &[u8] = include_bytes!("tls/client_cert.der");
|
||||
/// A client private key fixture.
|
||||
pub static CLIENT_KEY_DER: &[u8] = include_bytes!("tls/client_cert_private_key.der");
|
||||
|
||||
@@ -33,5 +33,8 @@ openssl req -new -key client_cert.key -out client_cert.csr -subj "/C=US/ST=State
|
||||
# Sign the CSR with the root CA to create the end entity certificate (100 years validity)
|
||||
openssl x509 -req -in client_cert.csr -CA root_ca.crt -CAkey root_ca.key -CAcreateserial -out client_cert.crt -days 36525 -sha256 -extfile openssl.cnf -extensions v3_req
|
||||
|
||||
# Convert the end entity certificate to PEM format
|
||||
openssl x509 -in client_cert.crt -outform pem -out client_cert.pem
|
||||
# Convert the end entity certificate to DER format
|
||||
openssl x509 -in client_cert.crt -outform der -out client_cert.der
|
||||
|
||||
# Convert the end entity certificate private key to DER format
|
||||
openssl pkcs8 -topk8 -inform PEM -outform DER -in client_cert.key -out client_cert_private_key.der -nocrypt
|
||||
|
||||
BIN crates/server-fixture/certs/src/tls/client_cert.der (new file, binary file not shown)
@@ -1,23 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID2jCCAsKgAwIBAgIUG5JKIz/fbUDdpX1+TAw33mS+mWwwDQYJKoZIhvcNAQEL
|
||||
BQAwZTELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVN0YXRlMQ0wCwYDVQQHDARDaXR5
|
||||
MRIwEAYDVQQKDAl0bHNub3RhcnkxCzAJBgNVBAsMAklUMRYwFAYDVQQDDA10bHNu
|
||||
b3Rhcnkub3JnMCAXDTI1MDYxMDA3MTYxOVoYDzIxMjUwNjExMDcxNjE5WjBwMQsw
|
||||
CQYDVQQGEwJVUzEOMAwGA1UECAwFU3RhdGUxDTALBgNVBAcMBENpdHkxEjAQBgNV
|
||||
BAoMCXRsc25vdGFyeTELMAkGA1UECwwCSVQxITAfBgNVBAMMGGNsaWVudC1hdXRo
|
||||
ZW50aWNhdGlvbi5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANsx
|
||||
Tf3JqWdAMGFzOwbO64vJ5fV/IPSrdBwKY/Fjef0REZC1Z/gGzmp0nnlaHZzZLtLS
|
||||
Z9kyfdUrL6PuG3HfP6wxhiaBpUay+1O9KZsuhkKSif4KMPjlYKm+oZLvD12Qj62r
|
||||
TFlui4+1wKgPrTGUUO6SQdoRxKU4nzuzRYRLyzDi0pO5YD9RLaruBj+IDEOVRW7d
|
||||
1uleheVMg61lbQle5Fo0c4I0Sif96Z+7aotj3j9F2lK52jaLpA1kvC3oLajfAT30
|
||||
BzpNLZTnWa1b5PRRxkuOYUXeNr+aNO90fL80K1YeIlea0f7qmKL9uDLtQbrqIJv5
|
||||
tBaf8Uf0UghtBm//kx8CAwEAAaN1MHMwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAw
|
||||
GQYDVR0RBBIwEIIOdGVzdC1zZXJ2ZXIuaW8wHQYDVR0OBBYEFH1qCgl04Y5i75aF
|
||||
cT0V3fn9423iMB8GA1UdIwQYMBaAFMmBciQ/DZlWROxwXH8IplmuHKbNMA0GCSqG
|
||||
SIb3DQEBCwUAA4IBAQB8Gvj3dsENAn0u6PS9uTFm46MaA9Dm+Fa+KbXuEHp3ADs2
|
||||
7m4Hb3eojM3yae93/v/stYn8IVcB5zWmMvg6WA6obe86muuB+SZeMC/AnSD8P4pm
|
||||
AzO3eTSR1s5Dr4O0qVPd2VP36e7NWXfojQg4W9t9UQtC64bVOaCDQvbe0xeWT+AR
|
||||
w0y7GwnuCr/8bisqQZS8+Er1JU3zxBEjQwMiMxlOWHnYtjGeA6pdWaeLp0E6Ss3x
|
||||
ecsTjmrLt6oY+BdfRSyWU4qVEOpuZLCeikUWXFzpxRX7NWYRtJUfVnoRWwuD2lzG
|
||||
LybzCW2qxwHJe4biGIfWKQ7Ne7DrwQwFxVRJxCm0
|
||||
-----END CERTIFICATE-----
|
||||
BIN crates/server-fixture/certs/src/tls/client_cert_private_key.der (new file, binary file not shown)
@@ -886,6 +886,7 @@ async fn client_error_is_sticky() {
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[allow(clippy::no_effect)]
|
||||
#[allow(clippy::unnecessary_operation)]
|
||||
async fn client_is_send() {
|
||||
let (client, _) = make_pair(KeyType::Rsa).await;
|
||||
|
||||
@@ -415,7 +415,8 @@ pub(crate) fn pki_error(error: webpki::Error) -> Error {
|
||||
match error {
|
||||
BadDer | BadDerTime => Error::InvalidCertificateEncoding,
|
||||
InvalidSignatureForPublicKey => Error::InvalidCertificateSignature,
|
||||
UnsupportedSignatureAlgorithm | UnsupportedSignatureAlgorithmForPublicKey => {
|
||||
UnsupportedSignatureAlgorithmContext(_)
|
||||
| UnsupportedSignatureAlgorithmForPublicKeyContext(_) => {
|
||||
Error::InvalidCertificateSignatureType
|
||||
}
|
||||
e => Error::InvalidCertificateData(format!("invalid peer certificate: {e}")),
|
||||
@@ -475,12 +476,17 @@ fn verify_sig_using_any_alg(
|
||||
// we try them all.
|
||||
for alg in algs {
|
||||
match cert.verify_signature(*alg, message, sig) {
|
||||
Err(webpki::Error::UnsupportedSignatureAlgorithmForPublicKey) => continue,
|
||||
Err(webpki::Error::UnsupportedSignatureAlgorithmForPublicKeyContext(_)) => continue,
|
||||
res => return res,
|
||||
}
|
||||
}
|
||||
|
||||
Err(webpki::Error::UnsupportedSignatureAlgorithmForPublicKey)
|
||||
Err(webpki::Error::UnsupportedSignatureAlgorithmContext(
|
||||
webpki::UnsupportedSignatureAlgorithmContext {
|
||||
signature_algorithm_id: vec![],
|
||||
supported_algorithms: algs.iter().map(|alg| alg.signature_alg_id()).collect(),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn verify_signed_struct(
|
||||
|
||||
@@ -40,6 +40,9 @@ mpz-ot = { workspace = true }
|
||||
mpz-vm-core = { workspace = true }
|
||||
mpz-zk = { workspace = true }
|
||||
|
||||
aes = { workspace = true }
|
||||
cipher-crypto = { workspace = true }
|
||||
ctr = { workspace = true }
|
||||
derive_builder = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
opaque-debug = { workspace = true }
|
||||
@@ -57,6 +60,8 @@ rangeset = { workspace = true }
|
||||
webpki-roots = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
lipsum = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
rstest = { workspace = true }
|
||||
tlsn-server-fixture = { workspace = true }
|
||||
tlsn-server-fixture-certs = { workspace = true }
|
||||
@@ -65,3 +70,5 @@ tokio-util = { workspace = true, features = ["compat"] }
|
||||
hyper = { workspace = true, features = ["client"] }
|
||||
http-body-util = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter"] }
|
||||
tlsn-core = { workspace = true, features = ["fixtures"] }
|
||||
mpz-ot = { workspace = true, features = ["ideal"] }
|
||||
|
||||
File diff suppressed because it is too large
708 crates/tlsn/src/commit/auth.rs (new file)
@@ -0,0 +1,708 @@
|
||||
//! Authentication of the transcript plaintext and creation of the transcript
|
||||
//! references.
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
use mpz_core::bitvec::BitVec;
|
||||
use mpz_memory_core::{DecodeError, DecodeFutureTyped, MemoryExt, binary::Binary};
|
||||
use mpz_vm_core::Vm;
|
||||
use rangeset::{Disjoint, RangeSet, Union, UnionMut};
|
||||
use tlsn_core::{
|
||||
hash::HashAlgId,
|
||||
transcript::{ContentType, Direction, PartialTranscript, Record, TlsTranscript},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
Role,
|
||||
commit::transcript::TranscriptRefs,
|
||||
zk_aes_ctr::{ZkAesCtr, ZkAesCtrError},
|
||||
};
|
||||
|
||||
/// Transcript Authenticator.
|
||||
pub(crate) struct Authenticator {
|
||||
encoding: Index,
|
||||
hash: Index,
|
||||
decoding: Index,
|
||||
proving: Index,
|
||||
}
|
||||
|
||||
impl Authenticator {
|
||||
/// Creates a new authenticator.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `encoding` - Ranges for encoding commitments.
|
||||
/// * `hash` - Ranges for hash commitments.
|
||||
/// * `partial` - The partial transcript.
|
||||
pub(crate) fn new<'a>(
|
||||
encoding: impl Iterator<Item = &'a (Direction, RangeSet<usize>)>,
|
||||
hash: impl Iterator<Item = &'a (Direction, RangeSet<usize>, HashAlgId)>,
|
||||
partial: Option<&PartialTranscript>,
|
||||
) -> Self {
|
||||
// Compute encoding index.
|
||||
let mut encoding_sent = RangeSet::default();
|
||||
let mut encoding_recv = RangeSet::default();
|
||||
|
||||
for (d, idx) in encoding {
|
||||
match d {
|
||||
Direction::Sent => encoding_sent.union_mut(idx),
|
||||
Direction::Received => encoding_recv.union_mut(idx),
|
||||
}
|
||||
}
|
||||
|
||||
let encoding = Index::new(encoding_sent, encoding_recv);
|
||||
|
||||
// Compute hash index.
|
||||
let mut hash_sent = RangeSet::default();
|
||||
let mut hash_recv = RangeSet::default();
|
||||
|
||||
for (d, idx, _) in hash {
|
||||
match d {
|
||||
Direction::Sent => hash_sent.union_mut(idx),
|
||||
Direction::Received => hash_recv.union_mut(idx),
|
||||
}
|
||||
}
|
||||
|
||||
let hash = Index {
|
||||
sent: hash_sent,
|
||||
recv: hash_recv,
|
||||
};
|
||||
|
||||
// Compute decoding index.
|
||||
let mut decoding_sent = RangeSet::default();
|
||||
let mut decoding_recv = RangeSet::default();
|
||||
|
||||
if let Some(partial) = partial {
|
||||
decoding_sent.union_mut(partial.sent_authed());
|
||||
decoding_recv.union_mut(partial.received_authed());
|
||||
}
|
||||
|
||||
let decoding = Index::new(decoding_sent, decoding_recv);
|
||||
|
||||
// Compute proving index.
|
||||
let mut proving_sent = RangeSet::default();
|
||||
let mut proving_recv = RangeSet::default();
|
||||
|
||||
proving_sent.union_mut(decoding.sent());
|
||||
proving_sent.union_mut(encoding.sent());
|
||||
proving_sent.union_mut(hash.sent());
|
||||
|
||||
proving_recv.union_mut(decoding.recv());
|
||||
proving_recv.union_mut(encoding.recv());
|
||||
proving_recv.union_mut(hash.recv());
|
||||
|
||||
let proving = Index::new(proving_sent, proving_recv);
|
||||
|
||||
Self {
|
||||
encoding,
|
||||
hash,
|
||||
decoding,
|
||||
proving,
|
||||
}
|
||||
}
|
||||
|
||||
/// Authenticates the sent plaintext, returning a proof of encryption and
|
||||
/// writes the plaintext VM references to the transcript references.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `zk_aes_sent` - ZK AES Cipher for sent traffic.
|
||||
/// * `transcript` - The TLS transcript.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn auth_sent(
|
||||
&mut self,
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
zk_aes_sent: &mut ZkAesCtr,
|
||||
transcript: &TlsTranscript,
|
||||
transcript_refs: &mut TranscriptRefs,
|
||||
) -> Result<RecordProof, AuthError> {
|
||||
let missing_index = transcript_refs.compute_missing(Direction::Sent, self.proving.sent());
|
||||
|
||||
// If there is nothing new to prove, return early.
|
||||
if missing_index == RangeSet::default() {
|
||||
return Ok(RecordProof::default());
|
||||
}
|
||||
|
||||
let sent = transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData);
|
||||
|
||||
authenticate(
|
||||
vm,
|
||||
zk_aes_sent,
|
||||
Direction::Sent,
|
||||
sent,
|
||||
transcript_refs,
|
||||
missing_index,
|
||||
)
|
||||
}
|
||||
|
||||
/// Authenticates the received plaintext, returning a proof of encryption
|
||||
/// and writes the plaintext VM references to the transcript references.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `zk_aes_recv` - ZK AES Cipher for received traffic.
|
||||
/// * `transcript` - The TLS transcript.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn auth_recv(
|
||||
&mut self,
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
zk_aes_recv: &mut ZkAesCtr,
|
||||
transcript: &TlsTranscript,
|
||||
transcript_refs: &mut TranscriptRefs,
|
||||
) -> Result<RecordProof, AuthError> {
|
||||
let decoding_recv = self.decoding.recv();
|
||||
let fully_decoded = decoding_recv.union(&transcript_refs.decoded(Direction::Received));
|
||||
let full_range = 0..transcript_refs.max_len(Direction::Received);
|
||||
|
||||
// If only decoding ranges are requested and the parts to be decoded complete
// the full received transcript, no separate authentication is needed:
// `crate::commit::decode::verify_transcript` will authenticate the data using
// the server write key and IV.
|
||||
if decoding_recv == self.proving.recv() && fully_decoded == full_range {
|
||||
return Ok(RecordProof::default());
|
||||
}
|
||||
|
||||
let missing_index =
|
||||
transcript_refs.compute_missing(Direction::Received, self.proving.recv());
|
||||
|
||||
// If there is nothing new to prove, return early.
|
||||
if missing_index == RangeSet::default() {
|
||||
return Ok(RecordProof::default());
|
||||
}
|
||||
|
||||
let recv = transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData);
|
||||
|
||||
authenticate(
|
||||
vm,
|
||||
zk_aes_recv,
|
||||
Direction::Received,
|
||||
recv,
|
||||
transcript_refs,
|
||||
missing_index,
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns the sent and received encoding ranges.
|
||||
pub(crate) fn encoding(&self) -> (&RangeSet<usize>, &RangeSet<usize>) {
|
||||
(self.encoding.sent(), self.encoding.recv())
|
||||
}
|
||||
|
||||
/// Returns the sent and received hash ranges.
|
||||
pub(crate) fn hash(&self) -> (&RangeSet<usize>, &RangeSet<usize>) {
|
||||
(self.hash.sent(), self.hash.recv())
|
||||
}
|
||||
|
||||
/// Returns the sent and received decoding ranges.
|
||||
pub(crate) fn decoding(&self) -> (&RangeSet<usize>, &RangeSet<usize>) {
|
||||
(self.decoding.sent(), self.decoding.recv())
|
||||
}
|
||||
}
|
||||
|
||||
/// Authenticates parts of the transcript in zk.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `zk_aes` - ZK AES Cipher.
|
||||
/// * `direction` - The direction of the application data.
|
||||
/// * `app_data` - The application data.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
/// * `missing_index` - The index which needs to be proven.
|
||||
fn authenticate<'a>(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
zk_aes: &mut ZkAesCtr,
|
||||
direction: Direction,
|
||||
app_data: impl Iterator<Item = &'a Record>,
|
||||
transcript_refs: &mut TranscriptRefs,
|
||||
missing_index: RangeSet<usize>,
|
||||
) -> Result<RecordProof, AuthError> {
|
||||
let mut record_idx = Range::default();
|
||||
let mut ciphertexts = Vec::new();
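// Walk the application-data records in order, tracking each record's byte
// range within the transcript; records disjoint from the missing index are
// skipped, all others are proven in ZK and their ciphertexts decoded for
// later verification.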
|
||||
|
||||
for record in app_data {
|
||||
let record_len = record.ciphertext.len();
|
||||
record_idx.end += record_len;
|
||||
|
||||
if missing_index.is_disjoint(&record_idx) {
|
||||
record_idx.start += record_len;
|
||||
continue;
|
||||
}
|
||||
|
||||
let (plaintext_ref, ciphertext_ref) =
|
||||
zk_aes.encrypt(vm, record.explicit_nonce.clone(), record.ciphertext.len())?;
|
||||
|
||||
if let Role::Prover = zk_aes.role() {
|
||||
let Some(plaintext) = record.plaintext.clone() else {
|
||||
return Err(AuthError(ErrorRepr::MissingPlainText));
|
||||
};
|
||||
|
||||
vm.assign(plaintext_ref, plaintext).map_err(AuthError::vm)?;
|
||||
}
|
||||
vm.commit(plaintext_ref).map_err(AuthError::vm)?;
|
||||
|
||||
let ciphertext = vm.decode(ciphertext_ref).map_err(AuthError::vm)?;
|
||||
|
||||
transcript_refs.add(direction, &record_idx, plaintext_ref);
|
||||
ciphertexts.push((ciphertext, record.ciphertext.clone()));
|
||||
|
||||
record_idx.start += record_len;
|
||||
}
|
||||
|
||||
let proof = RecordProof { ciphertexts };
|
||||
Ok(proof)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
struct Index {
|
||||
sent: RangeSet<usize>,
|
||||
recv: RangeSet<usize>,
|
||||
}
|
||||
|
||||
impl Index {
|
||||
fn new(sent: RangeSet<usize>, recv: RangeSet<usize>) -> Self {
|
||||
Self { sent, recv }
|
||||
}
|
||||
|
||||
fn sent(&self) -> &RangeSet<usize> {
|
||||
&self.sent
|
||||
}
|
||||
|
||||
fn recv(&self) -> &RangeSet<usize> {
|
||||
&self.recv
|
||||
}
|
||||
}
|
||||
|
||||
/// Proof of encryption.
|
||||
#[derive(Debug, Default)]
|
||||
#[must_use]
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub(crate) struct RecordProof {
|
||||
ciphertexts: Vec<(DecodeFutureTyped<BitVec, Vec<u8>>, Vec<u8>)>,
|
||||
}
|
||||
|
||||
impl RecordProof {
|
||||
/// Verifies the proof.
|
||||
pub(crate) fn verify(self) -> Result<(), AuthError> {
|
||||
let Self { ciphertexts } = self;
|
||||
|
||||
for (mut ciphertext, expected) in ciphertexts {
|
||||
let ciphertext = ciphertext
|
||||
.try_recv()
|
||||
.map_err(AuthError::vm)?
|
||||
.ok_or(AuthError(ErrorRepr::MissingDecoding))?;
|
||||
|
||||
if ciphertext != expected {
|
||||
return Err(AuthError(ErrorRepr::InvalidCiphertext));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Error for [`Authenticator`].
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("transcript authentication error: {0}")]
|
||||
pub(crate) struct AuthError(#[source] ErrorRepr);
|
||||
|
||||
impl AuthError {
|
||||
fn vm<E>(err: E) -> Self
|
||||
where
|
||||
E: Into<Box<dyn std::error::Error + Send + Sync + 'static>>,
|
||||
{
|
||||
Self(ErrorRepr::Vm(err.into()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
enum ErrorRepr {
|
||||
#[error("vm error: {0}")]
|
||||
Vm(Box<dyn std::error::Error + Send + Sync + 'static>),
|
||||
#[error("zk-aes error: {0}")]
|
||||
ZkAes(ZkAesCtrError),
|
||||
#[error("decode error: {0}")]
|
||||
Decode(DecodeError),
|
||||
#[error("plaintext is missing in record")]
|
||||
MissingPlainText,
|
||||
#[error("decoded value is missing")]
|
||||
MissingDecoding,
|
||||
#[error("invalid ciphertext")]
|
||||
InvalidCiphertext,
|
||||
}
|
||||
|
||||
impl From<ZkAesCtrError> for AuthError {
|
||||
fn from(value: ZkAesCtrError) -> Self {
|
||||
Self(ErrorRepr::ZkAes(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DecodeError> for AuthError {
|
||||
fn from(value: DecodeError) -> Self {
|
||||
Self(ErrorRepr::Decode(value))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::{
|
||||
Role,
|
||||
commit::{
|
||||
auth::{Authenticator, ErrorRepr},
|
||||
transcript::TranscriptRefs,
|
||||
},
|
||||
zk_aes_ctr::ZkAesCtr,
|
||||
};
|
||||
use lipsum::{LIBER_PRIMUS, lipsum};
|
||||
use mpz_common::context::test_st_context;
|
||||
use mpz_garble_core::Delta;
|
||||
use mpz_memory_core::{
|
||||
Array, MemoryExt, ViewExt,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_ot::ideal::rcot::{IdealRCOTReceiver, IdealRCOTSender, ideal_rcot};
|
||||
use mpz_vm_core::{Execute, Vm};
|
||||
use mpz_zk::{Prover, ProverConfig, Verifier, VerifierConfig};
|
||||
use rand::{Rng, SeedableRng, rngs::StdRng};
|
||||
use rangeset::{RangeSet, UnionMut};
|
||||
use rstest::{fixture, rstest};
|
||||
use tlsn_core::{
|
||||
fixtures::transcript::{IV, KEY, RECORD_SIZE},
|
||||
hash::HashAlgId,
|
||||
transcript::{ContentType, Direction, TlsTranscript},
|
||||
};
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_authenticator_sent(
|
||||
encoding: Vec<(Direction, RangeSet<usize>)>,
|
||||
hashes: Vec<(Direction, RangeSet<usize>, HashAlgId)>,
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let (sent_decoding, recv_decoding) = decoding;
let partial = transcript
.to_transcript()
.unwrap()
.to_partial(sent_decoding, recv_decoding);
|
||||
|
||||
let (mut ctx_p, mut ctx_v) = test_st_context(8);
|
||||
|
||||
let (mut prover, mut verifier) = vms();
|
||||
let mut refs_prover = transcript_refs.clone();
|
||||
let mut refs_verifier = transcript_refs;
|
||||
|
||||
let (key, iv) = keys(&mut prover, KEY, IV, Role::Prover);
|
||||
let mut auth_prover = Authenticator::new(encoding.iter(), hashes.iter(), Some(&partial));
|
||||
let mut zk_prover = ZkAesCtr::new(Role::Prover);
|
||||
zk_prover.set_key(key, iv);
|
||||
zk_prover.alloc(&mut prover, SENT_LEN).unwrap();
|
||||
|
||||
let (key, iv) = keys(&mut verifier, KEY, IV, Role::Verifier);
|
||||
let mut auth_verifier = Authenticator::new(encoding.iter(), hashes.iter(), Some(&partial));
|
||||
let mut zk_verifier = ZkAesCtr::new(Role::Verifier);
|
||||
zk_verifier.set_key(key, iv);
|
||||
zk_verifier.alloc(&mut verifier, SENT_LEN).unwrap();
|
||||
|
||||
let _ = auth_prover
|
||||
.auth_sent(&mut prover, &mut zk_prover, &transcript, &mut refs_prover)
|
||||
.unwrap();
|
||||
|
||||
let proof = auth_verifier
|
||||
.auth_sent(
|
||||
&mut verifier,
|
||||
&mut zk_verifier,
|
||||
&transcript,
|
||||
&mut refs_verifier,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
tokio::try_join!(
|
||||
prover.execute_all(&mut ctx_p),
|
||||
verifier.execute_all(&mut ctx_v)
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
proof.verify().unwrap();
|
||||
|
||||
let mut prove_range: RangeSet<usize> = RangeSet::default();
|
||||
prove_range.union_mut(&(600..1600));
|
||||
prove_range.union_mut(&(800..2000));
|
||||
prove_range.union_mut(&(2600..3700));
|
||||
|
||||
let mut expected_ranges = RangeSet::default();
|
||||
for r in prove_range.iter_ranges() {
|
||||
let floor = r.start / RECORD_SIZE;
|
||||
let ceil = r.end.div_ceil(RECORD_SIZE);
|
||||
|
||||
let expected = floor * RECORD_SIZE..ceil * RECORD_SIZE;
|
||||
expected_ranges.union_mut(&expected);
|
||||
}
|
||||
|
||||
assert_eq!(refs_prover.index(Direction::Sent), expected_ranges);
|
||||
assert_eq!(refs_verifier.index(Direction::Sent), expected_ranges);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_authenticator_recv(
|
||||
encoding: Vec<(Direction, RangeSet<usize>)>,
|
||||
hashes: Vec<(Direction, RangeSet<usize>, HashAlgId)>,
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let (sent_decoding, recv_decoding) = decoding;
let partial = transcript
.to_transcript()
.unwrap()
.to_partial(sent_decoding, recv_decoding);
|
||||
|
||||
let (mut ctx_p, mut ctx_v) = test_st_context(8);
|
||||
|
||||
let (mut prover, mut verifier) = vms();
|
||||
let mut refs_prover = transcript_refs.clone();
|
||||
let mut refs_verifier = transcript_refs;
|
||||
|
||||
let (key, iv) = keys(&mut prover, KEY, IV, Role::Prover);
|
||||
let mut auth_prover = Authenticator::new(encoding.iter(), hashes.iter(), Some(&partial));
|
||||
let mut zk_prover = ZkAesCtr::new(Role::Prover);
|
||||
zk_prover.set_key(key, iv);
|
||||
zk_prover.alloc(&mut prover, RECV_LEN).unwrap();
|
||||
|
||||
let (key, iv) = keys(&mut verifier, KEY, IV, Role::Verifier);
|
||||
let mut auth_verifier = Authenticator::new(encoding.iter(), hashes.iter(), Some(&partial));
|
||||
let mut zk_verifier = ZkAesCtr::new(Role::Verifier);
|
||||
zk_verifier.set_key(key, iv);
|
||||
zk_verifier.alloc(&mut verifier, RECV_LEN).unwrap();
|
||||
|
||||
let _ = auth_prover
|
||||
.auth_recv(&mut prover, &mut zk_prover, &transcript, &mut refs_prover)
|
||||
.unwrap();
|
||||
|
||||
let proof = auth_verifier
|
||||
.auth_recv(
|
||||
&mut verifier,
|
||||
&mut zk_verifier,
|
||||
&transcript,
|
||||
&mut refs_verifier,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
tokio::try_join!(
|
||||
prover.execute_all(&mut ctx_p),
|
||||
verifier.execute_all(&mut ctx_v)
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
proof.verify().unwrap();
|
||||
|
||||
let mut prove_range: RangeSet<usize> = RangeSet::default();
|
||||
|
||||
prove_range.union_mut(&(4000..4200));
|
||||
prove_range.union_mut(&(5000..5800));
|
||||
prove_range.union_mut(&(6800..RECV_LEN));
|
||||
|
||||
let mut expected_ranges = RangeSet::default();
|
||||
for r in prove_range.iter_ranges() {
|
||||
let floor = r.start / RECORD_SIZE;
|
||||
let ceil = r.end.div_ceil(RECORD_SIZE);
|
||||
|
||||
let expected = floor * RECORD_SIZE..ceil * RECORD_SIZE;
|
||||
expected_ranges.union_mut(&expected);
|
||||
}
|
||||
|
||||
assert_eq!(refs_prover.index(Direction::Received), expected_ranges);
|
||||
assert_eq!(refs_verifier.index(Direction::Received), expected_ranges);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_authenticator_sent_verify_fail(
|
||||
encoding: Vec<(Direction, RangeSet<usize>)>,
|
||||
hashes: Vec<(Direction, RangeSet<usize>, HashAlgId)>,
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let (sent_decoding, recv_decoding) = decoding;
let partial = transcript
.to_transcript()
.unwrap()
.to_partial(sent_decoding, recv_decoding);
|
||||
|
||||
let (mut ctx_p, mut ctx_v) = test_st_context(8);
|
||||
|
||||
let (mut prover, mut verifier) = vms();
|
||||
let mut refs_prover = transcript_refs.clone();
|
||||
let mut refs_verifier = transcript_refs;
|
||||
|
||||
let (key, iv) = keys(&mut prover, KEY, IV, Role::Prover);
|
||||
let mut auth_prover = Authenticator::new(encoding.iter(), hashes.iter(), Some(&partial));
|
||||
let mut zk_prover = ZkAesCtr::new(Role::Prover);
|
||||
zk_prover.set_key(key, iv);
|
||||
zk_prover.alloc(&mut prover, SENT_LEN).unwrap();
|
||||
|
||||
let (key, iv) = keys(&mut verifier, KEY, IV, Role::Verifier);
|
||||
let mut auth_verifier = Authenticator::new(encoding.iter(), hashes.iter(), Some(&partial));
|
||||
let mut zk_verifier = ZkAesCtr::new(Role::Verifier);
|
||||
zk_verifier.set_key(key, iv);
|
||||
zk_verifier.alloc(&mut verifier, SENT_LEN).unwrap();
|
||||
|
||||
let _ = auth_prover
|
||||
.auth_sent(&mut prover, &mut zk_prover, &transcript, &mut refs_prover)
|
||||
.unwrap();
|
||||
|
||||
// Forge verifier transcript to check if verify fails.
|
||||
// Use an index which is part of the proving range.
|
||||
let forged = forged();
|
||||
|
||||
let proof = auth_verifier
|
||||
.auth_sent(&mut verifier, &mut zk_verifier, &forged, &mut refs_verifier)
|
||||
.unwrap();
|
||||
|
||||
tokio::try_join!(
|
||||
prover.execute_all(&mut ctx_p),
|
||||
verifier.execute_all(&mut ctx_v)
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let err = proof.verify().unwrap_err();
|
||||
assert!(matches!(err.0, ErrorRepr::InvalidCiphertext));
|
||||
}
|
||||
|
||||
fn keys(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
key_value: [u8; 16],
|
||||
iv_value: [u8; 4],
|
||||
role: Role,
|
||||
) -> (Array<U8, 16>, Array<U8, 4>) {
|
||||
let key: Array<U8, 16> = vm.alloc().unwrap();
|
||||
let iv: Array<U8, 4> = vm.alloc().unwrap();
|
||||
|
||||
if let Role::Prover = role {
|
||||
vm.mark_private(key).unwrap();
|
||||
vm.mark_private(iv).unwrap();
|
||||
|
||||
vm.assign(key, key_value).unwrap();
|
||||
vm.assign(iv, iv_value).unwrap();
|
||||
} else {
|
||||
vm.mark_blind(key).unwrap();
|
||||
vm.mark_blind(iv).unwrap();
|
||||
}
|
||||
|
||||
vm.commit(key).unwrap();
|
||||
vm.commit(iv).unwrap();
|
||||
|
||||
(key, iv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn decoding() -> (RangeSet<usize>, RangeSet<usize>) {
|
||||
let sent = 600..1600;
|
||||
let recv = 4000..4200;
|
||||
|
||||
(sent.into(), recv.into())
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn encoding() -> Vec<(Direction, RangeSet<usize>)> {
|
||||
let sent = 800..2000;
|
||||
let recv = 5000..5800;
|
||||
|
||||
let encoding = vec![
|
||||
(Direction::Sent, sent.into()),
|
||||
(Direction::Received, recv.into()),
|
||||
];
|
||||
encoding
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn hashes() -> Vec<(Direction, RangeSet<usize>, HashAlgId)> {
|
||||
let sent = 2600..3700;
|
||||
let recv = 6800..RECV_LEN;
|
||||
|
||||
let alg = HashAlgId::SHA256;
|
||||
|
||||
let hashes = vec![
|
||||
(Direction::Sent, sent.into(), alg),
|
||||
(Direction::Received, recv.into(), alg),
|
||||
];
|
||||
hashes
|
||||
}
|
||||
|
||||
fn vms() -> (Prover<IdealRCOTReceiver>, Verifier<IdealRCOTSender>) {
|
||||
let mut rng = StdRng::seed_from_u64(0);
|
||||
let delta = Delta::random(&mut rng);
|
||||
|
||||
let (ot_send, ot_recv) = ideal_rcot(rng.random(), delta.into_inner());
|
||||
|
||||
let prover = Prover::new(ProverConfig::default(), ot_recv);
|
||||
let verifier = Verifier::new(VerifierConfig::default(), delta, ot_send);
|
||||
|
||||
(prover, verifier)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn transcript() -> TlsTranscript {
|
||||
let sent = LIBER_PRIMUS.as_bytes()[..SENT_LEN].to_vec();
|
||||
|
||||
let mut recv = lipsum(RECV_LEN).into_bytes();
|
||||
recv.truncate(RECV_LEN);
|
||||
|
||||
tlsn_core::fixtures::transcript::transcript_fixture(&sent, &recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn forged() -> TlsTranscript {
|
||||
const WRONG_BYTE_INDEX: usize = 610;
|
||||
|
||||
let mut sent = LIBER_PRIMUS.as_bytes()[..SENT_LEN].to_vec();
|
||||
sent[WRONG_BYTE_INDEX] = sent[WRONG_BYTE_INDEX].wrapping_add(1);
|
||||
|
||||
let mut recv = lipsum(RECV_LEN).into_bytes();
|
||||
recv.truncate(RECV_LEN);
|
||||
|
||||
tlsn_core::fixtures::transcript::transcript_fixture(&sent, &recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn transcript_refs(transcript: TlsTranscript) -> TranscriptRefs {
|
||||
let sent_len = transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if matches!(record.typ, ContentType::ApplicationData) {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum();
|
||||
let recv_len = transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if matches!(record.typ, ContentType::ApplicationData) {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum();
|
||||
|
||||
TranscriptRefs::new(sent_len, recv_len)
|
||||
}
|
||||
|
||||
const SENT_LEN: usize = 4096;
|
||||
const RECV_LEN: usize = 8192;
|
||||
}
|
||||
615 crates/tlsn/src/commit/decode.rs (new file)
@@ -0,0 +1,615 @@
|
||||
//! Selective disclosure.
|
||||
|
||||
use mpz_memory_core::{
|
||||
Array, MemoryExt,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_vm_core::Vm;
|
||||
use rangeset::{Intersection, RangeSet, Subset, Union};
|
||||
use tlsn_core::transcript::{ContentType, Direction, PartialTranscript, TlsTranscript};
|
||||
|
||||
use crate::commit::TranscriptRefs;
|
||||
|
||||
/// Decodes parts of the transcript.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `key` - The server write key.
|
||||
/// * `iv` - The server write iv.
|
||||
/// * `decoding_ranges` - The decoding ranges.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn decode_transcript(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
key: Array<U8, 16>,
|
||||
iv: Array<U8, 4>,
|
||||
decoding_ranges: (&RangeSet<usize>, &RangeSet<usize>),
|
||||
transcript_refs: &mut TranscriptRefs,
|
||||
) -> Result<(), DecodeError> {
|
||||
let (sent, recv) = decoding_ranges;
|
||||
|
||||
let sent_refs = transcript_refs.get(Direction::Sent, sent);
|
||||
for slice in sent_refs.into_iter() {
|
||||
// Drop the future, we don't need it.
|
||||
drop(vm.decode(slice).map_err(DecodeError::vm));
|
||||
}
|
||||
|
||||
transcript_refs.mark_decoded(Direction::Sent, sent);
|
||||
|
||||
// If possible use server write key for decoding.
|
||||
let fully_decoded = recv.union(&transcript_refs.decoded(Direction::Received));
|
||||
let full_range = 0..transcript_refs.max_len(Direction::Received);
|
||||
|
||||
if fully_decoded == full_range {
|
||||
// Drop the future, we don't need it.
|
||||
drop(vm.decode(key).map_err(DecodeError::vm)?);
|
||||
drop(vm.decode(iv).map_err(DecodeError::vm)?);
|
||||
|
||||
transcript_refs.mark_decoded(Direction::Received, &full_range.into());
|
||||
} else {
|
||||
let recv_refs = transcript_refs.get(Direction::Received, recv);
|
||||
for slice in recv_refs {
|
||||
// Drop the future, we don't need it.
|
||||
drop(vm.decode(slice).map_err(DecodeError::vm));
|
||||
}
|
||||
|
||||
transcript_refs.mark_decoded(Direction::Received, recv);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Verifies parts of the transcript.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `key` - The server write key.
|
||||
/// * `iv` - The server write iv.
|
||||
/// * `decoding_ranges` - The decoding ranges.
|
||||
/// * `partial` - The partial transcript.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
/// * `transcript` - The TLS transcript.
|
||||
pub(crate) fn verify_transcript(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
key: Array<U8, 16>,
|
||||
iv: Array<U8, 4>,
|
||||
decoding_ranges: (&RangeSet<usize>, &RangeSet<usize>),
|
||||
partial: Option<&PartialTranscript>,
|
||||
transcript_refs: &mut TranscriptRefs,
|
||||
transcript: &TlsTranscript,
|
||||
) -> Result<(), DecodeError> {
|
||||
let Some(partial) = partial else {
|
||||
return Err(DecodeError(ErrorRepr::MissingPartialTranscript));
|
||||
};
|
||||
let (sent, recv) = decoding_ranges;
|
||||
let mut authenticated_data = Vec::new();
|
||||
|
||||
// Add sent transcript parts.
|
||||
let sent_refs = transcript_refs.get(Direction::Sent, sent);
|
||||
for data in sent_refs.into_iter() {
|
||||
let plaintext = vm
|
||||
.get(data)
|
||||
.map_err(DecodeError::vm)?
|
||||
.ok_or(DecodeError(ErrorRepr::MissingPlaintext))?;
|
||||
authenticated_data.extend_from_slice(&plaintext);
|
||||
}
|
||||
|
||||
// Add received transcript parts, if possible using key and iv.
|
||||
if let (Some(key), Some(iv)) = (
|
||||
vm.get(key).map_err(DecodeError::vm)?,
|
||||
vm.get(iv).map_err(DecodeError::vm)?,
|
||||
) {
|
||||
let plaintext = verify_with_keys(key, iv, recv, transcript)?;
|
||||
authenticated_data.extend_from_slice(&plaintext);
|
||||
} else {
|
||||
let recv_refs = transcript_refs.get(Direction::Received, recv);
|
||||
for data in recv_refs {
|
||||
let plaintext = vm
|
||||
.get(data)
|
||||
.map_err(DecodeError::vm)?
|
||||
.ok_or(DecodeError(ErrorRepr::MissingPlaintext))?;
|
||||
authenticated_data.extend_from_slice(&plaintext);
|
||||
}
|
||||
}
|
||||
|
||||
let mut purported_data = Vec::with_capacity(authenticated_data.len());
|
||||
|
||||
for range in sent.iter_ranges() {
|
||||
purported_data.extend_from_slice(&partial.sent_unsafe()[range]);
|
||||
}
|
||||
for range in recv.iter_ranges() {
|
||||
purported_data.extend_from_slice(&partial.received_unsafe()[range]);
|
||||
}
|
||||
|
||||
if purported_data != authenticated_data {
|
||||
return Err(DecodeError(ErrorRepr::InconsistentTranscript));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Checks the transcript length.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `partial` - The partial transcript.
|
||||
/// * `transcript` - The TLS transcript.
|
||||
pub(crate) fn check_transcript_length(
|
||||
partial: Option<&PartialTranscript>,
|
||||
transcript: &TlsTranscript,
|
||||
) -> Result<(), DecodeError> {
|
||||
let Some(partial) = partial else {
|
||||
return Err(DecodeError(ErrorRepr::MissingPartialTranscript));
|
||||
};
|
||||
|
||||
let sent_len: usize = transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if matches!(record.typ, ContentType::ApplicationData) {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum();
|
||||
let recv_len: usize = transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if matches!(record.typ, ContentType::ApplicationData) {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum();
|
||||
|
||||
// Check ranges.
|
||||
if partial.len_sent() != sent_len || partial.len_received() != recv_len {
|
||||
return Err(DecodeError(ErrorRepr::VerifyTranscriptLength));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn verify_with_keys(
|
||||
key: [u8; 16],
|
||||
iv: [u8; 4],
|
||||
decoding_ranges: &RangeSet<usize>,
|
||||
transcript: &TlsTranscript,
|
||||
) -> Result<Vec<u8>, DecodeError> {
|
||||
let mut plaintexts = Vec::with_capacity(decoding_ranges.len());
|
||||
let mut position = 0_usize;
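// Walk the received application-data records; only records whose full byte
// range lies inside the decoding ranges are decrypted, and only the requested
// sub-ranges of each record are collected.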
|
||||
|
||||
let recv_data = transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData);
|
||||
|
||||
for record in recv_data {
|
||||
let current = position..position + record.ciphertext.len();
|
||||
|
||||
if !current.is_subset(decoding_ranges) {
|
||||
position += record.ciphertext.len();
|
||||
continue;
|
||||
}
|
||||
|
||||
let nonce = record
|
||||
.explicit_nonce
|
||||
.clone()
|
||||
.try_into()
|
||||
.expect("explicit nonce should be 8 bytes");
|
||||
let plaintext = aes_apply_keystream(key, iv, nonce, &record.ciphertext);
|
||||
|
||||
let record_decoding_range = decoding_ranges.intersection(¤t);
|
||||
for r in record_decoding_range.iter_ranges() {
|
||||
let shifted = r.start - position..r.end - position;
|
||||
plaintexts.extend_from_slice(&plaintext[shifted]);
|
||||
}
|
||||
|
||||
position += record.ciphertext.len()
|
||||
}
|
||||
Ok(plaintexts)
|
||||
}
|
||||
|
||||
fn aes_apply_keystream(key: [u8; 16], iv: [u8; 4], explicit_nonce: [u8; 8], msg: &[u8]) -> Vec<u8> {
|
||||
use aes::Aes128;
|
||||
use cipher_crypto::{KeyIvInit, StreamCipher, StreamCipherSeek};
|
||||
use ctr::Ctr32BE;
|
||||
|
||||
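// TLS 1.2 AES-GCM counter block: 4-byte implicit IV || 8-byte explicit nonce
// || 32-bit counter. Counter block 1 is consumed for the authentication tag,
// so the application-data keystream starts at block 2.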
let start_ctr = 2;
|
||||
let mut full_iv = [0u8; 16];
|
||||
full_iv[0..4].copy_from_slice(&iv);
|
||||
full_iv[4..12].copy_from_slice(&explicit_nonce);
|
||||
|
||||
let mut cipher = Ctr32BE::<Aes128>::new(&key.into(), &full_iv.into());
|
||||
let mut out = msg.to_vec();
|
||||
|
||||
cipher
|
||||
.try_seek(start_ctr * 16)
|
||||
.expect("start counter is less than keystream length");
|
||||
cipher.apply_keystream(&mut out);
|
||||
|
||||
out
|
||||
}
|
||||
|
||||
/// A decoding error.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("decode error: {0}")]
|
||||
pub(crate) struct DecodeError(#[source] ErrorRepr);
|
||||
|
||||
impl DecodeError {
|
||||
fn vm<E>(err: E) -> Self
|
||||
where
|
||||
E: Into<Box<dyn std::error::Error + Send + Sync + 'static>>,
|
||||
{
|
||||
Self(ErrorRepr::Vm(err.into()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
enum ErrorRepr {
|
||||
#[error("vm error: {0}")]
|
||||
Vm(Box<dyn std::error::Error + Send + Sync + 'static>),
|
||||
#[error("missing partial transcript")]
|
||||
MissingPartialTranscript,
|
||||
#[error("length of partial transcript does not match expected length")]
|
||||
VerifyTranscriptLength,
|
||||
#[error("provided transcript does not match exptected")]
|
||||
InconsistentTranscript,
|
||||
#[error("trying to get plaintext, but it is missing")]
|
||||
MissingPlaintext,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::{
|
||||
Role,
|
||||
commit::{
|
||||
TranscriptRefs,
|
||||
decode::{DecodeError, ErrorRepr, decode_transcript, verify_transcript},
|
||||
},
|
||||
};
|
||||
use lipsum::{LIBER_PRIMUS, lipsum};
|
||||
use mpz_common::context::test_st_context;
|
||||
use mpz_garble_core::Delta;
|
||||
use mpz_memory_core::{
|
||||
Array, MemoryExt, Vector, ViewExt,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_ot::ideal::rcot::{IdealRCOTReceiver, IdealRCOTSender, ideal_rcot};
|
||||
use mpz_vm_core::{Execute, Vm};
|
||||
use mpz_zk::{Prover, ProverConfig, Verifier, VerifierConfig};
|
||||
use rand::{Rng, SeedableRng, rngs::StdRng};
|
||||
use rangeset::{RangeSet, UnionMut};
|
||||
use rstest::{fixture, rstest};
|
||||
use tlsn_core::{
|
||||
fixtures::transcript::{IV, KEY},
|
||||
transcript::{ContentType, Direction, PartialTranscript, TlsTranscript},
|
||||
};
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_decode(
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let partial = partial(&transcript, decoding.clone());
|
||||
decode(decoding, partial, transcript, transcript_refs)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_decode_fail(
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
forged: TlsTranscript,
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let partial = partial(&forged, decoding.clone());
|
||||
let err = decode(decoding, partial, transcript, transcript_refs)
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
assert!(matches!(err.0, ErrorRepr::InconsistentTranscript));
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_decode_all(
|
||||
decoding_full: (RangeSet<usize>, RangeSet<usize>),
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let partial = partial(&transcript, decoding_full.clone());
|
||||
decode(decoding_full, partial, transcript, transcript_refs)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_decode_all_fail(
|
||||
decoding_full: (RangeSet<usize>, RangeSet<usize>),
|
||||
forged: TlsTranscript,
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) {
|
||||
let partial = partial(&forged, decoding_full.clone());
|
||||
let err = decode(decoding_full, partial, transcript, transcript_refs)
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
assert!(matches!(err.0, ErrorRepr::InconsistentTranscript));
|
||||
}
|
||||
|
||||
async fn decode(
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
partial: PartialTranscript,
|
||||
transcript: TlsTranscript,
|
||||
transcript_refs: TranscriptRefs,
|
||||
) -> Result<(), DecodeError> {
|
||||
let (sent, recv) = decoding;
|
||||
|
||||
let (mut ctx_p, mut ctx_v) = test_st_context(8);
|
||||
let (mut prover, mut verifier) = vms();
|
||||
|
||||
let mut transcript_refs_verifier = transcript_refs.clone();
|
||||
let mut transcript_refs_prover = transcript_refs;
|
||||
|
||||
let key: [u8; 16] = KEY;
|
||||
let iv: [u8; 4] = IV;
|
||||
|
||||
let (key_prover, iv_prover) = assign_keys(&mut prover, key, iv, Role::Prover);
|
||||
let (key_verifier, iv_verifier) = assign_keys(&mut verifier, key, iv, Role::Verifier);
|
||||
|
||||
assign_transcript(
|
||||
&mut prover,
|
||||
Role::Prover,
|
||||
&transcript,
|
||||
&mut transcript_refs_prover,
|
||||
);
|
||||
assign_transcript(
|
||||
&mut verifier,
|
||||
Role::Verifier,
|
||||
&transcript,
|
||||
&mut transcript_refs_verifier,
|
||||
);
|
||||
|
||||
decode_transcript(
|
||||
&mut prover,
|
||||
key_prover,
|
||||
iv_prover,
|
||||
(&sent, &recv),
|
||||
&mut transcript_refs_prover,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
decode_transcript(
|
||||
&mut verifier,
|
||||
key_verifier,
|
||||
iv_verifier,
|
||||
(&sent, &recv),
|
||||
&mut transcript_refs_verifier,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
tokio::try_join!(
|
||||
prover.execute_all(&mut ctx_p),
|
||||
verifier.execute_all(&mut ctx_v),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
verify_transcript(
|
||||
&mut verifier,
|
||||
key_verifier,
|
||||
iv_verifier,
|
||||
(&sent, &recv),
|
||||
Some(&partial),
|
||||
&mut transcript_refs_verifier,
|
||||
&transcript,
|
||||
)
|
||||
}
|
||||
|
||||
fn assign_keys(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
key_value: [u8; 16],
|
||||
iv_value: [u8; 4],
|
||||
role: Role,
|
||||
) -> (Array<U8, 16>, Array<U8, 4>) {
|
||||
let key: Array<U8, 16> = vm.alloc().unwrap();
|
||||
let iv: Array<U8, 4> = vm.alloc().unwrap();
|
||||
|
||||
if let Role::Prover = role {
|
||||
vm.mark_private(key).unwrap();
|
||||
vm.mark_private(iv).unwrap();
|
||||
|
||||
vm.assign(key, key_value).unwrap();
|
||||
vm.assign(iv, iv_value).unwrap();
|
||||
} else {
|
||||
vm.mark_blind(key).unwrap();
|
||||
vm.mark_blind(iv).unwrap();
|
||||
}
|
||||
|
||||
vm.commit(key).unwrap();
|
||||
vm.commit(iv).unwrap();
|
||||
|
||||
(key, iv)
|
||||
}
|
||||
|
||||
fn assign_transcript(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
role: Role,
|
||||
transcript: &TlsTranscript,
|
||||
transcript_refs: &mut TranscriptRefs,
|
||||
) {
|
||||
let mut pos = 0_usize;
|
||||
|
||||
let sent = transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData);
|
||||
|
||||
for record in sent {
|
||||
let len = record.ciphertext.len();
|
||||
|
||||
let cipher_ref: Vector<U8> = vm.alloc_vec(len).unwrap();
|
||||
vm.mark_public(cipher_ref).unwrap();
|
||||
vm.assign(cipher_ref, record.ciphertext.clone()).unwrap();
|
||||
vm.commit(cipher_ref).unwrap();
|
||||
|
||||
let plaintext_ref: Vector<U8> = vm.alloc_vec(len).unwrap();
|
||||
if let Role::Prover = role {
|
||||
vm.mark_private(plaintext_ref).unwrap();
|
||||
vm.assign(plaintext_ref, record.plaintext.clone().unwrap())
|
||||
.unwrap();
|
||||
} else {
|
||||
vm.mark_blind(plaintext_ref).unwrap();
|
||||
}
|
||||
vm.commit(plaintext_ref).unwrap();
|
||||
|
||||
let index = pos..pos + record.ciphertext.len();
|
||||
transcript_refs.add(Direction::Sent, &index, plaintext_ref);
|
||||
|
||||
pos += record.ciphertext.len();
|
||||
}
|
||||
|
||||
pos = 0;
|
||||
|
||||
let recv = transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData);
|
||||
|
||||
for record in recv {
|
||||
let len = record.ciphertext.len();
|
||||
|
||||
let cipher_ref: Vector<U8> = vm.alloc_vec(len).unwrap();
|
||||
vm.mark_public(cipher_ref).unwrap();
|
||||
vm.assign(cipher_ref, record.ciphertext.clone()).unwrap();
|
||||
vm.commit(cipher_ref).unwrap();
|
||||
|
||||
let plaintext_ref: Vector<U8> = vm.alloc_vec(len).unwrap();
|
||||
if let Role::Prover = role {
|
||||
vm.mark_private(plaintext_ref).unwrap();
|
||||
vm.assign(plaintext_ref, record.plaintext.clone().unwrap())
|
||||
.unwrap();
|
||||
} else {
|
||||
vm.mark_blind(plaintext_ref).unwrap();
|
||||
}
|
||||
vm.commit(plaintext_ref).unwrap();
|
||||
|
||||
let index = pos..pos + record.ciphertext.len();
|
||||
transcript_refs.add(Direction::Received, &index, plaintext_ref);
|
||||
|
||||
pos += record.ciphertext.len();
|
||||
}
|
||||
}
|
||||
|
||||
fn partial(
|
||||
transcript: &TlsTranscript,
|
||||
decoding: (RangeSet<usize>, RangeSet<usize>),
|
||||
) -> PartialTranscript {
|
||||
let (sent, recv) = decoding;
|
||||
|
||||
transcript.to_transcript().unwrap().to_partial(sent, recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn decoding() -> (RangeSet<usize>, RangeSet<usize>) {
|
||||
let mut sent = RangeSet::default();
|
||||
let mut recv = RangeSet::default();
|
||||
|
||||
sent.union_mut(&(600..1100));
|
||||
sent.union_mut(&(3450..4000));
|
||||
|
||||
recv.union_mut(&(2000..3000));
|
||||
recv.union_mut(&(4800..4900));
|
||||
recv.union_mut(&(6000..7000));
|
||||
|
||||
(sent, recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn decoding_full(transcript: TlsTranscript) -> (RangeSet<usize>, RangeSet<usize>) {
|
||||
let transcript = transcript.to_transcript().unwrap();
|
||||
let (len_sent, len_recv) = transcript.len();
|
||||
|
||||
let sent = (0..len_sent).into();
|
||||
let recv = (0..len_recv).into();
|
||||
|
||||
(sent, recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn transcript() -> TlsTranscript {
|
||||
let sent = LIBER_PRIMUS.as_bytes()[..SENT_LEN].to_vec();
|
||||
|
||||
let mut recv = lipsum(RECV_LEN).into_bytes();
|
||||
recv.truncate(RECV_LEN);
|
||||
|
||||
tlsn_core::fixtures::transcript::transcript_fixture(&sent, &recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn forged() -> TlsTranscript {
|
||||
const WRONG_BYTE_INDEX: usize = 2200;
|
||||
|
||||
let sent = LIBER_PRIMUS.as_bytes()[..SENT_LEN].to_vec();
|
||||
|
||||
let mut recv = lipsum(RECV_LEN).into_bytes();
|
||||
recv.truncate(RECV_LEN);
|
||||
recv[WRONG_BYTE_INDEX] = recv[WRONG_BYTE_INDEX].wrapping_add(1);
|
||||
|
||||
tlsn_core::fixtures::transcript::transcript_fixture(&sent, &recv)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn transcript_refs(transcript: TlsTranscript) -> TranscriptRefs {
|
||||
let sent_len = transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if let ContentType::ApplicationData = record.typ {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum::<usize>();
|
||||
|
||||
let recv_len = transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter_map(|record| {
|
||||
if let ContentType::ApplicationData = record.typ {
|
||||
Some(record.ciphertext.len())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.sum::<usize>();
|
||||
|
||||
TranscriptRefs::new(sent_len, recv_len)
|
||||
}
|
||||
|
||||
fn vms() -> (Prover<IdealRCOTReceiver>, Verifier<IdealRCOTSender>) {
|
||||
let mut rng = StdRng::seed_from_u64(0);
|
||||
let delta = Delta::random(&mut rng);
|
||||
|
||||
let (ot_send, ot_recv) = ideal_rcot(rng.random(), delta.into_inner());
|
||||
|
||||
let prover = Prover::new(ProverConfig::default(), ot_recv);
|
||||
let verifier = Verifier::new(VerifierConfig::default(), delta, ot_send);
|
||||
|
||||
(prover, verifier)
|
||||
}
|
||||
|
||||
const SENT_LEN: usize = 4096;
|
||||
const RECV_LEN: usize = 8192;
|
||||
}
|
||||
530 crates/tlsn/src/commit/encoding.rs (new file)
@@ -0,0 +1,530 @@
|
||||
//! Encoding commitment protocol.
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
use mpz_memory_core::{
|
||||
MemoryType, Vector,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_vm_core::Vm;
|
||||
use rangeset::{RangeSet, Subset, UnionMut};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tlsn_core::{
|
||||
hash::{Blake3, HashAlgId, HashAlgorithm, Keccak256, Sha256, TypedHash},
|
||||
transcript::{
|
||||
Direction,
|
||||
encoding::{
|
||||
Encoder, EncoderSecret, EncodingProvider, EncodingProviderError, EncodingTree,
|
||||
EncodingTreeError, new_encoder,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
use crate::commit::transcript::TranscriptRefs;
|
||||
|
||||
/// Number of encoding bytes per plaintext byte (one 16-byte MAC per bit).
|
||||
pub(crate) const ENCODING_SIZE: usize = 128;
|
||||
|
||||
pub(crate) trait EncodingVm<T: MemoryType>: EncodingMemory<T> + Vm<T> {}
|
||||
|
||||
impl<T: MemoryType, U> EncodingVm<T> for U where U: EncodingMemory<T> + Vm<T> {}
|
||||
|
||||
pub(crate) trait EncodingMemory<T: MemoryType> {
|
||||
fn get_encodings(&self, values: &[Vector<U8>]) -> Vec<u8>;
|
||||
}
|
||||
|
||||
impl<T> EncodingMemory<Binary> for mpz_zk::Prover<T> {
|
||||
fn get_encodings(&self, values: &[Vector<U8>]) -> Vec<u8> {
|
||||
let len = values.iter().map(|v| v.len()).sum::<usize>() * ENCODING_SIZE;
|
||||
let mut encodings = Vec::with_capacity(len);
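// Each bit of the value is backed by a 16-byte MAC, so every plaintext byte
// contributes ENCODING_SIZE (8 * 16) bytes of encoding material.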
|
||||
|
||||
for &v in values {
|
||||
let macs = self.get_macs(v).expect("macs should be available");
|
||||
encodings.extend(macs.iter().flat_map(|mac| mac.as_bytes()));
|
||||
}
|
||||
encodings
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> EncodingMemory<Binary> for mpz_zk::Verifier<T> {
|
||||
fn get_encodings(&self, values: &[Vector<U8>]) -> Vec<u8> {
|
||||
let len = values.iter().map(|v| v.len()).sum::<usize>() * ENCODING_SIZE;
|
||||
let mut encodings = Vec::with_capacity(len);
|
||||
|
||||
for &v in values {
|
||||
let keys = self.get_keys(v).expect("keys should be available");
|
||||
encodings.extend(keys.iter().flat_map(|key| key.as_block().as_bytes()));
|
||||
}
|
||||
encodings
|
||||
}
|
||||
}
|
||||
|
||||
/// The encoding adjustments.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct Encodings {
|
||||
pub(crate) sent: Vec<u8>,
|
||||
pub(crate) recv: Vec<u8>,
|
||||
}
|
||||
|
||||
/// Creates encoding commitments.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct EncodingCreator {
|
||||
hash_id: Option<HashAlgId>,
|
||||
sent: RangeSet<usize>,
|
||||
recv: RangeSet<usize>,
|
||||
idxs: Vec<(Direction, RangeSet<usize>)>,
|
||||
}
|
||||
|
||||
impl EncodingCreator {
|
||||
/// Creates a new encoding creator.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `hash_id` - The id of the hash algorithm.
|
||||
/// * `idxs` - The indices for encoding commitments.
|
||||
pub(crate) fn new(hash_id: Option<HashAlgId>, idxs: Vec<(Direction, RangeSet<usize>)>) -> Self {
|
||||
let mut sent = RangeSet::default();
|
||||
let mut recv = RangeSet::default();
|
||||
|
||||
for (direction, idx) in idxs.iter() {
|
||||
for range in idx.iter_ranges() {
|
||||
match direction {
|
||||
Direction::Sent => sent.union_mut(&range),
|
||||
Direction::Received => recv.union_mut(&range),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
hash_id,
|
||||
sent,
|
||||
recv,
|
||||
idxs,
|
||||
}
|
||||
}
|
||||
|
||||
/// Receives the encodings using the provided MACs from the encoding memory.
|
||||
///
|
||||
/// The MACs must be consistent with the global delta used in the encodings.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `encoding_mem` - The encoding memory.
|
||||
/// * `encodings` - The encoding adjustments.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn receive(
|
||||
&self,
|
||||
encoding_mem: &dyn EncodingMemory<Binary>,
|
||||
encodings: Encodings,
|
||||
transcript_refs: &TranscriptRefs,
|
||||
) -> Result<(TypedHash, EncodingTree), EncodingError> {
|
||||
let Some(id) = self.hash_id else {
|
||||
return Err(EncodingError(ErrorRepr::MissingHashId));
|
||||
};
|
||||
|
||||
let hasher: &(dyn HashAlgorithm + Send + Sync) = match id {
|
||||
HashAlgId::SHA256 => &Sha256::default(),
|
||||
HashAlgId::KECCAK256 => &Keccak256::default(),
|
||||
HashAlgId::BLAKE3 => &Blake3::default(),
|
||||
alg => {
|
||||
return Err(EncodingError(ErrorRepr::UnsupportedHashAlg(alg)));
|
||||
}
|
||||
};
|
||||
|
||||
let Encodings {
|
||||
sent: mut sent_adjust,
|
||||
recv: mut recv_adjust,
|
||||
} = encodings;
|
||||
|
||||
let sent_refs = transcript_refs.get(Direction::Sent, &self.sent);
|
||||
let sent = encoding_mem.get_encodings(&sent_refs);
|
||||
|
||||
let recv_refs = transcript_refs.get(Direction::Received, &self.recv);
|
||||
let recv = encoding_mem.get_encodings(&recv_refs);
|
||||
|
||||
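// XOR the locally held MACs into the received adjustments to recover the full
// encodings that back the commitment tree.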
adjust(&sent, &recv, &mut sent_adjust, &mut recv_adjust)?;
|
||||
|
||||
let provider = Provider::new(sent_adjust, &self.sent, recv_adjust, &self.recv);
|
||||
|
||||
let tree = EncodingTree::new(hasher, self.idxs.iter(), &provider)?;
|
||||
let root = tree.root();
|
||||
|
||||
Ok((root, tree))
|
||||
}
|
||||
|
||||
/// Transfers the encodings using the provided secret and keys from the
|
||||
/// encoding memory.
|
||||
///
|
||||
/// The keys must be consistent with the global delta used in the encodings.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `encoding_mem` - The encoding memory.
|
||||
/// * `secret` - The encoder secret.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn transfer(
|
||||
&self,
|
||||
encoding_mem: &dyn EncodingMemory<Binary>,
|
||||
secret: EncoderSecret,
|
||||
transcript_refs: &TranscriptRefs,
|
||||
) -> Result<Encodings, EncodingError> {
|
||||
let encoder = new_encoder(&secret);
|
||||
|
||||
let mut sent_zero = Vec::with_capacity(self.sent.len() * ENCODING_SIZE);
|
||||
let mut recv_zero = Vec::with_capacity(self.recv.len() * ENCODING_SIZE);
|
||||
|
||||
for range in self.sent.iter_ranges() {
|
||||
encoder.encode_range(Direction::Sent, range, &mut sent_zero);
|
||||
}
|
||||
|
||||
for range in self.recv.iter_ranges() {
|
||||
encoder.encode_range(Direction::Received, range, &mut recv_zero);
|
||||
}
|
||||
|
||||
let sent_refs = transcript_refs.get(Direction::Sent, &self.sent);
|
||||
let sent = encoding_mem.get_encodings(&sent_refs);
|
||||
|
||||
let recv_refs = transcript_refs.get(Direction::Received, &self.recv);
|
||||
let recv = encoding_mem.get_encodings(&recv_refs);
|
||||
|
||||
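// XOR the local encodings into the zero-encodings derived from the secret;
// the result is the adjustment data transferred to the other party.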
adjust(&sent, &recv, &mut sent_zero, &mut recv_zero)?;
|
||||
let encodings = Encodings {
|
||||
sent: sent_zero,
|
||||
recv: recv_zero,
|
||||
};
|
||||
|
||||
Ok(encodings)
|
||||
}
|
||||
}
|
||||
|
||||
/// Adjust encodings by transcript references.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `sent` - The encodings for the sent bytes.
|
||||
/// * `recv` - The encodings for the received bytes.
|
||||
/// * `sent_adjust` - The adjustment bytes for the encodings of the sent bytes.
|
||||
/// * `recv_adjust` - The adjustment bytes for the encodings of the received
|
||||
/// bytes.
|
||||
fn adjust(
|
||||
sent: &[u8],
|
||||
recv: &[u8],
|
||||
sent_adjust: &mut [u8],
|
||||
recv_adjust: &mut [u8],
|
||||
) -> Result<(), EncodingError> {
|
||||
assert_eq!(sent.len() % ENCODING_SIZE, 0);
|
||||
assert_eq!(recv.len() % ENCODING_SIZE, 0);
|
||||
|
||||
if sent_adjust.len() != sent.len() {
|
||||
return Err(ErrorRepr::IncorrectAdjustCount {
|
||||
direction: Direction::Sent,
|
||||
expected: sent.len(),
|
||||
got: sent_adjust.len(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
||||
if recv_adjust.len() != recv.len() {
|
||||
return Err(ErrorRepr::IncorrectAdjustCount {
|
||||
direction: Direction::Received,
|
||||
expected: recv.len(),
|
||||
got: recv_adjust.len(),
|
||||
}
|
||||
.into());
|
||||
}
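// Element-wise XOR of the local encodings into the adjustment buffers.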
|
||||
|
||||
sent_adjust
|
||||
.iter_mut()
|
||||
.zip(sent)
|
||||
.for_each(|(adjust, enc)| *adjust ^= enc);
|
||||
recv_adjust
|
||||
.iter_mut()
|
||||
.zip(recv)
|
||||
.for_each(|(adjust, enc)| *adjust ^= enc);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Provider {
|
||||
sent: Vec<u8>,
|
||||
sent_range: RangeSet<usize>,
|
||||
recv: Vec<u8>,
|
||||
recv_range: RangeSet<usize>,
|
||||
}
|
||||
|
||||
impl Provider {
|
||||
fn new(
|
||||
sent: Vec<u8>,
|
||||
sent_range: &RangeSet<usize>,
|
||||
recv: Vec<u8>,
|
||||
recv_range: &RangeSet<usize>,
|
||||
) -> Self {
|
||||
assert_eq!(
|
||||
sent.len(),
|
||||
sent_range.len() * ENCODING_SIZE,
|
||||
"length of sent encodings and their index length do not match"
|
||||
);
|
||||
assert_eq!(
|
||||
recv.len(),
|
||||
recv_range.len() * ENCODING_SIZE,
|
||||
"length of received encodings and their index length do not match"
|
||||
);
|
||||
|
||||
Self {
|
||||
sent,
|
||||
sent_range: sent_range.clone(),
|
||||
recv,
|
||||
recv_range: recv_range.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn adjust(
|
||||
&self,
|
||||
direction: Direction,
|
||||
range: &Range<usize>,
|
||||
) -> Result<Range<usize>, EncodingProviderError> {
|
||||
let internal_range = match direction {
|
||||
Direction::Sent => &self.sent_range,
|
||||
Direction::Received => &self.recv_range,
|
||||
};
|
||||
|
||||
if !range.is_subset(internal_range) {
|
||||
return Err(EncodingProviderError);
|
||||
}
|
||||
|
||||
let shift = internal_range
|
||||
.iter()
|
||||
.take_while(|&el| el < range.start)
|
||||
.count();
|
||||
|
||||
let translated = Range {
|
||||
start: shift,
|
||||
end: shift + range.len(),
|
||||
};
|
||||
|
||||
Ok(translated)
|
||||
}
|
||||
}
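// Worked example for `Provider::adjust` (illustrative values only): if
// `sent_range` is {0..8, 16..24} and the caller requests 16..24, the eight
// elements of `sent_range` preceding position 16 yield a shift of 8, so the
// translated range into the locally stored encodings is 8..16.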
|
||||
|
||||
impl EncodingProvider for Provider {
|
||||
fn provide_encoding(
|
||||
&self,
|
||||
direction: Direction,
|
||||
range: Range<usize>,
|
||||
dest: &mut Vec<u8>,
|
||||
) -> Result<(), EncodingProviderError> {
|
||||
let encodings = match direction {
|
||||
Direction::Sent => &self.sent,
|
||||
Direction::Received => &self.recv,
|
||||
};
|
||||
|
||||
let range = self.adjust(direction, &range)?;
|
||||
|
||||
let start = range.start * ENCODING_SIZE;
|
||||
let end = range.end * ENCODING_SIZE;
|
||||
|
||||
dest.extend_from_slice(&encodings[start..end]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Encoding protocol error.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error(transparent)]
|
||||
pub(crate) struct EncodingError(#[from] ErrorRepr);
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("encoding protocol error: {0}")]
|
||||
enum ErrorRepr {
|
||||
#[error("incorrect adjustment count for {direction}: expected {expected}, got {got}")]
|
||||
IncorrectAdjustCount {
|
||||
direction: Direction,
|
||||
expected: usize,
|
||||
got: usize,
|
||||
},
|
||||
#[error("encoding tree error: {0}")]
|
||||
EncodingTree(EncodingTreeError),
|
||||
#[error("missing hash id")]
|
||||
MissingHashId,
|
||||
#[error("unsupported hash algorithm for encoding commitment: {0}")]
|
||||
UnsupportedHashAlg(HashAlgId),
|
||||
}
|
||||
|
||||
impl From<EncodingTreeError> for EncodingError {
|
||||
fn from(value: EncodingTreeError) -> Self {
|
||||
Self(ErrorRepr::EncodingTree(value))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::ops::Range;
|
||||
|
||||
use crate::commit::{
|
||||
encoding::{ENCODING_SIZE, EncodingCreator, Encodings, Provider},
|
||||
transcript::TranscriptRefs,
|
||||
};
|
||||
use mpz_core::Block;
|
||||
use mpz_garble_core::Delta;
|
||||
use mpz_memory_core::{
|
||||
FromRaw, Slice, ToRaw, Vector,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use rangeset::{RangeSet, UnionMut};
|
||||
use rstest::{fixture, rstest};
|
||||
use tlsn_core::{
|
||||
hash::{HashAlgId, HashProvider},
|
||||
transcript::{
|
||||
Direction,
|
||||
encoding::{EncoderSecret, EncodingCommitment, EncodingProvider},
|
||||
},
|
||||
};
|
||||
|
||||
#[rstest]
|
||||
fn test_encoding_adjust(
|
||||
index: (RangeSet<usize>, RangeSet<usize>),
|
||||
transcript_refs: TranscriptRefs,
|
||||
encoding_idxs: Vec<(Direction, RangeSet<usize>)>,
|
||||
) {
|
||||
let creator = EncodingCreator::new(Some(HashAlgId::SHA256), encoding_idxs);
|
||||
|
||||
let mock_memory = MockEncodingMemory;
|
||||
|
||||
let delta = Delta::new(Block::ONES);
|
||||
let seed = [1_u8; 32];
|
||||
let secret = EncoderSecret::new(seed, delta.as_block().to_bytes());
|
||||
|
||||
let adjustments = creator
|
||||
.transfer(&mock_memory, secret, &transcript_refs)
|
||||
.unwrap();
|
||||
|
||||
let (root, tree) = creator
|
||||
.receive(&mock_memory, adjustments, &transcript_refs)
|
||||
.unwrap();
|
||||
|
||||
// Check correctness of encoding protocol.
|
||||
let mut idxs = Vec::new();
|
||||
|
||||
let (sent_range, recv_range) = index;
|
||||
idxs.push((Direction::Sent, sent_range.clone()));
|
||||
idxs.push((Direction::Received, recv_range.clone()));
|
||||
|
||||
let commitment = EncodingCommitment { root, secret };
|
||||
let proof = tree.proof(idxs.iter()).unwrap();
|
||||
|
||||
// Here we set the transcript plaintext to all zeroes, which is possible
// because the plaintext is not constrained by the encodings at this point.
|
||||
let sent = vec![0_u8; transcript_refs.max_len(Direction::Sent)];
|
||||
let recv = vec![0_u8; transcript_refs.max_len(Direction::Received)];
|
||||
|
||||
let (idx_sent, idx_recv) = proof
|
||||
.verify_with_provider(&HashProvider::default(), &commitment, &sent, &recv)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(idx_sent, idxs[0].1);
|
||||
assert_eq!(idx_recv, idxs[1].1);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_encoding_provider(index: (RangeSet<usize>, RangeSet<usize>), encodings: Encodings) {
|
||||
let (sent_range, recv_range) = index;
|
||||
let Encodings { sent, recv } = encodings;
|
||||
|
||||
let provider = Provider::new(sent, &sent_range, recv, &recv_range);
|
||||
|
||||
let mut encodings_sent = Vec::new();
|
||||
let mut encodings_recv = Vec::new();
|
||||
|
||||
provider
|
||||
.provide_encoding(Direction::Sent, 16..24, &mut encodings_sent)
|
||||
.unwrap();
|
||||
provider
|
||||
.provide_encoding(Direction::Received, 56..64, &mut encodings_recv)
|
||||
.unwrap();
|
||||
|
||||
let expected_sent = generate_encodings((16..24).into());
|
||||
let expected_recv = generate_encodings((56..64).into());
|
||||
|
||||
assert_eq!(expected_sent, encodings_sent);
|
||||
assert_eq!(expected_recv, encodings_recv);
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn transcript_refs(index: (RangeSet<usize>, RangeSet<usize>)) -> TranscriptRefs {
|
||||
let mut transcript_refs = TranscriptRefs::new(40, 64);
|
||||
|
||||
let dummy = |range: Range<usize>| {
|
||||
Vector::<U8>::from_raw(Slice::from_range_unchecked(8 * range.start..8 * range.end))
|
||||
};
|
||||
|
||||
for range in index.0.iter_ranges() {
|
||||
transcript_refs.add(Direction::Sent, &range, dummy(range.clone()));
|
||||
}
|
||||
|
||||
for range in index.1.iter_ranges() {
|
||||
transcript_refs.add(Direction::Received, &range, dummy(range.clone()));
|
||||
}
|
||||
|
||||
transcript_refs
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn encodings(index: (RangeSet<usize>, RangeSet<usize>)) -> Encodings {
|
||||
let sent = generate_encodings(index.0);
|
||||
let recv = generate_encodings(index.1);
|
||||
|
||||
Encodings { sent, recv }
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn encoding_idxs(
|
||||
index: (RangeSet<usize>, RangeSet<usize>),
|
||||
) -> Vec<(Direction, RangeSet<usize>)> {
|
||||
let (sent, recv) = index;
|
||||
|
||||
vec![(Direction::Sent, sent), (Direction::Received, recv)]
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn index() -> (RangeSet<usize>, RangeSet<usize>) {
|
||||
let mut sent = RangeSet::default();
|
||||
sent.union_mut(&(0..8));
|
||||
sent.union_mut(&(16..24));
|
||||
sent.union_mut(&(32..40));
|
||||
|
||||
let mut recv = RangeSet::default();
|
||||
recv.union_mut(&(40..48));
|
||||
recv.union_mut(&(56..64));
|
||||
|
||||
(sent, recv)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct MockEncodingMemory;
|
||||
|
||||
impl EncodingMemory<Binary> for MockEncodingMemory {
|
||||
fn get_encodings(&self, values: &[Vector<U8>]) -> Vec<u8> {
|
||||
let ranges: Vec<Range<usize>> = values
|
||||
.iter()
|
||||
.map(|r| {
|
||||
let range = r.to_raw().to_range();
|
||||
range.start / 8..range.end / 8
|
||||
})
|
||||
.collect();
|
||||
let ranges: RangeSet<usize> = ranges.into();
|
||||
|
||||
generate_encodings(ranges)
|
||||
}
|
||||
}
|
||||
|
||||
fn generate_encodings(index: RangeSet<usize>) -> Vec<u8> {
|
||||
let mut out = Vec::new();
|
||||
for el in index.iter() {
|
||||
out.extend_from_slice(&[el as u8; ENCODING_SIZE]);
|
||||
}
|
||||
out
|
||||
}
|
||||
}
|
||||
@@ -9,44 +9,182 @@ use mpz_memory_core::{
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_vm_core::{Vm, VmError, prelude::*};
|
||||
use rangeset::RangeSet;
|
||||
use tlsn_core::{
|
||||
hash::{Blinder, Hash, HashAlgId, TypedHash},
|
||||
transcript::{
|
||||
Direction, Idx,
|
||||
Direction,
|
||||
hash::{PlaintextHash, PlaintextHashSecret},
|
||||
},
|
||||
};
|
||||
|
||||
use crate::{Role, commit::transcript::TranscriptRefs};
|
||||
|
||||
/// Future which will resolve to the committed hash values.
|
||||
/// Creates plaintext hashes.
|
||||
#[derive(Debug)]
|
||||
|
||||
pub(crate) struct HashCommitFuture {
|
||||
#[allow(clippy::type_complexity)]
|
||||
futs: Vec<(
|
||||
Direction,
|
||||
Idx,
|
||||
HashAlgId,
|
||||
DecodeFutureTyped<BitVec, Vec<u8>>,
|
||||
)>,
|
||||
pub(crate) struct PlaintextHasher {
|
||||
ranges: Vec<HashRange>,
|
||||
}
|
||||
|
||||
impl HashCommitFuture {
|
||||
impl PlaintextHasher {
|
||||
/// Creates a new instance.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `indices` - The hash indices.
|
||||
pub(crate) fn new<'a>(
|
||||
indices: impl Iterator<Item = &'a (Direction, RangeSet<usize>, HashAlgId)>,
|
||||
) -> Self {
|
||||
let mut ranges = Vec::new();
|
||||
|
||||
for (direction, index, id) in indices {
|
||||
let hash_range = HashRange::new(*direction, index.clone(), *id);
|
||||
ranges.push(hash_range);
|
||||
}
|
||||
|
||||
Self { ranges }
|
||||
}
|
||||
|
||||
/// Prove plaintext hash commitments.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn prove(
|
||||
&self,
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
transcript_refs: &TranscriptRefs,
|
||||
) -> Result<(HashFuture, Vec<PlaintextHashSecret>), HashCommitError> {
|
||||
let (hash_refs, blinders) = commit(vm, &self.ranges, Role::Prover, transcript_refs)?;
|
||||
|
||||
let mut futures = Vec::new();
|
||||
let mut secrets = Vec::new();
|
||||
|
||||
for ((range, hash_ref), blinder_ref) in self.ranges.iter().zip(hash_refs).zip(blinders) {
|
||||
let blinder: Blinder = rand::random();
|
||||
|
||||
vm.assign(blinder_ref, blinder.as_bytes().to_vec())?;
|
||||
vm.commit(blinder_ref)?;
|
||||
|
||||
let hash_fut = vm.decode(Vector::<U8>::from(hash_ref))?;
|
||||
|
||||
futures.push((range.clone(), hash_fut));
|
||||
secrets.push(PlaintextHashSecret {
|
||||
direction: range.direction,
|
||||
idx: range.range.clone(),
|
||||
blinder,
|
||||
alg: range.id,
|
||||
});
|
||||
}
|
||||
|
||||
let hashes = HashFuture { futures };
|
||||
Ok((hashes, secrets))
|
||||
}
|
||||
|
||||
/// Verify plaintext hash commitments.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `vm` - The virtual machine.
|
||||
/// * `transcript_refs` - The transcript references.
|
||||
pub(crate) fn verify(
|
||||
&self,
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
transcript_refs: &TranscriptRefs,
|
||||
) -> Result<HashFuture, HashCommitError> {
|
||||
let (hash_refs, blinders) = commit(vm, &self.ranges, Role::Verifier, transcript_refs)?;
|
||||
|
||||
let mut futures = Vec::new();
|
||||
|
||||
for ((range, hash_ref), blinder) in self.ranges.iter().zip(hash_refs).zip(blinders) {
|
||||
vm.commit(blinder)?;
|
||||
|
||||
let hash_fut = vm.decode(Vector::<U8>::from(hash_ref))?;
|
||||
futures.push((range.clone(), hash_fut))
|
||||
}
|
||||
|
||||
let hashes = HashFuture { futures };
|
||||
Ok(hashes)
|
||||
}
|
||||
}
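// The main difference between `prove` and `verify` above is the blinder
// handling: the prover samples a fresh random blinder and assigns it as a
// private input, while the verifier only commits its blind reference. The
// prover additionally keeps a `PlaintextHashSecret` per range so the
// commitments can be opened later.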
|
||||
|
||||
/// Commit plaintext hashes of the transcript.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn commit(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
ranges: &[HashRange],
|
||||
role: Role,
|
||||
refs: &TranscriptRefs,
|
||||
) -> Result<(Vec<Array<U8, 32>>, Vec<Vector<U8>>), HashCommitError> {
|
||||
let mut hashers = HashMap::new();
|
||||
let mut hash_refs = Vec::new();
|
||||
let mut blinders = Vec::new();
|
||||
|
||||
for HashRange {
|
||||
direction,
|
||||
range,
|
||||
id,
|
||||
} in ranges.iter()
|
||||
{
|
||||
let blinder = vm.alloc_vec::<U8>(16)?;
|
||||
match role {
|
||||
Role::Prover => vm.mark_private(blinder)?,
|
||||
Role::Verifier => vm.mark_blind(blinder)?,
|
||||
}
|
||||
|
||||
let hash = match *id {
|
||||
HashAlgId::SHA256 => {
|
||||
let mut hasher = if let Some(hasher) = hashers.get(id).cloned() {
|
||||
hasher
|
||||
} else {
|
||||
let hasher = Sha256::new_with_init(vm).map_err(HashCommitError::hasher)?;
|
||||
hashers.insert(id, hasher.clone());
|
||||
hasher
|
||||
};
|
||||
|
||||
for plaintext in refs.get(*direction, range) {
|
||||
hasher.update(&plaintext);
|
||||
}
|
||||
|
||||
hasher.update(&blinder);
|
||||
hasher.finalize(vm).map_err(HashCommitError::hasher)?
|
||||
}
|
||||
id => {
|
||||
return Err(HashCommitError::unsupported_alg(id));
|
||||
}
|
||||
};
|
||||
|
||||
hash_refs.push(hash);
|
||||
blinders.push(blinder);
|
||||
}
|
||||
|
||||
Ok((hash_refs, blinders))
|
||||
}
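// Commitment shape produced by `commit` above (see also the test module below):
// for each committed range the VM hashes the referenced plaintext followed by a
// 16-byte blinder, i.e. SHA-256(plaintext || blinder), and returns the 32-byte
// hash reference together with the blinder reference.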
|
||||
|
||||
/// Future which will resolve to the committed hash values.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct HashFuture {
|
||||
futures: Vec<(HashRange, DecodeFutureTyped<BitVec, Vec<u8>>)>,
|
||||
}
|
||||
|
||||
impl HashFuture {
|
||||
/// Tries to receive the value, returning an error if the value is not
|
||||
/// ready.
|
||||
pub(crate) fn try_recv(self) -> Result<Vec<PlaintextHash>, HashCommitError> {
|
||||
let mut output = Vec::new();
|
||||
for (direction, idx, alg, mut fut) in self.futs {
|
||||
|
||||
for (hash_range, mut fut) in self.futures {
|
||||
let hash = fut
|
||||
.try_recv()
|
||||
.map_err(|_| HashCommitError::decode())?
|
||||
.ok_or_else(HashCommitError::decode)?;
|
||||
|
||||
output.push(PlaintextHash {
|
||||
direction,
|
||||
idx,
|
||||
direction: hash_range.direction,
|
||||
idx: hash_range.range,
|
||||
hash: TypedHash {
|
||||
alg,
|
||||
alg: hash_range.id,
|
||||
value: Hash::try_from(hash).map_err(HashCommitError::convert)?,
|
||||
},
|
||||
});
|
||||
@@ -56,98 +194,21 @@ impl HashCommitFuture {
|
||||
}
|
||||
}
|
||||
|
||||
/// Prove plaintext hash commitments.
|
||||
pub(crate) fn prove_hash(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
refs: &TranscriptRefs,
|
||||
idxs: impl IntoIterator<Item = (Direction, Idx, HashAlgId)>,
|
||||
) -> Result<(HashCommitFuture, Vec<PlaintextHashSecret>), HashCommitError> {
|
||||
let mut futs = Vec::new();
|
||||
let mut secrets = Vec::new();
|
||||
for (direction, idx, alg, hash_ref, blinder_ref) in
|
||||
hash_commit_inner(vm, Role::Prover, refs, idxs)?
|
||||
{
|
||||
let blinder: Blinder = rand::random();
|
||||
#[derive(Debug, Clone)]
|
||||
struct HashRange {
|
||||
direction: Direction,
|
||||
range: RangeSet<usize>,
|
||||
id: HashAlgId,
|
||||
}
|
||||
|
||||
vm.assign(blinder_ref, blinder.as_bytes().to_vec())?;
|
||||
vm.commit(blinder_ref)?;
|
||||
|
||||
let hash_fut = vm.decode(Vector::<U8>::from(hash_ref))?;
|
||||
|
||||
futs.push((direction, idx.clone(), alg, hash_fut));
|
||||
secrets.push(PlaintextHashSecret {
|
||||
impl HashRange {
|
||||
fn new(direction: Direction, range: RangeSet<usize>, id: HashAlgId) -> Self {
|
||||
Self {
|
||||
direction,
|
||||
idx,
|
||||
blinder,
|
||||
alg,
|
||||
});
|
||||
}
|
||||
|
||||
Ok((HashCommitFuture { futs }, secrets))
|
||||
}
|
||||
|
||||
/// Verify plaintext hash commitments.
|
||||
pub(crate) fn verify_hash(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
refs: &TranscriptRefs,
|
||||
idxs: impl IntoIterator<Item = (Direction, Idx, HashAlgId)>,
|
||||
) -> Result<HashCommitFuture, HashCommitError> {
|
||||
let mut futs = Vec::new();
|
||||
for (direction, idx, alg, hash_ref, blinder_ref) in
|
||||
hash_commit_inner(vm, Role::Verifier, refs, idxs)?
|
||||
{
|
||||
vm.commit(blinder_ref)?;
|
||||
|
||||
let hash_fut = vm.decode(Vector::<U8>::from(hash_ref))?;
|
||||
|
||||
futs.push((direction, idx, alg, hash_fut));
|
||||
}
|
||||
|
||||
Ok(HashCommitFuture { futs })
|
||||
}
|
||||
|
||||
/// Commit plaintext hashes of the transcript.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn hash_commit_inner(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
role: Role,
|
||||
refs: &TranscriptRefs,
|
||||
idxs: impl IntoIterator<Item = (Direction, Idx, HashAlgId)>,
|
||||
) -> Result<Vec<(Direction, Idx, HashAlgId, Array<U8, 32>, Vector<U8>)>, HashCommitError> {
|
||||
let mut output = Vec::new();
|
||||
let mut hashers = HashMap::new();
|
||||
for (direction, idx, alg) in idxs {
|
||||
let blinder = vm.alloc_vec::<U8>(16)?;
|
||||
match role {
|
||||
Role::Prover => vm.mark_private(blinder)?,
|
||||
Role::Verifier => vm.mark_blind(blinder)?,
|
||||
range,
|
||||
id,
|
||||
}
|
||||
|
||||
let hash = match alg {
|
||||
HashAlgId::SHA256 => {
|
||||
let mut hasher = if let Some(hasher) = hashers.get(&alg).cloned() {
|
||||
hasher
|
||||
} else {
|
||||
let hasher = Sha256::new_with_init(vm).map_err(HashCommitError::hasher)?;
|
||||
hashers.insert(alg, hasher.clone());
|
||||
hasher
|
||||
};
|
||||
|
||||
for plaintext in refs.get(direction, &idx).expect("plaintext refs are valid") {
|
||||
hasher.update(&plaintext);
|
||||
}
|
||||
hasher.update(&blinder);
|
||||
hasher.finalize(vm).map_err(HashCommitError::hasher)?
|
||||
}
|
||||
alg => {
|
||||
return Err(HashCommitError::unsupported_alg(alg));
|
||||
}
|
||||
};
|
||||
|
||||
output.push((direction, idx, alg, hash, blinder));
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Error type for hash commitments.
|
||||
@@ -196,3 +257,148 @@ impl From<VmError> for HashCommitError {
|
||||
Self(ErrorRepr::Vm(value))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::{
|
||||
Role,
|
||||
commit::{hash::PlaintextHasher, transcript::TranscriptRefs},
|
||||
};
|
||||
use mpz_common::context::test_st_context;
|
||||
use mpz_garble_core::Delta;
|
||||
use mpz_memory_core::{
|
||||
MemoryExt, Vector, ViewExt,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_ot::ideal::rcot::{IdealRCOTReceiver, IdealRCOTSender, ideal_rcot};
|
||||
use mpz_vm_core::{Execute, Vm};
|
||||
use mpz_zk::{Prover, ProverConfig, Verifier, VerifierConfig};
|
||||
use rand::{Rng, SeedableRng, rngs::StdRng};
|
||||
use rangeset::{RangeSet, UnionMut};
|
||||
use rstest::{fixture, rstest};
|
||||
use sha2::Digest;
|
||||
use tlsn_core::{hash::HashAlgId, transcript::Direction};
|
||||
|
||||
#[rstest]
|
||||
#[tokio::test]
|
||||
async fn test_hasher() {
|
||||
let mut sent1 = RangeSet::default();
|
||||
sent1.union_mut(&(1..6));
|
||||
sent1.union_mut(&(11..16));
|
||||
|
||||
let mut sent2 = RangeSet::default();
|
||||
sent2.union_mut(&(22..25));
|
||||
|
||||
let mut recv = RangeSet::default();
|
||||
recv.union_mut(&(20..25));
|
||||
|
||||
let hash_ranges = [
|
||||
(Direction::Sent, sent1, HashAlgId::SHA256),
|
||||
(Direction::Sent, sent2, HashAlgId::SHA256),
|
||||
(Direction::Received, recv, HashAlgId::SHA256),
|
||||
];
|
||||
|
||||
let mut refs_prover = TranscriptRefs::new(1000, 1000);
|
||||
let mut refs_verifier = TranscriptRefs::new(1000, 1000);
|
||||
let values = [
|
||||
b"abcde".to_vec(),
|
||||
b"vwxyz".to_vec(),
|
||||
b"xxx".to_vec(),
|
||||
b"12345".to_vec(),
|
||||
];
|
||||
|
||||
let (mut ctx_p, mut ctx_v) = test_st_context(8);
|
||||
let (mut prover, mut verifier) = vms();
|
||||
|
||||
let mut values_iter = values.iter();
|
||||
|
||||
for (direction, idx, _) in hash_ranges.iter() {
|
||||
for range in idx.iter_ranges() {
|
||||
let value = values_iter.next().unwrap();
|
||||
|
||||
let ref_prover = assign(Role::Prover, &mut prover, value.clone());
|
||||
refs_prover.add(*direction, &range, ref_prover);
|
||||
|
||||
let ref_verifier = assign(Role::Verifier, &mut verifier, value.clone());
|
||||
refs_verifier.add(*direction, &range, ref_verifier);
|
||||
}
|
||||
}
|
||||
|
||||
let hasher_prover = PlaintextHasher::new(hash_ranges.iter());
|
||||
let hasher_verifier = PlaintextHasher::new(hash_ranges.iter());
|
||||
|
||||
tokio::try_join!(
|
||||
prover.execute_all(&mut ctx_p),
|
||||
verifier.execute_all(&mut ctx_v)
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let (prover_hashes, prover_secrets) =
|
||||
hasher_prover.prove(&mut prover, &refs_prover).unwrap();
|
||||
let verifier_hashes = hasher_verifier
|
||||
.verify(&mut verifier, &refs_verifier)
|
||||
.unwrap();
|
||||
|
||||
tokio::try_join!(
|
||||
prover.execute_all(&mut ctx_p),
|
||||
verifier.execute_all(&mut ctx_v)
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let prover_hashes = prover_hashes.try_recv().unwrap();
|
||||
let verifier_hashes = verifier_hashes.try_recv().unwrap();
|
||||
|
||||
assert_eq!(prover_hashes, verifier_hashes);
|
||||
|
||||
let values_per_commitment = [b"abcdevwxyz".to_vec(), b"xxx".to_vec(), b"12345".to_vec()];
|
||||
|
||||
for ((value, hash), secret) in values_per_commitment
|
||||
.iter()
|
||||
.zip(prover_hashes)
|
||||
.zip(prover_secrets)
|
||||
{
|
||||
let blinder = secret.blinder.as_bytes();
|
||||
let mut blinded_value = value.clone();
|
||||
blinded_value.extend_from_slice(blinder);
|
||||
let expected_hash = sha256(&blinded_value);
|
||||
|
||||
let hash: Vec<u8> = hash.hash.value.into();
|
||||
|
||||
assert_eq!(expected_hash, hash);
|
||||
}
|
||||
}
|
||||
|
||||
fn assign(role: Role, vm: &mut dyn Vm<Binary>, value: Vec<u8>) -> Vector<U8> {
|
||||
let reference: Vector<U8> = vm.alloc_vec(value.len()).unwrap();
|
||||
|
||||
if let Role::Prover = role {
|
||||
vm.mark_private(reference).unwrap();
|
||||
vm.assign(reference, value).unwrap();
|
||||
} else {
|
||||
vm.mark_blind(reference).unwrap();
|
||||
}
|
||||
|
||||
vm.commit(reference).unwrap();
|
||||
|
||||
reference
|
||||
}
|
||||
|
||||
fn sha256(data: &[u8]) -> Vec<u8> {
|
||||
let mut hasher = sha2::Sha256::default();
|
||||
hasher.update(data);
|
||||
hasher.finalize().as_slice().to_vec()
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn vms() -> (Prover<IdealRCOTReceiver>, Verifier<IdealRCOTSender>) {
|
||||
let mut rng = StdRng::seed_from_u64(0);
|
||||
let delta = Delta::random(&mut rng);
|
||||
|
||||
let (ot_send, ot_recv) = ideal_rcot(rng.random(), delta.into_inner());
|
||||
|
||||
let prover = Prover::new(ProverConfig::default(), ot_recv);
|
||||
let verifier = Verifier::new(VerifierConfig::default(), delta, ot_send);
|
||||
|
||||
(prover, verifier)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,212 +1,473 @@
|
||||
use mpz_memory_core::{
|
||||
MemoryExt, Vector,
|
||||
binary::{Binary, U8},
|
||||
};
|
||||
use mpz_vm_core::{Vm, VmError};
|
||||
use rangeset::Intersection;
|
||||
use tlsn_core::transcript::{Direction, Idx, PartialTranscript};
|
||||
//! Transcript reference storage.
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
use mpz_memory_core::{FromRaw, Slice, ToRaw, Vector, binary::U8};
|
||||
use rangeset::{Difference, Disjoint, RangeSet, Subset, UnionMut};
|
||||
use tlsn_core::transcript::Direction;
|
||||
|
||||
/// References to the application plaintext in the transcript.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct TranscriptRefs {
|
||||
sent: Vec<Vector<U8>>,
|
||||
recv: Vec<Vector<U8>>,
|
||||
sent: RefStorage,
|
||||
recv: RefStorage,
|
||||
}
|
||||
|
||||
impl TranscriptRefs {
|
||||
pub(crate) fn new(sent: Vec<Vector<U8>>, recv: Vec<Vector<U8>>) -> Self {
|
||||
/// Creates a new instance.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `sent_max_len` - The maximum length of the sent transcript in bytes.
/// * `recv_max_len` - The maximum length of the received transcript in bytes.
|
||||
pub(crate) fn new(sent_max_len: usize, recv_max_len: usize) -> Self {
|
||||
let sent = RefStorage::new(sent_max_len);
|
||||
let recv = RefStorage::new(recv_max_len);
|
||||
|
||||
Self { sent, recv }
|
||||
}
|
||||
|
||||
/// Returns the sent plaintext references.
|
||||
pub(crate) fn sent(&self) -> &[Vector<U8>] {
|
||||
&self.sent
|
||||
/// Adds new references to the transcript refs.
|
||||
///
|
||||
/// New transcript references are only added if none of them are already
|
||||
/// present.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
/// * `index` - The index of the transcript references.
|
||||
/// * `refs` - The new transcript refs.
|
||||
pub(crate) fn add(&mut self, direction: Direction, index: &Range<usize>, refs: Vector<U8>) {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.add(index, refs),
|
||||
Direction::Received => self.recv.add(index, refs),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the received plaintext references.
|
||||
pub(crate) fn recv(&self) -> &[Vector<U8>] {
|
||||
&self.recv
|
||||
/// Marks references of the transcript as decoded.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
/// * `index` - The index of the transcript references.
|
||||
pub(crate) fn mark_decoded(&mut self, direction: Direction, index: &RangeSet<usize>) {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.mark_decoded(index),
|
||||
Direction::Received => self.recv.mark_decoded(index),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the transcript lengths.
|
||||
pub(crate) fn len(&self) -> (usize, usize) {
|
||||
let sent = self.sent.iter().map(|v| v.len()).sum();
|
||||
let recv = self.recv.iter().map(|v| v.len()).sum();
|
||||
|
||||
(sent, recv)
|
||||
/// Returns plaintext references for some index.
|
||||
///
|
||||
/// Queries that cannot be satisfied, or can only be satisfied partially,
/// return an empty vector.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
/// * `index` - The index of the transcript references.
|
||||
pub(crate) fn get(&self, direction: Direction, index: &RangeSet<usize>) -> Vec<Vector<U8>> {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.get(index),
|
||||
Direction::Received => self.recv.get(index),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns VM references for the given direction and index, otherwise
|
||||
/// `None` if the index is out of bounds.
|
||||
pub(crate) fn get(&self, direction: Direction, idx: &Idx) -> Option<Vec<Vector<U8>>> {
|
||||
if idx.is_empty() {
|
||||
return Some(Vec::new());
|
||||
/// Computes the subset of `index` which is missing.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
/// * `index` - The index of the transcript references.
|
||||
pub(crate) fn compute_missing(
|
||||
&self,
|
||||
direction: Direction,
|
||||
index: &RangeSet<usize>,
|
||||
) -> RangeSet<usize> {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.compute_missing(index),
|
||||
Direction::Received => self.recv.compute_missing(index),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the maximum length of the transcript.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
pub(crate) fn max_len(&self, direction: Direction) -> usize {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.max_len(),
|
||||
Direction::Received => self.recv.max_len(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the decoded ranges of the transcript.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
pub(crate) fn decoded(&self, direction: Direction) -> RangeSet<usize> {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.decoded(),
|
||||
Direction::Received => self.recv.decoded(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the set ranges of the transcript.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `direction` - The direction of the transcript.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn index(&self, direction: Direction) -> RangeSet<usize> {
|
||||
match direction {
|
||||
Direction::Sent => self.sent.index(),
|
||||
Direction::Received => self.recv.index(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Inner storage for transcript references.
|
||||
///
|
||||
/// Stores transcript references by maintaining an `index` and an `offset`. The
/// offset translates from `index` to a memory location and accounts for
/// possibly non-contiguous memory locations. The storage is bit-addressed, but
/// the API works with ranges over bytes.
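///
/// Illustrative example (made-up addresses): adding the byte range `2..4`
/// backed by a vector whose raw slice starts at bit 96 records an offset of
/// `96 - 16 = 80` for bits `16..32`; a later `get` for bytes `2..4` then maps
/// those bits back to the memory slice `96..112`.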
|
||||
#[derive(Debug, Clone)]
|
||||
struct RefStorage {
|
||||
index: RangeSet<usize>,
|
||||
decoded: RangeSet<usize>,
|
||||
offset: Vec<isize>,
|
||||
max_len: usize,
|
||||
}
|
||||
|
||||
impl RefStorage {
|
||||
fn new(max_len: usize) -> Self {
|
||||
Self {
|
||||
index: RangeSet::default(),
|
||||
decoded: RangeSet::default(),
|
||||
offset: Vec::default(),
|
||||
max_len: 8 * max_len,
|
||||
}
|
||||
}
|
||||
|
||||
fn add(&mut self, index: &Range<usize>, data: Vector<U8>) {
|
||||
assert!(
|
||||
index.start < index.end,
|
||||
"Range should be valid for adding to reference storage"
|
||||
);
|
||||
assert_eq!(
|
||||
index.len(),
|
||||
data.len(),
|
||||
"Provided index and vm references should have the same length"
|
||||
);
|
||||
let bit_index = 8 * index.start..8 * index.end;
|
||||
|
||||
assert!(
|
||||
bit_index.is_disjoint(&self.index),
|
||||
"Parts of the provided index have already been computed"
|
||||
);
|
||||
assert!(
|
||||
bit_index.end <= self.max_len,
|
||||
"Provided index should be smaller than max_len"
|
||||
);
|
||||
|
||||
if bit_index.end > self.offset.len() {
|
||||
self.offset.resize(bit_index.end, 0);
|
||||
}
|
||||
|
||||
let refs = match direction {
|
||||
Direction::Sent => &self.sent,
|
||||
Direction::Received => &self.recv,
|
||||
};
|
||||
let mem_address = data.to_raw().ptr().as_usize() as isize;
|
||||
let offset = mem_address - bit_index.start as isize;
|
||||
|
||||
// Computes the transcript range for each reference.
|
||||
let mut start = 0;
|
||||
let mut slice_iter = refs.iter().map(move |slice| {
|
||||
let out = (slice, start..start + slice.len());
|
||||
start += slice.len();
|
||||
out
|
||||
});
|
||||
self.index.union_mut(&bit_index);
|
||||
self.offset[bit_index].fill(offset);
|
||||
}
|
||||
|
||||
let mut slices = Vec::new();
|
||||
let (mut slice, mut slice_range) = slice_iter.next()?;
|
||||
for range in idx.iter_ranges() {
|
||||
loop {
|
||||
if let Some(intersection) = slice_range.intersection(&range) {
|
||||
let start = intersection.start - slice_range.start;
|
||||
let end = intersection.end - slice_range.start;
|
||||
slices.push(slice.get(start..end).expect("range should be in bounds"));
|
||||
fn mark_decoded(&mut self, index: &RangeSet<usize>) {
|
||||
let bit_index = to_bit_index(index);
|
||||
self.decoded.union_mut(&bit_index);
|
||||
}
|
||||
|
||||
fn get(&self, index: &RangeSet<usize>) -> Vec<Vector<U8>> {
|
||||
let bit_index = to_bit_index(index);
|
||||
|
||||
if bit_index.is_empty() || !bit_index.is_subset(&self.index) {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
// Partition the rangeset into ranges mapping to possibly disjoint memory locations.
|
||||
//
|
||||
// If the offset changes during iteration of a single range, it means that the
|
||||
// backing memory is non-contiguous and we need to split that range.
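//
// Worked example (made-up offsets): if bits 16..32 were added with offset 80
// and bits 32..48 with offset 200, a query for bits 16..48 is split into the
// memory slices 96..112 and 232..248, because the offset changes at bit 32.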
|
||||
let mut transcript_refs = Vec::new();
|
||||
|
||||
for idx in bit_index.iter_ranges() {
|
||||
let mut start = idx.start;
|
||||
let mut end = idx.start;
|
||||
let mut offset = self.offset[start];
|
||||
|
||||
for k in idx {
|
||||
let next_offset = self.offset[k];
|
||||
if next_offset == offset {
|
||||
end += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Proceed to next range if the current slice extends beyond. Otherwise, proceed
|
||||
// to the next slice.
|
||||
if range.end <= slice_range.end {
|
||||
break;
|
||||
} else {
|
||||
(slice, slice_range) = slice_iter.next()?;
|
||||
}
|
||||
let len = end - start;
|
||||
|
||||
let ptr = (start as isize + offset) as usize;
|
||||
let mem_ref = Slice::from_range_unchecked(ptr..ptr + len);
|
||||
transcript_refs.push(Vector::from_raw(mem_ref));
|
||||
|
||||
start = k;
|
||||
end = k + 1;
|
||||
offset = next_offset;
|
||||
}
|
||||
let len = end - start;
|
||||
|
||||
let ptr = (start as isize + offset) as usize;
|
||||
let mem_ref = Slice::from_range_unchecked(ptr..ptr + len);
|
||||
|
||||
transcript_refs.push(Vector::from_raw(mem_ref));
|
||||
}
|
||||
|
||||
Some(slices)
|
||||
transcript_refs
|
||||
}
|
||||
|
||||
fn compute_missing(&self, index: &RangeSet<usize>) -> RangeSet<usize> {
|
||||
let byte_index = to_byte_index(&self.index);
|
||||
index.difference(&byte_index)
|
||||
}
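// For instance (values from the `test_storage_compute_missing` test below):
// with stored byte ranges {0..8, 12..20, 22..26, 30..38}, querying
// {6..12, 18..21, 22..25, 50..60} reports {8..12, 20..21, 50..60} as missing.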
|
||||
|
||||
fn decoded(&self) -> RangeSet<usize> {
|
||||
to_byte_index(&self.decoded)
|
||||
}
|
||||
|
||||
fn max_len(&self) -> usize {
|
||||
self.max_len / 8
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn index(&self) -> RangeSet<usize> {
|
||||
to_byte_index(&self.index)
|
||||
}
|
||||
}
|
||||
|
||||
/// Decodes the transcript.
|
||||
pub(crate) fn decode_transcript(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
sent: &Idx,
|
||||
recv: &Idx,
|
||||
refs: &TranscriptRefs,
|
||||
) -> Result<(), VmError> {
|
||||
let sent_refs = refs.get(Direction::Sent, sent).expect("index is in bounds");
|
||||
let recv_refs = refs
|
||||
.get(Direction::Received, recv)
|
||||
.expect("index is in bounds");
|
||||
fn to_bit_index(index: &RangeSet<usize>) -> RangeSet<usize> {
|
||||
let mut bit_index = RangeSet::default();
|
||||
|
||||
for slice in sent_refs.into_iter().chain(recv_refs) {
|
||||
// Drop the future, we don't need it.
|
||||
drop(vm.decode(slice)?);
|
||||
for r in index.iter_ranges() {
|
||||
bit_index.union_mut(&(8 * r.start..8 * r.end));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
bit_index
|
||||
}
|
||||
|
||||
/// Verifies a partial transcript.
|
||||
pub(crate) fn verify_transcript(
|
||||
vm: &mut dyn Vm<Binary>,
|
||||
transcript: &PartialTranscript,
|
||||
refs: &TranscriptRefs,
|
||||
) -> Result<(), InconsistentTranscript> {
|
||||
let sent_refs = refs
|
||||
.get(Direction::Sent, transcript.sent_authed())
|
||||
.expect("index is in bounds");
|
||||
let recv_refs = refs
|
||||
.get(Direction::Received, transcript.received_authed())
|
||||
.expect("index is in bounds");
|
||||
fn to_byte_index(index: &RangeSet<usize>) -> RangeSet<usize> {
|
||||
let mut byte_index = RangeSet::default();
|
||||
|
||||
let mut authenticated_data = Vec::new();
|
||||
for data in sent_refs.into_iter().chain(recv_refs) {
|
||||
let plaintext = vm
|
||||
.get(data)
|
||||
.expect("reference is valid")
|
||||
.expect("plaintext is decoded");
|
||||
authenticated_data.extend_from_slice(&plaintext);
|
||||
for r in index.iter_ranges() {
|
||||
let start = r.start;
|
||||
let end = r.end;
|
||||
|
||||
assert!(
|
||||
start.trailing_zeros() >= 3,
|
||||
"start range should be divisible by 8"
|
||||
);
|
||||
assert!(
|
||||
end.trailing_zeros() >= 3,
|
||||
"end range should be divisible by 8"
|
||||
);
|
||||
|
||||
let start = start >> 3;
|
||||
let end = end >> 3;
|
||||
|
||||
byte_index.union_mut(&(start..end));
|
||||
}
|
||||
|
||||
let mut purported_data = Vec::with_capacity(authenticated_data.len());
|
||||
for range in transcript.sent_authed().iter_ranges() {
|
||||
purported_data.extend_from_slice(&transcript.sent_unsafe()[range]);
|
||||
}
|
||||
|
||||
for range in transcript.received_authed().iter_ranges() {
|
||||
purported_data.extend_from_slice(&transcript.received_unsafe()[range]);
|
||||
}
|
||||
|
||||
if purported_data != authenticated_data {
|
||||
return Err(InconsistentTranscript {});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
byte_index
|
||||
}
|
||||
|
||||
/// Error for [`verify_transcript`].
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("inconsistent transcript")]
|
||||
pub(crate) struct InconsistentTranscript {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::TranscriptRefs;
|
||||
use mpz_memory_core::{FromRaw, Slice, Vector, binary::U8};
|
||||
use rangeset::RangeSet;
|
||||
use crate::commit::transcript::RefStorage;
|
||||
use mpz_memory_core::{FromRaw, Slice, ToRaw, Vector, binary::U8};
|
||||
use rangeset::{RangeSet, UnionMut};
|
||||
use rstest::{fixture, rstest};
|
||||
use std::ops::Range;
|
||||
use tlsn_core::transcript::{Direction, Idx};
|
||||
|
||||
// TRANSCRIPT_REFS:
|
||||
//
|
||||
// 48..96 -> 6 slots
|
||||
// 112..176 -> 8 slots
|
||||
// 240..288 -> 6 slots
|
||||
// 352..392 -> 5 slots
|
||||
// 440..480 -> 5 slots
|
||||
const TRANSCRIPT_REFS: &[Range<usize>] = &[48..96, 112..176, 240..288, 352..392, 440..480];
|
||||
#[rstest]
|
||||
fn test_storage_add(
|
||||
max_len: usize,
|
||||
ranges: [Range<usize>; 6],
|
||||
offsets: [isize; 6],
|
||||
storage: RefStorage,
|
||||
) {
|
||||
let bit_ranges: Vec<Range<usize>> = ranges.iter().map(|r| 8 * r.start..8 * r.end).collect();
|
||||
let bit_offsets: Vec<isize> = offsets.iter().map(|o| 8 * o).collect();
|
||||
|
||||
const IDXS: &[Range<usize>] = &[0..4, 5..10, 14..16, 16..28];
|
||||
let mut expected_index: RangeSet<usize> = RangeSet::default();
|
||||
|
||||
// 1. Take slots 0..4, 4 slots -> 48..80 (4)
|
||||
// 2. Take slots 5..10, 5 slots -> 88..96 (1) + 112..144 (4)
|
||||
// 3. Take slots 14..16, 2 slots -> 240..256 (2)
|
||||
// 4. Take slots 16..28, 12 slots -> 256..288 (4) + 352..392 (5) + 440..464 (3)
|
||||
//
|
||||
// 5. Merge slots 240..256 and 256..288 => 240..288 and get EXPECTED_REFS
|
||||
const EXPECTED_REFS: &[Range<usize>] =
|
||||
&[48..80, 88..96, 112..144, 240..288, 352..392, 440..464];
|
||||
expected_index.union_mut(&bit_ranges[0]);
|
||||
expected_index.union_mut(&bit_ranges[1]);
|
||||
|
||||
#[test]
|
||||
fn test_transcript_refs_get() {
|
||||
let transcript_refs: Vec<Vector<U8>> = TRANSCRIPT_REFS
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|range| Vector::from_raw(Slice::from_range_unchecked(range)))
|
||||
.collect();
|
||||
expected_index.union_mut(&bit_ranges[2]);
|
||||
expected_index.union_mut(&bit_ranges[3]);
|
||||
|
||||
let transcript_refs = TranscriptRefs {
|
||||
sent: transcript_refs.clone(),
|
||||
recv: transcript_refs,
|
||||
};
|
||||
expected_index.union_mut(&bit_ranges[4]);
|
||||
expected_index.union_mut(&bit_ranges[5]);
|
||||
assert_eq!(storage.index, expected_index);
|
||||
|
||||
let vm_refs = transcript_refs
|
||||
.get(Direction::Sent, &idx_fixture())
|
||||
.unwrap();
|
||||
let end = expected_index.end().unwrap();
|
||||
let mut expected_offset = vec![0_isize; end];
|
||||
|
||||
let expected_refs: Vec<Vector<U8>> = EXPECTED_REFS
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|range| Vector::from_raw(Slice::from_range_unchecked(range)))
|
||||
.collect();
|
||||
expected_offset[bit_ranges[0].clone()].fill(bit_offsets[0]);
|
||||
expected_offset[bit_ranges[1].clone()].fill(bit_offsets[1]);
|
||||
|
||||
assert_eq!(
|
||||
vm_refs.len(),
|
||||
expected_refs.len(),
|
||||
"Length of actual and expected refs are not equal"
|
||||
);
|
||||
expected_offset[bit_ranges[2].clone()].fill(bit_offsets[2]);
|
||||
expected_offset[bit_ranges[3].clone()].fill(bit_offsets[3]);
|
||||
|
||||
for (&expected, actual) in expected_refs.iter().zip(vm_refs) {
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
expected_offset[bit_ranges[4].clone()].fill(bit_offsets[4]);
|
||||
expected_offset[bit_ranges[5].clone()].fill(bit_offsets[5]);
|
||||
|
||||
assert_eq!(storage.offset, expected_offset);
|
||||
|
||||
assert_eq!(storage.decoded, RangeSet::default());
|
||||
assert_eq!(storage.max_len, 8 * max_len);
|
||||
}
|
||||
|
||||
fn idx_fixture() -> Idx {
|
||||
let set = RangeSet::from(IDXS);
|
||||
Idx::builder().union(&set).build()
|
||||
#[rstest]
|
||||
fn test_storage_get(ranges: [Range<usize>; 6], offsets: [isize; 6], storage: RefStorage) {
|
||||
let mut index = RangeSet::default();
|
||||
ranges.iter().for_each(|r| index.union_mut(r));
|
||||
|
||||
let data = storage.get(&index);
|
||||
|
||||
let mut data_recovered = Vec::new();
|
||||
for (r, o) in ranges.iter().zip(offsets) {
|
||||
data_recovered.push(vec(r.start as isize + o..r.end as isize + o));
|
||||
}
|
||||
|
||||
// Merge possibly adjacent vectors.
|
||||
//
|
||||
// Two vectors are adjacent if
|
||||
//
|
||||
// - the vectors are adjacent in memory, and
// - the transcript ranges of those vectors are adjacent as well.
|
||||
let mut range_iter = ranges.iter();
|
||||
let mut vec_iter = data_recovered.iter();
|
||||
let mut data_expected = Vec::new();
|
||||
|
||||
let mut current_vec = vec_iter.next().unwrap().to_raw().to_range();
|
||||
let mut current_range = range_iter.next().unwrap();
|
||||
|
||||
for (r, v) in range_iter.zip(vec_iter) {
|
||||
let v_range = v.to_raw().to_range();
|
||||
let start = v_range.start;
|
||||
let end = v_range.end;
|
||||
|
||||
if current_vec.end == start && current_range.end == r.start {
|
||||
current_vec.end = end;
|
||||
} else {
|
||||
let v = Vector::<U8>::from_raw(Slice::from_range_unchecked(current_vec));
|
||||
data_expected.push(v);
|
||||
current_vec = start..end;
|
||||
current_range = r;
|
||||
}
|
||||
}
|
||||
let v = Vector::<U8>::from_raw(Slice::from_range_unchecked(current_vec));
|
||||
data_expected.push(v);
|
||||
assert_eq!(data, data_expected);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_storage_compute_missing(storage: RefStorage) {
|
||||
let mut range = RangeSet::default();
|
||||
range.union_mut(&(6..12));
|
||||
range.union_mut(&(18..21));
|
||||
range.union_mut(&(22..25));
|
||||
range.union_mut(&(50..60));
|
||||
|
||||
let missing = storage.compute_missing(&range);
|
||||
|
||||
let mut missing_expected = RangeSet::default();
|
||||
missing_expected.union_mut(&(8..12));
|
||||
missing_expected.union_mut(&(20..21));
|
||||
missing_expected.union_mut(&(50..60));
|
||||
|
||||
assert_eq!(missing, missing_expected);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_mark_decoded(mut storage: RefStorage) {
|
||||
let mut range = RangeSet::default();
|
||||
|
||||
range.union_mut(&(14..17));
|
||||
range.union_mut(&(30..37));
|
||||
|
||||
storage.mark_decoded(&range);
|
||||
let decoded = storage.decoded();
|
||||
|
||||
assert_eq!(range, decoded);
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn max_len() -> usize {
|
||||
1000
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn ranges() -> [Range<usize>; 6] {
|
||||
let r1 = 0..5;
|
||||
let r2 = 5..8;
|
||||
let r3 = 12..20;
|
||||
let r4 = 22..26;
|
||||
let r5 = 30..35;
|
||||
let r6 = 35..38;
|
||||
|
||||
[r1, r2, r3, r4, r5, r6]
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn offsets() -> [isize; 6] {
|
||||
[7, 9, 20, 18, 30, 30]
|
||||
}
|
||||
|
||||
// expected memory ranges: 8 * ranges + 8 * offsets
// 1. 56..96 does not merge with the next one, because they are not adjacent in memory
// 2. 112..136
// 3. 256..320 does not merge with the next one: adjacent in memory, but the
// transcript ranges themselves are not
// 4. 320..352
// 5. 480..520 merges with the next one
// 6. 520..544
|
||||
//
|
||||
//
|
||||
// 1. 56..96, length: 5
|
||||
// 2. 112..136, length: 3
|
||||
// 3. 256..320, length: 8
|
||||
// 4. 320..352, length: 4
|
||||
// 5. 480..544, length: 8
|
||||
#[fixture]
|
||||
fn storage(max_len: usize, ranges: [Range<usize>; 6], offsets: [isize; 6]) -> RefStorage {
|
||||
let [r1, r2, r3, r4, r5, r6] = ranges;
|
||||
let [o1, o2, o3, o4, o5, o6] = offsets;
|
||||
|
||||
let mut storage = RefStorage::new(max_len);
|
||||
storage.add(&r1, vec(r1.start as isize + o1..r1.end as isize + o1));
|
||||
storage.add(&r2, vec(r2.start as isize + o2..r2.end as isize + o2));
|
||||
|
||||
storage.add(&r3, vec(r3.start as isize + o3..r3.end as isize + o3));
|
||||
storage.add(&r4, vec(r4.start as isize + o4..r4.end as isize + o4));
|
||||
|
||||
storage.add(&r5, vec(r5.start as isize + o5..r5.end as isize + o5));
|
||||
storage.add(&r6, vec(r6.start as isize + o6..r6.end as isize + o6));
|
||||
|
||||
storage
|
||||
}
|
||||
|
||||
fn vec(range: Range<isize>) -> Vector<U8> {
|
||||
let range = 8 * range.start as usize..8 * range.end as usize;
|
||||
Vector::from_raw(Slice::from_range_unchecked(range))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -233,15 +233,17 @@ impl ProtocolConfigValidator {
|
||||
/// situations.
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
|
||||
pub enum NetworkSetting {
|
||||
/// Prefers a bandwidth-heavy protocol.
|
||||
/// Reduces network round-trips at the expense of consuming more network
|
||||
/// bandwidth.
|
||||
Bandwidth,
|
||||
/// Prefers a latency-heavy protocol.
|
||||
/// Reduces network bandwidth utilization at the expense of more network
|
||||
/// round-trips.
|
||||
Latency,
|
||||
}
|
||||
|
||||
impl Default for NetworkSetting {
|
||||
fn default() -> Self {
|
||||
Self::Bandwidth
|
||||
Self::Latency
|
||||
}
|
||||
}
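// `NetworkSetting::default()` now resolves to `NetworkSetting::Latency`, i.e.
// the variant that trades extra round-trips for lower bandwidth usage.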
|
||||
|
||||
|
||||
@@ -1,248 +0,0 @@
|
||||
//! Encoding commitment protocol.
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
use mpz_common::Context;
|
||||
use mpz_memory_core::{
|
||||
Vector,
|
||||
binary::U8,
|
||||
correlated::{Delta, Key, Mac},
|
||||
};
|
||||
use rand::Rng;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serio::{SinkExt, stream::IoStreamExt};
|
||||
use tlsn_core::{
|
||||
hash::HashAlgorithm,
|
||||
transcript::{
|
||||
Direction, Idx,
|
||||
encoding::{
|
||||
Encoder, EncoderSecret, EncodingCommitment, EncodingProvider, EncodingProviderError,
|
||||
EncodingTree, EncodingTreeError, new_encoder,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
use crate::commit::transcript::TranscriptRefs;
|
||||
|
||||
/// Number of encoding bytes per plaintext byte.
|
||||
const ENCODING_SIZE: usize = 128;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Encodings {
|
||||
sent: Vec<u8>,
|
||||
recv: Vec<u8>,
|
||||
}
|
||||
|
||||
/// Transfers the encodings using the provided seed and keys.
|
||||
///
|
||||
/// The keys must be consistent with the global delta used in the encodings.
|
||||
pub(crate) async fn transfer<'a>(
|
||||
ctx: &mut Context,
|
||||
refs: &TranscriptRefs,
|
||||
delta: &Delta,
|
||||
f: impl Fn(Vector<U8>) -> &'a [Key],
|
||||
) -> Result<EncodingCommitment, EncodingError> {
|
||||
let secret = EncoderSecret::new(rand::rng().random(), delta.as_block().to_bytes());
|
||||
let encoder = new_encoder(&secret);
|
||||
|
||||
let sent_keys: Vec<u8> = refs
|
||||
.sent()
|
||||
.iter()
|
||||
.copied()
|
||||
.flat_map(&f)
|
||||
.flat_map(|key| key.as_block().as_bytes())
|
||||
.copied()
|
||||
.collect();
|
||||
let recv_keys: Vec<u8> = refs
|
||||
.recv()
|
||||
.iter()
|
||||
.copied()
|
||||
.flat_map(&f)
|
||||
.flat_map(|key| key.as_block().as_bytes())
|
||||
.copied()
|
||||
.collect();
|
||||
|
||||
assert_eq!(sent_keys.len() % ENCODING_SIZE, 0);
|
||||
assert_eq!(recv_keys.len() % ENCODING_SIZE, 0);
|
||||
|
||||
let mut sent_encoding = Vec::with_capacity(sent_keys.len());
|
||||
let mut recv_encoding = Vec::with_capacity(recv_keys.len());
|
||||
|
||||
encoder.encode_range(
|
||||
Direction::Sent,
|
||||
0..sent_keys.len() / ENCODING_SIZE,
|
||||
&mut sent_encoding,
|
||||
);
|
||||
encoder.encode_range(
|
||||
Direction::Received,
|
||||
0..recv_keys.len() / ENCODING_SIZE,
|
||||
&mut recv_encoding,
|
||||
);
|
||||
|
||||
sent_encoding
|
||||
.iter_mut()
|
||||
.zip(sent_keys)
|
||||
.for_each(|(enc, key)| *enc ^= key);
|
||||
recv_encoding
|
||||
.iter_mut()
|
||||
.zip(recv_keys)
|
||||
.for_each(|(enc, key)| *enc ^= key);
|
||||
|
||||
// Set the frame limit, adding some extra bytes of cushion.
|
||||
let (sent, recv) = refs.len();
|
||||
let frame_limit = ENCODING_SIZE * (sent + recv) + ctx.io().limit();
|
||||
|
||||
ctx.io_mut()
|
||||
.with_limit(frame_limit)
|
||||
.send(Encodings {
|
||||
sent: sent_encoding,
|
||||
recv: recv_encoding,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let root = ctx.io_mut().expect_next().await?;
|
||||
ctx.io_mut().send(secret.clone()).await?;
|
||||
|
||||
Ok(EncodingCommitment {
|
||||
root,
|
||||
secret: secret.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Receives the encodings using the provided MACs.
|
||||
///
|
||||
/// The MACs must be consistent with the global delta used in the encodings.
|
||||
pub(crate) async fn receive<'a>(
|
||||
ctx: &mut Context,
|
||||
hasher: &(dyn HashAlgorithm + Send + Sync),
|
||||
refs: &TranscriptRefs,
|
||||
f: impl Fn(Vector<U8>) -> &'a [Mac],
|
||||
idxs: impl IntoIterator<Item = &(Direction, Idx)>,
|
||||
) -> Result<(EncodingCommitment, EncodingTree), EncodingError> {
|
||||
// Set the frame limit, adding some extra bytes of cushion.
|
||||
let (sent, recv) = refs.len();
|
||||
let frame_limit = ENCODING_SIZE * (sent + recv) + ctx.io().limit();
|
||||
|
||||
let Encodings { mut sent, mut recv } =
|
||||
ctx.io_mut().with_limit(frame_limit).expect_next().await?;
|
||||
|
||||
let sent_macs: Vec<u8> = refs
|
||||
.sent()
|
||||
.iter()
|
||||
.copied()
|
||||
.flat_map(&f)
|
||||
.flat_map(|mac| mac.as_bytes())
|
||||
.copied()
|
||||
.collect();
|
||||
let recv_macs: Vec<u8> = refs
|
||||
.recv()
|
||||
.iter()
|
||||
.copied()
|
||||
.flat_map(&f)
|
||||
.flat_map(|mac| mac.as_bytes())
|
||||
.copied()
|
||||
.collect();
|
||||
|
||||
assert_eq!(sent_macs.len() % ENCODING_SIZE, 0);
|
||||
assert_eq!(recv_macs.len() % ENCODING_SIZE, 0);
|
||||
|
||||
if sent.len() != sent_macs.len() {
|
||||
return Err(ErrorRepr::IncorrectMacCount {
|
||||
direction: Direction::Sent,
|
||||
expected: sent_macs.len(),
|
||||
got: sent.len(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
||||
if recv.len() != recv_macs.len() {
|
||||
return Err(ErrorRepr::IncorrectMacCount {
|
||||
direction: Direction::Received,
|
||||
expected: recv_macs.len(),
|
||||
got: recv.len(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
||||
sent.iter_mut()
|
||||
.zip(sent_macs)
|
||||
.for_each(|(enc, mac)| *enc ^= mac);
|
||||
recv.iter_mut()
|
||||
.zip(recv_macs)
|
||||
.for_each(|(enc, mac)| *enc ^= mac);
|
||||
|
||||
let provider = Provider { sent, recv };
|
||||
|
||||
let tree = EncodingTree::new(hasher, idxs, &provider)?;
|
||||
let root = tree.root();
|
||||
|
||||
ctx.io_mut().send(root.clone()).await?;
|
||||
let secret = ctx.io_mut().expect_next().await?;
|
||||
|
||||
let commitment = EncodingCommitment { root, secret };
|
||||
|
||||
Ok((commitment, tree))
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Provider {
|
||||
sent: Vec<u8>,
|
||||
recv: Vec<u8>,
|
||||
}
|
||||
|
||||
impl EncodingProvider for Provider {
|
||||
fn provide_encoding(
|
||||
&self,
|
||||
direction: Direction,
|
||||
range: Range<usize>,
|
||||
dest: &mut Vec<u8>,
|
||||
) -> Result<(), EncodingProviderError> {
|
||||
let encodings = match direction {
|
||||
Direction::Sent => &self.sent,
|
||||
Direction::Received => &self.recv,
|
||||
};
|
||||
|
||||
let start = range.start * ENCODING_SIZE;
|
||||
let end = range.end * ENCODING_SIZE;
|
||||
|
||||
if end > encodings.len() {
|
||||
return Err(EncodingProviderError);
|
||||
}
|
||||
|
||||
dest.extend_from_slice(&encodings[start..end]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Encoding protocol error.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error(transparent)]
|
||||
pub struct EncodingError(#[from] ErrorRepr);
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("encoding protocol error: {0}")]
|
||||
enum ErrorRepr {
|
||||
#[error("I/O error: {0}")]
|
||||
Io(std::io::Error),
|
||||
#[error("incorrect MAC count for {direction}: expected {expected}, got {got}")]
|
||||
IncorrectMacCount {
|
||||
direction: Direction,
|
||||
expected: usize,
|
||||
got: usize,
|
||||
},
|
||||
#[error("encoding tree error: {0}")]
|
||||
EncodingTree(EncodingTreeError),
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for EncodingError {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
Self(ErrorRepr::Io(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<EncodingTreeError> for EncodingError {
|
||||
fn from(value: EncodingTreeError) -> Self {
|
||||
Self(ErrorRepr::EncodingTree(value))
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,6 @@
|
||||
pub(crate) mod commit;
|
||||
pub mod config;
|
||||
pub(crate) mod context;
|
||||
pub(crate) mod encoding;
|
||||
pub(crate) mod ghash;
|
||||
pub(crate) mod msg;
|
||||
pub(crate) mod mux;
|
||||
|
||||
@@ -8,48 +8,41 @@ pub mod state;
|
||||
pub use config::{ProverConfig, ProverConfigBuilder, TlsConfig, TlsConfigBuilder};
|
||||
pub use error::ProverError;
|
||||
pub use future::ProverFuture;
|
||||
use rustls_pki_types::CertificateDer;
|
||||
pub use tlsn_core::{ProveConfig, ProveConfigBuilder, ProveConfigBuilderError, ProverOutput};
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use futures::{AsyncRead, AsyncWrite, TryFutureExt};
|
||||
use mpc_tls::{LeaderCtrl, MpcTlsLeader, SessionKeys};
|
||||
use mpz_common::Context;
|
||||
use mpz_core::Block;
|
||||
use mpz_garble_core::Delta;
|
||||
use mpz_vm_core::prelude::*;
|
||||
use mpz_zk::ProverConfig as ZkProverConfig;
|
||||
use rand::Rng;
|
||||
use rustls_pki_types::CertificateDer;
|
||||
use serio::SinkExt;
|
||||
use tls_client::{ClientConnection, ServerName as TlsServerName};
|
||||
use tls_client_async::{TlsConnection, bind_client};
|
||||
use tlsn_core::{
|
||||
ProveRequest,
|
||||
connection::{HandshakeData, ServerName},
|
||||
transcript::{TlsTranscript, Transcript},
|
||||
};
|
||||
use tlsn_deap::Deap;
|
||||
use tokio::sync::Mutex;
|
||||
use tracing::{Instrument, Span, debug, info, info_span, instrument};
|
||||
use webpki::anchor_from_trusted_cert;
|
||||
|
||||
use crate::{
|
||||
Role,
|
||||
commit::{
|
||||
commit_records,
|
||||
hash::prove_hash,
|
||||
transcript::{TranscriptRefs, decode_transcript},
|
||||
},
|
||||
commit::{ProvingState, TranscriptRefs},
|
||||
context::build_mt_context,
|
||||
encoding,
|
||||
mux::attach_mux,
|
||||
tag::verify_tags,
|
||||
zk_aes_ctr::ZkAesCtr,
|
||||
};
|
||||
|
||||
use futures::{AsyncRead, AsyncWrite, TryFutureExt};
|
||||
use mpc_tls::{LeaderCtrl, MpcTlsLeader, SessionKeys};
|
||||
use rand::Rng;
|
||||
use serio::SinkExt;
|
||||
use std::sync::Arc;
|
||||
use tls_client::{ClientConnection, ServerName as TlsServerName};
|
||||
use tls_client_async::{TlsConnection, bind_client};
|
||||
use tls_core::msgs::enums::ContentType;
|
||||
use tlsn_core::{
|
||||
ProvePayload,
|
||||
connection::{HandshakeData, ServerName},
|
||||
hash::{Blake3, HashAlgId, HashAlgorithm, Keccak256, Sha256},
|
||||
transcript::{TlsTranscript, Transcript, TranscriptCommitment, TranscriptSecret},
|
||||
};
|
||||
use tlsn_deap::Deap;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
use tracing::{Instrument, Span, debug, info, info_span, instrument};
|
||||
|
||||
pub(crate) type RCOTSender = mpz_ot::rcot::shared::SharedRCOTSender<
|
||||
mpz_ot::kos::Sender<mpz_ot::chou_orlandi::Receiver>,
|
||||
mpz_core::Block,
|
||||
@@ -172,8 +165,8 @@ impl Prover<state::Setup> {
|
||||
mux_ctrl,
|
||||
mut mux_fut,
|
||||
mpc_tls,
|
||||
mut zk_aes_ctr_sent,
|
||||
mut zk_aes_ctr_recv,
|
||||
zk_aes_ctr_sent,
|
||||
zk_aes_ctr_recv,
|
||||
keys,
|
||||
vm,
|
||||
..
|
||||
@@ -280,28 +273,6 @@ impl Prover<state::Setup> {
|
||||
)
|
||||
.map_err(ProverError::zk)?;
|
||||
|
||||
// Commit the sent and received plaintext records. The prover drops the proof
// output, as they trust themselves.
|
||||
let (sent_refs, _) = commit_records(
|
||||
&mut vm,
|
||||
&mut zk_aes_ctr_sent,
|
||||
tls_transcript
|
||||
.sent()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData),
|
||||
)
|
||||
.map_err(ProverError::zk)?;
|
||||
|
||||
let (recv_refs, _) = commit_records(
|
||||
&mut vm,
|
||||
&mut zk_aes_ctr_recv,
|
||||
tls_transcript
|
||||
.recv()
|
||||
.iter()
|
||||
.filter(|record| record.typ == ContentType::ApplicationData),
|
||||
)
|
||||
.map_err(ProverError::zk)?;
|
||||
|
||||
mux_fut
|
||||
.poll_with(vm.execute_all(&mut ctx).map_err(ProverError::zk))
|
||||
.await?;
|
||||
@@ -309,7 +280,9 @@ impl Prover<state::Setup> {
|
||||
let transcript = tls_transcript
|
||||
.to_transcript()
|
||||
.expect("transcript is complete");
|
||||
let transcript_refs = TranscriptRefs::new(sent_refs, recv_refs);
|
||||
|
||||
let (sent_len, recv_len) = transcript.len();
|
||||
let transcript_refs = TranscriptRefs::new(sent_len, recv_len);
|
||||
|
||||
Ok(Prover {
|
||||
config: self.config,
|
||||
@@ -322,6 +295,10 @@ impl Prover<state::Setup> {
|
||||
tls_transcript,
|
||||
transcript,
|
||||
transcript_refs,
|
||||
zk_aes_ctr_sent,
|
||||
zk_aes_ctr_recv,
|
||||
keys,
|
||||
encodings_transferred: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -355,123 +332,56 @@ impl Prover<state::Committed> {
///
/// * `config` - The disclosure configuration.
#[instrument(parent = &self.span, level = "info", skip_all, err)]
pub async fn prove(&mut self, config: &ProveConfig) -> Result<ProverOutput, ProverError> {
pub async fn prove(&mut self, config: ProveConfig) -> Result<ProverOutput, ProverError> {
let state::Committed {
mux_fut,
ctx,
vm,
tls_transcript,
transcript,
transcript_refs,
zk_aes_ctr_sent,
zk_aes_ctr_recv,
keys,
encodings_transferred,
..
} = &mut self.state;

let mut output = ProverOutput {
transcript_commitments: Vec::new(),
transcript_secrets: Vec::new(),
// Create and send prove payload.
let server_name = self.config.server_name();
let handshake = config
.server_identity()
.then(|| (server_name.clone(), HandshakeData::new(tls_transcript)));

let partial = if let Some((reveal_sent, reveal_recv)) = config.reveal() {
Some(transcript.to_partial(reveal_sent.clone(), reveal_recv.clone()))
} else {
None
};

let payload = ProvePayload {
handshake: config.server_identity().then(|| {
(
self.config.server_name().clone(),
HandshakeData {
certs: tls_transcript
.server_cert_chain()
.expect("server cert chain is present")
.to_vec(),
sig: tls_transcript
.server_signature()
.expect("server signature is present")
.clone(),
binding: tls_transcript.certificate_binding().clone(),
},
)
}),
transcript: config.transcript().cloned(),
transcript_commit: config.transcript_commit().map(|config| config.to_request()),
};
let payload = ProveRequest::new(&config, partial, handshake);

// Send payload.
mux_fut
.poll_with(ctx.io_mut().send(payload).map_err(ProverError::from))
.await?;

if let Some(partial_transcript) = config.transcript() {
decode_transcript(
vm,
partial_transcript.sent_authed(),
partial_transcript.received_authed(),
transcript_refs,
let proving_state = ProvingState::for_prover(
config,
tls_transcript,
transcript,
transcript_refs,
*encodings_transferred,
);

let (output, encodings_executed) = mux_fut
.poll_with(
proving_state
.prove(vm, ctx, zk_aes_ctr_sent, zk_aes_ctr_recv, keys.clone())
.map_err(ProverError::from),
)
.map_err(ProverError::zk)?;
}
let mut hash_commitments = None;
if let Some(commit_config) = config.transcript_commit() {
if commit_config.has_encoding() {
let hasher: &(dyn HashAlgorithm + Send + Sync) =
match *commit_config.encoding_hash_alg() {
HashAlgId::SHA256 => &Sha256::default(),
HashAlgId::KECCAK256 => &Keccak256::default(),
HashAlgId::BLAKE3 => &Blake3::default(),
alg => {
return Err(ProverError::config(format!(
"unsupported hash algorithm for encoding commitment: {alg}"
)));
}
};

let (commitment, tree) = mux_fut
.poll_with(
encoding::receive(
ctx,
hasher,
transcript_refs,
|plaintext| vm.get_macs(plaintext).expect("reference is valid"),
commit_config.iter_encoding(),
)
.map_err(ProverError::commit),
)
.await?;

output
.transcript_commitments
.push(TranscriptCommitment::Encoding(commitment));
output
.transcript_secrets
.push(TranscriptSecret::Encoding(tree));
}

if commit_config.has_hash() {
hash_commitments = Some(
prove_hash(
vm,
transcript_refs,
commit_config
.iter_hash()
.map(|((dir, idx), alg)| (*dir, idx.clone(), *alg)),
)
.map_err(ProverError::commit)?,
);
}
}

mux_fut
.poll_with(vm.execute_all(ctx).map_err(ProverError::zk))
.await?;

if let Some((hash_fut, hash_secrets)) = hash_commitments {
let hash_commitments = hash_fut.try_recv().map_err(ProverError::commit)?;
for (commitment, secret) in hash_commitments.into_iter().zip(hash_secrets) {
output
.transcript_commitments
.push(TranscriptCommitment::Hash(commitment));
output
.transcript_secrets
.push(TranscriptSecret::Hash(secret));
}
}

*encodings_transferred = encodings_executed;
Ok(output)
}
@@ -523,7 +433,7 @@ fn build_mpc_tls(config: &ProverConfig, ctx: Context) -> (Arc<Mutex<Deap<Mpc, Zk
delta,
);

let zk = Zk::new(rcot_recv.clone());
let zk = Zk::new(ZkProverConfig::default(), rcot_recv.clone());

let vm = Arc::new(Mutex::new(Deap::new(tlsn_deap::Role::Leader, mpc, zk)));
@@ -1,8 +1,6 @@
use std::{error::Error, fmt};

use crate::{commit::CommitError, zk_aes_ctr::ZkAesCtrError};
use mpc_tls::MpcTlsError;

use crate::{encoding::EncodingError, zk_aes_ctr::ZkAesCtrError};
use std::{error::Error, fmt};

/// Error for [`Prover`](crate::Prover).
#[derive(Debug, thiserror::Error)]
@@ -42,13 +40,6 @@ impl ProverError {
{
Self::new(ErrorKind::Zk, source)
}

pub(crate) fn commit<E>(source: E) -> Self
where
E: Into<Box<dyn Error + Send + Sync + 'static>>,
{
Self::new(ErrorKind::Commit, source)
}
}

#[derive(Debug)]
@@ -116,8 +107,8 @@ impl From<ZkAesCtrError> for ProverError {
}
}

impl From<EncodingError> for ProverError {
fn from(e: EncodingError) -> Self {
impl From<CommitError> for ProverError {
fn from(e: CommitError) -> Self {
Self::new(ErrorKind::Commit, e)
}
}

@@ -9,7 +9,7 @@ use tlsn_deap::Deap;
use tokio::sync::Mutex;

use crate::{
commit::transcript::TranscriptRefs,
commit::TranscriptRefs,
mux::{MuxControl, MuxFuture},
prover::{Mpc, Zk},
zk_aes_ctr::ZkAesCtr,
@@ -42,6 +42,10 @@ pub struct Committed {
pub(crate) tls_transcript: TlsTranscript,
pub(crate) transcript: Transcript,
pub(crate) transcript_refs: TranscriptRefs,
pub(crate) zk_aes_ctr_sent: ZkAesCtr,
pub(crate) zk_aes_ctr_recv: ZkAesCtr,
pub(crate) keys: SessionKeys,
pub(crate) encodings_transferred: bool,
}

opaque_debug::implement!(Committed);
@@ -1,11 +1,9 @@
//! Verifier.

pub(crate) mod config;
mod config;
mod error;
pub mod state;

use std::sync::Arc;

pub use config::{VerifierConfig, VerifierConfigBuilder, VerifierConfigBuilderError};
pub use error::VerifierError;
pub use tlsn_core::{
@@ -13,38 +11,35 @@ pub use tlsn_core::{
webpki::ServerCertVerifier,
};

use crate::{
Role,
commit::{
commit_records,
hash::verify_hash,
transcript::{TranscriptRefs, decode_transcript, verify_transcript},
},
config::ProtocolConfig,
context::build_mt_context,
encoding,
mux::attach_mux,
tag::verify_tags,
zk_aes_ctr::ZkAesCtr,
};
use std::sync::Arc;

use futures::{AsyncRead, AsyncWrite, TryFutureExt};
use mpc_tls::{MpcTlsFollower, SessionKeys};
use mpz_common::Context;
use mpz_core::Block;
use mpz_garble_core::Delta;
use mpz_vm_core::prelude::*;
use mpz_zk::VerifierConfig as ZkVerifierConfig;
use serio::stream::IoStreamExt;
use tls_core::msgs::enums::ContentType;
use tlsn_core::{
ProvePayload,
ProveRequest,
connection::{ConnectionInfo, ServerName},
transcript::{TlsTranscript, TranscriptCommitment},
transcript::{ContentType, TlsTranscript},
};
use tlsn_deap::Deap;
use tokio::sync::Mutex;

use tracing::{Span, debug, info, info_span, instrument};

use crate::{
Role,
commit::{ProvingState, TranscriptRefs},
config::ProtocolConfig,
context::build_mt_context,
mux::attach_mux,
tag::verify_tags,
zk_aes_ctr::ZkAesCtr,
};

pub(crate) type RCOTSender = mpz_ot::rcot::shared::SharedRCOTSender<
mpz_ot::ferret::Sender<mpz_ot::kos::Sender<mpz_ot::chou_orlandi::Receiver>>,
mpz_core::Block,
@@ -187,8 +182,8 @@ impl Verifier<state::Setup> {
mut mux_fut,
delta,
mpc_tls,
mut zk_aes_ctr_sent,
mut zk_aes_ctr_recv,
zk_aes_ctr_sent,
zk_aes_ctr_recv,
vm,
keys,
} = self.state;
@@ -229,27 +224,6 @@ impl Verifier<state::Setup> {
)
.map_err(VerifierError::zk)?;

// Prepare for the prover to prove received plaintext.
let (sent_refs, sent_proof) = commit_records(
&mut vm,
&mut zk_aes_ctr_sent,
tls_transcript
.sent()
.iter()
.filter(|record| record.typ == ContentType::ApplicationData),
)
.map_err(VerifierError::zk)?;

let (recv_refs, recv_proof) = commit_records(
&mut vm,
&mut zk_aes_ctr_recv,
tls_transcript
.recv()
.iter()
.filter(|record| record.typ == ContentType::ApplicationData),
)
.map_err(VerifierError::zk)?;

mux_fut
.poll_with(vm.execute_all(&mut ctx).map_err(VerifierError::zk))
.await?;
@@ -259,11 +233,30 @@ impl Verifier<state::Setup> {
// authenticated from the verifier's perspective.
tag_proof.verify().map_err(VerifierError::zk)?;

// Verify the plaintext proofs.
sent_proof.verify().map_err(VerifierError::zk)?;
recv_proof.verify().map_err(VerifierError::zk)?;
let sent_len = tls_transcript
.sent()
.iter()
.filter_map(|record| {
if matches!(record.typ, ContentType::ApplicationData) {
Some(record.ciphertext.len())
} else {
None
}
})
.sum();
let recv_len = tls_transcript
.recv()
.iter()
.filter_map(|record| {
if matches!(record.typ, ContentType::ApplicationData) {
Some(record.ciphertext.len())
} else {
None
}
})
.sum();

let transcript_refs = TranscriptRefs::new(sent_refs, recv_refs);
let transcript_refs = TranscriptRefs::new(sent_len, recv_len);

Ok(Verifier {
config: self.config,
@@ -276,6 +269,11 @@ impl Verifier<state::Setup> {
vm,
tls_transcript,
transcript_refs,
zk_aes_ctr_sent,
zk_aes_ctr_recv,
keys,
verified_server_name: None,
encodings_transferred: false,
},
})
}
@@ -304,126 +302,42 @@ impl Verifier<state::Committed> {
vm,
tls_transcript,
transcript_refs,
zk_aes_ctr_sent,
zk_aes_ctr_recv,
keys,
verified_server_name,
encodings_transferred,
..
} = &mut self.state;

let ProvePayload {
handshake,
transcript,
transcript_commit,
} = mux_fut
let payload: ProveRequest = mux_fut
.poll_with(ctx.io_mut().expect_next().map_err(VerifierError::from))
.await?;

let verifier = if let Some(root_store) = self.config.root_store() {
ServerCertVerifier::new(root_store).map_err(VerifierError::config)?
} else {
ServerCertVerifier::mozilla()
};
let proving_state = ProvingState::for_verifier(
payload,
tls_transcript,
transcript_refs,
verified_server_name.clone(),
*encodings_transferred,
);

let server_name = if let Some((name, cert_data)) = handshake {
cert_data
.verify(
&verifier,
tls_transcript.time(),
tls_transcript.server_ephemeral_key(),
&name,
)
.map_err(VerifierError::verify)?;

Some(name)
} else {
None
};

if let Some(partial_transcript) = &transcript {
let sent_len = tls_transcript
.sent()
.iter()
.filter_map(|record| {
if let ContentType::ApplicationData = record.typ {
Some(record.ciphertext.len())
} else {
None
}
})
.sum::<usize>();

let recv_len = tls_transcript
.recv()
.iter()
.filter_map(|record| {
if let ContentType::ApplicationData = record.typ {
Some(record.ciphertext.len())
} else {
None
}
})
.sum::<usize>();

// Check ranges.
if partial_transcript.len_sent() != sent_len
|| partial_transcript.len_received() != recv_len
{
return Err(VerifierError::verify(
"prover sent transcript with incorrect length",
));
}
decode_transcript(
let (output, encodings_executed) = mux_fut
.poll_with(proving_state.verify(
vm,
partial_transcript.sent_authed(),
partial_transcript.received_authed(),
transcript_refs,
)
.map_err(VerifierError::zk)?;
}

let mut transcript_commitments = Vec::new();
let mut hash_commitments = None;
if let Some(commit_config) = transcript_commit {
if commit_config.encoding() {
let commitment = mux_fut
.poll_with(encoding::transfer(
ctx,
transcript_refs,
delta,
|plaintext| vm.get_keys(plaintext).expect("reference is valid"),
))
.await?;

transcript_commitments.push(TranscriptCommitment::Encoding(commitment));
}

if commit_config.has_hash() {
hash_commitments = Some(
verify_hash(vm, transcript_refs, commit_config.iter_hash().cloned())
.map_err(VerifierError::verify)?,
);
}
}

mux_fut
.poll_with(vm.execute_all(ctx).map_err(VerifierError::zk))
ctx,
zk_aes_ctr_sent,
zk_aes_ctr_recv,
keys.clone(),
*delta,
self.config.root_store(),
))
.await?;

// Verify revealed data.
if let Some(partial_transcript) = &transcript {
verify_transcript(vm, partial_transcript, transcript_refs)
.map_err(VerifierError::verify)?;
}
*verified_server_name = output.server_name.clone();
*encodings_transferred = encodings_executed;

if let Some(hash_commitments) = hash_commitments {
for commitment in hash_commitments.try_recv().map_err(VerifierError::verify)? {
transcript_commitments.push(TranscriptCommitment::Hash(commitment));
}
}

Ok(VerifierOutput {
server_name,
transcript,
transcript_commitments,
})
Ok(output)
}

/// Closes the connection with the prover.
@@ -474,7 +388,7 @@ fn build_mpc_tls(

let mpc = Mpc::new(mpz_ot::cot::DerandCOTReceiver::new(rcot_recv.clone()));

let zk = Zk::new(delta, rcot_send.clone());
let zk = Zk::new(ZkVerifierConfig::default(), delta, rcot_send.clone());

let vm = Arc::new(Mutex::new(Deap::new(tlsn_deap::Role::Follower, mpc, zk)));
@@ -1,4 +1,4 @@
use crate::{encoding::EncodingError, zk_aes_ctr::ZkAesCtrError};
use crate::{commit::CommitError, zk_aes_ctr::ZkAesCtrError};
use mpc_tls::MpcTlsError;
use std::{error::Error, fmt};

@@ -20,13 +20,6 @@ impl VerifierError {
}
}

pub(crate) fn config<E>(source: E) -> Self
where
E: Into<Box<dyn Error + Send + Sync + 'static>>,
{
Self::new(ErrorKind::Config, source)
}

pub(crate) fn mpc<E>(source: E) -> Self
where
E: Into<Box<dyn Error + Send + Sync + 'static>>,
@@ -40,13 +33,6 @@ impl VerifierError {
{
Self::new(ErrorKind::Zk, source)
}

pub(crate) fn verify<E>(source: E) -> Self
where
E: Into<Box<dyn Error + Send + Sync + 'static>>,
{
Self::new(ErrorKind::Verify, source)
}
}

#[derive(Debug)]
@@ -56,7 +42,6 @@ enum ErrorKind {
Mpc,
Zk,
Commit,
Verify,
}

impl fmt::Display for VerifierError {
@@ -69,7 +54,6 @@ impl fmt::Display for VerifierError {
ErrorKind::Mpc => f.write_str("mpc error")?,
ErrorKind::Zk => f.write_str("zk error")?,
ErrorKind::Commit => f.write_str("commit error")?,
ErrorKind::Verify => f.write_str("verification error")?,
}

if let Some(source) = &self.source {
@@ -116,8 +100,8 @@ impl From<ZkAesCtrError> for VerifierError {
}
}

impl From<EncodingError> for VerifierError {
fn from(e: EncodingError) -> Self {
impl From<CommitError> for VerifierError {
fn from(e: CommitError) -> Self {
Self::new(ErrorKind::Commit, e)
}
}
@@ -3,14 +3,14 @@
use std::sync::Arc;

use crate::{
commit::transcript::TranscriptRefs,
commit::TranscriptRefs,
mux::{MuxControl, MuxFuture},
zk_aes_ctr::ZkAesCtr,
};
use mpc_tls::{MpcTlsFollower, SessionKeys};
use mpz_common::Context;
use mpz_memory_core::correlated::Delta;
use tlsn_core::transcript::TlsTranscript;
use tlsn_core::{connection::ServerName, transcript::TlsTranscript};
use tlsn_deap::Deap;
use tokio::sync::Mutex;

@@ -45,6 +45,11 @@ pub struct Committed {
pub(crate) vm: Zk,
pub(crate) tls_transcript: TlsTranscript,
pub(crate) transcript_refs: TranscriptRefs,
pub(crate) zk_aes_ctr_sent: ZkAesCtr,
pub(crate) zk_aes_ctr_recv: ZkAesCtr,
pub(crate) keys: SessionKeys,
pub(crate) verified_server_name: Option<ServerName>,
pub(crate) encodings_transferred: bool,
}

opaque_debug::implement!(Committed);
@@ -37,8 +37,8 @@ impl ZkAesCtr {
}

/// Returns the role.
pub(crate) fn role(&self) -> &Role {
&self.role
pub(crate) fn role(&self) -> Role {
self.role
}

/// Allocates `len` bytes for encryption.

@@ -103,7 +103,7 @@ async fn prover<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(verifier_soc

let config = builder.build().unwrap();

prover.prove(&config).await.unwrap();
prover.prove(config).await.unwrap();
prover.close().await.unwrap();
}
@@ -14,9 +14,6 @@ workspace = true
[lib]
crate-type = ["cdylib", "rlib"]

[package.metadata.wasm-pack.profile.custom]
wasm-opt = ["-O3"]

[features]
default = []
test = []
@@ -49,8 +46,7 @@ tsify-next = { version = "0.5", default-features = false, features = ["js"] }
wasm-bindgen = { version = "0.2" }
wasm-bindgen-futures = { version = "0.4" }
web-spawn = { workspace = true }
# Use the patched ws_stream_wasm to fix the issue https://github.com/najamelan/ws_stream_wasm/issues/12#issuecomment-1711902958
ws_stream_wasm = { git = "https://github.com/tlsnotary/ws_stream_wasm", rev = "2ed12aad9f0236e5321f577672f309920b2aef51" }
ws_stream_wasm = { workspace = true }

[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2", features = ["js"] }
@@ -126,7 +126,7 @@ impl JsProver {

let config = builder.build()?;

prover.prove(&config).await?;
prover.prove(config).await?;
prover.close().await?;

info!("Finalized");