Compare commits

2 Commits

Author | SHA1 | Message | Date
dan | a34355ef77 | fix test | 2025-11-19 09:17:38 +02:00
dan | bf6a23a61e | convert pest ast to predicate ast | 2025-11-19 08:54:21 +02:00
64 changed files with 4362 additions and 1318 deletions

View File

@@ -21,8 +21,7 @@ env:
# - https://github.com/privacy-ethereum/mpz/issues/178
# 32 seems to be big enough for the foreseeable future
RAYON_NUM_THREADS: 32
RUST_VERSION: 1.92.0
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RUST_VERSION: 1.90.0
jobs:
clippy:

Cargo.lock (generated, 1037 lines)

File diff suppressed because it is too large

View File

@@ -66,27 +66,26 @@ tlsn-harness-runner = { path = "crates/harness/runner" }
tlsn-wasm = { path = "crates/wasm" }
tlsn = { path = "crates/tlsn" }
mpz-circuits = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-circuits-data = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-memory-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-common = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-vm-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-garble = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-garble-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-ole = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-ot = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-share-conversion = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-fields = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-zk = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-hash = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-ideal-vm = { git = "https://github.com/privacy-ethereum/mpz", rev = "9c343f8" }
mpz-circuits = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-memory-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-common = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-vm-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-garble = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-garble-core = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-ole = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-ot = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-share-conversion = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-fields = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-zk = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-hash = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
mpz-ideal-vm = { git = "https://github.com/privacy-ethereum/mpz", rev = "5250a78" }
rangeset = { version = "0.4" }
rangeset = { version = "0.2" }
serio = { version = "0.2" }
spansy = { git = "https://github.com/tlsnotary/tlsn-utils", rev = "6f1a934" }
spansy = { git = "https://github.com/tlsnotary/tlsn-utils", rev = "6168663" }
uid-mux = { version = "0.2" }
websocket-relay = { git = "https://github.com/tlsnotary/tlsn-utils", rev = "6f1a934" }
websocket-relay = { git = "https://github.com/tlsnotary/tlsn-utils", rev = "6168663" }
aead = { version = "0.4" }
aes = { version = "0.8" }

View File

@@ -27,7 +27,6 @@ alloy-primitives = { version = "1.3.1", default-features = false }
alloy-signer = { version = "1.0", default-features = false }
alloy-signer-local = { version = "1.0", default-features = false }
rand06-compat = { workspace = true }
rangeset = { workspace = true }
rstest = { workspace = true }
tlsn-core = { workspace = true, features = ["fixtures"] }
tlsn-data-fixtures = { workspace = true }

View File

@@ -5,7 +5,7 @@ use rand::{Rng, rng};
use tlsn_core::{
connection::{ConnectionInfo, ServerEphemKey},
hash::HashAlgId,
transcript::TranscriptCommitment,
transcript::{TranscriptCommitment, encoding::EncoderSecret},
};
use crate::{
@@ -25,6 +25,7 @@ pub struct Sign {
connection_info: Option<ConnectionInfo>,
server_ephemeral_key: Option<ServerEphemKey>,
cert_commitment: ServerCertCommitment,
encoder_secret: Option<EncoderSecret>,
extensions: Vec<Extension>,
transcript_commitments: Vec<TranscriptCommitment>,
}
@@ -86,6 +87,7 @@ impl<'a> AttestationBuilder<'a, Accept> {
connection_info: None,
server_ephemeral_key: None,
cert_commitment,
encoder_secret: None,
transcript_commitments: Vec::new(),
extensions,
},
@@ -106,6 +108,12 @@ impl AttestationBuilder<'_, Sign> {
self
}
/// Sets the secret for encoding commitments.
pub fn encoder_secret(&mut self, secret: EncoderSecret) -> &mut Self {
self.state.encoder_secret = Some(secret);
self
}
/// Adds an extension to the attestation.
pub fn extension(&mut self, extension: Extension) -> &mut Self {
self.state.extensions.push(extension);
@@ -129,6 +137,7 @@ impl AttestationBuilder<'_, Sign> {
connection_info,
server_ephemeral_key,
cert_commitment,
encoder_secret,
extensions,
transcript_commitments,
} = self.state;
@@ -159,6 +168,7 @@ impl AttestationBuilder<'_, Sign> {
AttestationBuilderError::new(ErrorKind::Field, "handshake data was not set")
})?),
cert_commitment: field_id.next(cert_commitment),
encoder_secret: encoder_secret.map(|secret| field_id.next(secret)),
extensions: extensions
.into_iter()
.map(|extension| field_id.next(extension))
@@ -243,7 +253,8 @@ mod test {
use rstest::{fixture, rstest};
use tlsn_core::{
connection::{CertBinding, CertBindingV1_2},
fixtures::ConnectionFixture,
fixtures::{ConnectionFixture, encoding_provider},
hash::Blake3,
transcript::Transcript,
};
use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
@@ -274,7 +285,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } = request_fixture(transcript, connection, Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection,
Blake3::default(),
Vec::new(),
);
let attestation_config = AttestationConfig::builder()
.supported_signature_algs([SignatureAlgId::SECP256R1])
@@ -293,7 +310,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } = request_fixture(transcript, connection, Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection,
Blake3::default(),
Vec::new(),
);
let attestation_config = AttestationConfig::builder()
.supported_signature_algs([SignatureAlgId::SECP256K1])
@@ -313,7 +336,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } = request_fixture(transcript, connection, Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection,
Blake3::default(),
Vec::new(),
);
let attestation_builder = Attestation::builder(attestation_config)
.accept_request(request)
@@ -334,8 +363,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let mut attestation_builder = Attestation::builder(attestation_config)
.accept_request(request)
@@ -359,8 +393,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let mut attestation_builder = Attestation::builder(attestation_config)
.accept_request(request)
@@ -393,7 +432,9 @@ mod test {
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
vec![Extension {
id: b"foo".to_vec(),
value: b"bar".to_vec(),
@@ -420,7 +461,9 @@ mod test {
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
vec![Extension {
id: b"foo".to_vec(),
value: b"bar".to_vec(),

View File

@@ -2,7 +2,11 @@
use tlsn_core::{
connection::{CertBinding, CertBindingV1_2},
fixtures::ConnectionFixture,
transcript::{Transcript, TranscriptCommitConfigBuilder, TranscriptCommitment},
hash::HashAlgorithm,
transcript::{
Transcript, TranscriptCommitConfigBuilder, TranscriptCommitment,
encoding::{EncodingProvider, EncodingTree},
},
};
use crate::{
@@ -17,13 +21,16 @@ use crate::{
/// A Request fixture used for testing.
#[allow(missing_docs)]
pub struct RequestFixture {
pub encoding_tree: EncodingTree,
pub request: Request,
}
/// Returns a request fixture for testing.
pub fn request_fixture(
transcript: Transcript,
encodings_provider: impl EncodingProvider,
connection: ConnectionFixture,
encoding_hasher: impl HashAlgorithm,
extensions: Vec<Extension>,
) -> RequestFixture {
let provider = CryptoProvider::default();
@@ -43,9 +50,15 @@ pub fn request_fixture(
.unwrap();
let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();
let mut builder = RequestConfig::builder();
// Prover constructs encoding tree.
let encoding_tree = EncodingTree::new(
&encoding_hasher,
transcripts_commitment_config.iter_encoding(),
&encodings_provider,
)
.unwrap();
builder.transcript_commit(transcripts_commitment_config);
let mut builder = RequestConfig::builder();
for extension in extensions {
builder.extension(extension);
@@ -61,7 +74,10 @@ pub fn request_fixture(
let (request, _) = request_builder.build(&provider).unwrap();
RequestFixture { request }
RequestFixture {
encoding_tree,
request,
}
}
/// Returns an attestation fixture for testing.
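For orientation, a minimal sketch of how call sites consume the updated fixture (names taken from the hunks above; EncodingCommitment appears later in this diff):

// Sketch: destructure the fixture and derive the commitment from the tree root.
let RequestFixture {
    encoding_tree,
    request,
} = request_fixture(
    transcript,
    encoding_provider(GET_WITH_HEADER, OK_JSON),
    connection,
    Blake3::default(),
    Vec::new(),
);
let commitment = EncodingCommitment {
    root: encoding_tree.root(),
};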

View File

@@ -79,6 +79,8 @@
//!
//! // Specify all the transcript commitments we want to make.
//! builder
//! // Use BLAKE3 for encoding commitments.
//! .encoding_hash_alg(HashAlgId::BLAKE3)
//! // Commit to all sent data.
//! .commit_sent(&(0..sent_len))?
//! // Commit to the first 10 bytes of sent data.
@@ -127,7 +129,7 @@
//!
//! ```no_run
//! # use tlsn_attestation::{Attestation, CryptoProvider, Secrets, presentation::Presentation};
//! # use tlsn_core::transcript::Direction;
//! # use tlsn_core::transcript::{TranscriptCommitmentKind, Direction};
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! # let attestation: Attestation = unimplemented!();
//! # let secrets: Secrets = unimplemented!();
@@ -138,6 +140,8 @@
//! let mut builder = secrets.transcript_proof_builder();
//!
//! builder
//! // Use transcript encoding commitments.
//! .commitment_kinds(&[TranscriptCommitmentKind::Encoding])
//! // Disclose the first 10 bytes of the sent data.
//! .reveal(&(0..10), Direction::Sent)?
//! // Disclose all of the received data.
@@ -215,7 +219,7 @@ use tlsn_core::{
connection::{ConnectionInfo, ServerEphemKey},
hash::{Hash, HashAlgorithm, TypedHash},
merkle::MerkleTree,
transcript::TranscriptCommitment,
transcript::{TranscriptCommitment, encoding::EncoderSecret},
};
use crate::{
@@ -297,6 +301,8 @@ pub enum FieldKind {
ServerEphemKey = 0x02,
/// Server identity commitment.
ServerIdentityCommitment = 0x03,
/// Encoding commitment.
EncodingCommitment = 0x04,
/// Plaintext hash commitment.
PlaintextHash = 0x05,
}
@@ -321,6 +327,7 @@ pub struct Body {
connection_info: Field<ConnectionInfo>,
server_ephemeral_key: Field<ServerEphemKey>,
cert_commitment: Field<ServerCertCommitment>,
encoder_secret: Option<Field<EncoderSecret>>,
extensions: Vec<Field<Extension>>,
transcript_commitments: Vec<Field<TranscriptCommitment>>,
}
@@ -366,6 +373,7 @@ impl Body {
connection_info: conn_info,
server_ephemeral_key,
cert_commitment,
encoder_secret,
extensions,
transcript_commitments,
} = self;
@@ -383,6 +391,13 @@ impl Body {
),
];
if let Some(encoder_secret) = encoder_secret {
fields.push((
encoder_secret.id,
hasher.hash_separated(&encoder_secret.data),
));
}
for field in extensions.iter() {
fields.push((field.id, hasher.hash_separated(&field.data)));
}

View File

@@ -91,6 +91,11 @@ impl Presentation {
transcript.verify_with_provider(
&provider.hash,
&attestation.body.connection_info().transcript_length,
attestation
.body
.encoder_secret
.as_ref()
.map(|field| &field.data),
attestation.body.transcript_commitments(),
)
})

View File

@@ -144,7 +144,9 @@ impl std::fmt::Display for ErrorKind {
#[cfg(test)]
mod test {
use tlsn_core::{
connection::TranscriptLength, fixtures::ConnectionFixture, hash::HashAlgId,
connection::TranscriptLength,
fixtures::{ConnectionFixture, encoding_provider},
hash::{Blake3, HashAlgId},
transcript::Transcript,
};
use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
@@ -162,8 +164,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let attestation =
attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -178,8 +185,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { mut request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { mut request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let attestation =
attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -197,8 +209,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { mut request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { mut request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let attestation =
attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -216,8 +233,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { mut request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { mut request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let attestation =
attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -243,8 +265,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let mut attestation =
attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);
@@ -262,8 +289,13 @@ mod test {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let connection = ConnectionFixture::tlsnotary(transcript.length());
let RequestFixture { request, .. } =
request_fixture(transcript, connection.clone(), Vec::new());
let RequestFixture { request, .. } = request_fixture(
transcript,
encoding_provider(GET_WITH_HEADER, OK_JSON),
connection.clone(),
Blake3::default(),
Vec::new(),
);
let attestation =
attestation_fixture(request.clone(), connection, SignatureAlgId::SECP256K1, &[]);

View File

@@ -49,4 +49,6 @@ impl_domain_separator!(tlsn_core::connection::ConnectionInfo);
impl_domain_separator!(tlsn_core::connection::CertBinding);
impl_domain_separator!(tlsn_core::transcript::TranscriptCommitment);
impl_domain_separator!(tlsn_core::transcript::TranscriptSecret);
impl_domain_separator!(tlsn_core::transcript::encoding::EncoderSecret);
impl_domain_separator!(tlsn_core::transcript::encoding::EncodingCommitment);
impl_domain_separator!(tlsn_core::transcript::hash::PlaintextHash);

View File

@@ -1,5 +1,3 @@
use rand::{Rng, SeedableRng, rngs::StdRng};
use rangeset::set::RangeSet;
use tlsn_attestation::{
Attestation, AttestationConfig, CryptoProvider,
presentation::PresentationOutput,
@@ -8,11 +6,12 @@ use tlsn_attestation::{
};
use tlsn_core::{
connection::{CertBinding, CertBindingV1_2},
fixtures::ConnectionFixture,
hash::{Blake3, Blinder, HashAlgId},
fixtures::{self, ConnectionFixture, encoder_secret},
hash::Blake3,
transcript::{
Direction, Transcript, TranscriptCommitment, TranscriptSecret,
hash::{PlaintextHash, PlaintextHashSecret, hash_plaintext},
Direction, Transcript, TranscriptCommitConfigBuilder, TranscriptCommitment,
TranscriptSecret,
encoding::{EncodingCommitment, EncodingTree},
},
};
use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
@@ -20,7 +19,6 @@ use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
/// Tests that the attestation protocol and verification work end-to-end
#[test]
fn test_api() {
let mut rng = StdRng::seed_from_u64(0);
let mut provider = CryptoProvider::default();
// Configure signer for Notary
@@ -28,6 +26,8 @@ fn test_api() {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let (sent_len, recv_len) = transcript.len();
// Plaintext encodings which the Prover obtained from GC evaluation
let encodings_provider = fixtures::encoding_provider(GET_WITH_HEADER, OK_JSON);
// At the end of the TLS connection the Prover holds the:
let ConnectionFixture {
@@ -44,38 +44,26 @@ fn test_api() {
unreachable!()
};
// Create hash commitments
let hasher = Blake3::default();
let sent_blinder: Blinder = rng.random();
let recv_blinder: Blinder = rng.random();
// Prover specifies the ranges it wants to commit to.
let mut transcript_commitment_builder = TranscriptCommitConfigBuilder::new(&transcript);
transcript_commitment_builder
.commit_sent(&(0..sent_len))
.unwrap()
.commit_recv(&(0..recv_len))
.unwrap();
let sent_idx = RangeSet::from(0..sent_len);
let recv_idx = RangeSet::from(0..recv_len);
let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();
let sent_hash_commitment = PlaintextHash {
direction: Direction::Sent,
idx: sent_idx.clone(),
hash: hash_plaintext(&hasher, transcript.sent(), &sent_blinder),
};
// Prover constructs encoding tree.
let encoding_tree = EncodingTree::new(
&Blake3::default(),
transcripts_commitment_config.iter_encoding(),
&encodings_provider,
)
.unwrap();
let recv_hash_commitment = PlaintextHash {
direction: Direction::Received,
idx: recv_idx.clone(),
hash: hash_plaintext(&hasher, transcript.received(), &recv_blinder),
};
let sent_hash_secret = PlaintextHashSecret {
direction: Direction::Sent,
idx: sent_idx,
alg: HashAlgId::BLAKE3,
blinder: sent_blinder,
};
let recv_hash_secret = PlaintextHashSecret {
direction: Direction::Received,
idx: recv_idx,
alg: HashAlgId::BLAKE3,
blinder: recv_blinder,
let encoding_commitment = EncodingCommitment {
root: encoding_tree.root(),
};
let request_config = RequestConfig::default();
@@ -86,14 +74,8 @@ fn test_api() {
.handshake_data(server_cert_data)
.transcript(transcript)
.transcript_commitments(
vec![
TranscriptSecret::Hash(sent_hash_secret),
TranscriptSecret::Hash(recv_hash_secret),
],
vec![
TranscriptCommitment::Hash(sent_hash_commitment.clone()),
TranscriptCommitment::Hash(recv_hash_commitment.clone()),
],
vec![TranscriptSecret::Encoding(encoding_tree)],
vec![TranscriptCommitment::Encoding(encoding_commitment.clone())],
);
let (request, secrets) = request_builder.build(&provider).unwrap();
@@ -113,10 +95,8 @@ fn test_api() {
.connection_info(connection_info.clone())
// Server key Notary received during handshake
.server_ephemeral_key(server_ephemeral_key)
.transcript_commitments(vec![
TranscriptCommitment::Hash(sent_hash_commitment),
TranscriptCommitment::Hash(recv_hash_commitment),
]);
.encoder_secret(encoder_secret())
.transcript_commitments(vec![TranscriptCommitment::Encoding(encoding_commitment)]);
let attestation = attestation_builder.build(&provider).unwrap();
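Taken together, this test now exercises the encoding-commitment flow end to end; a condensed sketch (all names from the diff above, error handling elided):

// Prover: build the encoding tree over the committed ranges.
let encoding_tree = EncodingTree::new(
    &Blake3::default(),
    transcripts_commitment_config.iter_encoding(),
    &encodings_provider,
)?;
let commitment = EncodingCommitment {
    root: encoding_tree.root(),
};

// Notary: attest to the commitment together with the encoder secret,
// which verifiers later feed into transcript verification.
attestation_builder
    .encoder_secret(encoder_secret())
    .transcript_commitments(vec![TranscriptCommitment::Encoding(commitment)]);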

View File

@@ -15,7 +15,7 @@ use mpz_vm_core::{
memory::{binary::Binary, DecodeFuture, Memory, Repr, Slice, View},
Call, Callable, Execute, Vm, VmError,
};
use rangeset::{ops::Set, set::RangeSet};
use rangeset::{Difference, RangeSet, UnionMut};
use tokio::sync::{Mutex, MutexGuard, OwnedMutexGuard};
type Error = DeapError;
@@ -210,12 +210,10 @@ where
}
fn commit_raw(&mut self, slice: Slice) -> Result<(), VmError> {
let slice_range = slice.to_range();
// Follower's private inputs are not committed in the ZK VM until finalization.
let input_minus_follower = slice_range.difference(&self.follower_input_ranges);
let input_minus_follower = slice.to_range().difference(&self.follower_input_ranges);
let mut zk = self.zk.try_lock().unwrap();
for input in input_minus_follower {
for input in input_minus_follower.iter_ranges() {
zk.commit_raw(
self.memory_map
.try_get(Slice::from_range_unchecked(input))?,
@@ -268,7 +266,7 @@ where
mpc.mark_private_raw(slice)?;
// Follower's private inputs will become public during finalization.
zk.mark_public_raw(self.memory_map.try_get(slice)?)?;
self.follower_input_ranges.union_mut(slice.to_range());
self.follower_input_ranges.union_mut(&slice.to_range());
self.follower_inputs.push(slice);
}
}
@@ -284,7 +282,7 @@ where
mpc.mark_blind_raw(slice)?;
// Follower's private inputs will become public during finalization.
zk.mark_public_raw(self.memory_map.try_get(slice)?)?;
self.follower_input_ranges.union_mut(slice.to_range());
self.follower_input_ranges.union_mut(&slice.to_range());
self.follower_inputs.push(slice);
}
Role::Follower => {
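This file tracks the rangeset version change from the workspace manifest; a small sketch of the API surface used here (method names as they appear in this diff; a sketch, not the crate's full interface):

use rangeset::{Difference, RangeSet, UnionMut};

let mut inputs: RangeSet<usize> = RangeSet::from(0..10);
// `union_mut` takes its argument by reference.
inputs.union_mut(&(20..30));
// `difference` returns a set whose ranges are walked with `iter_ranges`.
for range in inputs.difference(&(5..25)).iter_ranges() {
    println!("{}..{}", range.start, range.end);
}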

View File

@@ -1,7 +1,7 @@
use std::ops::Range;
use mpz_vm_core::{memory::Slice, VmError};
use rangeset::ops::Set;
use rangeset::Subset;
/// A mapping between the memories of the MPC and ZK VMs.
#[derive(Debug, Default)]

View File

@@ -27,6 +27,12 @@ tlsn-data-fixtures = { workspace = true, optional = true }
tlsn-tls-core = { workspace = true, features = ["serde"] }
tlsn-utils = { workspace = true }
rangeset = { workspace = true, features = ["serde"] }
mpz-circuits = { workspace = true }
pest = "*"
pest_derive = "*"
pest_meta = "*"
aead = { workspace = true, features = ["alloc"], optional = true }
aes-gcm = { workspace = true, optional = true }
@@ -59,7 +65,5 @@ generic-array = { workspace = true }
bincode = { workspace = true }
hex = { workspace = true }
rstest = { workspace = true }
tlsn-core = { workspace = true, features = ["fixtures"] }
tlsn-attestation = { workspace = true, features = ["fixtures"] }
tlsn-data-fixtures = { workspace = true }
webpki-root-certs = { workspace = true }

View File

@@ -1,6 +1,6 @@
//! Proving configuration.
use rangeset::set::{RangeSet, ToRangeSet};
use rangeset::{RangeSet, ToRangeSet, UnionMut};
use serde::{Deserialize, Serialize};
use crate::transcript::{Direction, Transcript, TranscriptCommitConfig, TranscriptCommitRequest};

View File

@@ -185,17 +185,22 @@ impl MpcTlsConfigBuilder {
///
/// Provides optimization options to adapt the protocol to different network
/// situations.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum NetworkSetting {
/// Reduces network round-trips at the expense of consuming more network
/// bandwidth.
Bandwidth,
/// Reduces network bandwidth utilization at the expense of more network
/// round-trips.
#[default]
Latency,
}
impl Default for NetworkSetting {
fn default() -> Self {
Self::Latency
}
}
/// Error for [`MpcTlsConfig`].
#[derive(Debug, thiserror::Error)]
#[error(transparent)]

View File

@@ -1,11 +1,11 @@
use rangeset::set::RangeSet;
use rangeset::RangeSet;
pub(crate) struct FmtRangeSet<'a>(pub &'a RangeSet<usize>);
impl<'a> std::fmt::Display for FmtRangeSet<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("{")?;
for range in self.0.iter() {
for range in self.0.iter_ranges() {
write!(f, "{}..{}", range.start, range.end)?;
if range.end < self.0.end().unwrap_or(0) {
f.write_str(", ")?;

View File

@@ -1,7 +1,10 @@
//! Fixtures for testing
mod provider;
pub mod transcript;
pub use provider::FixtureEncodingProvider;
use hex::FromHex;
use crate::{
@@ -10,6 +13,10 @@ use crate::{
ServerEphemKey, ServerName, ServerSignature, SignatureAlgorithm, TlsVersion,
TranscriptLength,
},
transcript::{
encoding::{EncoderSecret, EncodingProvider},
Transcript,
},
webpki::CertificateDer,
};
@@ -122,3 +129,27 @@ impl ConnectionFixture {
server_ephemeral_key
}
}
/// Returns an encoding provider fixture.
pub fn encoding_provider(tx: &[u8], rx: &[u8]) -> impl EncodingProvider {
let secret = encoder_secret();
FixtureEncodingProvider::new(&secret, Transcript::new(tx, rx))
}
/// Seed fixture.
const SEED: [u8; 32] = [0; 32];
/// Delta fixture.
const DELTA: [u8; 16] = [1; 16];
/// Returns an encoder secret fixture.
pub fn encoder_secret() -> EncoderSecret {
EncoderSecret::new(SEED, DELTA)
}
/// Returns a tampered encoder secret fixture.
pub fn encoder_secret_tampered_seed() -> EncoderSecret {
let mut seed = SEED;
seed[0] += 1;
EncoderSecret::new(seed, DELTA)
}

View File

@@ -0,0 +1,41 @@
use std::ops::Range;
use crate::transcript::{
encoding::{new_encoder, Encoder, EncoderSecret, EncodingProvider, EncodingProviderError},
Direction, Transcript,
};
/// An encoding provider fixture.
pub struct FixtureEncodingProvider {
encoder: Box<dyn Encoder>,
transcript: Transcript,
}
impl FixtureEncodingProvider {
/// Creates a new encoding provider fixture.
pub(crate) fn new(secret: &EncoderSecret, transcript: Transcript) -> Self {
Self {
encoder: Box::new(new_encoder(secret)),
transcript,
}
}
}
impl EncodingProvider for FixtureEncodingProvider {
fn provide_encoding(
&self,
direction: Direction,
range: Range<usize>,
dest: &mut Vec<u8>,
) -> Result<(), EncodingProviderError> {
let transcript = match direction {
Direction::Sent => &self.transcript.sent(),
Direction::Received => &self.transcript.received(),
};
let data = transcript.get(range.clone()).ok_or(EncodingProviderError)?;
self.encoder.encode_data(direction, range, data, dest);
Ok(())
}
}
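A minimal usage sketch for this fixture (assuming the helpers above are in scope):

use tlsn_core::{
    fixtures::encoding_provider,
    transcript::{Direction, encoding::EncodingProvider},
};

// Encodings for the first 4 sent bytes land in `dest`.
let provider = encoding_provider(b"GET / HTTP/1.1", b"HTTP/1.1 200 OK");
let mut dest = Vec::new();
provider
    .provide_encoding(Direction::Sent, 0..4, &mut dest)
    .unwrap();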

View File

@@ -2,7 +2,6 @@
use aead::Payload as AeadPayload;
use aes_gcm::{aead::Aead, Aes128Gcm, NewAead};
#[allow(deprecated)]
use generic_array::GenericArray;
use rand::{rngs::StdRng, Rng, SeedableRng};
use tls_core::msgs::{
@@ -181,7 +180,6 @@ fn aes_gcm_encrypt(
let mut nonce = [0u8; 12];
nonce[..4].copy_from_slice(&iv);
nonce[4..].copy_from_slice(&explicit_nonce);
#[allow(deprecated)]
let nonce = GenericArray::from_slice(&nonce);
let cipher = Aes128Gcm::new_from_slice(&key).unwrap();

crates/core/src/grammar.rs (new file, 159 lines)
View File

@@ -0,0 +1,159 @@
use crate::predicates::{Atom, CmpOp, Pred, Rhs};
use pest::{
iterators::{Pair, Pairs},
Parser,
};
use pest_derive::Parser;
#[derive(Parser)]
#[grammar = "expr.pest"]
struct ExprParser;
fn parse_expr(input: &str) -> Result<Pred, pest::error::Error<Rule>> {
let mut pairs = ExprParser::parse(Rule::expr, input)?;
Ok(build_expr(pairs.next().unwrap()))
}
fn build_expr(pair: Pair<Rule>) -> Pred {
match pair.as_rule() {
Rule::expr | Rule::or_expr => build_left_assoc(pair.into_inner(), Rule::and_expr, Pred::Or),
Rule::and_expr => build_left_assoc(pair.into_inner(), Rule::not_expr, Pred::And),
Rule::not_expr => {
// NOT* cmp
let inner = pair.into_inner(); // possibly multiple NOTs, then a cmp
// Count NOTs, then parse cmp
let mut not_count = 0;
let mut rest = Vec::new();
for p in inner {
match p.as_rule() {
Rule::NOT => not_count += 1,
_ => {
rest.push(p);
}
}
}
let mut node = build_cmp(rest.into_iter().next().expect("cmp missing"));
if not_count % 2 == 1 {
node = Pred::Not(Box::new(node));
}
node
}
Rule::cmp => build_cmp(pair),
Rule::primary => build_expr(pair.into_inner().next().unwrap()),
Rule::paren => build_expr(pair.into_inner().next().unwrap()),
_ => unreachable!("unexpected rule: {:?}", pair.as_rule()),
}
}
fn build_left_assoc(
mut inner: Pairs<Rule>,
unit_rule: Rule,
mk_node: impl Fn(Vec<Pred>) -> Pred,
) -> Pred {
// pattern: unit (OP unit)*
let mut nodes = Vec::new();
// First unit
if let Some(first) = inner.next() {
assert_eq!(first.as_rule(), unit_rule);
nodes.push(build_expr(first));
}
// Remaining are: OP unit pairs; we only collect the units and wrap later.
while let Some(_op) = inner.next() {
// `_op` is the operator token pair (AND/OR); skip it,
// then take the unit that follows:
if let Some(unit) = inner.next() {
assert_eq!(unit.as_rule(), unit_rule);
nodes.push(build_expr(unit));
}
}
if nodes.len() == 1 {
nodes.pop().unwrap()
} else {
mk_node(nodes)
}
}
fn build_cmp(pair: Pair<Rule>) -> Pred {
// cmp: primary (cmp_op primary)?
let mut inner = pair.into_inner();
let lhs = inner.next().unwrap();
let lhs_term = parse_term(lhs);
if let Some(op_pair) = inner.next() {
let op = match op_pair.as_str() {
"==" => CmpOp::Eq,
"!=" => CmpOp::Ne,
"<" => CmpOp::Lt,
"<=" => CmpOp::Lte,
">" => CmpOp::Gt,
">=" => CmpOp::Gte,
_ => unreachable!(),
};
let rhs = parse_term(inner.next().unwrap());
// Map to your Atom constraint form (LHS must be x[idx]):
let (index, rhs_val) = match (lhs_term, rhs) {
(Term::Idx(i), Term::Const(c)) => (i, Rhs::Const(c)),
(Term::Idx(i1), Term::Idx(i2)) => (i1, Rhs::Idx(i2)),
// If you want to allow const OP idx or const OP const, handle here (flip, etc.)
other => panic!("unsupported comparison pattern: {:?}", other),
};
Pred::Atom(Atom {
index,
op,
rhs: rhs_val,
})
} else {
// A bare primary is treated as a boolean atom; you can decide policy.
// Here we treat "x[i]" as (x[i] != 0) and const as (const != 0).
match lhs_term {
Term::Idx(i) => Pred::Atom(Atom {
index: i,
op: CmpOp::Ne,
rhs: Rhs::Const(0),
}),
Term::Const(c) => {
// Map bare constants to the explicit true/false predicates.
if c != 0 {
Pred::True
} else {
Pred::False
}
}
}
}
}
#[derive(Debug, Clone, Copy)]
enum Term {
Idx(usize),
Const(u8),
}
fn parse_term(pair: Pair<Rule>) -> Term {
match pair.as_rule() {
Rule::atom => parse_term(pair.into_inner().next().unwrap()),
Rule::byte_idx => {
// "x" "[" number "]"
let mut i = pair.into_inner();
let num = i.find(|p| p.as_rule() == Rule::number).unwrap();
Term::Idx(num.as_str().parse::<usize>().unwrap())
}
Rule::byte_const => {
let n = pair.into_inner().next().unwrap(); // number
Term::Const(n.as_str().parse::<u8>().unwrap())
}
Rule::paren => parse_term(pair.into_inner().next().unwrap()),
Rule::primary => parse_term(pair.into_inner().next().unwrap()),
_ => unreachable!("term {:?}", pair.as_rule()),
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_and() {
let pred = parse_expr("x[100] < x[300] && x[200] == 2 || ! (x[5] >= 57)").unwrap();
// `pred` is a Pred::Or with an And on the left and a Not on the right,
// with Atoms inside.
}
}
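For orientation, the shape the test's comment describes, spelled out with the Pred/Atom types from predicates.rs later in this diff:

// Roughly what parse_expr returns for the input above:
Pred::Or(vec![
    Pred::And(vec![
        Pred::Atom(Atom { index: 100, op: CmpOp::Lt, rhs: Rhs::Idx(300) }),
        Pred::Atom(Atom { index: 200, op: CmpOp::Eq, rhs: Rhs::Const(2) }),
    ]),
    Pred::Not(Box::new(Pred::Atom(Atom {
        index: 5,
        op: CmpOp::Gte,
        rhs: Rhs::Const(57),
    }))),
]);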

View File

@@ -296,14 +296,14 @@ mod sha2 {
fn hash(&self, data: &[u8]) -> super::Hash {
let mut hasher = ::sha2::Sha256::default();
hasher.update(data);
super::Hash::new(hasher.finalize().as_ref())
super::Hash::new(hasher.finalize().as_slice())
}
fn hash_prefixed(&self, prefix: &[u8], data: &[u8]) -> super::Hash {
let mut hasher = ::sha2::Sha256::default();
hasher.update(prefix);
hasher.update(data);
super::Hash::new(hasher.finalize().as_ref())
super::Hash::new(hasher.finalize().as_slice())
}
}
}

crates/core/src/json.pest (new file, 41 lines)
View File

@@ -0,0 +1,41 @@
// pest. The Elegant Parser
// Copyright (c) 2018 Dragoș Tiselice
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
//! A parser for JSON files.
//!
//! This is an example JSON parser.
json = _{ SOI ~ value ~ eoi }
eoi = _{ !ANY }
/// Matches object, e.g.: `{ "foo": "bar" }`
/// Foobar
object = { "{" ~ pair ~ (pair)* ~ "}" | "{" ~ "}" }
pair = { quoted_string ~ ":" ~ value ~ (",")? }
array = { "[" ~ value ~ ("," ~ value)* ~ "]" | "[" ~ "]" }
//////////////////////
/// Matches value, e.g.: `"foo"`, `42`, `true`, `null`, `[]`, `{}`.
//////////////////////
value = _{ quoted_string | number | object | array | bool | null }
quoted_string = _{ "\"" ~ string ~ "\"" }
string = @{ (!("\"" | "\\") ~ ANY)* ~ (escape ~ string)? }
escape = @{ "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t" | unicode) }
unicode = @{ "u" ~ ASCII_HEX_DIGIT{4} }
number = @{ "-"? ~ int ~ ("." ~ ASCII_DIGIT+ ~ exp? | exp)? }
int = @{ "0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT* }
exp = @{ ("E" | "e") ~ ("+" | "-")? ~ ASCII_DIGIT+ }
bool = { "true" | "false" }
null = { "null" }
WHITESPACE = _{ " " | "\t" | "\r" | "\n" }
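A minimal sketch of driving this grammar with pest_derive (mirroring the ExprParser pattern in grammar.rs above; JsonParser is an illustrative name):

use pest::Parser;
use pest_derive::Parser;

#[derive(Parser)]
#[grammar = "json.pest"]
struct JsonParser;

fn main() {
    // `json` is the grammar's entry rule; WHITESPACE is skipped implicitly.
    let pairs = JsonParser::parse(Rule::json, r#"{"answer": 42}"#).unwrap();
    println!("{:#?}", pairs);
}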

crates/core/src/json.rs (new file, 760 lines)
View File

@@ -0,0 +1,760 @@
//! Converts a pest grammar AST into a predicate AST.
use crate::predicates::Pred;
use pest::{
iterators::{Pair, Pairs},
Parser,
};
use pest_derive::Parser;
use pest_meta::{ast, parser as meta, parser::consume_rules, validator};
#[cfg(test)]
mod test {
use core::panic;
use std::cmp::{max, min};
use crate::{
config::prove::ProveConfig,
predicates::{eval_pred, is_unicode, Atom, CmpOp, Compiler, Rhs},
};
use super::*;
use mpz_circuits::{
evaluate,
ops::{all, any},
};
use pest_meta::ast::Expr;
use rangeset::RangeSet;
const MAX_LEN: usize = 999_999;
#[derive(Debug, Clone)]
enum Ex {
RepEx(Rep),
RepExactEx(RepExact),
SeqEx(Seq),
StrEx(Str),
ChoiceEx(Choice),
NegPredEx(NegPred),
OptEx(Opt),
// An expression which must be replaced with a copy of the rule.
NestedEx,
#[allow(non_camel_case_types)]
ASCII_NONZERO_DIGIT,
#[allow(non_camel_case_types)]
ASCII_DIGIT,
#[allow(non_camel_case_types)]
// A single Unicode character
ANY,
#[allow(non_camel_case_types)]
ASCII_HEX_DIGIT,
}
impl Ex {
fn min_len(&self) -> usize {
match self {
Ex::RepEx(e) => 0,
Ex::RepExactEx(e) => e.0 .0.min_len() * e.0 .1 as usize,
Ex::StrEx(e) => e.0.len(),
Ex::SeqEx(e) => e.0.min_len() + e.1.min_len(),
Ex::ChoiceEx(e) => min(e.0.min_len(), e.1.min_len()),
Ex::NegPredEx(e) => 0,
Ex::ASCII_NONZERO_DIGIT => 1,
Ex::ASCII_DIGIT => 1,
Ex::ANY => 1,
Ex::ASCII_HEX_DIGIT => 1,
Ex::OptEx(e) => 0,
Ex::NestedEx => 0,
_ => unimplemented!(),
}
}
fn max_len(&self) -> usize {
match self {
Ex::RepEx(e) => MAX_LEN,
Ex::RepExactEx(e) => e.0 .0.max_len() * e.0 .1 as usize,
Ex::StrEx(e) => e.0.len(),
Ex::SeqEx(e) => e.0.max_len() + e.1.max_len(),
Ex::ChoiceEx(e) => max(e.0.max_len(), e.1.max_len()),
Ex::NegPredEx(e) => 0,
Ex::ASCII_NONZERO_DIGIT => 1,
Ex::ASCII_DIGIT => 1,
Ex::ANY => 4,
Ex::ASCII_HEX_DIGIT => 1,
Ex::OptEx(e) => e.0.max_len(),
Ex::NestedEx => 0,
_ => unimplemented!(),
}
}
}
#[derive(Debug, Clone)]
struct Rep(Box<Ex>);
#[derive(Debug, Clone)]
struct RepExact((Box<Ex>, u32));
#[derive(Debug, Clone)]
struct Str(String);
#[derive(Debug, Clone)]
struct Seq(Box<Ex>, Box<Ex>);
#[derive(Debug, Clone)]
struct Choice(Box<Ex>, Box<Ex>);
#[derive(Debug, Clone)]
struct NegPred(Box<Ex>);
#[derive(Debug, Clone)]
struct Opt(Box<Ex>);
struct Rule {
name: String,
pub ex: Ex,
}
/// Builds the rules, returning the final expression.
fn build_rules(ast_rules: &[ast::Rule]) -> Ex {
let mut rules = Vec::new();
// build from the bottom up
let iter = ast_rules.iter().rev();
for r in iter {
println!("building rule with name {:?}", r.name);
let ex = build_expr(&r.expr, &rules, &r.name, false);
// TODO deal with recursive rules
rules.push(Rule {
name: r.name.clone(),
ex,
});
}
rules.last().unwrap().ex.clone()
}
/// Builds an expression from a pest expression.
/// The current rule's name is passed in to deal with recursion;
/// `is_nested` marks occurrences of a rule within its own body.
fn build_expr(exp: &Expr, rules: &[Rule], this_name: &String, is_nested: bool) -> Ex {
match exp {
Expr::Rep(exp) => {
Ex::RepEx(Rep(Box::new(build_expr(exp, rules, this_name, is_nested))))
}
Expr::RepExact(exp, count) => Ex::RepExactEx(RepExact((
Box::new(build_expr(exp, rules, this_name, is_nested)),
*count,
))),
Expr::Str(str) => Ex::StrEx(Str(str.clone())),
Expr::NegPred(exp) => Ex::NegPredEx(NegPred(Box::new(build_expr(
exp, rules, this_name, is_nested,
)))),
Expr::Seq(a, b) => {
//
let a = build_expr(a, rules, this_name, is_nested);
Ex::SeqEx(Seq(
Box::new(a),
Box::new(build_expr(b, rules, this_name, is_nested)),
))
}
Expr::Choice(a, b) => Ex::ChoiceEx(Choice(
Box::new(build_expr(a, rules, this_name, is_nested)),
Box::new(build_expr(b, rules, this_name, is_nested)),
)),
Expr::Opt(exp) => {
Ex::OptEx(Opt(Box::new(build_expr(exp, rules, this_name, is_nested))))
}
Expr::Ident(ident) => {
let ex = match ident.as_str() {
"ASCII_NONZERO_DIGIT" => Ex::ASCII_NONZERO_DIGIT,
"ASCII_DIGIT" => Ex::ASCII_DIGIT,
"ANY" => Ex::ANY,
"ASCII_HEX_DIGIT" => Ex::ASCII_HEX_DIGIT,
_ => {
if *ident == *this_name {
return Ex::NestedEx;
}
for rule in rules {
if rule.name == *ident {
return rule.ex.clone();
}
}
panic!("couldn't find rule {:?}", ident);
}
};
ex
}
_ => unimplemented!(),
}
}
// This function must only be called when we know that there is enough
// data remaining from the offset to match the expression at least once.
//
// Returns the predicate and the offset from which the next expression
// should be matched.
// Returns multiple predicates if the expression caused multiple branches.
// A top-level expression always returns a single predicate, in which all
// branches are coalesced.
fn expr_to_pred(
exp: &Ex,
offset: usize,
data_len: usize,
is_top_level: bool,
) -> Vec<(Pred, usize)> {
// if is_top_level {
// println!("top level exps {:?}", exp);
// } else {
// println!("Non-top level exps {:?}", exp);
// }
match exp {
Ex::SeqEx(s) => {
let a = &s.0;
let b = &s.1;
if is_top_level && (offset + a.max_len() + b.max_len() < data_len) {
panic!();
}
if offset + a.min_len() + b.min_len() > data_len {
panic!();
}
// The first expression must not try to match in the
// data of the next expression
let pred1 = expr_to_pred(a, offset, data_len - b.min_len(), false);
// interlace all branches
let mut interlaced = Vec::new();
for (p1, offset) in pred1.iter() {
// if the seq expr was top-level, the 2nd expr becomes top-level
let pred2 = expr_to_pred(b, *offset, data_len, is_top_level);
for (p2, offset_inner) in pred2.iter() {
let pred = Pred::And(vec![p1.clone(), p2.clone()]);
interlaced.push((pred, *offset_inner));
}
}
if is_top_level {
// coalesce all branches
let preds: Vec<Pred> = interlaced.into_iter().map(|(a, _b)| a).collect();
if preds.len() == 1 {
vec![(preds[0].clone(), 0)]
} else {
vec![(Pred::Or(preds), 0)]
}
} else {
interlaced
}
}
Ex::ChoiceEx(s) => {
let a = &s.0;
let b = &s.1;
let mut skip_a = false;
let mut skip_b = false;
if is_top_level {
if offset + a.max_len() != data_len {
skip_a = true
}
if offset + b.max_len() != data_len {
skip_b = true;
}
} else {
// if not top level, we may skip an expression when it will
// overflow the data len
if offset + a.min_len() > data_len {
skip_a = true
}
if offset + b.min_len() > data_len {
skip_b = true
}
}
if skip_a && skip_b {
panic!();
}
let mut preds_a = Vec::new();
let mut preds_b = Vec::new();
if !skip_a {
preds_a = expr_to_pred(a, offset, data_len, is_top_level);
}
if !skip_b {
preds_b = expr_to_pred(b, offset, data_len, is_top_level);
}
// combine all branches
let mut combined = Vec::new();
if preds_a.is_empty() {
combined = preds_b.clone();
} else if preds_b.is_empty() {
combined = preds_a.clone();
} else {
assert!(!(preds_a.is_empty() && preds_b.is_empty()));
combined.append(&mut preds_a);
combined.append(&mut preds_b);
}
if is_top_level {
// coalesce all branches
let preds: Vec<Pred> = combined.into_iter().map(|(a, _b)| a).collect();
if preds.len() == 1 {
vec![(preds[0].clone(), 0)]
} else {
vec![(Pred::Or(preds), 0)]
}
} else {
combined
}
}
Ex::RepEx(r) => {
let e = &r.0;
if offset + e.min_len() > data_len {
if is_top_level {
panic!();
}
// zero matches
return vec![];
}
let mut interlaced = Vec::new();
let mut preds = expr_to_pred(&e, offset, data_len, false);
// for (i, (pred, depth)) in preds.iter().enumerate() {
// println!("preds[{i}] (depth {depth}):");
// println!("{pred}");
// }
// Append single matches.
interlaced.append(&mut preds.clone());
let mut was_found = true;
while was_found {
was_found = false;
for (pred_outer, offset_outer) in std::mem::take(&mut preds).into_iter() {
if offset_outer + e.min_len() > data_len {
// cannot match any more
continue;
}
let mut preds_inner = expr_to_pred(&e, offset_outer, data_len, false);
// for (i, (pred, depth)) in preds_inner.iter().enumerate() {
// println!("preds[{i}] (depth {depth}):");
// println!("{pred}");
// }
for (pred_inner, offset_inner) in preds_inner {
let pred = (
Pred::And(vec![pred_outer.clone(), pred_inner]),
offset_inner,
);
preds.push(pred);
was_found = true;
}
}
interlaced.append(&mut preds.clone());
}
// for (i, (pred, depth)) in interlaced.iter().enumerate() {
// println!("preds[{i}] (depth {depth}):");
// println!("{pred}");
// }
if is_top_level {
// drop all branches which do not match exactly at the data length
// border and coalesce the rest
let preds: Vec<Pred> = interlaced
.into_iter()
.filter(|(_a, b)| *b == data_len)
.map(|(a, _b)| a)
.collect();
if preds.is_empty() {
panic!()
}
if preds.len() == 1 {
vec![(preds[0].clone(), 0)]
} else {
// coalesce all branches
vec![(Pred::Or(preds), 0)]
}
} else {
interlaced
}
}
Ex::RepExactEx(r) => {
let e = &r.0 .0;
let count = r.0 .1;
assert!(count > 0);
// At top level, an exact match at the data length border must remain possible.
if is_top_level && (offset + e.max_len() * count as usize < data_len) {
panic!();
}
let mut preds = expr_to_pred(&e, offset, data_len, false);
for i in 1..count {
for (pred_outer, offset_outer) in std::mem::take(&mut preds).into_iter() {
if offset_outer + e.min_len() > data_len {
// cannot match any more
continue;
}
let mut preds_inner = expr_to_pred(&e, offset_outer, data_len, false);
for (pred_inner, offset_inner) in preds_inner {
let pred = (
Pred::And(vec![pred_outer.clone(), pred_inner]),
offset_inner,
);
preds.push(pred);
}
}
}
if is_top_level {
// drop all branches which do not match exactly at the data length
// border and coalesce the rest
let preds: Vec<Pred> = preds
.into_iter()
.filter(|(_a, b)| *b == data_len)
.map(|(a, _b)| a)
.collect();
if preds.is_empty() {
panic!()
}
if preds.len() == 1 {
vec![(preds[0].clone(), 0)]
} else {
// coalesce all branches
vec![(Pred::Or(preds), 0)]
}
} else {
preds
}
}
Ex::NegPredEx(e) => {
assert!(offset <= data_len);
if offset == data_len {
// the inner expression cannot be matched since there is no data left,
// which means that the negative predicate matched
if is_top_level {
panic!("an always-true predicate doesn't make sense")
}
// TODO this is hacky.
return vec![(Pred::True, offset)];
}
let e = &e.0;
let preds = expr_to_pred(&e, offset, data_len, is_top_level);
let preds: Vec<Pred> = preds.into_iter().map(|(a, _b)| a).collect();
let len = preds.len();
// coalesce all branches; the offsets don't matter since they
// will never be used again.
let pred = if preds.len() == 1 {
Pred::Not(Box::new(preds[0].clone()))
} else {
Pred::Not(Box::new(Pred::Or(preds)))
};
if is_top_level && len == 0 {
panic!()
}
// all offsets of a negative predicate are ignored since no matching
// will be done from those offsets.
vec![(pred, offset)]
}
Ex::OptEx(e) => {
let e = &e.0;
if is_top_level {
return vec![(Pred::True, 0)];
}
// add an always-matching branch
let mut preds = vec![(Pred::True, offset)];
if e.min_len() + offset <= data_len {
// try to match only if there is enough data
let mut p = expr_to_pred(&e, offset, data_len, is_top_level);
preds.append(&mut p);
}
preds
}
Ex::StrEx(s) => {
if is_top_level && offset + s.0.len() != data_len {
panic!();
}
let mut preds = Vec::new();
for (idx, byte) in s.0.as_bytes().iter().enumerate() {
let a = Atom {
index: offset + idx,
op: CmpOp::Eq,
rhs: Rhs::Const(*byte),
};
preds.push(Pred::Atom(a));
}
if preds.len() == 1 {
vec![(preds[0].clone(), offset + s.0.len())]
} else {
vec![(Pred::And(preds), offset + s.0.len())]
}
}
Ex::ASCII_NONZERO_DIGIT => {
if is_top_level && (offset + 1 != data_len) {
panic!();
}
let gte = Pred::Atom(Atom {
index: offset,
op: CmpOp::Gte,
rhs: Rhs::Const(49u8),
});
let lte = Pred::Atom(Atom {
index: offset,
op: CmpOp::Lte,
rhs: Rhs::Const(57u8),
});
vec![(Pred::And(vec![gte, lte]), offset + 1)]
}
Ex::ASCII_DIGIT => {
if is_top_level && (offset + 1 != data_len) {
panic!();
}
let gte = Pred::Atom(Atom {
index: offset,
op: CmpOp::Gte,
rhs: Rhs::Const(48u8),
});
let lte = Pred::Atom(Atom {
index: offset,
op: CmpOp::Lte,
rhs: Rhs::Const(57u8),
});
vec![(Pred::And(vec![gte, lte]), offset + 1)]
}
Ex::ANY => {
if is_top_level && (offset + 1 > data_len) {
panic!();
}
let start = offset;
let end = min(offset + 4, data_len);
let mut branches = Vec::new();
// inclusive, so that branches up to `end` (4-byte chars) are covered
for branch_end in start + 1..=end {
branches.push((is_unicode(RangeSet::from(start..branch_end)), branch_end))
}
if is_top_level {
assert!(branches.len() == 1);
}
branches
}
_ => unimplemented!(),
}
}
#[test]
fn test_json_int() {
use rand::{distr::Alphanumeric, rng, Rng};
let grammar = include_str!("json_int.pest");
// Parse the grammar file into Pairs (the grammar's own parse tree)
let pairs = meta::parse(meta::Rule::grammar_rules, grammar).expect("grammar parse error");
// Optional: validate (reports duplicate rules, unreachable rules, etc.)
validator::validate_pairs(pairs.clone()).expect("invalid grammar");
// Convert the parsed pairs into the stable AST representation
let rules_ast: Vec<ast::Rule> = consume_rules(pairs).unwrap();
let exp = build_rules(&rules_ast);
// Inspect the AST however you like. For a quick look, the Debug print is
// the safest (works across versions).
for rule in &rules_ast {
println!("{:#?}", rule);
}
const LENGTH: usize = 7; // Adjustable constant
let pred = expr_to_pred(&exp, 0, LENGTH, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
let circ = Compiler::new().compile(&pred);
println!("{:?} and gates", circ.and_count());
for i in 0..1000000 {
let s: String = rng()
.sample_iter(&Alphanumeric)
.take(LENGTH)
.map(char::from)
.collect();
let out = eval_pred(pred, s.as_bytes());
let is_int = s.chars().all(|c| c.is_ascii_digit()) && !s.starts_with('0');
if out != is_int {
println!("failed at index {:?} with {:?}", i, s);
}
assert_eq!(out, is_int)
}
}
#[test]
fn test_json_str() {
const LENGTH: usize = 10; // Adjustable constant
let grammar = include_str!("json_str.pest");
// Parse the grammar file into Pairs (the grammar's own parse tree)
let pairs = meta::parse(meta::Rule::grammar_rules, grammar).expect("grammar parse error");
// Optional: validate (reports duplicate rules, unreachable rules, etc.)
validator::validate_pairs(pairs.clone()).expect("invalid grammar");
// Convert the parsed pairs into the stable AST representation
let rules_ast: Vec<ast::Rule> = consume_rules(pairs).unwrap();
for rule in &rules_ast {
println!("{:#?}", rule);
}
let exp = build_rules(&rules_ast);
for len in LENGTH..LENGTH + 7 {
let pred = expr_to_pred(&exp, 0, len, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
let circ = Compiler::new().compile(pred);
println!(
"JSON string length: {:?}; circuit AND gate count {:?}",
len,
circ.and_count()
);
}
}
#[test]
fn test_choice() {
let a = Expr::Ident("ASCII_NONZERO_DIGIT".to_string());
let b = Expr::Ident("ASCII_DIGIT".to_string());
let rule = ast::Rule {
name: "test".to_string(),
ty: ast::RuleType::Atomic,
expr: Expr::Choice(Box::new(a), Box::new(b)),
};
let exp = build_rules(&vec![rule]);
let pred = expr_to_pred(&exp, 0, 1, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
println!("pred is {:?}", pred);
}
#[test]
fn test_seq() {
let a = Expr::Ident("ASCII_NONZERO_DIGIT".to_string());
let b = Expr::Ident("ASCII_DIGIT".to_string());
let rule = ast::Rule {
name: "test".to_string(),
ty: ast::RuleType::Atomic,
expr: Expr::Seq(Box::new(a), Box::new(b)),
};
let exp = build_rules(&vec![rule]);
let pred = expr_to_pred(&exp, 0, 2, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
println!("pred is {:?}", pred);
}
#[test]
fn test_rep() {
let a = Expr::Ident("ASCII_NONZERO_DIGIT".to_string());
let b = Expr::Ident("ASCII_DIGIT".to_string());
let rule = ast::Rule {
name: "test".to_string(),
ty: ast::RuleType::Atomic,
expr: Expr::Rep(Box::new(a)),
};
let exp = build_rules(&vec![rule]);
let pred = expr_to_pred(&exp, 0, 3, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
println!("pred is {:?}", pred);
}
#[test]
fn test_rep_choice() {
const LENGTH: usize = 5; // Adjustable constant
let a = Expr::Ident("ASCII_NONZERO_DIGIT".to_string());
let b = Expr::Ident("ASCII_DIGIT".to_string());
// Number of predicates needed to represent the expressions.
let a_weight = 2usize;
let b_weight = 2usize;
let rep_a = Expr::Rep(Box::new(a));
let rep_b = Expr::Rep(Box::new(b));
let rule = ast::Rule {
name: "test".to_string(),
ty: ast::RuleType::Atomic,
expr: Expr::Choice(Box::new(rep_a), Box::new(rep_b)),
};
let exp = build_rules(&vec![rule]);
let pred = expr_to_pred(&exp, 0, LENGTH, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
println!("pred is {}", pred);
// This is for sanity that no extra predicates are being added.
assert_eq!(pred.leaves(), a_weight * LENGTH + b_weight * LENGTH);
}
#[test]
fn test_neg_choice() {
let a = Expr::Str("4".to_string());
let b = Expr::Str("5".to_string());
let choice = Expr::Choice(Box::new(a), Box::new(b));
let neg_choice = Expr::NegPred(Box::new(choice));
let c = Expr::Str("a".to_string());
let d = Expr::Str("BC".to_string());
let choice2 = Expr::Choice(Box::new(c), Box::new(d));
let rule = ast::Rule {
name: "test".to_string(),
ty: ast::RuleType::Atomic,
expr: Expr::Seq(Box::new(neg_choice), Box::new(choice2)),
};
let exp = build_rules(&vec![rule]);
let pred = expr_to_pred(&exp, 0, 2, true);
assert!(pred.len() == 1);
let pred = &pred[0].0;
println!("pred is {:?}", pred);
assert_eq!(pred.leaves(), 4);
}
}

View File

@@ -0,0 +1,3 @@
// Copied from json.pest
int = @{ "0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT* }

View File

@@ -0,0 +1,6 @@
// Copied from json.pest
// Replaced "string" with "X" to avoid recursion.
string = @{ (!("\"" | "\\") ~ ANY)* ~ (escape ~ "X")? }
escape = @{ "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t" | unicode) }
unicode = @{ "u" ~ ASCII_HEX_DIGIT{4} }

View File

@@ -14,12 +14,17 @@ pub mod webpki;
pub use rangeset;
pub mod config;
pub(crate) mod display;
//pub mod grammar;
pub mod json;
pub mod predicates;
use serde::{Deserialize, Serialize};
use crate::{
connection::ServerName,
transcript::{PartialTranscript, TranscriptCommitment, TranscriptSecret},
transcript::{
encoding::EncoderSecret, PartialTranscript, TranscriptCommitment, TranscriptSecret,
},
};
/// Prover output.
@@ -40,6 +45,8 @@ pub struct VerifierOutput {
pub server_name: Option<ServerName>,
/// Transcript data.
pub transcript: Option<PartialTranscript>,
/// Encoding commitment secret.
pub encoder_secret: Option<EncoderSecret>,
/// Transcript commitments.
pub transcript_commitments: Vec<TranscriptCommitment>,
}

View File

@@ -63,6 +63,11 @@ impl MerkleProof {
Ok(())
}
/// Returns the leaf count of the Merkle tree associated with the proof.
pub(crate) fn leaf_count(&self) -> usize {
self.leaf_count
}
}
#[derive(Clone)]

View File

@@ -0,0 +1,790 @@
//! Predicate and compiler.
use std::{collections::HashMap, fmt};
use mpz_circuits::{itybity::ToBits, ops, Circuit, CircuitBuilder, Feed, Node};
use rangeset::RangeSet;
/// A predicate over bytes of data.
#[derive(Debug, Clone)]
pub(crate) enum Pred {
And(Vec<Pred>),
Or(Vec<Pred>),
Not(Box<Pred>),
Atom(Atom),
// An always-true predicate.
True,
// An always-false predicate.
False,
}
impl Pred {
/// Returns sorted unique byte indices of this predicate.
pub(crate) fn indices(&self) -> Vec<usize> {
let mut indices = self.indices_internal(self);
indices.sort_unstable();
indices.dedup();
indices
}
// Returns the number of leaves (i.e. atoms) in the AST of this predicate.
pub(crate) fn leaves(&self) -> usize {
match self {
Pred::And(vec) => vec.iter().map(|p| p.leaves()).sum(),
Pred::Or(vec) => vec.iter().map(|p| p.leaves()).sum(),
Pred::Not(p) => p.leaves(),
Pred::Atom(_) => 1,
Pred::True => 0,
Pred::False => 0,
}
}
/// Returns all byte indices of the given predicate.
fn indices_internal(&self, pred: &Pred) -> Vec<usize> {
match pred {
Pred::And(vec) => vec
.iter()
.flat_map(|p| self.indices_internal(p))
.collect::<Vec<_>>(),
Pred::Or(vec) => vec
.iter()
.flat_map(|p| self.indices_internal(p))
.collect::<Vec<_>>(),
Pred::Not(p) => self.indices_internal(p),
Pred::Atom(atom) => {
let mut indices = Vec::new();
indices.push(atom.index);
if let Rhs::Idx(idx) = atom.rhs {
indices.push(idx);
}
indices
}
Pred::True => vec![],
Pred::False => vec![],
}
}
}
impl fmt::Display for Pred {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.fmt_with_indent(f, 0)
}
}
impl Pred {
fn fmt_with_indent(&self, f: &mut fmt::Formatter<'_>, indent: usize) -> fmt::Result {
// helper to write the current indentation
fn pad(f: &mut fmt::Formatter<'_>, indent: usize) -> fmt::Result {
// 2 spaces per level; tweak as you like
write!(f, "{:indent$}", "", indent = indent * 2)
}
match self {
Pred::And(preds) => {
pad(f, indent)?;
writeln!(f, "And(")?;
for p in preds {
p.fmt_with_indent(f, indent + 1)?;
}
pad(f, indent)?;
writeln!(f, ")")
}
Pred::Or(preds) => {
pad(f, indent)?;
writeln!(f, "Or(")?;
for p in preds {
p.fmt_with_indent(f, indent + 1)?;
}
pad(f, indent)?;
writeln!(f, ")")
}
Pred::Not(p) => {
pad(f, indent)?;
writeln!(f, "Not(")?;
p.fmt_with_indent(f, indent + 1)?;
pad(f, indent)?;
writeln!(f, ")")
}
Pred::Atom(a) => {
pad(f, indent)?;
writeln!(f, "Atom({:?})", a)
}
Pred::True => {
pad(f, indent)?;
writeln!(f, "True")
}
Pred::False => {
pad(f, indent)?;
writeln!(f, "False")
}
}
}
}
/// Atomic predicate of the form:
/// x[index] (op) rhs
#[derive(Debug, Clone)]
pub struct Atom {
/// Left-hand side byte index `i` (x_i).
pub index: usize,
/// Comparison operator.
pub op: CmpOp,
/// Right-hand side operand (constant or x_j).
pub rhs: Rhs,
}
/// Comparison operator.
#[derive(Debug, Clone)]
pub(crate) enum CmpOp {
Eq, // ==
Ne, // !=
Gt, // >
Gte, // >=
Lt, // <
Lte, // <=
}
/// RHS of a comparison
#[derive(Debug, Clone)]
pub enum Rhs {
/// Byte at index
Idx(usize),
/// Literal constant.
Const(u8),
}
/// Compiles a predicate into a circuit.
pub struct Compiler {
/// A <byte index, circuit feeds> map.
map: HashMap<usize, [Node<Feed>; 8]>,
}
impl Compiler {
pub(crate) fn new() -> Self {
Self {
map: HashMap::new(),
}
}
/// Compiles the given predicate into a circuit.
pub(crate) fn compile(&mut self, pred: &Pred) -> Circuit {
let mut builder = CircuitBuilder::new();
for idx in pred.indices() {
let feeds = (0..8).map(|_| builder.add_input()).collect::<Vec<_>>();
self.map.insert(idx, feeds.try_into().unwrap());
}
let out = self.process(&mut builder, pred);
builder.add_output(out);
builder.build().unwrap()
}
// Processes a single predicate.
fn process(&mut self, builder: &mut CircuitBuilder, pred: &Pred) -> Node<Feed> {
match pred {
Pred::And(vec) => {
let out = vec
.iter()
.map(|p| self.process(builder, p))
.collect::<Vec<_>>();
ops::all(builder, &out)
}
Pred::Or(vec) => {
let out = vec
.iter()
.map(|p| self.process(builder, p))
.collect::<Vec<_>>();
ops::any(builder, &out)
}
Pred::Not(p) => {
let pred_out = self.process(builder, p);
let inv = ops::inv(builder, [pred_out]);
inv[0]
}
Pred::Atom(atom) => {
let lhs = self.map.get(&atom.index).unwrap().clone();
let rhs = match atom.rhs {
Rhs::Const(c) => const_feeds(builder, c),
Rhs::Idx(s) => self.map.get(&s).unwrap().clone(),
};
match atom.op {
CmpOp::Eq => ops::eq(builder, lhs, rhs),
CmpOp::Ne => ops::neq(builder, lhs, rhs),
CmpOp::Lt => ops::lt(builder, lhs, rhs),
CmpOp::Lte => ops::lte(builder, lhs, rhs),
CmpOp::Gt => ops::gt(builder, lhs, rhs),
CmpOp::Gte => ops::gte(builder, lhs, rhs),
}
}
Pred::True => builder.get_const_one(),
Pred::False => builder.get_const_zero(),
}
}
}
// Returns circuit feeds for the given constant u8 value.
fn const_feeds(builder: &CircuitBuilder, cnst: u8) -> [Node<Feed>; 8] {
cnst.iter_lsb0()
.map(|b| {
if b {
builder.get_const_one()
} else {
builder.get_const_zero()
}
})
.collect::<Vec<_>>()
.try_into()
.expect("u8 has 8 feeds")
}
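// For example, `const_feeds(&builder, 3)` yields the feeds for the bits
// [1, 1, 0, 0, 0, 0, 0, 0] in LSB0 order.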
// Evaluates the predicate on the input `data`.
pub(crate) fn eval_pred(pred: &Pred, data: &[u8]) -> bool {
match pred {
Pred::And(vec) => vec.iter().map(|p| eval_pred(p, data)).all(|b| b),
Pred::Or(vec) => vec.iter().map(|p| eval_pred(p, data)).any(|b| b),
Pred::Not(p) => !eval_pred(p, data),
Pred::Atom(atom) => {
let lhs = data[atom.index];
let rhs = match atom.rhs {
Rhs::Const(c) => c,
Rhs::Idx(s) => data[s],
};
match atom.op {
CmpOp::Eq => lhs == rhs,
CmpOp::Ne => lhs != rhs,
CmpOp::Lt => lhs < rhs,
CmpOp::Lte => lhs <= rhs,
CmpOp::Gt => lhs > rhs,
CmpOp::Gte => lhs >= rhs,
}
}
Pred::True => true,
Pred::False => false,
}
}
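// Illustrative example: with `data = b"abc"`, the predicate `x[0] < x[2]`
// (i.e. b'a' < b'c') evaluates to `true`:
//
//   let pred = Pred::Atom(Atom { index: 0, op: CmpOp::Lt, rhs: Rhs::Idx(2) });
//   assert!(eval_pred(&pred, b"abc"));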
/// Builds a predicate that an ASCII integer is contained in the ranges.
fn is_ascii_integer(range: RangeSet<usize>) -> Pred {
let mut preds = Vec::new();
for idx in range.iter() {
let lte = Pred::Atom(Atom {
index: idx,
op: CmpOp::Lte,
rhs: Rhs::Const(57u8),
});
let gte = Pred::Atom(Atom {
index: idx,
op: CmpOp::Gte,
rhs: Rhs::Const(48u8),
});
preds.push(Pred::And(vec![lte, gte]));
}
Pred::And(preds)
}
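// For example, `is_ascii_integer(RangeSet::from(2..4))` expands to
// `And([And([x[2] <= 57, x[2] >= 48]), And([x[3] <= 57, x[3] >= 48])])`,
// i.e. every byte in the range must be an ASCII digit ('0' is 48, '9' is 57).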
/// Builds a predicate that a valid HTTP header value is contained in the
/// ranges.
fn is_valid_http_header_value(range: RangeSet<usize>) -> Pred {
let mut preds = Vec::new();
for idx in range.iter() {
let ne = Pred::Atom(Atom {
index: idx,
op: CmpOp::Ne,
// ASCII code for carriage return '\r'.
rhs: Rhs::Const(13u8),
});
preds.push(ne);
}
Pred::And(preds)
}
/// Builds a predicate that a valid JSON string is contained in the
/// range (which must be contiguous).
fn is_valid_json_string(range: RangeSet<usize>) -> Pred {
assert!(
range.len_ranges() == 1,
"only a contiguous range is allowed"
);
const BACKSLASH: u8 = 92;
// Check that all escape sequences are allowed.
let mut preds = Vec::new();
// Find all \u sequences.
for (i, idx) in range.iter().enumerate() {
if i == range.len() - 1 {
// Skip the last char, since an escape sequence cannot start there.
continue;
}
// TODO: escape-sequence validation is unfinished; the predicate below is
// built but not yet pushed to `preds`, so the returned predicate is
// currently vacuously true.
let _is_backslash = Pred::Atom(Atom {
index: idx,
op: CmpOp::Eq,
rhs: Rhs::Const(BACKSLASH),
});
}
Pred::And(preds)
}
/// Returns a predicate that a single UTF-8 encoded Unicode char is contained
/// in the range.
pub(crate) fn is_unicode(range: RangeSet<usize>) -> Pred {
assert!(range.len() <= 4);
match range.len() {
1 => is_1_byte_unicode(range.max().unwrap()),
2 => is_2_byte_unicode(range),
3 => is_3_byte_unicode(range),
4 => is_4_byte_unicode(range),
_ => unimplemented!(),
}
}
fn is_1_byte_unicode(pos: usize) -> Pred {
Pred::Atom(Atom {
index: pos,
op: CmpOp::Lte,
rhs: Rhs::Const(127u8),
})
}
fn is_2_byte_unicode(range: RangeSet<usize>) -> Pred {
assert!(range.len() == 2);
let mut iter = range.iter();
// should be 110xxxxx
let first = iter.next().unwrap();
let gte = Pred::Atom(Atom {
index: first,
op: CmpOp::Gte,
rhs: Rhs::Const(0xC0),
});
let lte = Pred::Atom(Atom {
index: first,
op: CmpOp::Lte,
rhs: Rhs::Const(0xDF),
});
let second = iter.next().unwrap();
Pred::And(vec![lte, gte, is_unicode_continuation(second)])
}
fn is_3_byte_unicode(range: RangeSet<usize>) -> Pred {
assert!(range.len() == 3);
let mut iter = range.iter();
let first = iter.next().unwrap();
// should be 1110xxxx
let gte = Pred::Atom(Atom {
index: first,
op: CmpOp::Gte,
rhs: Rhs::Const(0xE0),
});
let lte = Pred::Atom(Atom {
index: first,
op: CmpOp::Lte,
rhs: Rhs::Const(0xEF),
});
let second = iter.next().unwrap();
let third = iter.next().unwrap();
Pred::And(vec![
lte,
gte,
is_unicode_continuation(second),
is_unicode_continuation(third),
])
}
fn is_4_byte_unicode(range: RangeSet<usize>) -> Pred {
assert!(range.len() == 4);
let mut iter = range.iter();
let first = iter.next().unwrap();
// should be 11110xxx
let gte = Pred::Atom(Atom {
index: first,
op: CmpOp::Gte,
rhs: Rhs::Const(0xF0),
});
let lte = Pred::Atom(Atom {
index: first,
op: CmpOp::Lte,
rhs: Rhs::Const(0xF7),
});
let second = iter.next().unwrap();
let third = iter.next().unwrap();
let fourth = iter.next().unwrap();
Pred::And(vec![
lte,
gte,
is_unicode_continuation(second),
is_unicode_continuation(third),
is_unicode_continuation(fourth),
])
}
fn is_unicode_continuation(pos: usize) -> Pred {
// should be 10xxxxxx
let gte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Gte,
rhs: Rhs::Const(0x80),
});
let lte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Lte,
rhs: Rhs::Const(0xBF),
});
Pred::And(vec![lte, gte])
}
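// Summary of the UTF-8 byte patterns checked above:
//
//   1 byte : 0xxxxxxx                             first byte 0x00..=0x7F
//   2 bytes: 110xxxxx 10xxxxxx                    first byte 0xC0..=0xDF
//   3 bytes: 1110xxxx 10xxxxxx 10xxxxxx           first byte 0xE0..=0xEF
//   4 bytes: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx  first byte 0xF0..=0xF7
//
// with every continuation byte in 0x80..=0xBF. Note these checks are slightly
// looser than strict UTF-8, which excludes first bytes 0xC0, 0xC1 and
// 0xF5..=0xF7 (overlong and out-of-range encodings).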
fn is_ascii_hex_digit(pos: usize) -> Pred {
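// An ASCII hex digit is '0'..='9' (48..=57), 'A'..='F' (65..=70), or
// 'a'..='f' (97..=102).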
let gte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Gte,
rhs: Rhs::Const(48u8),
});
let lte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Lte,
rhs: Rhs::Const(57u8),
});
let is_digit = Pred::And(vec![lte, gte]);
let gte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Gte,
rhs: Rhs::Const(65u8),
});
let lte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Lte,
rhs: Rhs::Const(70u8),
});
let is_upper = Pred::And(vec![lte, gte]);
let gte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Gte,
rhs: Rhs::Const(97u8),
});
let lte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Lte,
rhs: Rhs::Const(102u8),
});
let is_lower = Pred::And(vec![lte, gte]);
Pred::Or(vec![is_digit, is_lower, is_upper])
}
fn is_ascii_lowercase(pos: usize) -> Pred {
// ASCII 'a' is 97, 'z' is 122.
let gte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Gte,
rhs: Rhs::Const(97u8),
});
let lte = Pred::Atom(Atom {
index: pos,
op: CmpOp::Lte,
rhs: Rhs::Const(122u8),
});
Pred::And(vec![lte, gte])
}
#[cfg(test)]
mod test {
use super::*;
use mpz_circuits::evaluate;
use rand::rng;
#[test]
fn test_and() {
let pred = Pred::And(vec![
Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Idx(300),
}),
Pred::Atom(Atom {
index: 200,
op: CmpOp::Eq,
rhs: Rhs::Const(2u8),
}),
]);
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [1u8, 2, 3]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [1u8, 3, 3]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_or() {
let pred = Pred::Or(vec![
Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Idx(300),
}),
Pred::Atom(Atom {
index: 200,
op: CmpOp::Eq,
rhs: Rhs::Const(2u8),
}),
]);
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [1u8, 0, 3]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [1u8, 3, 0]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_not() {
let pred = Pred::Not(Box::new(Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Idx(300),
})));
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [5u8, 3]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [1u8, 3]).unwrap();
assert_eq!(out, false);
}
// Tests when RHS is a const.
#[test]
fn test_rhs_const() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Const(22u8),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, 5u8).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, 23u8).unwrap();
assert_eq!(out, false);
}
// Tests when RHS is an index.
#[test]
fn test_rhs_idx() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Idx(200),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, 5u8, 10u8).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, 23u8, 5u8).unwrap();
assert_eq!(out, false);
}
// Tests when same index is used in the predicate.
#[test]
fn test_same_idx() {
let pred1 = Pred::Atom(Atom {
index: 100,
op: CmpOp::Eq,
rhs: Rhs::Idx(100),
});
let pred2 = Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Idx(100),
});
let circ = Compiler::new().compile(&pred1);
let out: bool = evaluate!(circ, 5u8).unwrap();
assert_eq!(out, true);
let circ = Compiler::new().compile(&pred2);
let out: bool = evaluate!(circ, 5u8).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_atom_eq() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Eq,
rhs: Rhs::Idx(300),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [5u8, 5]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [1u8, 3]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_atom_neq() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Ne,
rhs: Rhs::Idx(300),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [5u8, 6]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [1u8, 1]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_atom_gt() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Gt,
rhs: Rhs::Idx(300),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [7u8, 6]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [1u8, 1]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_atom_gte() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Gte,
rhs: Rhs::Idx(300),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [7u8, 7]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [0u8, 1]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_atom_lt() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Lt,
rhs: Rhs::Idx(300),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [2u8, 7]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [4u8, 1]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_atom_lte() {
let pred = Pred::Atom(Atom {
index: 100,
op: CmpOp::Lte,
rhs: Rhs::Idx(300),
});
let circ = Compiler::new().compile(&pred);
let out: bool = evaluate!(circ, [2u8, 2]).unwrap();
assert_eq!(out, true);
let out: bool = evaluate!(circ, [4u8, 1]).unwrap();
assert_eq!(out, false);
}
#[test]
fn test_is_ascii_integer() {
let text = "text with integers 123456 text";
let pos = text.find("123456").unwrap();
let pred = is_ascii_integer(RangeSet::from(pos..pos + 6));
let bytes: &[u8] = text.as_bytes();
let out = eval_pred(&pred, bytes);
assert_eq!(out, true);
let out = eval_pred(&pred, &[&[0u8], bytes].concat());
assert_eq!(out, false);
}
#[test]
fn test_is_valid_http_header_value() {
let valid = "valid header value";
let invalid = "invalid header \r value";
let pred = is_valid_http_header_value(RangeSet::from(0..valid.len()));
let out: bool = eval_pred(&pred, valid.as_bytes());
assert_eq!(out, true);
let pred = is_valid_http_header_value(RangeSet::from(0..invalid.len()));
let out = eval_pred(&pred, invalid.as_bytes());
assert_eq!(out, false);
}
#[test]
fn test_is_unicode() {
use rand::Rng;
let mut rng = rng();
for _ in 0..1_000_000 {
let mut s = String::from("HelloWorld");
let insert_pos = 5; // logical character index (after "Hello")
let byte_index = s
.char_indices()
.nth(insert_pos)
.map(|(i, _)| i)
.unwrap_or_else(|| s.len());
// Pick a random Unicode scalar value (0x0000..=0x10FFFF)
// Retry if it's in the surrogate range (U+D800..=U+DFFF)
let c = loop {
let code = rng.random_range(0x0000u32..=0x10FFFF);
if !(0xD800..=0xDFFF).contains(&code) {
if let Some(ch) = char::from_u32(code) {
break ch;
}
}
};
let mut buf = [0u8; 4]; // max UTF-8 length
let encoded = c.encode_utf8(&mut buf); // returns &str
let len = encoded.len();
s.insert_str(byte_index, encoded);
let pred = is_unicode(RangeSet::from(byte_index..byte_index + len));
let out = eval_pred(&pred, s.as_bytes());
assert_eq!(out, true);
}
let bad_unicode = 255u8;
let pred = is_unicode(RangeSet::from(0..1));
let out = eval_pred(&pred, &[bad_unicode]);
assert_eq!(out, false);
}
}

View File

@@ -19,17 +19,14 @@
//! withheld.
mod commit;
pub mod encoding;
pub mod hash;
mod proof;
mod tls;
use std::{fmt, ops::Range};
use rangeset::{
iter::RangeIterator,
ops::{Index, Set},
set::RangeSet,
};
use rangeset::{Difference, IndexRanges, RangeSet, Union};
use serde::{Deserialize, Serialize};
use crate::connection::TranscriptLength;
@@ -109,14 +106,8 @@ impl Transcript {
}
Some(
Subsequence::new(
idx.clone(),
data.index(idx).fold(Vec::new(), |mut acc, s| {
acc.extend_from_slice(s);
acc
}),
)
.expect("data is same length as index"),
Subsequence::new(idx.clone(), data.index_ranges(idx))
.expect("data is same length as index"),
)
}
@@ -138,11 +129,11 @@ impl Transcript {
let mut sent = vec![0; self.sent.len()];
let mut received = vec![0; self.received.len()];
for range in sent_idx.iter() {
for range in sent_idx.iter_ranges() {
sent[range.clone()].copy_from_slice(&self.sent[range]);
}
for range in recv_idx.iter() {
for range in recv_idx.iter_ranges() {
received[range.clone()].copy_from_slice(&self.received[range]);
}
@@ -195,20 +186,12 @@ pub struct CompressedPartialTranscript {
impl From<PartialTranscript> for CompressedPartialTranscript {
fn from(uncompressed: PartialTranscript) -> Self {
Self {
sent_authed: uncompressed.sent.index(&uncompressed.sent_authed_idx).fold(
Vec::new(),
|mut acc, s| {
acc.extend_from_slice(s);
acc
},
),
sent_authed: uncompressed
.sent
.index_ranges(&uncompressed.sent_authed_idx),
received_authed: uncompressed
.received
.index(&uncompressed.received_authed_idx)
.fold(Vec::new(), |mut acc, s| {
acc.extend_from_slice(s);
acc
}),
.index_ranges(&uncompressed.received_authed_idx),
sent_idx: uncompressed.sent_authed_idx,
recv_idx: uncompressed.received_authed_idx,
sent_total: uncompressed.sent.len(),
@@ -224,7 +207,7 @@ impl From<CompressedPartialTranscript> for PartialTranscript {
let mut offset = 0;
for range in compressed.sent_idx.iter() {
for range in compressed.sent_idx.iter_ranges() {
sent[range.clone()]
.copy_from_slice(&compressed.sent_authed[offset..offset + range.len()]);
offset += range.len();
@@ -232,7 +215,7 @@ impl From<CompressedPartialTranscript> for PartialTranscript {
let mut offset = 0;
for range in compressed.recv_idx.iter() {
for range in compressed.recv_idx.iter_ranges() {
received[range.clone()]
.copy_from_slice(&compressed.received_authed[offset..offset + range.len()]);
offset += range.len();
@@ -321,16 +304,12 @@ impl PartialTranscript {
/// Returns the index of sent data which haven't been authenticated.
pub fn sent_unauthed(&self) -> RangeSet<usize> {
(0..self.sent.len())
.difference(&self.sent_authed_idx)
.into_set()
(0..self.sent.len()).difference(&self.sent_authed_idx)
}
/// Returns the index of received data which haven't been authenticated.
pub fn received_unauthed(&self) -> RangeSet<usize> {
(0..self.received.len())
.difference(&self.received_authed_idx)
.into_set()
(0..self.received.len()).difference(&self.received_authed_idx)
}
/// Returns an iterator over the authenticated data in the transcript.
@@ -340,7 +319,7 @@ impl PartialTranscript {
Direction::Received => (&self.received, &self.received_authed_idx),
};
authed.iter_values().map(move |i| data[i])
authed.iter().map(|i| data[i])
}
/// Unions the authenticated data of this transcript with another.
@@ -360,20 +339,24 @@ impl PartialTranscript {
"received data are not the same length"
);
for range in other.sent_authed_idx.difference(&self.sent_authed_idx) {
for range in other
.sent_authed_idx
.difference(&self.sent_authed_idx)
.iter_ranges()
{
self.sent[range.clone()].copy_from_slice(&other.sent[range]);
}
for range in other
.received_authed_idx
.difference(&self.received_authed_idx)
.iter_ranges()
{
self.received[range.clone()].copy_from_slice(&other.received[range]);
}
self.sent_authed_idx.union_mut(&other.sent_authed_idx);
self.received_authed_idx
.union_mut(&other.received_authed_idx);
self.sent_authed_idx = self.sent_authed_idx.union(&other.sent_authed_idx);
self.received_authed_idx = self.received_authed_idx.union(&other.received_authed_idx);
}
/// Unions an authenticated subsequence into this transcript.
@@ -385,11 +368,11 @@ impl PartialTranscript {
match direction {
Direction::Sent => {
seq.copy_to(&mut self.sent);
self.sent_authed_idx.union_mut(&seq.idx);
self.sent_authed_idx = self.sent_authed_idx.union(&seq.idx);
}
Direction::Received => {
seq.copy_to(&mut self.received);
self.received_authed_idx.union_mut(&seq.idx);
self.received_authed_idx = self.received_authed_idx.union(&seq.idx);
}
}
}
@@ -400,10 +383,10 @@ impl PartialTranscript {
///
/// * `value` - The value to set the unauthenticated bytes to
pub fn set_unauthed(&mut self, value: u8) {
for range in self.sent_unauthed().iter() {
for range in self.sent_unauthed().iter_ranges() {
self.sent[range].fill(value);
}
for range in self.received_unauthed().iter() {
for range in self.received_unauthed().iter_ranges() {
self.received[range].fill(value);
}
}
@@ -418,13 +401,13 @@ impl PartialTranscript {
pub fn set_unauthed_range(&mut self, value: u8, direction: Direction, range: Range<usize>) {
match direction {
Direction::Sent => {
for r in range.difference(&self.sent_authed_idx) {
self.sent[r].fill(value);
for range in range.difference(&self.sent_authed_idx).iter_ranges() {
self.sent[range].fill(value);
}
}
Direction::Received => {
for r in range.difference(&self.received_authed_idx) {
self.received[r].fill(value);
for range in range.difference(&self.received_authed_idx).iter_ranges() {
self.received[range].fill(value);
}
}
}
@@ -502,7 +485,7 @@ impl Subsequence {
/// Panics if the subsequence ranges are out of bounds.
pub(crate) fn copy_to(&self, dest: &mut [u8]) {
let mut offset = 0;
for range in self.idx.iter() {
for range in self.idx.iter_ranges() {
dest[range.clone()].copy_from_slice(&self.data[offset..offset + range.len()]);
offset += range.len();
}
@@ -627,7 +610,12 @@ mod validation {
mut partial_transcript: CompressedPartialTranscriptUnchecked,
) {
// Change the total to be less than the last range's end bound.
let end = partial_transcript.sent_idx.iter().next_back().unwrap().end;
let end = partial_transcript
.sent_idx
.iter_ranges()
.next_back()
.unwrap()
.end;
partial_transcript.sent_total = end - 1;

View File

@@ -2,21 +2,33 @@
use std::{collections::HashSet, fmt};
use rangeset::set::ToRangeSet;
use rangeset::{ToRangeSet, UnionMut};
use serde::{Deserialize, Serialize};
use crate::{
hash::HashAlgId,
transcript::{
encoding::{EncodingCommitment, EncodingTree},
hash::{PlaintextHash, PlaintextHashSecret},
Direction, RangeSet, Transcript,
},
};
/// The maximum allowed total byte length of committed data for a single
/// commitment kind. Used to prevent DoS during verification. (May cause the
/// verifier to hash up to a max of 1GB * 128 = 128GB of data for certain kinds
/// of encoding commitments.)
///
/// This value must not exceed bcs's MAX_SEQUENCE_LENGTH limit (which is (1 <<
/// 31) - 1 by default).
pub(crate) const MAX_TOTAL_COMMITTED_DATA: usize = 1_000_000_000;
/// Kind of transcript commitment.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum TranscriptCommitmentKind {
/// A commitment to encodings of the transcript.
Encoding,
/// A hash commitment to plaintext in the transcript.
Hash {
/// The hash algorithm used.
@@ -27,6 +39,7 @@ pub enum TranscriptCommitmentKind {
impl fmt::Display for TranscriptCommitmentKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Encoding => f.write_str("encoding"),
Self::Hash { alg } => write!(f, "hash ({alg})"),
}
}
@@ -36,6 +49,8 @@ impl fmt::Display for TranscriptCommitmentKind {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[non_exhaustive]
pub enum TranscriptCommitment {
/// Encoding commitment.
Encoding(EncodingCommitment),
/// Plaintext hash commitment.
Hash(PlaintextHash),
}
@@ -44,6 +59,8 @@ pub enum TranscriptCommitment {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[non_exhaustive]
pub enum TranscriptSecret {
/// Encoding tree.
Encoding(EncodingTree),
/// Plaintext hash secret.
Hash(PlaintextHashSecret),
}
@@ -51,6 +68,9 @@ pub enum TranscriptSecret {
/// Configuration for transcript commitments.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscriptCommitConfig {
encoding_hash_alg: HashAlgId,
has_encoding: bool,
has_hash: bool,
commits: Vec<((Direction, RangeSet<usize>), TranscriptCommitmentKind)>,
}
@@ -60,23 +80,53 @@ impl TranscriptCommitConfig {
TranscriptCommitConfigBuilder::new(transcript)
}
/// Returns the hash algorithm to use for encoding commitments.
pub fn encoding_hash_alg(&self) -> &HashAlgId {
&self.encoding_hash_alg
}
/// Returns `true` if the configuration has any encoding commitments.
pub fn has_encoding(&self) -> bool {
self.has_encoding
}
/// Returns `true` if the configuration has any hash commitments.
pub fn has_hash(&self) -> bool {
self.commits
.iter()
.any(|(_, kind)| matches!(kind, TranscriptCommitmentKind::Hash { .. }))
self.has_hash
}
/// Returns an iterator over the encoding commitment indices.
pub fn iter_encoding(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
self.commits.iter().filter_map(|(idx, kind)| match kind {
TranscriptCommitmentKind::Encoding => Some(idx),
_ => None,
})
}
/// Returns an iterator over the hash commitment indices.
pub fn iter_hash(&self) -> impl Iterator<Item = (&(Direction, RangeSet<usize>), &HashAlgId)> {
self.commits.iter().map(|(idx, kind)| match kind {
TranscriptCommitmentKind::Hash { alg } => (idx, alg),
self.commits.iter().filter_map(|(idx, kind)| match kind {
TranscriptCommitmentKind::Hash { alg } => Some((idx, alg)),
_ => None,
})
}
/// Returns a request for the transcript commitments.
pub fn to_request(&self) -> TranscriptCommitRequest {
TranscriptCommitRequest {
encoding: self.has_encoding.then(|| {
let mut sent = RangeSet::default();
let mut recv = RangeSet::default();
for (dir, idx) in self.iter_encoding() {
match dir {
Direction::Sent => sent.union_mut(idx),
Direction::Received => recv.union_mut(idx),
}
}
(sent, recv)
}),
hash: self
.iter_hash()
.map(|((dir, idx), alg)| (*dir, idx.clone(), *alg))
@@ -86,9 +136,15 @@ impl TranscriptCommitConfig {
}
/// A builder for [`TranscriptCommitConfig`].
///
/// The default hash algorithm is [`HashAlgId::BLAKE3`] and the default kind
/// is [`TranscriptCommitmentKind::Encoding`].
#[derive(Debug)]
pub struct TranscriptCommitConfigBuilder<'a> {
transcript: &'a Transcript,
encoding_hash_alg: HashAlgId,
has_encoding: bool,
has_hash: bool,
default_kind: TranscriptCommitmentKind,
commits: HashSet<((Direction, RangeSet<usize>), TranscriptCommitmentKind)>,
}
@@ -98,13 +154,20 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
pub fn new(transcript: &'a Transcript) -> Self {
Self {
transcript,
default_kind: TranscriptCommitmentKind::Hash {
alg: HashAlgId::BLAKE3,
},
encoding_hash_alg: HashAlgId::BLAKE3,
has_encoding: false,
has_hash: false,
default_kind: TranscriptCommitmentKind::Encoding,
commits: HashSet::default(),
}
}
/// Sets the hash algorithm to use for encoding commitments.
pub fn encoding_hash_alg(&mut self, alg: HashAlgId) -> &mut Self {
self.encoding_hash_alg = alg;
self
}
/// Sets the default kind of commitment to use.
pub fn default_kind(&mut self, default_kind: TranscriptCommitmentKind) -> &mut Self {
self.default_kind = default_kind;
@@ -138,6 +201,11 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
));
}
match kind {
TranscriptCommitmentKind::Encoding => self.has_encoding = true,
TranscriptCommitmentKind::Hash { .. } => self.has_hash = true,
}
self.commits.insert(((direction, idx), kind));
Ok(self)
@@ -184,6 +252,9 @@ impl<'a> TranscriptCommitConfigBuilder<'a> {
/// Builds the configuration.
pub fn build(self) -> Result<TranscriptCommitConfig, TranscriptCommitConfigBuilderError> {
Ok(TranscriptCommitConfig {
encoding_hash_alg: self.encoding_hash_alg,
has_encoding: self.has_encoding,
has_hash: self.has_hash,
commits: Vec::from_iter(self.commits),
})
}
@@ -230,10 +301,16 @@ impl fmt::Display for TranscriptCommitConfigBuilderError {
/// Request to compute transcript commitments.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscriptCommitRequest {
encoding: Option<(RangeSet<usize>, RangeSet<usize>)>,
hash: Vec<(Direction, RangeSet<usize>, HashAlgId)>,
}
impl TranscriptCommitRequest {
/// Returns `true` if an encoding commitment is requested.
pub fn has_encoding(&self) -> bool {
self.encoding.is_some()
}
/// Returns `true` if a hash commitment is requested.
pub fn has_hash(&self) -> bool {
!self.hash.is_empty()
@@ -243,6 +320,11 @@ impl TranscriptCommitRequest {
pub fn iter_hash(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>, HashAlgId)> {
self.hash.iter()
}
/// Returns the ranges of the encoding commitments.
pub fn encoding(&self) -> Option<&(RangeSet<usize>, RangeSet<usize>)> {
self.encoding.as_ref()
}
}
#[cfg(test)]

View File

@@ -0,0 +1,22 @@
//! Transcript encoding commitments and proofs.
mod encoder;
mod proof;
mod provider;
mod tree;
pub use encoder::{new_encoder, Encoder, EncoderSecret};
pub use proof::{EncodingProof, EncodingProofError};
pub use provider::{EncodingProvider, EncodingProviderError};
pub use tree::{EncodingTree, EncodingTreeError};
use serde::{Deserialize, Serialize};
use crate::hash::TypedHash;
/// Transcript encoding commitment.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct EncodingCommitment {
/// Merkle root of the encoding commitments.
pub root: TypedHash,
}

View File

@@ -0,0 +1,137 @@
use std::ops::Range;
use crate::transcript::Direction;
use itybity::ToBits;
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaCha12Rng;
use serde::{Deserialize, Serialize};
/// The size of the encoding for 1 bit, in bytes.
const BIT_ENCODING_SIZE: usize = 16;
/// The size of the encoding for 1 byte, in bytes.
const BYTE_ENCODING_SIZE: usize = 128;
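// (8 bits per byte * BIT_ENCODING_SIZE of 16 bytes per bit = 128 bytes.)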
/// Secret used by an encoder to generate encodings.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct EncoderSecret {
seed: [u8; 32],
delta: [u8; BIT_ENCODING_SIZE],
}
opaque_debug::implement!(EncoderSecret);
impl EncoderSecret {
/// Creates a new secret.
///
/// # Arguments
///
/// * `seed` - The seed for the PRG.
/// * `delta` - Delta for deriving the one-encodings.
pub fn new(seed: [u8; 32], delta: [u8; 16]) -> Self {
Self { seed, delta }
}
/// Returns the seed.
pub fn seed(&self) -> &[u8; 32] {
&self.seed
}
/// Returns the delta.
pub fn delta(&self) -> &[u8; 16] {
&self.delta
}
}
/// Creates a new encoder.
pub fn new_encoder(secret: &EncoderSecret) -> impl Encoder {
ChaChaEncoder::new(secret)
}
pub(crate) struct ChaChaEncoder {
seed: [u8; 32],
delta: [u8; 16],
}
impl ChaChaEncoder {
pub(crate) fn new(secret: &EncoderSecret) -> Self {
let seed = *secret.seed();
let delta = *secret.delta();
Self { seed, delta }
}
pub(crate) fn new_prg(&self, stream_id: u64) -> ChaCha12Rng {
let mut prg = ChaCha12Rng::from_seed(self.seed);
prg.set_stream(stream_id);
prg.set_word_pos(0);
prg
}
}
/// A transcript encoder.
///
/// This is an internal implementation detail that should not be exposed to the
/// public API.
pub trait Encoder {
/// Writes the zero encoding for the given range of the transcript into the
/// destination buffer.
fn encode_range(&self, direction: Direction, range: Range<usize>, dest: &mut Vec<u8>);
/// Writes the encoding for the given data into the destination buffer.
fn encode_data(
&self,
direction: Direction,
range: Range<usize>,
data: &[u8],
dest: &mut Vec<u8>,
);
}
impl Encoder for ChaChaEncoder {
fn encode_range(&self, direction: Direction, range: Range<usize>, dest: &mut Vec<u8>) {
// ChaCha encoder works with 32-bit words. Each encoded bit is 128 bits long.
const WORDS_PER_BYTE: u128 = 8 * 128 / 32;
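// 8 * 128 / 32 = 32 words per transcript byte.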
let stream_id: u64 = match direction {
Direction::Sent => 0,
Direction::Received => 1,
};
let mut prg = self.new_prg(stream_id);
let len = range.len() * BYTE_ENCODING_SIZE;
let pos = dest.len();
// Write 0s to the destination buffer.
dest.resize(pos + len, 0);
// Fill the destination buffer with the PRG.
prg.set_word_pos(range.start as u128 * WORDS_PER_BYTE);
prg.fill_bytes(&mut dest[pos..pos + len]);
}
fn encode_data(
&self,
direction: Direction,
range: Range<usize>,
data: &[u8],
dest: &mut Vec<u8>,
) {
const ZERO: [u8; 16] = [0; BIT_ENCODING_SIZE];
let pos = dest.len();
// Write the zero encoding for the given range.
self.encode_range(direction, range, dest);
let dest = &mut dest[pos..];
for (pos, bit) in data.iter_lsb0().enumerate() {
// Add the delta to the encoding whenever the encoded bit is 1,
// otherwise add a zero.
let summand = if bit { &self.delta } else { &ZERO };
dest[pos * BIT_ENCODING_SIZE..(pos + 1) * BIT_ENCODING_SIZE]
.iter_mut()
.zip(summand)
.for_each(|(a, b)| *a ^= *b);
}
}
}
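// Minimal usage sketch (illustrative; the values are arbitrary):
//
//   let secret = EncoderSecret::new([0u8; 32], [1u8; 16]);
//   let encoder = new_encoder(&secret);
//   let mut dest = Vec::new();
//   encoder.encode_data(Direction::Sent, 0..3, b"abc", &mut dest);
//   assert_eq!(dest.len(), 3 * BYTE_ENCODING_SIZE);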

View File

@@ -0,0 +1,361 @@
use std::{collections::HashMap, fmt};
use rangeset::{RangeSet, UnionMut};
use serde::{Deserialize, Serialize};
use crate::{
hash::{Blinder, HashProvider, HashProviderError},
merkle::{MerkleError, MerkleProof},
transcript::{
commit::MAX_TOTAL_COMMITTED_DATA,
encoding::{new_encoder, Encoder, EncoderSecret, EncodingCommitment},
Direction,
},
};
/// An opening of a leaf in the encoding tree.
#[derive(Clone, Serialize, Deserialize)]
pub(super) struct Opening {
pub(super) direction: Direction,
pub(super) idx: RangeSet<usize>,
pub(super) blinder: Blinder,
}
opaque_debug::implement!(Opening);
/// An encoding commitment proof.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(try_from = "validation::EncodingProofUnchecked")]
pub struct EncodingProof {
/// The proof of inclusion of the commitment(s) in the Merkle tree of
/// commitments.
pub(super) inclusion_proof: MerkleProof,
pub(super) openings: HashMap<usize, Opening>,
}
impl EncodingProof {
/// Verifies the proof against the commitment.
///
/// Returns the authenticated indices of the sent and received data,
/// respectively.
///
/// # Arguments
///
/// * `provider` - Hash provider.
/// * `commitment` - Encoding commitment to verify against.
/// * `sent` - Sent data to authenticate.
/// * `recv` - Received data to authenticate.
pub fn verify_with_provider(
&self,
provider: &HashProvider,
secret: &EncoderSecret,
commitment: &EncodingCommitment,
sent: &[u8],
recv: &[u8],
) -> Result<(RangeSet<usize>, RangeSet<usize>), EncodingProofError> {
let hasher = provider.get(&commitment.root.alg)?;
let encoder = new_encoder(secret);
let Self {
inclusion_proof,
openings,
} = self;
let mut leaves = Vec::with_capacity(openings.len());
let mut expected_leaf = Vec::default();
let mut total_opened = 0u128;
let mut auth_sent = RangeSet::default();
let mut auth_recv = RangeSet::default();
for (
id,
Opening {
direction,
idx,
blinder,
},
) in openings
{
// Make sure the amount of data being proved is bounded.
total_opened += idx.len() as u128;
if total_opened > MAX_TOTAL_COMMITTED_DATA as u128 {
return Err(EncodingProofError::new(
ErrorKind::Proof,
"exceeded maximum allowed data",
));
}
let (data, auth) = match direction {
Direction::Sent => (sent, &mut auth_sent),
Direction::Received => (recv, &mut auth_recv),
};
// Make sure the ranges are within the bounds of the transcript.
if idx.end().unwrap_or(0) > data.len() {
return Err(EncodingProofError::new(
ErrorKind::Proof,
format!(
"index out of bounds of the transcript ({}): {} > {}",
direction,
idx.end().unwrap_or(0),
data.len()
),
));
}
expected_leaf.clear();
for range in idx.iter_ranges() {
encoder.encode_data(*direction, range.clone(), &data[range], &mut expected_leaf);
}
expected_leaf.extend_from_slice(blinder.as_bytes());
// Compute the expected hash of the commitment to make sure it is
// present in the merkle tree.
leaves.push((*id, hasher.hash(&expected_leaf)));
auth.union_mut(idx);
}
// Verify that the expected hashes are present in the merkle tree.
//
// This proves the Prover committed to the purported data prior to the encoder
// seed being revealed. Ergo, if the encodings are authentic then the purported
// data is authentic.
inclusion_proof.verify(hasher, &commitment.root, leaves)?;
Ok((auth_sent, auth_recv))
}
}
/// Error for [`EncodingProof`].
#[derive(Debug, thiserror::Error)]
pub struct EncodingProofError {
kind: ErrorKind,
source: Option<Box<dyn std::error::Error + Send + Sync>>,
}
impl EncodingProofError {
fn new<E>(kind: ErrorKind, source: E) -> Self
where
E: Into<Box<dyn std::error::Error + Send + Sync>>,
{
Self {
kind,
source: Some(source.into()),
}
}
}
#[derive(Debug)]
enum ErrorKind {
Provider,
Proof,
}
impl fmt::Display for EncodingProofError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("encoding proof error: ")?;
match self.kind {
ErrorKind::Provider => f.write_str("provider error")?,
ErrorKind::Proof => f.write_str("proof error")?,
}
if let Some(source) = &self.source {
write!(f, " caused by: {source}")?;
}
Ok(())
}
}
impl From<HashProviderError> for EncodingProofError {
fn from(error: HashProviderError) -> Self {
Self::new(ErrorKind::Provider, error)
}
}
impl From<MerkleError> for EncodingProofError {
fn from(error: MerkleError) -> Self {
Self::new(ErrorKind::Proof, error)
}
}
/// Invalid encoding proof error.
#[derive(Debug, thiserror::Error)]
#[error("invalid encoding proof: {0}")]
pub struct InvalidEncodingProof(&'static str);
mod validation {
use super::*;
/// The maximum allowed height of the Merkle tree of encoding commitments.
///
/// The statistical security parameter (SSP) of the encoding commitment
/// protocol is calculated as "the number of uniformly random bits in a
/// single bit's encoding minus `MAX_HEIGHT`".
///
/// For example, a bit encoding used in garbled circuits typically has 127
/// uniformly random bits, hence when using it in the encoding
/// commitment protocol, the SSP is 127 - 30 = 97 bits.
///
/// Leaving this validation here as a fail-safe in case we ever start
/// using shorter encodings.
const MAX_HEIGHT: usize = 30;
#[derive(Debug, Deserialize)]
pub(super) struct EncodingProofUnchecked {
inclusion_proof: MerkleProof,
openings: HashMap<usize, Opening>,
}
impl TryFrom<EncodingProofUnchecked> for EncodingProof {
type Error = InvalidEncodingProof;
fn try_from(unchecked: EncodingProofUnchecked) -> Result<Self, Self::Error> {
if unchecked.inclusion_proof.leaf_count() > 1 << MAX_HEIGHT {
return Err(InvalidEncodingProof(
"the height of the tree exceeds the maximum allowed",
));
}
Ok(Self {
inclusion_proof: unchecked.inclusion_proof,
openings: unchecked.openings,
})
}
}
}
#[cfg(test)]
mod test {
use tlsn_data_fixtures::http::{request::POST_JSON, response::OK_JSON};
use crate::{
fixtures::{encoder_secret, encoder_secret_tampered_seed, encoding_provider},
hash::Blake3,
transcript::{encoding::EncodingTree, Transcript},
};
use super::*;
struct EncodingFixture {
transcript: Transcript,
proof: EncodingProof,
commitment: EncodingCommitment,
}
fn new_encoding_fixture() -> EncodingFixture {
let transcript = Transcript::new(POST_JSON, OK_JSON);
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len()));
let provider = encoding_provider(transcript.sent(), transcript.received());
let tree = EncodingTree::new(&Blake3::default(), [&idx_0, &idx_1], &provider).unwrap();
let proof = tree.proof([&idx_0, &idx_1].into_iter()).unwrap();
let commitment = EncodingCommitment { root: tree.root() };
EncodingFixture {
transcript,
proof,
commitment,
}
}
#[test]
fn test_verify_encoding_proof_tampered_seed() {
let EncodingFixture {
transcript,
proof,
commitment,
} = new_encoding_fixture();
let err = proof
.verify_with_provider(
&HashProvider::default(),
&encoder_secret_tampered_seed(),
&commitment,
transcript.sent(),
transcript.received(),
)
.unwrap_err();
assert!(matches!(err.kind, ErrorKind::Proof));
}
#[test]
fn test_verify_encoding_proof_out_of_range() {
let EncodingFixture {
transcript,
proof,
commitment,
} = new_encoding_fixture();
let sent = &transcript.sent()[transcript.sent().len() - 1..];
let recv = &transcript.received()[transcript.received().len() - 2..];
let err = proof
.verify_with_provider(
&HashProvider::default(),
&encoder_secret(),
&commitment,
sent,
recv,
)
.unwrap_err();
assert!(matches!(err.kind, ErrorKind::Proof));
}
#[test]
fn test_verify_encoding_proof_tampered_idx() {
let EncodingFixture {
transcript,
mut proof,
commitment,
} = new_encoding_fixture();
let Opening { idx, .. } = proof.openings.values_mut().next().unwrap();
*idx = RangeSet::from([0..3, 13..15]);
let err = proof
.verify_with_provider(
&HashProvider::default(),
&encoder_secret(),
&commitment,
transcript.sent(),
transcript.received(),
)
.unwrap_err();
assert!(matches!(err.kind, ErrorKind::Proof));
}
#[test]
fn test_verify_encoding_proof_tampered_encoding_blinder() {
let EncodingFixture {
transcript,
mut proof,
commitment,
} = new_encoding_fixture();
let Opening { blinder, .. } = proof.openings.values_mut().next().unwrap();
*blinder = rand::random();
let err = proof
.verify_with_provider(
&HashProvider::default(),
&encoder_secret(),
&commitment,
transcript.sent(),
transcript.received(),
)
.unwrap_err();
assert!(matches!(err.kind, ErrorKind::Proof));
}
}

View File

@@ -0,0 +1,19 @@
use std::ops::Range;
use crate::transcript::Direction;
/// A provider of plaintext encodings.
pub trait EncodingProvider {
/// Writes the encoding of the given range into the destination buffer.
fn provide_encoding(
&self,
direction: Direction,
range: Range<usize>,
dest: &mut Vec<u8>,
) -> Result<(), EncodingProviderError>;
}
/// Error for [`EncodingProvider`].
#[derive(Debug, thiserror::Error)]
#[error("failed to provide encoding")]
pub struct EncodingProviderError;

View File

@@ -0,0 +1,327 @@
use std::collections::HashMap;
use bimap::BiMap;
use rangeset::{RangeSet, UnionMut};
use serde::{Deserialize, Serialize};
use crate::{
hash::{Blinder, HashAlgId, HashAlgorithm, TypedHash},
merkle::MerkleTree,
transcript::{
encoding::{
proof::{EncodingProof, Opening},
EncodingProvider,
},
Direction,
},
};
/// Encoding tree builder error.
#[derive(Debug, thiserror::Error)]
pub enum EncodingTreeError {
/// Index is out of bounds of the transcript.
#[error("index is out of bounds of the transcript")]
OutOfBounds {
/// The index.
index: RangeSet<usize>,
/// The transcript length.
transcript_length: usize,
},
/// Encoding provider is missing an encoding for an index.
#[error("encoding provider is missing an encoding for an index")]
MissingEncoding {
/// The index which is missing.
index: RangeSet<usize>,
},
/// Index is missing from the tree.
#[error("index is missing from the tree")]
MissingLeaf {
/// The index which is missing.
index: RangeSet<usize>,
},
}
/// A merkle tree of transcript encodings.
#[derive(Clone, Serialize, Deserialize)]
pub struct EncodingTree {
/// Merkle tree of the commitments.
tree: MerkleTree,
/// Nonces used to blind the hashes.
blinders: Vec<Blinder>,
/// Mapping between the index of a leaf and the transcript index it
/// corresponds to.
idxs: BiMap<usize, (Direction, RangeSet<usize>)>,
/// Union of all transcript indices in the sent direction.
sent_idx: RangeSet<usize>,
/// Union of all transcript indices in the received direction.
received_idx: RangeSet<usize>,
}
opaque_debug::implement!(EncodingTree);
impl EncodingTree {
/// Creates a new encoding tree.
///
/// # Arguments
///
/// * `hasher` - The hash algorithm to use.
/// * `idxs` - The subsequence indices to commit to.
/// * `provider` - The encoding provider.
pub fn new<'idx>(
hasher: &dyn HashAlgorithm,
idxs: impl IntoIterator<Item = &'idx (Direction, RangeSet<usize>)>,
provider: &dyn EncodingProvider,
) -> Result<Self, EncodingTreeError> {
let mut this = Self {
tree: MerkleTree::new(hasher.id()),
blinders: Vec::new(),
idxs: BiMap::new(),
sent_idx: RangeSet::default(),
received_idx: RangeSet::default(),
};
let mut leaves = Vec::new();
let mut encoding = Vec::new();
for dir_idx in idxs {
let direction = dir_idx.0;
let idx = &dir_idx.1;
// Ignore empty indices.
if idx.is_empty() {
continue;
}
if this.idxs.contains_right(dir_idx) {
// The subsequence is already in the tree.
continue;
}
let blinder: Blinder = rand::random();
encoding.clear();
for range in idx.iter_ranges() {
provider
.provide_encoding(direction, range, &mut encoding)
.map_err(|_| EncodingTreeError::MissingEncoding { index: idx.clone() })?;
}
encoding.extend_from_slice(blinder.as_bytes());
let leaf = hasher.hash(&encoding);
leaves.push(leaf);
this.blinders.push(blinder);
this.idxs.insert(this.idxs.len(), dir_idx.clone());
match direction {
Direction::Sent => this.sent_idx.union_mut(idx),
Direction::Received => this.received_idx.union_mut(idx),
}
}
this.tree.insert(hasher, leaves);
Ok(this)
}
/// Returns the root of the tree.
pub fn root(&self) -> TypedHash {
self.tree.root()
}
/// Returns the hash algorithm of the tree.
pub fn algorithm(&self) -> HashAlgId {
self.tree.algorithm()
}
/// Generates a proof for the given indices.
///
/// # Arguments
///
/// * `idxs` - The transcript indices to prove.
pub fn proof<'idx>(
&self,
idxs: impl Iterator<Item = &'idx (Direction, RangeSet<usize>)>,
) -> Result<EncodingProof, EncodingTreeError> {
let mut openings = HashMap::new();
for dir_idx in idxs {
let direction = dir_idx.0;
let idx = &dir_idx.1;
let leaf_idx = *self
.idxs
.get_by_right(dir_idx)
.ok_or_else(|| EncodingTreeError::MissingLeaf { index: idx.clone() })?;
let blinder = self.blinders[leaf_idx].clone();
openings.insert(
leaf_idx,
Opening {
direction,
idx: idx.clone(),
blinder,
},
);
}
let mut indices = openings.keys().copied().collect::<Vec<_>>();
indices.sort();
Ok(EncodingProof {
inclusion_proof: self.tree.proof(&indices),
openings,
})
}
/// Returns whether the tree contains the given transcript index.
pub fn contains(&self, idx: &(Direction, RangeSet<usize>)) -> bool {
self.idxs.contains_right(idx)
}
pub(crate) fn idx(&self, direction: Direction) -> &RangeSet<usize> {
match direction {
Direction::Sent => &self.sent_idx,
Direction::Received => &self.received_idx,
}
}
/// Returns the committed transcript indices.
pub(crate) fn transcript_indices(&self) -> impl Iterator<Item = &(Direction, RangeSet<usize>)> {
self.idxs.right_values()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
fixtures::{encoder_secret, encoding_provider},
hash::{Blake3, HashProvider},
transcript::{encoding::EncodingCommitment, Transcript},
};
use tlsn_data_fixtures::http::{request::POST_JSON, response::OK_JSON};
fn new_tree<'seq>(
transcript: &Transcript,
idxs: impl Iterator<Item = &'seq (Direction, RangeSet<usize>)>,
) -> Result<EncodingTree, EncodingTreeError> {
let provider = encoding_provider(transcript.sent(), transcript.received());
EncodingTree::new(&Blake3::default(), idxs, &provider)
}
#[test]
fn test_encoding_tree() {
let transcript = Transcript::new(POST_JSON, OK_JSON);
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len()));
let tree = new_tree(&transcript, [&idx_0, &idx_1].into_iter()).unwrap();
assert!(tree.contains(&idx_0));
assert!(tree.contains(&idx_1));
let proof = tree.proof([&idx_0, &idx_1].into_iter()).unwrap();
let commitment = EncodingCommitment { root: tree.root() };
let (auth_sent, auth_recv) = proof
.verify_with_provider(
&HashProvider::default(),
&encoder_secret(),
&commitment,
transcript.sent(),
transcript.received(),
)
.unwrap();
assert_eq!(auth_sent, idx_0.1);
assert_eq!(auth_recv, idx_1.1);
}
#[test]
fn test_encoding_tree_multiple_ranges() {
let transcript = Transcript::new(POST_JSON, OK_JSON);
let idx_0 = (Direction::Sent, RangeSet::from(0..1));
let idx_1 = (Direction::Sent, RangeSet::from(1..POST_JSON.len()));
let idx_2 = (Direction::Received, RangeSet::from(0..1));
let idx_3 = (Direction::Received, RangeSet::from(1..OK_JSON.len()));
let tree = new_tree(&transcript, [&idx_0, &idx_1, &idx_2, &idx_3].into_iter()).unwrap();
assert!(tree.contains(&idx_0));
assert!(tree.contains(&idx_1));
assert!(tree.contains(&idx_2));
assert!(tree.contains(&idx_3));
let proof = tree
.proof([&idx_0, &idx_1, &idx_2, &idx_3].into_iter())
.unwrap();
let commitment = EncodingCommitment { root: tree.root() };
let (auth_sent, auth_recv) = proof
.verify_with_provider(
&HashProvider::default(),
&encoder_secret(),
&commitment,
transcript.sent(),
transcript.received(),
)
.unwrap();
let mut expected_auth_sent = RangeSet::default();
expected_auth_sent.union_mut(&idx_0.1);
expected_auth_sent.union_mut(&idx_1.1);
let mut expected_auth_recv = RangeSet::default();
expected_auth_recv.union_mut(&idx_2.1);
expected_auth_recv.union_mut(&idx_3.1);
assert_eq!(auth_sent, expected_auth_sent);
assert_eq!(auth_recv, expected_auth_recv);
}
#[test]
fn test_encoding_tree_proof_missing_leaf() {
let transcript = Transcript::new(POST_JSON, OK_JSON);
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len()));
let idx_1 = (Direction::Received, RangeSet::from(0..4));
let idx_2 = (Direction::Received, RangeSet::from(4..OK_JSON.len()));
let tree = new_tree(&transcript, [&idx_0, &idx_1].into_iter()).unwrap();
let result = tree
.proof([&idx_0, &idx_1, &idx_2].into_iter())
.unwrap_err();
assert!(matches!(result, EncodingTreeError::MissingLeaf { .. }));
}
#[test]
fn test_encoding_tree_out_of_bounds() {
let transcript = Transcript::new(POST_JSON, OK_JSON);
let idx_0 = (Direction::Sent, RangeSet::from(0..POST_JSON.len() + 1));
let idx_1 = (Direction::Received, RangeSet::from(0..OK_JSON.len() + 1));
let result = new_tree(&transcript, [&idx_0].into_iter()).unwrap_err();
assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));
let result = new_tree(&transcript, [&idx_1].into_iter()).unwrap_err();
assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));
}
#[test]
fn test_encoding_tree_missing_encoding() {
let provider = encoding_provider(&[], &[]);
let result = EncodingTree::new(
&Blake3::default(),
[(Direction::Sent, RangeSet::from(0..8))].iter(),
&provider,
)
.unwrap_err();
assert!(matches!(result, EncodingTreeError::MissingEncoding { .. }));
}
}

View File

@@ -1,10 +1,6 @@
//! Transcript proofs.
use rangeset::{
iter::RangeIterator,
ops::{Cover, Set},
set::ToRangeSet,
};
use rangeset::{Cover, Difference, Subset, ToRangeSet, UnionMut};
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, fmt};
@@ -14,6 +10,7 @@ use crate::{
hash::{HashAlgId, HashProvider},
transcript::{
commit::{TranscriptCommitment, TranscriptCommitmentKind},
encoding::{EncoderSecret, EncodingProof, EncodingProofError, EncodingTree},
hash::{hash_plaintext, PlaintextHash, PlaintextHashSecret},
Direction, PartialTranscript, RangeSet, Transcript, TranscriptSecret,
},
@@ -31,12 +28,14 @@ const DEFAULT_COMMITMENT_KINDS: &[TranscriptCommitmentKind] = &[
TranscriptCommitmentKind::Hash {
alg: HashAlgId::KECCAK256,
},
TranscriptCommitmentKind::Encoding,
];
/// Proof of the contents of a transcript.
#[derive(Clone, Serialize, Deserialize)]
pub struct TranscriptProof {
transcript: PartialTranscript,
encoding_proof: Option<EncodingProof>,
hash_secrets: Vec<PlaintextHashSecret>,
}
@@ -50,18 +49,27 @@ impl TranscriptProof {
/// # Arguments
///
/// * `provider` - The hash provider to use for verification.
/// * `length` - The transcript length.
/// * `commitments` - The commitments to verify against.
/// * `attestation_body` - The attestation body to verify against.
pub fn verify_with_provider<'a>(
self,
provider: &HashProvider,
length: &TranscriptLength,
encoder_secret: Option<&EncoderSecret>,
commitments: impl IntoIterator<Item = &'a TranscriptCommitment>,
) -> Result<PartialTranscript, TranscriptProofError> {
let mut encoding_commitment = None;
let mut hash_commitments = HashSet::new();
// Index commitments.
for commitment in commitments {
match commitment {
TranscriptCommitment::Encoding(commitment) => {
if encoding_commitment.replace(commitment).is_some() {
return Err(TranscriptProofError::new(
ErrorKind::Encoding,
"multiple encoding commitments are present.",
));
}
}
TranscriptCommitment::Hash(plaintext_hash) => {
hash_commitments.insert(plaintext_hash);
}
@@ -80,6 +88,34 @@ impl TranscriptProof {
let mut total_auth_sent = RangeSet::default();
let mut total_auth_recv = RangeSet::default();
// Verify encoding proof.
if let Some(proof) = self.encoding_proof {
let secret = encoder_secret.ok_or_else(|| {
TranscriptProofError::new(
ErrorKind::Encoding,
"contains an encoding proof but missing encoder secret",
)
})?;
let commitment = encoding_commitment.ok_or_else(|| {
TranscriptProofError::new(
ErrorKind::Encoding,
"contains an encoding proof but missing encoding commitment",
)
})?;
let (auth_sent, auth_recv) = proof.verify_with_provider(
provider,
secret,
commitment,
self.transcript.sent_unsafe(),
self.transcript.received_unsafe(),
)?;
total_auth_sent.union_mut(&auth_sent);
total_auth_recv.union_mut(&auth_recv);
}
let mut buffer = Vec::new();
for PlaintextHashSecret {
direction,
@@ -108,7 +144,7 @@ impl TranscriptProof {
}
buffer.clear();
for range in idx.iter() {
for range in idx.iter_ranges() {
buffer.extend_from_slice(&plaintext[range]);
}
@@ -163,6 +199,7 @@ impl TranscriptProofError {
#[derive(Debug)]
enum ErrorKind {
Encoding,
Hash,
Proof,
}
@@ -172,6 +209,7 @@ impl fmt::Display for TranscriptProofError {
f.write_str("transcript proof error: ")?;
match self.kind {
ErrorKind::Encoding => f.write_str("encoding error")?,
ErrorKind::Hash => f.write_str("hash error")?,
ErrorKind::Proof => f.write_str("proof error")?,
}
@@ -184,6 +222,12 @@ impl fmt::Display for TranscriptProofError {
}
}
impl From<EncodingProofError> for TranscriptProofError {
fn from(e: EncodingProofError) -> Self {
TranscriptProofError::new(ErrorKind::Encoding, e)
}
}
/// Union of ranges to reveal.
#[derive(Clone, Debug, PartialEq)]
struct QueryIdx {
@@ -228,6 +272,7 @@ pub struct TranscriptProofBuilder<'a> {
/// Commitment kinds in order of preference for building transcript proofs.
commitment_kinds: Vec<TranscriptCommitmentKind>,
transcript: &'a Transcript,
encoding_tree: Option<&'a EncodingTree>,
hash_secrets: Vec<&'a PlaintextHashSecret>,
committed_sent: RangeSet<usize>,
committed_recv: RangeSet<usize>,
@@ -243,9 +288,15 @@ impl<'a> TranscriptProofBuilder<'a> {
let mut committed_sent = RangeSet::default();
let mut committed_recv = RangeSet::default();
let mut encoding_tree = None;
let mut hash_secrets = Vec::new();
for secret in secrets {
match secret {
TranscriptSecret::Encoding(tree) => {
committed_sent.union_mut(tree.idx(Direction::Sent));
committed_recv.union_mut(tree.idx(Direction::Received));
encoding_tree = Some(tree);
}
TranscriptSecret::Hash(hash) => {
match hash.direction {
Direction::Sent => committed_sent.union_mut(&hash.idx),
@@ -259,6 +310,7 @@ impl<'a> TranscriptProofBuilder<'a> {
Self {
commitment_kinds: DEFAULT_COMMITMENT_KINDS.to_vec(),
transcript,
encoding_tree,
hash_secrets,
committed_sent,
committed_recv,
@@ -314,7 +366,7 @@ impl<'a> TranscriptProofBuilder<'a> {
if idx.is_subset(committed) {
self.query_idx.union(&direction, &idx);
} else {
let missing = idx.difference(committed).into_set();
let missing = idx.difference(committed);
return Err(TranscriptProofBuilderError::new(
BuilderErrorKind::MissingCommitment,
format!(
@@ -356,6 +408,7 @@ impl<'a> TranscriptProofBuilder<'a> {
transcript: self
.transcript
.to_partial(self.query_idx.sent.clone(), self.query_idx.recv.clone()),
encoding_proof: None,
hash_secrets: Vec::new(),
};
let mut uncovered_query_idx = self.query_idx.clone();
@@ -367,6 +420,46 @@ impl<'a> TranscriptProofBuilder<'a> {
// self.commitment_kinds.
if let Some(kind) = commitment_kinds_iter.next() {
match kind {
TranscriptCommitmentKind::Encoding => {
let Some(encoding_tree) = self.encoding_tree else {
// Proceeds to the next preferred commitment kind if encoding tree is
// not available.
continue;
};
let (sent_dir_idxs, sent_uncovered) = uncovered_query_idx.sent.cover_by(
encoding_tree
.transcript_indices()
.filter(|(dir, _)| *dir == Direction::Sent),
|(_, idx)| idx,
);
// Uncovered ranges will be checked with ranges of the next
// preferred commitment kind.
uncovered_query_idx.sent = sent_uncovered;
let (recv_dir_idxs, recv_uncovered) = uncovered_query_idx.recv.cover_by(
encoding_tree
.transcript_indices()
.filter(|(dir, _)| *dir == Direction::Received),
|(_, idx)| idx,
);
uncovered_query_idx.recv = recv_uncovered;
let dir_idxs = sent_dir_idxs
.into_iter()
.chain(recv_dir_idxs)
.collect::<Vec<_>>();
// Skip proof generation if there are no committed ranges that can cover the
// query ranges.
if !dir_idxs.is_empty() {
transcript_proof.encoding_proof = Some(
encoding_tree
.proof(dir_idxs.into_iter())
.expect("subsequences were checked to be in tree"),
);
}
}
TranscriptCommitmentKind::Hash { alg } => {
let (sent_hashes, sent_uncovered) = uncovered_query_idx.sent.cover_by(
self.hash_secrets.iter().filter(|hash| {
@@ -489,14 +582,50 @@ impl fmt::Display for TranscriptProofBuilderError {
#[cfg(test)]
mod tests {
use rand::{Rng, SeedableRng};
use rangeset::prelude::*;
use rangeset::RangeSet;
use rstest::rstest;
use tlsn_data_fixtures::http::{request::GET_WITH_HEADER, response::OK_JSON};
use crate::hash::{Blinder, HashAlgId};
use crate::{
fixtures::{encoder_secret, encoding_provider},
hash::{Blake3, Blinder, HashAlgId},
transcript::TranscriptCommitConfigBuilder,
};
use super::*;
#[rstest]
fn test_verify_missing_encoding_commitment_root() {
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
let idxs = vec![(Direction::Received, RangeSet::from(0..transcript.len().1))];
let encoding_tree = EncodingTree::new(
&Blake3::default(),
&idxs,
&encoding_provider(transcript.sent(), transcript.received()),
)
.unwrap();
let secrets = vec![TranscriptSecret::Encoding(encoding_tree)];
let mut builder = TranscriptProofBuilder::new(&transcript, &secrets);
builder.reveal_recv(&(0..transcript.len().1)).unwrap();
let transcript_proof = builder.build().unwrap();
let provider = HashProvider::default();
let err = transcript_proof
.verify_with_provider(
&provider,
&transcript.length(),
Some(&encoder_secret()),
&[],
)
.err()
.unwrap();
assert!(matches!(err.kind, ErrorKind::Encoding));
}
#[rstest]
fn test_reveal_range_out_of_bounds() {
let transcript = Transcript::new(
@@ -516,7 +645,7 @@ mod tests {
}
#[rstest]
fn test_reveal_missing_commitment() {
fn test_reveal_missing_encoding_tree() {
let transcript = Transcript::new(
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
@@ -565,6 +694,7 @@ mod tests {
.verify_with_provider(
&provider,
&transcript.length(),
None,
&[TranscriptCommitment::Hash(commitment)],
)
.unwrap();
@@ -614,6 +744,7 @@ mod tests {
.verify_with_provider(
&provider,
&transcript.length(),
None,
&[TranscriptCommitment::Hash(commitment)],
)
.unwrap_err();
@@ -629,19 +760,24 @@ mod tests {
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
TranscriptCommitmentKind::Encoding,
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
TranscriptCommitmentKind::Encoding,
]);
assert_eq!(
builder.commitment_kinds,
vec![TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256
},]
vec![
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256
},
TranscriptCommitmentKind::Encoding
]
);
}
@@ -651,7 +787,7 @@ mod tests {
RangeSet::from([0..10, 12..30]),
true,
)]
#[case::reveal_all_rangesets_with_single_superset_range(
#[case::reveal_all_rangesets_with_superset_ranges(
vec![RangeSet::from([0..1]), RangeSet::from([1..2, 8..9]), RangeSet::from([2..4, 6..8]), RangeSet::from([2..3, 6..7]), RangeSet::from([9..12])],
RangeSet::from([0..4, 6..9]),
true,
@@ -682,30 +818,29 @@ mod tests {
false,
)]
#[allow(clippy::single_range_in_vec_init)]
fn test_reveal_multiple_rangesets_with_one_rangeset(
#[case] commit_recv_rangesets: Vec<RangeSet<usize>>,
#[case] reveal_recv_rangeset: RangeSet<usize>,
#[case] success: bool,
) {
use rand::{Rng, SeedableRng};
let mut rng = rand::rngs::StdRng::seed_from_u64(0);
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
// Create hash commitments for each rangeset
let mut secrets = Vec::new();
// Encoding commitment kind
let mut transcript_commitment_builder = TranscriptCommitConfigBuilder::new(&transcript);
for rangeset in commit_recv_rangesets.iter() {
let blinder: crate::hash::Blinder = rng.random();
let secret = PlaintextHashSecret {
direction: Direction::Received,
idx: rangeset.clone(),
alg: HashAlgId::BLAKE3,
blinder,
};
secrets.push(TranscriptSecret::Hash(secret));
transcript_commitment_builder.commit_recv(rangeset).unwrap();
}
let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();
let encoding_tree = EncodingTree::new(
&Blake3::default(),
transcripts_commitment_config.iter_encoding(),
&encoding_provider(GET_WITH_HEADER, OK_JSON),
)
.unwrap();
let secrets = vec![TranscriptSecret::Encoding(encoding_tree)];
let mut builder = TranscriptProofBuilder::new(&transcript, &secrets);
if success {
@@ -758,34 +893,27 @@ mod tests {
#[case] uncovered_sent_rangeset: RangeSet<usize>,
#[case] uncovered_recv_rangeset: RangeSet<usize>,
) {
use rand::{Rng, SeedableRng};
let mut rng = rand::rngs::StdRng::seed_from_u64(0);
let transcript = Transcript::new(GET_WITH_HEADER, OK_JSON);
// Create hash commitments for each rangeset
let mut secrets = Vec::new();
// Encoding commitment kind
let mut transcript_commitment_builder = TranscriptCommitConfigBuilder::new(&transcript);
for rangeset in commit_sent_rangesets.iter() {
let blinder: crate::hash::Blinder = rng.random();
let secret = PlaintextHashSecret {
direction: Direction::Sent,
idx: rangeset.clone(),
alg: HashAlgId::BLAKE3,
blinder,
};
secrets.push(TranscriptSecret::Hash(secret));
transcript_commitment_builder.commit_sent(rangeset).unwrap();
}
for rangeset in commit_recv_rangesets.iter() {
let blinder: crate::hash::Blinder = rng.random();
let secret = PlaintextHashSecret {
direction: Direction::Received,
idx: rangeset.clone(),
alg: HashAlgId::BLAKE3,
blinder,
};
secrets.push(TranscriptSecret::Hash(secret));
transcript_commitment_builder.commit_recv(rangeset).unwrap();
}
let transcripts_commitment_config = transcript_commitment_builder.build().unwrap();
let encoding_tree = EncodingTree::new(
&Blake3::default(),
transcripts_commitment_config.iter_encoding(),
&encoding_provider(GET_WITH_HEADER, OK_JSON),
)
.unwrap();
let secrets = vec![TranscriptSecret::Encoding(encoding_tree)];
let mut builder = TranscriptProofBuilder::new(&transcript, &secrets);
builder.reveal_sent(&reveal_sent_rangeset).unwrap();
builder.reveal_recv(&reveal_recv_rangeset).unwrap();
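The updated `commitment_kinds` assertion earlier in this section implies order-preserving deduplication: committing with [Hash, Encoding, Hash, Hash, Encoding] yields [Hash, Encoding]. A standalone sketch of that behavior (plain std Rust, not the builder's actual internals):

// Order-preserving dedup: keep the first occurrence of each kind and drop
// later repeats, as the commitment_kinds assertion above implies.
fn dedup_preserving_order<T: PartialEq + Clone>(items: &[T]) -> Vec<T> {
    let mut out = Vec::new();
    for item in items {
        if !out.contains(item) {
            out.push(item.clone());
        }
    }
    out
}

fn main() {
    let kinds = ["hash", "encoding", "hash", "hash", "encoding"];
    assert_eq!(dedup_preserving_order(&kinds), ["hash", "encoding"]);
}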


@@ -332,6 +332,7 @@ async fn notary<S: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
let (
VerifierOutput {
transcript_commitments,
encoder_secret,
..
},
verifier,
@@ -392,6 +393,10 @@ async fn notary<S: AsyncWrite + AsyncRead + Send + Sync + Unpin + 'static>(
.server_ephemeral_key(tls_transcript.server_ephemeral_key().clone())
.transcript_commitments(transcript_commitments);
if let Some(encoder_secret) = encoder_secret {
builder.encoder_secret(encoder_secret);
}
let attestation = builder.build(&provider)?;
// Send attestation to prover.


@@ -324,7 +324,7 @@ fn prepare_zk_proof_input(
hasher.update(&blinder);
let computed_hash = hasher.finalize();
if committed_hash != computed_hash.as_ref() as &[u8] {
if committed_hash != computed_hash.as_slice() {
return Err(anyhow::anyhow!(
"Computed hash does not match committed hash"
));


@@ -1,59 +1,51 @@
#### Default Representative Benchmarks ####
#
# This benchmark measures TLSNotary performance on three representative network scenarios.
# Each scenario is run multiple times to produce statistical metrics (median, std dev, etc.)
# rather than plots. Use this for quick performance checks and CI regression testing.
#
# Payload sizes:
# - upload-size: 1KB (typical HTTP request)
# - download-size: 2KB (typical HTTP response/API data)
#
# Network scenarios are chosen to represent real-world user conditions where
# TLSNotary is primarily bottlenecked by upload bandwidth.
#### Cable/DSL Home Internet ####
# Most common residential internet connection
# - Asymmetric: high download, limited upload (typical bottleneck)
# - Upload bandwidth: 20 Mbps (realistic cable/DSL upload speed)
# - Latency: 20ms (typical ISP latency)
#### Latency ####
[[group]]
name = "cable"
bandwidth = 20
protocol_latency = 20
upload-size = 1024
download-size = 2048
name = "latency"
bandwidth = 1000
[[bench]]
group = "cable"
#### Mobile 5G ####
# Modern mobile connection with good coverage
# - Upload bandwidth: 30 Mbps (typical 5G upload in good conditions)
# - Latency: 30ms (higher than wired due to mobile tower hops)
[[group]]
name = "mobile_5g"
bandwidth = 30
protocol_latency = 30
upload-size = 1024
download-size = 2048
group = "latency"
protocol_latency = 10
[[bench]]
group = "mobile_5g"
group = "latency"
protocol_latency = 25
#### Fiber Home Internet ####
# High-end residential connection (best case scenario)
# - Symmetric: equal upload/download bandwidth
# - Upload bandwidth: 100 Mbps (typical fiber upload)
# - Latency: 15ms (lower latency than cable)
[[bench]]
group = "latency"
protocol_latency = 50
[[bench]]
group = "latency"
protocol_latency = 100
[[bench]]
group = "latency"
protocol_latency = 200
#### Bandwidth ####
[[group]]
name = "fiber"
name = "bandwidth"
protocol_latency = 25
[[bench]]
group = "bandwidth"
bandwidth = 10
[[bench]]
group = "bandwidth"
bandwidth = 50
[[bench]]
group = "bandwidth"
bandwidth = 100
protocol_latency = 15
upload-size = 1024
download-size = 2048
[[bench]]
group = "fiber"
group = "bandwidth"
bandwidth = 250
[[bench]]
group = "bandwidth"
bandwidth = 1000
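The rewritten bench.toml above replaces the three named scenarios with two one-dimensional sweeps: a latency sweep at a fixed 1000 Mbps and a bandwidth sweep at a fixed 25 ms. A small sketch of the run matrix it encodes (values transcribed from the config above; upload/download sizes fall back to the harness defaults):

// The (bandwidth Mbps, protocol_latency ms) pairs encoded by the two groups.
fn main() {
    let latency_sweep = [10u32, 25, 50, 100, 200].map(|latency| (1000u32, latency));
    let bandwidth_sweep = [10u32, 50, 100, 250, 1000].map(|bandwidth| (bandwidth, 25u32));
    for (bandwidth, latency) in latency_sweep.into_iter().chain(bandwidth_sweep) {
        println!("bandwidth = {bandwidth} Mbps, protocol_latency = {latency} ms");
    }
}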


@@ -1,52 +0,0 @@
#### Bandwidth Sweep Benchmark ####
#
# Measures how network bandwidth affects TLSNotary runtime.
# Keeps latency and payload sizes fixed while varying upload bandwidth.
#
# Fixed parameters:
# - Latency: 25ms (typical internet latency)
# - Upload: 1KB (typical request)
# - Download: 2KB (typical response)
#
# Variable: Bandwidth from 5 Mbps to 1000 Mbps
#
# Use this to plot "Bandwidth vs Runtime" and understand bandwidth sensitivity.
# Focus on upload bandwidth as TLSNotary is primarily upload-bottlenecked
[[group]]
name = "bandwidth_sweep"
protocol_latency = 25
upload-size = 1024
download-size = 2048
[[bench]]
group = "bandwidth_sweep"
bandwidth = 5
[[bench]]
group = "bandwidth_sweep"
bandwidth = 10
[[bench]]
group = "bandwidth_sweep"
bandwidth = 20
[[bench]]
group = "bandwidth_sweep"
bandwidth = 50
[[bench]]
group = "bandwidth_sweep"
bandwidth = 100
[[bench]]
group = "bandwidth_sweep"
bandwidth = 250
[[bench]]
group = "bandwidth_sweep"
bandwidth = 500
[[bench]]
group = "bandwidth_sweep"
bandwidth = 1000


@@ -1,53 +0,0 @@
#### Download Size Sweep Benchmark ####
#
# Measures how download payload size affects TLSNotary runtime.
# Keeps network conditions fixed while varying the response size.
#
# Fixed parameters:
# - Bandwidth: 100 Mbps (typical good connection)
# - Latency: 25ms (typical internet latency)
# - Upload: 1KB (typical request size)
#
# Variable: Download size from 1KB to 100KB
#
# Use this to plot "Download Size vs Runtime" and understand how much data
# TLSNotary can efficiently notarize. Useful for determining optimal
# chunking strategies for large responses.
[[group]]
name = "download_sweep"
bandwidth = 100
protocol_latency = 25
upload-size = 1024
[[bench]]
group = "download_sweep"
download-size = 1024
[[bench]]
group = "download_sweep"
download-size = 2048
[[bench]]
group = "download_sweep"
download-size = 5120
[[bench]]
group = "download_sweep"
download-size = 10240
[[bench]]
group = "download_sweep"
download-size = 20480
[[bench]]
group = "download_sweep"
download-size = 30720
[[bench]]
group = "download_sweep"
download-size = 40960
[[bench]]
group = "download_sweep"
download-size = 51200


@@ -1,47 +0,0 @@
#### Latency Sweep Benchmark ####
#
# Measures how network latency affects TLSNotary runtime.
# Keeps bandwidth and payload sizes fixed while varying protocol latency.
#
# Fixed parameters:
# - Bandwidth: 100 Mbps (typical good connection)
# - Upload: 1KB (typical request)
# - Download: 2KB (typical response)
#
# Variable: Protocol latency from 10ms to 200ms
#
# Use this to plot "Latency vs Runtime" and understand latency sensitivity.
[[group]]
name = "latency_sweep"
bandwidth = 100
upload-size = 1024
download-size = 2048
[[bench]]
group = "latency_sweep"
protocol_latency = 10
[[bench]]
group = "latency_sweep"
protocol_latency = 25
[[bench]]
group = "latency_sweep"
protocol_latency = 50
[[bench]]
group = "latency_sweep"
protocol_latency = 75
[[bench]]
group = "latency_sweep"
protocol_latency = 100
[[bench]]
group = "latency_sweep"
protocol_latency = 150
[[bench]]
group = "latency_sweep"
protocol_latency = 200


@@ -9,7 +9,6 @@ pub const DEFAULT_UPLOAD_SIZE: usize = 1024;
pub const DEFAULT_DOWNLOAD_SIZE: usize = 4096;
pub const DEFAULT_DEFER_DECRYPTION: bool = true;
pub const DEFAULT_MEMORY_PROFILE: bool = false;
pub const DEFAULT_REVEAL_ALL: bool = false;
pub const WARM_UP_BENCH: Bench = Bench {
group: None,
@@ -21,7 +20,6 @@ pub const WARM_UP_BENCH: Bench = Bench {
download_size: 4096,
defer_decryption: true,
memory_profile: false,
reveal_all: true,
};
#[derive(Deserialize)]
@@ -81,8 +79,6 @@ pub struct BenchGroupItem {
pub defer_decryption: Option<bool>,
#[serde(rename = "memory-profile")]
pub memory_profile: Option<bool>,
#[serde(rename = "reveal-all")]
pub reveal_all: Option<bool>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -101,8 +97,6 @@ pub struct BenchItem {
pub defer_decryption: Option<bool>,
#[serde(rename = "memory-profile")]
pub memory_profile: Option<bool>,
#[serde(rename = "reveal-all")]
pub reveal_all: Option<bool>,
}
impl BenchItem {
@@ -138,10 +132,6 @@ impl BenchItem {
if self.memory_profile.is_none() {
self.memory_profile = group.memory_profile;
}
if self.reveal_all.is_none() {
self.reveal_all = group.reveal_all;
}
}
pub fn into_bench(&self) -> Bench {
@@ -155,7 +145,6 @@ impl BenchItem {
download_size: self.download_size.unwrap_or(DEFAULT_DOWNLOAD_SIZE),
defer_decryption: self.defer_decryption.unwrap_or(DEFAULT_DEFER_DECRYPTION),
memory_profile: self.memory_profile.unwrap_or(DEFAULT_MEMORY_PROFILE),
reveal_all: self.reveal_all.unwrap_or(DEFAULT_REVEAL_ALL),
}
}
}
@@ -175,8 +164,6 @@ pub struct Bench {
pub defer_decryption: bool,
#[serde(rename = "memory-profile")]
pub memory_profile: bool,
#[serde(rename = "reveal-all")]
pub reveal_all: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize)]

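The `inherit`/`into_bench` pattern above resolves every field the same way: an unset bench field falls back to its group's value, then to a crate default. A minimal standalone sketch of that resolution (illustrative field set and defaults, not the exact structs):

#[derive(Clone, Copy)]
struct Knobs {
    bandwidth: Option<usize>,
    protocol_latency: Option<usize>,
}

// Illustrative defaults; the real ones are the DEFAULT_* constants above.
const DEFAULT_BANDWIDTH: usize = 1000;
const DEFAULT_LATENCY: usize = 25;

// Bench value wins, then the group value, then the default.
fn resolve(bench: Knobs, group: Knobs) -> (usize, usize) {
    (
        bench.bandwidth.or(group.bandwidth).unwrap_or(DEFAULT_BANDWIDTH),
        bench
            .protocol_latency
            .or(group.protocol_latency)
            .unwrap_or(DEFAULT_LATENCY),
    )
}

fn main() {
    // e.g. `[[bench]] group = "bandwidth", bandwidth = 50` inherits the
    // group's protocol_latency = 25.
    let group = Knobs { bandwidth: None, protocol_latency: Some(25) };
    let bench = Knobs { bandwidth: Some(50), protocol_latency: None };
    assert_eq!(resolve(bench, group), (50, 25));
}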

@@ -22,10 +22,7 @@ pub enum CmdOutput {
GetTests(Vec<String>),
Test(TestOutput),
Bench(BenchOutput),
#[cfg(target_arch = "wasm32")]
Fail {
reason: Option<String>,
},
Fail { reason: Option<String> },
}
#[derive(Debug, Clone, Serialize, Deserialize)]

View File

@@ -98,27 +98,14 @@ pub async fn bench_prover(provider: &IoProvider, config: &Bench) -> Result<Prove
let mut builder = ProveConfig::builder(prover.transcript());
// When reveal_all is false (the default), we exclude 1 byte to avoid the
// reveal-all optimization and benchmark the realistic ZK authentication path.
let reveal_sent_range = if config.reveal_all {
0..sent_len
} else {
0..sent_len.saturating_sub(1)
};
let reveal_recv_range = if config.reveal_all {
0..recv_len
} else {
0..recv_len.saturating_sub(1)
};
builder
.server_identity()
.reveal_sent(&reveal_sent_range)?
.reveal_recv(&reveal_recv_range)?;
.reveal_sent(&(0..sent_len))?
.reveal_recv(&(0..recv_len))?;
let prove_config = builder.build()?;
let config = builder.build()?;
prover.prove(&prove_config).await?;
prover.prove(&config).await?;
prover.close().await?;
let time_total = time_start.elapsed().as_millis();


@@ -22,7 +22,6 @@ clap = { workspace = true, features = ["derive", "env"] }
csv = { version = "1.3" }
duct = { version = "1" }
futures = { workspace = true }
indicatif = { version = "0.17" }
ipnet = { workspace = true }
serio = { workspace = true }
serde_json = { workspace = true }


@@ -16,10 +16,6 @@ pub struct Cli {
/// Subnet to assign harness network interfaces.
#[arg(long, default_value = "10.250.0.0/24", env = "SUBNET")]
pub subnet: Ipv4Net,
/// Run browser in headed mode (visible window) for debugging.
/// Works with both X11 and Wayland.
#[arg(long)]
pub headed: bool,
}
#[derive(Subcommand)]
@@ -35,13 +31,10 @@ pub enum Command {
},
/// runs benchmarks.
Bench {
/// Configuration path. Defaults to bench.toml which contains
/// representative scenarios (cable, 5G, fiber) for quick performance
/// checks. Use bench_*_sweep.toml files for parametric
/// analysis.
/// Configuration path.
#[arg(short, long, default_value = "bench.toml")]
config: PathBuf,
/// Output CSV file path for detailed metrics and post-processing.
/// Output file path.
#[arg(short, long, default_value = "metrics.csv")]
output: PathBuf,
/// Number of samples to measure per benchmark. This is overridden by


@@ -28,9 +28,6 @@ pub struct Executor {
ns: Namespace,
config: ExecutorConfig,
target: Target,
/// Display environment variables for headed mode (X11/Wayland).
/// Empty means headless mode.
display_env: Vec<String>,
state: State,
}
@@ -52,17 +49,11 @@ impl State {
}
impl Executor {
pub fn new(
ns: Namespace,
config: ExecutorConfig,
target: Target,
display_env: Vec<String>,
) -> Self {
pub fn new(ns: Namespace, config: ExecutorConfig, target: Target) -> Self {
Self {
ns,
config,
target,
display_env,
state: State::Init,
}
}
@@ -129,49 +120,23 @@ impl Executor {
let tmp = duct::cmd!("mktemp", "-d").read()?;
let tmp = tmp.trim();
let headed = !self.display_env.is_empty();
// Build command args based on headed/headless mode
let mut args: Vec<String> = vec![
"ip".into(),
"netns".into(),
"exec".into(),
self.ns.name().into(),
];
if headed {
// For headed mode: drop back to the current user and pass display env vars
// This allows the browser to connect to X11/Wayland while in the namespace
let user =
std::env::var("USER").context("USER environment variable not set")?;
args.extend(["sudo".into(), "-E".into(), "-u".into(), user, "env".into()]);
args.extend(self.display_env.clone());
}
args.push(chrome_path.to_string_lossy().into());
args.push(format!("--remote-debugging-port={PORT_BROWSER}"));
if headed {
// Headed mode: no headless, add flags to suppress first-run dialogs
args.extend(["--no-first-run".into(), "--no-default-browser-check".into()]);
} else {
// Headless mode: original flags
args.extend([
"--headless".into(),
"--disable-dev-shm-usage".into(),
"--disable-gpu".into(),
"--disable-cache".into(),
"--disable-application-cache".into(),
]);
}
args.extend([
"--no-sandbox".into(),
let process = duct::cmd!(
"sudo",
"ip",
"netns",
"exec",
self.ns.name(),
chrome_path,
format!("--remote-debugging-port={PORT_BROWSER}"),
"--headless",
"--disable-dev-shm-usage",
"--disable-gpu",
"--disable-cache",
"--disable-application-cache",
"--no-sandbox",
format!("--user-data-dir={tmp}"),
"--allowed-ips=10.250.0.1".into(),
]);
let process = duct::cmd("sudo", &args);
format!("--allowed-ips=10.250.0.1"),
);
let process = if !cfg!(feature = "debug") {
process.stderr_capture().stdout_capture().start()?


@@ -9,7 +9,7 @@ mod ws_proxy;
#[cfg(feature = "debug")]
mod debug_prelude;
use std::{collections::HashMap, time::Duration};
use std::time::Duration;
use anyhow::Result;
use clap::Parser;
@@ -22,7 +22,6 @@ use harness_core::{
rpc::{BenchCmd, TestCmd},
test::TestStatus,
};
use indicatif::{ProgressBar, ProgressStyle};
use cli::{Cli, Command};
use executor::Executor;
@@ -33,67 +32,18 @@ use crate::debug_prelude::*;
use crate::{cli::Route, network::Network, wasm_server::WasmServer, ws_proxy::WsProxy};
/// Statistics for a benchmark configuration
#[derive(Debug, Clone)]
struct BenchStats {
group: Option<String>,
bandwidth: usize,
latency: usize,
upload_size: usize,
download_size: usize,
times: Vec<u64>,
}
impl BenchStats {
fn median(&self) -> f64 {
let mut sorted = self.times.clone();
sorted.sort();
let len = sorted.len();
if len == 0 {
return 0.0;
}
if len.is_multiple_of(2) {
(sorted[len / 2 - 1] + sorted[len / 2]) as f64 / 2.0
} else {
sorted[len / 2] as f64
}
}
}
/// Print summary table of benchmark results
fn print_bench_summary(stats: &[BenchStats]) {
if stats.is_empty() {
println!("\nNo benchmark results to display (only warmup was run).");
return;
}
println!("\n{}", "=".repeat(80));
println!("TLSNotary Benchmark Results");
println!("{}", "=".repeat(80));
println!();
for stat in stats {
let group_name = stat.group.as_deref().unwrap_or("unnamed");
println!(
"{} ({} Mbps, {}ms latency, {}KB↑ {}KB↓):",
group_name,
stat.bandwidth,
stat.latency,
stat.upload_size / 1024,
stat.download_size / 1024
);
println!(" Median: {:.2}s", stat.median() / 1000.0);
println!();
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum, Default)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum)]
pub enum Target {
#[default]
Native,
Browser,
}
impl Default for Target {
fn default() -> Self {
Self::Native
}
}
struct Runner {
network: Network,
server_fixture: ServerFixture,
@@ -105,46 +55,14 @@ struct Runner {
started: bool,
}
/// Collects display-related environment variables for headed browser mode.
/// Works with both X11 and Wayland by collecting whichever vars are present.
fn collect_display_env_vars() -> Vec<String> {
const DISPLAY_VARS: &[&str] = &[
"DISPLAY", // X11
"XAUTHORITY", // X11 auth
"WAYLAND_DISPLAY", // Wayland
"XDG_RUNTIME_DIR", // Wayland runtime dir
];
DISPLAY_VARS
.iter()
.filter_map(|&var| {
std::env::var(var)
.ok()
.map(|val| format!("{}={}", var, val))
})
.collect()
}
impl Runner {
fn new(cli: &Cli) -> Result<Self> {
let Cli {
target,
subnet,
headed,
..
} = cli;
let Cli { target, subnet, .. } = cli;
let current_path = std::env::current_exe().unwrap();
let fixture_path = current_path.parent().unwrap().join("server-fixture");
let network_config = NetworkConfig::new(*subnet);
let network = Network::new(network_config.clone())?;
// Collect display env vars once if headed mode is enabled
let display_env = if *headed {
collect_display_env_vars()
} else {
Vec::new()
};
let server_fixture =
ServerFixture::new(fixture_path, network.ns_app().clone(), network_config.app);
let wasm_server = WasmServer::new(
@@ -162,7 +80,6 @@ impl Runner {
.network_config(network_config.clone())
.build(),
*target,
display_env.clone(),
);
let exec_v = Executor::new(
network.ns_1().clone(),
@@ -172,7 +89,6 @@ impl Runner {
.network_config(network_config.clone())
.build(),
Target::Native,
Vec::new(), // Verifier doesn't need display env
);
Ok(Self {
@@ -207,12 +123,6 @@ pub async fn main() -> Result<()> {
tracing_subscriber::fmt::init();
let cli = Cli::parse();
// Validate --headed requires --target browser
if cli.headed && cli.target != Target::Browser {
anyhow::bail!("--headed can only be used with --target browser");
}
let mut runner = Runner::new(&cli)?;
let mut exit_code = 0;
@@ -301,12 +211,6 @@ pub async fn main() -> Result<()> {
samples_override,
skip_warmup,
} => {
// Print configuration info
println!("TLSNotary Benchmark Harness");
println!("Running benchmarks from: {}", config.display());
println!("Output will be written to: {}", output.display());
println!();
let items: BenchItems = toml::from_str(&std::fs::read_to_string(config)?)?;
let output_file = std::fs::File::create(output)?;
let mut writer = WriterBuilder::new().from_writer(output_file);
@@ -321,34 +225,7 @@ pub async fn main() -> Result<()> {
runner.exec_p.start().await?;
runner.exec_v.start().await?;
// Create progress bar
let pb = ProgressBar::new(benches.len() as u64);
pb.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] {bar:40.cyan/blue} {pos}/{len} {msg}")
.expect("valid template")
.progress_chars("█▓▒░ "),
);
// Collect measurements for stats
let mut measurements_by_config: HashMap<String, Vec<u64>> = HashMap::new();
let warmup_count = if skip_warmup { 0 } else { 3 };
for (idx, config) in benches.iter().enumerate() {
let is_warmup = idx < warmup_count;
let group_name = if is_warmup {
format!("Warmup {}/{}", idx + 1, warmup_count)
} else {
config.group.as_deref().unwrap_or("unnamed").to_string()
};
pb.set_message(format!(
"{} ({} Mbps, {}ms)",
group_name, config.bandwidth, config.protocol_latency
));
for config in benches {
runner
.network
.set_proto_config(config.bandwidth, config.protocol_latency.div_ceil(2))?;
@@ -377,73 +254,11 @@ pub async fn main() -> Result<()> {
panic!("expected prover output");
};
// Collect metrics for stats (skip warmup benches)
if !is_warmup {
let config_key = format!(
"{:?}|{}|{}|{}|{}",
config.group,
config.bandwidth,
config.protocol_latency,
config.upload_size,
config.download_size
);
measurements_by_config
.entry(config_key)
.or_default()
.push(metrics.time_total);
}
let measurement = Measurement::new(config.clone(), metrics);
let measurement = Measurement::new(config, metrics);
writer.serialize(measurement)?;
writer.flush()?;
pb.inc(1);
}
pb.finish_with_message("Benchmarks complete");
// Compute and print statistics
let mut all_stats: Vec<BenchStats> = Vec::new();
for (key, times) in measurements_by_config {
// Parse back the config from the key
let parts: Vec<&str> = key.split('|').collect();
if parts.len() >= 5 {
let group = if parts[0] == "None" {
None
} else {
Some(
parts[0]
.trim_start_matches("Some(\"")
.trim_end_matches("\")")
.to_string(),
)
};
let bandwidth: usize = parts[1].parse().unwrap_or(0);
let latency: usize = parts[2].parse().unwrap_or(0);
let upload_size: usize = parts[3].parse().unwrap_or(0);
let download_size: usize = parts[4].parse().unwrap_or(0);
all_stats.push(BenchStats {
group,
bandwidth,
latency,
upload_size,
download_size,
times,
});
}
}
// Sort stats by group name for consistent output
all_stats.sort_by(|a, b| {
a.group
.cmp(&b.group)
.then(a.latency.cmp(&b.latency))
.then(a.bandwidth.cmp(&b.bandwidth))
});
print_bench_summary(&all_stats);
}
Command::Serve {} => {
runner.start_services().await?;

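With the in-process summary table removed, medians can still be recovered from metrics.csv after a run. A post-processing sketch, assuming the `csv` crate (already a runner dependency per its Cargo.toml above) and a `time_total` column matching the metric name used in the code:

use std::error::Error;

// Same even/odd rule as the removed BenchStats::median.
fn median(mut times: Vec<u64>) -> f64 {
    times.sort_unstable();
    let n = times.len();
    match n {
        0 => 0.0,
        _ if n % 2 == 0 => (times[n / 2 - 1] + times[n / 2]) as f64 / 2.0,
        _ => times[n / 2] as f64,
    }
}

fn main() -> Result<(), Box<dyn Error>> {
    let mut reader = csv::Reader::from_path("metrics.csv")?;
    let headers = reader.headers()?.clone();
    // Assumed column name; adjust to the serialized Measurement fields.
    let col = headers
        .iter()
        .position(|h| h == "time_total")
        .ok_or("missing time_total column")?;
    let times: Vec<u64> = reader
        .records()
        .filter_map(|r| r.ok()?.get(col)?.parse().ok())
        .collect();
    println!("median: {:.2}s", median(times) / 1000.0);
    Ok(())
}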
View File

@@ -24,7 +24,7 @@ use std::{
};
#[cfg(feature = "tracing")]
use tracing::{debug, debug_span, trace, warn, Instrument};
use tracing::{debug, debug_span, error, trace, warn, Instrument};
use tls_client::ClientConnection;


@@ -1,6 +1,5 @@
use super::{Backend, BackendError};
use crate::{DecryptMode, EncryptMode, Error};
#[allow(deprecated)]
use aes_gcm::{
aead::{generic_array::GenericArray, Aead, NewAead, Payload},
Aes128Gcm,
@@ -508,7 +507,6 @@ impl Encrypter {
let mut nonce = [0u8; 12];
nonce[..4].copy_from_slice(&self.write_iv);
nonce[4..].copy_from_slice(explicit_nonce);
#[allow(deprecated)]
let nonce = GenericArray::from_slice(&nonce);
let cipher = Aes128Gcm::new_from_slice(&self.write_key).unwrap();
// ciphertext will have the MAC appended
@@ -570,7 +568,6 @@ impl Decrypter {
let mut nonce = [0u8; 12];
nonce[..4].copy_from_slice(&self.write_iv);
nonce[4..].copy_from_slice(&m.payload.0[0..8]);
#[allow(deprecated)]
let nonce = GenericArray::from_slice(&nonce);
let plaintext = cipher
.decrypt(nonce, aes_payload)


@@ -1,7 +1,7 @@
use std::ops::Range;
use mpz_memory_core::{Vector, binary::U8};
use rangeset::set::RangeSet;
use rangeset::RangeSet;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct RangeMap<T> {
@@ -21,6 +21,20 @@ impl<T> RangeMap<T>
where
T: Item,
{
pub(crate) fn new(map: Vec<(usize, T)>) -> Self {
let mut pos = 0;
for (idx, item) in &map {
assert!(
*idx >= pos,
"items must be sorted by index and non-overlapping"
);
pos = *idx + item.length();
}
Self { map }
}
/// Returns `true` if the map is empty.
pub(crate) fn is_empty(&self) -> bool {
self.map.is_empty()
@@ -33,6 +47,11 @@ where
.map(|(idx, item)| *idx..*idx + item.length())
}
/// Returns the length of the map.
pub(crate) fn len(&self) -> usize {
self.map.iter().map(|(_, item)| item.length()).sum()
}
pub(crate) fn iter(&self) -> impl Iterator<Item = (Range<usize>, &T)> {
self.map
.iter()
@@ -58,7 +77,7 @@ where
pub(crate) fn index(&self, idx: &RangeSet<usize>) -> Option<Self> {
let mut map = Vec::new();
for idx in idx.iter() {
for idx in idx.iter_ranges() {
let pos = match self.map.binary_search_by(|(base, _)| base.cmp(&idx.start)) {
Ok(i) => i,
Err(0) => return None,

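The new `RangeMap::new` above enforces that items are sorted by start index and non-overlapping. A standalone illustration of just that invariant check, with items reduced to (start, length) pairs:

// Mirrors the assertion in RangeMap::new: each item must begin at or after
// the end of the previous one.
fn check_sorted_non_overlapping(items: &[(usize, usize)]) {
    let mut pos = 0;
    for &(start, len) in items {
        assert!(start >= pos, "items must be sorted by index and non-overlapping");
        pos = start + len;
    }
}

fn main() {
    check_sorted_non_overlapping(&[(0, 4), (6, 2), (9, 3)]); // ok
    // check_sorted_non_overlapping(&[(0, 4), (2, 2)]);      // would panic
}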

@@ -6,6 +6,11 @@ use mpz_core::Block;
#[cfg(not(tlsn_insecure))]
use mpz_garble::protocol::semihonest::{Evaluator, Garbler};
use mpz_garble_core::Delta;
use mpz_memory_core::{
Vector,
binary::U8,
correlated::{Key, Mac},
};
#[cfg(not(tlsn_insecure))]
use mpz_ot::cot::{DerandCOTReceiver, DerandCOTSender};
use mpz_ot::{
@@ -19,6 +24,8 @@ use tlsn_core::config::tls_commit::mpc::{MpcTlsConfig, NetworkSetting};
use tlsn_deap::Deap;
use tokio::sync::Mutex;
use crate::transcript_internal::commit::encoding::{KeyStore, MacStore};
#[cfg(not(tlsn_insecure))]
pub(crate) type ProverMpc =
Garbler<DerandCOTSender<SharedRCOTSender<kos::Sender<co::Receiver>, Block>>>;
@@ -186,3 +193,41 @@ pub(crate) fn translate_keys<Mpc, Zk>(keys: &mut SessionKeys, vm: &Deap<Mpc, Zk>
.translate(keys.server_write_mac_key)
.expect("VM memory should be consistent");
}
impl<T> KeyStore for Verifier<T> {
fn delta(&self) -> &Delta {
self.delta()
}
fn get_keys(&self, data: Vector<U8>) -> Option<&[Key]> {
self.get_keys(data).ok()
}
}
impl<T> MacStore for Prover<T> {
fn get_macs(&self, data: Vector<U8>) -> Option<&[Mac]> {
self.get_macs(data).ok()
}
}
#[cfg(tlsn_insecure)]
mod insecure {
use super::*;
use mpz_ideal_vm::IdealVm;
impl KeyStore for IdealVm {
fn delta(&self) -> &Delta {
unimplemented!("encodings not supported in insecure mode")
}
fn get_keys(&self, _data: Vector<U8>) -> Option<&[Key]> {
unimplemented!("encodings not supported in insecure mode")
}
}
impl MacStore for IdealVm {
fn get_macs(&self, _data: Vector<U8>) -> Option<&[Mac]> {
unimplemented!("encodings not supported in insecure mode")
}
}
}


@@ -2,6 +2,8 @@ use std::{error::Error, fmt};
use mpc_tls::MpcTlsError;
use crate::transcript_internal::commit::encoding::EncodingError;
/// Error for [`Prover`](crate::prover::Prover).
#[derive(Debug, thiserror::Error)]
pub struct ProverError {
@@ -107,3 +109,9 @@ impl From<MpcTlsError> for ProverError {
Self::new(ErrorKind::Mpc, e)
}
}
impl From<EncodingError> for ProverError {
fn from(e: EncodingError) -> Self {
Self::new(ErrorKind::Commit, e)
}
}


@@ -2,7 +2,7 @@ use mpc_tls::SessionKeys;
use mpz_common::Context;
use mpz_memory_core::binary::Binary;
use mpz_vm_core::Vm;
use rangeset::set::RangeSet;
use rangeset::{RangeSet, UnionMut};
use tlsn_core::{
ProverOutput,
config::prove::ProveConfig,
@@ -13,10 +13,17 @@ use tlsn_core::{
use crate::{
prover::ProverError,
transcript_internal::{TranscriptRefs, auth::prove_plaintext, commit::hash::prove_hash},
transcript_internal::{
TranscriptRefs,
auth::prove_plaintext,
commit::{
encoding::{self, MacStore},
hash::prove_hash,
},
},
};
pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
pub(crate) async fn prove<T: Vm<Binary> + MacStore + Send + Sync>(
ctx: &mut Context,
vm: &mut T,
keys: &SessionKeys,
@@ -38,6 +45,13 @@ pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
Direction::Sent => commit_sent.union_mut(idx),
Direction::Received => commit_recv.union_mut(idx),
});
commit_config
.iter_encoding()
.for_each(|(direction, idx)| match direction {
Direction::Sent => commit_sent.union_mut(idx),
Direction::Received => commit_recv.union_mut(idx),
});
}
let transcript_refs = TranscriptRefs {
@@ -88,6 +102,45 @@ pub(crate) async fn prove<T: Vm<Binary> + Send + Sync>(
vm.execute_all(ctx).await.map_err(ProverError::zk)?;
if let Some(commit_config) = config.transcript_commit()
&& commit_config.has_encoding()
{
let mut sent_ranges = RangeSet::default();
let mut recv_ranges = RangeSet::default();
for (dir, idx) in commit_config.iter_encoding() {
match dir {
Direction::Sent => sent_ranges.union_mut(idx),
Direction::Received => recv_ranges.union_mut(idx),
}
}
let sent_map = transcript_refs
.sent
.index(&sent_ranges)
.expect("indices are valid");
let recv_map = transcript_refs
.recv
.index(&recv_ranges)
.expect("indices are valid");
let (commitment, tree) = encoding::receive(
ctx,
vm,
*commit_config.encoding_hash_alg(),
&sent_map,
&recv_map,
commit_config.iter_encoding(),
)
.await?;
output
.transcript_commitments
.push(TranscriptCommitment::Encoding(commitment));
output
.transcript_secrets
.push(TranscriptSecret::Encoding(tree));
}
if let Some((hash_fut, hash_secrets)) = hash_commitments {
let hash_commitments = hash_fut.try_recv().map_err(ProverError::commit)?;
for (commitment, secret) in hash_commitments.into_iter().zip(hash_secrets) {


@@ -12,7 +12,7 @@ use mpz_memory_core::{
binary::{Binary, U8},
};
use mpz_vm_core::{Call, CallableExt, Vm};
use rangeset::{iter::RangeIterator, ops::Set, set::RangeSet};
use rangeset::{Difference, RangeSet, Union};
use tlsn_core::transcript::Record;
use crate::transcript_internal::ReferenceMap;
@@ -32,7 +32,7 @@ pub(crate) fn prove_plaintext<'a>(
commit.clone()
} else {
// The plaintext is only partially revealed, so we need to authenticate in ZK.
commit.union(reveal).into_set()
commit.union(reveal)
};
let plaintext_refs = alloc_plaintext(vm, &alloc_ranges)?;
@@ -49,7 +49,7 @@ pub(crate) fn prove_plaintext<'a>(
vm.commit(*slice).map_err(PlaintextAuthError::vm)?;
}
} else {
let private = commit.difference(reveal).into_set();
let private = commit.difference(reveal);
for (_, slice) in plaintext_refs
.index(&private)
.expect("all ranges are allocated")
@@ -98,7 +98,7 @@ pub(crate) fn verify_plaintext<'a>(
commit.clone()
} else {
// The plaintext is only partially revealed, so we need to authenticate in ZK.
commit.union(reveal).into_set()
commit.union(reveal)
};
let plaintext_refs = alloc_plaintext(vm, &alloc_ranges)?;
@@ -123,7 +123,7 @@ pub(crate) fn verify_plaintext<'a>(
ciphertext,
})
} else {
let private = commit.difference(reveal).into_set();
let private = commit.difference(reveal);
for (_, slice) in plaintext_refs
.index(&private)
.expect("all ranges are allocated")
@@ -175,13 +175,15 @@ fn alloc_plaintext(
let plaintext = vm.alloc_vec::<U8>(len).map_err(PlaintextAuthError::vm)?;
let mut pos = 0;
Ok(ReferenceMap::from_iter(ranges.iter().map(move |range| {
let chunk = plaintext
.get(pos..pos + range.len())
.expect("length was checked");
pos += range.len();
(range.start, chunk)
})))
Ok(ReferenceMap::from_iter(ranges.iter_ranges().map(
move |range| {
let chunk = plaintext
.get(pos..pos + range.len())
.expect("length was checked");
pos += range.len();
(range.start, chunk)
},
)))
}
fn alloc_ciphertext<'a>(
@@ -210,13 +212,15 @@ fn alloc_ciphertext<'a>(
let ciphertext: Vector<U8> = vm.call(call).map_err(PlaintextAuthError::vm)?;
let mut pos = 0;
Ok(ReferenceMap::from_iter(ranges.iter().map(move |range| {
let chunk = ciphertext
.get(pos..pos + range.len())
.expect("length was checked");
pos += range.len();
(range.start, chunk)
})))
Ok(ReferenceMap::from_iter(ranges.iter_ranges().map(
move |range| {
let chunk = ciphertext
.get(pos..pos + range.len())
.expect("length was checked");
pos += range.len();
(range.start, chunk)
},
)))
}
fn alloc_keystream<'a>(
@@ -229,7 +233,7 @@ fn alloc_keystream<'a>(
let mut keystream = Vec::new();
let mut pos = 0;
let mut range_iter = ranges.iter();
let mut range_iter = ranges.iter_ranges();
let mut current_range = range_iter.next();
for record in records {
let mut explicit_nonce = None;
@@ -504,7 +508,7 @@ mod tests {
for record in records {
let mut record_keystream = vec![0u8; record.len];
aes_ctr_apply_keystream(&key, &iv, &record.explicit_nonce, &mut record_keystream);
for mut range in ranges.iter() {
for mut range in ranges.iter_ranges() {
range.start = range.start.max(pos);
range.end = range.end.min(pos + record.len);
if range.start < range.end {


@@ -1,3 +1,4 @@
//! Plaintext commitment and proof of encryption.
pub(crate) mod encoding;
pub(crate) mod hash;


@@ -0,0 +1,267 @@
//! Encoding commitment protocol.
use std::ops::Range;
use mpz_common::Context;
use mpz_memory_core::{
Vector,
binary::U8,
correlated::{Delta, Key, Mac},
};
use rand::Rng;
use rangeset::RangeSet;
use serde::{Deserialize, Serialize};
use serio::{SinkExt, stream::IoStreamExt};
use tlsn_core::{
hash::{Blake3, HashAlgId, HashAlgorithm, Keccak256, Sha256},
transcript::{
Direction,
encoding::{
Encoder, EncoderSecret, EncodingCommitment, EncodingProvider, EncodingProviderError,
EncodingTree, EncodingTreeError, new_encoder,
},
},
};
use crate::{
map::{Item, RangeMap},
transcript_internal::ReferenceMap,
};
/// Bytes of encoding per plaintext byte.
const ENCODING_SIZE: usize = 128;
#[derive(Debug, Serialize, Deserialize)]
struct Encodings {
sent: Vec<u8>,
recv: Vec<u8>,
}
/// Transfers encodings for the provided plaintext ranges.
pub(crate) async fn transfer<K: KeyStore>(
ctx: &mut Context,
store: &K,
sent: &ReferenceMap,
recv: &ReferenceMap,
) -> Result<(EncoderSecret, EncodingCommitment), EncodingError> {
let secret = EncoderSecret::new(rand::rng().random(), store.delta().as_block().to_bytes());
let encoder = new_encoder(&secret);
// Collects the encodings for the provided plaintext ranges.
fn collect_encodings(
encoder: &impl Encoder,
store: &impl KeyStore,
direction: Direction,
map: &ReferenceMap,
) -> Vec<u8> {
let mut encodings = Vec::with_capacity(map.len() * ENCODING_SIZE);
for (range, chunk) in map.iter() {
let start = encodings.len();
encoder.encode_range(direction, range, &mut encodings);
let keys = store
.get_keys(*chunk)
.expect("keys are present for provided plaintext ranges");
encodings[start..]
.iter_mut()
.zip(keys.iter().flat_map(|key| key.as_block().as_bytes()))
.for_each(|(encoding, key)| {
*encoding ^= *key;
});
}
encodings
}
let encodings = Encodings {
sent: collect_encodings(&encoder, store, Direction::Sent, sent),
recv: collect_encodings(&encoder, store, Direction::Received, recv),
};
let frame_limit = ctx
.io()
.limit()
.saturating_add(encodings.sent.len() + encodings.recv.len());
ctx.io_mut().with_limit(frame_limit).send(encodings).await?;
let root = ctx.io_mut().expect_next().await?;
Ok((secret, EncodingCommitment { root }))
}
/// Receives and commits to the encodings for the provided plaintext ranges.
pub(crate) async fn receive<M: MacStore>(
ctx: &mut Context,
store: &M,
hash_alg: HashAlgId,
sent: &ReferenceMap,
recv: &ReferenceMap,
idxs: impl IntoIterator<Item = &(Direction, RangeSet<usize>)>,
) -> Result<(EncodingCommitment, EncodingTree), EncodingError> {
let hasher: &(dyn HashAlgorithm + Send + Sync) = match hash_alg {
HashAlgId::SHA256 => &Sha256::default(),
HashAlgId::KECCAK256 => &Keccak256::default(),
HashAlgId::BLAKE3 => &Blake3::default(),
alg => {
return Err(ErrorRepr::UnsupportedHashAlgorithm(alg).into());
}
};
let (sent_len, recv_len) = (sent.len(), recv.len());
let frame_limit = ctx
.io()
.limit()
.saturating_add(ENCODING_SIZE * (sent_len + recv_len));
let encodings: Encodings = ctx.io_mut().with_limit(frame_limit).expect_next().await?;
if encodings.sent.len() != sent_len * ENCODING_SIZE {
return Err(ErrorRepr::IncorrectMacCount {
direction: Direction::Sent,
expected: sent_len,
got: encodings.sent.len() / ENCODING_SIZE,
}
.into());
}
if encodings.recv.len() != recv_len * ENCODING_SIZE {
return Err(ErrorRepr::IncorrectMacCount {
direction: Direction::Received,
expected: recv_len,
got: encodings.recv.len() / ENCODING_SIZE,
}
.into());
}
// Collects a map of plaintext ranges to their encodings.
fn collect_map(
store: &impl MacStore,
mut encodings: Vec<u8>,
map: &ReferenceMap,
) -> RangeMap<EncodingSlice> {
let mut encoding_map = Vec::new();
let mut pos = 0;
for (range, chunk) in map.iter() {
let macs = store
.get_macs(*chunk)
.expect("MACs are present for provided plaintext ranges");
let encoding = &mut encodings[pos..pos + range.len() * ENCODING_SIZE];
encoding
.iter_mut()
.zip(macs.iter().flat_map(|mac| mac.as_bytes()))
.for_each(|(encoding, mac)| {
*encoding ^= *mac;
});
encoding_map.push((range.start, EncodingSlice::from(&(*encoding))));
pos += range.len() * ENCODING_SIZE;
}
RangeMap::new(encoding_map)
}
let provider = Provider {
sent: collect_map(store, encodings.sent, sent),
recv: collect_map(store, encodings.recv, recv),
};
let tree = EncodingTree::new(hasher, idxs, &provider)?;
let root = tree.root();
ctx.io_mut().send(root.clone()).await?;
let commitment = EncodingCommitment { root };
Ok((commitment, tree))
}
pub(crate) trait KeyStore {
fn delta(&self) -> &Delta;
fn get_keys(&self, data: Vector<U8>) -> Option<&[Key]>;
}
pub(crate) trait MacStore {
fn get_macs(&self, data: Vector<U8>) -> Option<&[Mac]>;
}
#[derive(Debug)]
struct Provider {
sent: RangeMap<EncodingSlice>,
recv: RangeMap<EncodingSlice>,
}
impl EncodingProvider for Provider {
fn provide_encoding(
&self,
direction: Direction,
range: Range<usize>,
dest: &mut Vec<u8>,
) -> Result<(), EncodingProviderError> {
let encodings = match direction {
Direction::Sent => &self.sent,
Direction::Received => &self.recv,
};
let encoding = encodings.get(range).ok_or(EncodingProviderError)?;
dest.extend_from_slice(encoding);
Ok(())
}
}
#[derive(Debug)]
struct EncodingSlice(Vec<u8>);
impl From<&[u8]> for EncodingSlice {
fn from(value: &[u8]) -> Self {
Self(value.to_vec())
}
}
impl Item for EncodingSlice {
type Slice<'a>
= &'a [u8]
where
Self: 'a;
fn length(&self) -> usize {
self.0.len() / ENCODING_SIZE
}
fn slice<'a>(&'a self, range: Range<usize>) -> Option<Self::Slice<'a>> {
self.0
.get(range.start * ENCODING_SIZE..range.end * ENCODING_SIZE)
}
}
/// Encoding protocol error.
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct EncodingError(#[from] ErrorRepr);
#[derive(Debug, thiserror::Error)]
#[error("encoding protocol error: {0}")]
enum ErrorRepr {
#[error("I/O error: {0}")]
Io(std::io::Error),
#[error("incorrect MAC count for {direction}: expected {expected}, got {got}")]
IncorrectMacCount {
direction: Direction,
expected: usize,
got: usize,
},
#[error("encoding tree error: {0}")]
EncodingTree(EncodingTreeError),
#[error("unsupported hash algorithm: {0}")]
UnsupportedHashAlgorithm(HashAlgId),
}
impl From<std::io::Error> for EncodingError {
fn from(value: std::io::Error) -> Self {
Self(ErrorRepr::Io(value))
}
}
impl From<EncodingTreeError> for EncodingError {
fn from(value: EncodingTreeError) -> Self {
Self(ErrorRepr::EncodingTree(value))
}
}
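The masking in `transfer` and `receive` above is plain XOR algebra over correlated encodings: the verifier (which implements `KeyStore` as the garbler, holding every zero-label and the global delta) pads each encoder encoding with its key, and the prover (which implements `MacStore` as the evaluator, holding one active label per wire) strips the pad with its MAC. A standalone sketch, assuming mpz's correlated relation mac = key ^ (bit * delta), with labels shrunk to u128 for readability:

fn main() {
    // Toy values; real labels are 16-byte blocks, eight per plaintext byte.
    let (enc0, key, delta) = (0x1111_u128, 0xA5A5_u128, 0xF0F0_u128);
    for bit in [0_u128, 1] {
        let mac = key ^ (bit * delta); // evaluator's active label (mpz: Mac)
        let pad = enc0 ^ key;          // what transfer() sends on the wire
        // receive() XORs in the MAC, leaving the encoding of the true bit:
        assert_eq!(pad ^ mac, enc0 ^ (bit * delta));
    }
}

On this reading, because the encoder secret is constructed with the same delta (see `EncoderSecret::new` in `transfer`), the value the prover commits to in the Merkle tree is the encoder's encoding of its actual plaintext bit, while the verifier never learns which label was kept.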


@@ -9,7 +9,7 @@ use mpz_memory_core::{
binary::{Binary, U8},
};
use mpz_vm_core::{Vm, VmError, prelude::*};
use rangeset::set::RangeSet;
use rangeset::RangeSet;
use tlsn_core::{
hash::{Blinder, Hash, HashAlgId, TypedHash},
transcript::{
@@ -155,7 +155,7 @@ fn hash_commit_inner(
Direction::Received => &refs.recv,
};
for range in idx.iter() {
for range in idx.iter_ranges() {
hasher.update(&refs.get(range).expect("plaintext refs are valid"));
}
@@ -176,7 +176,7 @@ fn hash_commit_inner(
Direction::Received => &refs.recv,
};
for range in idx.iter() {
for range in idx.iter_ranges() {
hasher
.update(vm, &refs.get(range).expect("plaintext refs are valid"))
.map_err(HashCommitError::hasher)?;
@@ -201,7 +201,7 @@ fn hash_commit_inner(
Direction::Received => &refs.recv,
};
for range in idx.iter() {
for range in idx.iter_ranges() {
hasher
.update(vm, &refs.get(range).expect("plaintext refs are valid"))
.map_err(HashCommitError::hasher)?;


@@ -2,6 +2,8 @@ use std::{error::Error, fmt};
use mpc_tls::MpcTlsError;
use crate::transcript_internal::commit::encoding::EncodingError;
/// Error for [`Verifier`](crate::verifier::Verifier).
#[derive(Debug, thiserror::Error)]
pub struct VerifierError {
@@ -55,6 +57,7 @@ enum ErrorKind {
Config,
Mpc,
Zk,
Commit,
Verify,
}
@@ -67,6 +70,7 @@ impl fmt::Display for VerifierError {
ErrorKind::Config => f.write_str("config error")?,
ErrorKind::Mpc => f.write_str("mpc error")?,
ErrorKind::Zk => f.write_str("zk error")?,
ErrorKind::Commit => f.write_str("commit error")?,
ErrorKind::Verify => f.write_str("verification error")?,
}
@@ -101,3 +105,9 @@ impl From<MpcTlsError> for VerifierError {
Self::new(ErrorKind::Mpc, e)
}
}
impl From<EncodingError> for VerifierError {
fn from(e: EncodingError) -> Self {
Self::new(ErrorKind::Commit, e)
}
}


@@ -2,7 +2,7 @@ use mpc_tls::SessionKeys;
use mpz_common::Context;
use mpz_memory_core::binary::Binary;
use mpz_vm_core::Vm;
use rangeset::set::RangeSet;
use rangeset::{RangeSet, UnionMut};
use tlsn_core::{
VerifierOutput,
config::prove::ProveRequest,
@@ -14,12 +14,19 @@ use tlsn_core::{
};
use crate::{
transcript_internal::{TranscriptRefs, auth::verify_plaintext, commit::hash::verify_hash},
transcript_internal::{
TranscriptRefs,
auth::verify_plaintext,
commit::{
encoding::{self, KeyStore},
hash::verify_hash,
},
},
verifier::VerifierError,
};
#[allow(clippy::too_many_arguments)]
pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
pub(crate) async fn verify<T: Vm<Binary> + KeyStore + Send + Sync>(
ctx: &mut Context,
vm: &mut T,
keys: &SessionKeys,
@@ -87,6 +94,11 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
Direction::Sent => commit_sent.union_mut(idx),
Direction::Received => commit_recv.union_mut(idx),
});
if let Some((sent, recv)) = commit_config.encoding() {
commit_sent.union_mut(sent);
commit_recv.union_mut(recv);
}
}
let (sent_refs, sent_proof) = verify_plaintext(
@@ -139,6 +151,24 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
sent_proof.verify().map_err(VerifierError::verify)?;
recv_proof.verify().map_err(VerifierError::verify)?;
let mut encoder_secret = None;
if let Some(commit_config) = request.transcript_commit()
&& let Some((sent, recv)) = commit_config.encoding()
{
let sent_map = transcript_refs
.sent
.index(sent)
.expect("ranges were authenticated");
let recv_map = transcript_refs
.recv
.index(recv)
.expect("ranges were authenticated");
let (secret, commitment) = encoding::transfer(ctx, vm, &sent_map, &recv_map).await?;
encoder_secret = Some(secret);
transcript_commitments.push(TranscriptCommitment::Encoding(commitment));
}
if let Some(hash_commitments) = hash_commitments {
for commitment in hash_commitments.try_recv().map_err(VerifierError::verify)? {
transcript_commitments.push(TranscriptCommitment::Hash(commitment));
@@ -148,6 +178,7 @@ pub(crate) async fn verify<T: Vm<Binary> + Send + Sync>(
Ok(VerifierOutput {
server_name,
transcript: request.reveal().is_some().then_some(transcript),
encoder_secret,
transcript_commitments,
})
}


@@ -1,4 +1,5 @@
use futures::{AsyncReadExt, AsyncWriteExt};
use rangeset::RangeSet;
use tlsn::{
config::{
prove::ProveConfig,
@@ -8,9 +9,12 @@ use tlsn::{
verifier::VerifierConfig,
},
connection::ServerName,
hash::HashAlgId,
hash::{HashAlgId, HashProvider},
prover::Prover,
transcript::{Direction, Transcript, TranscriptCommitConfig, TranscriptCommitmentKind},
transcript::{
Direction, Transcript, TranscriptCommitConfig, TranscriptCommitment,
TranscriptCommitmentKind, TranscriptSecret,
},
verifier::{Verifier, VerifierOutput},
webpki::{CertificateDer, RootCertStore},
};
@@ -38,7 +42,7 @@ async fn test() {
let (socket_0, socket_1) = tokio::io::duplex(2 << 23);
let ((_full_transcript, _prover_output), verifier_output) =
let ((full_transcript, prover_output), verifier_output) =
tokio::join!(prover(socket_0), verifier(socket_1));
let partial_transcript = verifier_output.transcript.unwrap();
@@ -47,13 +51,65 @@ async fn test() {
assert_eq!(server_name.as_str(), SERVER_DOMAIN);
assert!(!partial_transcript.is_complete());
assert_eq!(
partial_transcript.sent_authed().iter().next().unwrap(),
partial_transcript
.sent_authed()
.iter_ranges()
.next()
.unwrap(),
0..10
);
assert_eq!(
partial_transcript.received_authed().iter().next().unwrap(),
partial_transcript
.received_authed()
.iter_ranges()
.next()
.unwrap(),
0..10
);
let encoding_tree = prover_output
.transcript_secrets
.iter()
.find_map(|secret| {
if let TranscriptSecret::Encoding(tree) = secret {
Some(tree)
} else {
None
}
})
.unwrap();
let encoding_commitment = prover_output
.transcript_commitments
.iter()
.find_map(|commitment| {
if let TranscriptCommitment::Encoding(commitment) = commitment {
Some(commitment)
} else {
None
}
})
.unwrap();
let prove_sent = RangeSet::from(1..full_transcript.sent().len() - 1);
let prove_recv = RangeSet::from(1..full_transcript.received().len() - 1);
let idxs = [
(Direction::Sent, prove_sent.clone()),
(Direction::Received, prove_recv.clone()),
];
let proof = encoding_tree.proof(idxs.iter()).unwrap();
let (auth_sent, auth_recv) = proof
.verify_with_provider(
&HashProvider::default(),
&verifier_output.encoder_secret.unwrap(),
encoding_commitment,
full_transcript.sent(),
full_transcript.received(),
)
.unwrap();
assert_eq!(auth_sent, prove_sent);
assert_eq!(auth_recv, prove_recv);
}
#[instrument(skip(verifier_socket))]
@@ -115,21 +171,25 @@ async fn prover<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(
let mut builder = TranscriptCommitConfig::builder(prover.transcript());
let kind = TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
};
builder
.commit_with_kind(&(0..sent_tx_len), Direction::Sent, kind)
.unwrap();
builder
.commit_with_kind(&(0..recv_tx_len), Direction::Received, kind)
.unwrap();
builder
.commit_with_kind(&(1..sent_tx_len - 1), Direction::Sent, kind)
.unwrap();
builder
.commit_with_kind(&(1..recv_tx_len - 1), Direction::Received, kind)
.unwrap();
for kind in [
TranscriptCommitmentKind::Encoding,
TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
},
] {
builder
.commit_with_kind(&(0..sent_tx_len), Direction::Sent, kind)
.unwrap();
builder
.commit_with_kind(&(0..recv_tx_len), Direction::Received, kind)
.unwrap();
builder
.commit_with_kind(&(1..sent_tx_len - 1), Direction::Sent, kind)
.unwrap();
builder
.commit_with_kind(&(1..recv_tx_len - 1), Direction::Received, kind)
.unwrap();
}
let transcript_commit = builder.build().unwrap();


@@ -151,9 +151,9 @@ impl From<tlsn::transcript::PartialTranscript> for PartialTranscript {
fn from(value: tlsn::transcript::PartialTranscript) -> Self {
Self {
sent: value.sent_unsafe().to_vec(),
sent_authed: value.sent_authed().iter().collect(),
sent_authed: value.sent_authed().iter_ranges().collect(),
recv: value.received_unsafe().to_vec(),
recv_authed: value.received_authed().iter().collect(),
recv_authed: value.received_authed().iter_ranges().collect(),
}
}
}