diff --git a/.cargo/config b/.cargo/config
new file mode 100644
index 0000000..4d6052d
--- /dev/null
+++ b/.cargo/config
@@ -0,0 +1,10 @@
+[target.wasm32-unknown-unknown]
+rustflags = [
+  "-C",
+  "target-feature=+atomics,+bulk-memory,+mutable-globals",
+  "-C",
+  "link-arg=--max-memory=4294967296"
+]
+
+[unstable]
+build-std = ["panic_abort", "std"]
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index e22ae5b..7ed991b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,10 @@
+wasm/prover/pkg
+wasm/prover/target
+wasm/prover/Cargo.lock
+wasm-pack.log
 node_modules/
 .idea/
 .DS_Store
 pnpm-lock.yaml
 build/
-test-build/
\ No newline at end of file
+test-build/
diff --git a/package.json b/package.json
index 018124b..7a38a82 100644
--- a/package.json
+++ b/package.json
@@ -4,8 +4,14 @@
   "description": "",
   "main": "index.js",
   "scripts": {
+    "build:wasm": "wasm-pack build --target web wasm/prover",
     "build:test": "webpack --config webpack.test.config.js",
-    "test": "webpack-dev-server --config webpack.test.config.js --hot"
+    "watch:test": "webpack --config webpack.test.config.js --watch",
+    "serve:test": "serve ./test-build -l 3000",
+    "test": "concurrently npm:watch:test npm:serve:test"
+  },
+  "dependencies": {
+    "comlink": "^4.4.1"
   },
   "devDependencies": {
     "browserify": "^17.0.0",
@@ -20,9 +26,11 @@
     "image-webpack-loader": "^6.0.0",
     "node-loader": "^0.6.0",
     "process": "^0.11.10",
+    "serve": "14.2.1",
     "stream-browserify": "^3.0.0",
     "ts-loader": "^6.2.1",
     "typescript": "^4.9.3",
+    "wasm-pack": "^0.12.1",
     "webpack": "^5.75.0",
     "webpack-cli": "^5.0.0",
     "webpack-dev-server": "^4.11.1",
diff --git a/serve.json b/serve.json
new file mode 100644
index 0000000..d9becd8
--- /dev/null
+++ b/serve.json
@@ -0,0 +1,14 @@
+{
+  "headers": [{
+    "source": "**/*",
+    "headers": [
+      {
+        "key": "Cross-Origin-Embedder-Policy",
+        "value": "require-corp"
+      }, {
+        "key": "Cross-Origin-Opener-Policy",
+        "value": "same-origin"
+      }
+    ]
+  }]
+}
\ No newline at end of file
diff --git a/src/app.tsx b/src/app.tsx
index 309af59..a928287 100644
--- a/src/app.tsx
+++ b/src/app.tsx
@@ -1 +1,61 @@
-console.log('hi')
\ No newline at end of file
+import * as Comlink from 'comlink';
+
+const TLSN: any = Comlink.wrap(
+  new Worker(new URL('./worker.ts', import.meta.url)),
+);
+
+let tlsn: any | null = null;
+
+async function getTLSN(): Promise<any> {
+  if (tlsn) return tlsn;
+  tlsn = await new TLSN();
+  return tlsn;
+}
+
+export const NOTARY_SERVER_PUBKEY = `-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEBv36FI4ZFszJa0DQFJ3wWCXvVLFr\ncRzMG5kaTeHGoSzDu6cFqx3uEWYpFGo6C0EOUgf+mEgbktLrXocv5yHzKg==\n-----END PUBLIC KEY-----`;
+
+export const prove = async (
+  url: string,
+  options: {
+    notaryUrl: string;
+    websocketProxyUrl: string;
+    method?: string;
+    headers?: { [key: string]: string };
+    body?: string;
+    maxTranscriptSize?: number;
+    secretHeaders?: string[];
+    secretResps?: string[];
+  }
+) => {
+  const {
+    method,
+    headers,
+    body,
+    maxTranscriptSize = 32768,
+    notaryUrl,
+    websocketProxyUrl,
+    secretHeaders,
+    secretResps,
+  } = options;
+
+  const tlsn = await getTLSN();
+  return tlsn.prover(url, {
+    method,
+    headers,
+    body,
+    maxTranscriptSize,
+    notaryUrl,
+    websocketProxyUrl,
+    secretHeaders,
+    secretResps,
+  });
+}
+
+export const verify = async (proof: { session: any; substrings: any }, pubkey = NOTARY_SERVER_PUBKEY) => {
+  const tlsn = await getTLSN();
+  const result = await tlsn.verify(
+    proof,
+    pubkey,
+  );
+  return result;
+}
\ No newline at end of file
diff --git a/src/worker.ts b/src/worker.ts
new file mode 100644
index 0000000..cbb74d4
--- /dev/null
+++ b/src/worker.ts
@@ -0,0 +1,89 @@
+import * as Comlink from 'comlink';
+import init, {
+  initThreadPool,
+  prover,
+  verify,
+} from '../wasm/prover/pkg/tlsn_extension_rs';
+
+class TLSN {
+  private startPromise: any;
+  private resolveStart: any;
+
+  constructor() {
+    console.log('worker module initiated.');
+    this.startPromise = new Promise((resolve) => {
+      this.resolveStart = resolve;
+    });
+    this.start();
+  }
+
+  async start() {
+    console.log('start');
+    const numConcurrency = navigator.hardwareConcurrency;
+    console.log('!@# navigator.hardwareConcurrency=', numConcurrency);
+    const res = await init();
+    console.log('!@# res.memory=', res.memory);
+    // 6422528 ~= 6.12 mb
+    console.log('!@# res.memory.buffer.length=', res.memory.buffer.byteLength);
+    await initThreadPool(numConcurrency);
+    this.resolveStart();
+  }
+
+  async waitForStart() {
+    return this.startPromise;
+  }
+
+  async prover(
+    url: string,
+    options?: {
+      method?: string;
+      headers?: { [key: string]: string };
+      body?: string;
+      maxTranscriptSize?: number;
+      notaryUrl?: string;
+      websocketProxyUrl?: string;
+      secretHeaders?: string[];
+      secretResps?: string[];
+    },
+  ) {
+    try {
+      await this.waitForStart();
+      console.log('worker', url, {
+        ...options,
+        notaryUrl: options?.notaryUrl,
+        websocketProxyUrl: options?.websocketProxyUrl,
+      });
+      const resProver = await prover(
+        url,
+        {
+          ...options,
+          notaryUrl: options?.notaryUrl,
+          websocketProxyUrl: options?.websocketProxyUrl,
+        },
+        options?.secretHeaders || [],
+        options?.secretResps || [],
+      );
+      const resJSON = JSON.parse(resProver);
+      console.log('!@# resProver,resJSON=', { resProver, resJSON });
+      console.log('!@# resAfter.memory=', resJSON.memory);
+      // 1105920000 ~= 1.03 gb
+      console.log(
+        '!@# resAfter.memory.buffer.length=',
+        resJSON.memory?.buffer?.byteLength,
+      );
+
+      return resJSON;
+    } catch (e: any) {
+      console.log(e);
+      return e;
+    }
+  }
+
+  async verify(proof: any, pubkey: string) {
+    await this.waitForStart();
+    const raw = await verify(JSON.stringify(proof), pubkey);
+    return JSON.parse(raw);
+  }
+}
+
+Comlink.expose(TLSN);
diff --git a/utils/test.ts b/utils/test.ts
new file mode 100644
index 0000000..7899a65
--- /dev/null
+++ b/utils/test.ts
@@ -0,0 +1,21 @@
+import {prove, verify, NOTARY_SERVER_PUBKEY} from "../src/app";
+
+
+(async function runTest() {
+  const proof = await prove('https://swapi.dev/api/people/1', {
+    method: 'GET',
+    headers: {
+      'Connection': 'close',
+      'Accept': 'application/json',
+      'Accept-Encoding': 'identity'
+    },
+    body: '',
+    maxTranscriptSize: 20000,
+    notaryUrl: 'https://127.0.0.1:7047',
+    websocketProxyUrl: 'ws://127.0.0.1:55688'
+  });
+
+  const result = await verify(proof, NOTARY_SERVER_PUBKEY);
+
+  console.log(result);
+})();
diff --git a/wasm/prover/Cargo.toml b/wasm/prover/Cargo.toml
new file mode 100644
index 0000000..52fce59
--- /dev/null
+++ b/wasm/prover/Cargo.toml
@@ -0,0 +1,111 @@
+[package]
+name = "tlsn-extension-rs"
+version = "0.1.0"
+authors = ["The tlsn-extension Developers"]
+edition = "2018"
+rust-version = "1.56"
+
+[lib]
+crate-type = ["cdylib"]
+
+[dependencies]
+rayon = "1.5"
+wasm-bindgen-rayon = "1.0"
+wasm-bindgen = "0.2.87"
+js-sys = "0.3.64"
+tracing = "0.1"
+getrandom = { version = "0.2", features = ["js"] }
+ws_stream_wasm = "0.7.4"
+wasm-bindgen-futures = "0.4.37"
+tokio-util = "0.7"
+futures = "0.3"
+serde_json = "1.0"
+serde = { version = "1.0.147", features = ["derive"] }
+serde-wasm-bindgen = "0.4"
+url = { version = "2.0", features = ["serde"] }
+futures-util = "0.3.28"
+chrono = "0.4"
+elliptic-curve = { version = "0.13.5", features = ["pkcs8"] }
+p256 = { version = "0.13", features = ["pem", "ecdsa"] }
+
+hyper = { version = "0.14", features = ["client", "http1"] }
+console_error_panic_hook = "0.1.7"
+tracing-web = "0.1.2"
+tracing-subscriber = { version = "0.3", features = ["time"] }
+
+# time crate: https://crates.io/crates/time
+# NOTE: It is required, otherwise a "time not implemented on this platform" error happens right after "!@# 2".
+# Probably because tokio's time feature is used in tlsn-prover.
+time = { version = "0.3", features = ["wasm-bindgen"] }
+# Used to calculate elapsed time.
+web-time = "0.2"
+
+# tlsn-prover = { path = "../tlsn/tlsn/tlsn-prover", features = ["tracing"] }
+[dependencies.tlsn-prover]
+git = "https://github.com/mhchia/tlsn.git"
+branch = "dev-20231006-webtime-2"
+package = "tlsn-prover"
+features = ["tracing"]
+
+[dependencies.tlsn-core]
+git = "https://github.com/mhchia/tlsn.git"
+branch = "dev-20231006-webtime-2"
+package = "tlsn-core"
+
+[dependencies.web-sys]
+version = "0.3.4"
+features = [
+  "BinaryType",
+  "Blob",
+  "ErrorEvent",
+  "FileReader",
+  "MessageEvent",
+  "ProgressEvent",
+  "WebSocket",
+  "console",
+  'Document',
+  'HtmlElement',
+  'HtmlInputElement',
+  'Window',
+  'Worker',
+  'Headers',
+  'Request',
+  'RequestInit',
+  'RequestMode',
+  'Response',
+]
+
+# Replace ring with the forked version `ring-xous` implemented in pure rust
+# to make it compile to wasm.
+# Refs:
+# - Rationale for `ring-xous`: https://www.bunniestudios.com/blog/?p=6521
+# - Issue for wasm compatibility: https://github.com/briansmith/ring/issues/918
+[patch.crates-io.ring]
+git="https://github.com/betrusted-io/ring-xous"
+branch="0.16.20-cleanup"
+
+[patch.crates-io.ws_stream_wasm]
+# path = "../../../others/ws_stream_wasm"
+# Use the patched ws_stream_wasm to fix the issue https://github.com/najamelan/ws_stream_wasm/issues/12#issuecomment-1711902958
+git="https://github.com/mhchia/ws_stream_wasm"
+branch="dev"
+
+# [patch.'https://github.com/tlsnotary/tlsn-utils']
+# # Use single cpu backend
+# tlsn-utils = { git = 'https://www.github.com/mhchia/tlsn-utils.git', rev = "46327f0" }
+# tlsn-utils-aio = { git = 'https://www.github.com/mhchia/tlsn-utils.git', rev = "46327f0" }
+
+# The `console_error_panic_hook` crate provides better debugging of panics by
+# logging them with `console.error`. This is great for development, but requires
+# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
+# code size when deploying.
+console_error_panic_hook = { version = "0.1.7", optional = true }
+
+[dev-dependencies]
+wasm-bindgen-test = "0.3.34"
+
+[profile.release]
+# Tell `rustc` to optimize for small code size.
+
+[package.metadata.wasm-pack.profile.release]
+wasm-opt = false
diff --git a/wasm/prover/src/lib.rs b/wasm/prover/src/lib.rs
new file mode 100644
index 0000000..716e928
--- /dev/null
+++ b/wasm/prover/src/lib.rs
@@ -0,0 +1,485 @@
+mod requests;
+mod requestOpt;
+
+use std::panic;
+use std::ops::Range;
+use web_time::Instant;
+
+use hyper::{body::to_bytes, Body, Request, StatusCode};
+use futures::{AsyncWriteExt, TryFutureExt};
+use futures::channel::oneshot;
+use tlsn_prover::{Prover, ProverConfig};
+
+// use tokio::io::AsyncWriteExt as _;
+use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
+
+use tokio_util::compat::FuturesAsyncWriteCompatExt;
+use wasm_bindgen::prelude::*;
+use wasm_bindgen_futures::spawn_local;
+
+use tracing_web::{MakeConsoleWriter, performance_layer};
+use tracing_subscriber::fmt::format::Pretty;
+use tracing_subscriber::fmt::time::UtcTime;
+use tracing_subscriber::prelude::*;
+
+use ws_stream_wasm::{*};
+
+use crate::requests::{NotarizationSessionRequest, NotarizationSessionResponse, ClientType};
+use crate::requestOpt::{RequestOptions, VerifyResult};
+
+pub use wasm_bindgen_rayon::init_thread_pool;
+// use rayon::iter::IntoParallelRefIterator;
+use rayon::prelude::*;
+
+use wasm_bindgen_futures::JsFuture;
+use web_sys::{Request as WebsysRequest, RequestInit, Headers, RequestMode, Response};
+use js_sys::{JSON, Array};
+use url::Url;
+
+use tlsn_core::proof::{SessionProof, TlsProof};
+use std::time::Duration;
+use elliptic_curve::pkcs8::DecodePublicKey;
+
+// A macro to provide `println!(..)`-style syntax for `console.log` logging.
+macro_rules! log {
+    ( $( $t:tt )* ) => {
+        web_sys::console::log_1(&format!( $( $t )* ).into());
+    }
+}
+
+extern crate console_error_panic_hook;
+
+#[wasm_bindgen]
+extern "C" {
+    #[wasm_bindgen(js_namespace = self)]
+    fn fetch(request: &web_sys::Request) -> js_sys::Promise;
+}
+
+async fn fetch_as_json_string(url: &str, opts: &RequestInit) -> Result<String, JsValue> {
+    let request = WebsysRequest::new_with_str_and_init(url, opts)?;
+    let promise = fetch(&request);
+    let future = JsFuture::from(promise);
+    let resp_value = future.await?;
+    let resp: Response = resp_value.dyn_into().unwrap();
+    let json = JsFuture::from(resp.json()?).await?;
+    let stringified = JSON::stringify(&json).unwrap();
+    Ok(stringified.as_string().unwrap())
+}
+
+#[wasm_bindgen]
+pub async fn prover(
+    targetUrl: &str,
+    val: JsValue,
+    secret_headers: JsValue,
+    secret_body: JsValue,
+) -> Result<String, JsValue> {
+    log!("target_url: {}", targetUrl);
+    let target_url = Url::parse(targetUrl).expect("url must be valid");
+
+    log!("target_url.host: {}", target_url.host().unwrap());
+    let options: RequestOptions = serde_wasm_bindgen::from_value(val).unwrap();
+    log!("done!");
+    log!("options.notary_url: {}", options.notary_url.as_str());
+    // let fmt_layer = tracing_subscriber::fmt::layer()
+    //     .with_ansi(false) // Only partially supported across browsers
+    //     .with_timer(UtcTime::rfc_3339()) // std::time is not available in browsers
+    //     .with_writer(MakeConsoleWriter); // write events to the console
+    // let perf_layer = performance_layer()
+    //     .with_details_from_fields(Pretty::default());
+
+    // tracing_subscriber::registry()
+    //     .with(tracing_subscriber::filter::LevelFilter::DEBUG)
+    //     .with(fmt_layer)
+    //     .with(perf_layer)
+    //     .init(); // Install these as subscribers to tracing events
+
+    // https://github.com/rustwasm/console_error_panic_hook
+    panic::set_hook(Box::new(console_error_panic_hook::hook));
+
+    let start_time = Instant::now();
+
+    /*
+     * Connect Notary with websocket */
+
+    let mut opts = RequestInit::new();
+    log!("method: {}", "POST");
+    opts.method("POST");
+    // opts.method("GET");
+    opts.mode(RequestMode::Cors);
+
+    // set headers
+    let headers = Headers::new().unwrap();
+    let notary_url = Url::parse(options.notary_url.as_str()).expect("url must be valid");
+    let notary_ssl = notary_url.scheme() == "https" || notary_url.scheme() == "wss";
+    let notary_host = notary_url.authority();
+
+    headers.append("Host", notary_host).unwrap();
+    headers.append("Content-Type", "application/json").unwrap();
+    opts.headers(&headers);
+
+    log!("notary_host: {}", notary_host);
+    // set body
+    let payload = serde_json::to_string(&NotarizationSessionRequest {
+        client_type: ClientType::Websocket,
+        max_transcript_size: Some(options.max_transcript_size),
+    })
+    .unwrap();
+    opts.body(Some(&JsValue::from_str(&payload)));
+
+    // url
+    let url = format!(
+        "{}://{}/session",
+        if notary_ssl { "https" } else { "http" },
+        notary_host
+    );
+    log!("Request: {}", url);
+    let rust_string = fetch_as_json_string(&url, &opts).await.unwrap();
+    let notarization_response = serde_json::from_str::<NotarizationSessionResponse>(&rust_string).unwrap();
+    log!("Response: {}", rust_string);
+
+    log!("Notarization response: {:?}", notarization_response,);
+    let notary_wss_url = format!(
+        "{}://{}/notarize?sessionId={}",
+        if notary_ssl { "wss" } else { "ws" },
+        notary_host,
+        notarization_response.session_id
+    );
+    let (mut notary_ws_meta, mut notary_ws_stream) = WsMeta::connect(
+        notary_wss_url,
+        None
+    ).await
+    .expect_throw( "assume the notary ws connection succeeds" );
+    let mut notary_ws_stream_into = notary_ws_stream.into_io();
+
+    /*
+    Connect Application Server with websocket proxy
+    */
+
+    let (mut client_ws_meta, mut client_ws_stream) = WsMeta::connect(
+        options.websocket_proxy_url,
+        None ).await
+    .expect_throw( "assume the client ws connection succeeds" );
+    let mut client_ws_stream_into = client_ws_stream.into_io();
+
+    log!("!@# 0");
+
+    let target_host = target_url.host_str().unwrap();
+    // Basic default prover config
+    let config = ProverConfig::builder()
+        .id(notarization_response.session_id)
+        .server_dns(target_host)
+        .build()
+        .unwrap();
+
+    log!("!@# 1");
+
+    // Create a Prover and set it up with the Notary
+    // This will set up the MPC backend prior to connecting to the server.
+    let prover = Prover::new(config)
+        .setup(notary_ws_stream_into)
+        .await
+        .unwrap();
+
+
+    // Bind the Prover to the server connection.
+    // The returned `mpc_tls_connection` is an MPC TLS connection to the Server: all data written
+    // to/read from it will be encrypted/decrypted using MPC with the Notary.
+    let (mpc_tls_connection, prover_fut) = prover.connect(client_ws_stream_into).await.unwrap();
+
+    log!("!@# 3");
+
+
+    // let prover_task = tokio::spawn(prover_fut);
+    let (prover_sender, prover_receiver) = oneshot::channel();
+    let handled_prover_fut = async {
+        match prover_fut.await {
+            Ok(prover_result) => {
+                // Send the prover
+                let _ = prover_sender.send(prover_result);
+            },
+            Err(err) => {
+                panic!("An error occurred in prover_fut: {:?}", err);
+            }
+        }
+    };
+    spawn_local(handled_prover_fut);
+    log!("!@# 7");
+
+    // Attach the hyper HTTP client to the TLS connection
+    let (mut request_sender, connection) = hyper::client::conn::handshake(mpc_tls_connection.compat())
+        .await
+        .unwrap();
+    log!("!@# 8");
+
+    // Spawn the HTTP task to be run concurrently
+    // let connection_task = tokio::spawn(connection.without_shutdown());
+    let (connection_sender, connection_receiver) = oneshot::channel();
+    let connection_fut = connection.without_shutdown();
+    let handled_connection_fut = async {
+        match connection_fut.await {
+            Ok(connection_result) => {
+                // Send the connection
+                let _ = connection_sender.send(connection_result);
+            },
+            Err(err) => {
+                panic!("An error occurred in connection_task: {:?}", err);
+            }
+        }
+    };
+    spawn_local(handled_connection_fut);
+    log!("!@# 9 - {} request to {}", options.method.as_str(), targetUrl);
+
+    let mut req_with_header = Request::builder()
+        .uri(targetUrl)
+        .method(options.method.as_str());
+
+    for (key, value) in options.headers {
+        log!("adding header: {} - {}", key.as_str(), value.as_str());
+        req_with_header = req_with_header.header(key.as_str(), value.as_str());
+    }
+
+    let req_with_body;
+
+    if options.body.is_empty() {
+        log!("empty body");
+        req_with_body = req_with_header.body(Body::empty());
+    } else {
+        log!("added body - {}", options.body.as_str());
+        req_with_body = req_with_header.body(Body::from(options.body));
+    }
+
+    let unwrapped_request = req_with_body.unwrap();
+
+
+    log!("Starting an MPC TLS connection with the server");
+
+    // Send the request to the Server and get a response via the MPC TLS connection
+    let response = request_sender.send_request(unwrapped_request).await.unwrap();
+
+    log!("Got a response from the server");
+
+    assert!(response.status() == StatusCode::OK);
+
+    log!("Request OK");
+
+    // Pretty printing :)
+    let payload = to_bytes(response.into_body()).await.unwrap().to_vec();
+    let parsed =
+        serde_json::from_str::<serde_json::Value>(&String::from_utf8_lossy(&payload)).unwrap();
+    log!("!@# 10");
+    log!("{}", serde_json::to_string_pretty(&parsed).unwrap());
+    log!("!@# 11");
+
+    // Close the connection to the server
+    // let mut client_socket = connection_task.await.unwrap().unwrap().io.into_inner();
+    let mut client_socket = connection_receiver.await.unwrap().io.into_inner();
+    log!("!@# 12");
+    client_socket.close().await.unwrap();
+    log!("!@# 13");
+
+    // The Prover task should be done now, so we can grab it.
+    // let mut prover = prover_task.await.unwrap().unwrap();
+    let mut prover = prover_receiver.await.unwrap();
+    let mut prover = prover.start_notarize();
+    log!("!@# 14");
+
+    let secret_headers_vecs = string_list_to_bytes_vec(&secret_headers);
+    let secret_headers_slices: Vec<&[u8]> = secret_headers_vecs.iter().map(|vec| vec.as_slice()).collect();
+
+    // Identify the ranges in the sent transcript that contain the secret headers
+    let (sent_public_ranges, sent_private_ranges) = find_ranges(
+        prover.sent_transcript().data(),
+        secret_headers_slices.as_slice(),
+    );
+
+    let secret_body_vecs = string_list_to_bytes_vec(&secret_body);
+    let secret_body_slices: Vec<&[u8]> = secret_body_vecs.iter().map(|vec| vec.as_slice()).collect();
+
+    // Identify the ranges in the received transcript that contain the secret response substrings
+    let (recv_public_ranges, recv_private_ranges) = find_ranges(
+        prover.recv_transcript().data(),
+        secret_body_slices.as_slice(),
+    );
+    log!("!@# 15");
+
+    let recv_len = prover.recv_transcript().data().len();
+
+    let builder = prover.commitment_builder();
+
+    // Commit to the outbound and inbound transcript, isolating the data that contain secrets
+    let sent_pub_commitment_ids = sent_public_ranges
+        .iter()
+        .map(|range| builder.commit_sent(range.clone()).unwrap())
+        .collect::<Vec<_>>();
+
+    sent_private_ranges.iter().for_each(|range| {
+        builder.commit_sent(range.clone()).unwrap();
+    });
+
+    let recv_pub_commitment_ids = recv_public_ranges
+        .iter()
+        .map(|range| builder.commit_recv(range.clone()).unwrap())
+        .collect::<Vec<_>>();
+
+    recv_private_ranges.iter().for_each(|range| {
+        builder.commit_recv(range.clone()).unwrap();
+    });
+
+    // Finalize, returning the notarized session
+    let notarized_session = prover.finalize().await.unwrap();
+
+    log!("Notarization complete!");
+
+    // Create a proof for all committed data in this session
+    let session_proof = notarized_session.session_proof();
+
+    let mut proof_builder = notarized_session.data().build_substrings_proof();
+
+    // Reveal everything except the redacted secret headers and secret response substrings
+    sent_pub_commitment_ids
+        .iter()
+        .chain(recv_pub_commitment_ids.iter())
+        .for_each(|id| {
+            proof_builder.reveal(*id).unwrap();
+        });
+
+    let substrings_proof = proof_builder.build().unwrap();
+
+    let proof = TlsProof {
+        session: session_proof,
+        substrings: substrings_proof,
+    };
+
+    let res = serde_json::to_string_pretty(&proof).unwrap();
+
+    let duration = start_time.elapsed();
+    log!("!@# request takes: {} seconds", duration.as_secs());
+
+    Ok(res)
+
+}
+
+#[wasm_bindgen]
+pub async fn verify(
+    proof: &str,
+    notary_pubkey_str: &str,
+) -> Result<String, JsValue> {
+    log!("!@# proof {}", proof);
+    let proof: TlsProof = serde_json::from_str(proof).unwrap();
+
+    let TlsProof {
+        // The session proof establishes the identity of the server and the commitments
+        // to the TLS transcript.
+        session,
+        // The substrings proof proves select portions of the transcript, while redacting
+        // anything the Prover chose not to disclose.
+        substrings,
+    } = proof;
+
+
+    log!("!@# notary_pubkey {}, {}", notary_pubkey_str, notary_pubkey_str.len());
+    session
+        .verify_with_default_cert_verifier(get_notary_pubkey(notary_pubkey_str))
+        .unwrap();
+
+
+    let SessionProof {
+        // The session header that was signed by the Notary is a succinct commitment to the TLS transcript.
+        header,
+        // This is the server name, checked against the certificate chain shared in the TLS handshake.
+        server_name,
+        ..
+    } = session;
+
+    // The time at which the session was recorded
+    let time = chrono::DateTime::UNIX_EPOCH + Duration::from_secs(header.time());
+
+    // Verify the substrings proof against the session header.
+    //
+    // This returns the redacted transcripts
+    let (mut sent, mut recv) = substrings.verify(&header).unwrap();
+
+    // Replace the bytes which the Prover chose not to disclose with 'X'
+    sent.set_redacted(b'X');
+    recv.set_redacted(b'X');
+
+    log!("-------------------------------------------------------------------");
+    log!(
+        "Successfully verified that the bytes below came from a session with {:?} at {}.",
+        server_name, time
+    );
+    log!("Note that the bytes which the Prover chose not to disclose are shown as X.");
+    log!("Bytes sent:");
+    log!("{}", String::from_utf8(sent.data().to_vec()).unwrap());
+    log!("Bytes received:");
+    log!("{}", String::from_utf8(recv.data().to_vec()).unwrap());
+    log!("-------------------------------------------------------------------");
+
+    let result = VerifyResult {
+        server_name: String::from(server_name.as_str()),
+        time: header.time(),
+        sent: String::from_utf8(sent.data().to_vec()).unwrap(),
+        recv: String::from_utf8(recv.data().to_vec()).unwrap(),
+    };
+    let res = serde_json::to_string_pretty(&result).unwrap();
+
+    Ok(res)
+}
+
+fn print_type_of<T>(_: &T) {
+    log!("{}", std::any::type_name::<T>())
+}
+
+/// Returns a Notary pubkey trusted by this Verifier
+fn get_notary_pubkey(pubkey: &str) -> p256::PublicKey {
+    // from https://github.com/tlsnotary/notary-server/tree/main/src/fixture/notary/notary.key
+    // converted with `openssl ec -in notary.key -pubout -outform PEM`
+    p256::PublicKey::from_public_key_pem(pubkey).unwrap()
+}
+
+/// Find the ranges of the public and private parts of a sequence.
+///
+/// Returns a tuple of `(public, private)` ranges.
+fn find_ranges(seq: &[u8], private_seq: &[&[u8]]) -> (Vec<Range<usize>>, Vec<Range<usize>>) {
+    let mut private_ranges = Vec::new();
+    for s in private_seq {
+        for (idx, w) in seq.windows(s.len()).enumerate() {
+            if w == *s {
+                private_ranges.push(idx..(idx + w.len()));
+            }
+        }
+    }
+
+    let mut sorted_ranges = private_ranges.clone();
+    sorted_ranges.sort_by_key(|r| r.start);
+
+    let mut public_ranges = Vec::new();
+    let mut last_end = 0;
+    for r in sorted_ranges {
+        if r.start > last_end {
+            public_ranges.push(last_end..r.start);
+        }
+        last_end = r.end;
+    }
+
+    if last_end < seq.len() {
+        public_ranges.push(last_end..seq.len());
+    }
+
+    (public_ranges, private_ranges)
+}
+
+fn string_list_to_bytes_vec(secrets: &JsValue) -> Vec<Vec<u8>> {
+    let array: Array = Array::from(secrets);
+    let length = array.length();
+    let mut byte_slices: Vec<Vec<u8>> = Vec::new();
+
+    for i in 0..length {
+        let secret_js: JsValue = array.get(i);
+        let secret_str: String = secret_js.as_string().unwrap();
+        let secret_bytes = secret_str.into_bytes();
+        byte_slices.push(secret_bytes);
+    }
+    byte_slices
+}
diff --git a/wasm/prover/src/requestOpt.rs b/wasm/prover/src/requestOpt.rs
new file mode 100644
index 0000000..2114bf9
--- /dev/null
+++ b/wasm/prover/src/requestOpt.rs
@@ -0,0 +1,29 @@
+use std::{collections::HashMap};
+use serde::{Deserialize, Serialize};
+
+/// Request Options of Fetch API
+// https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RequestOptions {
+    pub method: String, // *GET, POST, PUT, DELETE, etc.
+    // pub mode: String, // no-cors, *cors, same-origin
+    // pub cache: String, // *default, no-cache, reload, force-cache, only-if-cached
+    // pub credentials: String, // include, *same-origin, omit
+    pub headers: HashMap<String, String>,
+    // pub redirect: String, // manual, *follow, error
+    // pub referrer_policy: String, // no-referrer, *no-referrer-when-downgrade, origin, origin-when-cross-origin, same-origin, strict-origin, strict-origin-when-cross-origin, unsafe-url
+    pub body: String, // body data type must match "Content-Type" header
+    pub max_transcript_size: usize,
+    pub notary_url: String,
+    pub websocket_proxy_url: String,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct VerifyResult {
+    pub server_name: String,
+    pub time: u64,
+    pub sent: String,
+    pub recv: String,
+}
\ No newline at end of file
diff --git a/wasm/prover/src/requests.rs b/wasm/prover/src/requests.rs
new file mode 100644
index 0000000..75c66ab
--- /dev/null
+++ b/wasm/prover/src/requests.rs
@@ -0,0 +1,29 @@
+use std::{collections::HashMap, sync::Arc};
+
+use serde::{Deserialize, Serialize};
+
+/// Response object of the /session API
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct NotarizationSessionResponse {
+    /// Unique session id that is generated by notary and shared to prover
+    pub session_id: String,
+}
+
+/// Request object of the /session API
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct NotarizationSessionRequest {
+    pub client_type: ClientType,
+    /// Maximum transcript size in bytes
+    pub max_transcript_size: Option<usize>,
+}
+
+/// Types of client that the prover is using
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub enum ClientType {
+    /// Client that has access to the transport layer
+    Tcp,
+    /// Client that cannot directly access transport layer, e.g. browser extension
+    Websocket,
+}
diff --git a/webpack.test.config.js b/webpack.test.config.js
index bbac9bb..5b9f424 100644
--- a/webpack.test.config.js
+++ b/webpack.test.config.js
@@ -41,10 +41,10 @@ module.exports = [
     target: 'web',
     mode: isProd ? 'production' : 'development',
     entry: {
-      app: path.join(__dirname, 'src', 'app.tsx'),
+      test: path.join(__dirname, 'utils', 'test.ts'),
     },
     output: {
-      path: __dirname + '/build',
+      path: __dirname + '/test-build',
       publicPath: isProd ? '/' : 'http://localhost:8080/',
       filename: `[name].js`,
     },
@@ -91,8 +91,8 @@
     devServer: {
       historyApiFallback: true,
      proxy: {
-        '/rest': {
-          target: `http://127.0.0.1:8080`,
+        '/ns': {
+          target: `https://127.0.0.1:7074`,
          secure: true,
        },
      },