Compare commits

...

20 Commits

Author     SHA1        Message                                              Date
tsukino    8dd5f8c9a2  add interactive verifier demo                        2025-07-22 20:49:18 +08:00
q          75f5690f74  remove dead code                                     2025-07-15 08:40:10 +08:00
q          3b233c18c2  wip: interactive verifier demo                       2025-07-15 08:35:38 +08:00
yuroitaki  ef6180c313  Change naming.                                       2025-06-20 10:51:26 +08:00
yuroitaki  1a80ef75f8  Merge branch 'dev' into poc/verifier-server          2025-06-20 10:36:32 +08:00
yuroitaki  ba0056c8db  Revert to enable wasi.                               2025-06-18 17:17:58 +08:00
yuroitaki  c790b2482a  Merge in dev                                         2025-06-18 16:35:58 +08:00
yuroitaki  0db2e6b48f  Change rust target to wasm32-unknown                 2025-06-18 15:16:05 +08:00
yuroitaki  0c2cc6e466  Add function to load multiple plugins.               2025-06-17 16:55:06 +08:00
yuroitaki  2c048e92ed  Fix typo.                                            2025-06-13 20:52:43 +08:00
yuroitaki  6c8cf8a182  Chmod rust prepare.                                  2025-06-13 20:48:41 +08:00
yuroitaki  59781d1293  Switch to rust plugin.                               2025-06-13 20:43:46 +08:00
yuroitaki  43fd4d34b5  Add schema generator.                                2025-06-13 20:28:03 +08:00
yuroitaki  4a604c98ce  Add prover example for plugin, verification logic.   2025-06-10 19:45:31 +08:00
yuroitaki  126ba26648  Revert Cargo.lock                                    2025-06-10 10:56:55 +08:00
yuroitaki  2e571b0684  Merge                                                2025-06-09 20:34:54 +08:00
yuroitaki  4e0141f993  Lock change.                                         2025-06-09 20:33:56 +08:00
yuroitaki  c9ca87f4b4  Merge in dev                                         2025-06-09 20:18:14 +08:00
yuroitaki  7e631de84a  Run dummy plugin as part of request.                 2025-06-06 20:09:12 +08:00
yuroitaki  02cdbb8130  Setup extism.                                        2025-06-04 15:14:24 +08:00
44 changed files with 4287 additions and 266 deletions

.cargo/config.toml (new file, +10)

@@ -0,0 +1,10 @@
[target.wasm32-unknown-unknown]
rustflags = [
"-C",
"target-feature=+atomics,+bulk-memory,+mutable-globals",
"-A",
"unused_qualifications"
]
[unstable]
build-std = ["panic_abort", "std"]

Cargo.lock (generated; 1907 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,2 +1,3 @@
// Ignore files from examples.
*.tlsn
*.tlsn
*.sh

View File

@@ -9,6 +9,7 @@ workspace = true
[dependencies]
notary-client = { workspace = true }
notary-common = { workspace = true }
tlsn-common = { workspace = true }
tlsn-core = { workspace = true }
tlsn-prover = { workspace = true }
@@ -20,6 +21,7 @@ tlsn-server-fixture = { workspace = true }
tlsn-server-fixture-certs = { workspace = true }
spansy = { workspace = true }
async-tungstenite = { workspace = true, features = ["tokio-runtime"] }
bincode = { workspace = true }
chrono = { workspace = true }
clap = { version = "4.5", features = ["derive"] }
@@ -30,6 +32,7 @@ hex = { workspace = true }
hyper = { workspace = true, features = ["client", "http1"] }
hyper-util = { workspace = true, features = ["full"] }
k256 = { workspace = true, features = ["ecdsa"] }
rangeset = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
"rt",
@@ -42,6 +45,8 @@ tokio = { workspace = true, features = [
tokio-util = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
uuid = { workspace = true, features = ["v4", "fast-rng"] }
ws_stream_tungstenite = { workspace = true, features = ["tokio_io"] }
[[example]]
name = "attestation_prove"
@@ -58,3 +63,7 @@ path = "attestation/verify.rs"
[[example]]
name = "interactive"
path = "interactive/interactive.rs"
[[example]]
name = "plugin"
path = "plugin/plugin.rs"

View File

@@ -0,0 +1,302 @@
use async_tungstenite::{tokio::connect_async_with_config, tungstenite::protocol::WebSocketConfig};
use clap::Parser;
use http_body_util::{BodyExt, Empty, Full};
use hyper::{body::Bytes, Request, StatusCode, Uri};
use hyper_util::{
client::legacy::connect::HttpConnector,
rt::{TokioExecutor, TokioIo},
};
use notary_client::{Accepted, NotarizationRequest, NotaryClient};
use notary_common::{ClientType, NotarizationSessionRequest, NotarizationSessionResponse};
use rangeset::RangeSet;
use spansy::{
http::parse_response,
json::{self},
Spanned,
};
use std::env;
use tlsn_common::config::ProtocolConfig;
use tlsn_core::{
hash::HashAlgId,
transcript::{TranscriptCommitConfig, TranscriptCommitmentKind},
ProveConfig,
};
use tlsn_prover::{Prover, ProverConfig};
use tokio::io::{AsyncRead, AsyncWrite};
use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
use tracing::{debug, info};
use ws_stream_tungstenite::WsStream;
const SECRET: &str = "TLSNotary's private key 🤡";
const SERVER_URL: &str = "https://raw.githubusercontent.com/tlsnotary/tlsn/refs/tags/v0.1.0-alpha.11/crates/server-fixture/server/src/data/1kb.json";
#[derive(clap::ValueEnum, Clone, Default, Debug)]
pub enum ProverType {
#[default]
Tcp,
Ws,
}
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
/// What data to notarize.
#[clap(default_value_t, value_enum)]
prover_type: ProverType,
/// Name of the verifier plugin to use; the available names can be fetched from the verifier server's `/info` endpoint.
#[clap(long, default_value = "plugin_rs")]
plugin: String,
}
#[tokio::main]
async fn main() {
tracing_subscriber::fmt::init();
let verifier_host: String = env::var("VERIFIER_HOST").unwrap_or("127.0.0.1".into());
let verifier_port: u16 = env::var("VERIFIER_PORT")
.map(|port| port.parse().expect("port should be valid integer"))
.unwrap_or(7047);
let args = Args::parse();
match args.prover_type {
ProverType::Tcp => run_tcp_prover(&verifier_host, verifier_port, &args.plugin).await,
ProverType::Ws => run_ws_prover(&verifier_host, verifier_port, &args.plugin).await,
}
}
async fn run_ws_prover(verifier_host: &str, verifier_port: u16, plugin_name: &str) {
info!("Running websocket prover...");
// Build the HTTP request to configure notarization
let payload = serde_json::to_string(&NotarizationSessionRequest {
client_type: ClientType::Websocket,
plugin: plugin_name.to_string(),
max_sent_data: Some(tlsn_examples::MAX_SENT_DATA),
max_recv_data: Some(tlsn_examples::MAX_RECV_DATA),
})
.unwrap();
let session_request = Request::builder()
.uri(format!("http://{verifier_host}:{verifier_port}/session"))
.method("POST")
.header("Host", verifier_host)
// Need to specify application/json for axum to parse it as json
.header("Content-Type", "application/json")
.body(Full::new(Bytes::from(payload)))
.unwrap();
let hyper_http_connector: HttpConnector = HttpConnector::new();
let http_client =
hyper_util::client::legacy::Builder::new(TokioExecutor::new()).build(hyper_http_connector);
let response = http_client.request(session_request).await.unwrap();
assert!(response.status() == StatusCode::OK);
let payload = response.into_body().collect().await.unwrap().to_bytes();
let notarization_response =
serde_json::from_str::<NotarizationSessionResponse>(&String::from_utf8_lossy(&payload))
.unwrap();
debug!("Session request response: {:?}", notarization_response,);
let verification_request = hyper::Request::builder()
.uri(format!(
"ws://{verifier_host}:{verifier_port}/notarize?sessionId={}",
notarization_response.session_id
))
.header("Host", verifier_host)
.header("Sec-WebSocket-Key", uuid::Uuid::new_v4().to_string())
.header("Sec-WebSocket-Version", "13")
.header("Connection", "Upgrade")
.header("Upgrade", "Websocket")
.body(())
.unwrap();
let (verifier_ws_stream, _) =
connect_async_with_config(verification_request, Some(WebSocketConfig::default()))
.await
.unwrap();
info!("Websocket connection established!");
let verifier_ws_socket = WsStream::new(verifier_ws_stream);
prover(verifier_ws_socket, SERVER_URL).await;
info!("Websocket proving is successful!");
}
async fn run_tcp_prover(verifier_host: &str, verifier_port: u16, plugin_name: &str) {
info!("Running tcp prover...");
// Build a tcp client to connect to the verifier server.
let verifier_client = NotaryClient::builder()
.host(verifier_host)
.port(verifier_port)
// WARNING: Always use TLS to connect to the verifier server, unless the verifier is running locally
// as in this example, in which case `enable_tls` is set to `false` (otherwise it defaults to `true`).
.enable_tls(false)
.build()
.unwrap();
// Send requests for configuration and verification to the verifier server.
let verification_request = NotarizationRequest::builder()
// We must configure the amount of data we expect to exchange beforehand, which will
// be preprocessed prior to the connection. Reducing these limits will improve
// performance.
.max_sent_data(tlsn_examples::MAX_SENT_DATA)
.max_recv_data(tlsn_examples::MAX_RECV_DATA)
.plugin(plugin_name.to_string())
.build()
.unwrap();
let Accepted {
io: verifier_connection,
id: _session_id,
..
} = verifier_client
.request_notarization(verification_request)
.await
.expect("Could not connect to verifier. Make sure it is running.");
info!("Tcp connection established!");
prover(verifier_connection, SERVER_URL).await;
info!("Tcp proving is successful!");
}
async fn prover<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(verifier_socket: T, uri: &str) {
debug!("Starting proving...");
let uri = uri.parse::<Uri>().unwrap();
assert_eq!(uri.scheme().unwrap().as_str(), "https");
let server_domain = uri.authority().unwrap().host();
let server_port = uri.port_u16().unwrap_or(443);
// Create prover and connect to verifier.
//
// Perform the setup phase with the verifier.
let prover = Prover::new(
ProverConfig::builder()
.server_name(server_domain)
.protocol_config(
ProtocolConfig::builder()
.max_sent_data(tlsn_examples::MAX_SENT_DATA)
.max_recv_data(tlsn_examples::MAX_RECV_DATA)
.build()
.unwrap(),
)
.build()
.unwrap(),
)
.setup(verifier_socket.compat())
.await
.unwrap();
// Connect to TLS Server.
let tls_client_socket = tokio::net::TcpStream::connect((server_domain, server_port))
.await
.unwrap();
// Pass server connection into the prover.
let (mpc_tls_connection, prover_fut) =
prover.connect(tls_client_socket.compat()).await.unwrap();
// Wrap the connection in a TokioIo compatibility layer to use it with hyper.
let mpc_tls_connection = TokioIo::new(mpc_tls_connection.compat());
// Spawn the Prover to run in the background.
let prover_task = tokio::spawn(prover_fut);
// MPC-TLS Handshake.
let (mut request_sender, connection) =
hyper::client::conn::http1::handshake(mpc_tls_connection)
.await
.unwrap();
tokio::spawn(connection);
// MPC-TLS: Send Request and wait for Response.
info!("Send Request and wait for Response");
let request = Request::builder()
.uri(uri.clone())
.header("Host", server_domain)
.header("Connection", "close")
.header("Secret", SECRET)
.method("GET")
.body(Empty::<Bytes>::new())
.unwrap();
let response = request_sender.send_request(request).await.unwrap();
debug!("TLS response: {:?}", response);
assert!(response.status() == StatusCode::OK);
// Create proof for the Verifier.
let mut prover = prover_task.await.unwrap().unwrap();
let (sent_len, recv_len) = prover.transcript().len();
let mut builder = TranscriptCommitConfig::builder(prover.transcript());
builder.default_kind(TranscriptCommitmentKind::Hash {
alg: HashAlgId::SHA256,
});
builder.commit_sent(&(0..sent_len)).unwrap();
builder.commit_recv(&(0..recv_len)).unwrap();
let transcript_commit = builder.build().unwrap();
let mut builder: tlsn_core::ProveConfigBuilder<'_> = ProveConfig::builder(prover.transcript());
// Reveal the DNS name.
builder.server_identity();
let sent_rangeset = redact_and_reveal_sent_data(prover.transcript().sent());
let _ = builder.reveal_sent(&sent_rangeset);
let recv_rangeset = redact_and_reveal_received_data(prover.transcript().received());
let _ = builder.reveal_recv(&recv_rangeset);
builder.transcript_commit(transcript_commit);
let config = builder.build().unwrap();
prover.prove(&config).await.unwrap();
prover.close().await.unwrap();
}
/// Redacts and reveals received data to the verifier.
fn redact_and_reveal_received_data(recv_transcript: &[u8]) -> RangeSet<usize> {
// Get some information from the received data.
let received_string = String::from_utf8(recv_transcript.to_vec()).unwrap();
debug!("Received data: {}", received_string);
let resp = parse_response(recv_transcript).unwrap();
let body = resp.body.unwrap();
let mut json = json::parse_slice(body.as_bytes()).unwrap();
json.offset(body.content.span().indices().min().unwrap());
let name = json.get("information.name").expect("name field not found");
let street = json
.get("information.address.street")
.expect("street field not found");
let name_start = name.span().indices().min().unwrap() - 9; // 9 is the length of the preceding `"name": "`
let name_end = name.span().indices().max().unwrap() + 1; // include `"`
let street_start = street.span().indices().min().unwrap() - 11; // 11 is the length of the preceding `"street": "`
let street_end = street.span().indices().max().unwrap() + 1; // include `"`
[name_start..name_end + 1, street_start..street_end + 1].into()
}
/// Redacts and reveals sent data to the verifier.
fn redact_and_reveal_sent_data(sent_transcript: &[u8]) -> RangeSet<usize> {
let sent_transcript_len = sent_transcript.len();
let sent_string: String = String::from_utf8(sent_transcript.to_vec()).unwrap();
let secret_start = sent_string.find(SECRET).unwrap();
debug!("Send data: {}", sent_string);
// Reveal everything except for the SECRET.
[
0..secret_start,
secret_start + SECRET.len()..sent_transcript_len,
]
.into()
}
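Usage note, not part of this changeset: the `[[example]]` entry added to Cargo.toml above registers this file as the `plugin` example, so it would typically be run from the examples crate via cargo's `--example plugin`, with the prover type passed as a positional argument (`tcp` by default, or `ws`) and the verifier plugin selected with `--plugin` (default `plugin_rs`); `VERIFIER_HOST` and `VERIFIER_PORT` override the default verifier address of 127.0.0.1:7047. The exact command line is an assumption, and a verifier server exposing the `/session` and `/notarize` endpoints must already be running.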

View File

@@ -37,6 +37,8 @@ use crate::error::{ClientError, ErrorKind};
/// Parameters used to configure notarization.
#[derive(Debug, Clone, derive_builder::Builder)]
pub struct NotarizationRequest {
/// Name of verifier plugin that prover wants to interact with.
plugin: String,
/// Maximum number of bytes that can be sent.
max_sent_data: usize,
/// Maximum number of bytes that can be received.
@@ -274,6 +276,7 @@ impl NotaryClient {
let configuration_request_payload =
serde_json::to_string(&NotarizationSessionRequest {
client_type: ClientType::Tcp,
plugin: notarization_request.plugin,
max_sent_data: Some(notarization_request.max_sent_data),
max_recv_data: Some(notarization_request.max_recv_data),
})

View File

@@ -14,10 +14,12 @@ pub enum ClientType {
}
/// Request object of the /session API.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NotarizationSessionRequest {
pub client_type: ClientType,
/// Name of verifier plugin that prover wants to interact with.
pub plugin: String,
/// Maximum data that can be sent by the prover.
pub max_sent_data: Option<usize>,
/// Maximum data that can be received by the prover.
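For illustration only, not part of the diff: with the new `plugin` field, a prover's session request serializes roughly as in the sketch below. The limits and plugin name are placeholder values, and the exact representation of `clientType` depends on `ClientType`'s own serde attributes, which are not shown in this hunk.

use notary_common::{ClientType, NotarizationSessionRequest};

fn main() {
    // Placeholder values mirroring the prover example earlier in this diff.
    let request = NotarizationSessionRequest {
        client_type: ClientType::Tcp,
        plugin: "plugin_rs".to_string(),
        max_sent_data: Some(1 << 12),
        max_recv_data: Some(1 << 14),
    };
    // With `rename_all = "camelCase"` this prints something like:
    // {"clientType":"Tcp","plugin":"plugin_rs","maxSentData":4096,"maxRecvData":16384}
    println!("{}", serde_json::to_string(&request).unwrap());
}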

crates/notary/plugin/.gitignore (vendored, new file, +1)

@@ -0,0 +1 @@
wasm/

crates/notary/plugin/js/.gitignore (vendored, new file, +46)

@@ -0,0 +1,46 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
*.swp
pids
logs
results
tmp
# Build
public/css/main.css
# Coverage reports
coverage
# API keys and secrets
.env
# Dependency directory
node_modules
bower_components
# Editors
.idea
*.iml
# OS metadata
.DS_Store
Thumbs.db
# Ignore built ts files
dist/**/*
# ignore yarn.lock
yarn.lock
# include relevant json files
!package.json
!package-lock.json
!tsconfig.json

View File

@@ -0,0 +1,4 @@
node_modules
dist
.git

View File

View File

@@ -0,0 +1,12 @@
const esbuild = require('esbuild');
esbuild
.build({
entryPoints: ['src/index.ts'],
outdir: 'dist',
bundle: true,
sourcemap: true,
minify: false, // might want to use true for production build
format: 'cjs', // needs to be CJS for now
target: ['es2020'], // don't go over es2020 because quickjs doesn't support it
})

crates/notary/plugin/js/package-lock.json (generated, new file, +550)

@@ -0,0 +1,550 @@
{
"name": "plugin-js",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "plugin-js",
"version": "1.0.0",
"license": "UNLICENSED",
"devDependencies": {
"@extism/js-pdk": "^1.0.1",
"esbuild": "^0.25.0",
"prettier": "^3.3.2",
"prettier-plugin-organize-imports": "^4.1.0",
"typescript": "^5.3.2"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz",
"integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.5.tgz",
"integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz",
"integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.5.tgz",
"integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz",
"integrity": "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz",
"integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz",
"integrity": "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz",
"integrity": "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz",
"integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz",
"integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz",
"integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz",
"integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz",
"integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz",
"integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz",
"integrity": "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz",
"integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz",
"integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz",
"integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz",
"integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz",
"integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz",
"integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz",
"integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz",
"integrity": "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz",
"integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz",
"integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@extism/js-pdk": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@extism/js-pdk/-/js-pdk-1.1.1.tgz",
"integrity": "sha512-VZLn/dX0ttA1uKk2PZeR/FL3N+nA1S5Vc7E5gdjkR60LuUIwCZT9cYON245V4HowHlBA7YOegh0TLjkx+wNbrA==",
"dev": true,
"license": "BSD-Clause-3",
"dependencies": {
"urlpattern-polyfill": "^8.0.2"
}
},
"node_modules/esbuild": {
"version": "0.25.5",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz",
"integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.25.5",
"@esbuild/android-arm": "0.25.5",
"@esbuild/android-arm64": "0.25.5",
"@esbuild/android-x64": "0.25.5",
"@esbuild/darwin-arm64": "0.25.5",
"@esbuild/darwin-x64": "0.25.5",
"@esbuild/freebsd-arm64": "0.25.5",
"@esbuild/freebsd-x64": "0.25.5",
"@esbuild/linux-arm": "0.25.5",
"@esbuild/linux-arm64": "0.25.5",
"@esbuild/linux-ia32": "0.25.5",
"@esbuild/linux-loong64": "0.25.5",
"@esbuild/linux-mips64el": "0.25.5",
"@esbuild/linux-ppc64": "0.25.5",
"@esbuild/linux-riscv64": "0.25.5",
"@esbuild/linux-s390x": "0.25.5",
"@esbuild/linux-x64": "0.25.5",
"@esbuild/netbsd-arm64": "0.25.5",
"@esbuild/netbsd-x64": "0.25.5",
"@esbuild/openbsd-arm64": "0.25.5",
"@esbuild/openbsd-x64": "0.25.5",
"@esbuild/sunos-x64": "0.25.5",
"@esbuild/win32-arm64": "0.25.5",
"@esbuild/win32-ia32": "0.25.5",
"@esbuild/win32-x64": "0.25.5"
}
},
"node_modules/prettier": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
},
"node_modules/prettier-plugin-organize-imports": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-4.1.0.tgz",
"integrity": "sha512-5aWRdCgv645xaa58X8lOxzZoiHAldAPChljr/MT0crXVOWTZ+Svl4hIWlz+niYSlO6ikE5UXkN1JrRvIP2ut0A==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"prettier": ">=2.0",
"typescript": ">=2.9",
"vue-tsc": "^2.1.0"
},
"peerDependenciesMeta": {
"vue-tsc": {
"optional": true
}
}
},
"node_modules/typescript": {
"version": "5.8.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz",
"integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/urlpattern-polyfill": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz",
"integrity": "sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==",
"dev": true,
"license": "MIT"
}
}
}

View File

@@ -0,0 +1,21 @@
{
"name": "plugin-js",
"version": "1.0.0",
"description": "",
"main": "src/index.ts",
"scripts": {
"build": "npx tsc --noEmit && node esbuild.js && extism-js dist/index.js -i src/index.d.ts -o ../wasm/plugin-js.wasm",
"format": "npx prettier --write \"src/**/*.{ts,tsx}\" --plugin=prettier-plugin-organize-imports"
},
"keywords": [],
"author": "",
"private": true,
"license": "UNLICENSED",
"devDependencies": {
"@extism/js-pdk": "^1.0.1",
"esbuild": "^0.25.0",
"typescript": "^5.3.2",
"prettier": "^3.3.2",
"prettier-plugin-organize-imports": "^4.1.0"
}
}

View File

@@ -0,0 +1,87 @@
#!/bin/bash
# Function to check if a command exists
command_exists () {
command -v "$1" >/dev/null 2>&1
}
missing_deps=0
# Check for Node.js
if ! (command_exists node || command_exists nodejs); then
missing_deps=1
echo "❌ Node.js is not installed."
echo ""
echo "To install Node.js, visit the official download page:"
echo "👉 https://nodejs.org/en/download/"
echo ""
echo "Or install it using a package manager:"
echo ""
echo "🔹 macOS (Homebrew):"
echo " brew install node"
echo ""
echo "🔹 Ubuntu/Debian:"
echo " curl -fsSL https://deb.nodesource.com/setup_current.x | sudo -E bash -"
echo " sudo apt-get install -y nodejs"
echo ""
echo "🔹 CentOS/RHEL:"
echo " curl -fsSL https://rpm.nodesource.com/setup_current.x | sudo bash -"
echo " sudo yum install -y nodejs"
echo ""
echo "🔹 Arch Linux:"
echo " sudo pacman -S nodejs"
echo ""
fi
# Check for npm
if ! command_exists npm; then
missing_deps=1
echo "❌ npm is not installed."
echo ""
echo "npm typically comes with Node.js. Please install Node.js to get npm."
echo ""
echo "Alternatively, install npm using a package manager:"
echo ""
echo "🔹 macOS (Homebrew):"
echo " brew install npm"
echo ""
echo "🔹 Ubuntu/Debian:"
echo " sudo apt-get install npm"
echo ""
echo "🔹 CentOS/RHEL:"
echo " sudo yum install npm"
echo ""
echo "🔹 Arch Linux:"
echo " sudo pacman -S npm"
echo ""
fi
# Exit with a bad exit code if any dependencies are missing
if [ "$missing_deps" -ne 0 ]; then
echo "Install the missing dependencies and ensure they are on your path. Then run this command again."
# TODO: remove sleep when cli bug is fixed
sleep 2
exit 1
fi
# Check for extism-js
if ! command_exists extism-js; then
echo "❌ extism-js is not installed."
echo ""
echo "extism-js is needed to compile the plug-in. You can find the instructions to install it here: https://github.com/extism/js-pdk"
echo ""
echo "Alternatively, you can use an install script."
echo ""
echo "🔹 Mac / Linux:"
echo "curl -L https://raw.githubusercontent.com/extism/js-pdk/main/install.sh | bash"
echo ""
echo "🔹 Windows:"
echo "powershell Invoke-WebRequest -Uri https://raw.githubusercontent.com/extism/js-pdk/main/install-windows.ps1 -OutFile install-windows.ps1"
echo "powershell -executionpolicy bypass -File .\install-windows.ps1"
echo ""
# TODO: remove sleep when cli bug is fixed
sleep 2
exit 1
fi
npm install

crates/notary/plugin/js/src/index.d.ts (vendored, new executable file, +4)

@@ -0,0 +1,4 @@
declare module "main" {
export function config(): I32;
export function verify(): I32;
}

View File

@@ -0,0 +1,24 @@
import * as main from "./main";
import { PluginOutput, PluginVerifierConfig, VerifierOutput } from "./pdk";
export function config(): number {
const output = main.configImpl();
console.log(`configImpl untyped output: ${JSON.stringify(output)}`);
const untypedOutput = PluginVerifierConfig.toJson(output);
Host.outputString(JSON.stringify(untypedOutput));
return 0;
}
export function verify(): number {
const untypedInput = JSON.parse(Host.inputString());
const input = VerifierOutput.fromJson(untypedInput);
const output = main.verifyImpl(input);
console.log(`verifyImpl untyped output: ${JSON.stringify(output)}`);
Host.outputString(JSON.stringify(output));
return 0;
}

View File

@@ -0,0 +1,83 @@
import { PluginOutput, PluginVerifierConfig, VerifierOutput } from "./pdk";
const SERVER_DOMAIN = "api.x.com";
/**
* Returns the verifier configuration.
* The configuration is used to initialize the verifier in the host.
*
* @returns {PluginVerifierConfig}
*/
export function configImpl(): PluginVerifierConfig {
console.log("Composing verifier configuration...");
return new PluginVerifierConfig();
}
/**
* Verifies the output from the TLS verifier.
* This function is called after the MPC-TLS verification is complete
* and allows the plugin to perform custom verification logic.
*
* @param {VerifierOutput} input - Output from the MPC-TLS verification.
* @returns {PluginOutput}
*/
export function verifyImpl(input: VerifierOutput): PluginOutput {
console.log("Starting verification...");
const { serverName, transcript, transcriptCommitments } = input;
console.log(
"Transcript commitments:",
JSON.stringify(transcriptCommitments, null, 2),
);
if (!transcript) {
throw new Error("prover should have revealed transcript data");
}
if (!serverName) {
throw new Error("prover should have revealed server name");
}
// Check sent data: check host.
console.log("Starting sent data verification...");
const sent = new Uint8Array(transcript.sent);
const sentData = new TextDecoder().decode(sent);
if (!sentData.includes(SERVER_DOMAIN)) {
throw new Error(`Verification failed: Expected host ${SERVER_DOMAIN}`);
}
// Check received data: check that the expected "screen_name" field is present.
console.log("Starting received data verification...");
const received = new Uint8Array(transcript.received);
const response = new TextDecoder().decode(received);
if (!response.includes("screen_name")) {
throw new Error("Verification failed: missing data in received data");
}
// Check Session info: server name.
if (serverName !== SERVER_DOMAIN) {
throw new Error("Verification failed: server name mismatches");
}
const match = response.match(/"screen_name":"([^"]+)"/);
const screenName = match ? match[1] : "";
const sentString = bytesToRedactedString(sent);
const receivedString = bytesToRedactedString(received);
console.log(`Successfully verified ${SERVER_DOMAIN}`);
console.log(`Verified sent data:\n${sentString}`);
console.log(`Verified received data:\n${receivedString}`);
console.log(`Verified screen name: ${screenName}`);
return PluginOutput.fromJson({ screenName: screenName });
}
/**
* Render redacted bytes as `🙈`.
*/
function bytesToRedactedString(bytes: Uint8Array): string {
return new TextDecoder().decode(bytes).replace(/\0/g, "🙈");
}

View File

@@ -0,0 +1,394 @@
function isNull(v: any): boolean {
return v === undefined || v === null;
}
function cast(caster: (v: any) => any, v: any): any {
if (isNull(v)) return v;
return caster(v);
}
function castArray(caster: (v: any) => any) {
return (v?: Array<any>) => {
if (isNull(v)) return v;
caster = cast.bind(null, caster); // bind to null-preserving logic in `cast`
return v!.map(caster);
};
}
function castMap(caster: (v: any) => any) {
return (v?: any) => {
if (isNull(v)) return v;
caster = cast.bind(null, caster); // bind to null-preserving logic in `cast`
const newMap: any = {};
for (const k in v) {
newMap[k] = caster(v![k]);
}
return newMap;
};
}
function dateToJson(v?: Date): string | undefined | null {
if (v === undefined || v === null) return v;
return v.toISOString();
}
function dateFromJson(v?: string): Date | undefined | null {
if (v === undefined || v === null) return v;
return new Date(v);
}
function bufferToJson(v?: ArrayBuffer): string | undefined | null {
if (v === undefined || v === null) return v;
return Host.arrayBufferToBase64(v);
}
function bufferFromJson(v?: string): ArrayBuffer | undefined | null {
if (v === undefined || v === null) return v;
return Host.base64ToArrayBuffer(v);
}
/**
* Direction of the plaintext
*/
export enum Direction {
Sent = "Sent",
Received = "Received",
}
/**
* Secret used to generate the encodings
*/
export class EncoderSecret {
/**
* Delta used to generate the encodings
*/
// @ts-expect-error TS2564
delta: Array<number>;
/**
* Seed used to generate the encodings
*/
// @ts-expect-error TS2564
seed: Array<number>;
static fromJson(obj: any): EncoderSecret {
return {
...obj,
};
}
static toJson(obj: EncoderSecret): any {
return {
...obj,
};
}
}
export class PluginOutput {
// @ts-expect-error TS2564
screenName: string;
static fromJson(obj: any): PluginOutput {
console.log(`PluginOutput fromJson: ${JSON.stringify(obj)}`);
return {
...obj,
};
}
static toJson(obj: PluginOutput): any {
return {
...obj,
};
}
}
/**
* Commitment to the encoding of the transcript data
*/
export class EncodingCommitment {
/**
* Merkle root of the encoding commitments
*/
// @ts-expect-error TS2564
root: TypedHash;
/**
* Secret used to generate the encodings
*/
// @ts-expect-error TS2564
secret: EncoderSecret;
static fromJson(obj: any): EncodingCommitment {
return {
...obj,
root: cast(TypedHash.fromJson, obj.root),
secret: cast(EncoderSecret.fromJson, obj.secret),
};
}
static toJson(obj: EncodingCommitment): any {
return {
...obj,
root: cast(TypedHash.toJson, obj.root),
secret: cast(EncoderSecret.toJson, obj.secret),
};
}
}
/**
* A partial transcript containing authenticated application data
*/
export class PartialTranscript {
/**
* Data received by the prover from the server (byte array)
*/
// @ts-expect-error TS2564
received: Array<number>;
/**
* Index ranges of authenticated received data
*/
// @ts-expect-error TS2564
recvAuthedIdx: Array<Range>;
/**
* Data sent from the prover to the server (byte array)
*/
// @ts-expect-error TS2564
sent: Array<number>;
/**
* Index ranges of authenticated sent data
*/
// @ts-expect-error TS2564
sentAuthedIdx: Array<Range>;
static fromJson(obj: any): PartialTranscript {
return {
...obj,
recvAuthedIdx: cast(castArray(Range.fromJson), obj.recvAuthedIdx),
sentAuthedIdx: cast(castArray(Range.fromJson), obj.sentAuthedIdx),
};
}
static toJson(obj: PartialTranscript): any {
return {
...obj,
recvAuthedIdx: cast(castArray(Range.toJson), obj.recvAuthedIdx),
sentAuthedIdx: cast(castArray(Range.toJson), obj.sentAuthedIdx),
};
}
}
/**
* Hash of plaintext in the transcript
*/
export class PlaintextHash {
/**
* Direction of the plaintext
*/
// @ts-expect-error TS2564
direction: Direction;
/**
* The hash of the data
*/
// @ts-expect-error TS2564
hash: TypedHash;
/**
* Indexes of the plaintext in the transcript
*/
// @ts-expect-error TS2564
idx: Array<Range>;
static fromJson(obj: any): PlaintextHash {
return {
...obj,
hash: cast(TypedHash.fromJson, obj.hash),
idx: cast(castArray(Range.fromJson), obj.idx),
};
}
static toJson(obj: PlaintextHash): any {
return {
...obj,
hash: cast(TypedHash.toJson, obj.hash),
idx: cast(castArray(Range.toJson), obj.idx),
};
}
}
/**
* The verifier configuration.
*/
export class PluginVerifierConfig {
/**
* Maximum data that can be received by the prover in bytes
*/
maxRecvData?: number | null;
/**
* Maximum number of application data records that can be received online
*/
maxRecvRecordsOnline?: number | null;
/**
* Maximum data that can be sent by the prover in bytes
*/
maxSentData?: number | null;
/**
* Maximum number of application data records that can be sent
*/
maxSentRecords?: number | null;
static fromJson(obj: any): PluginVerifierConfig {
return {
...obj,
};
}
static toJson(obj: PluginVerifierConfig): any {
return {
...obj,
};
}
}
/**
* A range with start (inclusive) and end (exclusive) positions
*/
export class Range {
/**
* End position (exclusive)
*/
// @ts-expect-error TS2564
end: number;
/**
* Start position (inclusive)
*/
// @ts-expect-error TS2564
start: number;
static fromJson(obj: any): Range {
return {
...obj,
};
}
static toJson(obj: Range): any {
return {
...obj,
};
}
}
/**
* Cryptographic commitment to transcript data
*/
export class TranscriptCommitment {
/**
* Commitment to the encoding of the transcript data
*/
encodingCommitment?: EncodingCommitment;
/**
* Hash of plaintext in the transcript
*/
plaintextHash?: PlaintextHash;
static fromJson(obj: any): TranscriptCommitment {
return {
...obj,
encodingCommitment: cast(
EncodingCommitment.fromJson,
obj.encodingCommitment,
),
plaintextHash: cast(PlaintextHash.fromJson, obj.plaintextHash),
};
}
static toJson(obj: TranscriptCommitment): any {
return {
...obj,
encodingCommitment: cast(
EncodingCommitment.toJson,
obj.encodingCommitment,
),
plaintextHash: cast(PlaintextHash.toJson, obj.plaintextHash),
};
}
}
/**
* Typed hash with algorithm ID and value
*/
export class TypedHash {
/**
* The algorithm ID of the hash
*/
// @ts-expect-error TS2564
alg: number;
/**
* The hash value in bytes
*/
// @ts-expect-error TS2564
value: Array<number>;
static fromJson(obj: any): TypedHash {
return {
...obj,
};
}
static toJson(obj: TypedHash): any {
return {
...obj,
};
}
}
/**
* Output from the MPC-TLS verification containing verified session data
*/
export class VerifierOutput {
/**
* The server's DNS name if revealed by the prover
*/
serverName?: string | null;
/**
* The partial transcript containing authenticated application data
*/
transcript?: PartialTranscript;
/**
* Cryptographic commitments to transcript data
*/
// @ts-expect-error TS2564
transcriptCommitments: Array<TranscriptCommitment>;
static fromJson(obj: any): VerifierOutput {
return {
...obj,
transcript: cast(PartialTranscript.fromJson, obj.transcript),
transcriptCommitments: cast(
castArray(TranscriptCommitment.fromJson),
obj.transcriptCommitments,
),
};
}
static toJson(obj: VerifierOutput): any {
return {
...obj,
transcript: cast(PartialTranscript.toJson, obj.transcript),
transcriptCommitments: cast(
castArray(TranscriptCommitment.toJson),
obj.transcriptCommitments,
),
};
}
}

View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"target": "es2020", // Specify ECMAScript target version
"module": "commonjs", // Specify module code generation
"lib": [
"es2020"
], // Specify a list of library files to be included in the compilation
"types": [
"@extism/js-pdk",
"./src/index.d.ts"
], // Specify a list of type definition files to be included in the compilation
"strict": true, // Enable all strict type-checking options
"esModuleInterop": true, // Enables compatibility with Babel-style module imports
"skipLibCheck": true, // Skip type checking of declaration files
"allowJs": true, // Allow JavaScript files to be compiled
"noEmit": true, // Do not emit outputs (no .js or .d.ts files)
"resolveJsonModule": true // Automatically resolve .json imports
},
"include": [
"src/**/*.ts" // Include all TypeScript files in src directory
],
"exclude": [
"node_modules" // Exclude the node_modules directory
]
}

View File

@@ -0,0 +1,17 @@
app_id = ""
# This is where 'xtp plugin push' expects to find the wasm file after the build script has run.
bin = "../wasm/plugin-js.wasm"
extension_point_id = ""
name = "plugin-js"
[scripts]
# xtp plugin build runs this script to generate the wasm file
build = "mkdir -p ../wasm && bash prepare.sh && npm run build"
# xtp plugin init runs this script to format the plugin code
format = "npm run format"
# xtp plugin init runs this script before running the format script
prepare = "bash prepare.sh"
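A note inferred from the scripts above, not stated in the diff: `xtp plugin build` creates the wasm output directory, runs prepare.sh to check for Node, npm, and extism-js (and to `npm install`), then runs `npm run build`, which type-checks with tsc, bundles src/index.ts with esbuild, and compiles the bundle to ../wasm/plugin-js.wasm with extism-js.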

View File

@@ -0,0 +1,2 @@
[build]
target = "wasm32-unknown-unknown"

crates/notary/plugin/rust/.gitignore (vendored, new file, +21)

@@ -0,0 +1,21 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# RustRover
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

View File

@@ -0,0 +1,19 @@
[package]
name = "plugin_rs"
version = "0.1.0"
edition = "2021"
[lib]
name = "plugin_rs"
crate-type = ["cdylib"]
[dependencies]
anyhow = { version = "1.0" }
extism-pdk = "1.1.0"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
base64-serde = "0.7"
base64 = "0.21"
[workspace]

View File

@@ -0,0 +1,58 @@
#!/bin/bash
# Function to check if a command exists
command_exists () {
command -v "$1" >/dev/null 2>&1
}
missing_deps=0
# Check for Cargo
if ! (command_exists cargo); then
missing_deps=1
echo "❌ Cargo/rust is not installed."
echo ""
echo "To install Rust, visit the official download page:"
echo "👉 https://www.rust-lang.org/tools/install"
echo ""
echo "Or install it using a package manager:"
echo ""
echo "🔹 macOS (Homebrew):"
echo " brew install cargo"
echo ""
echo "🔹 Ubuntu/Debian:"
echo " sudo apt-get install -y cargo"
echo ""
echo "🔹 Arch Linux:"
echo " sudo pacman -S rust"
echo ""
fi
if ! (command_exists rustup); then
missing_deps=1
echo "❌ rustup is missing. Check your rust installation."
echo ""
fi
# Exit with a bad exit code if any dependencies are missing
if [ "$missing_deps" -ne 0 ]; then
echo "Install the missing dependencies and ensure they are on your path. Then run this command again."
# TODO: remove sleep when cli bug is fixed
sleep 2
exit 1
fi
if ! (rustup target list --installed | grep -q '^wasm32-wasip1$'); then
if ! (rustup target add wasm32-wasip1); then
echo "❌ error encountered while adding target \"wasm32-wasip1\""
echo ""
echo "Update rustup with:"
echo "👉 rustup update"
echo ""
exit 1
fi
fi
if ! (rustup target list --installed | grep -q '^wasm32-unknown-unknown$'); then
rustup target add wasm32-unknown-unknown
fi

View File

@@ -0,0 +1,80 @@
mod pdk;
use anyhow::anyhow;
use extism_pdk::*;
use pdk::*;
const SERVER_DOMAIN: &str = "api.x.com";
// Returns the verifier configuration.
// The configuration is used to initialize the verifier in the host.
pub(crate) fn config() -> Result<types::PluginVerifierConfig, Error> {
debug!("Composing verifier configuration...");
Ok(types::PluginVerifierConfig::default())
}
// Verifies the output from the TLS verifier.
// This function is called after the MPC-TLS verification is complete
// and allows the plugin to perform custom verification logic.
pub(crate) fn verify(input: types::VerifierOutput) -> Result<(), Error> {
debug!("Starting verification...");
let types::VerifierOutput {
server_name,
transcript,
transcript_commitments,
} = input;
debug!("Transcript commitments: {:?}", transcript_commitments);
let transcript = transcript.ok_or(anyhow!("prover should have revealed transcript data"))?;
let server_name = server_name.ok_or(anyhow!("prover should have revealed server name"))?;
// Check sent data: check host.
debug!("Starting sent data verification...");
let sent: Vec<u8> = transcript
.sent
.into_iter()
.map(|x| x.try_into().unwrap())
.collect();
let sent_data = String::from_utf8(sent.clone())
.map_err(|err| anyhow!("Verifier expected sent data: {err}"))?;
sent_data.find(SERVER_DOMAIN).ok_or(anyhow!(
"Verification failed: Expected host {}",
SERVER_DOMAIN
))?;
// Check received data: check that the expected street address is present.
debug!("Starting received data verification...");
let received: Vec<u8> = transcript
.received
.into_iter()
.map(|x| x.try_into().unwrap())
.collect();
let response = String::from_utf8(received.clone())
.map_err(|err| anyhow!("Verifier expected received data: {err}"))?;
response.find("123 Elm Street").ok_or(anyhow!(
"Verification failed: missing data in received data"
))?;
// Check Session info: server name.
if server_name.as_str() != SERVER_DOMAIN {
return Err(anyhow!("Verification failed: server name mismatches").into());
}
let sent_string = bytes_to_redacted_string(&sent)?;
let received_string = bytes_to_redacted_string(&received)?;
info!("Successfully verified {}", SERVER_DOMAIN);
info!("Verified sent data:\n{}", sent_string);
info!("Verified received data:\n{received_string}",);
Ok(())
}
/// Render redacted bytes as `🙈`.
fn bytes_to_redacted_string(bytes: &[u8]) -> Result<String, Error> {
Ok(String::from_utf8(bytes.to_vec())
.map_err(|err| anyhow!("Failed to parse bytes to redacted string: {err}"))?
.replace('\0', "🙈"))
}

View File

@@ -0,0 +1,320 @@
// THIS FILE WAS GENERATED BY `xtp-rust-bindgen`. DO NOT EDIT.
#![allow(non_snake_case)]
#![allow(unused_macros)]
use extism_pdk::*;
#[allow(unused)]
fn panic_if_key_missing() -> ! {
panic!("missing key");
}
pub(crate) mod internal {
pub(crate) fn return_error(e: extism_pdk::Error) -> i32 {
let err = format!("{:?}", e);
let mem = extism_pdk::Memory::from_bytes(&err).unwrap();
unsafe {
extism_pdk::extism::error_set(mem.offset());
}
-1
}
}
#[allow(unused)]
macro_rules! try_input {
() => {{
let x = extism_pdk::input();
match x {
Ok(x) => x,
Err(e) => return internal::return_error(e),
}
}};
}
#[allow(unused)]
macro_rules! try_input_json {
() => {{
let x = extism_pdk::input();
match x {
Ok(extism_pdk::Json(x)) => x,
Err(e) => return internal::return_error(e),
}
}};
}
use base64_serde::base64_serde_type;
base64_serde_type!(Base64Standard, base64::engine::general_purpose::STANDARD);
mod exports {
use super::*;
#[no_mangle]
pub extern "C" fn config() -> i32 {
let ret = crate::config().and_then(|x| extism_pdk::output(extism_pdk::Json(x)));
match ret {
Ok(()) => 0,
Err(e) => internal::return_error(e),
}
}
#[no_mangle]
pub extern "C" fn verify() -> i32 {
let ret = crate::verify(try_input_json!()).and_then(extism_pdk::output);
match ret {
Ok(()) => 0,
Err(e) => internal::return_error(e),
}
}
}
pub mod types {
use super::*;
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub enum Direction {
#[default]
#[serde(rename = "Sent")]
Sent,
#[serde(rename = "Received")]
Received,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct EncoderSecret {
/// Delta used to generate the encodings
#[serde(rename = "delta")]
pub delta: Vec<i64>,
/// Seed used to generate the encodings
#[serde(rename = "seed")]
pub seed: Vec<i64>,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct EncodingCommitment {
/// Merkle root of the encoding commitments
#[serde(rename = "root")]
pub root: types::TypedHash,
/// Secret used to generate the encodings
#[serde(rename = "secret")]
pub secret: types::EncoderSecret,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct PartialTranscript {
/// Data received by the prover from the server (byte array)
#[serde(rename = "received")]
pub received: Vec<i64>,
/// Index ranges of authenticated received data
#[serde(rename = "recvAuthedIdx")]
pub recv_authed_idx: Vec<types::Range>,
/// Data sent from the prover to the server (byte array)
#[serde(rename = "sent")]
pub sent: Vec<i64>,
/// Index ranges of authenticated sent data
#[serde(rename = "sentAuthedIdx")]
pub sent_authed_idx: Vec<types::Range>,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct PlaintextHash {
/// Direction of the plaintext
#[serde(rename = "direction")]
pub direction: types::Direction,
/// The hash of the data
#[serde(rename = "hash")]
pub hash: types::TypedHash,
/// Indexes of the plaintext in the transcript
#[serde(rename = "idx")]
pub idx: Vec<types::Range>,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct PluginVerifierConfig {
/// Maximum data that can be received by the prover in bytes
#[serde(rename = "maxRecvData")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub max_recv_data: Option<i64>,
/// Maximum number of application data records that can be received online
#[serde(rename = "maxRecvRecordsOnline")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub max_recv_records_online: Option<i64>,
/// Maximum data that can be sent by the prover in bytes
#[serde(rename = "maxSentData")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub max_sent_data: Option<i64>,
/// Maximum number of application data records that can be sent
#[serde(rename = "maxSentRecords")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub max_sent_records: Option<i64>,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct Range {
/// End position (exclusive)
#[serde(rename = "end")]
pub end: i64,
/// Start position (inclusive)
#[serde(rename = "start")]
pub start: i64,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct TranscriptCommitment {
/// Commitment to the encoding of the transcript data
#[serde(rename = "encodingCommitment")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub encoding_commitment: Option<types::EncodingCommitment>,
/// Hash of plaintext in the transcript
#[serde(rename = "plaintextHash")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub plaintext_hash: Option<types::PlaintextHash>,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct TypedHash {
/// The algorithm ID of the hash
#[serde(rename = "alg")]
pub alg: i64,
/// The hash value in bytes
#[serde(rename = "value")]
pub value: Vec<i64>,
}
#[derive(
Default,
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
extism_pdk::FromBytes,
extism_pdk::ToBytes,
)]
#[encoding(Json)]
pub struct VerifierOutput {
/// The server's DNS name if revealed by the prover
#[serde(rename = "serverName")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub server_name: Option<String>,
/// The partial transcript containing authenticated application data
#[serde(rename = "transcript")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub transcript: Option<types::PartialTranscript>,
/// Cryptographic commitments to transcript data
#[serde(rename = "transcriptCommitments")]
pub transcript_commitments: Vec<types::TranscriptCommitment>,
}
}
mod raw_imports {
use super::*;
#[host_fn]
extern "ExtismHost" {}
}

View File

@@ -0,0 +1,17 @@
app_id = ""
# This is where 'xtp plugin push' expects to find the wasm file after the build script has run.
bin = "../wasm/plugin_rs.wasm"
extension_point_id = ""
name = "plugin_rs"
[scripts]
# xtp plugin build runs this script to generate the wasm file
build = "cargo build --release --target wasm32-unknown-unknown --target-dir ../wasm/ && cp ../wasm/wasm32-unknown-unknown/release/plugin_rs.wasm ../wasm/"
# xtp plugin init runs this script to format the plugin code
format = "cargo fmt"
# xtp plugin init runs this script before running the format script
prepare = "bash prepare.sh"
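Similarly for the Rust plugin, inferred from the scripts above: `xtp plugin build` runs prepare.sh to ensure cargo, rustup, and the wasm targets are installed, then builds the `plugin_rs` cdylib for wasm32-unknown-unknown and copies it to ../wasm/plugin_rs.wasm.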

View File

@@ -0,0 +1,174 @@
version: v1-draft
exports:
config:
description: |
Returns the verifier configuration.
The configuration is used to initialize the verifier in the host.
output:
$ref: "#/components/schemas/PluginVerifierConfig"
contentType: application/json
verify:
description: |
Verifies the output from the MPC-TLS verifier.
This function is called after the MPC-TLS verification is complete
and allows the plugin to perform custom verification logic.
input:
$ref: "#/components/schemas/VerifierOutput"
contentType: application/json
components:
schemas:
PluginVerifierConfig:
description: The verifier configuration.
properties:
maxSentData:
type: integer
nullable: true
description: Maximum data that can be sent by the prover in bytes
maxRecvData:
type: integer
nullable: true
description: Maximum data that can be received by the prover in bytes
maxSentRecords:
type: integer
nullable: true
description: Maximum number of application data records that can be sent
maxRecvRecordsOnline:
type: integer
nullable: true
description: Maximum number of application data records that can be received online
VerifierOutput:
description: Output from the MPC-TLS verification containing verified session data
properties:
serverName:
type: string
nullable: true
description: The server's DNS name if revealed by the prover
transcript:
$ref: "#/components/schemas/PartialTranscript"
nullable: true
description: The partial transcript containing authenticated application data
transcriptCommitments:
type: array
items:
$ref: "#/components/schemas/TranscriptCommitment"
description: Cryptographic commitments to transcript data
required:
- transcriptCommitments
PartialTranscript:
description: A partial transcript containing authenticated application data
properties:
sent:
type: array
items:
type: integer
description: Data sent from the prover to the server (byte array)
received:
type: array
items:
type: integer
description: Data received by the prover from the server (byte array)
sentAuthedIdx:
type: array
items:
$ref: "#/components/schemas/Range"
description: Index ranges of authenticated sent data
recvAuthedIdx:
type: array
items:
$ref: "#/components/schemas/Range"
description: Index ranges of authenticated received data
required:
- sent
- received
- sentAuthedIdx
- recvAuthedIdx
Range:
description: A range with start (inclusive) and end (exclusive) positions
properties:
start:
type: integer
description: Start position (inclusive)
end:
type: integer
description: End position (exclusive)
required:
- start
- end
TranscriptCommitment:
description: Cryptographic commitment to transcript data
properties:
encodingCommitment:
$ref: "#/components/schemas/EncodingCommitment"
description: Commitment to the encoding of the transcript data
nullable: true
plaintextHash:
$ref: "#/components/schemas/PlaintextHash"
description: Hash of plaintext in the transcript
nullable: true
EncodingCommitment:
description: Commitment to the encoding of the transcript data
properties:
root:
$ref: "#/components/schemas/TypedHash"
description: Merkle root of the encoding commitments
secret:
$ref: "#/components/schemas/EncoderSecret"
description: Secret used to generate the encodings
required:
- root
- secret
PlaintextHash:
description: Hash of plaintext in the transcript
properties:
direction:
$ref: "#/components/schemas/Direction"
description: Direction of the plaintext
idx:
type: array
items:
$ref: "#/components/schemas/Range"
description: Indexes of the plaintext in the transcript
hash:
$ref: "#/components/schemas/TypedHash"
description: The hash of the data
required:
- direction
- idx
- hash
Direction:
description: Direction of the plaintext
type: string
enum:
- Sent
- Received
EncoderSecret:
description: Secret used to generate the encodings
properties:
seed:
type: array
items:
type: integer
description: Seed used to generate the encodings
delta:
type: array
items:
type: integer
description: Delta used to generate the encodings
required:
- seed
- delta
TypedHash:
description: Typed hash with algorithm ID and value
properties:
alg:
type: integer
description: The algorithm ID of the hash
value:
type: array
items:
type: integer
description: The hash value in bytes
required:
- alg
- value
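
For orientation, a Rust plugin satisfying the `config` and `verify` exports described at the top of this schema could look roughly like the sketch below. It is a minimal, self-contained sketch that uses extism_pdk directly instead of the generated bindings, declares only the JSON fields it needs (serde ignores the rest), and the screen-name extraction is hypothetical demo logic rather than the plugin actually shipped in this branch.

use extism_pdk::*;
use serde::{Deserialize, Serialize};

// Mirrors the host-side `PluginVerifierConfig` (JSON with camelCase keys).
#[derive(Serialize, Deserialize, FromBytes, ToBytes)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct PluginVerifierConfig {
    max_sent_data: Option<usize>,
    max_sent_records: Option<usize>,
    max_recv_data: Option<usize>,
    max_recv_records_online: Option<usize>,
}

// Only the fields this sketch inspects; unknown fields are ignored by serde.
#[derive(Serialize, Deserialize, FromBytes, ToBytes)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct VerifierOutput {
    server_name: Option<String>,
    transcript: Option<PartialTranscript>,
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PartialTranscript {
    received: Vec<u8>,
}

// Mirrors the host-side `PluginOutput`.
#[derive(Serialize, Deserialize, FromBytes, ToBytes)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct PluginOutput {
    screen_name: String,
}

// `config` takes no meaningful input, so the raw input bytes are ignored.
#[plugin_fn]
pub fn config(_input: Vec<u8>) -> FnResult<PluginVerifierConfig> {
    // Any `None` field falls back to the notary server's global limits.
    Ok(PluginVerifierConfig {
        max_sent_data: Some(4096),
        max_sent_records: None,
        max_recv_data: Some(16384),
        max_recv_records_online: None,
    })
}

#[plugin_fn]
pub fn verify(output: VerifierOutput) -> FnResult<PluginOutput> {
    // Hypothetical check: require the prover to have revealed the server name.
    output
        .server_name
        .ok_or_else(|| Error::msg("prover did not reveal the server name"))?;

    // Hypothetical extraction: pull a `screen_name` value out of the received bytes.
    let received = output
        .transcript
        .map(|t| String::from_utf8_lossy(&t.received).into_owned())
        .unwrap_or_default();
    let screen_name = received
        .split("\"screen_name\":\"")
        .nth(1)
        .and_then(|rest| rest.split('"').next())
        .unwrap_or("unknown")
        .to_string();

    Ok(PluginOutput { screen_name })
}

On the host side this lines up with `verifier_service` further down: the notary loads the wasm from the configured plugin folder, calls `config` to build its protocol validator, runs the MPC-TLS verification, and then hands the `VerifierOutput` to `verify`.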

View File

@@ -23,10 +23,13 @@ base64 = { version = "0.21" }
config = { version = "0.14", features = ["yaml"] }
const-oid = { version = "0.9.6", features = ["db"] }
csv = { version = "1.3" }
extism = { version = "1.11.1" }
eyre = { version = "0.6" }
futures-util = { workspace = true }
glob = { version = "0.3.2" }
http = { workspace = true }
http-body-util = { workspace = true }
reqwest = "0.12"
hyper = { workspace = true, features = ["client", "http1", "server"] }
hyper-util = { workspace = true, features = ["full"] }
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }

View File

@@ -23,6 +23,8 @@ pub struct NotaryServerProperties {
pub log: LogProperties,
/// Setting for authorization
pub auth: AuthorizationProperties,
/// Plugin properties
pub plugin: PluginProperties,
}
impl NotaryServerProperties {
@@ -52,6 +54,9 @@ impl NotaryServerProperties {
if let Some(path) = config.tls.certificate_path {
config.tls.certificate_path = Some(prepend_file_path(&path, &parent_dir)?);
}
// Prepend plugin path.
config.plugin.folder = prepend_file_path(&config.plugin.folder, &parent_dir)?;
// Prepend auth file path.
if let Some(mode) = config.auth.mode {
config.auth.mode = Some(match mode {
@@ -95,6 +100,19 @@ impl NotaryServerProperties {
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PluginProperties {
/// Path to the plugin directory
pub folder: String,
/// Global limit for maximum number of bytes that can be sent
pub max_sent_data: usize,
/// Global limit for maximum number of bytes that can be received
pub max_recv_data: usize,
/// Number of seconds before verification times out, to prevent memory from
/// being held by stalled sessions
pub timeout: u64,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NotarizationProperties {
/// Global limit for maximum number of bytes that can be sent
@@ -217,6 +235,7 @@ impl Default for NotaryServerProperties {
tls: Default::default(),
log: Default::default(),
auth: Default::default(),
plugin: Default::default(),
}
}
}
@@ -234,6 +253,18 @@ impl Default for NotarizationProperties {
}
}
impl Default for PluginProperties {
fn default() -> Self {
Self {
// Todo: Remove default path
folder: "../plugin/wasm".to_string(),
max_sent_data: 4096,
max_recv_data: 16384,
timeout: 1800,
}
}
}
impl Default for LogProperties {
fn default() -> Self {
Self {

View File

@@ -3,6 +3,7 @@ mod cli;
mod config;
mod error;
mod middleware;
mod plugin;
mod server;
mod server_tracing;
mod service;

View File

@@ -16,8 +16,6 @@ async fn main() -> Result<(), NotaryServerError> {
// Set up tracing for logging
init_tracing(&config).map_err(|err| eyre!("Failed to set up tracing: {err}"))?;
// debug!("Server config loaded: \n{}", config);
debug!(
"Server config loaded: \n{}",
serde_yaml::to_string(&config).map_err(|err| eyre!("Failed to print config: {err}"))?

View File

@@ -0,0 +1,229 @@
use std::{path::Path, time::Duration};
use extism::{convert::Json, *};
use eyre::{eyre, Result};
use glob::glob;
use serde::{Deserialize, Serialize};
use tlsn_common::config::ProtocolConfigValidator;
use tlsn_core::{
connection::ServerName,
transcript::{
encoding::EncodingCommitment, hash::PlaintextHash, Idx,
PartialTranscript as CorePartialTranscript,
TranscriptCommitment as CoreTranscriptCommitment,
},
VerifierOutput as CoreVerifierOutput, VerifyConfig,
};
use tlsn_verifier::{Verifier, VerifierConfig};
use tokio::{
io::{AsyncRead, AsyncWrite},
time::timeout,
};
use tokio_util::compat::TokioAsyncReadCompatExt;
use tracing::debug;
use crate::{types::NotaryGlobals, NotaryServerError};
use reqwest;
use serde_json::json;
use std::env;
#[derive(Deserialize, FromBytes, Serialize, ToBytes, Debug)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct PluginVerifierConfig {
/// Maximum number of bytes that can be sent.
max_sent_data: Option<usize>,
/// Maximum number of application data records that can be sent.
max_sent_records: Option<usize>,
/// Maximum number of bytes that can be received.
max_recv_data: Option<usize>,
/// Maximum number of application data records that can be received.
max_recv_records_online: Option<usize>,
}
#[derive(Deserialize, FromBytes, Serialize, ToBytes, Debug)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct PluginOutput {
screen_name: String,
}
#[derive(Deserialize, FromBytes, Serialize, ToBytes)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct VerifierOutput {
/// Server identity.
pub server_name: Option<ServerName>,
/// Transcript data.
pub transcript: Option<PartialTranscript>,
/// Transcript commitments.
pub transcript_commitments: Vec<TranscriptCommitment>,
}
impl From<CoreVerifierOutput> for VerifierOutput {
fn from(output: CoreVerifierOutput) -> Self {
Self {
server_name: output.server_name,
transcript: output.transcript.map(PartialTranscript::from),
transcript_commitments: output
.transcript_commitments
.into_iter()
.map(TranscriptCommitment::from)
.collect(),
}
}
}
#[derive(Deserialize, FromBytes, Serialize, ToBytes)]
#[encoding(Json)]
#[serde(rename_all = "camelCase")]
struct PartialTranscript {
/// Data sent from the Prover to the Server.
sent: Vec<u8>,
/// Data received by the Prover from the Server.
received: Vec<u8>,
    /// Index of `sent` data that has been authenticated.
    sent_authed_idx: Idx,
    /// Index of `received` data that has been authenticated.
    recv_authed_idx: Idx,
}
impl From<CorePartialTranscript> for PartialTranscript {
fn from(transcript: CorePartialTranscript) -> Self {
Self {
sent: transcript.sent_unsafe().to_vec(),
received: transcript.received_unsafe().to_vec(),
sent_authed_idx: transcript.sent_authed().clone(),
recv_authed_idx: transcript.received_authed().clone(),
}
}
}
#[derive(Deserialize, FromBytes, Serialize, ToBytes)]
#[encoding(Json)]
#[non_exhaustive]
enum TranscriptCommitment {
/// Encoding commitment.
#[serde(rename = "encodingCommitment")]
Encoding(EncodingCommitment),
/// Plaintext hash commitment.
#[serde(rename = "plaintextHash")]
Hash(PlaintextHash),
}
impl From<CoreTranscriptCommitment> for TranscriptCommitment {
    fn from(commitment: CoreTranscriptCommitment) -> Self {
        match commitment {
            CoreTranscriptCommitment::Encoding(encoding) => Self::Encoding(encoding),
            CoreTranscriptCommitment::Hash(hash) => Self::Hash(hash),
            _ => panic!("Unsupported transcript commitment type in plugin output"),
        }
    }
}
pub fn get_plugin_names(dir: &str) -> Result<Vec<String>, NotaryServerError> {
let names: Vec<String> = glob(&format!("{}/*.wasm", dir))
.map_err(|e| eyre!("Failed to find wasm files in plugin directory: {}", e))?
.filter_map(|path| {
path.ok()?.file_stem()?.to_str().map(String::from)
})
.collect();
if names.is_empty() {
return Err(eyre!("No readable plugin files found in directory: {}", dir).into());
}
Ok(names)
}
pub async fn verifier_service<T: AsyncWrite + AsyncRead + Send + Unpin + 'static>(
socket: T,
notary_globals: NotaryGlobals,
session_id: &str,
plugin_name: &str,
) -> Result<(), NotaryServerError> {
debug!(?session_id, "Starting verification...");
let path = Wasm::file(Path::new(&notary_globals.plugin_config.folder).join(format!("{}.wasm", plugin_name)));
let manifest = Manifest::new([path]);
let mut plugin = PluginBuilder::new(manifest)
        // WASI is required for JS plugins (https://github.com/extism/js-pdk?tab=readme-ov-file#exports),
        // but plugins still cannot access the filesystem or make network calls unless a folder or host is explicitly allowed.
.with_wasi(true)
.build()
.map_err(|e| eyre!("Failed to build plugin: {}", e))?;
debug!("Plugin built successfully");
let plugin_config = plugin
.call::<(), PluginVerifierConfig>("config", ())
.map_err(|e| eyre!("Failed to get plugin config: {}", e))?;
debug!("Plugin configuration: {:?}", plugin_config);
let max_sent_data = plugin_config
.max_sent_data
.unwrap_or(notary_globals.notarization_config.max_sent_data);
let max_recv_data = plugin_config
.max_recv_data
.unwrap_or(notary_globals.notarization_config.max_recv_data);
let mut validator_builder = ProtocolConfigValidator::builder();
validator_builder
.max_sent_data(max_sent_data)
.max_recv_data(max_recv_data);
if let Some(max_sent_records) = plugin_config.max_sent_records {
validator_builder.max_sent_records(max_sent_records);
}
if let Some(max_recv_records_online) = plugin_config.max_recv_records_online {
validator_builder.max_recv_records_online(max_recv_records_online);
}
let validator = validator_builder.build()?;
let config = VerifierConfig::builder()
.protocol_config_validator(validator)
.crypto_provider(notary_globals.crypto_provider.clone())
.build()?;
let output = timeout(
Duration::from_secs(notary_globals.notarization_config.timeout),
Verifier::new(config).verify(socket.compat(), &VerifyConfig::default()),
)
.await
.map_err(|_| eyre!("Timeout reached before verification completes"))??;
let result = plugin
.call::<VerifierOutput, PluginOutput>("verify", output.into())
.map_err(|e| eyre!("Failed to verify on plugin: {}", e))?;
debug!("Plugin verification result: {:?}", result);
plugin
.reset()
.map_err(|e| eyre!("Failed to reset plugin memory: {}", e))?;
let client = reqwest::Client::new();
let url = "http://localhost:3030/update-session";
let body = json!({"screen_name": result.screen_name, "session_id": session_id});
    let secret_key = env::var("VERIFIER_SECRET_KEY")
        .map_err(|err| eyre!("VERIFIER_SECRET_KEY is not set: {}", err))?;
    let response = client
        .post(url)
        .header("Content-Type", "application/json")
        .header("X-VERIFIER-SECRET-KEY", secret_key)
        .body(body.to_string())
        .send()
        .await
        .map_err(|e| eyre!("Failed to send request: {}", e))?;
    let status = response.status();
    let body_text = response
        .text()
        .await
        .map_err(|e| eyre!("Failed to get response body: {}", e))?;
    debug!("Update-session response: status={}, body={}", status, body_text);
Ok(())
}

View File

@@ -32,6 +32,7 @@ use crate::{
config::{NotarizationProperties, NotaryServerProperties},
error::NotaryServerError,
middleware::AuthorizationMiddleware,
plugin::get_plugin_names,
service::{initialize, upgrade_protocol},
signing::AttestationKey,
types::{InfoResponse, NotaryGlobals},
@@ -46,6 +47,10 @@ use tokio::sync::Semaphore;
/// both TCP and WebSocket clients
#[tracing::instrument(skip(config))]
pub async fn run_server(config: &NotaryServerProperties) -> Result<(), NotaryServerError> {
// Get plugin names
let plugin_names = get_plugin_names(&config.plugin.folder)?;
debug!("Available plugins: {:?}", plugin_names);
let attestation_key = get_attestation_key(&config.notarization).await?;
let verifying_key_pem = attestation_key
.verifying_key_pem()
@@ -115,6 +120,8 @@ pub async fn run_server(config: &NotaryServerProperties) -> Result<(), NotarySer
let notary_globals = NotaryGlobals::new(
Arc::new(crypto_provider),
config.notarization.clone(),
config.plugin.clone(),
Arc::new(plugin_names.clone()),
authorization_mode,
Arc::new(Semaphore::new(config.concurrency)),
);
@@ -150,6 +157,7 @@ pub async fn run_server(config: &NotaryServerProperties) -> Result<(), NotarySer
version,
public_key: verifying_key_pem,
git_commit_hash,
plugins: plugin_names,
#[cfg(feature = "tee_quote")]
quote: quote(verifying_key_bytes).await,
}),

View File

@@ -27,7 +27,7 @@ pub fn init_tracing(config: &NotaryServerProperties) -> Result<()> {
// Use the default filter when only verbosity level is provided
None => {
let level = Level::from_str(&config.log.level)?;
format!("notary_server={level},tlsn_verifier={level},mpc_tls={level}")
format!("notary_server={level},tlsn_verifier={level},mpc_tls={level},extism::pdk={level}")
}
};
let filter_layer = EnvFilter::builder().parse(directives)?;

View File

@@ -92,13 +92,7 @@ pub async fn upgrade_protocol(
let session_id = params.session_id;
// Check if session_id exists in the store, this also removes session_id from
// the store as each session_id can only be used once
if notary_globals
.store
.lock()
.unwrap()
.remove(&session_id)
.is_none()
{
let Some(plugin_name) = notary_globals.store.lock().unwrap().remove(&session_id) else {
let err_msg = format!("Session id {} does not exist", session_id);
error!(err_msg);
return NotaryServerError::BadProverRequest(err_msg).into_response();
@@ -107,11 +101,11 @@ pub async fn upgrade_protocol(
// the client, meanwhile initiating the websocket or tcp connection
match protocol_upgrade {
ProtocolUpgrade::Ws(ws) => ws.on_upgrade(move |socket| async move {
websocket_notarize(socket, notary_globals, session_id).await;
websocket_notarize(socket, notary_globals, session_id, plugin_name).await;
drop(permit);
}),
ProtocolUpgrade::Tcp(tcp) => tcp.on_upgrade(move |stream| async move {
tcp_notarize(stream, notary_globals, session_id).await;
tcp_notarize(stream, notary_globals, session_id, plugin_name).await;
drop(permit);
}),
}
@@ -138,6 +132,19 @@ pub async fn initialize(
}
};
let plugin_name = payload.plugin.clone();
if !notary_globals.plugin_names.contains(&plugin_name) {
error!(
"Plugin {} is not supported by the notary server",
payload.plugin
);
return NotaryServerError::BadProverRequest(
"Plugin is not supported by the notary server".to_string(),
)
.into_response();
}
// Ensure that the max_sent_data, max_recv_data submitted is not larger than the
// global max limits configured in notary server
if payload.max_sent_data.is_some() || payload.max_recv_data.is_some() {
@@ -176,7 +183,7 @@ pub async fn initialize(
.store
.lock()
.unwrap()
.insert(prover_session_id.clone(), ());
.insert(prover_session_id.clone(), plugin_name);
trace!("Latest store state: {:?}", notary_globals.store);

View File

@@ -10,7 +10,7 @@ use std::future::Future;
use tokio::time::Instant;
use tracing::{debug, error, info};
use crate::{service::notary_service, types::NotaryGlobals, NotaryServerError};
use crate::{plugin::verifier_service, service::notary_service, types::NotaryGlobals, NotaryServerError};
/// Custom extractor used to extract underlying TCP connection for TCP client —
/// using the same upgrade primitives used by the WebSocket implementation where
@@ -84,10 +84,11 @@ pub async fn tcp_notarize(
stream: TokioIo<Upgraded>,
notary_globals: NotaryGlobals,
session_id: String,
plugin_name: String,
) {
let start = Instant::now();
debug!(?session_id, "Upgraded to tcp connection");
match notary_service(stream, notary_globals, &session_id).await {
match verifier_service(stream, notary_globals, &session_id, &plugin_name).await {
Ok(_) => {
info!(
?session_id,

View File

@@ -3,8 +3,7 @@ use tracing::{debug, error, info};
use ws_stream_tungstenite::WsStream;
use crate::{
service::{axum_websocket::WebSocket, notary_service},
types::NotaryGlobals,
plugin::verifier_service, service::{axum_websocket::WebSocket, notary_service}, types::NotaryGlobals
};
/// Perform notarization using the established websocket connection
@@ -12,13 +11,14 @@ pub async fn websocket_notarize(
socket: WebSocket,
notary_globals: NotaryGlobals,
session_id: String,
plugin_name: String,
) {
let start = Instant::now();
debug!(?session_id, "Upgraded to websocket connection");
// Wrap the websocket in WsStream so that we have AsyncRead and AsyncWrite
// implemented
let stream = WsStream::new(socket.into_inner());
match notary_service(stream, notary_globals, &session_id).await {
match verifier_service(stream, notary_globals, &session_id, &plugin_name).await {
Ok(_) => {
info!(
?session_id,

View File

@@ -8,7 +8,10 @@ use tokio::sync::Semaphore;
#[cfg(feature = "tee_quote")]
use crate::tee::Quote;
use crate::{auth::AuthorizationMode, config::NotarizationProperties};
use crate::{
auth::AuthorizationMode,
config::{NotarizationProperties, PluginProperties},
};
/// Response object of the /info API
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -20,6 +23,8 @@ pub struct InfoResponse {
pub public_key: String,
/// Current git commit hash of notary-server
pub git_commit_hash: String,
/// List of plugins that are loaded
pub plugins: Vec<String>,
/// Hardware attestation
#[cfg(feature = "tee_quote")]
pub quote: Quote,
@@ -38,8 +43,10 @@ pub struct NotarizationRequestQuery {
pub struct NotaryGlobals {
pub crypto_provider: Arc<CryptoProvider>,
pub notarization_config: NotarizationProperties,
/// A temporary storage to store session_id
pub store: Arc<Mutex<HashMap<String, ()>>>,
pub plugin_config: PluginProperties,
pub plugin_names: Arc<Vec<String>>,
/// A temporary storage to store session_id and name of plugin requested
pub store: Arc<Mutex<HashMap<String, String>>>,
/// Selected authorization mode if any
pub authorization_mode: Option<AuthorizationMode>,
/// A semaphore to acquire a permit for notarization
@@ -50,12 +57,16 @@ impl NotaryGlobals {
pub fn new(
crypto_provider: Arc<CryptoProvider>,
notarization_config: NotarizationProperties,
plugin_config: PluginProperties,
plugin_names: Arc<Vec<String>>,
authorization_mode: Option<AuthorizationMode>,
semaphore: Arc<Semaphore>,
) -> Self {
Self {
crypto_provider,
notarization_config,
plugin_config,
plugin_names,
store: Default::default(),
authorization_mode,
semaphore,

View File

@@ -44,3 +44,9 @@ log:
auth:
enabled: false
whitelist: "../auth/whitelist.csv"
plugin:
folder: "../../../plugin/wasm"
max_sent_data: 4096
max_recv_data: 16384
timeout: 1800

View File

@@ -37,6 +37,7 @@ use notary_server::{
const MAX_SENT_DATA: usize = 1 << 13;
const MAX_RECV_DATA: usize = 1 << 13;
const PLUGIN_NAME: &str = "plugin_rs";
const NOTARY_HOST: &str = "127.0.0.1";
const NOTARY_DNS: &str = "tlsnotaryserver.io";
@@ -162,6 +163,7 @@ async fn accepted_client(client: NotaryClient) -> Result<Accepted, ClientError>
let notarization_request = NotarizationRequest::builder()
.max_sent_data(MAX_SENT_DATA)
.max_recv_data(MAX_RECV_DATA)
.plugin(PLUGIN_NAME.to_string())
.build()
.unwrap();
@@ -348,6 +350,7 @@ async fn test_websocket_prover() {
// Build the HTTP request to configure notarization
let payload = serde_json::to_string(&NotarizationSessionRequest {
client_type: ClientType::Websocket,
plugin: PLUGIN_NAME.to_string(),
max_sent_data: Some(MAX_SENT_DATA),
max_recv_data: Some(MAX_RECV_DATA),
})

View File

@@ -14,7 +14,7 @@ use tower_http::trace::TraceLayer;
use futures::{channel::oneshot, AsyncRead, AsyncWrite};
use futures_rustls::{
pki_types::{CertificateDer, PrivateKeyDer},
rustls::{server::WebPkiClientVerifier, RootCertStore, ServerConfig},
rustls::{crypto::aws_lc_rs::default_provider, server::WebPkiClientVerifier, RootCertStore, ServerConfig},
TlsAcceptor,
};
use hyper::{
@@ -55,6 +55,10 @@ fn app(state: AppState) -> Router {
pub async fn bind<T: AsyncRead + AsyncWrite + Send + Unpin + 'static>(
socket: T,
) -> anyhow::Result<()> {
    // Needed because a notary server dependency (ureq, used by extism) brings in ring
    // as a rustls crypto provider, so the default provider must be installed explicitly.
let _ = default_provider().install_default();
let key = PrivateKeyDer::Pkcs8(SERVER_KEY_DER.into());
let cert = CertificateDer::from(SERVER_CERT_DER);

View File

@@ -8,7 +8,7 @@ use bytes::Bytes;
use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use futures_rustls::{
pki_types::{CertificateDer, PrivateKeyDer},
rustls::ServerConfig,
rustls::{crypto::aws_lc_rs::default_provider, ServerConfig},
TlsAcceptor,
};
use http_body_util::{combinators::BoxBody, BodyExt, Empty, Full};
@@ -44,6 +44,10 @@ pub static CLOSE_DELAY: u64 = 1000;
pub async fn bind_test_server_hyper<T: AsyncRead + AsyncWrite + Send + Unpin + 'static>(
socket: T,
) -> Result<(), hyper::Error> {
    // Needed because a notary server dependency (ureq, used by extism) brings in ring
    // as a rustls crypto provider, so the default provider must be installed explicitly.
let _ = default_provider().install_default();
let key = PrivateKeyDer::Pkcs8(SERVER_KEY_DER.into());
let cert = CertificateDer::from(SERVER_CERT_DER);
@@ -74,6 +78,10 @@ pub async fn bind_test_server<
>(
socket: Compat<T>,
) {
    // Needed because a notary server dependency (ureq, used by extism) brings in ring
    // as a rustls crypto provider, so the default provider must be installed explicitly.
let _ = default_provider().install_default();
let key = PrivateKeyDer::Pkcs8(SERVER_KEY_DER.into());
let cert = CertificateDer::from(SERVER_CERT_DER);