mirror of https://github.com/tlsnotary/PageSigner.git
synced 2026-01-08 06:14:02 -05:00

- use Salsa20 instead of tweetnacl-js's secretbox for a 30% garbling speedup in the browser (see the sketch below).
- implement garbled row reduction (GRR3) for a 25% bandwidth saving.
- implement the KOS15 OT extension.
- linting.
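A rough, illustrative sketch of the two garbling ideas named above (this is not code from this commit): each garbled-table row is encrypted by XORing the output wire label with a Salsa20 keystream keyed by the two input wire labels, and GRR3 chooses the output label so that one row's ciphertext is all zeros and can be omitted, which is where the 25% saving comes from. salsa20Keystream is a hypothetical stand-in for whatever Salsa20 primitive the bundled sodium.js exposes.

// Illustrative sketch only; salsa20Keystream(key32, nonce8, len) is a hypothetical helper.
function encryptRow(labelA, labelB, gateId, rowIdx, outputLabel) {
  // Derive a 32-byte cipher key from the two 16-byte input wire labels.
  const key = new Uint8Array(32);
  key.set(labelA, 0);
  key.set(labelB, 16);
  // The nonce encodes the gate id and row index so every row gets a unique keystream.
  const nonce = new Uint8Array(8);
  new DataView(nonce.buffer).setUint32(0, gateId);
  nonce[7] = rowIdx;
  const keystream = salsa20Keystream(key, nonce, outputLabel.length);
  return outputLabel.map((byte, i) => byte ^ keystream[i]);
}
// With GRR3 the garbler picks the output label of the canonical first row so that its
// ciphertext is all zeros; that row is never sent, so only 3 of the 4 rows go over the wire.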
This commit is contained in:
@@ -4,12 +4,11 @@
<body>
<script src="core/third-party/simple-js-ec-math.js"></script>
<script src="core/third-party/sodium.js"></script> <!--dont put this any lower in the list or youll get require() not defined -->
<script src="core/third-party/nacl-fast.js"></script>
<script src="core/third-party/fastsha256.js"></script>
<script src="core/third-party/cbor.js"></script>
<script src="core/third-party/cose.js"></script>
<script src="core/twopc/circuits/casmbundle.js"></script>
<script type="module" src="core/Main.js"></script>
<!-- <script type="module" src="core/test.js"></script> -->
<script type="module" src="core/internal.js"></script>
</body>
</html>
@@ -1,12 +1,14 @@
/* global chrome, CASM*/

import {wait} from './utils.js';

// class FakeFS imitates node.js's fs.readFileSync() by
// reading the files in advance and outputting their content when readFileSync() is called
class FakeFS{
constructor(){
this.fileList = {}; // {fileName: <text string>}
}

// on init we read all .casm and .txt files in core/twopc/circuits
async init(){
const that = this;
@@ -40,10 +42,10 @@ class FakeFS{
// FirstTimeSetup.start() is invoked once on first install. It assembles the circuits,
// serializes them into a compact binary format and stores them in the browser cache.
// All future invocations of Pagesigner use these serialized cached circuits.
export class FirstTimeSetup{
async start(pm){
const url = chrome.extension.getURL('core/twopc/webWorkers/serializeCircuits.js');
const worker = new Worker(url);
console.log('Parsing circuits. This is done only once on first launch and will take ~30 secs');
console.time('time_to_parse');
@@ -67,7 +69,7 @@ export class FirstTimeSetup{
});
obj[i] = newobj;
if (pm) pm.update('first_time', {'current': n, 'total': 6});
await wait(100); // make sure update reaches popup
}
console.timeEnd('time_to_parse');
return obj;
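The comment above describes the one-time flow: parse the .casm circuits in a web worker, serialize them to a compact binary form, and cache the result so later launches skip the ~30 second parse. A minimal sketch of that pattern, assuming a hypothetical storeSerializedCircuit() helper and a hypothetical worker message shape in place of whatever PageSigner actually uses:

// Sketch only. storeSerializedCircuit(name, bytes) is a hypothetical cache writer,
// and the postMessage payload shape is illustrative, not the real worker protocol.
async function serializeAndCache(circuitName, circuitText) {
  const worker = new Worker(chrome.extension.getURL('core/twopc/webWorkers/serializeCircuits.js'));
  const serialized = await new Promise(function(resolve) {
    worker.onmessage = function(event) { resolve(new Uint8Array(event.data)); };
    worker.postMessage({name: circuitName, text: circuitText});
  });
  await storeSerializedCircuit(circuitName, serialized); // later launches read this instead of re-parsing
  worker.terminate();
  return serialized;
}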
212 core/Main.js
@@ -1,10 +1,13 @@
|
||||
/* eslint-disable no-import-assign */
|
||||
/* eslint-disable no-case-declarations */
|
||||
import {parse_certs, verifyChain, getCommonName, getAltNames} from './verifychain.js';
|
||||
import {ba2str, b64decode, concatTA, int2ba, sha256, b64encode, verifySig, assert,
|
||||
ba2int, xor, eq, wait, AESECBencrypt, wildcardTest, pubkeyPEM2raw, import_resource} from './utils.js';
|
||||
import {getPref, getSessionBlob, getSession, getAllSessions, saveNewSession, init_db,
|
||||
addNewPreference, setPref, renameSession, deleteSession} from './indexeddb.js';
|
||||
/* global chrome, browser */
|
||||
|
||||
import {parse_certs, verifyChain, getCommonName, getAltNames} from
|
||||
'./verifychain.js';
|
||||
import {ba2str, b64decode, concatTA, int2ba, sha256, b64encode, verifySig,
|
||||
assert, ba2int, xor, eq, wait, AESECBencrypt, wildcardTest, pubkeyPEM2raw,
|
||||
import_resource} from './utils.js';
|
||||
import {getPref, getSessionBlob, getSession, getAllSessions, saveNewSession,
|
||||
init_db, addNewPreference, setPref, renameSession, deleteSession}
|
||||
from './indexeddb.js';
|
||||
import {globals} from './globals.js';
|
||||
import {Socket} from './Socket.js';
|
||||
import {TLS, getExpandedKeys, decrypt_tls_responseV6} from './TLS.js';
|
||||
@@ -18,15 +21,15 @@ export class Main{
|
||||
this.messageListener;
|
||||
this.notarization_in_progress = false;
|
||||
this.isFirstTimeSetupNeeded = false;
|
||||
this.waiting_for_click = false;
|
||||
this.waiting_for_click = false;
|
||||
// popupError will be set to non-null when there is some error message that must be shown
|
||||
// via the popup
|
||||
this.popupError = null;
|
||||
// tabid set to 0 is a sign that this is the main window when querying with .getViews()
|
||||
this.tabid = 0;
|
||||
this.tabid = 0;
|
||||
// pendingAction is Firefox only: the action which must be taken as soon as the user allows
|
||||
// access to the website (either notarize or notarizeAfter)
|
||||
this.pendingAction = null;
|
||||
this.pendingAction = null;
|
||||
// trustedOracle is an object {'IP':<IP address>, 'pubkeyPEM':<pubkey in PEM format>}
|
||||
// describing the oracle server which was verified and can be used for notarization.
|
||||
this.trustedOracle = null;
|
||||
@@ -44,13 +47,13 @@ export class Main{
|
||||
this.trustedOracleReady = false;
|
||||
}
|
||||
|
||||
async main() {
|
||||
// perform browser-specific init first
|
||||
async main() {
|
||||
// perform browser-specific init first
|
||||
if (this.is_edge || this.is_firefox || this.is_opera){
|
||||
globals.usePythonBackend = true;
|
||||
}
|
||||
if (this.is_firefox){
|
||||
// Firefox asks user for permission to access the current website.
|
||||
// Firefox asks user for permission to access the current website.
|
||||
// Listen when the permission was given and run the pending action.
|
||||
// This way user doesnt have to click notarize->allow->notarize
|
||||
const listener = function(permissions){
|
||||
@@ -67,7 +70,7 @@ export class Main{
|
||||
};
|
||||
browser.permissions.onAdded.addListener(listener);
|
||||
}
|
||||
|
||||
|
||||
// browser-agnostic init
|
||||
const that = this;
|
||||
this.messageListener = chrome.runtime.onMessage.addListener(function(data) {
|
||||
@@ -145,6 +148,7 @@ export class Main{
|
||||
mode: 'no-cors'
|
||||
});
|
||||
const out = await Promise.race([fProm, wait(5000)])
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
.catch(err => {
|
||||
// fetch got 404; do nothing, just prevent exception from propagating
|
||||
});
|
||||
@@ -156,7 +160,7 @@ export class Main{
|
||||
}
|
||||
|
||||
// tryBackupNotary tries to use a backup notary. It checks that the backup notary
|
||||
// is not the same as failedNotaryIP
|
||||
// is not the same as failedNotaryIP
|
||||
async tryBackupNotary(failedNotaryIP){
|
||||
const resp = await fetch(globals.backupUrl);
|
||||
const backupIP = await resp.text();
|
||||
@@ -181,7 +185,7 @@ export class Main{
|
||||
this.trustedOracleReady = true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
openChromeExtensions(){
|
||||
chrome.tabs.query({url: 'chrome://extensions/*'},
|
||||
function(tabs) {
|
||||
@@ -192,9 +196,9 @@ export class Main{
|
||||
chrome.tabs.update(tabs[0].id, {active: true});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Pagesigner's popup has been clicked
|
||||
|
||||
|
||||
// Pagesigner's popup has been clicked
|
||||
async popupProcess(){
|
||||
if (this.notarization_in_progress) {
|
||||
chrome.runtime.sendMessage({
|
||||
@@ -242,13 +246,14 @@ export class Main{
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// checkIfTabOpened checks if a "tab" containing window[property] has signalled that it has
|
||||
// been loaded and its message listeners are ready
|
||||
// openTabs is an optional array of tab ids to skip when checking because they were already
|
||||
// opened BEFORE we initiated the opening of "tab"
|
||||
// we abort if tab doesn't open after 5 secs.
|
||||
checkIfTabOpened(tab, property, openTabs){
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
openTabs = openTabs || [];
|
||||
let isTimeoutTriggered = false;
|
||||
setTimeout(function(){
|
||||
@@ -260,7 +265,7 @@ export class Main{
|
||||
function tryAgain(){
|
||||
console.log('checking if '+property+' is ready...');
|
||||
const views = chrome.extension.getViews();
|
||||
// sometimes the View for the newly opened tab may not yet be available
|
||||
// sometimes the View for the newly opened tab may not yet be available
|
||||
// so we must wait a little longer
|
||||
for (const win of views){
|
||||
if (win[property] == undefined) continue;
|
||||
@@ -283,7 +288,7 @@ export class Main{
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
openFileChooser(){
|
||||
const myTabs = [];
|
||||
const views = chrome.extension.getViews();
|
||||
@@ -300,19 +305,19 @@ export class Main{
|
||||
// create a separate filechooser.html because as soon as a file is chosen, the
|
||||
// same tab will be reused as a viewer.
|
||||
// Otherwise we would have to close filechooser tab and instantly open a
|
||||
// viewer tab with an unpleasant flicker.
|
||||
// viewer tab with an unpleasant flicker.
|
||||
const url = chrome.extension.getURL('ui/html/viewer.html#filechooser');
|
||||
chrome.tabs.create({url: url}, async function(t){
|
||||
const win = await that.checkIfTabOpened(t, 'isViewer', myTabs);
|
||||
win.viewer.showFileChooser();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
openManager() {
|
||||
const url = chrome.extension.getURL('ui/html/manager.html');
|
||||
for (const win of chrome.extension.getViews()){
|
||||
if (win.isManager){
|
||||
if (win.isManager){
|
||||
// re-focus tab if manager already open
|
||||
console.log('will refocus manger tab', win.tabid);
|
||||
chrome.tabs.update(win.tabid, {active: true});
|
||||
@@ -324,8 +329,8 @@ export class Main{
|
||||
that.checkIfTabOpened(t, 'is_manager');
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
async prepareNotarization(after_click) {
|
||||
if (!this.trustedOracleReady) {
|
||||
this.sendAlert({
|
||||
@@ -334,16 +339,16 @@ export class Main{
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
let clickTimeout = null;
|
||||
const that = this;
|
||||
|
||||
|
||||
const active_tab = await new Promise(function(resolve) {
|
||||
chrome.tabs.query({active: true}, function(t) {
|
||||
resolve(t[0]);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
if (! active_tab.url.startsWith('https://')) {
|
||||
this.sendAlert({
|
||||
'title': 'PageSigner error.',
|
||||
@@ -351,7 +356,7 @@ export class Main{
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
if (after_click){
|
||||
const url = chrome.extension.getURL('ui/img/arrow24.png');
|
||||
chrome.browserAction.setIcon({path: url});
|
||||
@@ -365,8 +370,8 @@ export class Main{
|
||||
});
|
||||
}, 30 * 1000);
|
||||
}
|
||||
|
||||
let oBR_details;
|
||||
|
||||
let oBR_details;
|
||||
const oBR_handler = function(details){
|
||||
console.log('in onBeforeRequest', details);
|
||||
chrome.webRequest.onBeforeRequest.removeListener(oBR_handler);
|
||||
@@ -377,15 +382,15 @@ export class Main{
|
||||
urls: ['<all_urls>'],
|
||||
tabId: active_tab.id,
|
||||
types: ['main_frame', 'xmlhttprequest']
|
||||
// types: ["main_frame", "sub_frame", "stylesheet", "script",
|
||||
// types: ["main_frame", "sub_frame", "stylesheet", "script",
|
||||
// "image", "font", "object", "xmlhttprequest", "ping", "csp_report", "media", "websocket", "other"]
|
||||
}, ['requestBody']);
|
||||
|
||||
|
||||
let oBSH_details;
|
||||
const oBSH_handler = function(details){
|
||||
console.log('in onBeforeSendHeaders', details);
|
||||
chrome.webRequest.onBeforeSendHeaders.removeListener(oBSH_handler);
|
||||
oBSH_details = details;
|
||||
oBSH_details = details;
|
||||
console.log(oBR_details, oBSH_details);
|
||||
};
|
||||
const extraInfoSpec = ['requestHeaders'];
|
||||
@@ -396,10 +401,10 @@ export class Main{
|
||||
tabId: active_tab.id,
|
||||
types: ['main_frame', 'xmlhttprequest']
|
||||
}, extraInfoSpec);
|
||||
|
||||
|
||||
|
||||
|
||||
// wait for the request to pass oBR and oBHS and reach onSendHeaders
|
||||
await new Promise(function(resolve) {
|
||||
await new Promise(function(resolve) {
|
||||
const oSH_handler = function(details){
|
||||
console.log('in onSendHeaders');
|
||||
chrome.webRequest.onSendHeaders.removeListener(oSH_handler);
|
||||
@@ -411,18 +416,18 @@ export class Main{
|
||||
tabId: active_tab.id,
|
||||
types: ['main_frame', 'xmlhttprequest']
|
||||
});
|
||||
|
||||
|
||||
// if not Notarize After Click mode,
|
||||
// reload current tab in order to trigger the HTTP request
|
||||
if (!that.waiting_for_click) chrome.tabs.reload(active_tab.id);
|
||||
// otherwise just wait for the user to click smth and trigger onBeforeRequest
|
||||
});
|
||||
|
||||
|
||||
if (this.waiting_for_click) {
|
||||
clearTimeout(clickTimeout);
|
||||
this.waiting_for_click = false;
|
||||
}
|
||||
|
||||
|
||||
if (oBR_details.url !== oBSH_details.url) return;
|
||||
if (oBR_details.requestId !== oBSH_details.requestId) return;
|
||||
if (oBR_details.method == 'POST') {
|
||||
@@ -451,15 +456,15 @@ export class Main{
|
||||
this.loadDefaultIcon();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
getHeaders(obj) {
|
||||
console.log('headers are', obj);
|
||||
const x = obj.url.split('/');
|
||||
const host = x[2].split(':')[0];
|
||||
x.splice(0, 3);
|
||||
const resource_url = x.join('/');
|
||||
|
||||
|
||||
const http_version = globals.useHTTP11 ? ' HTTP/1.1':' HTTP/1.0';
|
||||
let headers = obj.method + ' /' + resource_url + http_version + '\r\n';
|
||||
// Chrome doesnt add Host header. Firefox does
|
||||
@@ -476,7 +481,7 @@ export class Main{
|
||||
}
|
||||
if (obj.method == 'GET') {
|
||||
headers += '\r\n';
|
||||
}
|
||||
}
|
||||
else if (obj.method == 'POST') {
|
||||
let content;
|
||||
if (obj.requestBody.raw != undefined) {
|
||||
@@ -489,7 +494,7 @@ export class Main{
|
||||
content += key + '=' + obj.requestBody.formData[key] + '&';
|
||||
}
|
||||
// get rid of the last &
|
||||
content = content.slice(0,-1);
|
||||
content = content.slice(0, -1);
|
||||
// Chrome doesn't expose Content-Length which chokes nginx
|
||||
headers += 'Content-Length: ' + parseInt(content.length) + '\r\n\r\n';
|
||||
headers += content;
|
||||
@@ -506,16 +511,16 @@ export class Main{
|
||||
'port': port
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
async getPGSG(sid){
|
||||
const blob = await getSessionBlob(sid);
|
||||
return blob.pgsg;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// processMessages is the main entrypoint for all extension's logic
|
||||
async processMessages(data) {
|
||||
if (data.destination !== 'extension') return;
|
||||
@@ -534,9 +539,8 @@ export class Main{
|
||||
this.importPgsgAndShow(new Uint8Array(data.args.data));
|
||||
break;
|
||||
case 'export':
|
||||
const pgsg = await this.getPGSG(data.args.dir);
|
||||
const value = await getSession(data.args.dir);
|
||||
this.sendToManager({'pgsg': JSON.stringify(pgsg), 'name': value.sessionName}, 'export');
|
||||
this.sendToManager({'pgsg': JSON.stringify(await this.getPGSG(data.args.dir)),
|
||||
'name': await getSession(data.args.dir).sessionName}, 'export');
|
||||
break;
|
||||
case 'notarize':
|
||||
this.prepareNotarization(false);
|
||||
@@ -585,9 +589,9 @@ export class Main{
|
||||
break;
|
||||
case 'removeNotary':
|
||||
await setPref('trustedOracle', {});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
async startNotarization(headers, server, port) {
|
||||
this.notarization_in_progress = true;
|
||||
this.pm.init();
|
||||
@@ -624,23 +628,23 @@ export class Main{
|
||||
const url = chrome.extension.getURL('ui/img/icon.png');
|
||||
chrome.browserAction.setIcon({path: url});
|
||||
}
|
||||
|
||||
|
||||
// opens a tab showing the session. sid is a unique session id
|
||||
// creation time is sid.
|
||||
async showSession (sid){
|
||||
await this.openViewer(sid);
|
||||
this.sendSessions( await getAllSessions()); // refresh manager
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
openPythonScript(){
|
||||
const url = chrome.extension.getURL('pagesigner.py');
|
||||
chrome.tabs.create({url: url}, function(t){
|
||||
chrome.tabs.executeScript(t.id, {file: ('ui/python_script_header.js')});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
async verifyPgsg(json){
|
||||
if (json['version'] == 6){
|
||||
return await this.verifyPgsgV6(json);
|
||||
@@ -649,8 +653,8 @@ export class Main{
|
||||
throw ('Unrecognized version of the imported pgsg file.');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// Serialize fields of json object
|
||||
serializePgsg(json){
|
||||
// b64encode every field
|
||||
@@ -698,7 +702,7 @@ export class Main{
|
||||
}
|
||||
return newjson;
|
||||
}
|
||||
|
||||
|
||||
// verifyPgsgV6 verifies a decoded pgsg
|
||||
// obj is pgsg with values b64decoded and certificates deserialized into Certificate class
|
||||
async verifyPgsgV6(obj) {
|
||||
@@ -707,7 +711,7 @@ export class Main{
|
||||
|
||||
// Step 1. Verify URLFetcher attestation doc and get notary's pubkey
|
||||
if (! globals.useNotaryNoSandbox){
|
||||
// by default we verify that the notary is indeed a properly sandboxed machine
|
||||
// by default we verify that the notary is indeed a properly sandboxed machine
|
||||
var URLFetcherDoc = obj['URLFetcher attestation'];
|
||||
var notaryPubkey = await verifyNotary(URLFetcherDoc);
|
||||
}
|
||||
@@ -726,7 +730,7 @@ export class Main{
|
||||
const certPath = vcRV.certificatePath;
|
||||
const commonName = getCommonName(certPath[0]);
|
||||
const altNames = getAltNames(certPath[0]);
|
||||
|
||||
|
||||
// Step 3. Verify that RSA signature over ephemeral EC key corresponds to the public key
|
||||
// from the leaf certificate
|
||||
const serverEcPubkey = obj['server pubkey for ECDHE'];
|
||||
@@ -735,13 +739,13 @@ export class Main{
const sr = obj['server random'];
const vepsRV = await TLS.verifyECParamsSig(certPath[0], serverEcPubkey, rsaSig, cr, sr);
assert (vepsRV === true);

// Step 4. Combine PMS shares and derive expanded keys.
const P256prime = 2n**256n - 2n**224n + 2n**192n + 2n**96n - 1n;
// we may need to reduce mod prime if the sum overflows the prime
const pms = int2ba((ba2int(obj['notary PMS share']) + ba2int(obj['client PMS share'])) % P256prime, 32);
const [cwk, swk, civ, siv] = await getExpandedKeys(pms, cr, sr);

// Step 5. Check that expanded keys match key shares
const clientCwkShare = obj['client client_write_key share'];
const clientCivShare = obj['client client_write_iv share'];
@@ -755,7 +759,7 @@ export class Main{
assert(eq( xor(notaryCivShare, clientCivShare), civ));
assert(eq( xor(notarySwkShare, clientSwkShare), swk));
assert(eq( xor(notarySivShare, clientSivShare), siv));
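To make Steps 4 and 5 above concrete: the pre-master secret is additively shared between client and notary modulo the P-256 field prime, while the expanded TLS keys are XOR-shared, so recombination is one modular addition plus a few byte-wise XORs. A self-contained sketch (plain BigInt/Uint8Array rather than PageSigner's helpers):

// Sketch of the share recombination checked in Steps 4 and 5 above.
const P256_PRIME = 2n ** 256n - 2n ** 224n + 2n ** 192n + 2n ** 96n - 1n;

function bytesToBigInt(bytes) {
  return bytes.reduce((acc, b) => (acc << 8n) + BigInt(b), 0n);
}

// PMS = (notary share + client share) mod p : additive sharing of the ECDH x-coordinate.
function combinePmsShares(notaryShare, clientShare) {
  return (bytesToBigInt(notaryShare) + bytesToBigInt(clientShare)) % P256_PRIME;
}

// Each expanded key (cwk, civ, swk, siv) is XOR-shared: combining is a byte-wise XOR.
function combineKeyShares(notaryShare, clientShare) {
  return notaryShare.map((b, i) => b ^ clientShare[i]);
}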
// Step 6. Check session signature
|
||||
const commitHash = await TLSNotarySession.computeCommitHash(obj['server response records']);
|
||||
const keyShareHash = await sha256(concatTA(clientCwkShare, clientCivShare,
|
||||
@@ -775,29 +779,29 @@ export class Main{
|
||||
obj['notarization time']);
|
||||
|
||||
assert(await verifySig(
|
||||
obj['ephemeral pubkey'],
|
||||
obj['session signature'],
|
||||
obj['ephemeral pubkey'],
|
||||
obj['session signature'],
|
||||
tbs1) === true,
|
||||
'Session signature verification failed.');
|
||||
|
||||
// Step 7. Verify ephemeral key
|
||||
const tbs2 = concatTA(
|
||||
obj['ephemeral valid from'],
|
||||
obj['ephemeral valid until'],
|
||||
obj['ephemeral valid from'],
|
||||
obj['ephemeral valid until'],
|
||||
obj['ephemeral pubkey']);
|
||||
assert(await verifySig(
|
||||
pubkeyPEM2raw(notaryPubkey),
|
||||
obj['ephemeral signed by master key'],
|
||||
obj['ephemeral signed by master key'],
|
||||
tbs2) === true,
|
||||
'Master key signature verification failed.');
|
||||
// notarization time must be within the time of ephemeral key validity
|
||||
assert(
|
||||
ba2int(obj['ephemeral valid from']) <
|
||||
ba2int(obj['notarization time']) <
|
||||
ba2int(obj['ephemeral valid from']) <
|
||||
ba2int(obj['notarization time']) <
|
||||
ba2int(obj['ephemeral valid until']));
|
||||
|
||||
|
||||
// Step 8. Decrypt client request and make sure that "Host" HTTP header corresponds to
|
||||
// Step 8. Decrypt client request and make sure that "Host" HTTP header corresponds to
|
||||
// Common Name from the leaf certificate.
|
||||
const ghashInputs = [];
|
||||
const blockCount = obj['client request ciphertext'].length/16;
|
||||
@@ -807,10 +811,10 @@ export class Main{
|
||||
// aad is additional authenticated data
|
||||
const aad = ghashInputs[0];
|
||||
// TLS record seq number must be 1
|
||||
assert(eq(aad.slice(0,8), int2ba(1, 8)));
|
||||
assert(eq(aad.slice(0, 8), int2ba(1, 8)));
|
||||
// TLS record type must be "application data"
|
||||
assert(eq(aad.slice(8,11), new Uint8Array([23,3,3])));
|
||||
const recordLen = ba2int(aad.slice(11,13));
|
||||
assert(eq(aad.slice(8, 11), new Uint8Array([23, 3, 3])));
|
||||
const recordLen = ba2int(aad.slice(11, 13));
|
||||
const plaintextBlocks = [];
|
||||
const ciphertext = ghashInputs.slice(1, ghashInputs.length-1);
|
||||
for (let i=0; i < ciphertext.length; i++){
|
||||
@@ -841,14 +845,14 @@ export class Main{
|
||||
}
|
||||
}
|
||||
assert(isFound, 'Host not found in certificate');
|
||||
|
||||
|
||||
// Step 9. Check authentication tags of server response and decrypt it.
|
||||
const responseRecords = await decrypt_tls_responseV6(
|
||||
obj['server response records'], swk, siv);
|
||||
const response = ba2str(concatTA(...responseRecords));
|
||||
return [host, request, response, date.toGMTString()];
|
||||
return [host, request, response, date.toGMTString()];
|
||||
}
|
||||
|
||||
|
||||
async importPgsgAndShow(importedData) {
|
||||
console.log('importedData', importedData);
|
||||
try {
|
||||
@@ -879,8 +883,8 @@ export class Main{
|
||||
await saveNewSession (date, host, request, response, serializedPgsg, 'imported');
|
||||
this.showSession(date);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
async openViewer(sid) {
|
||||
const data = await getSession(sid);
|
||||
const blob = await getSessionBlob(sid);
|
||||
@@ -889,10 +893,10 @@ export class Main{
|
||||
const request = blob.request;
|
||||
const response = blob.response;
|
||||
let tabId = null;// the id of the tab that we will be sending to
|
||||
|
||||
|
||||
const url = chrome.extension.getURL('ui/html/viewer.html');
|
||||
await chrome.webRequest.handlerBehaviorChanged(); // flush the in-memory cache
|
||||
|
||||
|
||||
// reuse a tab if viewer was already open because we were importing file
|
||||
// this tab must be still active
|
||||
const active_tab = await new Promise(function(resolve) {
|
||||
@@ -911,7 +915,7 @@ export class Main{
|
||||
}
|
||||
}
|
||||
tabId = active_tab.id;
|
||||
|
||||
|
||||
if (!isImportTab){
|
||||
const myTabs = [];
|
||||
for (const win of views ){
|
||||
@@ -925,9 +929,9 @@ export class Main{
|
||||
console.log('checkIfTabOpened resolved');
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
console.log('send to viewer');
|
||||
// the tab is either an already opened import tab or a fully-loaded new viewer tab
|
||||
// We already checked that the new viewer's tab DOM was loaded. Proceed to send the data
|
||||
@@ -943,13 +947,13 @@ export class Main{
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
async openDetails(sid, isEditor) {
|
||||
const data = await getSession(sid);
|
||||
const blob = await getSessionBlob(sid);
|
||||
const url = chrome.extension.getURL('ui/html/rawviewer.html');
|
||||
let tabId = null; // id of the tab to which we will send the data
|
||||
|
||||
|
||||
const myTabs = [];
|
||||
const myViews = chrome.extension.getViews();
|
||||
for (const win of myViews ){
|
||||
@@ -963,8 +967,8 @@ export class Main{
|
||||
await that.checkIfTabOpened(t, 'isRawViewer', myTabs);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
chrome.runtime.sendMessage({
|
||||
destination: 'rawviewer',
|
||||
message: isEditor ? 'edit' : 'show',
|
||||
@@ -975,10 +979,10 @@ export class Main{
|
||||
sessionId: sid,
|
||||
serverName: data.serverName
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
sendSessions(sessions) {
|
||||
const rows = [];
|
||||
for (const session of sessions){
|
||||
@@ -993,7 +997,7 @@ export class Main{
|
||||
}
|
||||
this.sendToManager(rows);
|
||||
}
|
||||
|
||||
|
||||
sendToManager(data, command) {
|
||||
console.log('sending sendToManager ', data);
|
||||
if (!command) command = 'payload'; // commands can be: payload, export
|
||||
@@ -1004,7 +1008,7 @@ export class Main{
|
||||
payload: data
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// for some pages we cant inject js/css, use the ugly alert
|
||||
uglyAlert(alertData) {
|
||||
const url = chrome.extension.getURL('ui/img/icon_error.png');
|
||||
@@ -1013,7 +1017,7 @@ export class Main{
|
||||
});
|
||||
this.popupError = alertData;
|
||||
}
|
||||
|
||||
|
||||
sendAlert(alertData) {
|
||||
const that = this;
|
||||
chrome.tabs.query({active: true},
|
||||
|
||||
@@ -1,3 +1,5 @@
/* global chrome */

// class ProgressMonitor receives progress information about client's garbling,
// evaluation, blob upload, blob download. It dispatches progress status messages
// periodically or when queried.
@@ -48,7 +50,7 @@ export class ProgressMonitor{
// destroy de-registers listeners
// doesn't do anything because of what seems like a Chrome bug.
destroy(){
// TODO it seems like Chrome does not remove onMessage listener
chrome.runtime.onMessage.removeListener(this.listener);
}
}
@@ -1,3 +1,5 @@
/* global chrome*/

// The only way to determine if the server is done sending data is to check that our receiving
// buffer has nothing but complete TLS records i.e. that there are no incomplete TLS records
// However it was observed that in cases when getting e.g. zip files, some servers first send HTTP header as one
@@ -21,12 +23,12 @@ export class Socket {
// connect will throw if we couldnt establish a connection to the server for this long
this.connectTimeout = 5 * 1000;
// recv() will reject if no data was seen for this long
this.noDataTimeout = 5 * 1000;
// close the socket after this time, even if it is in the middle of receiving data
this.lifeTime = 40 * 1000;
// delay after which we make a final check of the receiving buffer and if there was no data
// from the server, then we consider the data transmission finished
this.delayBeforeFinalIteration = 500;
this.wasClosed = false;
this.backendPort = 20022;
}
@@ -41,7 +43,7 @@ export class Socket {
|
||||
return;
|
||||
}, that.connectTimeout);
|
||||
|
||||
const msg = {'command': 'connect','args': {'name': that.name,'port': that.port},'uid': that.uid};
|
||||
const msg = {'command': 'connect', 'args': {'name': that.name, 'port': that.port}, 'uid': that.uid};
|
||||
if (globals.usePythonBackend){
|
||||
const url = 'http://127.0.0.1:' + that.backendPort;
|
||||
const payload = JSON.stringify(msg);
|
||||
@@ -66,7 +68,7 @@ export class Socket {
|
||||
});
|
||||
|
||||
// we need to access runtime.lastError to prevent Chrome from complaining
|
||||
// about unchecked error
|
||||
// about unchecked error
|
||||
chrome.runtime.lastError;
|
||||
clearTimeout(timer);
|
||||
if (response == undefined){
|
||||
@@ -82,7 +84,7 @@ export class Socket {
|
||||
}, that.lifeTime);
|
||||
// endless data fetching loop for the lifetime of this Socket
|
||||
that.fetchLoop();
|
||||
return 'ready';
|
||||
return 'ready';
|
||||
}
|
||||
|
||||
async send(data_in) {
|
||||
@@ -127,7 +129,7 @@
that.fetchLoop();
}, 100);
}

// fetchLoop has built up the recv buffer
// check if there are complete records in the buffer, return them if yes or wait some more if no
recv (is_handshake = false) {
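The completeness check that recv() relies on (see also the comment at the top of this file) can be pictured as walking the buffered bytes one TLS record at a time: every record starts with a 5-byte header whose last two bytes give the payload length, so the buffer is complete exactly when the walk ends on a record boundary. A minimal sketch, not PageSigner's actual parser:

// Sketch: returns true if buf (Uint8Array) contains only whole TLS records.
function hasOnlyCompleteRecords(buf) {
  let offset = 0;
  while (offset < buf.length) {
    if (offset + 5 > buf.length) return false;                   // truncated record header
    const payloadLen = (buf[offset + 3] << 8) | buf[offset + 4]; // big-endian length field
    if (offset + 5 + payloadLen > buf.length) return false;      // truncated payload
    offset += 5 + payloadLen;                                    // jump to the next record header
  }
  return true; // ended exactly on a record boundary
}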
@@ -180,7 +182,7 @@ export class Socket {
|
||||
buf = rv.incomprecs;
|
||||
setTimeout(function() {check();}, 100);
|
||||
return;
|
||||
}
|
||||
}
|
||||
else {
|
||||
console.log('got complete records', that.uid);
|
||||
if (is_handshake) {
|
||||
@@ -205,7 +207,7 @@ export class Socket {
|
||||
|
||||
async close() {
|
||||
this.wasClosed = true;
|
||||
var msg = {'command': 'close','uid': this.uid};
|
||||
var msg = {'command': 'close', 'uid': this.uid};
|
||||
console.log('closing socket', this.uid);
|
||||
if (globals.usePythonBackend){
|
||||
await fetch('http://127.0.0.1:20022', {method:'POST', body: JSON.stringify(msg),
|
||||
@@ -250,10 +252,4 @@ export class Socket {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined'){ // we are in node.js environment
|
||||
module.exports={
|
||||
};
|
||||
}
|
||||
213 core/TLS.js
@@ -1,37 +1,12 @@
|
||||
/* global SocketNode */
|
||||
|
||||
import {concatTA, int2ba, sha256, str2ba, assert, ba2int, getRandom, sigDER2p1363,
|
||||
pubkeyPEM2raw, eq, xor, AESECBencrypt, b64urlencode} from './utils.js';
|
||||
import {verifyChain, checkCertSubjName} from './verifychain.js';
|
||||
import {Socket} from './Socket.js';
|
||||
|
||||
|
||||
export class TLS {
|
||||
// allHandshakes is a concatenation of all handshake messages up to this point.
|
||||
// This is only data visible at the handshake layer and does not include record layer headers
|
||||
allHandshakes;
|
||||
// certPath is an array of certificates from the server arranged by pkijs in the ascending
|
||||
// order from leaf to root
|
||||
certPath;
|
||||
// cke is TLS handshake's Client Key Exchange message
|
||||
cke;
|
||||
clientRandom;
|
||||
commPrivkey;
|
||||
commSymmetricKey;
|
||||
headers;
|
||||
isMhm; // multiple handshake messages
|
||||
mustVerifyCert;
|
||||
notaryWillEncryptRequest;
|
||||
options;
|
||||
port;
|
||||
rsaSig;
|
||||
serverRandom;
|
||||
sckt;
|
||||
serverEcPubkey;
|
||||
secret; // for debug purposes only
|
||||
// sid is id for this notarization session
|
||||
sid;
|
||||
serverName;
|
||||
useMaxFragmentLength;
|
||||
|
||||
export class TLS {
|
||||
constructor (serverName, port, headers, options){
|
||||
this.serverName = serverName;
|
||||
this.port = port;
|
||||
@@ -41,10 +16,38 @@ export class TLS {
|
||||
this.useMaxFragmentLength = options.useMaxFragmentLength;
|
||||
this.notaryWillEncryptRequest = options.notaryWillEncryptRequest;
|
||||
this.mustVerifyCert = options.mustVerifyCert;
|
||||
// allHandshakes is a concatenation of all handshake messages up to this point.
|
||||
// This is only data visible at the handshake layer and does not include record layer headers
|
||||
this.allHandshakes;
|
||||
// certPath is an array of certificates from the server arranged by pkijs in the ascending
|
||||
// order from leaf to root
|
||||
this.certPath;
|
||||
// cke is TLS handshake's Client Key Exchange message
|
||||
this.cke;
|
||||
this.clientRandom;
|
||||
this.commPrivkey;
|
||||
this.commSymmetricKey;
|
||||
this.headers;
|
||||
this.isMhm; // multiple handshake messages
|
||||
this.mustVerifyCert;
|
||||
this.notaryWillEncryptRequest;
|
||||
this.options;
|
||||
this.port;
|
||||
this.rsaSig;
|
||||
this.serverRandom;
|
||||
this.sckt;
|
||||
this.serverEcPubkey;
|
||||
this.secret; // for debug purposes only
|
||||
// sid is id for this notarization session
|
||||
this.sid;
|
||||
this.serverName;
|
||||
this.useMaxFragmentLength;
|
||||
|
||||
|
||||
if (typeof(window) !== 'undefined'){
|
||||
this.sckt = new Socket(serverName, port);
|
||||
} else {
|
||||
// in node SocketNode was made global
|
||||
// in node SocketNode was made global
|
||||
this.sckt = new SocketNode(serverName, port);
|
||||
}
|
||||
}
|
||||
@@ -56,7 +59,7 @@ export class TLS {
|
||||
tmp.push(0x00, 0x02); // Supported Groups List Length
|
||||
tmp.push(0x00, 0x17); // Supported Group: secp256r1
|
||||
const supported_groups_extension = new Uint8Array(tmp);
|
||||
|
||||
|
||||
tmp = [];
|
||||
tmp.push(0x00, 0x0d); // Type signature_algorithms
|
||||
tmp.push(0x00, 0x04); // Length
|
||||
@@ -73,7 +76,7 @@ export class TLS {
|
||||
tmp.push(...Array.from(int2ba(server_name.length, 2))); // Server Name Length
|
||||
tmp.push(...Array.from(server_name));
|
||||
const server_name_extension = new Uint8Array(tmp);
|
||||
|
||||
|
||||
tmp = [];
|
||||
if (this.useMaxFragmentLength){
|
||||
tmp.push(0x00, 0x01); // Type: max_fragment_length
|
||||
@@ -83,12 +86,12 @@ export class TLS {
|
||||
tmp.push(0x04);
|
||||
}
|
||||
const max_fragment_length_extension = new Uint8Array(tmp);
|
||||
|
||||
|
||||
const extlen = supported_groups_extension.length + signature_algorithm_extension.length +
|
||||
server_name_extension.length + max_fragment_length_extension.length;
|
||||
|
||||
|
||||
tmp = [];
|
||||
tmp.push(0x01); // Handshake type: Client Hello
|
||||
tmp.push(0x01); // Handshake type: Client Hello
|
||||
tmp.push(...int2ba(extlen + 43, 3) ); // Length
|
||||
tmp.push(0x03, 0x03); // Version: TLS 1.2
|
||||
this.clientRandom = getRandom(32);
|
||||
@@ -105,23 +108,23 @@ export class TLS {
|
||||
signature_algorithm_extension,
|
||||
server_name_extension,
|
||||
max_fragment_length_extension);
|
||||
|
||||
|
||||
this.allHandshakes = ch;
|
||||
|
||||
|
||||
tmp = [];
|
||||
tmp.push(0x16); // Type: Handshake
|
||||
tmp.push(0x03, 0x03); // Version: TLS 1.2
|
||||
tmp.push(...int2ba(ch.length, 2)); // Length
|
||||
tmp.push(...int2ba(ch.length, 2)); // Length
|
||||
const tls_record_header = new Uint8Array(tmp);
|
||||
|
||||
return concatTA(tls_record_header, ch);
|
||||
}
|
||||
|
||||
async verifyNotarySig(sigDER, pubKey, signed_data, options){
|
||||
const isRaw = (options == 'raw') ? true : false;
|
||||
const isRaw = (options == 'raw') ? true : false;
|
||||
const sig_p1363 = sigDER2p1363(sigDER);
|
||||
const notaryPubkey = isRaw ? pubKey : pubkeyPEM2raw(pubKey);
|
||||
|
||||
|
||||
const pubkeyCryptoKey = await crypto.subtle.importKey(
|
||||
'raw', notaryPubkey.buffer, {name: 'ECDSA', namedCurve:'P-256'}, true, ['verify']);
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -130,43 +133,6 @@ export class TLS {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
async parse_commpk_commpksig_cpk(dataEnc){
|
||||
// Notary's EC pubkey (to derive ECDH secret for communication) is not encrypted
|
||||
const comm_pk = dataEnc.slice(0,65);
|
||||
// for debug purposes only
|
||||
this.secret = dataEnc.slice(65,97);
|
||||
|
||||
this.commSymmetricKey = await getECDHSecret(comm_pk, this.commPrivkey);
|
||||
const data = await this.decryptFromNotary(
|
||||
this.commSymmetricKey,
|
||||
dataEnc.slice(65+this.secret.length));
|
||||
|
||||
let o = 0; // offset
|
||||
// get signature over communication pubkey
|
||||
const ssrvLen = ba2int(data.slice(o,o+=1));
|
||||
const signingServerRetval = data.slice(o, o+=ssrvLen);
|
||||
const cpk = data.slice(o,o+=65); // Client's pubkey for ECDH
|
||||
// parse signing server's return value
|
||||
o = 0;
|
||||
const sessionSigLen = ba2int(signingServerRetval.slice(o, o+=1));
|
||||
const sessionSig = signingServerRetval.slice(o, o+=sessionSigLen);
|
||||
const ephemKeySigLen = ba2int(signingServerRetval.slice(o, o+=1));
|
||||
const ephemKeySig = signingServerRetval.slice(o, o+=ephemKeySigLen);
|
||||
const ephemPubKey = signingServerRetval.slice(o, o+=65);
|
||||
const ephemValidFrom = signingServerRetval.slice(o, o+=4);
|
||||
const ephemValidUntil = signingServerRetval.slice(o, o+=4);
|
||||
|
||||
// check signature
|
||||
const to_be_signed = await sha256(comm_pk);
|
||||
assert(await this.verifyNotarySig(sessionSig, ephemPubKey, to_be_signed, 'raw') == true);
|
||||
// the ephemeral key with its validity time range is signed by master key
|
||||
const ephemTBS = await sha256(concatTA(ephemPubKey, ephemValidFrom, ephemValidUntil));
|
||||
assert(await this.verifyNotarySig(ephemKeySig, this.notary.pubkeyPEM, ephemTBS) == true);
|
||||
this.checkEphemKeyExpiration(ephemValidFrom, ephemValidUntil);
|
||||
return cpk;
|
||||
}
|
||||
|
||||
parseServerHello(s){
|
||||
let p = 0;
|
||||
assert(eq(s.slice(p, p+=1), [0x02])); // Server Hello
|
||||
@@ -177,8 +143,8 @@ export class TLS {
|
||||
const sidlen = ba2int(s.slice(p, p+=1));
|
||||
if (sidlen > 0){
|
||||
p+=sidlen; // 32 bytes of session ID, if any
|
||||
}
|
||||
assert(eq(s.slice(p, p+=2), [0xc0, 0x2f])); // Cipher Suite: TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
|
||||
}
|
||||
assert(eq(s.slice(p, p+=2), [0xc0, 0x2f])); // Cipher Suite: TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
|
||||
assert(eq(s.slice(p, p+=1), [0x00])); // Compression Method: null (0)
|
||||
// May contain Extensions. We don't need to parse them
|
||||
}
|
||||
@@ -187,7 +153,7 @@ export class TLS {
|
||||
let p = 0;
|
||||
assert(eq(s.slice(p, p+=1), [0x0b])); // Certificate
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const clen = ba2int(s.slice(p, p+=3));
|
||||
const clen = ba2int(s.slice(p, p+=3));
|
||||
const certslen = ba2int(s.slice(p, p+=3));
|
||||
const certs_last_pos = p + certslen;
|
||||
const certs = [];
|
||||
@@ -205,10 +171,10 @@ export class TLS {
|
||||
}
|
||||
|
||||
async parseServerKeyExchange(s){
|
||||
let p = 0;
|
||||
let p = 0;
|
||||
assert(eq(s.slice(p, p+=1), [0x0c])); // Handshake Type: Server Key Exchange (12)
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const skelen = ba2int(s.slice(p, p+=3));
|
||||
const skelen = ba2int(s.slice(p, p+=3));
|
||||
// EC Diffie-Hellman Server Params
|
||||
assert(eq(s.slice(p, p+=1), [0x03])); // Curve Type: named_curve (0x03)
|
||||
assert(eq(s.slice(p, p+=2), [0x00, 0x17])); // Named Curve: secp256r1 (0x0017)
|
||||
@@ -218,7 +184,7 @@ export class TLS {
|
||||
assert(eq(s.slice(p, p+=2), [0x04, 0x01])); // #Signature Algorithm: rsa_pkcs1_sha256 (0x0401)
|
||||
const siglen = ba2int(s.slice(p, p+=2));
|
||||
this.rsaSig = s.slice(p, p+=siglen);
|
||||
|
||||
|
||||
const result = await TLS.verifyECParamsSig(this.certPath[0], this.serverEcPubkey, this.rsaSig, this.clientRandom, this.serverRandom);
|
||||
assert (result == true);
|
||||
return p;
|
||||
@@ -235,7 +201,7 @@ export class TLS {
|
||||
if (i == chunks-1){
|
||||
// last chunk may be smaller
|
||||
const rem = headers.length % chunkSize;
|
||||
thisChunkSize = (rem == 0) ? chunkSize : rem;
|
||||
thisChunkSize = (rem == 0) ? chunkSize : rem;
|
||||
}
|
||||
const explicit_nonce = int2ba(2+i, 8);
|
||||
// const explicit_nonce = getRandom(8)
|
||||
@@ -244,20 +210,20 @@ export class TLS {
|
||||
const aad = concatTA(
|
||||
int2ba(seq_num, 8),
|
||||
new Uint8Array([0x17, 0x03, 0x03]), // type 0x17 = Application data , TLS Version 1.2
|
||||
int2ba(thisChunkSize, 2)); // unencrypted data length in bytes
|
||||
int2ba(thisChunkSize, 2)); // unencrypted data length in bytes
|
||||
const cwkCryptoKey = await crypto.subtle.importKey(
|
||||
'raw',
|
||||
client_write_key.buffer,
|
||||
'AES-GCM',
|
||||
true,
|
||||
'raw',
|
||||
client_write_key.buffer,
|
||||
'AES-GCM',
|
||||
true,
|
||||
['encrypt', 'decrypt']);
|
||||
const ciphertext = await crypto.subtle.encrypt({
|
||||
name: 'AES-GCM',
|
||||
name: 'AES-GCM',
|
||||
iv: nonce.buffer,
|
||||
additionalData: aad.buffer},
|
||||
cwkCryptoKey,
|
||||
additionalData: aad.buffer},
|
||||
cwkCryptoKey,
|
||||
headers.slice(chunkSize*i, chunkSize*(i+1)).buffer);
|
||||
encReq.push(concatTA(explicit_nonce, new Uint8Array(ciphertext)));
|
||||
encReq.push(concatTA(explicit_nonce, new Uint8Array(ciphertext)));
|
||||
}
|
||||
return encReq;
|
||||
}
|
||||
@@ -271,7 +237,7 @@ export class TLS {
|
||||
}
|
||||
|
||||
async buildAndSendClientHello(){
|
||||
const ch = this.buildClientHello();
|
||||
const ch = this.buildClientHello();
|
||||
await this.sckt.connect();
|
||||
this.sckt.send(ch); // Send Client Hello
|
||||
}
|
||||
@@ -287,18 +253,18 @@ export class TLS {
|
||||
// some incompatible websites silently do not respond to ClientHello
|
||||
throw('Failed to receive a response from a webserver. Make sure your internet connection is working and try again. If this error persists, this may mean that the webserver is not compatible with PageSigner. Please contact the PageSigner devs about this issue.');
|
||||
}
|
||||
// restore normal timeout value
|
||||
// restore normal timeout value
|
||||
this.sckt.recv_timeout = 20 * 1000;
|
||||
|
||||
// Parse Server Hello, Certificate, Server Key Exchange, Server Hello Done
|
||||
if (eq(s.slice(0,2), [0x15, 0x03])){
|
||||
if (eq(s.slice(0, 2), [0x15, 0x03])){
|
||||
console.log('Server sent Alert instead of Server Hello');
|
||||
throw ('Unfortunately PageSigner is not yet able to notarize this website. Please contact the PageSigner devs about this issue.');
|
||||
}
|
||||
let p = 0; // current position in the byte stream
|
||||
assert(eq(s.slice(p, p+=1), [0x16])); // Type: Handshake
|
||||
assert(eq(s.slice(p, p+=2), [0x03, 0x03])); // Version: TLS 1.2
|
||||
const handshakelen = ba2int(s.slice(p, p+=2));
|
||||
const handshakelen = ba2int(s.slice(p, p+=2));
|
||||
// This may be the length of multiple handshake messages (MHM)
|
||||
// For MHM there is only 1 TLS Record layer header followed by Handshake layer messages
|
||||
// Without MHM, each handshake message has its own TLS Record header
|
||||
@@ -314,7 +280,7 @@ export class TLS {
|
||||
this.isMhm = true; }// multiple handshake messages
|
||||
let reclenMhm = 0;
|
||||
if (!this.isMhm){
|
||||
// read the TLS Record header
|
||||
// read the TLS Record header
|
||||
assert(eq(s.slice(p, p+=3), [0x16, 0x03, 0x03])); // Type: Handshake # Version: TLS 1.2
|
||||
reclenMhm = ba2int(s.slice(p, p+=2));
|
||||
}
|
||||
@@ -327,7 +293,7 @@ export class TLS {
|
||||
this.allHandshakes = concatTA(this.allHandshakes, c);
|
||||
const cParsedByted = await this.parseCertificate(c);
|
||||
p += cParsedByted;
|
||||
|
||||
|
||||
if (this.isMhm && (handshakelen+5 == p)){
|
||||
// another MHM header will follow, read its header
|
||||
assert(eq(s.slice(p, p+=1), [0x16])); // Type: Handshake
|
||||
@@ -337,7 +303,7 @@ export class TLS {
|
||||
}
|
||||
reclenMhm = 0;
|
||||
if (!this.isMhm){
|
||||
// read the TLS Record header
|
||||
// read the TLS Record header
|
||||
assert(eq(s.slice(p, p+=3), [0x16, 0x03, 0x03])); // Type: Handshake # Version: TLS 1.2
|
||||
reclenMhm = ba2int(s.slice(p, p+=2));
|
||||
}
|
||||
@@ -350,10 +316,10 @@ export class TLS {
|
||||
this.allHandshakes = concatTA(this.allHandshakes, ske);
|
||||
const skeParsedByted = await this.parseServerKeyExchange(ske);
|
||||
p += skeParsedByted;
|
||||
|
||||
|
||||
// Parse Server Hello Done
|
||||
if (!this.isMhm) {
|
||||
// read the TLS Record header
|
||||
// read the TLS Record header
|
||||
assert(eq(s.slice(p, p+=3), [0x16, 0x03, 0x03])); // Type: Handshake # Version: TLS 1.2
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const reclen = ba2int(s.slice(p, p+=2));
|
||||
@@ -368,7 +334,7 @@ export class TLS {
|
||||
// buildClientKeyExchange builds the TLS handshake's Client Key Exchange message
|
||||
// cpubBytes is client's pubkey for the ECDH
|
||||
async buildClientKeyExchange(cpubBytes){
|
||||
let tmp = [0x10]; // Handshake type: Client Key Exchange
|
||||
let tmp = [0x10]; // Handshake type: Client Key Exchange
|
||||
tmp.push(0x00, 0x00, 0x42); // Length
|
||||
tmp.push(0x41); // Pubkey Length: 65
|
||||
// 0x04 means compressed pubkey format
|
||||
@@ -392,10 +358,11 @@ export class TLS {
|
||||
return this.rsaSig;
|
||||
}
|
||||
|
||||
// TODO this description is incorrect
|
||||
// sendClientFinished accepts encrypted Client Finished (CF), auth tag for CF, verify_data for CF.
|
||||
// It then sends Client Key Exchange, Change Cipher Spec and encrypted Client Finished
|
||||
async sendClientFinished(encCF, tagCF){
|
||||
const cke_tls_record_header = new Uint8Array([0x16, 0x03, 0x03, 0x00, 0x46]); // Type: Handshake, Version: TLS 1.2, Length
|
||||
const cke_tls_record_header = new Uint8Array([0x16, 0x03, 0x03, 0x00, 0x46]); // Type: Handshake, Version: TLS 1.2, Length
|
||||
const ccs = new Uint8Array([0x14, 0x03, 0x03, 0x00, 0x01, 0x01]);
|
||||
const client_finished = concatTA(int2ba(1, 8), encCF, tagCF);
|
||||
// Finished message of 40 (0x28) bytes length
|
||||
@@ -413,12 +380,12 @@ export class TLS {
|
||||
async receiveServerFinished(){
|
||||
const data = await this.sckt.recv(true);
|
||||
|
||||
if (eq(data.slice(0,2), [0x15, 0x03])){
|
||||
if (eq(data.slice(0, 2), [0x15, 0x03])){
|
||||
console.log('Server sent Alert instead of Server Finished');
|
||||
throw('Server sent Alert instead of Server Finished');
|
||||
}
|
||||
// Parse CCS and Server's Finished
|
||||
const ccs_server = data.slice(0,6);
|
||||
const ccs_server = data.slice(0, 6);
|
||||
assert(eq(ccs_server, [0x14, 0x03, 0x03, 0x00, 0x01, 0x01]));
|
||||
|
||||
let f = null; // server finished
|
||||
@@ -429,8 +396,8 @@ export class TLS {
|
||||
else {
|
||||
f = data.slice(6);
|
||||
}
|
||||
|
||||
assert (eq(f.slice(0,5), [0x16, 0x03, 0x03, 0x00, 0x28]));
|
||||
|
||||
assert (eq(f.slice(0, 5), [0x16, 0x03, 0x03, 0x00, 0x28]));
|
||||
const encSF = f.slice(5, 45); // encrypted Server Finished
|
||||
// There may be some extra data received after the Server Finished. We ignore it.
|
||||
return encSF;
|
||||
@@ -479,7 +446,7 @@ export class TLS {
|
||||
for (var i=0; i<encRecords.length; i++){
|
||||
var rec = encRecords[i];
|
||||
var numberOfBlocks = Math.ceil((rec.length-8-16) / 16);
|
||||
var nonce = rec.slice(0,8);
|
||||
var nonce = rec.slice(0, 8);
|
||||
var hashesOfEncCountersInRecord = [];
|
||||
var encCountersInRecord = [];
|
||||
for (var j=0; j<numberOfBlocks; j++){
|
||||
@@ -506,9 +473,9 @@ export class TLS {
|
||||
var p = 0; // position in the stream
|
||||
|
||||
while (p < s.length){
|
||||
if (! eq(s.slice(p,p+3), [0x17,0x03,0x03])){
|
||||
if (eq(s.slice(p,p+3), [0x15,0x03,0x03])){
|
||||
// if the alert is not the first record, then it is most likely a
|
||||
if (! eq(s.slice(p, p+3), [0x17, 0x03, 0x03])){
|
||||
if (eq(s.slice(p, p+3), [0x15, 0x03, 0x03])){
|
||||
// if the alert is not the first record, then it is most likely a
|
||||
// close_notify
|
||||
if (records.length == 0){
|
||||
console.log('Server sent Alert instead of response');
|
||||
@@ -520,8 +487,8 @@ export class TLS {
|
||||
throw('Server sent an unknown message');
|
||||
}
|
||||
}
|
||||
|
||||
p+=3;
|
||||
|
||||
p+=3;
|
||||
let reclen = ba2int(s.slice(p, p+=2));
|
||||
let record = s.slice(p, p+=reclen);
|
||||
records.push(record);
|
||||
@@ -535,9 +502,9 @@ export class TLS {
|
||||
const modulus = new Uint8Array(cert.subjectPublicKeyInfo.parsedKey.modulus.valueBlock.valueHex);
|
||||
// JSON web key format for public key with exponent 65537
|
||||
const jwk = {'kty':'RSA', 'use':'sig', 'e': 'AQAB', 'n': b64urlencode(modulus)};
|
||||
|
||||
|
||||
// 4 bytes of EC Diffie-Hellman Server Params + pubkey
|
||||
const to_be_signed = concatTA(cr, sr, new Uint8Array([0x03, 0x00, 0x17, 0x41]), ECpubkey);
|
||||
const to_be_signed = concatTA(cr, sr, new Uint8Array([0x03, 0x00, 0x17, 0x41]), ECpubkey);
|
||||
const rsa_pubkey = await crypto.subtle.importKey(
|
||||
'jwk',
|
||||
jwk,
|
||||
@@ -570,11 +537,11 @@ export async function decrypt_tls_responseV6(encRecords, key, IV){
|
||||
let tmp = [];
|
||||
seq_num += 1;
|
||||
tmp.push(...int2ba(seq_num, 8));
|
||||
tmp.push(...[0x17,0x03,0x03]); // type 0x17 = Application Data, TLS Version 1.2
|
||||
tmp.push(...[0x17, 0x03, 0x03]); // type 0x17 = Application Data, TLS Version 1.2
|
||||
// len(unencrypted data) == len (encrypted data) - len(explicit nonce) - len (auth tag)
|
||||
tmp.push(...int2ba(rec.length - 8 - 16, 2));
|
||||
const aad = new Uint8Array(tmp);// additional authenticated data
|
||||
const nonce = concatTA(IV, rec.slice(0,8));
|
||||
const nonce = concatTA(IV, rec.slice(0, 8));
|
||||
plaintext.push( new Uint8Array (await crypto.subtle.decrypt(
|
||||
{name: 'AES-GCM', iv: nonce.buffer, additionalData: aad.buffer},
|
||||
cryptoKey,
|
||||
@@ -596,9 +563,9 @@ export async function getExpandedKeys(preMasterSecret, cr, sr){
const a0 = seed;
const a1 = new Uint8Array (await crypto.subtle.sign('HMAC', Secret_CryptoKey, a0.buffer));
const a2 = new Uint8Array (await crypto.subtle.sign('HMAC', Secret_CryptoKey, a1.buffer));
const p1 = new Uint8Array (await crypto.subtle.sign('HMAC', Secret_CryptoKey, concatTA(a1, seed).buffer));
const p2 = new Uint8Array (await crypto.subtle.sign('HMAC', Secret_CryptoKey, concatTA(a2, seed).buffer));
const ms = concatTA(p1, p2).slice(0, 48);
const MS_CryptoKey = await crypto.subtle.importKey('raw', ms.buffer, {name: 'HMAC', hash:'SHA-256'}, true, ['sign']);

// Expand keys
@@ -606,10 +573,10 @@ export async function getExpandedKeys(preMasterSecret, cr, sr){
const ea0 = eseed;
const ea1 = new Uint8Array (await crypto.subtle.sign('HMAC', MS_CryptoKey, ea0.buffer));
const ea2 = new Uint8Array (await crypto.subtle.sign('HMAC', MS_CryptoKey, ea1.buffer));
const ep1 = new Uint8Array (await crypto.subtle.sign('HMAC', MS_CryptoKey, concatTA(ea1, eseed).buffer));
const ep2 = new Uint8Array (await crypto.subtle.sign('HMAC', MS_CryptoKey, concatTA(ea2, eseed).buffer));

const ek = concatTA(ep1, ep2).slice(0, 40);
// GCM doesnt need MAC keys
const client_write_key = ek.slice(0, 16);
const server_write_key = ek.slice(16, 32);
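For reference, the expansion above is the TLS 1.2 PRF (RFC 5246 P_SHA256) run for two rounds of HMAC, and for TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 the 40-byte key_block it produces is laid out as in this small note (not part of the commit):

// key_block layout for AES-128-GCM (RFC 5288): two 16-byte keys, then two 4-byte implicit IVs.
// A(0) = seed, A(i) = HMAC(secret, A(i-1)), P_SHA256 = HMAC(secret, A(1)||seed) || HMAC(secret, A(2)||seed) || ...
const KEY_BLOCK_LAYOUT = [
  ['client_write_key', 0, 16],
  ['server_write_key', 16, 32],
  ['client_write_IV', 32, 36],
  ['server_write_IV', 36, 40],
];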
|
||||
|
||||
@@ -1,145 +1,139 @@
|
||||
import {TWOPC} from './twopc/TWOPC.js';
|
||||
import {TLS} from './TLS.js';
|
||||
import {concatTA, sha256, assert, xor, str2ba} from './utils.js';
|
||||
|
||||
|
||||
// class TLSNotarySession impements one notarization session
|
||||
|
||||
// class TLSNotarySession impements one notarization session
|
||||
// (one client request followed by one server response) using the TLSNotary protocol.
|
||||
export class TLSNotarySession{
|
||||
constructor(server, port, request, notary, sessionOptions, circuits, progressMonitor){
|
||||
// twopc is an instance of class TWOPC. Used to speak to the notary.
|
||||
twopc;
|
||||
this.twopc = new TWOPC(notary, request.length, circuits, progressMonitor);
|
||||
// tls is an instance of class TLS. Used to speak to the webserver.
|
||||
tls;
|
||||
// probeTLS is used to probe the webserver to see if it supports TLSNotary,
|
||||
this.tls = new TLS(server, port, request, sessionOptions);
|
||||
// probeTLS is used to probe the webserver to see if it supports TLSNotary,
|
||||
// before we start any time-intensive 2PC
|
||||
probeTLS;
|
||||
options;
|
||||
request;
|
||||
notary;
|
||||
pm;
|
||||
constructor(server, port, request, notary, sessionOptions, circuits, progressMonitor){
|
||||
this.twopc = new TWOPC(notary, request.length, circuits, progressMonitor);
|
||||
this.tls = new TLS(server, port, request, sessionOptions);
|
||||
this.probeTLS = new TLS(server, port, request, sessionOptions);
|
||||
this.request = str2ba(request);
|
||||
this.notary = notary;
|
||||
this.pm = progressMonitor;
|
||||
this.probeTLS = new TLS(server, port, request, sessionOptions);
|
||||
this.request = str2ba(request);
|
||||
this.notary = notary;
|
||||
this.pm = progressMonitor;
|
||||
this.options = null;
|
||||
}
|
||||
|
||||
async start(){
|
||||
await this.probeTLS.buildAndSendClientHello();
|
||||
await this.probeTLS.receiveAndParseServerHello();
|
||||
await this.probeTLS.sckt.close();
|
||||
await this.twopc.init();
|
||||
await this.tls.buildAndSendClientHello();
|
||||
const serverEcPubkey = await this.tls.receiveAndParseServerHello();
|
||||
const serverX = serverEcPubkey.slice(1, 33);
|
||||
const serverY = serverEcPubkey.slice(33, 65);
|
||||
if ( this.pm) this.pm.update('last_stage', {'current': 3, 'total': 10});
|
||||
const [pmsShare, cpubBytes] = await this.twopc.getECDHShare(serverX, serverY);
|
||||
if ( this.pm) this.pm.update('last_stage', {'current': 4, 'total': 10});
|
||||
|
||||
await this.tls.buildClientKeyExchange(cpubBytes);
|
||||
const [cr, sr] = await this.tls.getRandoms();
|
||||
const [encCF, tagCF, vdCF] = await this.twopc.run(cr, sr, this.tls.getAllHandshakes(), pmsShare);
|
||||
// Finished (0x14) with length 12 (0x0c)
|
||||
    this.tls.updateAllHandshakes(concatTA(new Uint8Array([0x14, 0x00, 0x00, 0x0c]), vdCF));
    await this.tls.sendClientFinished(encCF, tagCF);
    const encSF = await this.tls.receiveServerFinished();
    await this.twopc.checkServerFinished(encSF, this.tls.getAllHandshakes());
    if (this.pm) this.pm.update('last_stage', {'current': 5, 'total': 10});

    const encCountersForRequest = await this.twopc.getEncryptedCounters();
    if (this.pm) this.pm.update('last_stage', {'current': 9, 'total': 10});
    const encRequestBlocks = this.encryptRequest(this.request, encCountersForRequest);
    const gctrBlocks = await this.twopc.getGctrBlocks();
    if (this.pm) this.pm.update('last_stage', {'current': 10, 'total': 10});
    const [ghashOutputs, ghashInputsBlob] = await this.twopc.getTagFromPowersOfH(encRequestBlocks);
    await this.tls.buildAndSendRequest(gctrBlocks, ghashOutputs, encRequestBlocks);
    const serverRecords = await this.tls.receiveServerResponse();
    await this.tls.sckt.close();

    const commitHash = await TLSNotarySession.computeCommitHash(serverRecords);
    const [cwkShare, civShare, swkShare, sivShare] = this.twopc.getKeyShares();
    const keyShareHash = await sha256(concatTA(cwkShare, civShare, swkShare, sivShare));
    const pmsShareHash = await sha256(pmsShare);
    const data5 = await this.twopc.send('commitHash', concatTA(
      commitHash,
      keyShareHash,
      pmsShareHash));
    this.twopc.destroy();

    let o = 0; // offset
    const signature = data5.slice(o, o+=64);
    const notaryPMSShare = data5.slice(o, o+=32);
    const notaryCwkShare = data5.slice(o, o+=16);
    const notaryCivShare = data5.slice(o, o+=4);
    const notarySwkShare = data5.slice(o, o+=16);
    const notarySivShare = data5.slice(o, o+=4);
    const timeBytes = data5.slice(o, o+=8);
    assert(data5.length == o);

    const [eKey, eValidFrom, eValidUntil, eSigByMasterKey] = this.twopc.getEphemeralKey();

    // convert certPath to DER.
    const certPath = this.tls.getCertPath();
    var certs = [];
    for (let cert of certPath){
      certs.push(new Uint8Array(cert.toSchema(true).toBER(false)));
    }

    return {
      'certificates': certs,
      'notarization time': timeBytes,
      'server RSA sig': this.tls.getRSAsignature(),
      'server pubkey for ECDHE': serverEcPubkey,
      'notary PMS share': notaryPMSShare,
      'client PMS share': pmsShare,
      'client random': cr,
      'server random': sr,
      'notary client_write_key share': notaryCwkShare,
      'notary client_write_iv share': notaryCivShare,
      'notary server_write_key share': notarySwkShare,
      'notary server_write_iv share': notarySivShare,
      'client client_write_key share': cwkShare,
      'client client_write_iv share': civShare,
      'client server_write_key share': swkShare,
      'client server_write_iv share': sivShare,
      'client request ciphertext': ghashInputsBlob,
      'server response records': serverRecords,
      'session signature': signature,
      'ephemeral pubkey': eKey,
      'ephemeral valid from': eValidFrom,
      'ephemeral valid until': eValidUntil,
      'ephemeral signed by master key': eSigByMasterKey,
    };
  }
  // encryptRequest encrypts (i.e. XORs) the plaintext request with encrypted counter blocks.
  // (This is how AES-GCM encryption works - first the counter blocks are AES-ECB-encrypted,
  // then the encrypted counter blocks are XORed with the plaintext to get the ciphertext.)
  encryptRequest(request, encCounterBlocks){
    assert(Math.ceil(request.length/16) === encCounterBlocks.length);
    const encRequestBlocks = [];
    for (let i=0; i < encCounterBlocks.length; i++){
      if (i == encCounterBlocks.length-1){
        // last block, make sure arrays are of the same length before xoring
        let lastBlockLen = this.request.length - i*16;
        encRequestBlocks.push(xor(encCounterBlocks[i].slice(0, lastBlockLen), this.request.slice(i*16)));
      }
      else {
        encRequestBlocks.push(xor(encCounterBlocks[i], this.request.slice(i*16, i*16+16)));
      }
    }
    return encRequestBlocks;
  }

  // computeCommitHash computes a hash over all TLS records with MACs
  static async computeCommitHash(encRecords){
    return await sha256(concatTA(...encRecords));
  }
}
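
The XOR step in encryptRequest above is ordinary counter-mode keystream encryption. A small standalone sketch of the same idea (editor's illustration, not part of this commit; xorBytes stands in for the xor helper from utils.js):

function xorBytes(a, b){
  const out = new Uint8Array(a.length);
  for (let i = 0; i < a.length; i++) out[i] = a[i] ^ b[i];
  return out;
}

// keystreamBlocks: one AES-ECB-encrypted 16-byte counter block per plaintext block
function ctrXor(plaintext, keystreamBlocks){
  const out = [];
  for (let i = 0; i < keystreamBlocks.length; i++){
    const chunk = plaintext.slice(i*16, i*16 + 16); // the last chunk may be shorter than 16 bytes
    out.push(xorBytes(keystreamBlocks[i].slice(0, chunk.length), chunk));
  }
  return out;
}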
@@ -2,8 +2,8 @@ export const globals = {
  // defaultNotaryIP/Port is the default IP address/port of the notary server.
  // If this IP address becomes unavailable, Pagesigner will query backupUrl for
  // a new notary's IP address and will save the new IP address in the preferences.
-  // defaultNotaryIP: '127.0.0.1',
-  defaultNotaryIP: '3.236.244.77',
+  defaultNotaryIP: '127.0.0.1',
+  //defaultNotaryIP: '3.236.244.77',
  defaultNotaryPort: 10011,
  // backupUrl is the URL to query to get the IP address of another notary
  // server in case if defaultNotaryIP is unreachable
@@ -19,12 +19,13 @@ export const globals = {
  // if useHTTP11 is set to true then we use HTTP/1.1 in the request, otherwise
  // we use HTTP/1.0. Using HTTP/1.0 is the only way to prevent a webserver from using
  // chunked transfer encoding. This may be useful e.g. when webserver response is used
  // inside zk proofs and we want simpler parsing without de-chunking.
  useHTTP11: true,
  // appId is Chrome-only: the id of Chrome app used to send raw TCP packets.
  appId: 'oclohfdjoojomkfddjclanpogcnjhemd',
  // if useNotaryNoSandbox is set to true, then we fetch notary's pubkey by
  // querying /getPubKey and trust it. This is only useful when notary runs
  // in a non-sandbox environment.
-  useNotaryNoSandbox: false
+  //useNotaryNoSandbox: false
+  useNotaryNoSandbox: true
};
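
These settings are read wherever a notary URL is built; a minimal sketch of that pattern (editor's illustration; getURLFetcherDoc further below does essentially the same thing):

import {globals} from './globals.js';
const url = 'http://' + globals.defaultNotaryIP + ':' + globals.defaultNotaryPort + '/getURLFetcherDoc';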
@@ -1,3 +1,4 @@
+/* eslint-disable no-unused-vars */
let db;
let db_blobs;

@@ -6,7 +7,7 @@ export async function init_db() {

  const dbReq = indexedDB.open('PageSigner', 1);
  dbReq.onupgradeneeded = function (event) {
    // Set the db variable to our database so we can use it!
    db = event.target.result;

    if (!db.objectStoreNames.contains('sessions')) {
@@ -36,7 +37,7 @@ export async function init_db() {
  // i/o operations a very slow when there's blobs in a db
  const dbReq2 = indexedDB.open('PageSigner_blobs', 1);
  dbReq2.onupgradeneeded = function (event) {
    // Set the db variable to our database so we can use it!
    db_blobs = event.target.result;
    if (!db_blobs.objectStoreNames.contains('sessions')) {
      db_blobs.createObjectStore('sessions', { keyPath: 'creationTime', autoIncrement: true });
@@ -71,7 +72,7 @@ export async function addNewPreference(key, value){
      reject('error in cursor request ' + event.target.errorCode);
    };
  });

  for (let pref of allPreferences){
    if (pref['name'] == key){
      return;
@@ -83,7 +84,7 @@ export async function addNewPreference(key, value){
    const tx = db.transaction(['preferences'], 'readwrite');
    const store = tx.objectStore('preferences');
    store.add({name:key, value:value});
    tx.oncomplete = function() {
      resolve();
    };
    tx.onerror = function(event) {
@@ -134,7 +135,7 @@ export async function getAllSessions() {
  });
}


export async function saveNewSession(date, host, request, response, pgsg, options){
  await new Promise(function(resolve, reject) {
@@ -143,7 +144,7 @@ export async function saveNewSession(date, host, request, response, pgsg, option
    let isImported = false;
    let isEdited = false;
    if (options != undefined){
      if (options.indexOf('imported') > -1) isImported = true;
      if (options.indexOf('edited') > -1) isEdited = true;
    }
    // sessionName can be changed by the user in the manager window
@@ -154,7 +155,7 @@ export async function saveNewSession(date, host, request, response, pgsg, option
      isImported: isImported,
      isEdited: isEdited,
      version: 6});
    tx.oncomplete = function() {
      resolve();
    };
    tx.onerror = function(event) {
@@ -164,14 +165,14 @@ export async function saveNewSession(date, host, request, response, pgsg, option
  });
  await new Promise(function(resolve, reject) {
    const tx2 = db_blobs.transaction(['sessions'], 'readwrite');
    const store2 = tx2.objectStore('sessions');
    store2.add({
      creationTime: date,
      serverName:host,
      request:request,
      response:response,
      pgsg:pgsg});
    tx2.oncomplete = function() {
      resolve();
    };
    tx2.onerror = function(event) {
@@ -181,15 +182,15 @@ export async function saveNewSession(date, host, request, response, pgsg, option
  });
}


export async function getSession(idx){
  return await new Promise(function(resolve, reject) {

    const tx = db.transaction(['sessions'], 'readonly');
    const store = tx.objectStore('sessions');
    const req = store.get(idx);
    req.onsuccess = function(event) {
      const entry = event.target.result;
      if (entry) {
        console.log(entry);
        resolve(entry);
@@ -205,7 +206,7 @@ export async function getSession(idx){

  });
}


@@ -215,10 +216,10 @@ export async function getSessionBlob(idx){
  return await new Promise(function(resolve, reject) {

    const tx = db_blobs.transaction(['sessions'], 'readonly');
    const store = tx.objectStore('sessions');
    const req = store.get(idx);
    req.onsuccess = function(event) {
      const entry = event.target.result;
      if (entry) {
        resolve(entry);
      } else {
@@ -239,10 +240,10 @@ export async function getPref(pref){
  return await new Promise(function(resolve, reject) {

    let tx = db.transaction(['preferences'], 'readonly');
    let store = tx.objectStore('preferences');
    let req = store.get(pref);
    req.onsuccess = function(event) {
      let entry = event.target.result;
      if (entry) {
        console.log(entry);
        resolve(entry.value);
@@ -261,18 +262,18 @@ export async function getPref(pref){

export async function setPref(pref, newvalue) {
  await new Promise(function(resolve, reject) {

    const tx = db.transaction(['preferences'], 'readwrite');
    const store = tx.objectStore('preferences');
    const request = store.get(pref);

    request.onsuccess = function(event) {
      // Get the old value that we want to update
      const data = event.target.result;

      // update the value(s) in the object that you want to change
      data.value = newvalue;

      // Put this updated object back into the database.
      const requestUpdate = store.put(data);
      requestUpdate.onerror = function(event) {
@@ -296,14 +297,14 @@ export async function renameSession(id, newname) {
    const tx = db.transaction(['sessions'], 'readwrite');
    const sessions = tx.objectStore('sessions');
    const request = sessions.get(id);

    request.onsuccess = function(event) {
      // Get the old value that we want to update
      const data = event.target.result;

      // update the value(s) in the object that you want to change
      data.sessionName = newname;

      // Put this updated object back into the database.
      const requestUpdate = sessions.put(data);
      requestUpdate.onerror = function(event) {
@@ -316,4 +317,4 @@ export async function renameSession(id, newname) {
      };
    };
  });
}
18
core/internal.js
Normal file
@@ -0,0 +1,18 @@
// allows access to PageSigner's internal classes by exposing them to the
// extension's window global

import * as utils from './utils.js';
import * as indexeddb from './indexeddb.js';
import * as globals from './globals.js';
import * as Main from './Main.js';
import {OTSender} from './twopc/OTSender.js';
import {OTReceiver} from './twopc/OTReceiver.js';

window.PageSigner = {
  Main:Main,
  globals:globals,
  utils:utils,
  indexeddb:indexeddb,
  OTSender:OTSender,
OTReceiver:OTReceiver};
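
Because these modules are attached to the window global, they can be poked at from the extension's background-page console. A possible session (editor's illustration, untested):

const PS = window.PageSigner;
console.log(PS.globals.globals.defaultNotaryPort); // the object exported by globals.js
console.log(Object.keys(PS.utils));                // list the available helper functions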
@@ -1,4 +1,4 @@
import {ba2str, b64decode, assert, ba2int, verifyAttestationDoc, ba2hex, eq,
  sha256} from './utils.js';

// rootsOfTrust contains an array of trusted EBS snapshots
@@ -45,8 +45,8 @@ function checkDescribeInstances(xmlDoc, instanceId, imageId, volumeId) {
  assert(parent.getElementsByTagName('instanceId')[0].textContent === instanceId);
  assert(parent.getElementsByTagName('imageId')[0].textContent === imageId);
  assert(parent.getElementsByTagName('instanceState')[0].getElementsByTagName('name')[0].textContent === 'running');
  // other instance types may use non-nvme disks and thus would bypass the check that
  // only one nvme* disk is allowed
  assert (parent.getElementsByTagName('instanceType')[0].textContent.startsWith('t3'));
  var launchTime = parent.getElementsByTagName('launchTime')[0].textContent;
  assert(parent.getElementsByTagName('rootDeviceType')[0].textContent === 'ebs');
@@ -112,8 +112,8 @@ function checkGetConsoleOutput(xmlDoc, instanceId) {
  // a redundant check, because the patched ramdisk must halt the boot process if
  // it detects more than one disk device.
  const allowedSet = ['nvme', 'nvme0', 'nvme0n1', 'nvme0n1p1'];
  // match all substrings starting with nvme, folowed by a count of from 0 to 7 symbols from
  // the ranges 0-9 and a-z
  for (const match of [...logstr.matchAll(/nvme[0-9a-z]{0,7}/g)]){
    assert(match.length == 1);
    assert(allowedSet.includes(match[0]), 'disallowed nvme* string present in log');
@@ -192,7 +192,7 @@ function checkGetUser(xmlDoc, ownerId) {


function checkDescribeImages(xmlDoc, imageId, snapshotIds){
  try {
    assert(xmlDoc.getElementsByTagName('DescribeImagesResponse').length == 1);
    const images = xmlDoc.getElementsByTagName('imagesSet')[0].children;
    assert(images.length == 1);
@@ -224,7 +224,7 @@ async function fetch_and_parse(obj){
}

export async function getURLFetcherDoc(IP, port = 10011){
  // get URLFetcher document containing attestation for AWS HTTP API URLs needed to
  // verify that the oracle was correctly set up.
  // https://github.com/tlsnotary/URLFetcher
  const resp = await fetch('http://' + IP + ':' + port + '/getURLFetcherDoc', {
@@ -237,8 +237,8 @@ export async function getURLFetcherDoc(IP, port = 10011){

export async function verifyNotary(URLFetcherDoc) {
  // URLFetcherDoc is a concatenation of 4-byte transcript length | transcript | attestation doc
-  const transcriptLen = ba2int(URLFetcherDoc.slice(0,4));
-  const transcript = URLFetcherDoc.slice(4,4+transcriptLen);
+  const transcriptLen = ba2int(URLFetcherDoc.slice(0, 4));
+  const transcript = URLFetcherDoc.slice(4, 4+transcriptLen);
  const attestation = URLFetcherDoc.slice(4+transcriptLen);

  // transcript is a JSON array for each request[ {"request":<URL>, "response":<text>} , {...}]
@@ -266,7 +266,7 @@ export async function verifyNotary(URLFetcherDoc) {

  // check that the URLs are formatted in a canonical way
  // Note that AWS expects URL params to be sorted alphabetically. If we put them in
  // arbitrary order, the query will be rejected

  // "AWSAccessKeyId" should be the same in all URLs to prove that the queries are made
@@ -316,7 +316,7 @@ export async function verifyNotary(URLFetcherDoc) {

  const xmlDocGCO = await fetch_and_parse(o.GCO);
  const pubkeyPEM = checkGetConsoleOutput(xmlDocGCO, instanceId);

  const xmlDocDIAud = await fetch_and_parse(o.DIAud);
  checkDescribeInstanceAttributeUserdata(xmlDocDIAud, instanceId);

@@ -338,12 +338,4 @@ export async function verifyNotary(URLFetcherDoc) {

  console.log('oracle verification successfully finished');
  return pubkeyPEM;
}
-
-
-if (typeof module !== 'undefined'){ // we are in node.js environment
-  module.exports={
-    check_oracle: verify_oracle,
-    oracle,
-  };
-}
2391
core/third-party/nacl-fast.js
vendored
File diff suppressed because it is too large
@@ -3,18 +3,6 @@ export class Evaluator{
    // s is the current session
    this.s = parent;
  }

-  getNonFixedLabels(encLabels, otKeys, nonFixedBits){
-    const nonFixedLabels = [];
-    for (let i = 0; i < nonFixedBits.length; i += 1) {
-      const bit = nonFixedBits[i];
-      const ct = encLabels.slice(i * 32, (i+1) * 32);
-      const inputLabel = this.s.ot.decryptWithKey(ct, bit, otKeys[i]);
-      nonFixedLabels.push(inputLabel);
-    }
-    return nonFixedLabels;
-  }
-
  async evaluateBatch(batch, cNo){
    const outputs = await this.s.workers[cNo].evaluateBatch(batch);
@@ -24,5 +12,4 @@ export class Evaluator{
    }
    return parsed;
  }
-
}
@@ -1,7 +1,9 @@
/* global chrome */

import WorkerPool from './WorkerPool.js';

export class GCWorker extends WorkerPool{
  // class GCWorker provides convenience functions to speak to the web worker
  constructor(numWorkers, processMonitor){
    super(numWorkers, chrome.extension.getURL('core/twopc/webWorkers/gcworker.js'));
    // pm is an instance of ProcessMonitor
446
core/twopc/GHASH.js
Normal file
@@ -0,0 +1,446 @@
import {sortKeys, assert, bytesToBits, concatTA, int2ba, xor, ba2int,
  splitIntoChunks} from '../utils.js';

// class GHASH implements a method of computing the AES-GCM's GHASH function
// in a secure two-party computation (2PC) setting using 1-of-2 Oblivious
// Transfer (OT). The parties start with their secret shares of H (the GHASH key) and at
// the end each gets their share of the GHASH output.
// The method is described here:
// (https://tlsnotary.org/how_it_works#section4).

export class GHASH {
  constructor(noOfAESBlocks){
    // otR is an instance of OTReceiver
    this.otR = null;
    // shares is a sparse array of shares of the powers of H. H is also
    // known as GHASH key - it is an all-zero bytestring AES-ECB-encrypted
    // with TLS session's client_write_key. Array's key is the power n,
    // Array's value is the value of the client's xor-share of H^n.
    this.shares = [];
    // maxPowerNeeded is the maximum power of H that we'll need in order to
    // compute the GHASH function. It is the number of AES blocks +
    // aad (1 block) + suffix (1 block)
    this.maxPowerNeeded = noOfAESBlocks + 2;
    // lastChoiceBits is the choice bits in client's most recent OT request.
    this.lastChoiceBits = null;
    // lastStrategy contains one of the two strategies which was used when
    // preparing the most recent OT request. The same strategy will be used
    // when processing an OT response.
    this.lastStrategy = null;
    // res contains an intermediate result during Block Aggregation. We
    // save it here before making an OT request and pick it up after
    // receiving a response.
    this.res = new Uint8Array(16).fill(0);

    // strategy1 & strategy2 show which existing shares we will be multiplying (the values)
    // to obtain other odd shares (the key).
    // The max sequential odd share that we can obtain in the first round of
    // communication is 19 (we already have 1) the shares of H^1, H^2, H^3 from
    // the Client Finished message and 2) the squares of those 3 shares).
    // Note that "sequential" is a keyword here. We can't obtain 21 but we
    // indeed can obtain 25==24+1, 33==32+1 etc. However with 21 missing,
    // even if we have 25,33,etc, there will be a gap and we will not be able
    // to obtain all the needed shares by Block Aggregation.

    // We request OT for each share in each pair of the strategy, i.e. for
    // shares: 4,1,4,3,8,1, etc. Even though it would be possible to introduce
    // optimizations in order to avoid requesting OT for the same share more
    // than once, that would only save us ~2000 OT instances at the cost of
    // complicating the code.

    this.strategy1 = {
      5: [4, 1],
      7: [4, 3],
      9: [8, 1],
      11: [8, 3],
      13: [12, 1],
      15: [12, 3],
      17: [16, 1],
      19: [16, 3]};
    this.strategy2 = {
      21: [17, 4],
      23: [17, 6],
      25: [17, 8],
      27: [19, 8],
      29: [17, 12],
      31: [19, 12],
      33: [17, 16],
      35: [19, 16]};

    // maxOddPowerNeeded is the maximum odd share that we need (it is a key
    // from one of the strategies)
    this.maxOddPowerNeeded = this.findMaxOddPower(this.maxPowerNeeded);
    // otCount is how many instances of OT the client (who is the OT receiver)
    // will have to execute. The OT count for Client_Finished is 256 and for
    // Server_Finished it is also 256.
    this.otCount = 256 + 256 + this.calculateOTCount();
  }

  // sets the OTReceiver instance
  setOTReceiver(otR){
    this.otR = otR;
  }

  // return true if we need another communication roundtrip with the notary
  isStep2Needed(){
    return this.maxOddPowerNeeded > 19;
  }

  // findMaxOddPower finds the max odd share that we need
  findMaxOddPower(maxPowerNeeded){
    assert(maxPowerNeeded <= 1026);

    // maxHTable's <value> shows how many GHASH blocks can be processed
    // with Block Aggregation if we have all the sequential shares
    // starting with 1 up to and including <key>.
    // e.g. {5:29} means that if we have shares of H^1,H^2,H^3,H^4,H^5,
    // then we can process 29 GHASH blocks.
    // max TLS record size of 16KB requires 1026 GHASH blocks
    const maxHTable = {0: 0, 3: 19, 5: 29, 7: 71, 9: 89, 11: 107, 13: 125, 15: 271, 17: 305, 19: 339, 21: 373,
      23: 407, 25: 441, 27: 475, 29: 509, 31: 1023, 33: 1025, 35: 1027};

    let maxOddPowerNeeded = null;
    for (const key of sortKeys(maxHTable)){
      const maxH = maxHTable[key];
      if (maxH >= maxPowerNeeded){
        maxOddPowerNeeded = key;
        break;
      }
    }
    return maxOddPowerNeeded;
  }

  // calculateOTCount calculates the amount of OTs the OT receiver
  // will have to execute when sending the request (Client/Server Finished
  // are not included)
  calculateOTCount(){
    // z is a dummy value which we set to indicate that we have the share
    const z = new Uint8Array(16).fill(0);
    // all shares of powers which the client has
    // add powers 1,2,3 which the client will already have
    const allShares = [undefined, z, z, z];
    // powerCount contains powers from strategies. Each power requires
    // 128 OTs.
    let powerCount = 0;
    const strategy = {...this.strategy1, ...this.strategy2};
    for (const k of sortKeys(strategy)){
      if (k > this.maxOddPowerNeeded){
        break;
      }
      allShares[k] = z;
      powerCount += 2;
    }
    this.freeSquare(allShares, this.maxPowerNeeded);

    // how many auxiliary shares needed for Block Aggregation. Each aux
    // share requires 128 OTs for the share itself and 128 OTs for
    // its aggregated value.
    const auxShares = [];
    for (let i=1; i <= this.maxPowerNeeded; i++){
      if (allShares[i] != undefined){
        continue; // the client already has this share
      }
      // a is the smaller power
      const [a, b] = this.findSum(allShares, i);
      if (! auxShares.includes(a)){
        auxShares.push(a);
      }
    }
    return powerCount * 128 + auxShares.length * 256;
  }

  // prepareOTRequest prepares a request for Notary's masked XTables
  // corresponding to our shares in the strategies.
  async prepareOTRequest(strategyNo){
    assert(this.lastChoiceBits == null);
    this.freeSquare(this.shares, this.maxPowerNeeded);
    const strategy = strategyNo == 1 ? this.strategy1 : this.strategy2;
    const inputBits1Arr = [];
    const keys = Object.keys(strategy);
    for (const k of keys){
      if (k > this.maxOddPowerNeeded){
        break;
      }
      const v = strategy[k];
      inputBits1Arr.push(bytesToBits(this.shares[v[0]]).reverse());
      inputBits1Arr.push(bytesToBits(this.shares[v[1]]).reverse());
    }
    this.lastChoiceBits = [].concat(...inputBits1Arr);
    this.lastStrategy = strategy;
    return this.otR.createRequest(this.lastChoiceBits);
  }

  processOTResponse(otResp){
    assert(this.lastChoiceBits != null);
    const hisXTables = this.otR.parseResponse(this.lastChoiceBits, otResp);
    this.getPowerShares(hisXTables, this.shares);
    this.freeSquare(this.shares, this.maxPowerNeeded);
    this.lastChoiceBits = null;
    this.lastStrategy = null;
  }

  // buildStep1 starts the process of computing our share of the tag for the
  // client request. We already have shares of H^1, H^2, H^3 from the tag for
  // the Client_Finished earlier
  async buildStep1(){
    console.log('maxPowerNeeded is', this.maxPowerNeeded);
    assert(this.maxPowerNeeded <= 1026);
    if (this.maxOddPowerNeeded === 3){
      this.freeSquare(this.shares, this.maxPowerNeeded);
      return int2ba(this.maxPowerNeeded, 2);
    }
    return concatTA(int2ba(this.maxPowerNeeded, 2), await this.prepareOTRequest(1));
  }

  // Step2 is needed when the amount of odd powers needed is > 19
  async buildStep2(){
    return await this.prepareOTRequest(2);
  }

  // Step3 performs Block Aggregation for GHASH inputs
  async buildStep3(ghashInputs){
    this.res = this.multiplyShares(ghashInputs);
    return await this.blockAggregationBuildRequest(ghashInputs);
  }

  // Add notary's masked XTables to our tag share
  processStep3Response(resp){
    assert(this.lastChoiceBits != null);
    let o = 0;
    const otResp = resp.slice(o, o += this.lastChoiceBits.length * 32);
    const hisTagShare = resp.slice(o, o += 16);
    assert(resp.length === o);
    const res = this.blockAggregationProcessResponse(otResp);
    return xor(res, hisTagShare);
  }

  // buildFinRequest builds an OT request to compute the tag of either
  // Client Finished (CF) or Server Finished (SF) message
  // Note that you must use separate instances of the GHASH class: one for
  // CF and one for SF
  buildFinRequest(H1){
    assert(this.lastChoiceBits == null);
    const H2 = blockMult(H1, H1);
    const h1Bits = bytesToBits(H1).reverse();
    const h2Bits = bytesToBits(H2).reverse();
    this.shares[1] = H1;
    this.shares[2] = H2;
    this.lastChoiceBits = [].concat(h1Bits, h2Bits);
    return this.otR.createRequest(this.lastChoiceBits);
  }

  // processFinResponse processes notary's OT response and multiplies
  // each share of H with the corresponding GHASH block
  processFinResponse(otResp, ghashInputs){
    assert(this.lastChoiceBits != null);
    const twoXTables = this.otR.parseResponse(this.lastChoiceBits, otResp);
    let H3 = new Uint8Array(16).fill(0);
    // we multiply our H1 share with his H2's masked XTable and
    // we multiply our H2 share with his H1's masked XTable
    const items = splitIntoChunks(twoXTables, 16);
    for (let i = 0; i < 256; i++) {
      H3 = xor(H3, items[i]);
    }
    H3 = xor(H3, blockMult(this.shares[1], this.shares[2]));
    this.shares[3] = H3;
    const s1 = blockMult(ghashInputs[0], this.shares[3]);
    const s2 = blockMult(ghashInputs[1], this.shares[2]);
    const s3 = blockMult(ghashInputs[2], this.shares[1]);
    this.lastChoiceBits = null;
    return xor(xor(s1, s2), s3);
  }

  // findSum finds summands from "powers" which add up to "sumNeeded"
  // those powers which we don't have are undefined
  findSum(powers, sumNeeded){
    for (let i=1; i < powers.length; i++){
      if (powers[i] == undefined){
        continue;
      }
      for (let j=1; j < powers.length; j++){
        if (powers[j] == undefined){
          continue;
        }
        if (i+j === sumNeeded){
          return [i, j];
        }
      }
    }
    // this should never happen because we always call
    // findSum() knowing that the sum can be found
    throw('sum not found');
  }

  // Perform squaring of each odd share up to maxPower. It is "free" because
  // it is done locally without OT.
  // "powers" is a sparse array where idx is the power (or undefined if not set) and
  // item at that idx is client's share of H^power. Modifies "powers" in-place,
  // e.g. if "powers" contains 1,2,3 and maxPower==19, then upon return "powers"
  // will contain 1,2,3,4,6,8,12,16
  freeSquare(powers, maxPower){
    for (let i=0; i < powers.length; i++){
      if (powers[i] == undefined || i % 2 == 0){
        continue;
      }
      if (i > maxPower){
        return;
      }
      let power = i;
      while (power <= maxPower){
        power = power * 2;
        if (powers.includes(power)){
          continue;
        }
        powers[power] = blockMult(powers[power/2], powers[power/2]);
      }
    }
}
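  // Worked example (editor's illustration): with shares present at powers 1,2,3 and
  // maxPower == 8, squaring the share of H^1 fills powers 2,4,8 and squaring the
  // share of H^3 fills power 6, so "powers" ends up populated at 1,2,3,4,6,8.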

  // multiply H shares with corresponding ciphertext blocks
  // for all H shares which we do not have we will use the Block Aggregation method
  multiplyShares(ghashInputs){
    let res = new Uint8Array(16).fill(0);
    // this.powersOfH is a sparse array, its .length is the max index
    for (let i=1; i < this.shares.length; i++){
      if (i > ghashInputs.length){
        // we will have more powers than the input blocks
        break;
      }
      if (this.shares[i] == undefined){
        continue;
      }
      const x = ghashInputs[ghashInputs.length-i];
      res = xor(res, blockMult(this.shares[i], x));
    }
    return res;
  }

  // for those shares of powers which are not in powersOfH, we do not compute the shares of
  // powers, but instead we compute the share of (X_n*H), where X_n is a ciphertext block
  //
  // e.g. if we need H^21*X_1 and we have shares of H^19 and H^2, we compute it as follows:
  // (H19_a + H19_b)*(H2_a + H2_b)*X_1 == H19_a*H2_a*X_1 + H19_a*X_1*H2_b + H19_b*X_1*H2_a +
  // H19_b*H2_b*X_1
  // only the 2 middle cross-terms need to be computed using OT
  // A will receive OT for H19_a*X_1 from B
  // B will receive OT for H19_b*X_1 from A
  // All other powers where one of the factors is H^2 can be "collapsed" into (i.e. xored with)
  // the above two cross-terms, e.g. if parties have shares of H^23 and need to compute
  // H^25 == H^23*H^2, then H23_a*X_2 can be collapsed into H19_a*X_1 and
  // H23_b*X_2 can be collapsed into H19_b*X_1
  // Thus we would not need any extra OT to compute shares of H^25

  async blockAggregationBuildRequest(ghashInputs){
    assert(this.lastChoiceBits == null);
    // aggregated object's keys are shares and values are the aggregated
    // value of shares * GHASH block
    let aggregated = {};
    // this.powersOfH is a sparse array. It contains: the powers 1,2,3 + odd powers that we
    // computed earlier + squares of all those powers
    for (let i=4; i < this.shares.length; i++){
      if (i > ghashInputs.length){
        // we stop iterating shares of powers of H
        break;
      }
      if (this.shares[i] != undefined){
        // we already multiplied the block with this share in multiplyShares()
        continue;
      }
      // found a share which does not exist in our sparse array,
      // we need X*H for this missing power
      // a is the smaller power, b is the bigger power
      const [a, b] = this.findSum(this.shares, i);
      const x = ghashInputs[ghashInputs.length-i];
      this.res = xor(this.res, blockMult(blockMult(this.shares[a], this.shares[b]), x));
      if (aggregated[a] == undefined){
        aggregated[a] = new Uint8Array(16).fill(0);
      }
      aggregated[a] = xor(aggregated[a], blockMult(this.shares[b], x));
    }

    // request Notary's masked X-table for each Notary's small power a.
    // We send bits of our aggregated values
    // and also
    // request Notary's X-table for each Notary's aggregated value. We send bits
    // of our small power
    const sortedKeys = sortKeys(aggregated);
    let allBits = [];
    for (const key of sortedKeys){
      // OT starts with the highest bit because ours is the y value of block multiplication
      allBits = [].concat(allBits, bytesToBits(aggregated[key]).reverse());
      allBits = [].concat(allBits, bytesToBits(this.shares[key]).reverse());
    }
    this.lastChoiceBits = allBits;
    // TODO rename requestMaskedOT to createRequest and
    // rename the rest to createResponse, parseResponse
    return this.otR.createRequest(allBits);
  }

  // add Notary's XTable to our share of the tag
  blockAggregationProcessResponse(otResp){
    assert(this.lastChoiceBits != null);
    const XTableEntries = this.otR.parseResponse(this.lastChoiceBits, otResp);
    const items = splitIntoChunks(XTableEntries, 16);
    for (let i = 0; i < items.length; i += 1) {
      this.res = xor(this.res, items[i]);
    }
    this.lastChoiceBits = null;
    return this.res;
  }

  // Compute shares of powers listed in strategies
  // modifies PowersOfH in-place
  getPowerShares(hisXTables, powersOfH){
    const stratKeys = sortKeys(this.lastStrategy);
    // for each strategy we have 128 values for 1st factor and 128 values for 2nd factor
    const chunks = splitIntoChunks(hisXTables, 256*16);
    for (let j=0; j < stratKeys.length; j++){
      const oddPower = stratKeys[j];
      if (oddPower > this.maxOddPowerNeeded){
        assert(chunks.length === j);
        break;
      }
      let xorSum = new Uint8Array(16).fill(0); // start with 0 sum
      // TODO find a better name for subChunks
      const subChunks = splitIntoChunks(chunks[j], 16);
      for (let i=0; i < 256; i++){
        xorSum = xor(xorSum, subChunks[i]);
      }
      const Cx = powersOfH[this.lastStrategy[oddPower][0]];
      const Cy = powersOfH[this.lastStrategy[oddPower][1]];
      const CxCy = blockMult(Cx, Cy);
      powersOfH[oddPower] = xor(xorSum, CxCy);
    }
  }

}

// perform GCM Galois Field block multiplication
// x_,y_ are Uint8Arrays
export function blockMult(x_, y_){
  // casting to BigInt just in case if ba2int returns a Number
  let x = BigInt(ba2int(x_));
  const y = BigInt(ba2int(y_));
  let res = 0n;
  for (let i=127n; i >= 0n; i--){
    res ^= x * ((y >> i) & 1n);
    x = (x >> 1n) ^ ((x & 1n) * BigInt(0xE1000000000000000000000000000000));
  }
  return int2ba(res, 16);
}
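
A quick illustration of how blockMult composes (editor's sketch, not part of the file): the GHASH of two 16-byte blocks X1, X2 under key H is X1*H^2 xor X2*H, which can be written directly with the helpers above (xor is the helper imported from utils.js):

const H  = new Uint8Array(16).fill(0x42); // arbitrary example key
const X1 = new Uint8Array(16).fill(0x01);
const X2 = new Uint8Array(16).fill(0x02);
const H2 = blockMult(H, H);               // H^2 in GF(2^128)
const tag = xor(blockMult(X1, H2), blockMult(X2, H));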

// return a table of 128 x values needed for block multiplication
// x is Uint8Array
// Not in use because the Client is the OT receiver, he supplies
// the bits of y. The Notary supplies the XTable.
export function getXTable(x_){
  let x = ba2int(x_);
  const table = [];
  for (let i=0; i < 128; i++){
    table[i] = int2ba(x, 16);
    x = (x >> 1n) ^ ((x & 1n) * BigInt(0xE1000000000000000000000000000000));
  }
  return table;
}
@@ -1,182 +1,97 @@
import {concatTA, assert, ba2int, expandRange} from './../utils.js';
import {concatTA, assert} from './../utils.js';

// class Garbler implements the role of the client as the garbler
export class Garbler{
  // class Garbler implements the role of the client as the garbler
  s;
  garbledC;

  constructor(parent){
    // this.s is the TWOPC class
    this.s = parent;
    // this.garbledC will contain truth table, output labels and input labels for each
    // circuit after it is garbled
    this.garbledC = [];
  }

  async getAllB(allB){
    let fixedCount = 0;
    let nonFixedCount = 0;

    for (let i = 1; i < this.s.cs.length; i++) {
      if (i !== 6){
        fixedCount += this.s.cs[i].notaryFixedInputSize;
      }
      else {
        fixedCount += 160 + this.s.C6Count * 128;
      }
      console.log('fixed count for ', i, ' is ', fixedCount);
      nonFixedCount += this.s.cs[i].notaryNonFixedInputSize;
    }

    console.log('in getAllB fixedCount, nonFixedCount', fixedCount, nonFixedCount);
    const OT0PoolSize = Math.ceil(nonFixedCount/2 * 1.2);
    const OT1PoolSize = Math.ceil(nonFixedCount/2 * 1.2);

    // 4000 OT for OT-ghash
    const expectedLen = (OT0PoolSize + OT1PoolSize + fixedCount + 6000)*32;
    assert(allB.length === expectedLen);

    const fixedBlob = allB.slice(0, fixedCount*32);
    const nonFixedPoolBlob = allB.slice(fixedCount*32);

    const encLabels = [];
    let fixedBlobIdx = 0;
    for (let j = 1; j < this.s.cs.length; j++) {
      const c = this.s.cs[j];
      let inputCount = c.notaryFixedInputSize;
      if (j === 6){
        inputCount = 160 + this.s.C6Count * 128;
      }
      assert(inputCount*32 === this.garbledC[j].il.notaryFixed.length);
      for (let i = 0; i < inputCount; i++) {
        const m0 = this.garbledC[j].il.notaryFixed.slice(i*32, i*32+16);
        const m1 = this.garbledC[j].il.notaryFixed.slice(i*32+16, i*32+32);
        const B = fixedBlob.slice(fixedBlobIdx*32, fixedBlobIdx*32+32);
        encLabels.push(this.s.ot.encrypt(m0, m1, B));
        fixedBlobIdx++;
      }
    }
    assert(fixedBlobIdx*32 === fixedBlob.length);

    const arrOfB = [];
    for ( let i = 0; i < nonFixedPoolBlob.length/32; i++) {
      arrOfB[i] = nonFixedPoolBlob.slice(i*32, i*32+32);
    }
    await this.s.ot.prepareEncryptionKeys(arrOfB);
    return concatTA(...encLabels);
  }

  async garbleAll(){
    // first garble circuit 5 once, so that future invocations can reuse labels
    const rv5 = await this.s.workers[5].garble();

    // garble the rest of the circuits asyncronously
    // we don't parallelize garbling of non-c5 circuits, we garble them
    // one by one
    const allPromises = [];
    for (let cNo = 1; cNo < this.s.cs.length; cNo++){
      if (cNo === 5){
        allPromises.push(Promise.resolve('empty'));
        continue;
      }
      const worker = this.s.workers[cNo];
      allPromises.push(worker.garble());
    }

    // reuse labels of c5 and garble in batches
    const allTt = [new Uint8Array(rv5.tt)];
    const allOl = [new Uint8Array(rv5.ol)];
    const allIl = this.separateLabels(new Uint8Array(rv5.il), 5);

    const outputs = await this.s.workers[5].garbleBatch(this.s.C5Count-1, {
      reuseLabels: concatTA(allIl.notaryFixed, allIl.clientNonFixed),
      reuseIndexes: expandRange(0, 320),
      reuseR: new Uint8Array(rv5.R)
    });

    for (let i=0; i < this.s.C5Count-1; i++){
    const allTt = [];
    const allOl = [];
    const allIl = [];
    // while non-c5 circuits are being garbled, we saturate the CPU with
    // parallel garbling of c5
    const outputs = await this.s.workers[5].garbleBatch(this.s.C5Count, {});
    for (let i=0; i < this.s.C5Count; i++){
      const out = outputs[i];
      allTt.push(new Uint8Array(out.tt));
      allOl.push(new Uint8Array(out.ol));
      const labels = this.separateLabels(new Uint8Array(out.il), 5);
      allIl.clientFixed = concatTA(allIl.clientFixed, labels.clientFixed);
      allIl.push(new Uint8Array(out.il));
    }
    this.garbledC[5] = {
      tt: concatTA(...allTt),
      ol: concatTA(...allOl),
      il: allIl};

    // all the other circuits have been garbled by now
      il: concatTA(...allIl)};

    // all non-c5 circuits have been garbled by now
    const allRv = await Promise.all(allPromises);

    for (let cNo=1; cNo < this.s.cs.length; cNo++){
      const rv = allRv[cNo-1];
      if (cNo === 5){
        // c5 already dealt with
        continue;
        continue; // c5 already dealt with
      }
      this.garbledC[cNo] = {
        tt: new Uint8Array(rv.tt),
        ol: new Uint8Array(rv.ol),
        il: this.separateLabels(new Uint8Array(rv.il), cNo)};
        il: new Uint8Array(rv.il)};
    }
  }

  getNonFixedEncLabels(idxBlob, cNo){
    const nfis = this.s.cs[cNo].notaryNonFixedInputSize;
    assert(nfis*2 === idxBlob.length);

    const encLabels = [];
    console.time('encryptWithKeyAtIndex');
    for (let i=0; i < nfis; i++){
      const idx = ba2int(idxBlob.subarray(i*2, i*2+2));
      const m0 = this.garbledC[cNo].il.notaryNonFixed.subarray(i*32, i*32+16);
      const m1 = this.garbledC[cNo].il.notaryNonFixed.subarray(i*32+16, i*32+32);
      const encr = this.s.ot.encryptWithKeyAtIndex(m0, m1, idx);
      encLabels.push(encr);
  // Notary's inputs are always the first inputs in the circuit
  getNotaryLabels(cNo){
    // exeCount is how many executions of this circuit we need
    const exeCount = [0, 1, 1, 1, 1, this.s.C5Count, this.s.C6Count][cNo];
    const il = this.garbledC[cNo].il;
    const c = this.s.cs[cNo];
    const ilArray = [];
    // chunkSize is the bytesize of input labels for one circuit
    const chunkSize = (c.notaryInputSize+c.clientInputSize)*32;
    assert(chunkSize*exeCount == il.length);
    for (let i=0; i < exeCount; i++){
      ilArray.push(il.slice(i*chunkSize, i*chunkSize+c.notaryInputSize*32));
    }
    console.timeEnd('encryptWithKeyAtIndex');
    console.log('encryptWithKeyAtIndex for count:', nfis);

    return concatTA(...encLabels);
    return concatTA(...ilArray);
  }

  getClientLabels(nonFixedBits, cNo){
    const fixedBits = this.s.cs[cNo].fixedInputs;
    const clientInputs = [].concat(nonFixedBits, fixedBits);
    const inputLabels = [];
    const clientLabelBlob = concatTA(
      this.garbledC[cNo].il.clientNonFixed,
      this.garbledC[cNo].il.clientFixed);
  // Client's inputs always come after the Notary's inputs in the circuit
  getClientLabels(clientInputs, cNo){
    const repeatCount = [0, 1, 1, 1, 1, this.s.C5Count, this.s.C6Count][cNo];
    const il = this.garbledC[cNo].il;
    const c = this.s.cs[cNo];
    const ilArray = [];
    // chunkSize is the bytesize of input labels for one circuit
    const chunkSize = (c.notaryInputSize+c.clientInputSize)*32;
    assert(chunkSize*repeatCount == il.length);
    for (let i=0; i < repeatCount; i++){
      ilArray.push(il.slice(i*chunkSize+c.notaryInputSize*32, (i+1)*chunkSize));
    }
    const clientLabelBlob = concatTA(...ilArray);
    assert(clientInputs.length*32 == clientLabelBlob.length);

    const out = [];
    for (let i=0; i < clientInputs.length; i++){
      const bit = clientInputs[i];
      const label = clientLabelBlob.subarray(i*32+bit*16, i*32+bit*16+16);
      inputLabels.push(label);
      out.push(label);
    }
    return concatTA(...inputLabels);
  }

  // separate one continuous blob of input labels into 4 blobs as in Labels struct
  separateLabels(blob, cNo) {
    const c = this.s.cs[cNo];
    if (blob.length != (c.notaryInputSize+c.clientInputSize)*32) {
      throw('in separateLabels');
    }
    const labels = {};
    let offset = 0;
    labels['notaryNonFixed'] = blob.slice(offset, offset+c.notaryNonFixedInputSize*32);
    offset += c.notaryNonFixedInputSize * 32;

    labels['notaryFixed'] = blob.slice(offset, offset+c.notaryFixedInputSize*32);
    offset += c.notaryFixedInputSize * 32;

    labels['clientNonFixed'] = blob.slice(offset, offset+c.clientNonFixedInputSize*32);
    offset += c.clientNonFixedInputSize * 32;

    labels['clientFixed'] = blob.slice(offset, offset+c.clientFixedInputSize*32);
    offset += c.clientFixedInputSize * 32;
    assert(offset === blob.length);
    return labels;
    return concatTA(...out);
  }
}
201
core/twopc/OT.js
@@ -1,201 +0,0 @@
|
||||
import {OTWorker} from './OTWorker.js';
|
||||
import {concatTA, int2ba, assert, encrypt_generic, decrypt_generic} from './../utils.js';
|
||||
|
||||
export class OT{
|
||||
// class OT implements oblivious transfer protocol based on
|
||||
// Chou-Orlandi "Simplest OT"
|
||||
// as much pre-computation as possiblle is done in the offline phase
|
||||
|
||||
constructor(){
|
||||
this.decryptionKeys = [];
|
||||
this.notaryA = null; // A value of notary the sender
|
||||
this.worker = new OTWorker(4);
|
||||
|
||||
// pools are used to pre-compute in the offline phase the decryption key
|
||||
// for a receiver's bit in the oblivious transfer
|
||||
this.poolOf0 = []; // item format {k_R:<value>, B:<value>, idx:<>}
|
||||
this.poolOf1 = []; //
|
||||
|
||||
// OT for the sender
|
||||
this.a = sodium.crypto_core_ristretto255_scalar_random();
|
||||
this.A = sodium.crypto_scalarmult_ristretto255_base(this.a);
|
||||
this.encryptionKeys = [];
|
||||
}
|
||||
|
||||
setA(A){
|
||||
this.notaryA = A;
|
||||
}
|
||||
|
||||
getSenderA(){
|
||||
return this.A;
|
||||
}
|
||||
|
||||
// for each bit in bits pre-compute B and k_R (per [1])
|
||||
saveDecryptionKeysOld(bits){
|
||||
const receiverBs = [];
|
||||
// we will reuse the same B to save time
|
||||
// during release REUSE IS NOT ALLOWED - IT BREAKS SECURITY
|
||||
const b0 = sodium.crypto_core_ristretto255_scalar_random();
|
||||
const B0 = sodium.crypto_scalarmult_ristretto255_base(b0);
|
||||
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b0, this.notaryA));
|
||||
|
||||
const b1 = sodium.crypto_core_ristretto255_scalar_random();
|
||||
const gb1 = sodium.crypto_scalarmult_ristretto255_base(b1);
|
||||
const B1 = sodium.crypto_core_ristretto255_add(this.notaryA, gb1);
|
||||
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b1, this.notaryA));
|
||||
|
||||
for (let i=0; i < bits.length; i++){
|
||||
const bit = bits[i];
|
||||
this.decryptionKeys.push(bit === 0 ? k0 : k1);
|
||||
receiverBs.push(bit === 0 ? B0 : B1);
|
||||
}
|
||||
console.log('saveDecryptionKeys for count:', bits.length);
|
||||
return receiverBs;
|
||||
}
|
||||
|
||||
async saveDecryptionKeys(bits){
|
||||
const receiverBs = [];
|
||||
const entries = await this.worker.saveDecryptionKeys(bits, this.notaryA);
|
||||
for (const e of entries){
|
||||
this.decryptionKeys.push(e[0]);
|
||||
receiverBs.push(e[1]);
|
||||
}
|
||||
return receiverBs;
|
||||
}
|
||||
|
||||
|
||||
// decrypts 1-of-2 ciphertexts for a bit "bit" with a key "key" at index "idx"
|
||||
// returns the plaintext
|
||||
decryptWithKeyFromIndex(ciphertext, bit, idx){
|
||||
assert(ciphertext.length == 32);
|
||||
assert(bit === 0 || bit === 1);
|
||||
assert(this.decryptionKeys[idx] != undefined);
|
||||
const encMessage = ciphertext.slice(16*bit, 16*bit+16);
|
||||
return decrypt_generic(encMessage, this.decryptionKeys[idx], 0);
|
||||
}
|
||||
|
||||
// decrypts 1-of-2 ciphertexts for a bit "bit" with a key from obj.k_R
|
||||
decryptWithKey(ciphertext, bit, key){
|
||||
assert(ciphertext.length === 32 && key.length === 16);
|
||||
assert(bit === 0 || bit === 1);
|
||||
const encMessage = ciphertext.slice(16*bit, 16*bit+16);
|
||||
return decrypt_generic(encMessage, key, 0);
|
||||
}
|
||||
|
||||
// we prepare B and k_R for 120% of the bits. The extra 20% is needed because we don't
|
||||
// know in advance exactly how many 1s and 0s we'll need during the online phase
|
||||
precomputePoolOld(numBits){
|
||||
// we will reuse the same B to save time
|
||||
// during release REUSE IS NOT ALLOWED - IT BREAKS SECURITY
|
||||
const b0 = sodium.crypto_core_ristretto255_scalar_random();
|
||||
const B0 = sodium.crypto_scalarmult_ristretto255_base(b0);
|
||||
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b0, this.notaryA));
|
||||
|
||||
const b1 = sodium.crypto_core_ristretto255_scalar_random();
|
||||
const gb1 = sodium.crypto_scalarmult_ristretto255_base(b1);
|
||||
const B1 = sodium.crypto_core_ristretto255_add(this.notaryA, gb1);
|
||||
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b1, this.notaryA));
|
||||
|
||||
console.time('precomputePool');
|
||||
for (let i = 0; i < Math.ceil(numBits/2 * 1.2) ; i++){
|
||||
this.poolOf0.push({k_R:k0, B:B0});
|
||||
}
|
||||
for (let i = 0; i < Math.ceil(numBits/2 * 1.2); i++){
|
||||
this.poolOf1.push({k_R:k1, B:B1});
|
||||
}
|
||||
console.timeEnd('precomputePool');
|
||||
console.log('total bits', numBits);
|
||||
}
|
||||
|
||||
// we prepare B and k_R for 120% of the bits. The extra 20% is needed because we don't
|
||||
// know in advance exactly how many 1s and 0s we'll need during the online phase
|
||||
async precomputePool(numBits){
|
||||
const count = Math.ceil(numBits/2 * 1.2); // we need "count" zeroes and "count" ones
|
||||
const entries = await this.worker.precomputePool(count, this.notaryA);
|
||||
assert(entries.length === count*2);
|
||||
for (const e of entries.slice(0, count)){
|
||||
this.poolOf0.push({k_R:e[0], B:e[1]});
|
||||
}
|
||||
for (const e of entries.slice(count, count*2)){
|
||||
this.poolOf1.push({k_R:e[0], B:e[1]});
|
||||
}
|
||||
}
|
||||
|
||||
// given an array of bits, return the index for each bit in the pool
|
||||
// and decryption keys for OT
|
||||
getIndexesFromPool(bits){
|
||||
const idxArray = [];
|
||||
const otKeys = [];
|
||||
for (let i=0; i < bits.length; i++){
|
||||
const otbit = this.getFromPool(bits[i]);
|
||||
idxArray.push(int2ba(otbit.idx, 2));
|
||||
otKeys.push(otbit.k_R);
|
||||
}
|
||||
return [concatTA(...idxArray), otKeys];
|
||||
}
|
||||
|
||||
|
||||
// return an array of B values from poolOf0 and poolOf1 in random sequence
|
||||
// and remember each B's index in that sequence.
|
||||
getRandomizedPool(){
|
||||
function getRandomInt(min, max) {
|
||||
min = Math.ceil(min);
|
||||
max = Math.floor(max);
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
}
|
||||
|
||||
const randomizedB = [];
|
||||
const fullPool = [].concat(this.poolOf0, this.poolOf1);
|
||||
const origLen = fullPool.length;
|
||||
for (let i=0; i < origLen; i++){
|
||||
const randIdx = getRandomInt(0, fullPool.length-1);
|
||||
const ot = fullPool.splice(randIdx, 1)[0];
|
||||
// modifying ot will be reflected in this.poolOf0/this.poolOf1 because of pass-by-reference
|
||||
ot.idx = i;
|
||||
randomizedB.push(ot.B);
|
||||
}
|
||||
return randomizedB;
|
||||
}
|
||||
|
||||
|
||||
// gets either 0 or 1 from pool
|
||||
getFromPool(bit){
|
||||
assert(bit === 0 || bit === 1);
|
||||
const pool = (bit === 0) ? this.poolOf0 : this.poolOf1;
|
||||
const item = pool.pop();
|
||||
assert(this.poolOf0.length > 0 && this.poolOf0.length > 0);
|
||||
return item;
|
||||
}
|
||||
|
||||
// given the receiver's B, encrypt m0 and m1
// we don't parallelize this function because the amount of encryptions is small
encrypt(m0, m1, B){
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(this.a, B));
const sub = sodium.crypto_core_ristretto255_sub(B, this.A);
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(this.a, sub));
const e0 = encrypt_generic(m0, k0, 0);
const e1 = encrypt_generic(m1, k1, 0);
return concatTA(e0, e1);
}

encryptWithKeyAtIndex(m0, m1, idx){
const k0 = this.encryptionKeys[idx][0];
const k1 = this.encryptionKeys[idx][1];
const e0 = encrypt_generic(m0, k0, 0);
const e1 = encrypt_generic(m1, k1, 0);
return concatTA(e0, e1);
}

// (client as the sender) for each B in arrOfB save an encryption keypair [k0,k1]
async prepareEncryptionKeys(arrOfB){
console.time('prepareEncryptionKeys');
const entries = await this.worker.prepareEncryptionKeys(arrOfB, this.a, this.A);
assert(entries.length === arrOfB.length);
for (const e of entries){
this.encryptionKeys.push([e[0], e[1]]);
}
console.timeEnd('prepareEncryptionKeys');
console.log('prepareEncryptionKeys for count:', arrOfB.length);
}
}
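
The pool entries above come from the Chou-Orlandi "Simplest OT" key agreement: the receiver derives its single key from b·A, while the sender can derive both candidate keys from a·B and a·(B − A), and the receiver's key coincides with exactly the one matching its choice bit. A minimal sketch of that symmetry (illustrative only; c, a and A = g^a are placeholders for the choice bit and the sender's scalar/point, with sodium loaded as in the extension):

// receiver side: pick b, form B according to the choice bit c
const b = sodium.crypto_core_ristretto255_scalar_random();
const gb = sodium.crypto_scalarmult_ristretto255_base(b);
const B = (c === 0) ? gb : sodium.crypto_core_ristretto255_add(A, gb);
const kR = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b, A));
// sender side: derive both keys; kR equals k0 when c == 0 and k1 when c == 1
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(a, B));
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(a, sodium.crypto_core_ristretto255_sub(B, A)));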

169
core/twopc/OTCommon.js
Normal file
@@ -0,0 +1,169 @@
import {assert, getRandom, bytesToBits, concatTA, int2ba, xor, ba2int,
bitsToBytes, splitIntoChunks, AESCTRencrypt, Salsa20} from '../utils.js';

// methods used by both OTSender and OTReceiver classes
export default class OTCommon{
// extend r (Uint8Array) into a matrix of 128 columns where depending on r's bit, each row
// is either all 0s or all 1s
extend_r(r){
// 128 bits all set to 1
const all_1 = new Uint8Array(16).fill(255);
// 128 bits all set to 0
const all_0 = new Uint8Array(16).fill(0);
const matrix = [];
const bits = bytesToBits(r).reverse();
for (const bit of bits){
matrix.push(bit == 0 ? all_0 : all_1);
}
return matrix;
}


// given a matrix, output 2 xor shares of it
secretShareMatrix(matrix){
const T0 = [];
const T1 = [];
for (let i=0; i < matrix.length; i++){
const rand = getRandom(16);
T0.push(rand);
T1.push(xor(matrix[i], rand));
}
return [T0, T1];
}


// transpose a matrix of bits. matrix is an array of rows (each row is a Uint8Array)
transposeMatrix(matrix){
const colCount = matrix[0].length*8;
const rowCount = matrix.length;
assert(colCount == 128 || rowCount == 128);
const newRows = [];
for (let j=0; j < colCount; j++){
// in which byte of the column is j located
const byteNo = j >> 3; //Math.floor(j / 8);
// what is the index of j inside the byte
const bitIdx = j % 8;
const newRowBits = [];
for (let i=0; i < rowCount; i++){
newRowBits.push((matrix[i][byteNo] >> (7-bitIdx)) & 1);
}
newRows.push(bitsToBytes(newRowBits.reverse()));
}
return newRows;
}


// pseudorandomly expands a 16-byte seed into a bytestring of bytesize "count"*16
// to benefit from AES-NI, we use browser WebCrypto's AES-CTR: with seed as the key
// we encrypt an all-zero bytestring.
async expandSeed(seed, count){
assert(seed.length == 16);
return await AESCTRencrypt(seed, new Uint8Array(count*16).fill(0));
}


// encrypt each 16-byte chunk of msg with a fixed-key Salsa20
async fixedKeyCipher(msg){
assert(msg.length % 16 == 0);
const fixedKey = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]);
const encryptedArr = [];
const chunks = splitIntoChunks(msg, 16);
for (const chunk of chunks){
encryptedArr.push(Salsa20(fixedKey, chunk));
}
return concatTA(...encryptedArr);
}


// to break the correlation, KOS15 needs a hash function which has tweakable correlation
// robustness (tcr). GKWY20 shows (Section 7.4) how to achieve tcr using a fixed-key cipher C
// instead of a hash, i.e. instead of Hash(x, i) we can do C(C(x) xor i) xor C(x)
async breakCorrelation(rows){
assert(rows[0].length == 16);
const AESx = await this.fixedKeyCipher(concatTA(...rows));
const indexesArray = [];
for (let i=0; i < rows.length; i++){
indexesArray.push(int2ba(i, 16));
}
const indexes = concatTA(...indexesArray);
return xor(await this.fixedKeyCipher(xor(AESx, indexes)), AESx);
}


// carry-less multiplication (i.e. multiplication in galois field) without reduction.
// Let a's right-most bit have index 0. Then for every bit set in a, b is left-shifted
// by the set bit's index value. All the left-shifted values are then XORed.
// a and b are both Uint8Arrays of 16 bytes. Returns a Uint8Array of 32 bytes

// this version is 25% faster than the naive implementation clmul128_unoptimized
clmul128(a, b){
const aBits = bytesToBits(a); // faster if turned to bits rather than shift each time
const b_bi = ba2int(b);
const shiftedB = [];
// shift only 7 times and then use these 7 shifts to construct all other shifts
for (let i=0n; i < 8n; i++){
const tmp = new Uint8Array(32).fill(0);
tmp.set(int2ba(b_bi << i, 17), 0);
shiftedB.push(tmp);
}
const res = new Uint8Array(32).fill(0);
for (let i = 0; i < 128; i++){
if (aBits[i]){ // a's bit is set
const byteNo = i >> 3; // this is faster than Math.floor(i / 8);
const bitIdx = i % 8;
const bLen = 17+byteNo;
for (let i=0; i < bLen; i++){
res[31-i] = res[31-i] ^ shiftedB[bitIdx][bLen-1-i];
}
}
}
return res;
}


// not in use, just for reference. This is the unoptimized version of clmul128
clmul128_unoptimized(a, b){
let res = 0n;
const aBits = bytesToBits(a); // faster if turned to bits rather than shift each time
const b_bi = ba2int(b);
for (let i = 0n; i < 128n; i++){
if (aBits[i]){ // a's bit is set
res ^= (b_bi << i);
}
}
return int2ba(res, 32);
}
}
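
Since clmul128 and clmul128_unoptimized are meant to be interchangeable, a quick consistency check is to compare them on random inputs. A sketch, assuming the same imports the surrounding files already use:

import {getRandom, eq} from '../utils.js';
import OTCommon from './OTCommon.js';

const common = new OTCommon();
for (let i = 0; i < 100; i++){
  const a = getRandom(16);
  const b = getRandom(16);
  // both must produce the same 32-byte carry-less product
  console.assert(eq(common.clmul128(a, b), common.clmul128_unoptimized(a, b)), 'clmul128 mismatch');
}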


// To test the protocol end-to-end, copy-paste this function into the extension's console
async function testFullProtocol(){
let otR = new PageSigner.OTReceiver(8);
let otS = new PageSigner.OTSender(8);
const [A, seedCommit] = await otR.setupStep1();
const [allBs, senderSeedShare] = otS.setupStep1(A, seedCommit);
const [encryptedColumns, receiverSeedShare, x, t] = await otR.setupStep2(allBs, senderSeedShare);
await otS.setupStep2(encryptedColumns, receiverSeedShare, x, t);
const requestBits = [0, 1, 1, 0];
const otReq1 = otR.createRequest(requestBits);
const senderMsg = new Uint8Array([].concat(
Array(16).fill(0),
Array(16).fill(1),
Array(16).fill(2),
Array(16).fill(3),
Array(16).fill(4),
Array(16).fill(5),
Array(16).fill(6),
Array(16).fill(7)));
const otResp = otS.processRequest(otReq1, senderMsg);
const decoded = otR.parseResponse(requestBits, otResp);
console.log('the result is: ', decoded);
const expected = new Uint8Array([].concat(
Array(16).fill(0),
Array(16).fill(3),
Array(16).fill(5),
Array(16).fill(6),
));
console.log('expected result is: ', expected);
}
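
To make the console test self-checking instead of comparing the two logged arrays by eye, the final step can assert byte equality. This assumes eq() from core/utils.js is reachable from the console, e.g. exposed on the PageSigner namespace, which is an assumption:

// hypothetical: PageSigner.eq is assumed to be the eq() exported by core/utils.js
if (PageSigner.eq(decoded, expected)){
  console.log('testFullProtocol: PASS');
} else {
  console.log('testFullProtocol: FAIL');
}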

153
core/twopc/OTReceiver.js
Normal file
@@ -0,0 +1,153 @@
/* global sodium */

import {assert, getRandom, bytesToBits, concatTA, sha256, xor, int2ba,
bitsToBytes, AESCTRencrypt} from '../utils.js';
import OTCommon from './OTCommon.js';

// class OTReceiver implements the receiver of the 1-of-2 Oblivious Transfer.
// run the full KOS15 protocol
export class OTReceiver extends OTCommon{
constructor(otCount){
super(); //noop but JS requires it to be called
this.otCount = otCount;
this.extraOT = 256; // extended OT which will be sacrificed as part of KOS15 protocol
this.totalOT = Math.ceil(otCount/8)*8 + this.extraOT;
// seedShare is my xor share of a PRG seed
this.seedShare = null;
this.rbits = [];
this.T0 = [];
this.T1 = [];
this.a = null;
this.A = null;
this.RT0 = [];
// receivedSoFar counts how many bits of OT have been used up by the receiver
this.receivedSoFar = 0;
// expectingResponseSize is how many OTs the receiver expects to receive from the sender
// at this point
this.expectingResponseSize = 0;
}

// run KOS15 to prepare Random OT. Step 1
async setupStep1(){
this.seedShare = getRandom(16);
const seedCommit = await sha256(this.seedShare);
const r = getRandom(this.totalOT/8);
const R = this.extend_r(r);
this.rbits = bytesToBits(r).reverse();
[this.T0, this.T1] = this.secretShareMatrix(R);
// for baseOT Bob is the sender, he chooses a and sends A
this.a = sodium.crypto_core_ristretto255_scalar_random();
this.A = sodium.crypto_scalarmult_ristretto255_base(this.a);
return [this.A, seedCommit];
}

// run KOS15 to prepare Random OT. Step 2
async setupStep2(allBsBlob, senderSeedShare){
// compute key_0 and key_1 for each B of the base OT
assert(allBsBlob.length == 128*32);
assert(senderSeedShare.length == 16);
const encrKeys = [];
for (let i=0; i < 128; i++){
const B = allBsBlob.slice(i*32, (i+1)*32);
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(this.a, B));
const sub = sodium.crypto_core_ristretto255_sub(B, this.A);
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(this.a, sub));
encrKeys.push([k0, k1]);
}
// Use the i-th k0 to encrypt the i-th column in T0, likewise
// use the i-th k1 to encrypt the i-th column in T1
const T0columns = this.transposeMatrix(this.T0);
const T1columns = this.transposeMatrix(this.T1);
const encryptedColumns = [];
for (let i=0; i < 128; i++){
encryptedColumns.push(await AESCTRencrypt(encrKeys[i][0], T0columns[i]));
encryptedColumns.push(await AESCTRencrypt(encrKeys[i][1], T1columns[i]));
}

// KOS15 kicks in at this point to check if Receiver sent the correct columnsArray
// combine seed shares and expand the seed
const seed = xor(this.seedShare, senderSeedShare);
const expandedSeed = await this.expandSeed(seed, this.totalOT);
// Bob multiplies every 128-bit row of matrix T0 with the corresponding random
// value in expandedSeed and XORs the products.
// Bob multiplies every bit of r with the corresponding random
// value in expandedSeed and XORs the products.
// Bob sends seed,x,t to Alice
let x = new Uint8Array(16).fill(0);
let t = new Uint8Array(32).fill(0);
for (let i=0; i < this.T0.length; i++){
const rand = expandedSeed.subarray(i*16, (i+1)*16);
if (this.rbits[i] == 1){
x = xor(x, rand);
}
t = xor(t, this.clmul128(this.T0[i], rand));
}

// we need to break correlations between Q0 and Q1
// The last extraOTs were sacrificed as part of the KOS15 protocol
// and so we don't need them anymore
console.log('start breakCorrelation');
this.RT0 = await this.breakCorrelation(this.T0.slice(0, -this.extraOT));
console.log('end breakCorrelation');
// also drop the unneeded bytes and bits of r
this.rbits = this.rbits.slice(0, -this.extraOT);


// now we have instances of Random OT where depending on r's bit,
// each row in RT0 equals a row in either RQ0 or RQ1
console.log('done 2');
// use Beaver Derandomization [Beaver91] to convert randomOT into standardOT
return [concatTA(...encryptedColumns), this.seedShare, x, t];
}

// Steps 5, 6 and 7 are repeated for every batch of OT


// createRequest takes OT receiver's choice bits and instructs the
// OT sender which random masks need to be flipped (Beaver Derandomization)
createRequest(choiceBits){
assert(this.receivedSoFar + choiceBits.length <= this.otCount, 'No more OTs left.');
assert(this.expectingResponseSize == 0, 'The previous request must be processed before requesting more OTs.');
// for Beaver Derandomization, tell the Sender which masks to flip: 0 means
// no flip needed, 1 means a flip is needed
const bitsToFlip = [];
for (let i=0; i < choiceBits.length; i++){
bitsToFlip.push(choiceBits[i] ^ this.rbits[this.receivedSoFar+i]);
}
// pad the bitcount to a multiple of 8
let padCount = 0;
if (choiceBits.length % 8 > 0){
padCount = 8 - choiceBits.length % 8;
}
for (let i=0; i < padCount; i++){
bitsToFlip.push(0);
}

this.expectingResponseSize = choiceBits.length;
// prefix with the amount of bits that Sender needs to drop
// in cases when bitsArr.length is not a multiple of 8
return concatTA(int2ba(padCount, 1), bitsToBytes(bitsToFlip));
}

// parseResponse unmasks the OT sender's masked values based on the choice
// bit and the random mask of the OT receiver
parseResponse(choiceBits, maskedOT){
assert(this.expectingResponseSize == choiceBits.length);
assert(this.expectingResponseSize*32 == maskedOT.length);
const decodedArr = [];
for (let i=0; i < choiceBits.length; i++){
const mask = this.RT0.slice((this.receivedSoFar+i)*16, (this.receivedSoFar+i)*16+16);
const m0 = maskedOT.slice(i*32, i*32+16);
const m1 = maskedOT.slice(i*32+16, i*32+32);
if (choiceBits[i] == 0){
decodedArr.push(xor(m0, mask));
} else {
decodedArr.push(xor(m1, mask));
}
}
this.receivedSoFar += choiceBits.length;
this.expectingResponseSize = 0;
console.log('this.receivedSoFar', this.receivedSoFar);
return concatTA(...decodedArr);
}
}
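
createRequest/parseResponse implement Beaver derandomization: per OT instance the receiver only reveals whether its real choice bit differs from the random bit r fixed during setup, and the sender swaps its two masks accordingly. A one-instance sketch (illustrative; choiceBit, r, mask, r0, r1, m0, m1 are placeholders, with mask equal to r0 when r is 0 and to r1 otherwise, and xor as in utils.js):

const flip = choiceBit ^ r;                    // the only bit the receiver sends
const e0 = xor(m0, flip ? r1 : r0);            // sender masks m0
const e1 = xor(m1, flip ? r0 : r1);            // sender masks m1
const chosen = xor(choiceBit ? e1 : e0, mask); // receiver recovers m[choiceBit] and nothing else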

136
core/twopc/OTSender.js
Normal file
@@ -0,0 +1,136 @@
/* global sodium */

import {assert, getRandom, bytesToBits, concatTA, xor, eq, AESCTRdecrypt,
sha256, ba2int} from '../utils.js';
import OTCommon from './OTCommon.js';


// class OTSender implements the sender of the Oblivious Transfer according to
// the KOS15 protocol
export class OTSender extends OTCommon{
constructor(otCount){
super(); //noop but JS requires it to be called
this.otCount = otCount;
this.extraOT = 256; // extended OT which will be sacrificed as part of KOS15 protocol
this.totalOT = Math.ceil(otCount/8)*8 + this.extraOT;
this.S = null;
this.sBits = [];
this.decrKeys = [];
this.RQ0 = [];
this.RQ1 = [];
// sentSoFar is how many OTs the sender has already sent
this.sentSoFar = 0;
// hisCommit is Receiver's commit to the PRG seed
this.hisCommit = null;
// seedShare is my xor share of a PRG seed
this.seedShare = null;
}

// part of KOS15
setupStep1(A, hisCommit){
this.hisCommit = hisCommit;
this.seedShare = getRandom(16);
// Alice computes her Bs and decryption keys based on each bit in S
this.S = getRandom(16);
this.sBits = bytesToBits(this.S).reverse();
const allBs = [];
this.decrKeys = [];
for (const bit of this.sBits){
const b = sodium.crypto_core_ristretto255_scalar_random();
let B = sodium.crypto_scalarmult_ristretto255_base(b);
if (bit == 1){
B = sodium.crypto_core_ristretto255_add(A, B);
}
const k = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b, A));
this.decrKeys.push(k);
allBs.push(B);
}
return [concatTA(...allBs), this.seedShare];
}


// part of KOS15
async setupStep2(encryptedColumnsBlob, receiverSeedShare, x, t){
assert(receiverSeedShare.length == 16);
assert(encryptedColumnsBlob.length % 256 == 0);
const encryptedColumns = [];
const columnSize = encryptedColumnsBlob.length/256;
for (let i=0; i < 256; i++){
encryptedColumns.push(encryptedColumnsBlob.slice(i*columnSize, (i+1)*columnSize));
}
// Decrypt only those columns which correspond to S's bit
const columns = [];
for (let i=0; i < 128; i++){
const col0 = encryptedColumns[i*2];
const col1 = encryptedColumns[i*2+1];
if (this.sBits[i] == 0){
columns.push(await AESCTRdecrypt(this.decrKeys[i], col0));
} else {
columns.push(await AESCTRdecrypt(this.decrKeys[i], col1));
}
}
const Q0 = this.transposeMatrix(columns);

// KOS15: Alice multiplies every 128-bit row of matrix Q0 with the corresponding random
// value in expandedSeed and XORs the products
assert(eq(await sha256(receiverSeedShare), this.hisCommit), 'Bad seed commit');
const seed = xor(receiverSeedShare, this.seedShare);
const expandedSeed = await this.expandSeed(seed, this.totalOT);
let q = new Uint8Array(32).fill(0);
for (let i=0; i < Q0.length; i++){
const rand = expandedSeed.subarray(i*16, (i+1)*16);
q = xor(q, this.clmul128(Q0[i], rand));
}
// Alice checks that t = q xor x * S
assert(eq(t, xor(q, this.clmul128(x, this.S))));

// Alice xors each row of Q0 with S to get Q1
const Q1 = [];
for (let i=0; i < Q0.length; i++){
Q1.push(xor(Q0[i], this.S));
}

// we need to break correlations between Q0 and Q1
// The last extraOTs were sacrificed as part of the KOS15 protocol
// and so we don't need them anymore
console.log('start breakCorrelation');
this.RQ0 = await this.breakCorrelation(Q0.slice(0, -this.extraOT));
this.RQ1 = await this.breakCorrelation(Q1.slice(0, -this.extraOT));
console.log('end breakCorrelation');
// now we have instances of Random OT where depending on r's bit,
// each row in RT0 equals a row in either RQ0 or RQ1
console.log('done 2');
// in Steps 5,6,7 we will use Beaver Derandomization to convert
// randomOT into standardOT
}


// processRequest performs Beaver Derandomization:
// for every bit in bitsToFlip, the Sender has two 16-byte messages for 1-of-2 OT and
// two random masks (from the KOS15 protocol) r0 and r1
// if the bit is 0, the Sender sends (m0 xor r0) and (m1 xor r1),
// if the bit is 1, the Sender sends (m0 xor r1) and (m1 xor r0)
processRequest(bitsBlob, messages){
const dropCount = ba2int(bitsBlob.slice(0, 1));
const bitsToFlipWithRem = bytesToBits(bitsBlob.slice(1));
const bitsToFlip = bitsToFlipWithRem.slice(0, bitsToFlipWithRem.length-dropCount);
assert(this.sentSoFar + bitsToFlip.length <= this.otCount);
assert(bitsToFlip.length*32 == messages.length);
const encodedToSend = [];
for (let i=0; i < bitsToFlip.length; i++){
const m0 = messages.slice(i*32, i*32+16);
const m1 = messages.slice(i*32+16, i*32+32);
const r0 = this.RQ0.slice((this.sentSoFar+i)*16, (this.sentSoFar+i)*16+16);
const r1 = this.RQ1.slice((this.sentSoFar+i)*16, (this.sentSoFar+i)*16+16);
if (bitsToFlip[i] == 0){
encodedToSend.push(xor(m0, r0));
encodedToSend.push(xor(m1, r1));
} else {
encodedToSend.push(xor(m0, r1));
encodedToSend.push(xor(m1, r0));
}
}
this.sentSoFar += bitsToFlip.length;
return concatTA(...encodedToSend);
}
}
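
The assert on t in setupStep2 works because each sender row satisfies Q0[i] == xor(T0[i], rbits[i] ? S : zero) and carry-less multiplication is linear over XOR, so q collapses to t xor clmul128(x, S). A small sketch of that linearity using the same helpers (illustrative only):

import {getRandom, xor, eq} from '../utils.js';
import OTCommon from './OTCommon.js';

const c = new OTCommon();
const t0 = getRandom(16), s = getRandom(16), chi = getRandom(16);
// clmul(t0 xor s, chi) == clmul(t0, chi) xor clmul(s, chi)
console.assert(eq(c.clmul128(xor(t0, s), chi),
  xor(c.clmul128(t0, chi), c.clmul128(s, chi))), 'clmul128 is not XOR-linear');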
@@ -1,147 +0,0 @@
// note that unlike with GCWorker class, the caller of OTWorker's methods doesn't have
// to manage the worker pool. We manage them internally inside the class.
import {bitsToBytes, concatTA, assert} from './../utils.js';

import WorkerPool from './WorkerPool.js';

export class OTWorker extends WorkerPool{
constructor(numWorkers){
super(numWorkers, chrome.extension.getURL('core/twopc/webWorkers/otworker.js'));
}

async saveDecryptionKeys(bits, A){
let padCount = 0;
if (bits.length % 8 > 0){
// make bits length a multiple of 8
padCount = 8 - bits.length % 8;
bits.push(...Array(padCount).fill(0));
}
const bytes = bitsToBytes(bits);
// put chunks of 128 bytes into a batch
const batch = [];
const chunkSize = 128;
for (let i=0; i < Math.ceil(bytes.length/chunkSize); i++){
batch.push([bytes.slice(i*chunkSize, (i+1)*chunkSize), A]);
}
const outputs = await this.workerPool(batch, this.saveDecryptionKeysDoWork);
const arrOutput = [];
for (let i=0; i < batch.length; i++){
arrOutput[i] = new Uint8Array(outputs[batch.length-1-i]);
}
const dataBlob = concatTA(...arrOutput);
assert(dataBlob.length === bits.length*48);
// deserialize data
const entries = [];
for (let i=0; i < bits.length; i++){
const k = dataBlob.slice(i*48, i*48+16);
const B = dataBlob.slice(i*48+16, i*48+48);
entries.push([k,B]);
}
return entries.slice(0, entries.length-padCount);
}

async precomputePool(count, A){
// chunk up into 512 items and put chunks into a batch
const batch = [];
const chunkSize = 512;
const chunkCount = Math.ceil(count/chunkSize);
const lastChunkSize = count - (chunkSize * (chunkCount-1));
for (let i=0; i < chunkCount; i++){
if (i === chunkCount-1){
batch.push([lastChunkSize, A]);
}
else{
batch.push([chunkSize, A]);
}
}
const outputs = await this.workerPool(batch, this.precomputePoolDoWork);
const arrOutput = [];
for (let i=0; i < batch.length; i++){
arrOutput[i] = new Uint8Array(outputs[i]);
}
// deserialize data
const entries0 = [];
const entries1 = [];
for (let i=0; i < arrOutput.length; i++){
const size = (i < (chunkCount-1)) ? chunkSize : lastChunkSize;
const batch = arrOutput[i];
for (let j=0; j < size*2; j++){
const k = batch.slice(j*48, j*48+16);
const B = batch.slice(j*48+16, j*48+48);
if (j < size){
entries0.push([k,B]);
}
else {
entries1.push([k,B]);
}
}
}
const allEntries = [].concat(entries0, entries1);
assert(allEntries.length === count*2);
return allEntries;
}

async prepareEncryptionKeys(arrOfB, a, A){
const batch = [];
const chunkSize = 1024;
const chunkCount = Math.ceil(arrOfB.length/chunkSize);
for (let i=0; i < chunkCount; i++){
batch.push([concatTA(...arrOfB.slice(i*chunkSize, (i+1)*chunkSize)), a, A]);
}
const outputs = await this.workerPool(batch, this.prepareEncryptionKeysDoWork);
const arrOutput = [];
for (let i=0; i < batch.length; i++){
arrOutput[i] = new Uint8Array(outputs[i]);
}
const dataBlob = concatTA(...arrOutput);
assert(dataBlob.length === arrOfB.length*32);
// deserialize data
const entries = [];
for (let i=0; i < arrOfB.length; i++){
const k0 = dataBlob.slice(i*32, i*32+16);
const k1 = dataBlob.slice(i*32+16, i*32+32);
entries.push([k0,k1]);
}
return entries;
}

prepareEncryptionKeysDoWork(batchItem, worker){
const bytes = batchItem[0];
const a = batchItem[1];
const A = batchItem[2];
return new Promise(function(resolve) {
worker.onmessage = function(event) {
worker['isResolved'] = true;
resolve(event.data.blob);
};
const obj = {msg:'prepareEncryptionKeys', data:{bytes:bytes.buffer, a:a.buffer, A:A.buffer}};
worker.postMessage(obj);
});
}

precomputePoolDoWork(batchItem, worker){
const count = batchItem[0];
const A = batchItem[1];
return new Promise(function(resolve) {
worker.onmessage = function(event) {
worker['isResolved'] = true;
resolve(event.data.blob);
};
const obj = {msg:'precomputePool', data:{count:count, A:A.buffer}};
worker.postMessage(obj);
});
}

saveDecryptionKeysDoWork(batchItem, worker){
const bytes = batchItem[0];
const A = batchItem[1];
return new Promise(function(resolve) {
worker.onmessage = function(event) {
worker['isResolved'] = true;
resolve(event.data.blob);
};
const obj = {msg:'saveDecryptionKeys', data:{bytes:bytes.buffer, A:A.buffer}};
worker.postMessage(obj);
});
}
}

1133
core/twopc/TWOPC.js
File diff suppressed because it is too large
@@ -1,26 +1,26 @@
// gcworker.js is a WebWorker which performs garbling and evaluation
// of garbled circuits
// of garbled circuits.
// This is a fixed-key-cipher garbling method from BHKR13 https://eprint.iacr.org/2013/426.pdf

// eslint-disable-next-line no-undef
var parentPort_;
let circuit = null;
let truthTable = null;
let timeEvaluating = 0;

// sha0 is used by randomOracle
const sha0 = new Uint8Array( hex2ba('da5698be17b9b46962335799779fbeca8ce5d491c0d26243bafef9ea1837a9d8'));
// byteArray is used by randomOracle
const byteArray = new Uint8Array(24);
// fixedKey is used by randomOracle(). We need a 32-byte key because we use Salsa20. The last 4
// bytes will be filled with the index of the circuit's wire.
const fixedKey = new Uint8Array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,
25,26,27,28,0,0,0,0]);
// sigma is Salsa's constant "expand 32-byte k"
const sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]);
// randomPool will be filled with data from getRandom
let randomPool;
// randomPoolOffset will be moved after data was read from randomPool
let randomPoolOffset = 0;
let garbledAssigment;
var crypto_;
var nacl;

if (typeof(importScripts) !== 'undefined') {
importScripts('./../../third-party/nacl-fast.js');
crypto_ = self.crypto;
self.onmessage = function(event) {
processMessage(event.data);
@@ -34,7 +34,6 @@ if (typeof(importScripts) !== 'undefined') {
const filePath = 'file://' + process.argv[1];
// this workaround allows to require() from ES6 modules, which is not allowed by default
const require = module.createRequire(filePath)
nacl = require('tweetnacl')
const { parentPort } = require('worker_threads');
parentPort_ = parentPort
const { Crypto } = require("@peculiar/webcrypto");
@@ -56,7 +55,7 @@ function processMessage(obj){
// no need to respond to this message
}
else if (msg === 'setTruthTable'){
assert(data.byteLength == circuit.andGateCount*64);
assert(data.byteLength == circuit.andGateCount*48);
truthTable = new Uint8Array(data);
}
else if (msg === 'garble'){
@@ -70,7 +69,7 @@ function processMessage(obj){
const reuseR = (data == undefined) ? undefined : data.reuseR;

const [truthTable, inputLabels, outputLabels, R] = garble(circuit, garbledAssigment, reuseLabels, reuseIndexes, reuseR);
assert (truthTable.length === circuit.andGateCount*64);
assert (truthTable.length === circuit.andGateCount*48);
assert (inputLabels.length === circuit.clientInputSize*32 + circuit.notaryInputSize*32);
assert (outputLabels.length === circuit.outputSize*32);
const obj = {'tt': truthTable.buffer, 'il': inputLabels.buffer, 'ol': outputLabels.buffer, 'R': R};
@@ -106,6 +105,8 @@ function postMsg(value, transferList){

function newR(){
const R = getRandom(16);
// set the last bit of R to 1 for point-and-permute
// this guarantees that 2 labels of the same wire will have the opposite last bits
R[15] = R[15] | 0x01;
return R;
}
@@ -145,7 +146,8 @@ function garble(circuit, ga, reuseLabels = new Uint8Array(0) , reuseIndexes = []
}
}

const truthTable = new Uint8Array(circuit.andGateCount*64);
const truthTable = new Uint8Array(circuit.andGateCount*48);

let andGateIdx = 0;
// garble gates
for (let i = 0; i < circuit.gatesCount; i++) {
@@ -167,41 +169,64 @@ function garble(circuit, ga, reuseLabels = new Uint8Array(0) , reuseIndexes = []

}


const garbleAnd = function (gateBlob, R, ga, tt, andGateIdx, id) {
// get wire numbers
const in1 = threeBytesToInt(gateBlob.subarray(1,4));
const in2 = threeBytesToInt(gateBlob.subarray(4,7));
const out = threeBytesToInt(gateBlob.subarray(7,10));

const randomLabel = getRandom(16);

gaSetIndexG(ga, out, 0, randomLabel);
gaSetIndexG(ga, out, 1, xor(randomLabel, R, true));

// get labels of each wire
const in1_0 = gaGetIndexG(ga, in1, 0);
const in1_1 = gaGetIndexG(ga, in1, 1);
const in2_0 = gaGetIndexG(ga, in2, 0);
const in2_1 = gaGetIndexG(ga, in2, 1);
const out_0 = gaGetIndexG(ga, out, 0);
const out_1 = gaGetIndexG(ga, out, 1);

// rows is a truth table of wire labels in a canonical order, the third
// item shows the index of the output label
const rows = [
[in1_0, in2_0, 0],
[in1_0, in2_1, 0],
[in1_1, in2_0, 0],
[in1_1, in2_1, 1]
]

const values = [
encrypt(in1_0, in2_0, id, out_0),
encrypt(in1_0, in2_1, id, out_0),
encrypt(in1_1, in2_0, id, out_0),
encrypt(in1_1, in2_1, id, out_1)
];

const points = [
2 * getPoint(in1_0) + getPoint(in2_0),
2 * getPoint(in1_0) + getPoint(in2_1),
2 * getPoint(in1_1) + getPoint(in2_0),
2 * getPoint(in1_1) + getPoint(in2_1)
];
// GRR3: garbled row reduction
// We want to reduce a row where both labels' points are set to 1.
// We first need to encrypt those labels with a dummy all-zero output label. The
// result X will be the actual value of the output label that we need to set.
// After we set the output label to X and encrypt again, the result will be 0 (but
// we don't actually need to encrypt it again, we just know that the result will be 0)

tt.set(values[0], andGateIdx*64+16*points[0]);
tt.set(values[1], andGateIdx*64+16*points[1]);
tt.set(values[2], andGateIdx*64+16*points[2]);
tt.set(values[3], andGateIdx*64+16*points[3]);
let outLabels
// idxToReduce is the index of the row that will be reduced
let idxToReduce = -1;
for (let i=0; i < rows.length; i++){
if (getPoint(rows[i][0]) == 1 && getPoint(rows[i][1]) == 1){
const outWire = encrypt(rows[i][0], rows[i][1], id, new Uint8Array(16).fill(0));
if (i==3){
outLabels = [xor(outWire, R), outWire]
} else {
outLabels = [outWire, xor(outWire, R)]
}
idxToReduce = i;
break;
}
}
gaSetIndexG(ga, out, 0, outLabels[0]);
gaSetIndexG(ga, out, 1, outLabels[1]);
assert(idxToReduce != -1)

for (let i=0; i < rows.length; i++){
if (i == idxToReduce){
// not encrypting this row because we already know that its encryption is 0
// and the sum of its points is 3
continue;
}
const value = encrypt(rows[i][0], rows[i][1], id, outLabels[rows[i][2]]);
const point = 2 * getPoint(rows[i][0]) + getPoint(rows[i][1])
tt.set(value, andGateIdx*48+16*point);
}
};
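
As the GRR3 comment above notes, the output label of the reduced row is defined as the encryption of an all-zero block, which makes re-encrypting that label under the same two input labels and gate id come out as all zeros; the evaluator can therefore substitute a zero ciphertext for the point==3 row instead of reading it from the table. Restated as a fragment using garbleAnd's own names (illustrative only, not extra protocol steps):

// outWire is defined as Enc(rows[idxToReduce], id, 0)
const zero16 = new Uint8Array(16).fill(0);
const outWire = encrypt(rows[idxToReduce][0], rows[idxToReduce][1], id, zero16);
// re-encrypting outWire under the same labels yields an all-zero block,
// so this row is simply dropped from the 48-byte-per-gate truth table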


@@ -255,7 +280,6 @@ function evaluate (circuit, ga, tt, inputLabels) {
}
console.timeEnd('worker_evaluate');
const t1 = performance.now();
timeEvaluating += (t1 - t0);

return ga.slice((circuit.wiresCount-circuit.outputSize)*16, circuit.wiresCount*16);
}
@@ -268,13 +292,20 @@ const evaluateAnd = function (ga, tt, andGateIdx, gateBlob, id) {
const label1 = gaGetIndexE(ga, in1); // ga[in1];
const label2 = gaGetIndexE(ga, in2); // ga[in2];

let cipher
const point = 2 * getPoint(label1) + getPoint(label2);
const offset = andGateIdx*64+16*point;
const cipher = tt.subarray(offset, offset+16);

if (point == 3){
// GRR3: all rows with point sum of 3 have been reduced
// their encryption is an all-zero bytestring
cipher = new Uint8Array(16).fill(0);
} else {
const offset = andGateIdx*48+16*point;
cipher = tt.subarray(offset, offset+16);
}
gaSetIndexE(ga, out, decrypt(label1, label2, id, cipher));
};


const evaluateXor = function (ga, gateBlob) {
const in1 = threeBytesToInt(gateBlob.subarray(1,4));
const in2 = threeBytesToInt(gateBlob.subarray(4,7));
@@ -315,10 +346,7 @@ function gaSetIndexG(ga, idx, pos, value){


function xor(a, b, reuse) {
if (a.length !== b.length){
console.log('a.length !== b.length');
throw('a.length !== b.length');
}
assert(a.length == b.length, 'a.length !== b.length')
let bytes;
if (reuse === true){
// in some cases the calling function will have no more use of "a"
@@ -341,16 +369,18 @@ function getPoint(arr) {

const decrypt = encrypt;

let a2;
let b4;
// Based on the A4 method from Fig.1 and the D4 method in Fig6 of the BHKR13 paper
// (https://eprint.iacr.org/2013/426.pdf)
// Note that the paper doesn't prescribe a specific method to break the symmetry between A and B,
// so we choose a circular byte shift instead of a circular bitshift as in Fig6.
function encrypt(a, b, t, m) {
// double a
a2 = a.slice();
const a2 = a.slice();
const leastbyte = a2[0];
a2.copyWithin(0,1,15); // Logical left shift by 1 byte
a2[14] = leastbyte; // Restore old least byte as new greatest (non-pointer) byte
// quadruple b
b4 = b.slice();
const b4 = b.slice();
const leastbytes = [b4[0], b4[1]];
b4.copyWithin(0,2,15); // Logical left shift by 2 byte
[b4[13], b4[14]] = leastbytes; // Restore old least two bytes as new greatest bytes
@@ -361,41 +391,23 @@ function encrypt(a, b, t, m) {
return xor(ro, mXorK, true);
}


function randomOracle(m, t) {
return nacl.secretbox(
m,
longToByteArray(t),
sha0,
).subarray(0,16);
// convert the integer t to a 4-byte big-endian array and append
// it to fixedKey in-place
for (let index = 0; index < 4; index++) {
const byte = t & 0xff;
fixedKey[31-index] = byte;
t = (t - byte) / 256;
}
return Salsa20(fixedKey, m);
}
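
Since the tweak t is written into the last four bytes of fixedKey in big-endian order (and fixedKey is reused and mutated in place across calls), a quick console check of the encoding looks like this (a sketch with a placeholder input):

const m16 = new Uint8Array(16);   // placeholder 16-byte input
randomOracle(m16, 0x01020304);
console.log(fixedKey.slice(28));  // expected: Uint8Array [1, 2, 3, 4]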


function longToByteArray(long) {
// we want to represent the input as a 24-bytes array
for (let index = 0; index < byteArray.length; index++) {
const byte = long & 0xff;
byteArray[index] = byte;
long = (long - byte) / 256;
}
return byteArray;
}

function threeBytesToInt(b){
return b[2] + b[1]*256 + b[0]*65536;
}

// convert a hex string into byte array
function hex2ba(str) {
var ba = [];
// pad with a leading 0 if necessary
if (str.length % 2) {
str = '0' + str;
}
for (var i = 0; i < str.length; i += 2) {
ba.push(parseInt('0x' + str.substr(i, 2)));
}
return ba;
}


function getRandom(count) {
const rand = randomPool.subarray(randomPoolOffset, randomPoolOffset+count);
@@ -438,4 +450,148 @@ function concatTA (...arr){
offset += item.length;
}
return newArray;
}
}

// use Salsa20 as a random permutator. Instead of the nonce, we feed the data that needs
// to be permuted.
function Salsa20(key, data){
const out = new Uint8Array(16);
core_salsa20(out, data, key, sigma)
return out;
}

// copied from https://github.com/dchest/tweetnacl-js/blob/master/nacl-fast.js
// and modified to output only 16 bytes
function core_salsa20(o, p, k, c) {
var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24,
j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24,
j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24,
j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24,
j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24,
j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24,
j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24,
j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24,
j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24,
j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24,
j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24,
j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24,
j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24,
j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24,
j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24,
j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24;

var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7,
x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14,
x15 = j15, u;

for (var i = 0; i < 20; i += 2) {
u = x0 + x12 | 0;
x4 ^= u<<7 | u>>>(32-7);
u = x4 + x0 | 0;
x8 ^= u<<9 | u>>>(32-9);
u = x8 + x4 | 0;
x12 ^= u<<13 | u>>>(32-13);
u = x12 + x8 | 0;
x0 ^= u<<18 | u>>>(32-18);

u = x5 + x1 | 0;
x9 ^= u<<7 | u>>>(32-7);
u = x9 + x5 | 0;
x13 ^= u<<9 | u>>>(32-9);
u = x13 + x9 | 0;
x1 ^= u<<13 | u>>>(32-13);
u = x1 + x13 | 0;
x5 ^= u<<18 | u>>>(32-18);

u = x10 + x6 | 0;
x14 ^= u<<7 | u>>>(32-7);
u = x14 + x10 | 0;
x2 ^= u<<9 | u>>>(32-9);
u = x2 + x14 | 0;
x6 ^= u<<13 | u>>>(32-13);
u = x6 + x2 | 0;
x10 ^= u<<18 | u>>>(32-18);

u = x15 + x11 | 0;
x3 ^= u<<7 | u>>>(32-7);
u = x3 + x15 | 0;
x7 ^= u<<9 | u>>>(32-9);
u = x7 + x3 | 0;
x11 ^= u<<13 | u>>>(32-13);
u = x11 + x7 | 0;
x15 ^= u<<18 | u>>>(32-18);

u = x0 + x3 | 0;
x1 ^= u<<7 | u>>>(32-7);
u = x1 + x0 | 0;
x2 ^= u<<9 | u>>>(32-9);
u = x2 + x1 | 0;
x3 ^= u<<13 | u>>>(32-13);
u = x3 + x2 | 0;
x0 ^= u<<18 | u>>>(32-18);

u = x5 + x4 | 0;
x6 ^= u<<7 | u>>>(32-7);
u = x6 + x5 | 0;
x7 ^= u<<9 | u>>>(32-9);
u = x7 + x6 | 0;
x4 ^= u<<13 | u>>>(32-13);
u = x4 + x7 | 0;
x5 ^= u<<18 | u>>>(32-18);

u = x10 + x9 | 0;
x11 ^= u<<7 | u>>>(32-7);
u = x11 + x10 | 0;
x8 ^= u<<9 | u>>>(32-9);
u = x8 + x11 | 0;
x9 ^= u<<13 | u>>>(32-13);
u = x9 + x8 | 0;
x10 ^= u<<18 | u>>>(32-18);

u = x15 + x14 | 0;
x12 ^= u<<7 | u>>>(32-7);
u = x12 + x15 | 0;
x13 ^= u<<9 | u>>>(32-9);
u = x13 + x12 | 0;
x14 ^= u<<13 | u>>>(32-13);
u = x14 + x13 | 0;
x15 ^= u<<18 | u>>>(32-18);
}
x0 = x0 + j0 | 0;
x1 = x1 + j1 | 0;
x2 = x2 + j2 | 0;
x3 = x3 + j3 | 0;
x4 = x4 + j4 | 0;
x5 = x5 + j5 | 0;
x6 = x6 + j6 | 0;
x7 = x7 + j7 | 0;
x8 = x8 + j8 | 0;
x9 = x9 + j9 | 0;
x10 = x10 + j10 | 0;
x11 = x11 + j11 | 0;
x12 = x12 + j12 | 0;
x13 = x13 + j13 | 0;
x14 = x14 + j14 | 0;
x15 = x15 + j15 | 0;

o[ 0] = x0 >>> 0 & 0xff;
o[ 1] = x0 >>> 8 & 0xff;
o[ 2] = x0 >>> 16 & 0xff;
o[ 3] = x0 >>> 24 & 0xff;

o[ 4] = x1 >>> 0 & 0xff;
o[ 5] = x1 >>> 8 & 0xff;
o[ 6] = x1 >>> 16 & 0xff;
o[ 7] = x1 >>> 24 & 0xff;

o[ 8] = x2 >>> 0 & 0xff;
o[ 9] = x2 >>> 8 & 0xff;
o[10] = x2 >>> 16 & 0xff;
o[11] = x2 >>> 24 & 0xff;

o[12] = x3 >>> 0 & 0xff;
o[13] = x3 >>> 8 & 0xff;
o[14] = x3 >>> 16 & 0xff;
o[15] = x3 >>> 24 & 0xff;
// we only need 16 bytes of the output
}
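
Because the wrapper feeds the 16-byte input through the block function's nonce/counter slots, each (key, data) pair maps deterministically to a 16-byte output. A minimal usage sketch with placeholder values:

const key32 = new Uint8Array(32).fill(7);  // placeholder 32-byte key
const block = new Uint8Array(16).fill(1);  // placeholder 16-byte input
const out16 = Salsa20(key32, block);       // deterministic 16-byte mask for (key32, block)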

@@ -1,159 +0,0 @@
// otworker.js is a WebWorker where most of the heavy computations for
// Oblivious Transfer happen. Based on Chou-Orlandi "Simplest OT"

var sodium
var parentPort_;


if (typeof(importScripts) === 'undefined'){
// we are in nodejs
import('module').then((module) => {
// we cannot use the "import" keyword here because on first pass the browser unconditionaly
// parses this if clause and will error out if "import" is found
// using process.argv instead of import.meta.url to get the name of this script
const filePath = 'file://' + process.argv[1];
// this workaround allows to require() from ES6 modules, which is not allowed by default
const require = module.createRequire(filePath)
const { parentPort } = require('worker_threads');
parentPort_ = parentPort
sodium = require('libsodium-wrappers-sumo');
parentPort.on('message', msg => {
processMessage(msg);
})
})
} else {
importScripts('./../../third-party/sodium.js');
self.onmessage = function(event) {
processMessage(event.data);
};
}


function processMessage(obj){
const msg = obj.msg;
const data = obj.data;
if (msg === 'saveDecryptionKeys'){
const bytes = new Uint8Array(data.bytes);
const A = new Uint8Array(data.A);
const rv = saveDecryptionKeys(bytes, A);
postMsg({'blob': rv.buffer});

}
else if (msg === 'precomputePool'){
const count = data.count;
const A = new Uint8Array(data.A);
const rv = precomputePool(count, A);
postMsg({'blob': rv.buffer});
}
else if (msg === 'prepareEncryptionKeys'){
const bytes = new Uint8Array(data.bytes);
const a = new Uint8Array(data.a, data.A);
const A = new Uint8Array(data.A);
const rv = prepareEncryptionKeys(bytes, a, A);
postMsg({'blob': rv.buffer});
}
}


function postMsg(value, transferList){
if (typeof importScripts !== 'function'){
parentPort_.postMessage({data:value}, transferList)
} else {
postMessage(value, transferList);
}
}


function prepareEncryptionKeys(bytes, a, A){
const blob = [];
const Bcount = bytes.length/32;
for (let i=0; i < Bcount; i++){
const B = bytes.slice(i*32, (i+1)*32);
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(a, B));
const sub = sodium.crypto_core_ristretto255_sub(B, A);
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(a, sub));
blob.push(k0);
blob.push(k1);
}
return concatTA(...blob);
}

function saveDecryptionKeys(bytes, A){
const bits = bytesToBits(bytes);
const blob = [];
for (const bit of bits){
if (bit === 0){
const b0 = sodium.crypto_core_ristretto255_scalar_random();
const B0 = sodium.crypto_scalarmult_ristretto255_base(b0);
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b0, A));
blob.push(k0);
blob.push(B0);
}
else {
const b1 = sodium.crypto_core_ristretto255_scalar_random();
const gb1 = sodium.crypto_scalarmult_ristretto255_base(b1);
const B1 = sodium.crypto_core_ristretto255_add(A, gb1);
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b1, A));
blob.push(k1);
blob.push(B1);
}
}
return concatTA(...blob);
}

function precomputePool(count, A){
const blob = [];
for (let i = 0; i < count ; i++){
const b0 = sodium.crypto_core_ristretto255_scalar_random();
const B0 = sodium.crypto_scalarmult_ristretto255_base(b0);
const k0 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b0, A));
blob.push(k0);
blob.push(B0);
}
for (let i = 0; i < count; i++){
const b1 = sodium.crypto_core_ristretto255_scalar_random();
const gb1 = sodium.crypto_scalarmult_ristretto255_base(b1);
const B1 = sodium.crypto_core_ristretto255_add(A, gb1);
const k1 = sodium.crypto_generichash(16, sodium.crypto_scalarmult_ristretto255(b1, A));
blob.push(k1);
blob.push(B1);
}
return concatTA(...blob);
}

// convert Uint8Array into an array of 0/1 where least bit has index 0
function bytesToBits (ba){
assert(ba instanceof Uint8Array);
const bitArr = Array(ba.length*8);
let idx = 0;
for (let i=ba.length-1; i >= 0; i--){
for (let j=0; j < 8; j++){
bitArr[idx] = (ba[i] >> j) & 0x01;
idx++;
}
}
return bitArr;
}

function assert(condition, message) {
if (!condition) {
console.trace();
throw message || 'Assertion failed';
}
}

// concatenate an array of typed arrays (specifically Uint8Array)
function concatTA (...arr){
let newLen = 0;
for (const item of arr){
assert(item instanceof Uint8Array);
newLen += item.length;
}
const newArray = new Uint8Array(newLen);
let offset = 0;
for (const item of arr){
newArray.set(item, offset);
offset += item.length;
}
return newArray;
}
@@ -7,14 +7,14 @@ if (typeof(importScripts) === 'undefined'){
// parses this if clause and will error out if "import" is found
// using process.argv instead of import.meta.url to get the name of this script
const filePath = 'file://' + process.argv[1];
// this workaround allows to require() from ES6 modules, which is not allowed by default
const require = module.createRequire(filePath)
// this workaround allows to require() from ES6 modules, which is not allowed by default
const require = module.createRequire(filePath);
const { parentPort } = require('worker_threads');
parentPort.on('message', msg => {
const text = msg.text;
const [obj, blob] = serializeCircuit(text);
parentPort.postMessage({data: {'obj': obj, blob: blob.buffer}});
})
});
});
}
else {
@@ -37,7 +37,7 @@ function serializeCircuit(text){
obj['notaryInputSize'] = Number(rows[1].split(' ')[1]);
obj['clientInputSize'] = Number(rows[1].split(' ')[2]);
obj['outputSize'] = Number(rows[2].split(' ')[1]);


// each gate is serialized as
// 1 byte: gate type XOR==0 AND==1 INV==2
// 3 bytes: 1st input wire number
@@ -75,7 +75,7 @@ function serializeCircuit(text){
const out = intToThreeBytes(tokens[tokens.length-2]);
blob.set(in1, blobOffset);
blobOffset+=3;
blob.set([0,0,0], blobOffset);
blob.set([0, 0, 0], blobOffset);
blobOffset+=3;
blob.set(out, blobOffset);
blobOffset+=3;

435
core/utils.js
@@ -1,14 +1,18 @@
/* eslint-disable no-unused-vars */
/* eslint-disable no-undef */
/* global chrome, CBOR, COSE, Buffer, fastsha256 */

import {verifyChain} from './verifychain.js';
import * as asn1js from './third-party/pkijs/asn1.js';
import Certificate from './third-party/pkijs/Certificate.js';

// returns an array of obj's keys converted to numbers sorted ascendingly
export function sortKeys(obj){
const numArray = Object.keys(obj).map(function(x){return Number(x);});
const numArray = Object.keys(obj).map(function(x){return Number(x);});
return numArray.sort(function(a, b){return a-b;});
}

// convert a byte array into a hex string
// convert a Uint8Array into a hex string
export function ba2hex(ba) {
assert(ba instanceof Uint8Array);
let hexstring = '';
@@ -22,7 +26,7 @@ export function ba2hex(ba) {
return hexstring;
}

// convert a hex string into byte array
// convert a hex string into a Uint8Array
export function hex2ba(str) {
const ba = [];
// pad with a leading 0 if necessary
@@ -78,7 +82,7 @@ export function ba2int(ba){
export function int2ba(int, size){
assert(typeof(int) == 'bigint' || typeof(int) == 'number', 'Only can convert Number or BigInt');
let hexstr = int.toString(16);
if (hexstr.length % 2) {
if (hexstr.length % 2) {
hexstr = '0' + hexstr; }
const ba = [];
for (let i=0; i < hexstr.length/2; i++){
@@ -116,9 +120,9 @@ export function ba2str(ba) {
}

// xor 2 byte arrays of equal length
export function xor (a,b){
export function xor (a, b){
assert(a instanceof Uint8Array && b instanceof Uint8Array);
assert(a.length === b.length);
assert(a.length == b.length);
var c = new Uint8Array(a.length);
for (var i=0; i< a.length; i++){
c[i] = a[i]^b[i];
@@ -173,12 +177,12 @@ export async function verifyAttestationDoc(doc){
const doc_obj = CBOR.decode(payload.buffer);
const leafCertDer = doc_obj.certificate.slice();
const cert_asn1 = asn1js.fromBER(leafCertDer.buffer);
const leafCert = new Certificate({ schema: cert_asn1.result });
const leafCert = new Certificate({ schema: cert_asn1.result });
const x = new Uint8Array(leafCert.subjectPublicKeyInfo.parsedKey.x);
const y = new Uint8Array(leafCert.subjectPublicKeyInfo.parsedKey.y);
// verify the signature
COSE.verify(x, y, doc.buffer);


// verify certificate chain

// this is a sha256 hash of root cert from https://aws-nitro-enclaves.amazonaws.com/AWS_NitroEnclaves_Root-G1.zip
@@ -237,7 +241,7 @@ export function b64decode(str) {
return new Uint8Array(dec);
}

// conform to base64url format replace +/= with -_
// conform to base64url format replace +/= with -_
export function b64urlencode (ba){
assert(ba instanceof Uint8Array);
let str = b64encode(ba);
@@ -250,7 +254,7 @@ export function buildChunkMetadata(plaintextArr){
for (const pt of plaintextArr){
http_data += ba2str(pt);
}


const chunkMetadata = [];
// '''Dechunk only if http_data is chunked otherwise return http_data unmodified'''
const http_header = http_data.slice(0, http_data.search('\r\n\r\n') + '\r\n\r\n'.length);
@@ -307,7 +311,7 @@ export function dechunk_http(decrRecords) {

var chunkMetadata = buildChunkMetadata(decrRecords);
var dechunkedPlaintexts = [];
var totalOffset = -1; // an offset at which the last byte is found of plaintexts processed so far
var totalOffset = -1; // an offset at which the last byte is found of plaintexts processed so far
var shrinkNextRecordBy = 0; // used when chunking metadata spans 2 TLS records
var shrinkThisRecordBy = 0;
for (var i=0; i < decrRecords.length; i++){
@@ -320,7 +324,7 @@ export function dechunk_http(decrRecords) {
var s = true;
}
}
var metadataInThisRecord = [];
var metadataInThisRecord = [];
var tmpArray = [...chunkMetadata];
// every even index contains the start of metadata
for (var j=0; j < tmpArray.length; j+=2){
@@ -389,15 +393,15 @@ export function gunzip_http(dechunkedRecords) {
return dechunkedRecords; // #nothing to gunzip
}
throw ('gzip enabled');
var http_body = http_data.slice(http_header.length);
var ungzipped = http_header;
if (!http_body) {
// HTTP 304 Not Modified has no body
return [ungzipped];
}
var inflated = pako.inflate(http_body);
ungzipped += ba2str(inflated);
return [ungzipped];
// var http_body = http_data.slice(http_header.length);
// var ungzipped = http_header;
// if (!http_body) {
// // HTTP 304 Not Modified has no body
// return [ungzipped];
// }
// var inflated = pako.inflate(http_body);
// ungzipped += ba2str(inflated);
// return [ungzipped];
}

export function getTime() {
@@ -406,7 +410,7 @@ export function getTime() {
('00' + (today.getMonth() + 1)).slice(-2) + '-' +
('00' + today.getDate()).slice(-2) + '-' +
('00' + today.getHours()).slice(-2) + '-' +
('00' + today.getMinutes()).slice(-2) + '-' +
('00' + today.getMinutes()).slice(-2) + '-' +
('00' + today.getSeconds()).slice(-2);
return time;
}
@@ -424,11 +428,11 @@ export function pem2ba(pem) {
encoded += line.trim();
}
}
return b64decode(encoded);
return b64decode(encoded);
}


// compare bytes in 2 arrays. a or b can be either Array or Uint8Array
// compares two Uint8Arrays or Arrays
export function eq(a, b) {
assert(Array.isArray(a) || a instanceof Uint8Array);
assert(Array.isArray(b) || b instanceof Uint8Array);
@@ -436,7 +440,7 @@ export function eq(a, b) {
a.every((val, index) => val === b[index]);
}



// expand the range [min:max) into array of ints 1,2,3,4... up to but not including max
export function expandRange(min, max){
const arr = [];
@@ -459,31 +463,6 @@ export function splitIntoChunks(ba, chunkSize) {
return newArray;
}

// perform GCM Galois Field block multiplication
// x,y are byte arrays
export function blockMult(x_,y_){
// casting to BigInt just in case if ba2int returns a Number
let x = BigInt(ba2int(x_));
const y = BigInt(ba2int(y_));
let res = 0n;
for (let i=127n; i >= 0n; i--){
res ^= x * ((y >> i) & 1n);
x = (x >> 1n) ^ ((x & 1n) * BigInt(0xE1000000000000000000000000000000));
}
return int2ba(res, 16);
}

// x is Uint8Array
export function getXTable(x_){
let x = ba2int(x_);
const table = [];
for (let i=0; i < 128; i++){
table[i] = int2ba(x, 16);
x = (x >> 1n) ^ ((x & 1n) * BigInt(0xE1000000000000000000000000000000));
}
return table;
}


// convert Uint8Array into an array of 0/1 where least bit has index 0
export function bytesToBits (ba){
@@ -517,29 +496,29 @@ export function bitsToBytes(arr){
// convert OpenSSL's signature format (asn1 DER) into WebCrypto's IEEE P1363 format
export function sigDER2p1363(sigDER){
var o = 0;
assert(eq(sigDER.slice(o,o+=1), [0x30]));
var total_len = ba2int(sigDER.slice(o,o+=1));
assert(eq(sigDER.slice(o, o+=1), [0x30]));
var total_len = ba2int(sigDER.slice(o, o+=1));
assert(sigDER.length == total_len+2);
assert(eq(sigDER.slice(o,o+=1), [0x02]));
var r_len = ba2int(sigDER.slice(o,o+=1));
assert(eq(sigDER.slice(o, o+=1), [0x02]));
var r_len = ba2int(sigDER.slice(o, o+=1));
assert(r_len === 32 || r_len === 33);
var r = sigDER.slice(o,o+=r_len);
assert(eq(sigDER.slice(o,o+=1), [0x02]));
var s_len = ba2int(sigDER.slice(o,o+=1));
var r = sigDER.slice(o, o+=r_len);
assert(eq(sigDER.slice(o, o+=1), [0x02]));
var s_len = ba2int(sigDER.slice(o, o+=1));
assert(s_len >= 31 && s_len <= 33);
var s = sigDER.slice(o,o+=s_len);
var s = sigDER.slice(o, o+=s_len);
if (s.length === 31){
s = concatTA(new Uint8Array([0x00]), s);
}
if (r_len === 33){
assert(eq(r.slice(0,1), [0x00]));
assert(eq(r.slice(0, 1), [0x00]));
r = r.slice(1);
}
if (s_len == 33){
assert(eq(s.slice(0,1), [0x00]));
assert(eq(s.slice(0, 1), [0x00]));
s = s.slice(1);
}
var sig_p1363 = concatTA(r,s);
var sig_p1363 = concatTA(r, s);
return sig_p1363;
}
|
||||
|
||||
@@ -555,7 +534,7 @@ export async function import_resource(filename) {
|
||||
// take PEM EC pubkey and output a "raw" pubkey with all asn1 data stripped
|
||||
export function pubkeyPEM2raw(pkPEM){
|
||||
// prepended asn1 data for ECpubkey prime256v1
|
||||
const preasn1 = [0x30, 0x59, 0x30, 0x13, 0x06, 0x07, 0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x02, 0x01, 0x06,0x08, 0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x03, 0x01, 0x07, 0x03, 0x42, 0x00];
|
||||
const preasn1 = [0x30, 0x59, 0x30, 0x13, 0x06, 0x07, 0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x02, 0x01, 0x06, 0x08, 0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x03, 0x01, 0x07, 0x03, 0x42, 0x00];
|
||||
const pk = pem2ba(pkPEM);
|
||||
assert(eq(pk.slice(0, preasn1.length), preasn1));
|
||||
return pk.slice(preasn1.length);
|
||||
@@ -610,29 +589,21 @@ export function encrypt_generic(plaintext, key, nonce) {
|
||||
return xor(tmp, ro);
|
||||
}
|
||||
|
||||
const byteArray = new Uint8Array(24);
|
||||
const sha0 = hex2ba('da5698be17b9b46962335799779fbeca8ce5d491c0d26243bafef9ea1837a9d8');
|
||||
|
||||
// class PRF is initialized once and then it is a read-only
|
||||
function randomOracle(m, t) {
|
||||
// fixedKey is used by randomOracle(). We need a 32-byte key because we use Salsa20. The last 4
|
||||
// bytes will be filled with the index of the circuit's wire.
|
||||
const fixedKey = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
|
||||
25, 26, 27, 28, 0, 0, 0, 0]);
|
||||
|
||||
export function longToByteArray(long) {
|
||||
// we want to represent the input as a 24-bytes array
|
||||
for (let index = 0; index < byteArray.length; index++) {
|
||||
const byte = long & 0xff;
|
||||
byteArray[index] = byte;
|
||||
long = (long - byte) / 256;
|
||||
// convert the integer t to a 4-byte big-endian array and append
|
||||
// it to fixedKey in-place
|
||||
for (let index = 0; index < 4; index++) {
|
||||
const byte = t & 0xff;
|
||||
fixedKey[31-index] = byte;
|
||||
t = (t - byte) / 256;
|
||||
}
|
||||
return byteArray;
|
||||
}
|
||||
|
||||
|
||||
export function randomOracle(m, t) {
|
||||
const nonce = longToByteArray(t);
|
||||
return nacl.secretbox(
|
||||
m,
|
||||
nonce, // Nonce 24 bytes because this sodium uses 192 bit blocks.
|
||||
sha0,
|
||||
).slice(0,16);
|
||||
return Salsa20(fixedKey, m);
|
||||
}
|
||||
|
||||
export const decrypt_generic = encrypt_generic;
|
||||
@@ -671,17 +642,17 @@ export function concatTA(...arr){
|
||||
|
||||
async function gcmEncrypt(key, plaintext, IV, aad){
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw',
|
||||
key.buffer,
|
||||
'AES-GCM',
|
||||
true,
|
||||
'raw',
|
||||
key.buffer,
|
||||
'AES-GCM',
|
||||
true,
|
||||
['encrypt', 'decrypt']);
|
||||
|
||||
const ciphertext = await crypto.subtle.encrypt({
|
||||
name: 'AES-GCM',
|
||||
name: 'AES-GCM',
|
||||
iv: IV.buffer,
|
||||
additionalData: aad.buffer},
|
||||
cryptoKey,
|
||||
additionalData: aad.buffer},
|
||||
cryptoKey,
|
||||
plaintext.buffer,
|
||||
);
|
||||
|
||||
@@ -709,7 +680,7 @@ async function gcmEncrypt(key, plaintext, IV, aad){
|
||||
const H2a = times_auth_key(H1a, H1a);
|
||||
const H2b = times_auth_key(H1b, H1b);
|
||||
|
||||
const X1 = ba2int(aad);
|
||||
const X1 = ba2int(aad);
|
||||
const X2 = ba2int(ct);
|
||||
const X3 = ba2int(lenAlenC);
|
||||
|
||||
@@ -724,25 +695,212 @@ async function gcmEncrypt(key, plaintext, IV, aad){
|
||||
}
|
||||
|
||||
// WebCrypto doesn't provide AES-ECB encryption. We achieve it by using
|
||||
// the CTR mode and setting CTR's counter to what we want to AES-encrypt and setting CTR's
|
||||
// data to encrypt to zero, because in CTR ciphertext = AES(counter) XOR plaintext
|
||||
//
|
||||
export async function AESECBencrypt(key, data){
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw',
|
||||
key.buffer,
|
||||
'AES-CTR',
|
||||
true,
|
||||
['encrypt', 'decrypt']);
|
||||
// the CBC mode with a zero IV. This workaraound only works for encrypting
|
||||
// 16 bytes at a time.
|
||||
|
||||
const zeroes = int2ba(0, 16);
|
||||
export async function AESECBencrypt(key, data){
|
||||
assert(data.length == 16, 'can only AES-ECB encrypt 16 bytes at a time');
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw', key.buffer, 'AES-CBC', false, ['encrypt']);
|
||||
|
||||
// Even if data is a multiple of 16, WebCrypto adds 16 bytes of
|
||||
// padding. We drop it.
|
||||
return new Uint8Array (await crypto.subtle.encrypt({
|
||||
name: 'AES-CTR',
|
||||
counter: data.buffer, length: 16},
|
||||
name: 'AES-CBC',
|
||||
iv: new Uint8Array(16).fill(0).buffer},
|
||||
cryptoKey,
|
||||
zeroes.buffer));
|
||||
data.buffer)).slice(0, 16);
|
||||
}
|
||||
|
||||
|
||||
// AEC-CTR encrypt data, setting initial counter to 0
|
||||
export async function AESCTRencrypt(key, data){
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw', key.buffer, 'AES-CTR', false, ['encrypt']);
|
||||
|
||||
return new Uint8Array (await crypto.subtle.encrypt({
|
||||
name: 'AES-CTR',
|
||||
counter: new Uint8Array(16).fill(0).buffer,
|
||||
length:64},
|
||||
cryptoKey,
|
||||
data.buffer));
|
||||
}
|
||||
|
||||
|
||||
// AEC-CTR decrypt ciphertext, setting initial counter to 0
|
||||
export async function AESCTRdecrypt(key, ciphertext){
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw', key.buffer, 'AES-CTR', false, ['decrypt']);
|
||||
|
||||
return new Uint8Array (await crypto.subtle.decrypt({
|
||||
name: 'AES-CTR',
|
||||
counter: new Uint8Array(16).fill(0).buffer,
|
||||
length:64},
|
||||
cryptoKey,
|
||||
ciphertext.buffer));
|
||||
}
|
||||
|
||||
|
||||
// use Salsa20 as a random permutator. Instead of the nonce, we feed the data that needs
|
||||
// to be permuted.
|
||||
export function Salsa20(key, data){
|
||||
// sigma is Salsa's constant "expand 32-byte k"
|
||||
const sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]);
|
||||
const out = new Uint8Array(16);
|
||||
core_salsa20(out, data, key, sigma);
|
||||
return out;
|
||||
}
|
||||
|
||||
// copied from https://github.com/dchest/tweetnacl-js/blob/master/nacl-fast.js
|
||||
// and modified to output only 16 bytes
|
||||
function core_salsa20(o, p, k, c) {
|
||||
var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24,
|
||||
j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24,
|
||||
j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24,
|
||||
j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24,
|
||||
j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24,
|
||||
j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24,
|
||||
j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24,
|
||||
j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24,
|
||||
j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24,
|
||||
j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24,
|
||||
j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24,
|
||||
j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24,
|
||||
j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24,
|
||||
j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24,
|
||||
j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24,
|
||||
j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24;
|
||||
|
||||
var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7,
|
||||
x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14,
|
||||
x15 = j15, u;
|
||||
|
||||
for (var i = 0; i < 20; i += 2) {
|
||||
u = x0 + x12 | 0;
|
||||
x4 ^= u<<7 | u>>>(32-7);
|
||||
u = x4 + x0 | 0;
|
||||
x8 ^= u<<9 | u>>>(32-9);
|
||||
u = x8 + x4 | 0;
|
||||
x12 ^= u<<13 | u>>>(32-13);
|
||||
u = x12 + x8 | 0;
|
||||
x0 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x5 + x1 | 0;
|
||||
x9 ^= u<<7 | u>>>(32-7);
|
||||
u = x9 + x5 | 0;
|
||||
x13 ^= u<<9 | u>>>(32-9);
|
||||
u = x13 + x9 | 0;
|
||||
x1 ^= u<<13 | u>>>(32-13);
|
||||
u = x1 + x13 | 0;
|
||||
x5 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x10 + x6 | 0;
|
||||
x14 ^= u<<7 | u>>>(32-7);
|
||||
u = x14 + x10 | 0;
|
||||
x2 ^= u<<9 | u>>>(32-9);
|
||||
u = x2 + x14 | 0;
|
||||
x6 ^= u<<13 | u>>>(32-13);
|
||||
u = x6 + x2 | 0;
|
||||
x10 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x15 + x11 | 0;
|
||||
x3 ^= u<<7 | u>>>(32-7);
|
||||
u = x3 + x15 | 0;
|
||||
x7 ^= u<<9 | u>>>(32-9);
|
||||
u = x7 + x3 | 0;
|
||||
x11 ^= u<<13 | u>>>(32-13);
|
||||
u = x11 + x7 | 0;
|
||||
x15 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x0 + x3 | 0;
|
||||
x1 ^= u<<7 | u>>>(32-7);
|
||||
u = x1 + x0 | 0;
|
||||
x2 ^= u<<9 | u>>>(32-9);
|
||||
u = x2 + x1 | 0;
|
||||
x3 ^= u<<13 | u>>>(32-13);
|
||||
u = x3 + x2 | 0;
|
||||
x0 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x5 + x4 | 0;
|
||||
x6 ^= u<<7 | u>>>(32-7);
|
||||
u = x6 + x5 | 0;
|
||||
x7 ^= u<<9 | u>>>(32-9);
|
||||
u = x7 + x6 | 0;
|
||||
x4 ^= u<<13 | u>>>(32-13);
|
||||
u = x4 + x7 | 0;
|
||||
x5 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x10 + x9 | 0;
|
||||
x11 ^= u<<7 | u>>>(32-7);
|
||||
u = x11 + x10 | 0;
|
||||
x8 ^= u<<9 | u>>>(32-9);
|
||||
u = x8 + x11 | 0;
|
||||
x9 ^= u<<13 | u>>>(32-13);
|
||||
u = x9 + x8 | 0;
|
||||
x10 ^= u<<18 | u>>>(32-18);
|
||||
|
||||
u = x15 + x14 | 0;
|
||||
x12 ^= u<<7 | u>>>(32-7);
|
||||
u = x12 + x15 | 0;
|
||||
x13 ^= u<<9 | u>>>(32-9);
|
||||
u = x13 + x12 | 0;
|
||||
x14 ^= u<<13 | u>>>(32-13);
|
||||
u = x14 + x13 | 0;
|
||||
x15 ^= u<<18 | u>>>(32-18);
|
||||
}
|
||||
x0 = x0 + j0 | 0;
|
||||
x1 = x1 + j1 | 0;
|
||||
x2 = x2 + j2 | 0;
|
||||
x3 = x3 + j3 | 0;
|
||||
x4 = x4 + j4 | 0;
|
||||
x5 = x5 + j5 | 0;
|
||||
x6 = x6 + j6 | 0;
|
||||
x7 = x7 + j7 | 0;
|
||||
x8 = x8 + j8 | 0;
|
||||
x9 = x9 + j9 | 0;
|
||||
x10 = x10 + j10 | 0;
|
||||
x11 = x11 + j11 | 0;
|
||||
x12 = x12 + j12 | 0;
|
||||
x13 = x13 + j13 | 0;
|
||||
x14 = x14 + j14 | 0;
|
||||
x15 = x15 + j15 | 0;
|
||||
|
||||
o[ 0] = x0 >>> 0 & 0xff;
|
||||
o[ 1] = x0 >>> 8 & 0xff;
|
||||
o[ 2] = x0 >>> 16 & 0xff;
|
||||
o[ 3] = x0 >>> 24 & 0xff;
|
||||
|
||||
o[ 4] = x1 >>> 0 & 0xff;
|
||||
o[ 5] = x1 >>> 8 & 0xff;
|
||||
o[ 6] = x1 >>> 16 & 0xff;
|
||||
o[ 7] = x1 >>> 24 & 0xff;
|
||||
|
||||
o[ 8] = x2 >>> 0 & 0xff;
|
||||
o[ 9] = x2 >>> 8 & 0xff;
|
||||
o[10] = x2 >>> 16 & 0xff;
|
||||
o[11] = x2 >>> 24 & 0xff;
|
||||
|
||||
o[12] = x3 >>> 0 & 0xff;
|
||||
o[13] = x3 >>> 8 & 0xff;
|
||||
o[14] = x3 >>> 16 & 0xff;
|
||||
o[15] = x3 >>> 24 & 0xff;
|
||||
// we only need 16 bytes of the output
|
||||
}
|
||||
|
||||
|
||||
// ephemeral key usage time must be within the time of ephemeral key validity
|
||||
export function checkExpiration(validFrom, validUntil, time){
|
||||
time = time || Math.floor(new Date().getTime() / 1000);
|
||||
if (ba2int(validFrom) > time || time > ba2int(validUntil)){
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// -----------OBSOLETE functions below this line
|
||||
function bestPathNew(num){
|
||||
const mainPowers = [];
|
||||
const auxPowers = []; // aux powers
|
||||
@@ -750,7 +908,7 @@ function bestPathNew(num){
|
||||
for (let i=0; i<10; i++){
|
||||
mainPowers.push(2**i);
|
||||
}
|
||||
mainPowers.sort(function(a, b){return a-b;});
|
||||
mainPowers.sort(function(a, b){return a-b;});
|
||||
const primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503];
|
||||
let paths = [];
|
||||
paths.push([mainPowers, auxPowers, auxIncrements]);
|
||||
@@ -759,7 +917,7 @@ function bestPathNew(num){
|
||||
const emptyIndexes = [];
|
||||
for (let pathIdx=0; pathIdx < paths.length; pathIdx++ ){
|
||||
const mainPowers = paths[pathIdx][0];
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
if (! isSumFound(mainPowers, auxPowers, i)){
|
||||
emptyIndexes.push(pathIdx);
|
||||
}
|
||||
@@ -769,7 +927,7 @@ function bestPathNew(num){
|
||||
// TODO: do we want to discard, or maybe to add primes?
|
||||
for (let i=0; i < emptyIndexes.length; i++){
|
||||
// console.log('discarding path with index ', i)
|
||||
paths.splice(i,1);
|
||||
paths.splice(i, 1);
|
||||
}
|
||||
}
|
||||
else { // sum was not found in any path
|
||||
@@ -778,7 +936,7 @@ function bestPathNew(num){
|
||||
for (let pathIdx=0; pathIdx < paths.length; pathIdx++ ){
|
||||
const mainPowers = paths[pathIdx][0];
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
const auxIncrements = paths[pathIdx][2];
|
||||
const auxIncrements = paths[pathIdx][2];
|
||||
|
||||
for (let p=0; p < primes.length; p++){
|
||||
const prime = primes[p];
|
||||
@@ -800,7 +958,7 @@ function bestPathNew(num){
|
||||
}
|
||||
|
||||
// add new numbers to auxPowers
|
||||
// this can be any number - prime or non-prime that is already
|
||||
// this can be any number - prime or non-prime that is already
|
||||
// available in mainPowers
|
||||
for (let p=0; p < mainPowers.length; p++){
|
||||
const num = mainPowers[p];
|
||||
@@ -867,11 +1025,11 @@ function bestPathNewer(num){
|
||||
// if (paths.length > sampleSize){
|
||||
// for (let i=0; i < paths.length-sampleSize; i++){
|
||||
// const randIdx = Math.ceil(Math.random()*paths.length)
|
||||
// paths.splice(randIdx, 1)
|
||||
// paths.splice(randIdx, 1)
|
||||
// }
|
||||
// }
|
||||
|
||||
// take each path and see if sum is found.
|
||||
// take each path and see if sum is found.
|
||||
// if found at least in one path, advance to the next number
|
||||
let foundAtLeastOnce = false;
|
||||
for (let pathIdx=0; pathIdx < paths.length; pathIdx++ ){
|
||||
@@ -900,7 +1058,7 @@ function bestPathNewer(num){
|
||||
// add the next num to main powers and check if sum is found
|
||||
for (let pathIdx=0; pathIdx < paths.length; pathIdx++ ){
|
||||
const mainPowers = paths[pathIdx][0];
|
||||
const auxIncrements = paths[pathIdx][2];
|
||||
const auxIncrements = paths[pathIdx][2];
|
||||
|
||||
for (let p=0; p <= i; p++){
|
||||
if (p%2 === 0 || mainPowers.includes(p)){
|
||||
@@ -938,9 +1096,9 @@ function bestPathNewer(num){
|
||||
}
|
||||
paths = newPaths;
|
||||
}
|
||||
|
||||
|
||||
const numFreq = {};
|
||||
let minLen = paths[0][0].length;
|
||||
let minLen = paths[0][0].length;
|
||||
for (let i=0; i < paths.length; i++){
|
||||
if (paths[i][3] !== true){
|
||||
continue;
|
||||
@@ -966,7 +1124,7 @@ function bestPathNewer(num){
|
||||
}
|
||||
}
|
||||
console.log(nums.sort(function(a, b){return a-b;}));
|
||||
}
|
||||
}
|
||||
console.log(numFreq);
|
||||
}
|
||||
|
||||
@@ -1108,7 +1266,7 @@ function bestPath(num){
|
||||
mainPowers.push(2**i);
|
||||
auxPowers.push(2**i);
|
||||
}
|
||||
mainPowers.sort(function(a, b){return a-b;});
|
||||
mainPowers.sort(function(a, b){return a-b;});
|
||||
auxPowers.sort(function(a, b){return a-b;});
|
||||
const primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503];
|
||||
let paths = [];
|
||||
@@ -1118,7 +1276,7 @@ function bestPath(num){
|
||||
const emptyIndexes = [];
|
||||
for (let pathIdx=0; pathIdx < paths.length; pathIdx++ ){
|
||||
const mainPowers = paths[pathIdx][0];
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
if (! isSumFound(mainPowers, auxPowers, i)){
|
||||
emptyIndexes.push(pathIdx);
|
||||
}
|
||||
@@ -1128,7 +1286,7 @@ function bestPath(num){
|
||||
// TODO: do we want to discard, or maybe to add primes?
|
||||
for (let i=0; i < emptyIndexes.length; i++){
|
||||
// console.log('discarding path with index ', i)
|
||||
paths.splice(i,1);
|
||||
paths.splice(i, 1);
|
||||
}
|
||||
}
|
||||
else { // sum was not found in any path
|
||||
@@ -1136,8 +1294,8 @@ function bestPath(num){
|
||||
// add the next prime to main powers and check if sum is found
|
||||
for (let pathIdx=0; pathIdx < paths.length; pathIdx++ ){
|
||||
const mainPowers = paths[pathIdx][0];
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
|
||||
const auxPowers = paths[pathIdx][1];
|
||||
|
||||
for (let p=0; p < primes.length; p++){
|
||||
const prime = primes[p];
|
||||
if (mainPowers.includes(prime)){
|
||||
@@ -1226,7 +1384,7 @@ function isSumFound(a, b, sum){
|
||||
if (b[j] + a[i] == sum){
|
||||
// sums.push([i, j])
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
@@ -1242,14 +1400,6 @@ function allPrimeMultiples(num){
|
||||
return arr;
|
||||
}
|
||||
|
||||
// ephemeral key usage time must be within the time of ephemeral key validity
|
||||
export function checkExpiration(validFrom, validUntil, time){
|
||||
time = time || Math.floor(new Date().getTime() / 1000);
|
||||
if (ba2int(validFrom) > time || time > ba2int(validUntil)){
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// computes AES GCM authentication tag
|
||||
// all 4 inputs arrays of bytes
|
||||
@@ -1307,31 +1457,6 @@ function getAuthTag(aad, ct, encZero, encIV, precompute){
|
||||
}
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined'){ // we are in node.js environment
|
||||
module.exports={
|
||||
assert,
|
||||
ba2ab,
|
||||
ba2bigint,
|
||||
ba2str,
|
||||
bi2ba,
|
||||
ab2ba,
|
||||
ba2int,
|
||||
buildChunkMetadata,
|
||||
b64encode,
|
||||
b64decode,
|
||||
b64urlencode,
|
||||
dechunk_http,
|
||||
gunzip_http,
|
||||
eq,
|
||||
getTime,
|
||||
pem2ba,
|
||||
pubkeyPEM2raw,
|
||||
sha256,
|
||||
sigDER2p1363,
|
||||
str2ba,
|
||||
xor
|
||||
};
|
||||
}
|
||||
@@ -1,12 +1,11 @@
|
||||
import {pem2ba, eq, import_resource} from './utils.js';
|
||||
import {pem2ba, eq} from './utils.js';
|
||||
import * as asn1js from './third-party/pkijs/asn1.js';
|
||||
import Certificate from './third-party/pkijs/Certificate.js';
|
||||
import CertificateChainValidationEngine from './third-party/pkijs/CertificateChainValidationEngine.js';
|
||||
|
||||
import CertificateChainValidationEngine from
|
||||
'./third-party/pkijs/CertificateChainValidationEngine.js';
|
||||
|
||||
var trustedCertificates = [];
|
||||
|
||||
|
||||
// extract PEMs from Mozilla's CA store and convert into asn1js's Certificate object
|
||||
export async function parse_certs(text){
|
||||
// wait for pkijs module to load
|
||||
@@ -22,15 +21,10 @@ export async function parse_certs(text){
|
||||
const lines = text.split('"\n"').slice(1); // discard the first line - headers
|
||||
for (const line of lines){
|
||||
const fields = line.split('","');
|
||||
const pem = fields[32].slice(1,-1);
|
||||
const pem = fields[32].slice(1, -1);
|
||||
const asn1cert = asn1js.fromBER(pem2ba(pem).buffer);
|
||||
trustedCertificates.push(new Certificate({ schema: asn1cert.result }));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function getPubkey(c){
|
||||
return new Uint8Array(c.subjectPublicKeyInfo.subjectPublicKey.valueBlock.valueHex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -57,7 +51,7 @@ export function checkCertSubjName(cert, serverName){
|
||||
}
|
||||
|
||||
for (let nameInCert of allNames){
|
||||
if (wildTest(nameInCert, serverName) == true)
|
||||
if (wildTest(nameInCert, serverName) == true)
|
||||
return true;
|
||||
}
|
||||
throw 'Server name is not the same as the certificate\'s subject name(s)';
|
||||
@@ -89,10 +83,10 @@ export function getAltNames(cert) {
|
||||
|
||||
|
||||
|
||||
// verifyChain verifies a certificate chain "chain_der" against the time "date". If "date" is not
|
||||
// verifyChain verifies a certificate chain "chain_der" against the time "date". If "date" is not
|
||||
// given, verifies againt the current time.
|
||||
// Returns true on success or throws if verification failed.
|
||||
// Sometimes servers do not put intermediate certs into the chain. In such case we
|
||||
// Sometimes servers do not put intermediate certs into the chain. In such case we
|
||||
// fetch the missing cert from a URL embedded in the leaf cert. We return the fetched cert.
|
||||
export async function verifyChain(chain_der, date, trustedCerts) {
|
||||
if (trustedCerts == undefined){
|
||||
@@ -109,17 +103,17 @@ export async function verifyChain(chain_der, date, trustedCerts) {
|
||||
const cert = new Certificate({ schema: cert_asn1.result });
|
||||
chain.push(cert);
|
||||
}
|
||||
|
||||
|
||||
|
||||
async function do_verify(chain, date, trustedCerts){
|
||||
// CertificateChainValidationEngine will fail the verification if the root cert is
|
||||
// included in the chain. To prevent this, we remove the root CA from the chain.
|
||||
// CertificateChainValidationEngine will fail the verification if the root cert is
|
||||
// included in the chain. To prevent this, we remove the root CA from the chain.
|
||||
// Check by pubkey if the last cert is a root CA known to us.
|
||||
|
||||
var pubkeyToFind = new Uint8Array(chain.slice(-1)[0].subjectPublicKeyInfo.subjectPublicKey.valueBlock.valueHex);
|
||||
for (let cert of trustedCerts){
|
||||
if (eq(new Uint8Array(cert.subjectPublicKeyInfo.subjectPublicKey.valueBlock.valueHex), pubkeyToFind)){
|
||||
chain = chain.slice(0,-1);
|
||||
chain = chain.slice(0, -1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -127,8 +121,8 @@ export async function verifyChain(chain_der, date, trustedCerts) {
|
||||
// pkijs requires that the leaf cert is last in the array
|
||||
// if not, the verification will still succeed, but the returned certificatePath
|
||||
// will be incomplete
|
||||
var leafCert = chain.splice(0,1)[0];
|
||||
chain.push(leafCert);
|
||||
var leafCert = chain.splice(0, 1)[0];
|
||||
chain.push(leafCert);
|
||||
const ccve = new CertificateChainValidationEngine({
|
||||
trustedCerts: trustedCerts,
|
||||
certs: chain,
|
||||
@@ -163,15 +157,4 @@ export async function verifyChain(chain_der, date, trustedCerts) {
|
||||
throw ('Could not notarize because the website presented an untrusted certificate');
|
||||
}
|
||||
return rv;
|
||||
}
|
||||
|
||||
|
||||
if (typeof module !== 'undefined'){ // we are in node.js environment
|
||||
module.exports={
|
||||
checkCertSubjName,
|
||||
getCommonName,
|
||||
getModulus,
|
||||
parse_certs,
|
||||
verifyChain
|
||||
};
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
// FileChooser create a "choose file" button and sends the chosen file
|
||||
// to the extension
|
||||
/* global chrome*/
|
||||
|
||||
// class FileChooser create a "choose file" button and sends the chosen file
|
||||
// to the extension
|
||||
export class FileChooser{
|
||||
// show is called by extension's Main.openFileChooser()
|
||||
show(){
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
/* global chrome, swal*/
|
||||
|
||||
import {str2ba} from './utils.js';
|
||||
|
||||
document.addEventListener('load', onload);
|
||||
@@ -21,7 +23,7 @@ class Manager{
|
||||
that.processData(data.payload);
|
||||
}
|
||||
else if (data.command == 'export'){
|
||||
// .payload contains {pgsg: json, name: session_name}
|
||||
// .payload contains {pgsg: json, name: session_name}
|
||||
const exportedBlobUrl = URL.createObjectURL(new Blob([str2ba(data.payload.pgsg)]), {
|
||||
type: 'application/octet-stream'
|
||||
});
|
||||
@@ -38,7 +40,7 @@ class Manager{
|
||||
'message': 'refresh'
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
processData(rows) {
|
||||
const tb = document.getElementsByTagName('tbody')[0];
|
||||
const initial_row_length = tb.rows.length;
|
||||
@@ -49,7 +51,7 @@ class Manager{
|
||||
rows.sort(function(a, b) {
|
||||
return Date.parse(a.creationTime) < Date.parse(b.creationTime) ? 1 : -1;
|
||||
});
|
||||
|
||||
|
||||
for (const r of rows) {
|
||||
this.addRow({
|
||||
'sessionName': r.sessionName,
|
||||
@@ -61,14 +63,14 @@ class Manager{
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
addRow(args) {
|
||||
const that = this;
|
||||
const session = args.creationTime;
|
||||
const tb = document.getElementById('tableBody');
|
||||
const row = tb.insertRow(tb.rows.length);
|
||||
|
||||
|
||||
const td_session = document.createElement('td');
|
||||
if (args.isImported){
|
||||
const importedIcon = document.createElement('img');
|
||||
@@ -91,9 +93,9 @@ class Manager{
|
||||
td_session.appendChild(editedIcon);
|
||||
}
|
||||
td_session.appendChild(document.createTextNode(args.sessionName));
|
||||
|
||||
|
||||
const iconDiv = document.createElement('div');
|
||||
|
||||
|
||||
const imgExp = document.createElement('img');
|
||||
imgExp.classList.add('icon');
|
||||
imgExp.src = '../img/export.svg';
|
||||
@@ -120,7 +122,7 @@ class Manager{
|
||||
};
|
||||
imgExp.value = 'Export';
|
||||
iconDiv.appendChild(imgExp);
|
||||
|
||||
|
||||
const imgRen = document.createElement('img');
|
||||
imgRen.classList.add('icon');
|
||||
imgRen.src = '../img/rename.svg';
|
||||
@@ -132,7 +134,7 @@ class Manager{
|
||||
that.doRename(event.target, session);
|
||||
};
|
||||
iconDiv.appendChild(imgRen);
|
||||
|
||||
|
||||
const imgDel = document.createElement('img');
|
||||
imgDel.classList.add('icon');
|
||||
imgDel.src = '../img/delete.svg';
|
||||
@@ -158,22 +160,22 @@ class Manager{
|
||||
};
|
||||
imgDel.value = 'Delete';
|
||||
iconDiv.appendChild(imgDel);
|
||||
|
||||
|
||||
iconDiv.style.position = 'absolute';
|
||||
iconDiv.style.top = 2;
|
||||
iconDiv.style.right = 4;
|
||||
|
||||
|
||||
td_session.style.position = 'relative';
|
||||
td_session.appendChild(iconDiv);
|
||||
row.appendChild(td_session);
|
||||
|
||||
|
||||
const td_time = document.createElement('td');
|
||||
td_time.style.textAlign = 'center';
|
||||
td_time.textContent = args.creationTime;
|
||||
row.appendChild(td_time);
|
||||
|
||||
|
||||
const buttonDiv = document.createElement('div');
|
||||
|
||||
|
||||
const input1 = document.createElement('input');
|
||||
input1.type = 'button';
|
||||
input1.className = 'btn';
|
||||
@@ -188,7 +190,7 @@ class Manager{
|
||||
};
|
||||
input1.value = 'HTML';
|
||||
buttonDiv.appendChild(input1);
|
||||
|
||||
|
||||
const input2 = document.createElement('input');
|
||||
input2.type = 'button';
|
||||
input2.className = 'btn';
|
||||
@@ -203,7 +205,7 @@ class Manager{
|
||||
};
|
||||
input2.value = 'Details';
|
||||
buttonDiv.appendChild(input2);
|
||||
|
||||
|
||||
const input3 = document.createElement('input');
|
||||
input3.type = 'button';
|
||||
input3.className = 'btn';
|
||||
@@ -218,7 +220,7 @@ class Manager{
|
||||
};
|
||||
input3.value = 'Edit';
|
||||
// Edit button will be used in fututre versions
|
||||
input3.style.visibility = "hidden";
|
||||
input3.style.visibility = 'hidden';
|
||||
if (args.isImported || args.isEdited || (args.version < 5)){
|
||||
input3.style.opacity = '0.3';
|
||||
input3.onclick = null;
|
||||
@@ -228,7 +230,7 @@ class Manager{
|
||||
}
|
||||
}
|
||||
buttonDiv.appendChild(input3);
|
||||
|
||||
|
||||
const td3 = document.createElement('td');
|
||||
td3.style.textAlign = 'center';
|
||||
td3.appendChild(buttonDiv);
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
/* global chrome*/
|
||||
|
||||
export class NotificationBar{
|
||||
constructor(){}
|
||||
|
||||
show(sessionId, serverName, hideButton) {
|
||||
hideButton = hideButton || false;
|
||||
|
||||
|
||||
const table = document.createElement('table');
|
||||
table.style.position = 'fixed';
|
||||
table.style.top = '0px';
|
||||
|
||||
16
ui/Popup.js
16
ui/Popup.js
@@ -1,4 +1,4 @@
|
||||
// import {ProgressBar} from './progressbar.min.js';
|
||||
/* global chrome, browser*/
|
||||
|
||||
class Popup{
|
||||
constructor(){
|
||||
@@ -14,7 +14,7 @@ class Popup{
|
||||
// to pass it to the click event listener so that it could run synchronously.
|
||||
// document.getElementById("notarize").addEventListener("mouseover",
|
||||
// function() {
|
||||
// chrome.tabs.query({active: true}, async function(t) {
|
||||
// chrome.tabs.query({active: true}, async function(t) {
|
||||
// currentUrl = t[0].url
|
||||
// hasPermission = await browser.permissions.contains({origins: [currentUrl]})
|
||||
// })
|
||||
@@ -56,7 +56,7 @@ class Popup{
|
||||
});
|
||||
window.close();
|
||||
});
|
||||
|
||||
|
||||
chrome.runtime.onMessage.addListener(function(data) {
|
||||
that.processMessages(data);
|
||||
});
|
||||
@@ -70,7 +70,7 @@ class Popup{
|
||||
// notarizeClicked is triggered when Notarize of Notariza after click was pressed
|
||||
notarizeClicked(isAfterClick){
|
||||
isAfterClick = isAfterClick || false;
|
||||
const msg = isAfterClick ? 'notarizeAfter' : 'notarize';
|
||||
const msg = isAfterClick ? 'notarizeAfter' : 'notarize';
|
||||
if (this.is_firefox && ! this.hasPermission && this.currentUrl.startsWith('https://')){
|
||||
// in Firefox we give a temporary permission just for the current tab's URL
|
||||
// also no async/await/callback here, otherwise Firefox will complain
|
||||
@@ -142,13 +142,13 @@ class Popup{
|
||||
console.log('popup received unexpected message ' + data.message);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// showInProgressDiv show the <div> with progress info and listens for
|
||||
// progress updates
|
||||
showInProgressDiv(isFirstTimeSetup){
|
||||
document.getElementById('menu').setAttribute('hidden', '');
|
||||
document.getElementById('in_progress').removeAttribute('hidden');
|
||||
|
||||
|
||||
const progressBars = {};
|
||||
const types = ['download', 'upload', 'garbling', 'last_stage'];
|
||||
if (isFirstTimeSetup){
|
||||
@@ -186,12 +186,12 @@ class Popup{
|
||||
|
||||
|
||||
moveBar(bar, goalWidth) {
|
||||
const curWidth = Number(bar.style.width.slice(0,-1));
|
||||
const curWidth = Number(bar.style.width.slice(0, -1));
|
||||
if (curWidth === goalWidth){
|
||||
return; // no update needed
|
||||
}
|
||||
bar.style.width = String(goalWidth) + '%';
|
||||
bar.innerHTML = String(goalWidth) + '%';
|
||||
bar.innerHTML = String(goalWidth) + '%';
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
/* global chrome*/
|
||||
|
||||
import {decode_str} from './utils.js';
|
||||
|
||||
class RawViewer{
|
||||
@@ -7,7 +9,7 @@ class RawViewer{
|
||||
// isReady will be se to true after message listener is installed
|
||||
window.isReady = false;
|
||||
}
|
||||
|
||||
|
||||
main(){
|
||||
chrome.runtime.onMessage.addListener(function(obj) {
|
||||
if (obj.destination !== 'rawviewer') return;
|
||||
|
||||
14
ui/Viewer.js
14
ui/Viewer.js
@@ -1,3 +1,5 @@
|
||||
/* global chrome*/
|
||||
|
||||
import {NotificationBar} from './NotificationBar.js';
|
||||
import {FileChooser} from './FileChooser.js';
|
||||
import {decode_str, str2ba } from './utils.js';
|
||||
@@ -7,7 +9,7 @@ class Viewer{
|
||||
window.tabid = null; // allow the extension to put the id of the tab which opened this page
|
||||
window.isViewer = true;
|
||||
// isFileChooser will be toggled to true if extension calls Main.openFileChooser()
|
||||
window.isFileChooser = false;
|
||||
window.isFileChooser = false;
|
||||
// isReady will be se to true after message listener is installed
|
||||
window.isReady = false;
|
||||
}
|
||||
@@ -32,7 +34,7 @@ class Viewer{
|
||||
throw 'unexpected message';
|
||||
}
|
||||
console.log('got data in viewer');
|
||||
var hideButton = false;
|
||||
var hideButton = false;
|
||||
var text = msg.data.response;
|
||||
console.log('text size is', text.length);
|
||||
// remove the HTTP headers
|
||||
@@ -43,8 +45,8 @@ class Viewer{
|
||||
// add CSP to prevent loading any resources from the page
|
||||
const csp = '<meta http-equiv=\'Content-Security-Policy\' content="default-src \'none\'; img-src data:"></meta>\r\n';
|
||||
document.write(csp + decode_str(http_body));
|
||||
}
|
||||
else {
|
||||
}
|
||||
else {
|
||||
// a file which cannot be shown but has to be downloaded like e.g. PDF
|
||||
document.getElementById('type').textContent = type;
|
||||
document.getElementById('view file button').onclick = function() {
|
||||
@@ -68,10 +70,10 @@ class Viewer{
|
||||
if (line.search(/content-type:\s*/i) < 0) continue;
|
||||
if (line.match('application/pdf')){
|
||||
return 'pdf';
|
||||
}
|
||||
}
|
||||
else if (line.match('image/jpeg')){
|
||||
return 'jpg';
|
||||
}
|
||||
}
|
||||
return 'html';
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user