[BLY-66] direct upload pparams (#28)

* bucket check and async setup
clients perform direct setup by default

* (python) more consistent json for internal api

all requests and response are JSON.
all binary payloads are explicitly encoded as base64
within api.py, and decoded back to bytes before leaving api.py.
User-facing code, e.g. bucket.py and bucket_service.py,
should not see base64 wrangling.

* Support async for all ops

refactor api.py to be async-first
use new asyncio loops to support non-async interface;
cannot call non-async methods from async context

* [js] update client to work with unified service
bump both versions to 0.2.1
disable npm/pypi publish except on manual workflow run

* disable request compression

* fix workflow tests

update standalone Spiral test server to use new JSON interface
This commit is contained in:
Neil Movva
2023-09-11 16:55:35 -07:00
committed by GitHub
parent 7740f75ec2
commit 9604fd30e1
23 changed files with 1021 additions and 826 deletions

View File

@@ -67,7 +67,7 @@ jobs:
client-publish:
name: Publish JavaScript SDK
runs-on: ubuntu-latest
if: ${{ inputs.publish || github.ref == 'refs/heads/main' }}
if: ${{ inputs.publish }}
needs: [client-build]
steps:
- uses: actions/checkout@v3

View File

@@ -144,7 +144,7 @@ jobs:
publish:
name: Publish
runs-on: ubuntu-latest
if: ${{ inputs.publish || github.ref == 'refs/heads/main' }}
if: ${{ inputs.publish }}
needs: [linux, macos, windows, sdist]
steps:
- uses: actions/download-artifact@v3

View File

@@ -30,15 +30,29 @@ function generateKeys(n: number, seed: number = 0): string[] {
);
}
async function generateKVPairs(n: number, seed: number, itemSize: number): Promise<{ [key: string]: Uint8Array }> {
const keys = generateKeys(n, seed);
const kvPairs: { [key: string]: Uint8Array } = {};
for (const key of keys) {
kvPairs[key] = await keyToValue(key, itemSize);
}
return kvPairs;
}
function getRandomKey(kvP: { [key: string]: Uint8Array }): string {
return Object.keys(kvP)[Math.floor(Math.random() * Object.keys(kvP).length)];
}
function generateBucketName(): string {
return 'api-tester-' + Math.random().toString(16).substring(2, 10);
}
async function testBlyssService(endpoint: string = 'https://dev2.api.blyss.dev') {
const apiKey = process.env.BLYSS_API_KEY;
if (!apiKey) {
throw new Error('BLYSS_API_KEY environment variable is not set');
}
async function testBlyssService(endpoint: string, apiKey: string) {
console.log('Using key: ' + apiKey + ' to connect to ' + endpoint);
const client: Client = await new blyss.Client(
{
@@ -55,50 +69,44 @@ async function testBlyssService(endpoint: string = 'https://dev2.api.blyss.dev')
// generate N random keys
const N = 100;
const itemSize = 32;
let localKeys = generateKeys(N);
function getRandomKey(): string {
return localKeys[Math.floor(Math.random() * localKeys.length)];
}
let kvPairs = await generateKVPairs(N, 0, itemSize);
// write all N keys
await bucket.write(
await Promise.all(localKeys.map(
async (k) => ({
k: await keyToValue(k, itemSize)
})
))
kvPairs
);
console.log(`Wrote ${N} keys`);
// read a random key
let testKey = getRandomKey();
let testKey = getRandomKey(kvPairs);
console.log(`Reading key ${testKey}`)
await bucket.setup();
console.log("1111");
let value = await bucket.privateRead(testKey);
await verifyRead(testKey, value);
console.log(`Read key ${testKey}`);
// delete testKey from the bucket, and localData.
await bucket.deleteKey(testKey);
localKeys.splice(localKeys.indexOf(testKey), 1);
console.log(`Deleted key ${testKey}`);
// write a new value
testKey = 'newKey0';
await bucket.write({ testKey: keyToValue(testKey, itemSize) });
localKeys.push(testKey);
let newValue = await keyToValue(testKey, itemSize);
await bucket.write({ testKey: newValue });
kvPairs[testKey] = newValue;
console.log(`Wrote key ${testKey}`);
// clear all keys
await bucket.clearEntireBucket();
localKeys = [];
kvPairs = {};
console.log('Cleared bucket');
// write a new set of N keys
localKeys = generateKeys(N, 1);
kvPairs = await generateKVPairs(N, 1, itemSize);
await bucket.write(
await Promise.all(localKeys.map(
async (k) => ({
k: await keyToValue(k, itemSize)
})
))
kvPairs
);
console.log(`Wrote ${N} keys`);
@@ -109,7 +117,7 @@ async function testBlyssService(endpoint: string = 'https://dev2.api.blyss.dev')
console.log(await bucket.info());
// random read
testKey = getRandomKey();
testKey = getRandomKey(kvPairs);
value = await bucket.privateRead(testKey);
await verifyRead(testKey, value);
console.log(`Read key ${testKey}`);
@@ -119,11 +127,15 @@ async function testBlyssService(endpoint: string = 'https://dev2.api.blyss.dev')
console.log(`Destroyed bucket ${bucket.name}`);
}
async function main() {
const endpoint = "https://dev2.api.blyss.dev"
console.log('Testing Blyss service at URL ' + endpoint);
await testBlyssService(endpoint);
async function main(endpoint: string, apiKey: string) {
if (!apiKey) {
throw new Error('BLYSS_API_KEY environment variable is not set');
}
await testBlyssService(endpoint, apiKey);
console.log('All tests completed successfully.');
}
main();
// get endpoint and api key from command line, or fallback to defaults
const endpoint = process.argv[2] || 'https://beta.api.blyss.dev';
const apiKey = process.argv[3] || process.env.BLYSS_API_KEY;
main(endpoint, apiKey);

View File

@@ -8,17 +8,20 @@ export default async function main(port: string) {
console.log(bucket.metadata);
// buckets are bytes-in/bytes-out. SDK write() will automatically serialize as UTF-8.
await bucket.write({
Ohio: 'Columbus',
California: 'Sacramento'
});
let capital = await bucket.privateRead('Ohio');
// but reads are always bytes-out, and must be decoded.
let capital = new TextDecoder().decode(await bucket.privateRead('Ohio'));
if (capital !== 'Columbus') {
throw 'Incorrect result.';
}
capital = await bucket.privateRead('California');
// capital = await bucket.privateRead('California');
capital = new TextDecoder().decode(await bucket.privateRead('California'));
if (capital !== 'Sacramento') {
throw 'Incorrect result.';
}

2
js/bridge/Cargo.lock generated
View File

@@ -487,7 +487,7 @@ dependencies = [
[[package]]
name = "spiral-rs-js-bridge"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"console_error_panic_hook",
"doublepir-rs",

View File

@@ -152,34 +152,27 @@ export class Bucket {
keys: string[]
): Promise<any[]> {
if (!this.uuid || !this.check(this.uuid)) {
console.log('Setting up client');
await this.setup();
}
const queries: { key: string; queryData: Uint8Array }[] = [];
// For each key, generate a query, encode it as base64, and append it to a list.
let queries: Uint8Array[] = [];
for (const key of keys) {
const rowIdx = this.lib.getRow(key);
const queryData = this.lib.generateQuery(this.uuid, rowIdx);
queries.push({ key, queryData });
queries.push(queryData);
}
const endResults = [];
const batches = Math.ceil(queries.length / this.batchSize);
for (let i = 0; i < batches; i++) {
const queriesForBatch = queries.slice(
i * this.batchSize,
(i + 1) * this.batchSize
);
// Send the list of queries to the server.
const rawResults = await this.api.privateReadJson(this.name, queries);
const queryBatch = serializeChunks(queriesForBatch.map(x => x.queryData));
const rawResultChunks = await this.getRawResponse(queryBatch);
const rawResults = deserializeChunks(rawResultChunks);
const batchEndResults = await Promise.all(
rawResults.map((r, i) => this.getEndResult(queriesForBatch[i].key, r))
);
endResults.push(...batchEndResults);
}
// For each query, decrypt the result, decompress it, and extract the
// result.
const endResults = await Promise.all(
rawResults.map((r, i) => this.getEndResult(keys[i], r))
);
return endResults;
}
@@ -328,12 +321,6 @@ export class Bucket {
this.name = newBucketName;
}
/** Gets info on all keys in this bucket. */
async listKeys(): Promise<KeyInfo[]> {
this.ensureSpiral();
return await this.api.listKeys(this.name);
}
/**
* Make a write to this bucket.
*
@@ -343,27 +330,20 @@ export class Bucket {
* 1024 UTF-8 bytes.
*/
async write(
keyValuePairs: { [key: string]: any }
keyValuePairs: { [key: string]: Uint8Array | string | null }
) {
this.ensureSpiral();
const data = [];
// convert any string KV pairs to Uint8Array
const kvPairs: { [key: string]: Uint8Array | null } = {};
for (const key in keyValuePairs) {
if (Object.prototype.hasOwnProperty.call(keyValuePairs, key)) {
const value = keyValuePairs[key];
const valueBytes = serialize(value);
const keyBytes = new TextEncoder().encode(key);
const serializedKeyValue = wrapKeyValue(keyBytes, valueBytes);
data.push(serializedKeyValue);
// const kv = {
// key: key,
// value: Buffer.from(valueBytes).toString('base64')
// }
const value = keyValuePairs[key];
if (!(value instanceof Uint8Array)) {
kvPairs[key] = new TextEncoder().encode(value);
} else {
kvPairs[key] = value;
}
}
const concatenatedData = concatBytes(data);
// const concatenatedData = serialize(data);
await this.api.write(this.name, concatenatedData);
await this.api.write(this.name, kvPairs);
}
/**

View File

@@ -97,7 +97,7 @@ export class BucketService {
const parameters = { ...DEFAULT_BUCKET_PARAMETERS, ...params };
const bucketCreateReq = {
name: bucketName,
parameters: JSON.stringify(parameters),
parameters,
open_access: openAccess
};
await this.api.create(JSON.stringify(bucketCreateReq));

View File

@@ -2,6 +2,7 @@ import { KeyInfo } from '../bucket/bucket';
import { BLYSS_HINT_URL_PREFIX } from '../bucket/bucket_service';
import { gzip } from '../compression/pako';
import { BloomFilter, bloomFilterFromBytes } from '../data/bloom';
import { base64ToBytes, bytesToBase64 } from './seed';
const CREATE_PATH = '/create';
const MODIFY_PATH = '/modify';
@@ -46,7 +47,7 @@ async function getData(
headers
});
if (response.status < 200 || response.status > 299) {
if (!response.ok) {
throw new ApiError(
response.status,
url,
@@ -87,7 +88,7 @@ async function postData(
headers
});
if (response.status < 200 || response.status > 299) {
if (!response.ok) {
throw new ApiError(
response.status,
url,
@@ -104,6 +105,48 @@ async function postData(
}
}
async function postDataJson(
apiKey: string | null,
url: string,
data: Uint8Array | string,
): Promise<any> {
const headers = new Headers(
{
'Content-Type': 'application/json',
'Accept-Encoding': 'gzip'
}
);
if (apiKey) headers.append('X-API-Key', apiKey);
// base64 encode bytes-like data
if (typeof data !== 'string' && !(data instanceof String)) {
data = JSON.stringify(bytesToBase64(data));
}
// // compress
// data = gzip(data);
// headers.append('Content-Encoding', 'gzip');
const response = await fetch(url, {
method: 'POST',
body: data,
headers
});
if (!response.ok) {
throw new ApiError(
response.status,
url,
await response.text(),
response.statusText
);
}
return response.json();
}
async function postFormData(
url: string,
fields: any,
@@ -124,7 +167,7 @@ async function postFormData(
const response = await fetch(req);
if (response.status < 200 || response.status > 299) {
if (!response.ok) {
throw new ApiError(
response.status,
url,
@@ -250,20 +293,6 @@ class Api {
return filter;
}
/**
* Lists all keys in a bucket.
*
* @param bucketName The name of the bucket.
* @returns A list of information on every key in the bucket.
*/
async listKeys(bucketName: string): Promise<KeyInfo[]> {
return await getData(
this.apiKey,
this.urlFor(bucketName, LIST_KEYS_PATH),
true
);
}
/**
* Upload new setup data.
*
@@ -271,7 +300,7 @@ class Api {
* @param data The setup data.
* @returns The setup data upload response, containing a UUID.
*/
async setup(bucketName: string, data: Uint8Array): Promise<any> {
async setupS3(bucketName: string, data: Uint8Array): Promise<any> {
if (this.bucketEndpoint) {
return await postData(
this.apiKey,
@@ -294,6 +323,10 @@ class Api {
return prelim_result;
}
async setup(bucketName: string, data: Uint8Array): Promise<any> {
return await postDataJson(this.apiKey, this.urlFor(bucketName, SETUP_PATH), data);
}
/**
* Download hint data.
*
@@ -329,22 +362,30 @@ class Api {
}
/** Write to this bucket. */
async write(bucketName: string, data: Uint8Array) {
async write(bucketName: string, kvPairs: { [key: string]: Uint8Array | null }) {
// replace non-null values with base64-encoded strings, and leave null values as is
const json_data = JSON.stringify(
Object.fromEntries(
Object.entries(kvPairs).map(([k, v]) => [k, v ? bytesToBase64(v) : null])
)
);
await postData(
this.apiKey,
this.urlFor(bucketName, WRITE_PATH),
data,
json_data,
false
);
}
/** Delete a key in this bucket. */
async deleteKey(bucketName: string, key: string) {
await postData(
const kvDelete: { [key: string]: string | null } = { [key]: null };
await postDataJson(
this.apiKey,
this.urlFor(bucketName, DELETE_PATH),
new TextEncoder().encode(key),
false
this.urlFor(bucketName, WRITE_PATH),
JSON.stringify(kvDelete),
);
}
@@ -358,6 +399,29 @@ class Api {
);
}
/**
* Privately read data from this bucket, returning bytes or null if the item was not found.
*/
async privateReadJson(bucketName: string, queries: Uint8Array[]): Promise<(Uint8Array | null)[]> {
// base64 encode each query
const queryStrings = queries.map(q => btoa(String.fromCharCode.apply(null, q)));
// Send list of Base64-encoded queries to the server
const resultsB64: (string | null)[] = await postData(
this.apiKey,
this.urlFor(bucketName, READ_PATH),
JSON.stringify(queryStrings),
true
);
// Parse results from the server, which are each either a Base64-encoded string or null
const results: (Uint8Array | null)[] = resultsB64.map(r => r ? Uint8Array.from(atob(r), c => c.charCodeAt(0)) : null);
return results;
}
/** Privately read data from this bucket. */
async privateReadMultipart(
bucketName: string,

15
lib/server/Cargo.lock generated
View File

@@ -937,6 +937,20 @@ name = "serde"
version = "1.0.159"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.159"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.13",
]
[[package]]
name = "serde_json"
@@ -1041,6 +1055,7 @@ dependencies = [
"rand",
"rand_chacha",
"rayon",
"serde",
"serde_json",
"sha2",
"spiral-rs",

View File

@@ -24,7 +24,8 @@ default = []
[dependencies]
spiral-rs = { version = "0.2.1-alpha.2", path = "../spiral-rs" }
rand = { version = "0.8.5", features = ["small_rng"] }
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0"}
rayon = "1.5.2"
rand_chacha = "0.3.1"

View File

@@ -1,4 +1,5 @@
use actix_web::HttpServer;
use serde::Serialize;
use spiral_rs::client::*;
use spiral_rs::params::*;
use spiral_rs::util::*;
@@ -17,7 +18,6 @@ use uuid::Uuid;
use actix_web::error::PayloadError;
use actix_web::{get, post, web, App};
use base64::{engine::general_purpose, Engine as _};
struct ServerState {
params: &'static Params,
@@ -49,7 +49,11 @@ async fn write(body: web::Bytes, data: web::Data<ServerState>) -> Result<String,
let mut db_mut = data.db.write().unwrap();
let kv_pairs = unwrap_kv_pairs(&body);
update_database(data.params, &kv_pairs, &mut rows_mut, &mut db_mut);
let kv_pairs_slices: Vec<(&str, &[u8])> = kv_pairs
.iter()
.map(|(key, value)| (key.as_str(), value.as_slice()))
.collect();
update_database(data.params, &kv_pairs_slices, &mut rows_mut, &mut db_mut);
let mut version_mut = data.version.write().unwrap();
*version_mut += 1;
@@ -59,19 +63,34 @@ async fn write(body: web::Bytes, data: web::Data<ServerState>) -> Result<String,
))
}
#[derive(Serialize)]
pub struct UuidResponse {
pub uuid: String,
}
#[post("/setup")]
async fn setup(
body: web::Bytes,
body: String,
data: web::Data<ServerState>,
) -> Result<String, actix_web::error::Error> {
// parse body as json str
let body_str = serde_json::from_str::<String>(&body).unwrap();
// decode body from base64
let client_pub_params = base64::decode(&body_str).unwrap();
let mut pub_params_map_mut = data.pub_params.write().unwrap();
assert_eq!(body.len(), data.params.setup_bytes());
let pub_params = PublicParameters::deserialize(&data.params, &body);
assert_eq!(client_pub_params.len(), data.params.setup_bytes());
let pub_params = PublicParameters::deserialize(&data.params, &client_pub_params);
let uuid = Uuid::new_v4();
pub_params_map_mut.insert(uuid.to_string(), pub_params);
Ok(format!("{{\"uuid\":\"{}\"}}", uuid.to_string()))
// return uuid as JSON string
let uuid_json = serde_json::to_string(&UuidResponse {
uuid: uuid.to_string(),
})
.unwrap();
Ok(uuid_json)
}
const UUID_V4_STR_BYTES: usize = 36;
@@ -126,22 +145,22 @@ async fn private_read(
body: web::Bytes,
data: web::Data<ServerState>,
) -> Result<String, actix_web::error::Error> {
let mut out = Vec::new();
let mut i = 0;
let num_chunks = u64::from_le_bytes(body[..8].try_into().unwrap()) as usize;
i += 8;
out.extend(u64::to_le_bytes(num_chunks as u64));
for _ in 0..num_chunks {
let chunk_len = u64::from_le_bytes(body[i..i + 8].try_into().unwrap()) as usize;
i += 8;
let result = private_read_impl(&body[i..i + chunk_len], data.clone()).await?;
i += chunk_len;
// parse body as list of json strings
let query_strs = serde_json::from_slice::<Vec<String>>(&body).unwrap();
out.extend(u64::to_le_bytes(result.len() as u64));
out.extend(result);
let mut out = Vec::new();
for query_str in query_strs.iter() {
// decode each query from base64
let query_bytes = base64::decode(query_str).unwrap();
let result = private_read_impl(&query_bytes, data.clone()).await?;
// store base64-encoded results in out
let result_str = base64::encode(&result);
out.push(result_str);
}
Ok(general_purpose::STANDARD.encode(out))
let out_json = serde_json::to_string(&out).unwrap();
Ok(out_json)
}
#[get("/meta")]

View File

@@ -1,5 +1,6 @@
use std::{collections::HashMap, io::Read};
use base64::{engine::general_purpose, Engine};
use bzip2::{read::BzEncoder, Compression};
use sha2::{Digest, Sha256};
use spiral_rs::params::Params;
@@ -125,30 +126,20 @@ pub fn update_row(row: &mut Vec<u8>, key: &str, value: &[u8]) {
}
}
pub fn unwrap_kv_pairs(data: &[u8]) -> Vec<(&str, &[u8])> {
pub fn unwrap_kv_pairs(data: &[u8]) -> Vec<(String, Vec<u8>)> {
let mut kv_pairs = Vec::new();
let mut i = 0;
while i < data.len() {
// 1. Read key length.
let (key_len, key_len_bytes) = varint_decode(&data[i..]);
i += key_len_bytes;
// 2. Read key.
let key_bytes = &data[i..i + key_len];
i += key_len;
// 3. Read value length.
let (value_len, value_len_bytes) = varint_decode(&data[i..]);
i += value_len_bytes;
// 4. Read value.
let value_bytes = &data[i..i + value_len];
i += value_len;
// 5. Yield key/value pair.
let pair = (std::str::from_utf8(key_bytes).unwrap(), value_bytes);
kv_pairs.push(pair);
// Parse the data as a JSON object
if let Ok(json_data) = serde_json::from_slice::<HashMap<String, String>>(data) {
for (key, base64_value) in json_data.iter() {
// Decode the Base64-encoded value
if let Ok(decoded_value) = base64::decode(base64_value) {
kv_pairs.push((key.clone(), decoded_value));
}
}
}
// print KV pairs
println!("kv_pairs: {:?}", kv_pairs);
kv_pairs
}

584
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "@blyss/sdk",
"version": "0.1.8",
"version": "0.2.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@blyss/sdk",
"version": "0.1.8",
"version": "0.2.0",
"license": "MIT",
"devDependencies": {
"@jest/globals": "^29.2.2",
@@ -144,9 +144,9 @@
"dev": true
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
@@ -194,9 +194,9 @@
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
@@ -1828,9 +1828,9 @@
}
},
"node_modules/@jridgewell/source-map": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
"integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.5.tgz",
"integrity": "sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ==",
"dev": true,
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.0",
@@ -2027,9 +2027,9 @@
}
},
"node_modules/@types/estree": {
"version": "0.0.51",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz",
"integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz",
"integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==",
"dev": true
},
"node_modules/@types/express": {
@@ -2555,148 +2555,148 @@
}
},
"node_modules/@webassemblyjs/ast": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
"integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz",
"integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==",
"dev": true,
"dependencies": {
"@webassemblyjs/helper-numbers": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1"
"@webassemblyjs/helper-numbers": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6"
}
},
"node_modules/@webassemblyjs/floating-point-hex-parser": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz",
"integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz",
"integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==",
"dev": true
},
"node_modules/@webassemblyjs/helper-api-error": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz",
"integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz",
"integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==",
"dev": true
},
"node_modules/@webassemblyjs/helper-buffer": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz",
"integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz",
"integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==",
"dev": true
},
"node_modules/@webassemblyjs/helper-numbers": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz",
"integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz",
"integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==",
"dev": true,
"dependencies": {
"@webassemblyjs/floating-point-hex-parser": "1.11.1",
"@webassemblyjs/helper-api-error": "1.11.1",
"@webassemblyjs/floating-point-hex-parser": "1.11.6",
"@webassemblyjs/helper-api-error": "1.11.6",
"@xtuc/long": "4.2.2"
}
},
"node_modules/@webassemblyjs/helper-wasm-bytecode": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz",
"integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz",
"integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==",
"dev": true
},
"node_modules/@webassemblyjs/helper-wasm-section": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz",
"integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz",
"integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==",
"dev": true,
"dependencies": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-buffer": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/wasm-gen": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-buffer": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6"
}
},
"node_modules/@webassemblyjs/ieee754": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz",
"integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz",
"integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==",
"dev": true,
"dependencies": {
"@xtuc/ieee754": "^1.2.0"
}
},
"node_modules/@webassemblyjs/leb128": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz",
"integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz",
"integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==",
"dev": true,
"dependencies": {
"@xtuc/long": "4.2.2"
}
},
"node_modules/@webassemblyjs/utf8": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz",
"integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz",
"integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==",
"dev": true
},
"node_modules/@webassemblyjs/wasm-edit": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz",
"integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz",
"integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==",
"dev": true,
"dependencies": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-buffer": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/helper-wasm-section": "1.11.1",
"@webassemblyjs/wasm-gen": "1.11.1",
"@webassemblyjs/wasm-opt": "1.11.1",
"@webassemblyjs/wasm-parser": "1.11.1",
"@webassemblyjs/wast-printer": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-buffer": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/helper-wasm-section": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6",
"@webassemblyjs/wasm-opt": "1.11.6",
"@webassemblyjs/wasm-parser": "1.11.6",
"@webassemblyjs/wast-printer": "1.11.6"
}
},
"node_modules/@webassemblyjs/wasm-gen": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz",
"integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz",
"integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==",
"dev": true,
"dependencies": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/ieee754": "1.11.1",
"@webassemblyjs/leb128": "1.11.1",
"@webassemblyjs/utf8": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/ieee754": "1.11.6",
"@webassemblyjs/leb128": "1.11.6",
"@webassemblyjs/utf8": "1.11.6"
}
},
"node_modules/@webassemblyjs/wasm-opt": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz",
"integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz",
"integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==",
"dev": true,
"dependencies": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-buffer": "1.11.1",
"@webassemblyjs/wasm-gen": "1.11.1",
"@webassemblyjs/wasm-parser": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-buffer": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6",
"@webassemblyjs/wasm-parser": "1.11.6"
}
},
"node_modules/@webassemblyjs/wasm-parser": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz",
"integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz",
"integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==",
"dev": true,
"dependencies": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-api-error": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/ieee754": "1.11.1",
"@webassemblyjs/leb128": "1.11.1",
"@webassemblyjs/utf8": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-api-error": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/ieee754": "1.11.6",
"@webassemblyjs/leb128": "1.11.6",
"@webassemblyjs/utf8": "1.11.6"
}
},
"node_modules/@webassemblyjs/wast-printer": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz",
"integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz",
"integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==",
"dev": true,
"dependencies": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/ast": "1.11.6",
"@xtuc/long": "4.2.2"
}
},
@@ -2762,9 +2762,9 @@
}
},
"node_modules/acorn": {
"version": "8.8.1",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz",
"integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==",
"version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
"integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==",
"dev": true,
"bin": {
"acorn": "bin/acorn"
@@ -2774,9 +2774,9 @@
}
},
"node_modules/acorn-import-assertions": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz",
"integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==",
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz",
"integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==",
"dev": true,
"peerDependencies": {
"acorn": "^8"
@@ -3842,9 +3842,9 @@
}
},
"node_modules/enhanced-resolve": {
"version": "5.10.0",
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz",
"integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==",
"version": "5.15.0",
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz",
"integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==",
"dev": true,
"dependencies": {
"graceful-fs": "^4.2.4",
@@ -3876,9 +3876,9 @@
}
},
"node_modules/es-module-lexer": {
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz",
"integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==",
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.3.0.tgz",
"integrity": "sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA==",
"dev": true
},
"node_modules/escalade": {
@@ -5298,9 +5298,9 @@
}
},
"node_modules/istanbul-lib-instrument/node_modules/semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
@@ -7276,9 +7276,9 @@
}
},
"node_modules/make-dir/node_modules/semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
@@ -8742,9 +8742,9 @@
}
},
"node_modules/semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"dependencies": {
"lru-cache": "^6.0.0"
@@ -8787,9 +8787,9 @@
"dev": true
},
"node_modules/serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz",
"integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==",
"dev": true,
"dependencies": {
"randombytes": "^2.1.0"
@@ -9209,13 +9209,13 @@
}
},
"node_modules/terser": {
"version": "5.15.1",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.15.1.tgz",
"integrity": "sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw==",
"version": "5.19.4",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.19.4.tgz",
"integrity": "sha512-6p1DjHeuluwxDXcuT9VR8p64klWJKo1ILiy19s6C9+0Bh2+NWTX6nD9EPppiER4ICkHDVB1RkVpin/YW2nQn/g==",
"dev": true,
"dependencies": {
"@jridgewell/source-map": "^0.3.2",
"acorn": "^8.5.0",
"@jridgewell/source-map": "^0.3.3",
"acorn": "^8.8.2",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
},
@@ -9227,16 +9227,16 @@
}
},
"node_modules/terser-webpack-plugin": {
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz",
"integrity": "sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ==",
"version": "5.3.9",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz",
"integrity": "sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==",
"dev": true,
"dependencies": {
"@jridgewell/trace-mapping": "^0.3.14",
"@jridgewell/trace-mapping": "^0.3.17",
"jest-worker": "^27.4.5",
"schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.0",
"terser": "^5.14.1"
"serialize-javascript": "^6.0.1",
"terser": "^5.16.8"
},
"engines": {
"node": ">= 10.13.0"
@@ -9292,9 +9292,9 @@
"dev": true
},
"node_modules/terser-webpack-plugin/node_modules/schema-utils": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
"integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz",
"integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==",
"dev": true,
"dependencies": {
"@types/json-schema": "^7.0.8",
@@ -9749,22 +9749,22 @@
}
},
"node_modules/webpack": {
"version": "5.74.0",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.74.0.tgz",
"integrity": "sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA==",
"version": "5.88.2",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.88.2.tgz",
"integrity": "sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ==",
"dev": true,
"dependencies": {
"@types/eslint-scope": "^3.7.3",
"@types/estree": "^0.0.51",
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/wasm-edit": "1.11.1",
"@webassemblyjs/wasm-parser": "1.11.1",
"@types/estree": "^1.0.0",
"@webassemblyjs/ast": "^1.11.5",
"@webassemblyjs/wasm-edit": "^1.11.5",
"@webassemblyjs/wasm-parser": "^1.11.5",
"acorn": "^8.7.1",
"acorn-import-assertions": "^1.7.6",
"acorn-import-assertions": "^1.9.0",
"browserslist": "^4.14.5",
"chrome-trace-event": "^1.0.2",
"enhanced-resolve": "^5.10.0",
"es-module-lexer": "^0.9.0",
"enhanced-resolve": "^5.15.0",
"es-module-lexer": "^1.2.1",
"eslint-scope": "5.1.1",
"events": "^3.2.0",
"glob-to-regexp": "^0.4.1",
@@ -9773,9 +9773,9 @@
"loader-runner": "^4.2.0",
"mime-types": "^2.1.27",
"neo-async": "^2.6.2",
"schema-utils": "^3.1.0",
"schema-utils": "^3.2.0",
"tapable": "^2.1.1",
"terser-webpack-plugin": "^5.1.3",
"terser-webpack-plugin": "^5.3.7",
"watchpack": "^2.4.0",
"webpack-sources": "^3.2.3"
},
@@ -9992,9 +9992,9 @@
"dev": true
},
"node_modules/webpack/node_modules/schema-utils": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
"integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz",
"integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==",
"dev": true,
"dependencies": {
"@types/json-schema": "^7.0.8",
@@ -10054,9 +10054,9 @@
"dev": true
},
"node_modules/word-wrap": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
"dev": true,
"engines": {
"node": ">=0.10.0"
@@ -10290,9 +10290,9 @@
"dev": true
},
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true
}
}
@@ -10329,9 +10329,9 @@
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true
}
}
@@ -11564,9 +11564,9 @@
"dev": true
},
"@jridgewell/source-map": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
"integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.5.tgz",
"integrity": "sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ==",
"dev": true,
"requires": {
"@jridgewell/gen-mapping": "^0.3.0",
@@ -11754,9 +11754,9 @@
}
},
"@types/estree": {
"version": "0.0.51",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz",
"integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz",
"integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==",
"dev": true
},
"@types/express": {
@@ -12160,148 +12160,148 @@
}
},
"@webassemblyjs/ast": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
"integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz",
"integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==",
"dev": true,
"requires": {
"@webassemblyjs/helper-numbers": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1"
"@webassemblyjs/helper-numbers": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6"
}
},
"@webassemblyjs/floating-point-hex-parser": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz",
"integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz",
"integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==",
"dev": true
},
"@webassemblyjs/helper-api-error": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz",
"integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz",
"integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==",
"dev": true
},
"@webassemblyjs/helper-buffer": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz",
"integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz",
"integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==",
"dev": true
},
"@webassemblyjs/helper-numbers": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz",
"integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz",
"integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==",
"dev": true,
"requires": {
"@webassemblyjs/floating-point-hex-parser": "1.11.1",
"@webassemblyjs/helper-api-error": "1.11.1",
"@webassemblyjs/floating-point-hex-parser": "1.11.6",
"@webassemblyjs/helper-api-error": "1.11.6",
"@xtuc/long": "4.2.2"
}
},
"@webassemblyjs/helper-wasm-bytecode": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz",
"integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz",
"integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==",
"dev": true
},
"@webassemblyjs/helper-wasm-section": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz",
"integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz",
"integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==",
"dev": true,
"requires": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-buffer": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/wasm-gen": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-buffer": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6"
}
},
"@webassemblyjs/ieee754": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz",
"integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz",
"integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==",
"dev": true,
"requires": {
"@xtuc/ieee754": "^1.2.0"
}
},
"@webassemblyjs/leb128": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz",
"integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz",
"integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==",
"dev": true,
"requires": {
"@xtuc/long": "4.2.2"
}
},
"@webassemblyjs/utf8": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz",
"integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz",
"integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==",
"dev": true
},
"@webassemblyjs/wasm-edit": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz",
"integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz",
"integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==",
"dev": true,
"requires": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-buffer": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/helper-wasm-section": "1.11.1",
"@webassemblyjs/wasm-gen": "1.11.1",
"@webassemblyjs/wasm-opt": "1.11.1",
"@webassemblyjs/wasm-parser": "1.11.1",
"@webassemblyjs/wast-printer": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-buffer": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/helper-wasm-section": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6",
"@webassemblyjs/wasm-opt": "1.11.6",
"@webassemblyjs/wasm-parser": "1.11.6",
"@webassemblyjs/wast-printer": "1.11.6"
}
},
"@webassemblyjs/wasm-gen": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz",
"integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz",
"integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==",
"dev": true,
"requires": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/ieee754": "1.11.1",
"@webassemblyjs/leb128": "1.11.1",
"@webassemblyjs/utf8": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/ieee754": "1.11.6",
"@webassemblyjs/leb128": "1.11.6",
"@webassemblyjs/utf8": "1.11.6"
}
},
"@webassemblyjs/wasm-opt": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz",
"integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz",
"integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==",
"dev": true,
"requires": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-buffer": "1.11.1",
"@webassemblyjs/wasm-gen": "1.11.1",
"@webassemblyjs/wasm-parser": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-buffer": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6",
"@webassemblyjs/wasm-parser": "1.11.6"
}
},
"@webassemblyjs/wasm-parser": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz",
"integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz",
"integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==",
"dev": true,
"requires": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/helper-api-error": "1.11.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.1",
"@webassemblyjs/ieee754": "1.11.1",
"@webassemblyjs/leb128": "1.11.1",
"@webassemblyjs/utf8": "1.11.1"
"@webassemblyjs/ast": "1.11.6",
"@webassemblyjs/helper-api-error": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/ieee754": "1.11.6",
"@webassemblyjs/leb128": "1.11.6",
"@webassemblyjs/utf8": "1.11.6"
}
},
"@webassemblyjs/wast-printer": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz",
"integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==",
"version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz",
"integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==",
"dev": true,
"requires": {
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/ast": "1.11.6",
"@xtuc/long": "4.2.2"
}
},
@@ -12351,15 +12351,15 @@
}
},
"acorn": {
"version": "8.8.1",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz",
"integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==",
"version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
"integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==",
"dev": true
},
"acorn-import-assertions": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz",
"integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==",
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz",
"integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==",
"dev": true,
"requires": {}
},
@@ -13152,9 +13152,9 @@
"dev": true
},
"enhanced-resolve": {
"version": "5.10.0",
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz",
"integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==",
"version": "5.15.0",
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz",
"integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==",
"dev": true,
"requires": {
"graceful-fs": "^4.2.4",
@@ -13177,9 +13177,9 @@
}
},
"es-module-lexer": {
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz",
"integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==",
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.3.0.tgz",
"integrity": "sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA==",
"dev": true
},
"escalade": {
@@ -14210,9 +14210,9 @@
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true
}
}
@@ -15681,9 +15681,9 @@
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true
}
}
@@ -16658,9 +16658,9 @@
}
},
"semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"requires": {
"lru-cache": "^6.0.0"
@@ -16696,9 +16696,9 @@
}
},
"serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz",
"integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==",
"dev": true,
"requires": {
"randombytes": "^2.1.0"
@@ -17029,28 +17029,28 @@
"dev": true
},
"terser": {
"version": "5.15.1",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.15.1.tgz",
"integrity": "sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw==",
"version": "5.19.4",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.19.4.tgz",
"integrity": "sha512-6p1DjHeuluwxDXcuT9VR8p64klWJKo1ILiy19s6C9+0Bh2+NWTX6nD9EPppiER4ICkHDVB1RkVpin/YW2nQn/g==",
"dev": true,
"requires": {
"@jridgewell/source-map": "^0.3.2",
"acorn": "^8.5.0",
"@jridgewell/source-map": "^0.3.3",
"acorn": "^8.8.2",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
}
},
"terser-webpack-plugin": {
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz",
"integrity": "sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ==",
"version": "5.3.9",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz",
"integrity": "sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==",
"dev": true,
"requires": {
"@jridgewell/trace-mapping": "^0.3.14",
"@jridgewell/trace-mapping": "^0.3.17",
"jest-worker": "^27.4.5",
"schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.0",
"terser": "^5.14.1"
"serialize-javascript": "^6.0.1",
"terser": "^5.16.8"
},
"dependencies": {
"ajv": {
@@ -17079,9 +17079,9 @@
"dev": true
},
"schema-utils": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
"integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz",
"integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==",
"dev": true,
"requires": {
"@types/json-schema": "^7.0.8",
@@ -17390,22 +17390,22 @@
}
},
"webpack": {
"version": "5.74.0",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.74.0.tgz",
"integrity": "sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA==",
"version": "5.88.2",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.88.2.tgz",
"integrity": "sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ==",
"dev": true,
"requires": {
"@types/eslint-scope": "^3.7.3",
"@types/estree": "^0.0.51",
"@webassemblyjs/ast": "1.11.1",
"@webassemblyjs/wasm-edit": "1.11.1",
"@webassemblyjs/wasm-parser": "1.11.1",
"@types/estree": "^1.0.0",
"@webassemblyjs/ast": "^1.11.5",
"@webassemblyjs/wasm-edit": "^1.11.5",
"@webassemblyjs/wasm-parser": "^1.11.5",
"acorn": "^8.7.1",
"acorn-import-assertions": "^1.7.6",
"acorn-import-assertions": "^1.9.0",
"browserslist": "^4.14.5",
"chrome-trace-event": "^1.0.2",
"enhanced-resolve": "^5.10.0",
"es-module-lexer": "^0.9.0",
"enhanced-resolve": "^5.15.0",
"es-module-lexer": "^1.2.1",
"eslint-scope": "5.1.1",
"events": "^3.2.0",
"glob-to-regexp": "^0.4.1",
@@ -17414,9 +17414,9 @@
"loader-runner": "^4.2.0",
"mime-types": "^2.1.27",
"neo-async": "^2.6.2",
"schema-utils": "^3.1.0",
"schema-utils": "^3.2.0",
"tapable": "^2.1.1",
"terser-webpack-plugin": "^5.1.3",
"terser-webpack-plugin": "^5.3.7",
"watchpack": "^2.4.0",
"webpack-sources": "^3.2.3"
},
@@ -17447,9 +17447,9 @@
"dev": true
},
"schema-utils": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
"integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz",
"integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==",
"dev": true,
"requires": {
"@types/json-schema": "^7.0.8",
@@ -17594,9 +17594,9 @@
"dev": true
},
"word-wrap": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
"dev": true
},
"wrap-ansi": {

View File

@@ -1,7 +1,7 @@
{
"author": "Samir Menon <samir@blyss.dev>",
"name": "@blyss/sdk",
"version": "0.2.0",
"version": "0.2.1",
"description": "Blyss SDK, enabling private retrievals from Blyss buckets",
"type": "module",
"main": "./dist/index.js",

2
python/Cargo.lock generated
View File

@@ -25,7 +25,7 @@ dependencies = [
[[package]]
name = "blyss-client-python"
version = "0.2.0"
version = "0.2.1"
dependencies = [
"pyo3",
"spiral-rs",

View File

@@ -1,6 +1,6 @@
[package]
name = "blyss-client-python"
version = "0.2.0"
version = "0.2.1"
edition = "2021"
rust-version = "1.70.0"

View File

@@ -6,13 +6,12 @@ Abstracts all functionality offered by Blyss services.
"""
from typing import Any, Optional, Union
import requests
import httpx
import gzip
import asyncio
import json
import logging
import base64
import asyncio
from blyss.bloom import BloomFilter
@@ -30,6 +29,22 @@ SETUP_PATH = "/setup"
WRITE_PATH = "/write"
READ_PATH = "/private-read"
APIGW_MAX_SIZE = 6e6 / (4 / 3) * 0.95 # 6MB, base64 encoded, plus 5% margin
_GLOBAL_ENABLE_REQUEST_COMPRESSION = False
# Not compatible with nested asyncio loops.
# If the caller is running in an asyncio context, use the async methods directly.
def async_runner(func, *args, **kwargs):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# Run the async function and get the result
result = loop.run_until_complete(func(*args, **kwargs))
return result
finally:
loop.close()
class ApiException(Exception):
"""Exception raised when an API call to the Blyss service fails."""
@@ -42,36 +57,21 @@ class ApiException(Exception):
super().__init__(message)
def _check_http_error(resp: requests.Response | httpx.Response):
def _check_http_error(r: httpx.Response):
"""Throws an ApiException with message on any unsuccessful HTTP response."""
status_code = resp.status_code
status_code = r.status_code
if status_code < 200 or status_code > 299:
try:
errmsg = resp.text
errmsg = r.text
except:
errmsg = f"<undecodable response body, size {len(resp.content)} bytes>"
errmsg = f"<undecodable response body, size {len(r.content)} bytes>"
raise ApiException(
errmsg,
status_code,
)
def _get_data(api_key: Optional[str], url: str) -> bytes:
"""Perform an HTTP GET request, returning bytes"""
headers = {}
if api_key:
headers["x-api-key"] = api_key
logging.info(f"GET {url} {headers}")
resp = requests.get(url, headers=headers)
_check_http_error(resp)
return resp.content
async def _async_get_data(
api_key: Optional[str], url: str, decode_json: bool = True
) -> Any:
async def _async_get(api_key: Optional[str], url: str) -> Any:
headers = {}
if api_key:
headers["x-api-key"] = api_key
@@ -81,82 +81,41 @@ async def _async_get_data(
r = await client.get(url, headers=headers)
_check_http_error(r)
if decode_json:
return r.json()
else:
return r.content
def _get_data_json(api_key: str, url: str) -> dict[Any, Any]:
"""Perform an HTTP GET request, returning a JSON-parsed dict"""
return json.loads(_get_data(api_key, url))
def _post_data(api_key: str, url: str, data: Union[bytes, str]) -> bytes:
"""Perform an HTTP POST request."""
headers = {}
if api_key:
headers["x-api-key"] = api_key
if type(data) == bytes:
headers["Content-Type"] = "application/octet-stream"
logging.info(f"POST {url} (length: {len(data)} bytes)")
resp = None
if type(data) == bytes:
# compress data before sending
zdata = gzip.compress(data)
headers["Content-Encoding"] = "gzip"
resp = requests.post(url, zdata, headers=headers)
else:
resp = requests.post(url, data, headers=headers)
_check_http_error(resp)
return resp.content
def _post_data_json(api_key: str, url: str, data: Union[bytes, str]) -> dict[Any, Any]:
"""Perform an HTTP POST request, returning a JSON-parsed dict"""
return json.loads(_post_data(api_key, url, data))
def _post_form_data(url: str, fields: dict[Any, Any], data: bytes):
"""Perform a multipart/form-data POST"""
files = {"file": data}
resp = requests.post(url, data=fields, files=files)
_check_http_error(resp)
return r.json()
async def _async_post_data(
api_key: str,
url: str,
data: Union[str, bytes],
compress: bool = True,
decode_json: bool = True,
api_key: str, url: str, data: Union[bytes, Any], compress: bool = True
) -> Any:
"""Perform an async HTTP POST request, returning a JSON-parsed dict response"""
headers = {
"x-api-key": api_key,
}
if type(data) == str:
headers["Content-Type"] = "application/json"
data = data.encode("utf-8")
else:
headers["Content-Type"] = "application/octet-stream"
assert type(data) == bytes
headers = {"x-api-key": api_key, "Content-Type": "application/json"}
if compress:
# apply gzip compression to data before sending
data = gzip.compress(data)
headers["Content-Encoding"] = "gzip"
if data is None:
payload = None
else:
if type(data) == bytes:
data_jsonable = base64.b64encode(data).decode("utf-8")
else:
data_jsonable = data
data_json = json.dumps(data_jsonable).encode("utf-8")
if len(data_json) > APIGW_MAX_SIZE:
raise ValueError(
f"Request data is too large ({len(data_json)} JSON bytes); maximum size is {APIGW_MAX_SIZE} bytes"
)
if compress and _GLOBAL_ENABLE_REQUEST_COMPRESSION:
# apply gzip compression to data before sending
payload = gzip.compress(data_json)
headers["Content-Encoding"] = "gzip"
else:
payload = data_json
async with httpx.AsyncClient(timeout=httpx.Timeout(5, read=None)) as client:
r = await client.post(url, content=data, headers=headers)
r = await client.post(url, content=payload, headers=headers)
_check_http_error(r) # type: ignore
if decode_json:
return r.json()
else:
return r.content
_check_http_error(r)
return r.json()
class API:
@@ -180,64 +139,102 @@ class API:
def _service_url_for(self, path: str) -> str:
return self.service_endpoint + path
def create(self, data_json: str) -> dict[Any, Any]:
async def create(self, data_jsonable: dict) -> dict[Any, Any]:
"""Create a new bucket, given the supplied data.
Args:
data_json (str): A JSON-encoded string of the new bucket request.
"""
return _post_data_json(
self.api_key, self._service_url_for(CREATE_PATH), data_json
return await _async_post_data(
self.api_key,
self._service_url_for(CREATE_PATH),
data_jsonable,
)
def check(self, uuid: str) -> dict[Any, Any]:
def _blocking_create(self, *args, **kwargs):
return async_runner(self.create, *args, **kwargs)
async def check(self, uuid: str) -> bool:
"""Check that a UUID is still valid on the server.
Args:
uuid (str): The UUID to check
"""
return _get_data_json(
self.api_key, self._service_url_for("/" + uuid + CHECK_PATH)
)
async def async_check(self, uuid: str) -> dict[Any, Any]:
return await _async_get_data(
self.api_key,
self._service_url_for("/" + uuid + CHECK_PATH),
decode_json=True,
)
try:
await _async_get(
self.api_key, self._service_url_for("/" + uuid + CHECK_PATH)
)
return True
except ApiException as e:
if e.code == 404:
return False
else:
raise e
def list_buckets(self) -> dict[Any, Any]:
def _blocking_check(self, *args, **kwargs):
return async_runner(self.check, *args, **kwargs)
async def exists(self, bucket_name: str) -> bool:
"""Check if a bucket exists.
Args:
bucket_name (str): The name of the bucket to check
"""
try:
await _async_get(
self.api_key, self._service_url_for("/" + bucket_name + CHECK_PATH)
)
return True
except ApiException as e:
if e.code == 404:
return False
else:
raise e
def _blocking_exists(self, *args, **kwargs):
return async_runner(self.exists, *args, **kwargs)
async def list_buckets(self) -> dict[Any, Any]:
"""List all buckets accessible to this API key.
Returns:
dict: A JSON-parsed dict of all buckets accessible to this API key.
"""
return _get_data_json(self.api_key, self._service_url_for(LIST_BUCKETS_PATH))
return await _async_get(self.api_key, self._service_url_for(LIST_BUCKETS_PATH))
def _blocking_list_buckets(self, *args, **kwargs):
return async_runner(self.list_buckets, *args, **kwargs)
# Bucket-specific methods
def _url_for(self, bucket_name: str, path: str) -> str:
return self.service_endpoint + "/" + bucket_name + path
def modify(self, bucket_name: str, data_json: str) -> dict[Any, Any]:
async def modify(self, bucket_name: str, data_jsonable: Any) -> dict[Any, Any]:
"""Modify existing bucket.
Args:
data_json (str): same as create.
"""
return _post_data_json(
self.api_key, self._url_for(bucket_name, MODIFY_PATH), data_json
return await _async_post_data(
self.api_key, self._url_for(bucket_name, MODIFY_PATH), data_jsonable
)
def meta(self, bucket_name: str) -> dict[Any, Any]:
def _blocking_modify(self, *args, **kwargs):
return async_runner(self.modify, *args, **kwargs)
async def meta(self, bucket_name: str) -> dict[Any, Any]:
"""Get metadata about a bucket.
Returns:
dict: Metadata about a bucket.
"""
return _get_data_json(self.api_key, self._url_for(bucket_name, META_PATH))
return await _async_get(self.api_key, self._url_for(bucket_name, META_PATH))
def bloom(self, bucket_name: str) -> BloomFilter:
def _blocking_meta(self, *args, **kwargs):
return async_runner(self.meta, *args, **kwargs)
async def bloom(self, bucket_name: str) -> BloomFilter:
"""Get the Bloom filter for keys in this bucket. The Bloom filter contains all
keys ever inserted into this bucket; it does not remove deleted keys.
@@ -246,69 +243,74 @@ class API:
Returns:
BloomFilter: A Bloom filter for keys in the bucket.
"""
presigned_url = _get_data_json(
self.api_key, self._url_for(bucket_name, BLOOM_PATH)
)["url"]
r = await _async_get(self.api_key, self._url_for(bucket_name, BLOOM_PATH))
presigned_url = r["url"]
raw_bloom_filter = _get_data(None, presigned_url)
raw_bloom_filter = await _async_get(None, presigned_url)
bloom_filter = BloomFilter.from_bytes(raw_bloom_filter)
return bloom_filter
def setup(self, bucket_name: str, data: bytes) -> dict[Any, Any]:
def _blocking_bloom(self, *args, **kwargs):
return async_runner(self.bloom, *args, **kwargs)
async def setup(self, bucket_name: str, data: bytes) -> str:
"""Upload new setup data.
Args:
data (bytes): Setup data to upload.
"""
prelim_result = _post_data_json(
self.api_key,
self._url_for(bucket_name, SETUP_PATH),
json.dumps({"length": len(data)}),
resp = await _async_post_data(
self.api_key, self._url_for(bucket_name, SETUP_PATH), data
)
_post_form_data(prelim_result["url"], prelim_result["fields"], data)
return resp["uuid"]
return prelim_result
def _blocking_setup(self, *args, **kwargs):
return async_runner(self.setup, *args, **kwargs)
def list_keys(self, bucket_name: str) -> list[str]:
"""List all keys in this bucket."""
return _get_data_json(self.api_key, self._url_for(bucket_name, LIST_KEYS_PATH)) # type: ignore
def destroy(self, bucket_name: str):
async def destroy(self, bucket_name: str):
"""Destroy this bucket."""
_post_data(self.api_key, self._url_for(bucket_name, DESTROY_PATH), "")
def clear(self, bucket_name: str):
"""Delete all keys in this bucket."""
_post_data(self.api_key, self._url_for(bucket_name, CLEAR_PATH), "")
def write(self, bucket_name: str, data: bytes):
"""Write some data to this bucket."""
_post_data(self.api_key, self._url_for(bucket_name, WRITE_PATH), data)
async def async_write(self, bucket_name: str, data: str):
"""Write JSON payload to this bucket."""
await _async_post_data(
self.api_key, self._url_for(bucket_name, WRITE_PATH), data, decode_json=True
self.api_key, self._url_for(bucket_name, DESTROY_PATH), data=None
)
def delete_key(self, bucket_name: str, key: str):
"""Delete a key in this bucket."""
_post_data(
self.api_key, self._url_for(bucket_name, DELETE_PATH), key.encode("utf-8")
def _blocking_destroy(self, *args, **kwargs):
return async_runner(self.destroy, *args, **kwargs)
async def clear(self, bucket_name: str):
"""Delete all keys in this bucket."""
await _async_post_data(
self.api_key, self._url_for(bucket_name, CLEAR_PATH), data=None
)
def private_read(self, bucket_name: str, data: bytes) -> bytes:
def _blocking_clear(self, *args, **kwargs):
return async_runner(self.clear, *args, **kwargs)
async def write(self, bucket_name: str, data_jsonable: dict[str, Optional[str]]):
"""Write JSON payload to this bucket."""
return await _async_post_data(
self.api_key,
self._url_for(bucket_name, WRITE_PATH),
data_jsonable,
compress=True,
)
def _blocking_write(self, *args, **kwargs):
async_runner(self.write, *args, **kwargs)
async def private_read(
self, bucket_name: str, queries: list[bytes]
) -> list[Optional[bytes]]:
"""Privately read data from this bucket."""
val = _post_data(self.api_key, self._url_for(bucket_name, READ_PATH), data)
return base64.b64decode(val)
async def async_private_read(self, bucket_name: str, data: bytes) -> bytes:
"""Privately read data from this bucket."""
val: bytes = await _async_post_data(
self.api_key, self._url_for(bucket_name, READ_PATH), data, decode_json=False
data_jsonable = [base64.b64encode(q).decode("utf-8") for q in queries]
r: list[str] = await _async_post_data(
self.api_key,
self._url_for(bucket_name, READ_PATH),
data_jsonable,
compress=True,
)
# AWS APIGW encodes its responses as base64
return base64.b64decode(val)
# return self.private_read(bucket_name, data)
return [base64.b64decode(v) if v is not None else None for v in r]
def _blocking_private_read(self, *args, **kwargs):
return async_runner(self.private_read, *args, **kwargs)

View File

@@ -43,11 +43,12 @@ class BlyssLib:
bytes: The generated public parameters, if requested.
"""
return blyss.generate_keys( # type: ignore
r = blyss.generate_keys(
self.inner_client,
seed_from_string(self.secret_seed),
True,
)
return bytes(r)
def get_row(self, key: str) -> int:
"""Gets the target row in the database for a given key.

View File

@@ -5,36 +5,22 @@ Abstracts functionality on an existing bucket.
from typing import Optional, Any, Union, Iterator
from . import api, serializer, seed
from . import api, seed
from .blyss_lib import BlyssLib
import json
import base64
import bz2
import time
import asyncio
def _chunk_parser(raw_data: bytes) -> Iterator[bytes]:
"""
Parse a bytestream containing an arbitrary number of length-prefixed chunks.
"""
data = memoryview(raw_data)
i = 0
num_chunks = int.from_bytes(data[:8], "little", signed=False)
i += 8
for _ in range(num_chunks):
chunk_len = int.from_bytes(data[i : i + 8], "little", signed=False)
i += 8
chunk_data = bytes(data[i : i + chunk_len])
i += chunk_len
yield chunk_data
import base64
class Bucket:
"""Interface to a single Blyss bucket."""
name: str
"""Name of the bucket. See [bucket naming rules](https://docs.blyss.dev/docs/buckets#names)."""
_public_uuid: Optional[str] = None
def __init__(self, api: api.API, name: str, secret_seed: Optional[str] = None):
"""
@private
@@ -46,24 +32,23 @@ class Bucket:
secret_seed: An optional secret seed to initialize the client with.
A random one will be generated if not supplied.
"""
self.name: str = name
"""Name of the bucket. See [bucket naming rules](https://docs.blyss.dev/docs/buckets#names)."""
self._basic_init(api, name, secret_seed)
self._metadata = self._api._blocking_meta(self.name)
self._lib = BlyssLib(
json.dumps(self._metadata["pir_scheme"]), self._secret_seed
)
def _basic_init(self, api: api.API, name: str, secret_seed: Optional[str]):
self.name: str = name
# Internal attributes
self._api = api
self._metadata = self._api.meta(self.name)
if secret_seed:
self._secret_seed = secret_seed
else:
self._secret_seed = seed.get_random_seed()
self._lib = BlyssLib(
json.dumps(self._metadata["pir_scheme"]), self._secret_seed
)
self._public_uuid: Optional[str] = None
self._exfil: Any = None # used for benchmarking
def _check(self, uuid: str) -> bool:
"""Checks if the server has the given UUID.
def _check(self) -> bool:
"""Checks if the server has this client's public params.
Args:
uuid (str): The key to check.
@@ -71,28 +56,13 @@ class Bucket:
Returns:
bool: Whether the server has the given UUID.
"""
try:
self._api.check(uuid)
return True
except api.ApiException as e:
if e.code == 404:
return False
else:
raise e
if self._public_uuid is None:
raise RuntimeError("Bucket not initialized. Call setup() first.")
return self._api._blocking_check(self._public_uuid)
async def _async_check(self, uuid: str) -> bool:
try:
await self._api.async_check(uuid)
return True
except api.ApiException as e:
if e.code == 404:
return False
else:
raise e
def _split_into_chunks(
self, kv_pairs: dict[str, bytes]
) -> list[list[dict[str, str]]]:
def _split_into_json_chunks(
self, kv_pairs: dict[str, Optional[bytes]]
) -> list[dict[str, Optional[str]]]:
_MAX_PAYLOAD = 5 * 2**20 # 5 MiB
# 1. Bin keys by row index
@@ -107,25 +77,25 @@ class Bucket:
# 2. Prepare chunks of items, where each is a JSON-ready structure.
# Each chunk is less than the maximum payload size, and guarantees
# zero overlap of rows across chunks.
kv_chunks: list[list[dict[str, str]]] = []
current_chunk: list[dict[str, str]] = []
kv_chunks: list[dict[str, Optional[str]]] = []
current_chunk: dict[str, Optional[str]] = {}
current_chunk_size = 0
sorted_indices = sorted(keys_by_index.keys())
for i in sorted_indices:
keys = keys_by_index[i]
# prepare all keys in this row
row = []
row = {}
row_size = 0
for key in keys:
value = kv_pairs[key]
value_str = base64.b64encode(value).decode("utf-8")
fmt = {
"key": key,
"value": value_str,
"content-type": "application/octet-stream",
}
row.append(fmt)
row_size += int(24 + len(key) + len(value_str) + 48)
vi = kv_pairs[key]
if vi is None:
v = None
else:
v = base64.b64encode(vi).decode("utf-8")
row[key] = v
row_size += int(
16 + len(key) + (len(v) if v is not None else 4)
) # 4 bytes for 'null'
# if the new row doesn't fit into the current chunk, start a new one
if current_chunk_size + row_size > _MAX_PAYLOAD:
@@ -133,7 +103,7 @@ class Bucket:
current_chunk = row
current_chunk_size = row_size
else:
current_chunk.extend(row)
current_chunk.update(row)
current_chunk_size += row_size
# add the last chunk
@@ -142,62 +112,26 @@ class Bucket:
return kv_chunks
def _generate_query_stream(self, keys: list[str]) -> bytes:
def _generate_query_stream(self, keys: list[str]) -> list[bytes]:
assert self._public_uuid
# generate encrypted queries
queries: list[bytes] = [
self._lib.generate_query(self._public_uuid, self._lib.get_row(k))
for k in keys
]
# interleave the queries with their lengths (uint64_t)
query_lengths = [len(q).to_bytes(8, "little") for q in queries]
lengths_and_queries = [x for lq in zip(query_lengths, queries) for x in lq]
# prepend the total number of queries (uint64_t)
lengths_and_queries.insert(0, len(queries).to_bytes(8, "little"))
# serialize the queries
multi_query = b"".join(lengths_and_queries)
return multi_query
return queries
def _unpack_query_result(
self, keys: list[str], raw_result: bytes, ignore_errors=False
) -> list[Optional[bytes]]:
retrievals = []
for key, result in zip(keys, _chunk_parser(raw_result)):
if len(result) == 0:
# error in processing this query
if ignore_errors:
extracted_result = None
else:
raise RuntimeError(f"Failed to process query for key {key}.")
else:
decrypted_result = self._lib.decode_response(result)
decompressed_result = bz2.decompress(decrypted_result)
extracted_result = self._lib.extract_result(key, decompressed_result)
retrievals.append(extracted_result)
return retrievals
def _private_read(self, keys: list[str]) -> list[Optional[bytes]]:
"""Performs the underlying private retrieval.
Args:
keys (str): A list of keys to retrieve.
Returns:
a list of values (bytes) corresponding to keys. None for keys not found.
"""
if not self._public_uuid or not self._check(self._public_uuid):
self.setup()
assert self._public_uuid
multi_query = self._generate_query_stream(keys)
start = time.perf_counter()
multi_result = self._api.private_read(self.name, multi_query)
self._exfil = time.perf_counter() - start
retrievals = self._unpack_query_result(keys, multi_result)
return retrievals
def _decode_result_row(
self, result_row: bytes, silence_errors: bool = True
) -> Optional[bytes]:
try:
decrypted_result = self._lib.decode_response(result_row)
decompressed_result = bz2.decompress(decrypted_result)
return decompressed_result
except:
if not silence_errors:
raise
return None
def setup(self):
"""Prepares this bucket client for private reads.
@@ -210,49 +144,50 @@ class Bucket:
"""
public_params = self._lib.generate_keys_with_public_params()
setup_resp = self._api.setup(self.name, bytes(public_params))
self._public_uuid = setup_resp["uuid"]
self._public_uuid = self._api._blocking_setup(self.name, public_params)
assert self._check()
def info(self) -> dict[Any, Any]:
"""Fetch this bucket's properties from the service, such as access permissions and PIR scheme parameters."""
return self._api.meta(self.name)
def list_keys(self) -> list[str]:
"""List all key strings in this bucket. Only available if bucket was created with keyStoragePolicy="full"."""
return self._api.list_keys(self.name)
return self._api._blocking_meta(self.name)
def rename(self, new_name: str):
"""Rename this bucket to new_name."""
bucket_create_req = {
"name": new_name,
}
self._api.modify(self.name, json.dumps(bucket_create_req))
self._api._blocking_modify(self.name, bucket_create_req)
self.name = new_name
def write(self, kv_pairs: dict[str, bytes]):
def write(self, kv_pairs: dict[str, Optional[bytes]]):
"""Writes the supplied key-value pair(s) into the bucket.
Args:
kv_pairs: A dictionary of key-value pairs to write into the bucket.
Keys must be UTF8 strings, and values may be arbitrary bytes.
"""
concatenated_kv_items = b""
for key, value in kv_pairs.items():
concatenated_kv_items += serializer.wrap_key_val(key.encode("utf-8"), value)
# single call to API endpoint
self._api.write(self.name, concatenated_kv_items)
kv_json = {
k: base64.b64encode(v).decode("utf-8") if v else None
for k, v in kv_pairs.items()
}
self._api._blocking_write(self.name, kv_json)
def delete_key(self, key: str):
"""Deletes a single key-value pair from the bucket.
def delete_key(self, keys: str | list[str]):
"""Deletes key-value pairs from the bucket.
Args:
key: The key to delete.
"""
self._api.delete_key(self.name, key)
if isinstance(keys, str):
keys = [keys]
# Writing None to a key is interpreted as a delete.
delete_payload = {k: None for k in keys}
self._api._blocking_write(self.name, delete_payload)
def destroy_entire_bucket(self):
"""Destroys the entire bucket. This action is permanent and irreversible."""
self._api.destroy(self.name)
self._api._blocking_destroy(self.name)
def clear_entire_bucket(self):
"""Deletes all keys in this bucket. This action is permanent and irreversible.
@@ -260,38 +195,56 @@ class Bucket:
Differs from destroy in that the bucket's metadata
(e.g. permissions, PIR scheme parameters, and clients' setup data) are preserved.
"""
self._api.clear(self.name)
self._api._blocking_clear(self.name)
def private_read(
self, keys: Union[str, list[str]]
) -> Union[Optional[bytes], list[Optional[bytes]]]:
"""Privately reads the supplied key(s) from the bucket,
and returns the corresponding value(s).
def private_read(self, keys: list[str]) -> list[Optional[bytes]]:
"""Privately reads the supplied keys from the bucket,
and returns the corresponding values.
Data will be accessed using fully homomorphic encryption, designed to
make it impossible for any entity (including the Blyss service!) to
determine which key(s) are being read.
determine which keys are being read.
Args:
keys: A key or list of keys to privately retrieve.
If a list of keys is supplied,
results will be returned in the same order.
keys: A list of keys to privately retrieve.
Results will be returned in the same order.
Returns:
For each key, the value found for the key in the bucket,
or None if the key was not found.
"""
single_query = False
if isinstance(keys, str):
keys = [keys]
single_query = True
results = [r if r is not None else None for r in self._private_read(keys)]
if single_query:
return results[0]
row_indices_per_key = [self._lib.get_row(k) for k in keys]
rows_per_result = self.private_read_row(row_indices_per_key)
results = [
self._lib.extract_result(key, row) if row else None
for key, row in zip(keys, rows_per_result)
]
return results
def private_read_row(self, row_indices: list[int]) -> list[Optional[bytes]]:
"""Direct API for private reads; fetches full bucket rows rather than individual keys.
Args:
row_indices: A list of row indices to privately retrieve.
Results will be returned in the same order.
Returns:
For each row index, the value found for the row in the bucket,
or None if the row was not found.
"""
if not self._public_uuid or not self._check():
self.setup()
assert self._public_uuid
queries = [self._lib.generate_query(self._public_uuid, i) for i in row_indices]
raw_rows_per_result = self._api._blocking_private_read(self.name, queries)
rows_per_result = [
self._decode_result_row(rr) if rr else None for rr in raw_rows_per_result
]
return rows_per_result
def private_key_intersect(self, keys: list[str]) -> list[str]:
"""Privately intersects the given set of keys with the keys in this bucket,
returning the keys that intersected. This is generally slower than a single
@@ -306,7 +259,7 @@ class Bucket:
Args:
keys: A list of keys to privately intersect with this bucket.
"""
bloom_filter = self._api.bloom(self.name)
bloom_filter = self._api._blocking_bloom(self.name)
present_keys = list(filter(bloom_filter.lookup, keys))
return present_keys
@@ -314,10 +267,55 @@ class Bucket:
class AsyncBucket(Bucket):
"""Asyncio-compatible version of Bucket."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __init__(self, api: api.API, name: str, secret_seed: Optional[str] = None):
self._basic_init(api, name, secret_seed)
async def write(self, kv_pairs: dict[str, bytes], CONCURRENCY=4):
async def async_init(self):
"""Python constructors can't be async, so instances of `AsyncBucket` must call this method after construction."""
self._metadata = await self._api.meta(self.name)
self._lib = BlyssLib(
json.dumps(self._metadata["pir_scheme"]), self._secret_seed
)
async def _check(self) -> bool:
if self._public_uuid is None:
raise RuntimeError("Bucket not initialized. Call setup() first.")
try:
await self._api.check(self._public_uuid)
return True
except api.ApiException as e:
if e.code == 404:
return False
else:
raise e
async def setup(self):
public_params = self._lib.generate_keys_with_public_params()
self._public_uuid = await self._api.setup(self.name, public_params)
assert await self._check()
async def info(self) -> dict[str, Any]:
return await self._api.meta(self.name)
async def rename(self, new_name: str):
bucket_create_req = {
"name": new_name,
}
await self._api.modify(self.name, bucket_create_req)
self.name = new_name
async def delete_key(self, keys: str | list[str]):
keys = [keys] if isinstance(keys, str) else keys
delete_payload: dict[str, Optional[str]] = {k: None for k in keys}
await self._api.write(self.name, delete_payload)
async def destroy_entire_bucket(self):
await self._api.destroy(self.name)
async def clear_entire_bucket(self):
await self._api.clear(self.name)
async def write(self, kv_pairs: dict[str, Optional[bytes]], CONCURRENCY=4):
"""
Functionally equivalent to Bucket.write.
@@ -331,28 +329,38 @@ class AsyncBucket(Bucket):
CONCURRENCY = min(CONCURRENCY, 8)
# Split the key-value pairs into chunks not exceeding max payload size.
kv_chunks = self._split_into_chunks(kv_pairs)
# kv_chunks are JSON-ready, i.e. values are base64-encoded strings.
kv_chunks = self._split_into_json_chunks(kv_pairs)
# Make one write call per chunk, while respecting a max concurrency limit.
sem = asyncio.Semaphore(CONCURRENCY)
async def _paced_writer(chunk):
async def _paced_writer(chunk: dict[str, Optional[str]]):
async with sem:
await self._api.async_write(self.name, json.dumps(chunk))
await self._api.write(self.name, chunk)
_tasks = [asyncio.create_task(_paced_writer(c)) for c in kv_chunks]
await asyncio.gather(*_tasks)
async def private_read(self, keys: list[str]) -> list[Optional[bytes]]:
if not self._public_uuid or not await self._async_check(self._public_uuid):
self.setup()
assert self._public_uuid
row_indices_per_key = [self._lib.get_row(k) for k in keys]
rows_per_result = await self.private_read_row(row_indices_per_key)
multi_query = self._generate_query_stream(keys)
results = [
self._lib.extract_result(key, row) if row else None
for key, row in zip(keys, rows_per_result)
]
start = time.perf_counter()
multi_result = await self._api.async_private_read(self.name, multi_query)
self._exfil = time.perf_counter() - start
return results
retrievals = self._unpack_query_result(keys, multi_result)
async def private_read_row(self, row_indices: list[int]) -> list[Optional[bytes]]:
if not self._public_uuid or not await self._check():
await self.setup()
assert self._public_uuid
return retrievals
queries = [self._lib.generate_query(self._public_uuid, i) for i in row_indices]
raw_rows_per_result = await self._api.private_read(self.name, queries)
rows_per_result = [
self._decode_result_row(rr) if rr else None for rr in raw_rows_per_result
]
return rows_per_result

View File

@@ -1,6 +1,5 @@
from typing import Any, Optional, Union
from . import bucket, api, seed
import json
BLYSS_BUCKET_URL = "https://beta.api.blyss.dev"
DEFAULT_BUCKET_PARAMETERS = {
@@ -15,21 +14,15 @@ ApiConfig = dict[str, str]
class BucketService:
"""A client to the hosted Blyss bucket service. Allows creation, deletion, and modification of buckets."""
def __init__(self, api_config: Union[str, ApiConfig]):
def __init__(self, api_key: str, endpoint: str = BLYSS_BUCKET_URL):
"""Initialize a client of the Blyss bucket service.
Args:
api_config: An API key string, or
a dictionary containing an API configuration.
The minimum set of keys is:
"endpoint": A fully qualified endpoint URL for the bucket service.
"api_key" : An API key to supply with every request.
"""
if isinstance(api_config, str):
api_config = {"api_key": api_config}
api_key: A valid Blyss API key.
endpoint: A fully qualified endpoint URL for the bucket service, e.g. https://beta.api.blyss.dev.
service_endpoint = api_config.get("endpoint", BLYSS_BUCKET_URL)
self._api = api.API(api_config["api_key"], service_endpoint)
"""
self._api = api.API(api_key, endpoint)
def connect(
self,
@@ -52,11 +45,18 @@ class BucketService:
b = bucket.Bucket(self._api, bucket_name, secret_seed=secret_seed)
return b
def connect_async(
self, bucket_name: str, secret_seed: Optional[str] = None
) -> bucket.AsyncBucket:
"""Returns an asynchronous client to the Blyss bucket. Identical functionality to `connect`."""
return bucket.AsyncBucket(self._api, bucket_name, secret_seed=secret_seed)
@staticmethod
def _build_create_req(
bucket_name: str, open_access: bool, usage_hints: dict[str, Any]
) -> dict[str, Any]:
parameters = {**DEFAULT_BUCKET_PARAMETERS}
parameters.update(usage_hints)
bucket_create_req = {
"name": bucket_name,
"parameters": parameters,
"open_access": open_access,
}
return bucket_create_req
def create(
self,
@@ -76,17 +76,12 @@ class BucketService:
- "keyStoragePolicy": The key storage policy to use for this bucket. Options:
- "none" (default): Stores no key-related information. This is the most performant option and will maximize write speed.
- "bloom": Enables `Bucket.private_intersect()`. Uses a bloom filter to store probabilistic information of key membership, with minimal impact on write speed.
- "full": Store all keys in full. Enables `Bucket.list_keys()`. Will result in significantly slower writes.
"""
parameters = {**DEFAULT_BUCKET_PARAMETERS}
parameters.update(usage_hints)
bucket_create_req = {
"name": bucket_name,
"parameters": json.dumps(parameters),
"open_access": open_access,
}
self._api.create(json.dumps(bucket_create_req))
bucket_create_req = self._build_create_req(
bucket_name, open_access, usage_hints
)
self._api._blocking_create(bucket_create_req)
def exists(self, name: str) -> bool:
"""Check if a bucket exists.
@@ -97,15 +92,7 @@ class BucketService:
Returns:
True if a bucket with the given name currently exists.
"""
try:
self.connect(name)
return True
except api.ApiException as e:
if e.code in [403, 404]: # Forbidden (need read permission to see metadata)
# or Not Found (bucket of this name doesn't exist)
return False
else:
raise e
return self._api._blocking_exists(name)
def list_buckets(self) -> dict[str, Any]:
"""List all buckets accessible to this API key.
@@ -114,7 +101,40 @@ class BucketService:
A dictionary of bucket metadata, keyed by bucket name.
"""
buckets = {}
for b in self._api.list_buckets()["buckets"]:
for b in self._api._blocking_list_buckets()["buckets"]:
n = b.pop("name")
buckets[n] = b
return buckets
class BucketServiceAsync(BucketService):
async def create(
self,
bucket_name: str,
open_access: bool = False,
usage_hints: dict[str, Any] = {},
):
bucket_create_req = self._build_create_req(
bucket_name, open_access, usage_hints
)
await self._api.create(bucket_create_req)
async def connect(
self,
bucket_name: str,
secret_seed: Optional[str] = None,
) -> bucket.AsyncBucket:
"""Returns an asynchronous client to the Blyss bucket. Identical functionality to `BucketService.connect`."""
b = bucket.AsyncBucket(self._api, bucket_name, secret_seed=secret_seed)
await b.async_init()
return b
async def exists(self, name: str) -> bool:
return await self._api.exists(name)
async def list_buckets(self) -> dict[str, Any]:
buckets = {}
for b in (await self._api.list_buckets())["buckets"]:
n = b.pop("name")
buckets[n] = b
return buckets

View File

@@ -6,3 +6,4 @@ from .seed import *
from .api import ApiException
Client = BucketService
AsyncClient = BucketServiceAsync

View File

@@ -19,9 +19,14 @@ def key_to_gold_value(key: str, length: int = 512) -> bytes:
def verify_read(key: str, value: bytes):
expected = key_to_gold_value(key, len(value))
try:
assert value == key_to_gold_value(key, len(value))
assert value == expected
except:
print(f"read mismatch for key {key}")
print(f"received {value.hex()[:16]}")
print(f"expected {expected.hex()[:16]}")
print(traceback.format_exc())
raise
@@ -35,75 +40,141 @@ def generateBucketName() -> str:
return f"api-tester-{tag:#0{6}x}"
async def main(endpoint: str, api_key: str):
print("Testing Blyss server at " + endpoint)
client = blyss.Client({"endpoint": endpoint, "api_key": api_key})
async def test_e2e_async(
endpoint: str, api_key: str, N: int = 4000, itemSize: int = 32
):
client = blyss.AsyncClient(api_key, endpoint)
# generate random string for bucket name
bucketName = generateBucketName()
client.create(bucketName, usage_hints={"maxItemSize": 40_000})
bucket = client.connect_async(bucketName)
print(bucket.info())
bucket_name = generateBucketName()
await client.create(bucket_name, usage_hints={"maxItemSize": 40_000})
print("Created bucket")
bucket = await client.connect(bucket_name)
print(await bucket.info())
# generate N random keys
N = 4000
itemSize = 32
localKeys = generate_keys(N, 0)
local_keys = generate_keys(N, 0)
# write all N keys
await bucket.write({k: key_to_gold_value(k, itemSize) for k in localKeys})
await bucket.write({k: key_to_gold_value(k, itemSize) for k in local_keys})
print(f"Wrote {N} keys")
# read a random key
testKey = random.choice(localKeys)
testKey = random.choice(local_keys)
value = (await bucket.private_read([testKey]))[0]
assert value is not None
verify_read(testKey, value)
print(f"Read key {testKey}, got {value.hex()[:8]}[...]")
# delete testKey from the bucket, and localData.
bucket.delete_key(testKey)
localKeys.remove(testKey)
await bucket.delete_key(testKey)
local_keys.remove(testKey)
value = (await bucket.private_read([testKey]))[0]
def _test_delete(key: str, value: Optional[bytes]):
if value is None:
print(f"Deleted key {testKey}")
print(f"Deleted key {key}")
else:
# this happens only sometimes??
print("ERROR: delete not reflected in read!")
print(f"Read deleted key {testKey} and got {value.hex()[:8]}[...]")
print(f"Read deleted key {key} and got {value.hex()[:8]}[...]")
_test_delete(testKey, value)
# clear all keys
await bucket.clear_entire_bucket()
local_keys = []
print("Cleared bucket")
# write a new set of N keys
local_keys = generate_keys(N, 2)
await bucket.write({k: key_to_gold_value(k, itemSize) for k in local_keys})
print(f"Wrote {N} keys")
# read a random key
testKey = random.choice(local_keys)
value = (await bucket.private_read([testKey]))[0]
assert value is not None
verify_read(testKey, value)
# rename the bucket
new_bucket_name = bucket_name + "-rn"
await bucket.rename(new_bucket_name)
print("Renamed bucket")
print(await bucket.info())
# read a random key
testKey = random.choice(local_keys)
value = (await bucket.private_read([testKey]))[0]
assert value is not None
verify_read(testKey, value)
print(f"Read key {testKey}")
# destroy the bucket
await bucket.destroy_entire_bucket()
print("Destroyed bucket")
def test_e2e(endpoint: str, api_key: str, N: int = 4000, itemSize: int = 32):
client = blyss.Client(api_key, endpoint)
# generate random string for bucket name
bucket_name = generateBucketName()
client.create(bucket_name, usage_hints={"maxItemSize": 40_000})
print("Created bucket")
bucket = client.connect(bucket_name)
print(bucket.info())
# generate N random keys
local_keys = generate_keys(N, 0)
# write all N keys
bucket.write({k: key_to_gold_value(k, itemSize) for k in local_keys})
print(f"Wrote {N} keys")
# read a random key
testKey = random.choice(local_keys)
value = bucket.private_read([testKey])[0]
assert value is not None
verify_read(testKey, value)
print(f"Read key {testKey}, got {value.hex()[:8]}[...]")
# delete testKey from the bucket, and localData.
bucket.delete_key(testKey)
local_keys.remove(testKey)
value = bucket.private_read([testKey])[0]
def _test_delete(key: str, value: Optional[bytes]):
if value is None:
print(f"Deleted key {key}")
else:
# this happens only sometimes??
print("ERROR: delete not reflected in read!")
print(f"Read deleted key {key} and got {value.hex()[:8]}[...]")
_test_delete(testKey, value)
# clear all keys
bucket.clear_entire_bucket()
localKeys = []
local_keys = []
print("Cleared bucket")
# write a new set of N keys
localKeys = generate_keys(N, 2)
await bucket.write({k: key_to_gold_value(k, itemSize) for k in localKeys})
local_keys = generate_keys(N, 2)
bucket.write({k: key_to_gold_value(k, itemSize) for k in local_keys})
print(f"Wrote {N} keys")
# read a random key
testKey = random.choice(localKeys)
value = (await bucket.private_read([testKey]))[0]
testKey = random.choice(local_keys)
value = bucket.private_read([testKey])[0]
assert value is not None
verify_read(testKey, value)
# test if clear took AFTER the new write
value = (await bucket.private_read([testKey]))[0]
if value is not None:
print(f"ERROR: {testKey} was not deleted or cleared!")
# rename the bucket
newBucketName = bucketName + "-rn"
bucket.rename(newBucketName)
new_bucket_name = bucket_name + "-rn"
bucket.rename(new_bucket_name)
print("Renamed bucket")
print(bucket.info())
# read a random key
testKey = random.choice(localKeys)
value = (await bucket.private_read([testKey]))[0]
testKey = random.choice(local_keys)
value = bucket.private_read([testKey])[0]
assert value is not None
verify_read(testKey, value)
print(f"Read key {testKey}")
@@ -124,8 +195,15 @@ if __name__ == "__main__":
if len(sys.argv) > 2:
print("Using api_key from command line")
api_key = sys.argv[2]
if api_key == "none":
api_key = None
print("DEBUG", api_key, endpoint)
assert endpoint is not None
assert api_key is not None
asyncio.run(main(endpoint, api_key))
print(f"testing Blyss endpoint at {endpoint}")
asyncio.run(test_e2e_async(endpoint, api_key))
print("async ✅")
test_e2e(endpoint, api_key)
print("sync ✅")

View File

@@ -12,7 +12,7 @@ const distCjs = path.resolve(dist, 'cjs');
const config = {
name: 'web',
mode: 'production',
mode: 'development',
context: path.resolve(__dirname, 'js'),
entry: {
index: './index'