commit 7fbe5cef5c8b72ace483d641aec5b177d58a45c3 Author: Vikram Saraph <93892166+vhxs@users.noreply.github.com> Date: Thu Jan 9 11:26:56 2025 -0500 Initial commit diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..bf6f532 --- /dev/null +++ b/LICENSE @@ -0,0 +1,31 @@ +(c) 2021-2023 The Johns Hopkins University Applied Physics +Laboratory LLC (JHU/APL). + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following +disclaimer in the documentation and/or other materials provided +with the distribution. + +3. Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED +OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/LICENSES/Optuna/LICENSE b/LICENSES/Optuna/LICENSE new file mode 100644 index 0000000..6bbc1aa --- /dev/null +++ b/LICENSES/Optuna/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Preferred Networks, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/LICENSES/PyTorch/LICENSE b/LICENSES/PyTorch/LICENSE new file mode 100644 index 0000000..9315c4e --- /dev/null +++ b/LICENSES/PyTorch/LICENSE @@ -0,0 +1,80 @@ +From PyTorch: + +Copyright (c) 2016- Facebook, Inc (Adam Paszke) +Copyright (c) 2014- Facebook, Inc (Soumith Chintala) +Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert) +Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu) +Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu) +Copyright (c) 2011-2013 NYU (Clement Farabet) +Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston) +Copyright (c) 2006 Idiap Research Institute (Samy Bengio) +Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz) + +From Caffe2: + +Copyright (c) 2016-present, Facebook Inc. All rights reserved. + +All contributions by Facebook: +Copyright (c) 2016 Facebook Inc. + +All contributions by Google: +Copyright (c) 2015 Google Inc. +All rights reserved. + +All contributions by Yangqing Jia: +Copyright (c) 2015 Yangqing Jia +All rights reserved. + +All contributions by Kakao Brain: +Copyright 2019-2020 Kakao Brain + +All contributions by Cruise LLC: +Copyright (c) 2022 Cruise LLC. +All rights reserved. + +All contributions by Arm: +Copyright (c) 2021, 2023-2024 Arm Limited and/or its affiliates + +All contributions from Caffe: +Copyright(c) 2013, 2014, 2015, the respective contributors +All rights reserved. + +All other contributions: +Copyright(c) 2015, 2016 the respective contributors +All rights reserved. + +Caffe2 uses a copyright model similar to Caffe: each contributor holds +copyright over their contributions to Caffe2. The project versioning records +all such contribution and copyright details. 
If a contributor wants to further +mark their specific copyright on a particular contribution, they should +indicate their copyright solely in the commit message of the change when it is +committed. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the names of Facebook, Deepmind Technologies, NYU, NEC Laboratories America + and IDIAP Research Institute nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..43cb356 --- /dev/null +++ b/README.md @@ -0,0 +1,76 @@ +# SHIELD: Secure Homomorphic Inference for Encrypted Learning on Data + +SHIELD is a library for evaluating pre-trained convolutional neural networks on homomorphically encrypted images. It includes code for training models that are suitable for homomorphic evaluation. Implemented neural network operations include convolution, average pooling, GELU, and linear layers. + +This code was used to run the experiments supporting the following paper: [High-Resolution Convolutional Neural Networks on Homomorphically Encrypted Data via Sharding Ciphertexts
](https://arxiv.org/abs/2306.09189). However, operators defined in this project are generic enough to build arbitrary convolutional neural networks as specified in the paper. + +## Requirements +This project's dependencies are managed by Poetry, so installing [Poetry](https://python-poetry.org/) is a requirement. OpenFHE Python bindings are used to interface with OpenFHE, so the wheel file for these bindings will also need to be built. See the OpenFHE Python bindings repository for further instructions. + +Once the bindings are built, ensure that the `pyproject.toml` file contains a correct path to the bindings. Then to install the Python environment for this project, run `poetry install`. For running unit tests and the small neural network as described below, 32GB of RAM is recommended. For hardware requirements needed to reproduce results for the larger ResNet architectures, see the paper for details. + +Code was developed and tested on Ubuntu 20.04. While it should run on Windows platforms as well, this has not been explicitly tested. 
+ +## Features + +SHIELD implements the following neural network operators: + +- Convolution +- Average pooling +- Batch normalization (which are fused with convolution operators for performance) +- Linear +- GELU (Gaussian Error Linear Unit, a smooth alternative to ReLU) +- Upsample + +For performance reasons, the core of these algorithms is mostly implemented in the companion OpenFHE Python bindings project (in C++), with this project providing a minimal but more user-friendly Python interface for using them. + +The following neural network architectures are implemented using homomorphic implementations of the above operators: a neural network consisting of three convolution blocks (mainly for integration testing), and variations on ResNet including ResNet9 and ResNet50. In addition, code for training models suitable for homomorphic evaluation, using these architectures is included. Training code includes kurtosis regularization required for homomorphic inference. See the referenced paper for more details on the algorithms implemented, as well as performance metrics for homomorphic inference using these neural networks. + +## Running the code + +### Unit tests + +Tests are run with `pytest`: + +``` +poetry run python palisade_he_cnn/test.py +``` + +### A small neural network + +`small_model.py` includes code defining a 3-layer convolutional neural network, as well as code to train a model, on MNIST, instantiated from this network. The training code can be run with: + +``` +poetry run python palisade_he_cnn/src/small_model.py +``` + +This will save model weights to `small_model.pt`. To run homomorphic inference with these weights, move the weights to `palisade_he_cnn/src/weights/` and then run: + +``` +poetry run python palisade_he_cnn/src/small_model_inference.py +``` + +This script builds an equivalent homomorphic architecture, extracting weights from the plaintext model, and runs inference on MNIST. It prints out inference times to the terminal. 
For convenience, example weights are already included in `palisade_he_cnn/src/weights`. + +### Larger neural networks + +Scripts to train larger models are included in `palisade_he_cnn/training`. Scripts that run inference with these models are in `palisade_he_cnn/inference`. Due to significant resources required to train and run homomorphic inference with these larger models, weights used in the paper will be added to this repository in the future. + +## Citation and Acknowledgements + +Please cite this work as follows: + +``` +@misc{maloney2024highresolutionconvolutionalneuralnetworks, + title={High-Resolution Convolutional Neural Networks on Homomorphically Encrypted Data via Sharding Ciphertexts}, + author={Vivian Maloney and Richard F. Obrecht and Vikram Saraph and Prathibha Rama and Kate Tallaksen}, + year={2024}, + eprint={2306.09189}, + archivePrefix={arXiv}, + primaryClass={cs.CR}, + url={https://arxiv.org/abs/2306.09189}, +} +``` + +In addition to the authors on the supporting manuscript (Vivian Maloney, Freddy Obrecht, Vikram Saraph, Prathibha Rama, and Kate Tallaksen), Lindsay Spriggs and Court Climer also contributed to this work by testing the software and integrating it with internal infrastructure. \ No newline at end of file diff --git a/palisade_he_cnn/inference/resnet50_cifar_inference.py b/palisade_he_cnn/inference/resnet50_cifar_inference.py new file mode 100644 index 0000000..5c38873 --- /dev/null +++ b/palisade_he_cnn/inference/resnet50_cifar_inference.py @@ -0,0 +1,132 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ + +import argparse +import copy +import json +from time import time + +import torch +import torchvision +import torchvision.transforms as transforms + +from palisade_he_cnn.src.cnn_context import create_cnn_context, TIMING_DICT +from palisade_he_cnn.src.he_cnn.utils import * +from palisade_he_cnn.src.utils import pad_conv_input_channels, PadChannel + +np.set_printoptions(formatter={'float': lambda x: "{0:0.4f}".format(x)}) + +parser = argparse.ArgumentParser() +parser.add_argument("-i", "--idx", default="0") +args = vars(parser.parse_args()) +img_idx = int(args["idx"]) + +print("img_idx", img_idx) + +# create HE cc and keys +mult_depth = 35 +scale_factor_bits = 59 +batch_size = 32 * 32 * 32 + + +# if using bootstrapping, you must increase scale_factor_bits to 59 +cc, keys = get_keys(mult_depth, scale_factor_bits, batch_size, bootstrapping=True) + + +stats = ((0.4914, 0.4822, 0.4465), # mean + (0.247, 0.243, 0.261)) # std + +transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(*stats,inplace=True), + PadChannel(npad=1), + transforms.Resize(32) + ]) +validset = torchvision.datasets.CIFAR10(root="./data", download=True, transform=transform) +validloader = torch.utils.data.DataLoader(validset, batch_size=1, shuffle=True) + +# top level model +resnet_model = torch.load("palisade_he_cnn/src/weights/resnet50_cifar_gelu_kurt.pt") +resnet_model.eval() + +print(resnet_model) + + +############################################################################## + +conv1 = resnet_model.conv1 +bn1 = resnet_model.bn1 + +padded_conv1 = pad_conv_input_channels(conv1) + +embedder = copy.deepcopy(torch.nn.Sequential(resnet_model.conv1, resnet_model.bn1, resnet_model.relu, resnet_model.maxpool)) + +for i, (padded_test_data, test_label) in enumerate(validloader): + if i == img_idx: + break + +unpadded_test_data = padded_test_data[:,:3] +ptxt_embedded = embedder(unpadded_test_data).detach().cpu() + + 
+############################################################################## + +cnn_context = create_cnn_context(padded_test_data[0], cc, keys.publicKey, verbose=True) + +start = time() + +# embedding layer +cnn_context = cnn_context.apply_conv(padded_conv1, bn1) +cnn_context = cnn_context.apply_gelu(bound=15.0) + +unencrypted = ptxt_embedded + +compare_accuracy(keys, cnn_context, unencrypted, "embedding", num_digits=7) + +############################################################################### + + +for i, layer in enumerate([resnet_model.layer1, resnet_model.layer2, resnet_model.layer3, resnet_model.layer4]): + for j, bottleneck in enumerate(layer): + + bootstrap = False if (i == 0 and j == 0) else True + name = f"bottleneck #{i+1}-{j}" + cnn_context = cnn_context.apply_bottleneck(bottleneck, bootstrap=bootstrap, bootstrap_params={"meta" : True}) + unencrypted = bottleneck(unencrypted) + compare_accuracy(keys, cnn_context, unencrypted, name, num_digits=7) + +############################################################################### + +linear = resnet_model.fc +ctxt_logits = cnn_context.apply_fused_pool_linear(linear) + +inference_time = time() - start +print(f"\nTotal Time: {inference_time:.0f} s = {inference_time / 60:.01f} min") + +flattened = torch.nn.Flatten()(resnet_model.avgpool(unencrypted)) +ptxt_logits = linear(flattened) +ptxt_logits = ptxt_logits.detach().cpu().numpy().ravel() + +decrypted_logits = cc.decrypt(keys.secretKey, ctxt_logits)[:linear.out_features] + +print(f"[+] decrypted logits = {decrypted_logits}") +print(f"[+] plaintext logits = {ptxt_logits}") + +############################################################################### + +dataset = "cifar10" +model_type = "resnet50_metaBTS" + +filename = Path("logs") / dataset / model_type / f"log_{img_idx}.json" +filename.parent.mkdir(exist_ok=True, parents=True) +data = dict(TIMING_DICT) +data["decrypted logits"] = decrypted_logits.tolist() +data["unencrypted logits"] = 
ptxt_logits.tolist() +data["inference time"] = inference_time + +# avoid double-counting the strided conv operations +data['Pool'] = data['Pool'][1:2] + +with open(filename, "w") as f: + json.dump(data, f, indent=4) + diff --git a/palisade_he_cnn/inference/resnet50_imagenet128_inference.py b/palisade_he_cnn/inference/resnet50_imagenet128_inference.py new file mode 100644 index 0000000..fce87f9 --- /dev/null +++ b/palisade_he_cnn/inference/resnet50_imagenet128_inference.py @@ -0,0 +1,159 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +# srun -p hybrid -n 128 --mem=300G --pty bash -i +# srun -p himem -n 128 --mem=300G --pty bash -i + +# export OMP_DISPLAY_ENV=TRUE +# export OMP_NUM_THREADS=32 + +import torch +import numpy as np +from time import time +import copy +import json +import argparse +from pathlib import Path + +from palisade_he_cnn.src.cnn_context import create_cnn_context, TIMING_DICT + +from torch.utils.data import DataLoader +from torchvision import transforms +from torchvision.datasets import ImageFolder + +from palisade_he_cnn.src.he_cnn.utils import compare_accuracy, get_keys +from palisade_he_cnn.src.utils import pad_conv_input_channels +from palisade_he_cnn.training.utils.utils import PadChannel + +np.set_printoptions(formatter={'float': lambda x: "{0:0.4f}".format(x)}) + +parser = argparse.ArgumentParser() +parser.add_argument("-i", "--idx", default="0") +args = vars(parser.parse_args()) +img_idx = int(args["idx"]) + +print("img_idx", img_idx) + +IMAGENET_CHANNEL_MEAN = (0.485, 0.456, 0.406) +IMAGENET_CHANNEL_STD = (0.229, 0.224, 0.225) + +stats = (IMAGENET_CHANNEL_MEAN, IMAGENET_CHANNEL_STD) + +IMAGENET_DIR = Path("/aoscluster/he-cnn/vivian/imagenet/datasets/ILSVRC/Data/CLS-LOC") +resize_size = 136 +crop_size = 128 + +transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(*stats,inplace=True), + PadChannel(npad=1), + transforms.Resize(resize_size), + 
transforms.CenterCrop(crop_size) + ]) + +validset = ImageFolder(IMAGENET_DIR / "val", transform=transform) + +validloader = DataLoader(validset, + batch_size = 1, + pin_memory = True, + num_workers = 1, + shuffle=True) + +# top level model +resnet_model = torch.load("weights/resnet50_imagenet128_gelu_kurt.pt") +resnet_model.eval() + +print(resnet_model) + + +############################################################################## + +conv1 = resnet_model.conv1 +bn1 = resnet_model.bn1 + +padded_conv1 = pad_conv_input_channels(conv1) + +embedder = copy.deepcopy(torch.nn.Sequential(resnet_model.conv1, resnet_model.bn1, resnet_model.relu, resnet_model.maxpool)) + +for i, (padded_test_data, test_label) in enumerate(validloader): + if i == img_idx: + break + +unpadded_test_data = padded_test_data[:,:3] +ptxt_embedded = embedder(unpadded_test_data).detach().cpu() + +unencrypted = ptxt_embedded + +############################################################################## + +# create HE cc and keys +mult_depth = 34 +scale_factor_bits = 59 +batch_size = 32 * 32 * 32 + +# if using bootstrapping, you must increase scale_factor_bits to 59 +cc, keys = get_keys(mult_depth, scale_factor_bits, batch_size, bootstrapping=True) + + +############################################################################## + +cnn_context = create_cnn_context(padded_test_data[0], cc, keys.publicKey, verbose=True) + +while cnn_context.shards[0].getTowersRemaining() > 18: + for i in range(cnn_context.num_shards): + cnn_context.shards[i] *= 1.0 + +start = time() + +# embedding layer +cnn_context = cnn_context.apply_conv(padded_conv1, bn1) +cnn_context = cnn_context.apply_gelu(bound=50.0, degree=200) +cnn_context = cnn_context.apply_pool(conv=True) + +compare_accuracy(keys, cnn_context, unencrypted, "embedding") + +############################################################################### + + +for i, layer in enumerate([resnet_model.layer1, resnet_model.layer2, resnet_model.layer3, 
resnet_model.layer4]): + for j, bottleneck in enumerate(layer): + + name = f"bottleneck #{i+1}-{j}" + cnn_context = cnn_context.apply_bottleneck(bottleneck, bootstrap=True, gelu_params={"bound" : 15.0, "degree": 59}) + unencrypted = bottleneck(unencrypted) + compare_accuracy(keys, cnn_context, unencrypted, name) + +############################################################################### + +linear = resnet_model.fc +ctxt_logits = cnn_context.apply_fused_pool_linear(linear) + +inference_time = time() - start +print(f"\nTotal Time: {inference_time:.0f} s = {inference_time / 60:.01f} min") + +flattened = torch.nn.Flatten()(resnet_model.avgpool(unencrypted)) +ptxt_logits = linear(flattened) +ptxt_logits = ptxt_logits.detach().cpu().numpy().ravel() + +decrypted_logits = cc.decrypt(keys.secretKey, ctxt_logits)[:linear.out_features] + +print(f"[+] decrypted logits = {decrypted_logits}") +print(f"[+] plaintext logits = {ptxt_logits}") + +############################################################################### + +dataset = "imagenet" +model_type = "resnet50_128" + +filename = Path("logs") / dataset / model_type / f"log_{img_idx}.json" +filename.parent.mkdir(exist_ok=True, parents=True) +data = dict(TIMING_DICT) +data["decrypted logits"] = decrypted_logits.tolist() +data["unencrypted logits"] = ptxt_logits.tolist() +data["inference time"] = inference_time + +# avoid double-counting the strided conv operations +data['Pool'] = data['Pool'][:1] + +with open(filename, "w") as f: + json.dump(data, f, indent=4) + diff --git a/palisade_he_cnn/inference/resnet50_imagenet256_inference.py b/palisade_he_cnn/inference/resnet50_imagenet256_inference.py new file mode 100644 index 0000000..a9192fb --- /dev/null +++ b/palisade_he_cnn/inference/resnet50_imagenet256_inference.py @@ -0,0 +1,155 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +# srun -p hybrid -n 128 --mem=300G --pty bash -i +# srun -p himem -n 128 --mem=300G --pty bash -i + +# export OMP_DISPLAY_ENV=TRUE +# export OMP_NUM_THREADS=32 + +import argparse +import copy +import json +from time import time + +import torch +from torch.utils.data import DataLoader +from torchvision import transforms +from torchvision.datasets import ImageFolder + +from palisade_he_cnn.src.cnn_context import create_cnn_context, TIMING_DICT +from palisade_he_cnn.src.cnn_context.utils import * +from palisade_he_cnn.src.he_cnn.utils import get_keys, compare_accuracy +from palisade_he_cnn.src.utils import pad_conv_input_channels +from palisade_he_cnn.training.utils.utils import PadChannel + +np.set_printoptions(formatter={'float': lambda x: "{0:0.4f}".format(x)}) + +parser = argparse.ArgumentParser() +parser.add_argument("-i", "--idx", default="0") +args = vars(parser.parse_args()) +img_idx = int(args["idx"]) + +print("img_idx", img_idx) + +IMAGENET_CHANNEL_MEAN = (0.485, 0.456, 0.406) +IMAGENET_CHANNEL_STD = (0.229, 0.224, 0.225) + +stats = (IMAGENET_CHANNEL_MEAN, IMAGENET_CHANNEL_STD) + +IMAGENET_DIR = Path("/aoscluster/he-cnn/vivian/imagenet/datasets/ILSVRC/Data/CLS-LOC") +resize_size = 264 +crop_size = 256 + +transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(*stats, inplace=True), + PadChannel(npad=1), + transforms.Resize(resize_size), + transforms.CenterCrop(crop_size) +]) + +validset = ImageFolder(IMAGENET_DIR / "val", transform=transform) + +validloader = DataLoader(validset, + batch_size=1, + pin_memory=True, + num_workers=1, + shuffle=True) + +# top level model +resnet_model = torch.load("weights/resnet50_imagenet256_gelu_kurt.pt") +resnet_model.eval() + +print(resnet_model) + +############################################################################## + +conv1 = resnet_model.conv1 +bn1 = resnet_model.bn1 + +padded_conv1 = pad_conv_input_channels(conv1) + +embedder = copy.deepcopy( + 
torch.nn.Sequential(resnet_model.conv1, resnet_model.bn1, resnet_model.relu, resnet_model.maxpool)) + +for i, (padded_test_data, test_label) in enumerate(validloader): + if i == img_idx: + break + +unpadded_test_data = padded_test_data[:, :3] +ptxt_embedded = embedder(unpadded_test_data).detach().cpu() + +unencrypted = ptxt_embedded + +############################################################################## + +# create HE cc and keys +mult_depth = 34 +scale_factor_bits = 59 +batch_size = 32 * 32 * 32 + +# if using bootstrapping, you must increase scale_factor_bits to 59 +cc, keys = get_keys(mult_depth, scale_factor_bits, batch_size, bootstrapping=True) + +############################################################################## + +cnn_context = create_cnn_context(padded_test_data[0], cc, keys.publicKey, verbose=True) + +while cnn_context.shards[0].getTowersRemaining() > 18: + for i in range(cnn_context.num_shards): + cnn_context.shards[i] *= 1.0 + +start = time() + +# embedding layer +cnn_context = cnn_context.apply_conv(padded_conv1, bn1) +cnn_context = cnn_context.apply_gelu(bound=50.0, degree=200) +cnn_context = cnn_context.apply_pool(conv=True) + +compare_accuracy(keys, cnn_context, unencrypted, "embedding") + +############################################################################### + + +for i, layer in enumerate([resnet_model.layer1, resnet_model.layer2, resnet_model.layer3, resnet_model.layer4]): + for j, bottleneck in enumerate(layer): + name = f"bottleneck #{i + 1}-{j}" + cnn_context = cnn_context.apply_bottleneck(bottleneck, bootstrap=True, + gelu_params={"bound": 15.0, "degree": 59}) + unencrypted = bottleneck(unencrypted) + compare_accuracy(keys, cnn_context, unencrypted, name) + +############################################################################### + +linear = resnet_model.fc +ctxt_logits = cnn_context.apply_fused_pool_linear(linear) + +inference_time = time() - start +print(f"\nTotal Time: {inference_time:.0f} s = 
{inference_time / 60:.01f} min") + +flattened = torch.nn.Flatten()(resnet_model.avgpool(unencrypted)) +ptxt_logits = linear(flattened) +ptxt_logits = ptxt_logits.detach().cpu().numpy().ravel() + +decrypted_logits = cc.decrypt(keys.secretKey, ctxt_logits)[:linear.out_features] + +print(f"[+] decrypted logits = {decrypted_logits}") +print(f"[+] plaintext logits = {ptxt_logits}") + +############################################################################### + +dataset = "imagenet" +model_type = "resnet50_256" + +filename = Path("logs") / dataset / model_type / f"log_{img_idx}.json" +filename.parent.mkdir(exist_ok=True, parents=True) +data = dict(TIMING_DICT) +data["decrypted logits"] = decrypted_logits.tolist() +data["unencrypted logits"] = ptxt_logits.tolist() +data["inference time"] = inference_time + +# avoid double-counting the strided conv operations +data['Pool'] = data['Pool'][:1] + +with open(filename, "w") as f: + json.dump(data, f, indent=4) diff --git a/palisade_he_cnn/inference/run_inference.sh b/palisade_he_cnn/inference/run_inference.sh new file mode 100644 index 0000000..7c21c8d --- /dev/null +++ b/palisade_he_cnn/inference/run_inference.sh @@ -0,0 +1,17 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +#!/bin/bash + +# srun -p hybrid -n 128 --mem=800G --pty bash -i + +# ./run_vivian.sh 2>&1 >> logs/imagenet/resnet50_256_log_v2.txt + +export OMP_DISPLAY_ENV=TRUE +export OMP_NUM_THREADS=64 +export OMP_PROC_BIND=TRUE + +for i in `seq 0 50` ; do + python resnet50_cifar_inference.py -i $i + # python resnet50_imagenet128_inference.py -i $i + # python resnet50_imagenet256_inference.py -i $i +done \ No newline at end of file diff --git a/palisade_he_cnn/notebooks/analyze_layers_logits.ipynb b/palisade_he_cnn/notebooks/analyze_layers_logits.ipynb new file mode 100644 index 0000000..0acbeb9 --- /dev/null +++ b/palisade_he_cnn/notebooks/analyze_layers_logits.ipynb @@ -0,0 +1,386 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 5, + "id": "4b70441e", + "metadata": {}, + "outputs": [], + "source": [ + "# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL).\n", + "\n", + "import json\n", + "import glob\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import sys\n", + "\n", + "sys.path.insert(1, '../training/')\n", + "from utils_resnetN import get_best_weights" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "bf8fa4bb", + "metadata": {}, + "outputs": [], + "source": [ + "dataset= 'CIFAR10'\n", + "model_types = ['resnet20', 'resnet32', 'resnet44', 'resnet56', 'resnet110']\n", + "#model_types = ['resnet32']" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "e315e636", + "metadata": {}, + "outputs": [], + "source": [ + "def get_relevant_data(logs):\n", + " agg_data = {\n", + " \"conv\" : [],\n", + " \"gelu\" : [],\n", + " \"bootstrapping\" : [],\n", + " \"residual\" : [],\n", + " \"pool\" : [],\n", + " \"linear\" : [],\n", + " \"total\" : []\n", + " }\n", + " resolutions = []\n", + "\n", + " for idx, log in enumerate(logs):\n", + " try:\n", + " with open(log) as f:\n", + " contents = json.load(f)\n", + " except FileNotFoundError:\n", + " print(\"%s does not exist.\" 
% log)\n", + "\n", + " # Aggregated time information\n", + " conv = sum(contents[\"Conv\"])\n", + " gelu = sum(contents[\"GELU\"])\n", + " bootstrapping = sum(contents[\"Bootstrapping\"])\n", + " residual = sum(contents[\"Residual\"])\n", + " pool = sum(contents[\"Pool\"])\n", + " linear = sum(contents[\"Linear\"])\n", + " total = conv + gelu + bootstrapping + residual + pool + linear\n", + "\n", + " pred, truth = contents['decrypted logits'], contents['unencrypted logits']\n", + " resolution = [truth[i]-pred[i] for i in range(len(truth))]\n", + "\n", + " agg_data[\"conv\"].append(conv)\n", + " agg_data[\"gelu\"].append(gelu)\n", + " agg_data[\"bootstrapping\"].append(bootstrapping)\n", + " agg_data[\"residual\"].append(residual)\n", + " agg_data[\"pool\"].append(pool)\n", + " agg_data[\"linear\"].append(linear)\n", + " agg_data[\"total\"].append(total)\n", + " resolutions.append(resolution)\n", + " return agg_data, resolutions\n", + "\n", + "def stats(data):\n", + " if len(data)==0:\n", + " print(\"This should never happen...\")\n", + " return 0,0\n", + " mean = sum(data) / len(data)\n", + " variance = sum((d - mean)**2 for d in data) / len(data)\n", + " std = variance ** 0.5\n", + " return mean, std\n", + "\n", + "def print_stats(model_type, agg_data, resolutions):\n", + " print(f'{model_type:17s} {\"Mean\":8.4s} {\"Std.\":6.7s} {\"Percentage\":7.19s}')\n", + " idx = 0\n", + " sum_percent = 0.0\n", + " \n", + " for key, val in agg_data.items():\n", + " mean, std = stats(val)\n", + " avg_total = sum(agg_data[\"total\"]) / len(agg_data[\"total\"])\n", + " percent = 100*mean/avg_total\n", + " if key!='total':\n", + " sum_percent += round(percent,1) \n", + "\n", + " print(f'{key:15s} {round(mean,0):7.3f} {std:7.3f} {round(percent,1):10.2f}')\n", + " idx+=1\n", + "\n", + " print(f'{\"rounded percent\":15s} {sum_percent:7.3f}')\n", + " resolutions = np.array(resolutions).flatten()\n", + " mean, std = stats(resolutions)\n", + " print(f'{\"logit res\":15s} {mean:7.3f} 
{std:7.3f}')\n", + " print(\"\\n\")\n", + " \n", + "def plot(data, bins, xlabel):\n", + " plt.hist(data, bins=bins, color='black', alpha=0.8)\n", + " plt.xlabel(xlabel)\n", + " plt.ylabel(\"Count\")\n", + " plt.grid(True)\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "70fdcf93", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the best model according to logs...\n", + "{'weights': 'weights/resnet20_cifar10', 'model_type': 'resnet20', 'kwargs': {'num_classes': 10, 'debug': False}, 'params': {'epochs': 100, 'batch_size': 256, 'momentum': 0.9, 'weight_decay': 0.256, 'weight_decay_bias': 0.004, 'ema_update_freq': 5, 'ema_rho': 0.9509900498999999, 'model_type': 'resnet20', 'kwargs': {'num_classes': 10, 'debug': False}}, 'run0': 0.9024000000000001, 'run1': 0.8987, 'run2': 0.9016000000000001, 'run3': 0.8998, 'run4': 0.8974000000000001, 'accuracy': [0.89998, 0.0018334666618185376]}\n", + "\n", + "Average (5 runs): 89.998% +/- 0.183%\n", + "Best (idx 0): 0.902\n", + "resnet20 Mean Std. 
Percentage\n", + "conv 242.000 0.360 37.70\n", + "gelu 59.000 1.069 9.10\n", + "bootstrapping 312.000 2.128 48.60\n", + "residual 0.000 0.009 0.00\n", + "pool 27.000 0.099 4.20\n", + "linear 2.000 0.075 0.30\n", + "total 642.000 2.750 100.00\n", + "rounded percent 99.900\n", + "logit res -0.000 0.007\n", + "\n", + "\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj4AAAGwCAYAAACpYG+ZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA8RklEQVR4nO3de1yUdf7//+eAMwMIgycUSDzmeVNbTaK20BTNds3K7aCtqWu2Fmnptpm75an8dNg2rdZsa1OzQtsOdvSQhyTL8xFRoyRLS9TUAA8Io7y/f/Rjfk6AAc4wg9fjfrvNLa/T+3pd13uGeXadxmaMMQIAALCAkEAXAAAAUF0IPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDJqBboAfysuLtb+/fsVFRUlm80W6HIAAEAFGGN07NgxxcfHKyTEd8dpLvjgs3//fiUkJAS6DAAAUAX79u1T48aNfdbeBR98oqKiJP2841wuV4CrsR63261PPvlEvXv3lt1uD3Q5lkZfBA/6InjQF8Hl7P4oKChQQkKC53vcVy744FNyesvlchF8AsDtdisiIkIul4s/KgFGXwQP+iJ40BfBpaz+8PVlKlzcDAAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALIPgAwAALKNWoAsAAKtJTk7WuHHjlJycrKKiokotu3HjRj9VBVgDR3wAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlBDT4zJw5Ux07dpTL5ZLL5VJSUpIWLVrkmd69e3fZbDav18iRIwNYMQAAqMlqBXLljRs31hNPPKFWrVrJGKNXX31V/fv315YtW9ShQwdJ0ogRIzRlyhTPMhEREYEqFwAA1HABDT79+vXzGp46dapmzpyptWvXeoJPRESEYmNjA1EeAAC4wAQ0+JztzJkzeuutt3TixAklJSV5xr/xxht6/fXXFRsbq379+umRRx4551GfwsJCFRYWeobz8/MlSW63W263238bgDKV7HP2feDRF8HD4XB4/bcy6D/f4nMRXM7uD3/1ic0YY/zScgVt375dSUlJOnXqlCIjI5WWlqbrrrtOkvTSSy+padOmio+PV0ZGhsaNG6du3brp3XffLbe9SZMmafLkyaXGp6WlcZoMAIAa4uTJkxo0aJDy8vLkcrl81m7Ag09RUZH27t2rvLw8vf322/rvf/+r9PR0tW/fvtS8K1asUM+ePbV79261bNmyzPbKOuKTkJCgw4cP+3THoWLcbreWLl2qlJQU2e32QJdjafSFbyUnJ1d5WYfDoTFjxmjatGkqKiqq1LLp6elVXi9K43MRXM7uj4KCAjVo0MDnwSfgp7ocDocuvvhiSVKXLl20YcMGPfvss/rPf/5Tat7ExERJOmfwcTqdcjqdpcbb7Xbe1AHE/g8e9I
VvVDawlNdGZduh7/yDz0VwsdvtOn36tF/aDrrn+BQXF3sdsTnb1q1bJUlxcXHVWBEAALhQBPSIz/jx49W3b181adJEx44dU1pamlauXKklS5YoOzvbc71P/fr1lZGRoTFjxujqq69Wx44dA1k2AACooQIafA4dOqQ77rhDOTk5io6OVseOHbVkyRKlpKRo3759WrZsmaZPn64TJ04oISFBAwYM0MMPPxzIkgEAQA0W0ODzyiuvlDstISGBi/gAAIBPBd01PgAAAP5C8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJYR0OAzc+ZMdezYUS6XSy6XS0lJSVq0aJFn+qlTp5Samqr69esrMjJSAwYM0MGDBwNYMQAAqMkCGnwaN26sJ554Qps2bdLGjRt1zTXXqH///tqxY4ckacyYMfrwww/11ltvKT09Xfv379dNN90UyJIBAEANViuQK+/Xr5/X8NSpUzVz5kytXbtWjRs31iuvvKK0tDRdc801kqTZs2erXbt2Wrt2rS6//PJAlAwAAGqwgAafs505c0ZvvfWWTpw4oaSkJG3atElut1u9evXyzNO2bVs1adJEa9asKTf4FBYWqrCw0DOcn58vSXK73XK73f7dCJRSss/Z94FHX/iWw+E472Wr0gb951t8LoLL2f3hrz6xGWOMX1quoO3btyspKUmnTp1SZGSk0tLSdN111yktLU3Dhg3zCjGS1K1bN/Xo0UNPPvlkme1NmjRJkydPLjU+LS1NERERftkGAADgWydPntSgQYOUl5cnl8vls3YDfsSnTZs22rp1q/Ly8vT2229ryJAhSk9Pr3J748eP19ixYz3D+fn5SkhIUO/evX2641AxbrdbS5cuVUpKiux2e6DLsTT6wreSk5OrvKzD4dCYMWM0bdo0FRUVVWrZ8/n7iNL4XASXs/ujoKDAL+sIePBxOBy6+OKLJUldunTRhg0b9Oyzz+rWW29VUVGRcnNzVadOHc/8Bw8eVGxsbLntOZ1OOZ3OUuPtdjtv6gBi/wcP+sI3KhtYymujsu3Qd/7B5yK42O12nT592i9tB91zfIqLi1VYWKguXbrIbrdr+fLlnmlZWVnau3evkpKSAlghAACoqQJ6xGf8+PHq27evmjRpomPHjiktLU0rV67UkiVLFB0dreHDh2vs2LGqV6+eXC6XRo0apaSkJO7oAgAAVRLQ4HPo0CHdcccdysnJUXR0tDp27KglS5YoJSVFkjRt2jSFhIRowIABKiwsVJ8+ffTCCy8EsmQAAFCDBTT4vPLKK+ecHhYWphkzZmjGjBnVVBEAALiQBd01PgAAAP5C8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZRK9AFAAAqrmvXrlVeduPGjT6sBKiZOOIDAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsI6DB5/HHH9dll12mqKgoNWzYUDfccIOysrK85unevbtsNpvXa+TIkQGqGAAA1GQBDT7p6elKTU3V2rVrtXTpUrndbvXu3VsnTpzwmm/EiBHKycnxvJ566qkAVQwAAGqyWoFc+eLFi7
2G58yZo4YNG2rTpk26+uqrPeMjIiIUGxtboTYLCwtVWFjoGc7Pz5ckud1uud1uH1SNyijZ5+z7wKMvfMvhcJz3sufTRlXQ96XxuQguZ/eHv/rEZowxfmm5Cnbv3q1WrVpp+/bt+s1vfiPp51NdO3bskDFGsbGx6tevnx555BFFRESU2cakSZM0efLkUuPT0tLKXQYAAASXkydPatCgQcrLy5PL5fJZu0ETfIqLi3X99dcrNzdXn3/+uWf8Sy+9pKZNmyo+Pl4ZGRkaN26cunXrpnfffbfMdso64pOQkKDDhw/7dMehYtxut5YuXaqUlBTZ7fZAl2Np9IVvJScnV3lZh8OhMWPGaNq0aSoqKvJhVeeWnp5ebeuqKfhcBJez+6OgoEANGjTwefAJ6Kmus6WmpiozM9Mr9EjSXXfd5fn3JZdcori4OPXs2VPZ2dlq2bJlqXacTqecTmep8Xa7nTd1ALH/gwd94Ru+CCxFRUXVGnzo9/LxuQgudrtdp0+f9kvbQXE7+7333quPPvpIn376qRo3bnzOeRMTEyX9fFoMAACgMgJ6xMcYo1GjRmnBggVauXKlmjdv/qvLbN26VZIUFxfn5+oAAMCFJqDBJzU1VWlpaXr//fcVFRWlAwcOSJKio6MVHh6u7OxspaWl6brrrlP9+vWVkZGhMWPG6Oqrr1bHjh0DWToAAKiBAhp8Zs6cKennO7fONnv2bA0dOlQOh0PLli3T9OnTdeLECSUkJGjAgAF6+OGHA1AtAACo6QJ+qutcEhISuAsBAAD4TFBc3AwAAFAdCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyqhR8WrRooSNHjpQan5ubqxYtWpx3UQAAAP5QpeDz7bff6syZM6XGFxYW6ocffjjvogAAAPyhVmVm/uCDDzz/XrJkiaKjoz3DZ86c0fLly9WsWTOfFQcAAOBLlQo+N9xwgyTJZrNpyJAhXtPsdruaNWumf/3rXz4rDgAAwJcqFXyKi4slSc2bN9eGDRvUoEEDvxQFAADgD5UKPiX27Nnj6zoAAAD8rkrBR5KWL1+u5cuX69ChQ54jQSVmzZp13oUBAAD4WpWCz+TJkzVlyhR17dpVcXFxstlsvq4LAADA56oUfF588UXNmTNHgwcP9nU9AAAAflOl5/gUFRXpiiuu8HUtAAAAflWl4HPnnXcqLS3N17UAAAD4VZVOdZ06dUovvfSSli1bpo4dO8put3tNf+aZZ3xSHAAAgC9VKfhkZGSoc+fOkqTMzEyvaVzoDAAAglWVgs+nn37q6zoAAAD8rkrX+PjK448/rssuu0xRUVFq2LChbrjhBmVlZXnNc+rUKaWmpqp+/fqKjIzUgAEDdPDgwQBVDAAAarIqHfHp0aPHOU9prVixokLtpKenKzU1VZdddplOnz6tv//97+rdu7d27typ2rVrS5LGjBmjjz/+WG+99Zaio6N177336qabbtIXX3xRldIBAICFVSn4lFzfU8Ltdmvr1q3KzMws9eOl57J48WKv4Tlz5qhhw4batGmTrr76auXl5emVV15RWlqarrnmGknS7Nmz1a5dO61du1aXX355qTYLCwtVWFjoGc7Pz/fU6Ha7K1wbfKNkn7PvA4++8C2Hw3Hey55PG1VB35fG5yK4nN0f/uoTmzHG+KqxSZMm6fjx43r66aertPzu3bvVqlUrbd++Xb/5zW+0YsUK9ezZUz/99JPq1Knjma9p06a6//77NWbMmDJrmDx5cqnxaWlpioiIqFJdAACgep08eVKDBg1SXl6eXC6Xz9r1afDZvXu3unXrpqNHj1Z62eLiYl1//fXKzc3V559/LunnsDJs2DCvIziS1K1bN/Xo0UNPPvlkqXbKOu
KTkJCgw4cP+3THoWLcbreWLl2qlJSUUo89QPWiL0pLTk4OyHodDofGjBmjadOmqaioqNrWm56eXm3rqin4XASXs/ujoKBADRo08HnwqfKPlJZlzZo1CgsLq9KyqampyszM9ISeqnI6nXI6naXG2+123tQBxP4PHvTF/686Q0d566/OGuj38vG5CC52u12nT5/2S9tVCj433XST17AxRjk5Odq4caMeeeSRSrd377336qOPPtJnn32mxo0be8bHxsaqqKhIubm5Xqe6Dh48qNjY2KqUDgAALKxKwSc6OtprOCQkRG3atNGUKVPUu3fvCrdjjNGoUaO0YMECrVy5Us2bN/ea3qVLF9ntdi1fvlwDBgyQJGVlZWnv3r1KSkqqSukAAMDCqhR8Zs+e7ZOVp6amKi0tTe+//76ioqJ04MABST8Hq/DwcEVHR2v48OEaO3as6tWrJ5fLpVGjRikpKanMO7oAAADO5byu8dm0aZN27dolSerQoYMuvfTSSi0/c+ZMSVL37t29xs+ePVtDhw6VJE2bNk0hISEaMGCACgsL1adPH73wwgvnUzYAALCoKgWfQ4cO6bbbbtPKlSs9197k5uaqR48emj9/vmJiYirUTkVuKAsLC9OMGTM0Y8aMqpQKAADgUaWfrBg1apSOHTumHTt26OjRozp69KgyMzOVn5+v0aNH+7pGAAAAn6jSEZ/Fixdr2bJlateunWdc+/btNWPGjEpd3AwAAFCdqnTEp7i4uMznHdjtdhUXF593UQAAAP5QpeBzzTXX6L777tP+/fs943744QeNGTNGPXv29FlxAAAAvlSl4PPvf/9b+fn5atasmVq2bKmWLVuqefPmys/P1/PPP+/rGgEAAHyiStf4JCQkaPPmzVq2bJm+/PJLSVK7du3Uq1cvnxYHAADgS5U64rNixQq1b99e+fn5stlsSklJ0ahRozRq1Chddtll6tChg1atWuWvWgEAAM5LpYLP9OnTNWLEiDJ/JTU6Olp/+ctf9Mwzz/isOAAAAF+qVPDZtm2brr322nKn9+7dW5s2bTrvogAAAPyhUsHn4MGDZd7GXqJWrVr68ccfz7soAAAAf6hU8LnooouUmZlZ7vSMjAzFxcWdd1EAAAD+UKm7uq677jo98sgjuvbaaxUWFuY1raCgQBMnTtQf/vAHnxYIAOfStWvXQJcAoAapVPB5+OGH9e6776p169a699571aZNG0nSl19+qRkzZujMmTP6xz/+4ZdCAQAAzlelgk+jRo20evVq3X333Ro/frzn19VtNpv69OmjGTNmqFGjRn4pFAAA4HxV+gGGTZs21cKFC/XTTz9p9+7dMsaoVatWqlu3rj/qAwAA8JkqPblZkurWravLLrvMl7UAAAD4VZV+qwsAAKAmIvgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLIPgAAADLCGjw+eyzz9SvXz/Fx8fLZrPpvffe85o+dOhQ2Ww2r9e1114bmGIBAECNF9Dgc+LECXXq1EkzZswod55rr71WOTk5nte8efOqsUIAAHAhqRXIlfft21d9+/Y95zxOp1OxsbHVVBEAALiQBTT4VMTKlSvVsGFD1a1bV9dcc40ee+wx1a9fv9z5CwsLVVhY6BnOz8+XJLndbrndbr/XC28l+5x9H3gXal84HI5Al1BpJTVXd+0XWt/7woX6uaipzu4Pf/WJzRhj/NJyJdlsNi1YsEA33HCDZ9z8+fMVERGh5s2bKzs7W3//+98VGRmpNWvWKDQ0tMx2Jk2apMmTJ5can5aWpoiICH+VDwAAfOjkyZMaNGiQ8vLy5HK5fNZuUAefX/rmm2/UsmVLLVu2TD179ixznrKO+CQkJOjw4cM+3XGoGLfbraVLlyolJUV2uz3Q5VjahdoXyc
nJgS6h0hwOh8aMGaNp06apqKio2tabnp5ebeuqKS7Uz0VNdXZ/FBQUqEGDBj4PPkF/qutsLVq0UIMGDbR79+5yg4/T6ZTT6Sw13m6386YOIPZ/8LjQ+qI6g4OvFRUVVWv9F1K/+9qF9rmo6ex2u06fPu2XtmvUc3y+//57HTlyRHFxcYEuBQAA1EABPeJz/Phx7d692zO8Z88ebd26VfXq1VO9evU0efJkDRgwQLGxscrOztaDDz6oiy++WH369Alg1QAAoKYKaPDZuHGjevTo4RkeO3asJGnIkCGaOXOmMjIy9Oqrryo3N1fx8fHq3bu3Hn300TJPZQEAAPyagAaf7t2761zXVi9ZsqQaqwEAABe6GnWNDwAAwPkg+AAAAMuoUbezAwCqrmvXrlVeduPGjTVuvUBZOOIDAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAsg+ADAAAso1agCwCArl27BroEABbBER8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZBB8AAGAZAQ0+n332mfr166f4+HjZbDa99957XtONMZowYYLi4uIUHh6uXr166euvvw5MsQAAoMYLaPA5ceKEOnXqpBkzZpQ5/amnntJzzz2nF198UevWrVPt2rXVp08fnTp1qporBQAAF4JagVx537591bdv3zKnGWM0ffp0Pfzww+rfv78kae7cuWrUqJHee+893XbbbdVZKgAAuAAENPicy549e3TgwAH16tXLMy46OlqJiYlas2ZNucGnsLBQhYWFnuH8/HxJktvtltvt9m/RKKVkn7PvAy+Y+8LhcAS6hGpVsr01abvP531zPtvp7/drMH8urOjs/vBXn9iMMcYvLVeSzWbTggULdMMNN0iSVq9erSuvvFL79+9XXFycZ75bbrlFNptNb775ZpntTJo0SZMnTy41Pi0tTREREX6pHQAA+NbJkyc1aNAg5eXlyeVy+azdoD3iU1Xjx4/X2LFjPcP5+flKSEhQ7969fbrjUDFut1tLly5VSkqK7HZ7oMuxtGDui+Tk5ECXUK0cDofGjBmjadOmqaioKNDlVEh6enqVlz2f/j2f9VZEMH8urOjs/igoKPDLOoI2+MTGxkqSDh486HXE5+DBg+rcuXO5yzmdTjmdzlLj7XY7b+oAYv8Hj2Dsi5ry5e9rRUVFNWbbz+c9cz7bWF3v1WD8XFiZ3W7X6dOn/dJ20D7Hp3nz5oqNjdXy5cs94/Lz87Vu3TolJSUFsDIAAFBTBfSIz/Hjx7V7927P8J49e7R161bVq1dPTZo00f3336/HHntMrVq1UvPmzfXII48oPj7ecx0QAABAZQQ0+GzcuFE9evTwDJdcmzNkyBDNmTNHDz74oE6cOKG77rpLubm5+t3vfqfFixcrLCwsUCUDAIAaLKDBp3v37jrXTWU2m01TpkzRlClTqrEqAABwoQraa3wAAAB8jeADAAAsI2hvZwcABI+uXbvWuPVu3LjRh5XgQsERHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBk8uRm4wJT3pFuHw6Fx48YpOTlZRUVFZc7Dk24BXOg44gMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyDBxgC8InyHpwIAMGEIz4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyeIAhAOCCVJGHajocDo0bN07JyckqKiryjN+4caM/S0MAccQHAABYBsEHAABYBsEHAABYBsEHAABYBsEHAABYRlAHn0
mTJslms3m92rZtG+iyAABADRX0t7N36NBBy5Yt8wzXqhX0JQMAgCAV9CmiVq1aio2NrfD8hYWFKiws9Azn5+dLktxut9xut8/rw7mV7HP2ffVxOBznHF/edOn8+ulc7cJbRfoC1aO8vuBvVmCc/Z3hrz6wGWOMX1r2gUmTJumf//ynoqOjFRYWpqSkJD3++ONq0qTJOZeZPHlyqfFpaWmKiIjwZ7kAAMBHTp48qUGDBikvL08ul8tn7QZ18Fm0aJGOHz+uNm3aKCcnR5MnT9YPP/ygzMxMRUVFlblMWUd8EhISdPjwYZ/uOFSM2+3W0qVLlZKSIrvdHuhyLCE5ObnM8Q6HQ2PGjNG0adO8nlB7tvT0dJ+vF6VVpC9QPcrri0B9Fs5nvReCs78zCgoK1KBBA58Hn6A+1dW3b1/Pvzt27KjExEQ1bdpU//vf/zR8+PAyl3E6nXI6naXG2+12vngDiP1ffX7ti7SoqKjcec6nj/gCr7xz9QWq1y/7IlCfBf5O/sxut+v06dN+aTuo7+r6pTp16qh169bavXt3oEsBAAA1UI0KPsePH1d2drbi4uICXQoAAKiBgjr4PPDAA0pPT9e3336r1atX68Ybb1RoaKgGDhwY6NIAAEANFNTX+Hz//fcaOHCgjhw5opiYGP3ud7/T2rVrFRMTE+jSAABADRTUwWf+/PmBLgEAAFxAgvpUFwAAgC8RfAAAgGUE9akuANWra9eugS4BAPyKIz4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyCD4AAMAyeIAhEIR4kCAQWDXxMxiomjdu3BiQ9VYVR3wAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBl8ABD1Ajn82Cu83m4Vk18iBkAoHwc8QEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJbBAwzPAw+3+3UOh0Pjxo1TcnKyVq9eHehyACCo8b3ifxzxAQAAlkHwAQAAlkHwAQAAlkHwAQAAlkHwAQAAllEjgs+MGTPUrFkzhYWFKTExUevXrw90SQAAoAYK+uDz5ptvauzYsZo4caI2b96sTp06qU+fPjp06FCgSwMAADVM0AefZ555RiNGjNCwYcPUvn17vfjii4qIiNCsWbMCXRoAAKhhgvoBhkVFRdq0aZPGjx/vGRcSEqJevXppzZo1ZS5TWFiowsJCz3BeXp4k6ejRo3K73T6tLyQk6HNjwIWEhOjkyZMKCQnRkSNHzqudqgrUeoPN2X1xIW1XTURfBA/64vydz9/YX3K73Tp58qSOHDmiU6dOSZKMMT5rv6TBoPXDDz8YSWb16tVe4//2t7+Zbt26lbnMxIkTjSRevHjx4sWL1wXw2rdvn0+zRVAf8amK8ePHa+zYsZ7h4uJiHT16VPXr15fNZgtgZdaUn5+vhIQE7du3Ty6XK9DlWBp9ETzoi+BBXwSXs/sjKipKx44dU3x8vE/XEdTBp0GDBgoNDdXBgwe9xh88eFCxsbFlLuN0OuV0Or3G1alTx18looJcLhd/VIIEfRE86IvgQV8El5L+iI6O9nnbQX1C0+FwqEuXLlq+fLlnXHFxsZYvX66kpKQAVgYAAGqioD7iI0ljx47VkCFD1LVrV3Xr1k3Tp0/XiRMnNGzYsECXBgAAapigDz633nqrfvzxR02YMEEHDhxQ586dtXjxYjVq1CjQpaECnE6nJk6cWOr0I6offRE86IvgQV8El+roD5sxvr5PDAAAIDgF9TU+AAAAvkTwAQAAlkHwAQAAlkHwAQAAlkHwwXk5evSobr/9drlcLtWpU0fDhw/X8ePHz7nMqVOnlJqaqvr16ysyMlIDBgzwekjltm3bNHDgQCUkJCg8PFzt2rXTs88+6+9NqfH80ReSNHr0aHXp0kVOp1OdO3f24xbUbDNmzFCzZs0UFhamxMRErV+//p
zzv/XWW2rbtq3CwsJ0ySWXaOHChV7TjTGaMGGC4uLiFB4erl69eunrr7/25yZcMHzdF++++6569+7t+QWArVu3+rH6C4sv+8LtdmvcuHG65JJLVLt2bcXHx+uOO+7Q/v37K1eUT38AA5Zz7bXXmk6dOpm1a9eaVatWmYsvvtgMHDjwnMuMHDnSJCQkmOXLl5uNGzeayy+/3FxxxRWe6a+88ooZPXq0WblypcnOzjavvfaaCQ8PN88//7y/N6dG80dfGGPMqFGjzL///W8zePBg06lTJz9uQc01f/5843A4zKxZs8yOHTvMiBEjTJ06dczBgwfLnP+LL74woaGh5qmnnjI7d+40Dz/8sLHb7Wb79u2eeZ544gkTHR1t3nvvPbNt2zZz/fXXm+bNm5uCgoLq2qwayR99MXfuXDN58mTz8ssvG0lmy5Yt1bQ1NZuv+yI3N9f06tXLvPnmm+bLL780a9asMd26dTNdunSpVF0EH1TZzp07jSSzYcMGz7hFixYZm81mfvjhhzKXyc3NNXa73bz11luecbt27TKSzJo1a8pd1z333GN69Ojhu+IvMNXRFxMnTiT4lKNbt24mNTXVM3zmzBkTHx9vHn/88TLnv+WWW8zvf/97r3GJiYnmL3/5izHGmOLiYhMbG2v++c9/eqbn5uYap9Np5s2b54ctuHD4ui/OtmfPHoJPJfizL0qsX7/eSDLfffddheviVBeqbM2aNapTp466du3qGderVy+FhIRo3bp1ZS6zadMmud1u9erVyzOubdu2atKkidasWVPuuvLy8lSvXj3fFX+Bqc6+gLeioiJt2rTJaz+GhISoV69e5e7HNWvWeM0vSX369PHMv2fPHh04cMBrnujoaCUmJtI35+CPvkDVVFdf5OXlyWazVeo3OQk+qLIDBw6oYcOGXuNq1aqlevXq6cCBA+Uu43A4Sr1JGzVqVO4yq1ev1ptvvqm77rrLJ3VfiKqrL1Da4cOHdebMmVJPkz/Xfjxw4MA55y/5b2XahH/6AlVTHX1x6tQpjRs3TgMHDqzUD8wSfFDKQw89JJvNds7Xl19+WS21ZGZmqn///po4caJ69+5dLesMJsHUFwAQLNxut2655RYZYzRz5sxKLRv0v9WF6vfXv/5VQ4cOPec8LVq0UGxsrA4dOuQ1/vTp0zp69KhiY2PLXC42NlZFRUXKzc31OtJw8ODBUsvs3LlTPXv21F133aWHH364SttS0wVLX6B8DRo0UGhoaKm74c61H2NjY885f8l/Dx48qLi4OK95uLOufP7oC1SNP/uiJPR89913WrFiRaWO9kgc8UEZYmJi1LZt23O+HA6HkpKSlJubq02bNnmWXbFihYqLi5WYmFhm2126dJHdbtfy5cs947KysrR3714lJSV5xu3YsUM9evTQkCFDNHXqVP9tbJALhr7AuTkcDnXp0sVrPxYXF2v58uXl7sekpCSv+SVp6dKlnvmbN2+u2NhYr3ny8/O1bt06+uYc/NEXqBp/9UVJ6Pn666+1bNky1a9fv/LFVfgyaKAM1157rbn00kvNunXrzOeff25atWrldQv1999/b9q0aWPWrVvnGTdy5EjTpEkTs2LFCrNx40aTlJRkkpKSPNO3b99uYmJizJ/+9CeTk5PjeR06dKhat62m8UdfGGPM119/bbZs2WL+8pe/mNatW5stW7aYLVu2mMLCwmrbtmA3f/5843Q6zZw5c8zOnTvNXXfdZerUqWMOHDhgjDFm8ODB5qGHHvLM/8UXX5hatWqZp59+2uzatctMnDixzNvZ69SpY95//32TkZFh+vfvz+3sFeCPvjhy5IjZsmWL+fjjj40kM3/+fLNlyxaTk5NT7dtXk/i6L4qKisz1119vGjdubLZu3er1/VCZv0cEH5yXI0eOmIEDB5rIyEjjcrnMsGHDzLFjxzzTS27//PTTTz3jCgoKzD333GPq1q1rIiIizI033uj1B2TixIlGUqlX06ZNq3HLah5/9I
UxxiQnJ5fZH3v27KmmLasZnn/+edOkSRPjcDhMt27dzNq1az3TkpOTzZAhQ7zm/9///mdat25tHA6H6dChg/n444+9phcXF5tHHnnENGrUyDidTtOzZ0+TlZVVHZtS4/m6L2bPnl3mZ2DixInVsDU1my/7ouRvWFmvs/+u/RqbMcZU/jgRAABAzcM1PgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPsAF4Ntvv5XNZtPWrVvLnadZs2aaPn26T9c7dOhQ3XDDDT5tE1XTvXt33X///eVOnzRpks9/4HTlypWy2WzKzc31abuSNHjwYP3f//3febdTke1+6KGHNGrUqPNeF2oGgg+qXXl/oOfMmeP1K+EXuoqElWD37LPPas6cOZ7hX/vy9ZU5c+bIZrPJZrMpNDRUdevWVWJioqZMmaK8vDy/r99Xqmt/+csVV1yhnJwcRUdHS/LdZ3jbtm1auHChRo8efd5tVcQDDzygV199Vd988021rA+BRfABKsntdge6hKARHR0dsLDqcrmUk5Oj77//XqtXr9Zdd92luXPnqnPnztq/f7/f1ltUVOS3tmsah8Oh2NhY2Ww2n7b7/PPP6+abb1ZkZGSV2zDG6PTp0xWat0GDBurTp49mzpxZ5fWh5iD4IGiVnEZ5+umnFRcXp/r16ys1NdUreBQWFuqBBx7QRRddpNq1aysxMVErV670TC/5P9AlS5aoXbt2ioyM1LXXXqucnByvdc2aNUsdOnSQ0+lUXFyc7r33Xs80m82mmTNn6vrrr1ft2rX12GOP6eKLL9bTTz/t1cbWrVtls9m0e/dur+X69u2r8PBwtWjRQm+//bZn/ubNm0uSLr30UtlsNnXv3t0z7b///a/atWunsLAwtW3bVi+88ILXutavX69LL71UYWFh6tq1q7Zs2VLp/bt37171799fkZGRcrlcuuWWW3Tw4EGveR577DE1bNhQUVFRuvPOO/XQQw95nTY4+1TX0KFDlZ6ermeffdZzNObbb7/VTz/9pNtvv10xMTEKDw9Xq1atNHv27ErX+0s2m02xsbGKi4tTu3btNHz4cK1evVrHjx/Xgw8+6JmvuLhYjz/+uJo3b67w8HB16tTJqx8kaceOHfrDH/4gl8ulqKgoXXXVVcrOzvbaxqlTpyo+Pl5t2rTRlClT9Jvf/KZUTZ07d9YjjzzitdzkyZMVExMjl8ulkSNHeoJTeftLkjIzM9W3b19FRkaqUaNGGjx4sA4fPuxZz4kTJ3THHXcoMjJScXFx+te//lXp/VdcXKwpU6aocePGcjqd6ty5sxYvXuw1z+rVq9W5c2fP++y9997zOkp59qmulStXatiwYcrLy/Nsz6RJkyRJL7zwglq1aqWwsDA1atRIf/zjH8ut68yZM3r77bfVr18/r/GvvfaaunbtqqioKMXGxmrQoEE6dOiQZ3pJLYsWLVKXLl3kdDr1+eefe6b/5z//UUJCgiIiInTLLbeUOjLYr18/zZ8/v9L7ETXQef3sKlAFycnJ5r777is1fvbs2SY6OtozPGTIEONyuczIkSPNrl27zIcffmgiIiLMSy+95JnnzjvvNFdccYX57LPPzO7du80///lP43Q6zVdffeVp0263m169epkNGzaYTZs2mXbt2plBgwZ52njhhRdMWFiYmT59usnKyjLr168306ZN80yXZBo2bGhmzZplsrOzzXfffWemTp1q2rdv71X/6NGjzdVXX+21XP369c3LL79ssrKyzMMPP2xCQ0PNzp07jTHGrF+/3kgyy5YtMzk5OebIkSPGGGNef/11ExcXZ9555x3zzTffmHfeecfUq1fPzJkzxxhjzLFjx0xMTIwZNGiQyczMNB9++KFp0aKFkWS2bNlS7n5v2rSpZ7vOnDljOnfubH73u9+ZjRs3mrVr15ouXbqY5ORkz/yvv/66CQsLM7NmzTJZWVlm8uTJxuVymU6dOnn1Uf
/+/Y0xxuTm5pqkpCQzYsQIk5OTY3Jycszp06dNamqq6dy5s9mwYYPZs2ePWbp0qfnggw/KrbMifvleOdt9991noqKizOnTp40xxjz22GOmbdu2ZvHixSY7O9vMnj3bOJ1Os3LlSmOMMd9//72pV6+euemmm8yGDRtMVlaWmTVrlvnyyy892xgZGWkGDx5sMjMzTWZmptm3b58JCQkx69ev96x38+bNxmazmezsbK/lbr31VpOZmWk++ugjExMTY/7+97+fc3/99NNPJiYmxowfP97s2rXLbN682aSkpJgePXp41nX33XebJk2amGXLlpmMjAzzhz/8wURFRZX5uSoxceJEr7575plnjMvlMvPmzTNffvmlefDBB43dbvd8dvLy8ky9evXMn/70J7Njxw6zcOFC07p1a6/32aeffmokmZ9++skUFhaa6dOnG5fL5dmeY8eOmQ0bNpjQ0FCTlpZmvv32W7N582bz7LPPllvn5s2bjSRz4MABr/GvvPKKWbhwocnOzjZr1qwxSUlJpm/fvp7pJbV07NjRfPLJJ2b37t3myJEjZuLEiaZ27drmmmuuMVu2bDHp6enm4osv9vobYIwxu3btMpLMnj17yq0NFwaCD6pdZYJP06ZNPV9gxhhz8803m1tvvdUYY8x3331nQkNDzQ8//ODVTs+ePc348eM9bUoyu3fv9kyfMWOGadSokWc4Pj7e/OMf/yi3Xknm/vvv9xr3ww8/mNDQULNu3TpjjDFFRUWmQYMGnnBSstzIkSO9lktMTDR33323McaYPXv2lBlWWrZsadLS0rzGPfrooyYpKckYY8x//vMfU79+fVNQUOCZPnPmzEoFn08++cSEhoaavXv3eqbv2LHDSPJ8mScmJprU1FSvNq688spyg48xZfdtv379zLBhw8qtqyrOFXxK9sXBgwfNqVOnTEREhFm9erXXPMOHDzcDBw40xhgzfvx407x5c1NUVFRme0OGDDGNGjUyhYWFXuP79u3r6UtjjBk1apTp3r2713L16tUzJ06c8KotMjLSnDlzxhhT9v569NFHTe/evb3G7du3z0gyWVlZ5tixY8bhcJj//e9/nulHjhwx4eHhlQo+8fHxZurUqV7zXHbZZeaee+7x1PrL99nLL79cbvAxpux+eeedd4zL5TL5+fnl1na2BQsWmNDQUFNcXHzO+TZs2GAkmWPHjnnV8t5775Xa7tDQUPP99997xi1atMiEhISYnJwcz7i8vDwjyROIceHiVBeCWocOHRQaGuoZjouL8xze3r59u86cOaPWrVsrMjLS80pPT/ecppCkiIgItWzZssw2Dh06pP3796tnz57nrKNr165ew/Hx8fr973+vWbNmSZI+/PBDFRYW6uabb/aaLykpqdTwrl27yl3PiRMnlJ2dreHDh3tt02OPPebZpl27dqljx44KCwsrdz2/ZteuXUpISFBCQoJnXPv27VWnTh1PfVlZWerWrZvXcr8croi7775b8+fPV+fOnfXggw9q9erV5c77xhtveG33qlWrKr0+Y4wkeU47njx5UikpKV7tzp0717M/t27dqquuukp2u73cNi+55BI5HA6vcSNGjNC8efN06tQpFRUVKS0tTX/+85+95unUqZMiIiI8w0lJSTp+/Lj27dtX7rq2bdumTz/91Kvetm3bSpKys7OVnZ2toqIiJSYmepapV6+e2rRpU8E9JOXn52v//v268sorvcZfeeWVXv3/y/dZVfo/JSVFTZs2VYsWLTR48GC98cYbOnnyZLnzFxQUyOl0lrpuaNOmTerXr5+aNGmiqKgoJScnS/r5lO3ZfvlZlaQmTZrooosu8gwnJSWpuLhYWVlZnnHh4eGSdM7acGGoFegCYD0ul6vMO29yc3M9d4eU+OWXkc1mU3FxsSTp+PHjCg0N1aZNm7zCkSSviyLLaqPky7Hkj92vqV27dqlxd955pwYPHqxp06Zp9uzZuvXWW72+5Kri+PHjkqSXX37Z64
tNUqltrCn69u2r7777TgsXLtTSpUvVs2dPpaamlrpGSpKuv/56r+0++8uqonbt2iWXy6X69et77tL5+OOPS7XldDolVew9UFb/9+vXT06nUwsWLJDD4ZDb7T7ntSsVdfz4cfXr109PPvlkqWlxcXGea8hqiqioKG3evFkrV67UJ598ogkTJmjSpEnasGFDmRfGN2jQQCdPnlRRUZEnbJ44cUJ9+vRRnz599MYbbygmJkZ79+5Vnz59Sl1sXlZfVcTRo0clSTExMVVaHjUHR3xQ7dq0aaPNmzeXGr9582a1bt26wu1ceumlOnPmjA4dOqSLL77Y6xUbG1uhNqKiotSsWTMtX768wustcd1116l27dqaOXOmFi9eXOr/9iVp7dq1pYbbtWsnSZ4/6mfOnPFMb9SokeLj4/XNN9+U2qaSi6HbtWunjIwMnTp1qtz1/Jp27dpp3759Xkcedu7cqdzcXLVv317Sz/20YcMGr+V+OfxLDofDa3tKxMTEaMiQIXr99dc1ffp0vfTSS2UuHxUV5bXNFQ2mJQ4dOqS0tDTdcMMNCgkJUfv27eV0OrV3795S+7PkaFfHjh21atWqSt+tV6tWLQ0ZMkSzZ8/W7Nmzddttt5Wqd9u2bSooKPAMr127VpGRkZ51l7W/fvvb32rHjh1q1qxZqZpr166tli1bym63a926dZ5lfvrpJ3311VcVrt3lcik+Pl5ffPGF1/gvvvjCq/+3b9+uwsJCz/Sq9n+tWrXUq1cvPfXUU8rIyNC3336rFStWlNlGycXzO3fu9Iz78ssvdeTIET3xxBO66qqr1LZtW68Lm3/N3r17ve70W7t2rUJCQryOkmVmZsput6tDhw4Vbhc1E8EH1e7uu+/WV199pdGjRysjI0NZWVl65plnNG/ePP31r3+tcDutW7fW7bffrjvuuEPvvvuu9uzZo/Xr1+vxxx/Xxx9/XOF2Jk2apH/961967rnn9PXXX2vz5s16/vnnf3W50NBQDR06VOPHj1erVq3KPN301ltvadasWfrqq680ceJErV+/3nPHWMOGDRUeHq7Fixfr4MGDnqNgkydP1uOPP67nnntOX331lbZv367Zs2frmWeekSQNGjRINptNI0aM0M6dO7Vw4cIyj56cS69evXTJJZfo9ttv1+bNm7V+/XrdcccdSk5O9pwqGDVqlF555RW9+uqr+vrrr/XYY48pIyPjnLcuN2vWTOvWrdO3336rw4cPq7i4WBMmTND777+v3bt3a8eOHfroo4884e98GGN04MAB5eTkaNeuXZo1a5auuOIKRUdH64knnpD0c5B64IEHNGbMGL366qvKzs729O+rr74qSbr33nuVn5+v2267TRs3btTXX3+t1157zes0SHnuvPNOrVixotzgW1RUpOHDh3v6aeLEibr33nsVEhJS7v5KTU3V0aNHNXDgQG3YsEHZ2dlasmSJhg0bpjNnzigyMlLDhw/X3/72N61YsUKZmZkaOnSop82K+tvf/qYnn3xSb775prKysvTQQw9p69atuu+++yT9/D4rLi7WXXfdpV27dmnJkiWe91l574FmzZrp+PHjWr58uQ4fPqyTJ0/qo48+0nPPPaetW7fqu+++09y5c1VcXFzuqbmYmBj99re/9bojq0mTJnI4HHr++ef1zTff6IMPPtCjjz5a4W0NCwvTkCFDtG3bNq1atUqjR4/WLbfc4vU/SKtWrdJVV11V6bCNGijA1xjBotavX29SUlJMTEyMiY6ONomJiWbBggVe8/zywlljfr5j5+w7j4qKisyECRNMs2bNjN1uN3FxcebGG280GRkZxpiyL7ZcsGCB+eVb/8UXXzRt2rTxtDFq1CjPNEmlaiuRnZ1tJJmnnnqq1DRJZsaMGSYlJcU4nU7TrFkz8+abb3rN8/LLL5uEhAQTEhLitV1vvPGG6dy5s3E4HKZu3brm6quvNu+++65n+po1a0ynTp2Mw+EwnTt3Nu+8806lLm425ueLw6
+//npTu3ZtExUVZW6++eZSd9JMmTLFNGjQwERGRpo///nPZvTo0ebyyy/3TP9lH2VlZZnLL7/chIeHe+6QefTRR027du1MeHi4qVevnunfv7/55ptvyq2zIkouWpdkbDabiY6ONt26dTNTpkwxeXl5XvMWFxeb6dOne/o3JibG9OnTx6Snp3vm2bZtm+ndu7eJiIgwUVFR5qqrrvK6O+uX78OzXXXVVaZDhw6lxpcsN2HCBFO/fn0TGRlpRowYYU6dOnXO/WWMMV999ZW58cYbTZ06dUx4eLhp27atuf/++z0X/B47dsz86U9/MhEREaZRo0bmqaeeKvemgRK/vLj5zJkzZtKkSeaiiy4ydrvddOrUySxatMhrmS+++MJ07NjROBwO06VLF5OWlmYkee54++XFzcYYM3LkSFO/fn0jyUycONGsWrXKJCcnm7p165rw8HDTsWPHUp+DX3rhhRe83mfGGJOWlmaaNWtmnE6nSUpKMh988ME5L7T+5Xa/8MILJj4+3oSFhZk//vGP5ujRo17ztWnTxsybN++cdeHCYDPm/7vYAUClrVq1Sj179tS+ffvUqFEjr2k2m00LFiy4oH7SISUlRbGxsXrttdcCXUpQMMaoVatWuueeezR27FivaUOHDlVubq7ee++9wBTnB2+88YbnWT3+PDJSUFCgNm3a6M0336z0hftVsWjRIv31r39VRkaGatXi0tcLHT0MVEFhYaF+/PFHTZo0STfffHOp0HMhOHnypF588UX16dNHoaGhmjdvnpYtW6alS5cGurSg8OOPP2r+/Pk6cOCAhg0bFuhy/GLu3Llq0aKFLrroIm3btk3jxo3TLbfc4vfTQeHh4Zo7d67XQxv96cSJE5o9ezahxyLoZaAK5s2bp+HDh6tz586aO3duoMvxC5vNpoULF2rq1Kk6deqU2rRpo3feeUe9evUKdGlBoWHDhmrQoIFeeukl1a1bN9Dl+MWBAwc0YcIEHThwQHFxcbr55ps1derUaln32U8y9zdf3I2HmoNTXQAAwDK4qwsAAFgGwQcAAFgGwQcAAFgGwQcAAFgGwQcAAFgGwQcAAFgGwQcAAFgGwQcAAFjG/wNMuM7c1OhLzAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the best model according to logs...\n", + "{'weights': 'weights/resnet32_cifar10', 'model_type': 'resnet32', 'kwargs': {'num_classes': 10, 'debug': False}, 'params': {'epochs': 100, 'batch_size': 256, 'momentum': 0.9, 'weight_decay': 0.256, 'weight_decay_bias': 0.004, 'ema_update_freq': 5, 'ema_rho': 0.9509900498999999, 'model_type': 'resnet32', 'kwargs': {'num_classes': 10, 'debug': False}}, 'run0': 0.9131, 'run1': 0.9127000000000001, 'run2': 0.9102, 'run3': 0.915, 'run4': 0.9129, 'accuracy': [0.91278, 0.001530228741071094]}\n", + "\n", + "Average (5 runs): 91.278% +/- 0.153%\n", + "Best (idx 3): 0.915\n", + "resnet32 Mean Std. Percentage\n", + "conv 365.000 0.527 39.40\n", + "gelu 82.000 0.841 8.80\n", + "bootstrapping 450.000 2.069 48.60\n", + "residual 0.000 0.008 0.00\n", + "pool 27.000 0.074 2.90\n", + "linear 2.000 0.034 0.20\n", + "total 926.000 2.625 100.00\n", + "rounded percent 99.900\n", + "logit res -0.000 0.007\n", + "\n", + "\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjIAAAGwCAYAAACzXI8XAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA7FklEQVR4nO3de1yUdf7//+eAzADCoCJyKDxUnrLU0lRqWzPxtLtm5WZpmbpmW5GmdjA+W6JWnw67m1aLtvVRzDaydTtsB7MQ07U8IR5RozRNS8HSAE8cgvf3j/0xv0ZAAWeYufBxv93mVtf7Or1eM9fA02uui7EZY4wAAAAsKMDXBQAAANQXQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFhWE18X4G0VFRU6ePCgwsPDZbPZfF0OAACoBWOMjh07pri4OAUE1HzepdEHmYMHDyo+Pt7XZQAAgHo4cOCALrzwwhrnN/ogEx4eLum/T4TT6fRxNfVXVlamTz/9VAMHDlRQUJCvy2kQ51vP9Nv4nW8902/j582ei4qKFB8f7/o9XpNGH2QqP05yOp2WDzKhoaFyOp3n1RvkfOqZfhu/861n+m38GqLns10WwsW+AADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAspr4ugCgserZs2edlrfb7Zo2bZr69u2rNWvWeKkqAGhcOCMDAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsy6dBZsaMGbLZbG6PTp06ueYXFxcrKSlJkZGRCgsL0/Dhw5Wfn+/DigEAgD/x+RmZLl266NChQ67H559/7po3ZcoUffDBB1qyZIlWrVqlgwcP6uabb/ZhtQAAwJ/4/LuWmjRpopiYmCrjhYWFmj9/vtLT03X99ddLktLS0tS5c2etW7dOffr0qXZ7JSUlKikpcU0XFRVJksrKylRWVuaFDhpGZe1W7qGurN6z3W6v1/J2u92yPdeF1V/f+jjfeqbfxs+bPdd2mzZjjPH43mtpxowZ+vOf/6yIiAgFBwcrISFBTz/9tFq3bq0VK1aof//++umnn9SsWTPXOm3atNHkyZM1ZcqUGrc5c+bMKuPp6ekKDQ31VisAAMCDTp48qVGjRqmwsFBOp7PG5Xx6RqZ3795auHChOnbsqEOHDmnmzJm69tprlZOTo7y8PNntdrcQI0nR0dHKy8urcZvJycmaOnWqa7qoqEjx8fEaOHDgGZ8If1dWVqaMjAwNGDBAQUFBvi6nQVi95759+9ZpebvdrilTpmj27NnKyMjwUlX+w+qvb32cbz3Tb+PnzZ4rP1E5G58GmSFDhrj+v2vXrurdu7fatGmjf/7znwoJCanXNh0OhxwOR5XxoKCgRnFgNZY+6sKqPZeWltZ7PSv2W19WfX3PxfnWM/02ft7oubbb8/nFvr/UrFkzdejQQbt371ZMTIxKS0tVUFDgtkx+fn6119QAAIDzj18FmePHj2vPnj2KjY1Vjx49FBQUpMzMTNf83Nxc7d+/XwkJCT6sEgAA+AuffrT00EMPaejQoWrTpo0OHjyolJQUBQYGauTIkYqIiND48eM1depUtWjRQk6nUxMnTlRCQkKNdywBAIDzi0+DzHfffaeRI0fqyJEjioqK0q9+9SutW7dOUVFRkqTZs2crICBAw4cPV0lJiQYNGqS5c+f6smQAAOBHfBpkFi9efMb5wcHBSk1NVWpqagNVBAAArMSvrpEBAACoC4IMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLJ/efg2gej179qz3uhs3bvRgJQDg3zgjAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwA
ALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALKuJrwsA/FnPnj19XQIA4Aw4IwMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACzLb4LMM888I5vNpsmTJ7vGiouLlZSUpMjISIWFhWn48OHKz8/3XZEAAMCv+EWQycrK0t///nd17drVbXzKlCn64IMPtGTJEq1atUoHDx7UzTff7KMqAQCAv/F5kDl+/Lhuv/12vfrqq2revLlrvLCwUPPnz9fzzz+v66+/Xj169FBaWprWrFmjdevW+bBiAADgL5r4uoCkpCT99re/VWJiop588knXeHZ2tsrKypSYmOga69Spk1q3bq21a9eqT58+1W6vpKREJSUlrumioiJJUllZmcrKyrzUhfdV1m7lHurKH3q22+0Nvq9z3adVjhF/eH0b2vnWM/02ft7subbb9GmQWbx4sTZt2qSsrKwq8/Ly8mS329WsWTO38ejoaOXl5dW4zaefflozZ86sMv7pp58qNDT0nGv2tYyMDF+X0OB82fO0adMafJ9Tpkw5p/WXLl3qoUoaBsd040e/jZ83ej558mStlvNZkDlw4IAeeOABZWRkKDg42GPbTU5O1tSpU13TRUVFio+P18CBA+V0Oj22n4ZWVlamjIwMDRgwQEFBQb4up0H4Q899+/ZtsH3Z7XZNmTJFs2fPVmlpab23s2rVKg9W5T3+8Po2tPOtZ/pt/LzZc+UnKmfjsyCTnZ2tw4cP68orr3SNlZeX6z//+Y/+9re/6ZNPPlFpaakKCgrczsrk5+crJiamxu06HA45HI4q40FBQY3iwGosfdSFL3s+l0BxLvs8l/1a7fjgmG786Lfx80bPtd2ez4JM//79tX37drexcePGqVOnTpo2bZri4+MVFBSkzMxMDR8+XJKUm5ur/fv3KyEhwRclAwAAP+OzIBMeHq7LLrvMbaxp06aKjIx0jY8fP15Tp05VixYt5HQ6NXHiRCUkJNR4oS8AADi/+PyupTOZPXu2AgICNHz4cJWUlGjQoEGaO3eur8sCAAB+wq+CzMqVK92mg4ODlZqaqtTUVN8UBAAA/JrP/yAeAABAfRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZfk0yMybN09du3aV0+mU0+lUQkKCPv74Y9f84uJiJSUlKTIyUmFhYRo+fLjy8/N9WDEAAPAnPg0yF154oZ555hllZ2dr48aNuv766zVs2DDt2LFDkjRlyhR98MEHWrJkiVatWqWDBw/q5ptv9mXJAADAjzTx5c6HDh3qNv3UU09p3rx5WrdunS688ELNnz9f6enpuv766yVJaWlp6ty5s9atW6c+ffr4omQAAOBHfBp
kfqm8vFxLlizRiRMnlJCQoOzsbJWVlSkxMdG1TKdOndS6dWutXbu2xiBTUlKikpIS13RRUZEkqaysTGVlZd5twosqa7dyD3XlDz3b7fYG39e57tMqx4g/vL4N7XzrmX4bP2/2XNtt2owxpq4bv+iii5SVlaXIyEi38YKCAl155ZX65ptvar2t7du3KyEhQcXFxQoLC1N6erp+85vfKD09XePGjXMLJZLUq1cv9evXT88++2y125sxY4ZmzpxZZTw9PV2hoaG1rgsAAPjOyZMnNWrUKBUWFsrpdNa4XL3OyOzbt0/l5eVVxktKSvT999/XaVsdO3bUli1bVFhYqH/9618aM2aMVq1aVZ+yJEnJycmaOnWqa7qoqEjx8fEaOHDgGZ8If1dWVqaMjAwNGDBAQUFBvi6nQXiq5759+3qwKu+x2+2aMmWKZs+erdLS0npv51zePw2JY7rx90y/jZ83e678ROVs6hRk3n//fdf/f/LJJ4qIiHBNl5eXKzMzU23btq3LJmW323XJJZdIknr06KGsrCy98MILuvXWW1VaWqqCggI1a9bMtXx+fr5iYmJq3J7D4ZDD4agyHhQU1CgOrMbSR12ca8/nEgp8obS09JxqttrxwTHd+NFv4+eNnmu7vToFmRtvvFGSZLPZNGbMmCo7bNu2rf7617/WZZNVVFRUqKSkRD169FBQUJAyMzM1fPhwSVJubq7279+vhISEc9oHAABoHOoUZCoqKiRJ7dq1U1ZWllq2bHlOO09OTtaQIUPUunVrHTt2TOnp6Vq5cqXrbM/48eM1depUtWjRQk6nUxMnTlRCQgJ3LAEAAEn1vEZm7969Htn54cOHdeedd+rQoUOKiIhQ165d9cknn2jAgAGSpNmzZysgIEDDhw9XSUmJBg0apLlz53pk3wAAwPrqfft1ZmamMjMzdfjwYdeZmkoLFiyo1Tbmz59/xvnBwcFKTU1VampqfcsEAACNWL2CzMyZMzVr1iz17NlTsbGxstlsnq4LAADgrOoVZF5++WUtXLhQo0eP9nQ9AAAAtVav71oqLS3V1Vdf7elaAAAA6qReQeauu+5Senq6p2sBAACok3p9tFRcXKxXXnlFy5cvV9euXav80Zrnn3/eI8UBAACcSb2CzLZt29S9e3dJUk5Ojts8LvwFAAANpV5B5rPPPvN0HQA8pGfPnvVed+PGjR6sBAC8r17XyAAAAPiDep2R6dev3xk/QlqxYkW9CwIAAKitegWZyutjKpWVlWnLli3Kycmp8mWSAAAA3lKvIDN79uxqx2fMmKHjx4+fU0EAAAC15dFrZO64445af88SAADAufJokFm7dq2Cg4M9uUkAAIAa1eujpZtvvtlt2hijQ4cOaePGjXr88cc9UhgAAMDZ1CvIREREuE0HBASoY8eOmjVrlgYOHOiRwgAAAM6mXkEmLS3N03UAAADUWb2CTKXs7Gzt2rVLktSlSxddccUVHikKAACgNuoVZA4fPqzbbrtNK1euVLNmzSRJBQUF6tevnxYvXqyoqChP1ggAAFCtet21NHHiRB07dkw7duzQ0aNHdfToUeXk5KioqEiTJk3ydI0AAADVqtcZmWXLlmn58uXq3Lmza+zSSy9VamoqF/sCAIAGU68zMhUVFQoKCqoyHhQUpIqKinMuCgAAoDbqFWSuv/56PfDAAzp48KBr7Pvvv9eUKVPUv39/jxUHAABwJvUKMn/7299UVFSktm3b6uKLL9bFF1+sdu3aqaioSC+99JKnawQAAKhWva6RiY+P16ZNm7R8+XJ9+eWXkqTOnTsrMTHRo8UBAACcSZ3OyKxYsUKXXnqpioqKZLPZNGDAAE2cOFETJ07UVVddpS5dumj16tXeqhUAAMBNnYLMnDlzNGHCBDmdzirzIiIi9Mc//lHPP/+8x4oDAAA4kzoFma1bt2rw4ME1zh84cKCys7PPuSgAAIDaqFOQyc/Pr/a260pNmjTRDz/8cM5FAQAA1EadgswFF1y
gnJycGudv27ZNsbGx51wUAABAbdQpyPzmN7/R448/ruLi4irzTp06pZSUFP3ud7/zWHEAAABnUqfbrx977DG988476tChg+6//3517NhRkvTll18qNTVV5eXl+tOf/uSVQgEAAE5XpyATHR2tNWvW6N5771VycrKMMZIkm82mQYMGKTU1VdHR0V4pFAAA4HR1/oN4bdq00dKlS/XTTz9p9+7dMsaoffv2at68uTfqAwAAqFG9/rKvJDVv3lxXXXWVJ2sBAACok3p91xIAAIA/IMgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADL8mmQefrpp3XVVVcpPDxcrVq10o033qjc3Fy3ZYqLi5WUlKTIyEiFhYVp+PDhys/P91HFAADAn/g0yKxatUpJSUlat26dMjIyVFZWpoEDB+rEiROuZaZMmaIPPvhAS5Ys0apVq3Tw4EHdfPPNPqwaAAD4iya+3PmyZcvcphcuXKhWrVopOztbv/71r1VYWKj58+crPT1d119/vSQpLS1NnTt31rp169SnT58q2ywpKVFJSYlruqioSJJUVlamsrIyL3bjXZW1W7mHuvJUz3a73RPleF1lnb6styGPL47pxo9+Gz9v9lzbbdqMMcbje6+n3bt3q3379tq+fbsuu+wyrVixQv3799dPP/2kZs2auZZr06aNJk+erClTplTZxowZMzRz5swq4+np6QoNDfVm+QAAwENOnjypUaNGqbCwUE6ns8blfHpG5pcqKio0efJkXXPNNbrsssskSXl5ebLb7W4hRpKio6OVl5dX7XaSk5M1depU13RRUZHi4+M1cODAMz4R/q6srEwZGRkaMGCAgoKCfF1Og/BUz3379vVgVd5jt9s1ZcoUzZ49W6WlpT6pYdWqVQ22L47pxt8z/TZ+3uy58hOVs/GbIJOUlKScnBx9/vnn57Qdh8Mhh8NRZTwoKKhRHFiNpY+6ONeefRUK6qu0tNRnNfvi2OKYbvzot/HzRs+13Z5f3H59//3368MPP9Rnn32mCy+80DUeExOj0tJSFRQUuC2fn5+vmJiYBq4SAAD4G58GGWOM7r//fr377rtasWKF2rVr5za/R48eCgoKUmZmpmssNzdX+/fvV0JCQkOXCwAA/IxPP1pKSkpSenq6/v3vfys8PNx13UtERIRCQkIUERGh8ePHa+rUqWrRooWcTqcmTpyohISEau9YAgAA5xefBpl58+ZJkq677jq38bS0NI0dO1aSNHv2bAUEBGj48OEqKSnRoEGDNHfu3AauFAAA+COfBpna3PkdHBys1NRUpaamNkBFAADASvziYl8AAID6IMgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADL8mmQ+c9//qOhQ4cqLi5ONptN7733ntt8Y4ymT5+u2NhYhYSEKDExUV9//bVvigUAAH7Hp0HmxIkT6tatm1JTU6ud/9xzz+n
FF1/Uyy+/rPXr16tp06YaNGiQiouLG7hSAADgj5r4cudDhgzRkCFDqp1njNGcOXP02GOPadiwYZKkRYsWKTo6Wu+9955uu+22hiwVAAD4IZ8GmTPZu3ev8vLylJiY6BqLiIhQ7969tXbt2hqDTElJiUpKSlzTRUVFkqSysjKVlZV5t2gvqqzdyj3Ulad6ttvtnijH6yrr9GW9DXl8cUw3fvTb+Hmz59pu02aMMR7fez3YbDa9++67uvHGGyVJa9as0TXXXKODBw8qNjbWtdyIESNks9n01ltvVbudGTNmaObMmVXG09PTFRoa6pXaAQCAZ508eVKjRo1SYWGhnE5njcv57RmZ+kpOTtbUqVNd00VFRYqPj9fAgQPP+ET4u7KyMmVkZGjAgAEKCgrydTkNwlM99+3b14NVeY/dbteUKVM0e/ZslZaW+qSGVatW1Xvduj7Pv+w3IyOj3vu1kvPtfUy/jZ83e678ROVs/DbIxMTESJLy8/Pdzsjk5+ere/fuNa7ncDjkcDiqjAcFBTWKA6ux9FEX59qzr0JBfZWWlvqsZl88z6WlpRzTjRz9Nn7e6Lm22/PbvyPTrl07xcTEKDMz0zVWVFSk9evXKyEhwYeVAQAAf+HTMzLHjx/X7t27XdN79+7Vli1b1KJFC7Vu3VqTJ0/Wk08+qfbt26tdu3Z6/PHHFRcX57qOBgAAnN98GmQ2btyofv36uaYrr20ZM2aMFi5cqEceeUQnTpzQ3XffrYKCAv3qV7/SsmXLFBwc7KuSAQCAH/FpkLnuuut0ppumbDabZs2apVmzZjVgVQAAwCr89mJf4Jd69uzp6xIAAH7Iby/2BQAAOBuCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCxuvwbgwm3uAKyGMzIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCymvi6AJw/evbsWafl7Xa7pk2bpr59+3qpIgCA1XFGBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZ3LQE4b9X1Trpf2rhxowcrAVBfnJEBAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWxe3X54BbNwHPOJf30vmGnzuAO87IAAAAyyLIAAAAyyLIAAAAyyLIAAAAyyLIAAAAyyLIAAAAy+L2a4vidlXAt+rzHrTb7Zo2bZr69u2r0tJSL1R1Zty6jZrU99ioPKZ9iTMyAADAsggyAADAsiwRZFJTU9W2bVsFBwerd+/e2rBhg69LAgAAfsDvg8xbb72lqVOnKiUlRZs2bVK3bt00aNAgHT582NelAQAAH/P7IPP8889rwoQJGjdunC699FK9/PLLCg0N1YIFC3xdGgAA8DG/vmuptLRU2dnZSk5Odo0FBAQoMTFRa9eurXadkpISlZSUuKYLCwslSUePHlVZWZlH6wsIqH8OPHLkSJ2WLysr08mTJ3XkyBEFBQWd076tIiAgQCdPnlRAQAD9NkLnW7+StXuu688sqerPrcbOyv3W93isPKa90fOxY8ckScaYMy9o/Nj3339vJJk1a9a4jT/88MOmV69e1a6TkpJiJPHgwYMHDx48GsHjwIEDZ8wKfn1Gpj6Sk5M1depU13RFRYWOHj2qyMhI2Ww2H1Z2boqKihQfH68DBw7I6XT6upwGcb71TL+N3/nWM/02ft7s2RijY8eOKS4u7ozL+XWQadmypQIDA5Wfn+82np+fr5iYmGrXcTgccjgcbmPNmjXzVokNzul0njdvkErnW8/02/idbz3Tb+PnrZ4jIiLOuoxff0hrt9vVo0cPZWZmusYqKiqUmZmphIQEH1Y
GAAD8gV+fkZGkqVOnasyYMerZs6d69eqlOXPm6MSJExo3bpyvSwMAAD7m90Hm1ltv1Q8//KDp06crLy9P3bt317JlyxQdHe3r0hqUw+FQSkpKlY/NGrPzrWf6bfzOt57pt/Hzh55txpztviYAAAD/5NfXyAAAAJwJQQYAAFgWQQYAAFgWQQYAAFgWQcaPHD16VLfffrucTqeaNWum8ePH6/jx42dcp7i4WElJSYqMjFRYWJiGDx/u9gcEt27dqpEjRyo+Pl4hISHq3LmzXnjhBW+3Uive6FeSJk2apB49esjhcKh79+5e7ODsUlNT1bZtWwUHB6t3797asGHDGZdfsmSJOnXqpODgYF1++eVaunSp23xjjKZPn67Y2FiFhIQoMTFRX3/9tTdbqBNP9/vOO+9o4MCBrr/MvWXLFi9WX3ee7LesrEzTpk3T5ZdfrqZNmyouLk533nmnDh486O026sTTr/GMGTPUqVMnNW3aVM2bN1diYqLWr1/vzRbqxNP9/tI999wjm82mOXPmeLjq+vN0v2PHjpXNZnN7DB482LNFe+RLkeARgwcPNt26dTPr1q0zq1evNpdccokZOXLkGde55557THx8vMnMzDQbN240ffr0MVdffbVr/vz5882kSZPMypUrzZ49e8zrr79uQkJCzEsvveTtds7KG/0aY8zEiRPN3/72NzN69GjTrVs3L3ZwZosXLzZ2u90sWLDA7Nixw0yYMME0a9bM5OfnV7v8F198YQIDA81zzz1ndu7caR577DETFBRktm/f7lrmmWeeMREREea9994zW7duNTfccINp166dOXXqVEO1VSNv9Lto0SIzc+ZM8+qrrxpJZvPmzQ3Uzdl5ut+CggKTmJho3nrrLfPll1+atWvXml69epkePXo0ZFtn5I3X+I033jAZGRlmz549Jicnx4wfP944nU5z+PDhhmqrRt7ot9I777xjunXrZuLi4szs2bO93EnteKPfMWPGmMGDB5tDhw65HkePHvVo3QQZP7Fz504jyWRlZbnGPv74Y2Oz2cz3339f7ToFBQUmKCjILFmyxDW2a9cuI8msXbu2xn3dd999pl+/fp4rvh4aot+UlBSfBplevXqZpKQk13R5ebmJi4szTz/9dLXLjxgxwvz2t791G+vdu7f54x//aIwxpqKiwsTExJg///nPrvkFBQXG4XCYN9980wsd1I2n+/2lvXv3+l2Q8Wa/lTZs2GAkmW+//dYzRZ+jhui5sLDQSDLLly/3TNHnwFv9fvfdd+aCCy4wOTk5pk2bNn4TZLzR75gxY8ywYcO8Um8lPlryE2vXrlWzZs3Us2dP11hiYqICAgJqPM2anZ2tsrIyJSYmusY6deqk1q1ba+3atTXuq7CwUC1atPBc8fXQkP36QmlpqbKzs91qDQgIUGJiYo21rl271m15SRo0aJBr+b179yovL89tmYiICPXu3dvn/XujX3/WUP0WFhbKZrP5xffFNUTPpaWleuWVVxQREaFu3bp5rvh68Fa/FRUVGj16tB5++GF16dLFO8XXgzdf35UrV6pVq1bq2LGj7r33Xh05csSjtRNk/EReXp5atWrlNtakSRO1aNFCeXl5Na5jt9ur/JCLjo6ucZ01a9borbfe0t133+2Ruuurofr1lR9//FHl5eVV/gL1mWrNy8s74/KV/63LNhuKN/r1Zw3Rb3FxsaZNm6aRI0f6xRcQerPnDz/8UGFhYQoODtbs2bOVkZGhli1beraBOvJWv88++6yaNGmiSZMmeb7oc+CtfgcPHqxFixYpMzNTzz77rFatWqUhQ4aovLzcY7UTZLzs0UcfrXKh0+mPL7/8skFqycnJ0bBhw5SSkqKBAwd6ZR/+1C9gVWVlZRoxYoSMMZo3b56vy/G6fv36acuWLVqzZo0GDx6sESNG6PDhw74uy+Oys7P1wgsvaOHChbLZbL4up0HcdtttuuGGG3T55Zfrxhtv1IcffqisrCytXLnSY/vw++9
asroHH3xQY8eOPeMyF110kWJiYqq8cX/++WcdPXpUMTEx1a4XExOj0tJSFRQUuJ2lyM/Pr7LOzp071b9/f91999167LHH6tVLbfhLv77WsmVLBQYGVrmj6ky1xsTEnHH5yv/m5+crNjbWbRlf353ljX79mTf7rQwx3377rVasWOEXZ2Mk7/bctGlTXXLJJbrkkkvUp08ftW/fXvPnz1dycrJnm6gDb/S7evVqHT58WK1bt3bNLy8v14MPPqg5c+Zo3759nm2iDhrqPXzRRRepZcuW2r17t/r373/uhYszMl4XFRWlTp06nfFht9uVkJCggoICZWdnu9ZdsWKFKioq1Lt372q33aNHDwUFBSkzM9M1lpubq/379yshIcE1tmPHDvXr109jxozRU0895b1m5R/9+gO73a4ePXq41VpRUaHMzMwaa01ISHBbXpIyMjJcy7dr104xMTFuyxQVFWn9+vU+798b/fozb/VbGWK+/vprLV++XJGRkd5poB4a8jWuqKhQSUnJuRd9DrzR7+jRo7Vt2zZt2bLF9YiLi9PDDz+sTz75xHvN1EJDvb7fffedjhw54vaPsXPm1UuJUSeDBw82V1xxhVm/fr35/PPPTfv27d1uR/7uu+9Mx44dzfr1611j99xzj2ndurVZsWKF2bhxo0lISDAJCQmu+du3bzdRUVHmjjvucLv9zR9ubfRGv8YY8/XXX5vNmzebP/7xj6ZDhw5m8+bNZvPmzaakpKTBejPmv7cyOhwOs3DhQrNz505z9913m2bNmpm8vDxjjDGjR482jz76qGv5L774wjRp0sT85S9/Mbt27TIpKSnV3n7drFkz8+9//9ts27bNDBs2zK9uv/Z0v0eOHDGbN282H330kZFkFi9ebDZv3mwOHTrU4P2dztP9lpaWmhtuuMFceOGFZsuWLW7v14Y+dmvi6Z6PHz9ukpOTzdq1a82+ffvMxo0bzbhx44zD4TA5OTk+6fGXvHFMn86f7lrydL/Hjh0zDz30kFm7dq3Zu3evWb58ubnyyitN+/btTXFxscfqJsj4kSNHjpiRI0easLAw43Q6zbhx48yxY8dc8ytvQf3ss89cY6dOnTL33Xefad68uQkNDTU33XST2w/5lJQUI6nKo02bNg3YWfW80a8xxvTt27fanvfu3dtAnf3/XnrpJdO6dWtjt9tNr169zLp169zqHDNmjNvy//znP02HDh2M3W43Xbp0MR999JHb/IqKCvP444+b6Oho43A4TP/+/U1ubm5DtFIrnu43LS2t2tcyJSWlAbo5O0/2W3m8V/f45XvA1zzZ86lTp8xNN91k4uLijN1uN7GxseaGG24wGzZsaKh2zsrTx/Tp/CnIGOPZfk+ePGkGDhxooqKiTFBQkGnTpo2ZMGGCKxh5is0YYzx3fgcAAKDhcI0MAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIM0Ajs27dPNptNW7ZsqXGZtm3bas6cOR7d79ixY3XjjTd6dJuon+uuu06TJ0+ucf6MGTM8/uWiK1eulM1mU0FBgUe3K/33e4n+93//95y3U5u+H330UU2cOPGc9wXfIMigwdX0A3fhwoVu32rd2NUmfPi7F154QQsXLnRNn+2XqacsXLhQNptNNptNgYGBat68uXr37q1Zs2apsLDQ6/v3lIZ6vrzl6quv1qFDhxQRESHJc+/hrVu3aunSpZo0adI5b6s2HnroIb322mv65ptvGmR/8CyCDFBHZWVlvi7Bb0RERPgsfDqdTh06dEjfffed1qxZo7vvvluLFi1S9+7ddfDgQa/tt7S01Gvbthq73a6YmBjZbDaPbvell17SLbfcorCwsHpvwxijn3/+uVbLtmzZUoMGDdK8efPqvT/4DkEGfqvyY4u//OUvio2NVWRkpJKSktyCRElJiR566CFdcMEFatq0qXr37q2VK1e65lf+C/GTTz5R586dFRYWpsG
DB+vQoUNu+1qwYIG6dOkih8Oh2NhY3X///a55NptN8+bN0w033KCmTZvqySef1CWXXKK//OUvbtvYsmWLbDabdu/e7bbekCFDFBISoosuukj/+te/XMu3a9dOknTFFVfIZrPpuuuuc837v//7P3Xu3FnBwcHq1KmT5s6d67avDRs26IorrlBwcLB69uypzZs31/n53b9/v4YNG6awsDA5nU6NGDFC+fn5bss8+eSTatWqlcLDw3XXXXfp0UcfdTtN/8uPlsaOHatVq1bphRdecJ0t2bdvn3766SfdfvvtioqKUkhIiNq3b6+0tLQ613s6m82mmJgYxcbGqnPnzho/frzWrFmj48eP65FHHnEtV1FRoaefflrt2rVTSEiIunXr5vY6SNKOHTv0u9/9Tk6nU+Hh4br22mu1Z88etx6feuopxcXFqWPHjpo1a5Yuu+yyKjV1795djz/+uNt6M2fOVFRUlJxOp+655x5XEKrp+ZKknJwcDRkyRGFhYYqOjtbo0aP1448/uvZz4sQJ3XnnnQoLC1NsbKz++te/1vn5q6io0KxZs3ThhRfK4XCoe/fuWrZsmdsya9asUffu3V3H2Xvvved2FvGXHy2tXLlS48aNU2FhoaufGTNmSJLmzp2r9u3bKzg4WNHR0fr9739fY13l5eX617/+paFDh7qNv/766+rZs6fCw8MVExOjUaNG6fDhw675lbV8/PHH6tGjhxwOhz7//HPX/L///e+Kj49XaGioRowYUeXM3dChQ7V48eI6P4/wAx79CkqgFvr27WseeOCBKuNpaWkmIiLCNT1mzBjjdDrNPffcY3bt2mU++OADExoaal555RXXMnfddZe5+uqrzX/+8x+ze/du8+c//9k4HA7z1VdfubYZFBRkEhMTTVZWlsnOzjadO3c2o0aNcm1j7ty5Jjg42MyZM8fk5uaaDRs2uH0brSTTqlUrs2DBArNnzx7z7bffmqeeespceumlbvVPmjTJ/PrXv3ZbLzIy0rz66qsmNzfXPPbYYyYwMNDs3LnTGGPMhg0bjCSzfPlyc+jQIXPkyBFjjDH/+Mc/TGxsrHn77bfNN998Y95++23TokULs3DhQmOMMceOHTNRUVFm1KhRJicnx3zwwQfmoosuMpLM5s2ba3zef/ktu+Xl5aZ79+7mV7/6ldm4caNZt26d6dGjh+nbt69r+X/84x8mODjYLFiwwOTm5pqZM2cap9NpunXr5vYaDRs2zBhjTEFBgUlISDATJkwwhw4dMocOHTI///yzSUpKMt27dzdZWVlm7969JiMjw7z//vs11lkbpx8rv/TAAw+Y8PBw8/PPPxtjjHnyySdNp06dzLJly8yePXtMWlqacTgcZuXKlcYYY7777jvTokULc/PNN5usrCyTm5trFixYYL788ktXj2FhYWb06NEmJyfH5OTkmAMHDpiAgAC3b2netGmTsdlsZs+ePW7r3XrrrSYnJ8d8+OGHJioqyvzP//zPGZ+vn376yURFRZnk5GSza9cus2nTJjNgwADTr18/177uvfde07p1a7N8+XKzbds287vf/c6Eh4dX+76qlJKS4vbaPf/888bpdJo333zTfPnll+aRRx4xQUFBrvdOYWGhadGihbnjjjvMjh07zNKlS02HDh3cjrPPPvvMSDI//fSTKSkpMXPmzDFOp9PVz7Fjx0xWVpYJDAw06enpZt++fWbTpk3mhRdeqLHOTZs2GUlVviF5/vz5ZunSpWbPnj1m7dq1JiEhwQwZMsQ1v7KWrl27mk8//dTs3r3bHDlyxKSkpJimTZua66+/3mzevNmsWrXKXHLJJW4/A4wxZteuXUaS2bt3b421wT8RZNDg6hJk2rRp4/qFZIwxt9xyi7n11luNMcZ8++23JjAw0Hz//fdu2+nfv79JTk52bVOS2b17t2t+amqqiY6Odk3HxcWZP/3pTzXWK8lMnjzZbez77783gYGBZv369cYYY0pLS03Lli1dYaNyvXv
uucdtvd69e5t7773XGGPM3r17qw0fF198sUlPT3cbe+KJJ0xCQoIxxpi///3vJjIy0pw6dco1f968eXUKMp9++qkJDAw0+/fvd83fsWOHkeT65dy7d2+TlJTkto1rrrmmxiBjTPWv7dChQ824ceNqrKs+zhRkKp+L/Px8U1xcbEJDQ82aNWvclhk/frwZOXKkMcaY5ORk065dO1NaWlrt9saMGWOio6NNSUmJ2/iQIUNcr6UxxkycONFcd911buu1aNHCnDhxwq22sLAwU15eboyp/vl64oknzMCBA93GDhw4YCSZ3Nxcc+zYMWO3280///lP1/wjR46YkJCQOgWZuLg489RTT7ktc9VVV5n77rvPVevpx9mrr75aY5AxpvrX5e233zZOp9MUFRXVWNsvvfvuuyYwMNBUVFSccbmsrCwjyRw7dsytlvfee69K34GBgea7775zjX388ccmICDAHDp0yDVWWFhoJLkCLqyDj5bg17p06aLAwEDXdGxsrOt08vbt21VeXq4OHTooLCzM9Vi1apXrYwFJCg0N1cUXX1ztNg4fPqyDBw+qf//+Z6yjZ8+ebtNxcXH67W9/qwULFkiSPvjgA5WUlOiWW25xWy4hIaHK9K5du2rcz4kTJ7Rnzx6NHz/eracnn3zS1dOuXbvUtWtXBQcH17ifs9m1a5fi4+MVHx/vGrv00kvVrFkzV325ubnq1auX23qnT9fGvffeq8WLF6t79+565JFHtGbNmhqXfeONN9z6Xr16dZ33Z4yRJNfHfCdPntSAAQPctrto0SLX87llyxZde+21CgoKqnGbl19+uex2u9vYhAkT9Oabb6q4uFilpaVKT0/XH/7wB7dlunXrptDQUNd0QkKCjh8/rgMHDtS4r61bt+qzzz5zq7dTp06SpD179mjPnj0qLS1V7969Xeu0aNFCHTt2rOUzJBUVFengwYO65ppr3MavueYat9f/9OOsPq//gAED1KZNG1100UUaPXq03njjDZ08ebLG5U+dOiWHw1Hlupvs7GwNHTpUrVu3Vnh4uPr27Svpvx+R/tLp71VJat26tS644ALXdEJCgioqKpSbm+saCwkJkaQz1gb/1MTXBeD843Q6q72zpKCgwHX3Q6XTf7nYbDZVVFRIko4fP67AwEBlZ2e7hR1JbhcJVreNyl92lT+8zqZp06ZVxu666y6NHj1as2fPVlpamm699Va3X1r1cfz4cUnSq6++6vaLSlKVHq1iyJAh+vbbb7V06VJlZGSof//+SkpKqnKNkSTdcMMNbn3/8pdPbe3atUtOp1ORkZGuu1A++uijKttyOBySancMVPf6Dx06VA6HQ++++67sdrvKysrOeO1HbR0/flxDhw7Vs88+W2VebGys6xosqwgPD9emTZu0cuVKffrpp5o+fbpmzJihrKysai8Ub9mypU6ePKnS0lJXeDxx4oQGDRqkQYMG6Y033lBUVJT279+vQYMGVbn4urrXqjaOHj0qSYqKiqrX+vAdzsigwXXs2FGbNm2qMr5p0yZ16NCh1tu54oorVF5ersOHD+uSSy5xe8TExNRqG+Hh4Wrbtq0yMzNrvd9Kv/nNb9S0aVPNmzdPy5Ytq/KvcUlat25dlenOnTtLkuuHdHl5uWt+dHS04uLi9M0331TpqfLi4M6dO2vbtm0qLi6ucT9n07lzZx04cMDtzMDOnTtVUFCgSy+9VNJ/X6esrCy39U6fPp3dbnfrp1JUVJTGjBmjf/zjH5ozZ45eeeWVatcPDw9367m2QbPS4cOHlZ6erhtvvFEBAQG69NJL5XA4tH///irPZ+XZqK5du2r16tV1vhutSZMmGjNmjNLS0pSWlqbbbrutSr1bt27VqVOnXNPr1q1TWFiYa9/VPV9XXnmlduzYobZt21apuWnTprr44osVFBSk9evXu9b56aef9NVXX9W6dqfTqbi4OH3xxRdu41988YXb6799+3aVlJS45tf39W/SpIkSExP13HPPadu
2bdq3b59WrFhR7TYqLybfuXOna+zLL7/UkSNH9Mwzz+jaa69Vp06d3C70PZv9+/e73cm2bt06BQQEuJ3FysnJUVBQkLp06VLr7cI/EGTQ4O6991599dVXmjRpkrZt26bc3Fw9//zzevPNN/Xggw/WejsdOnTQ7bffrjvvvFPvvPOO9u7dqw0bNujpp5/WRx99VOvtzJgxQ3/961/14osv6uuvv9amTZv00ksvnXW9wMBAjR07VsnJyWrfvn21H+8sWbJECxYs0FdffaWUlBRt2LDBdUdUq1atFBISomXLlik/P991lmrmzJl6+umn9eKLL+qrr77S9u3blZaWpueff16SNGrUKNlsNk2YMEE7d+7U0qVLqz27cSaJiYm6/PLLdfvtt2vTpk3asGGD7rzzTvXt29d1an7ixImaP3++XnvtNX399dd68skntW3btjPeatu2bVutX79e+/bt048//qiKigpNnz5d//73v7V7927t2LFDH374oSvMnQtjjPLy8nTo0CHt2rVLCxYs0NVXX62IiAg988wzkv4bjB566CFNmTJFr732mvbs2eN6fV977TVJ0v3336+ioiLddttt2rhxo77++mu9/vrrbh871OSuu+7SihUragyypaWlGj9+vOt1SklJ0f3336+AgIAan6+kpCQdPXpUI0eOVFZWlvbs2aNPPvlE48aNU3l5ucLCwjR+/Hg9/PDDWrFihXJycjR27FjXNmvr4Ycf1rPPPqu33npLubm5evTRR7VlyxY98MADkv57nFVUVOjuu+/Wrl279Mknn7iOs5qOgbZt2+r48ePKzMzUjz/+qJMnT+rDDz/Uiy++qC1btujbb7/VokWLVFFRUeNHYVFRUbryyivd7jhq3bq17Ha7XnrpJX3zzTd6//339cQTT9S61+DgYI0ZM0Zbt27V6tWrNWnSJI0YMcLtHzyrV6/WtddeW+fwDD/g42t0cJ7asGGDGTBggImKijIRERGmd+/e5t1333Vb5vQLSY357x0pv7yzprS01EyfPt20bdvWBAUFmdjYWHPTTTeZbdu2GWOqv/jw3XffNacf+i+//LLp2LGjaxsTJ050zZNUpbZKe/bsMZLMc889V2WeJJOammoGDBhgHA6Hadu2rXnrrbfclnn11VdNfHy8CQgIcOvrjTfeMN27dzd2u900b97c/PrXvzbvvPOOa/7atWtNt27djN1uN927dzdvv/12nS72Nea/F0vfcMMNpmnTpiY8PNzccsstVe4UmTVrlmnZsqUJCwszf/jDH8ykSZNMnz59XPNPf41yc3NNnz59TEhIiOsOkCeeeMJ07tzZhISEmBYtWphhw4aZb775psY6a6PyIm5JxmazmYiICNOrVy8za9YsU1hY6LZsRUWFmTNnjuv1jYqKMoMGDTKrVq1yLbN161YzcOBAExoaasLDw821117rdvfR6cfhL1177bWmS5cuVcYr15s+fbqJjIw0YWFhZsKECaa4uPiMz5cxxnz11VfmpptuMs2aNTMhISGmU6dOZvLkya4LYI8dO2buuOMOExoaaqKjo81zzz1X40X0lU6/2Le8vNzMmDHDXHDBBSYoKMh069bNfPzxx27rfPHFF6Zr167GbrebHj16mPT0dCPJdUfX6Rf7GmPMPffcYyIjI40kk5KSYlavXm369u1rmjdvbkJCQkzXrl2rvA9ON3fuXLfjzBhj0tPTTdu2bY3D4TAJCQnm/fffP+OFx6f3PXfuXBMXF2eCg4PN73//e3P06FG35Tp27GjefPPNM9YF/2Qz5v+7WABAna1evVr9+/fXgQMHFB0d7TbPZrPp3XffbVR/wn/AgAGKiYnR66+/7utS/IIxRu3bt9d9992nqVOnus0bO3asCgoK9N577/mmOC944403XH8rxptnLk6dOqWOHTvqrbfeqvOF7PXx8ccf68EHH9S2bdvUpAmXjloNrxhQDyUlJfrhhx80Y8YM3XLLLVVCTGNw8uRJvfzyyxo
0aJACAwP15ptvavny5crIyPB1aX7hhx9+0OLFi5WXl6dx48b5uhyvWLRokS666CJdcMEF2rp1q6ZNm6YRI0Z4/eOXkJAQLVq0yO2PAHrTiRMnlJaWRoixKF41oB7efPNNjR8/Xt27d9eiRYt8XY5X2Gw2LV26VE899ZSKi4vVsWNHvf3220pMTPR1aX6hVatWatmypV555RU1b97c1+V4RV5enqZPn668vDzFxsbqlltu0VNPPdUg+/7lX7r2Nk/cbQbf4aMlAABgWdy1BAAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALOv/AfqOQB7jx4nmAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the best model according to logs...\n", + "{'weights': 'weights/resnet44_cifar10', 'model_type': 'resnet44', 'kwargs': {'num_classes': 10, 'debug': False}, 'params': {'epochs': 100, 'batch_size': 256, 'momentum': 0.9, 'weight_decay': 0.256, 'weight_decay_bias': 0.004, 'ema_update_freq': 5, 'ema_rho': 0.9509900498999999, 'model_type': 'resnet44', 'kwargs': {'num_classes': 10, 'debug': False}}, 'run0': 0.9196000000000001, 'run1': 0.9184, 'run2': 0.9194, 'run3': 0.9196000000000001, 'run4': 0.918, 'accuracy': [0.9189999999999999, 0.0006693280212272797]}\n", + "\n", + "Average (5 runs): 91.900% +/- 0.067%\n", + "Best (idx 0): 0.920\n", + "resnet44 Mean Std. Percentage\n", + "conv 487.000 0.724 40.10\n", + "gelu 106.000 1.614 8.70\n", + "bootstrapping 592.000 3.241 48.70\n", + "residual 0.000 0.012 0.00\n", + "pool 27.000 0.085 2.20\n", + "linear 2.000 0.083 0.20\n", + "total 1215.000 3.899 100.00\n", + "rounded percent 99.900\n", + "logit res 0.000 0.007\n", + "\n", + "\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjIAAAGwCAYAAACzXI8XAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA/vElEQVR4nO3de1yUZf7/8feAwwDC4JlDQVp5IDez1TQ6rJUg65bZ5mppBzPTrVALtpO/zWO2pltqtahbX8WsMNdKN0stw6SDeM5TKqlZVgqWBngIZoLr90df5tsIKCI4c8vr+XjwqPu6T59rrrnh7X2YsRljjAAAACwowNcFAAAA1BRBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWFYDXxdQ18rKyrR//36Fh4fLZrP5uhwAAFANxhgdOXJEMTExCgio+rzLOR9k9u/fr9jYWF+XAQAAauDbb7/V+eefX+X8cz7IhIeHS/r1hXA6nT6u5uxyu9364IMP1KNHD9ntdl+Xg2pi3KyJcbMmxs1/FRUVKTY21vN3vCrnfJApv5zkdDrrZZAJDQ2V0+nkALUQxs2aGDdrYtz836luC+FmXwAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFk+DTKlpaUaNWqUWrVqpZCQEF100UV66qmnZIzxLGOM0ejRoxUdHa2QkBAlJiZq165dPqwaAAD4C58GmUmTJmnGjBn617/+pR07dmjSpEmaPHmyXnzxRc8ykydP1gsvvKCZM2dqzZo1atiwoZKTk1VcXOzDygEAgD/w6bdfr1q1Sr1799aNN94oSWrZsqXmzZuntWvXSvr1bMy0adP05JNPqnfv3pKkuXPnKjIyUosWLdLtt9/us9oBAIDv+TTIXHXVVXrppZf05Zdfqk2bNtq8ebM+/fRTTZkyRZK0d+9e5eXlKTEx0bNORESEunbtqpycnEqDTElJiUpKSjzTRUVFkn79qna3213HPfIv5f2tb/22OsbNmhg3a2Lc/Fd1x8SnQeaJJ55QUVGR2rVrp8DAQJWWlurpp5/WHXfcIUnKy8uTJEVGRnqtFxkZ6Zl3ookTJ2rcuHEV2j/44AOFhobWcg+sYfny5b4uATXAuFkT42ZNjJv/OX78eLWW82mQ+c9//qPXX39dmZmZat++vTZt2qSHH35YMTExGjhwYI22OXLkSKWlpXmmi4qKFBsbqx49esjpdNZW6Zbgdru1fPlyJSUlyW63+7ocVBPjZg3dunXzmg4KClJqaqqmTp0ql8t10nWzs7PrsjScBo43/1V+ReVUfBpkHn30UT3xxBOeS0SXXnqpvvnmG02cOFEDBw5UVFSUJCk/P1/R0dGe9fLz89WxY8dKt+lwOORwOCq02+32evsmrc99tzLGzb9VFVZcLtcpgwzj6n843vxPdcfDp08tHT9+XAEB3iUEBgaqrKxMktSqVStFRUUpKyvLM7+oqEhr1qxRQkLCWa0VAAD4H5+ekenVq5eefvppxcXFqX379vr88881ZcoU3XvvvZIkm82mhx9+WBMmTFDr1q3VqlUrjRo1SjExMbrlllt8WToAAPADPg0yL774okaNGqUHH3xQBw8eVExMjP76179q9OjRnmUee+wxHTt2TEOHDlVBQYGuueYaLVu2TMHBwT6sHAAA+AOfBpnw8HBNmzZN06ZNq3IZm82m8ePHa/z48WevMAAAYAl81xIAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALA
sggwAALAsggwAALAsggwAALAsggwAALAsnwaZli1bymazVfhJSUmRJBUXFyslJUVNmzZVWFiY+vTpo/z8fF+WDAAA/IhPg8y6det04MABz8/y5cslSX379pUkpaamavHixVqwYIGys7O1f/9+3Xrrrb4sGQAA+JEGvtx58+bNvaafeeYZXXTRRerWrZsKCws1a9YsZWZm6oYbbpAkZWRkKD4+XqtXr9aVV17pi5IBAIAf8WmQ+S2Xy6XXXntNaWlpstls2rBhg9xutxITEz3LtGvXTnFxccrJyakyyJSUlKikpMQzXVRUJElyu91yu9112wk/U97f+tZvq2PcrCEoKKjS6RPbK8PY+g+ON/9V3THxmyCzaNEiFRQU6J577pEk5eXlKSgoSI0aNfJaLjIyUnl5eVVuZ+LEiRo3blyF9g8++EChoaG1WbJllF+yg7Uwbv7t8ccfr7Q9NTX1lOsuWbKktsvBGeJ48z/Hjx+v1nJ+E2RmzZqlnj17KiYm5oy2M3LkSKWlpXmmi4qKFBsbqx49esjpdJ5pmZbidru1fPlyJSUlyW63+7ocVBPjZg3dunXzmg4KClJqaqqmTp0ql8t10nWzs7PrsjScBo43/1V+ReVU/CLIfPPNN/rwww/19ttve9qioqLkcrlUUFDgdVYmPz9fUVFRVW7L4XDI4XBUaLfb7fX2TVqf+25ljJt/qyqsuFyuUwYZxtX/cLz5n+qOh198jkxGRoZatGihG2+80dPWqVMn2e12ZWVledpyc3O1b98+JSQk+KJMAADgZ3x+RqasrEwZGRkaOHCgGjT4v3IiIiI0ePBgpaWlqUmTJnI6nRo+fLgSEhJ4YgkAAEjygyDz4Ycfat++fbr33nsrzJs6daoCAgLUp08flZSUKDk5WdOnT/dBlQAAwB/5PMj06NFDxphK5wUHBys9PV3p6elnuSoAAGAFfnGPDAAAQE0QZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGURZAAAgGX5PMh8//33uvPOO9W0aVOFhITo0ksv1fr16z3zjTEaPXq0oqOjFRISosTERO3atcuHFQMAAH/h0yDz008/6eqrr5bdbtfSpUu1fft2Pffcc2rcuLFnmcmTJ+uFF17QzJkztWbNGjVs2FDJyckqLi72YeUAAMAfNPDlzidNmqTY2FhlZGR42lq1auX5f2OMpk2bpieffFK9e/eWJM2dO1eRkZFatGiRbr/99rNeMwAA8B8+DTLvvPOOkpOT1bdvX2VnZ+u8887Tgw8+qCFDhkiS9u7dq7y8PCUmJnrWiYiIUNeuXZWTk1NpkCkpKVFJSYlnuqioSJLkdrvldrvruEf+pby/9a3fVse4WUNQUFCl0ye2V4ax9R8cb/6rumNiM8aYOq6lSsHBwZKktLQ09e3bV+vWrdNDDz2kmTNnauDAgVq1apWuvvpq7d+/X9HR0Z71+vXrJ5vNpvnz51fY5tixYzVu3LgK7ZmZmQoNDa27zgAAgFpz/PhxDRgwQIWFhXI6nVUu59MgExQUpM6dO2vVqlWethEjRmjdunXKycmpUZCp7IxMbGysfvzxx5O+EOcit9ut5cuXKykpSXa73dfloJoYN2vo1q2b13RQUJBSU1M1depUuVyuk66bnZ1dl6XhNHC8+a+ioiI1a9bslEHGp5eWoqOjdckll3i1xcfH66233pIkRUVFSZLy8/O9gkx+fr46duxY6TYdDoccDkeFdrvdXm/fpPW571bGuPm3qsKKy+U6ZZBhXP0Px5v/qe54+PSppauvvlq
5ublebV9++aUuuOACSb/e+BsVFaWsrCzP/KKiIq1Zs0YJCQlntVYAAOB/fHpGJjU1VVdddZX+8Y9/qF+/flq7dq1eeuklvfTSS5Ikm82mhx9+WBMmTFDr1q3VqlUrjRo1SjExMbrlllt8WToAAPADPg0yV1xxhRYuXKiRI0dq/PjxatWqlaZNm6Y77rjDs8xjjz2mY8eOaejQoSooKNA111yjZcuWeW4UBgAA9ZdPg4wk3XTTTbrpppuqnG+z2TR+/HiNHz/+LFYFAACswOdfUQAAAFBTBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZPg0yY8eOlc1m8/pp166dZ35xcbFSUlLUtGlThYWFqU+fPsrPz/dhxQAAwJ/4/IxM+/btdeDAAc/Pp59+6pmXmpqqxYsXa8GCBcrOztb+/ft16623+rBaAADgTxr4vIAGDRQVFVWhvbCwULNmzVJmZqZuuOEGSVJGRobi4+O1evVqXXnllWe7VAAA4Gd8HmR27dqlmJgYBQcHKyEhQRMnTlRcXJw2bNggt9utxMREz7Lt2rVTXFyccnJyqgwyJSUlKikp8UwXFRVJktxut9xud912xs+U97e+9dvqGDdrCAoKqnT6xPbKMLb+g+PNf1V3TGzGGFPHtVRp6dKlOnr0qNq2basDBw5o3Lhx+v7777Vt2zYtXrxYgwYN8golktSlSxddf/31mjRpUqXbHDt2rMaNG1ehPTMzU6GhoXXSDwAAULuOHz+uAQMGqLCwUE6ns8rlfBpkTlRQUKALLrhAU6ZMUUhISI2CTGVnZGJjY/Xjjz+e9IU4F7ndbi1fvlxJSUmy2+2+LgfVxLhZQ7du3bymg4KClJqaqqlTp8rlcp103ezs7LosDaeB481/FRUVqVmzZqcMMj6/tPRbjRo1Ups2bbR7924lJSXJ5XKpoKBAjRo18iyTn59f6T015RwOhxwOR4V2u91eb9+k9bnvVsa4+beqworL5TplkGFc/Q/Hm/+p7nj4/Kml3zp69Kj27Nmj6OhoderUSXa7XVlZWZ75ubm52rdvnxISEnxYJQAA8Bc+PSPzyCOPqFevXrrgggu0f/9+jRkzRoGBgerfv78iIiI0ePBgpaWlqUmTJnI6nRo+fLgSEhJ4YgkAAEjycZD57rvv1L9/fx06dEjNmzfXNddco9WrV6t58+aSpKlTpyogIEB9+vRRSUmJkpOTNX36dF+WDAAA/IhPg8wbb7xx0vnBwcFKT09Xenr6WaoIAABYiV/dIwMAAHA6CDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyahRkLrzwQh06dKhCe0FBgS688MIzLgoAAKA6ahRkvv76a5WWllZoLykp0ffff3/GRQEAAFTHaX1p5DvvvOP5//fff18RERGe6dLSUmVlZally5a1VhwAAMDJnFaQueWWWyRJNptNAwcO9Jpnt9vVsmVLPffcc7VWHAAAwMmcVpApKyuTJLVq1Urr1q1Ts2bN6qQoAACA6jitIFNu7969tV0HAADAaatRkJGkrKwsZWVl6eDBg54zNeVmz559xoUBAACcSo2CzLhx4zR+/Hh17txZ0dHRstlstV0XAADAKdUoyMycOVNz5szRXXfdVdv1AAAAVFuNPkfG5XLpqquuqu1aAAAATkuNgsx9992nzMzM2q4FAADgtNTo0lJxcbFeeuklffjhh+rQoYPsdrvX/ClTptRKcQAAACdToyCzZcsWdez
YUZK0bds2r3nc+AsAAM6WGgWZjz76qLbrAAAAOG01ukcGAADAH9TojMz1119/0ktIK1asqHFBAAAA1VWjIFN+f0w5t9utTZs2adu2bRW+TBIAAKCu1CjITJ06tdL2sWPH6ujRo2dUEAAAQHXV6j0yd955J9+zBAAAzppaDTI5OTkKDg6uzU0CAABUqUaXlm699VavaWOMDhw4oPXr12vUqFG1UhgAAMCp1CjIREREeE0HBASobdu2Gj9+vHr06FErhQEAAJxKjYJMRkZGbdcBAGdd586dfV0CgDN0RvfIbNiwQa+99ppee+01ff7552dUyDPPPCObzaaHH37Y01ZcXKyUlBQ1bdpUYWFh6tOnj/Lz889oPwAA4NxRozMyBw8e1O23366VK1eqUaNGkqSCggJdf/31euONN9S8efPT2t66dev073//Wx06dPBqT01N1XvvvacFCxYoIiJCw4YN06233qrPPvusJmUDAIBzTI3OyAwfPlxHjhzRF198ocOHD+vw4cPatm2bioqKNGLEiNPa1tGjR3XHHXfo5ZdfVuPGjT3thYWFmjVrlqZMmaIbbrhBnTp1UkZGhlatWqXVq1fXpGwAAHCOqdEZmWXLlunDDz9UfHy8p+2SSy5Renr6ad/sm5KSohtvvFGJiYmaMGGCp33Dhg1yu91KTEz0tLVr105xcXHKycnRlVdeWen2SkpKVFJS4pkuKiqS9OunD7vd7tOqzerK+1vf+m11jNvZExQUVOvbqs42GVv/wfHmv6o7JjUKMmVlZbLb7RXa7Xa7ysrKqr2dN954Qxs3btS6desqzMvLy1NQUJDn0lW5yMhI5eXlVbnNiRMnaty4cRXaP/jgA4WGhla7tnPJ8uXLfV0CaoBxq3uPP/54rW8zNTX1lMssWbKk1veLM8Px5n+OHz9ereVqFGRuuOEGPfTQQ5o3b55iYmIkSd9//71SU1PVvXv3am3j22+/1UMPPaTly5fX6ofojRw5UmlpaZ7poqIixcbGqkePHnI6nbW2Hytwu91avny5kpKSKg2e8E+M29nTrVu3WttWUFCQUlNTNXXqVLlcrpMum52dXWv7xZnhePNf5VdUTqVGQeZf//qXbr75ZrVs2VKxsbGSfg0mv/vd7/Taa69VaxsbNmzQwYMH9fvf/97TVlpaqo8//lj/+te/9P7778vlcqmgoMDrrEx+fr6ioqKq3K7D4ZDD4ajQbrfb6+2btD733coYt7p3qsBR022earuMq//hePM/1R2PGgWZ2NhYbdy4UR9++KF27twpSYqPj/e6n+VUunfvrq1bt3q1DRo0SO3atdPjjz+u2NhY2e12ZWVlqU+fPpKk3Nxc7du3TwkJCTUpGwAAnGNOK8isWLFCw4YN0+rVq+V0OpWUlKSkpCRJvz5l1L59e82cOVPXXnvtKbcVHh6u3/3ud15tDRs2VNOmTT3tgwcPVlpampo0aSKn06nhw4crISGhyht9AQBA/XJaj19PmzZNQ4YMqfRek4iICP31r3/VlClTaq24qVOn6qabblKfPn30hz/8QVFRUXr77bdrbfsAAMDaTuuMzObNmzVp0qQq5/fo0UPPPvtsjYtZuXKl13RwcLDS09OVnp5e420CAIBz12mdkcnPzz/pzTcNGjTQDz/8cMZFAQAAVMdpBZnzzjtP27Ztq3L+li1bFB0dfcZFAQAAVMdpBZk//elPGjVqlIqLiyvM+/nnnzVmzBjddNNNtVYcAADAyZzWPTJPPvmk3n77bbVp00bDhg1T27ZtJUk7d+5Uenq6SktL9fe//71OCgUAADjRaQWZyMhIrVq1Sg888IBGjhwpY4wkyWazKTk5Wenp6YqMjKyTQgEAAE502h+Id8EFF2jJkiX66aeftHv3bhlj1Lp1a69vrgYAADgbavTJvpLUuHFjXXHFFbVZCwAAwGk5rZt9AQAA/AlBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgA
AWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWFYDXxcAAPVN586da7zu+vXra7ESwPo4IwMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLz5EBYGln8pksAKzPp2dkZsyYoQ4dOsjpdMrpdCohIUFLly71zC8uLlZKSoqaNm2qsLAw9enTR/n5+T6sGAAA+BOfBpnzzz9fzzzzjDZs2KD169frhhtuUO/evfXFF19IklJTU7V48WItWLBA2dnZ2r9/v2699VZflgwAAPyITy8t9erVy2v66aef1owZM7R69Wqdf/75mjVrljIzM3XDDTdIkjIyMhQfH6/Vq1fryiuv9EXJAADAj/jNPTKlpaVasGCBjh07poSEBG3YsEFut1uJiYmeZdq1a6e4uDjl5ORUGWRKSkpUUlLimS4qKpIkud1uud3uuu2Enynvb33rt9UxbqcnKCjI1yVI+r866roe3he1i+PNf1V3TGzGGFPHtZzU1q1blZCQoOLiYoWFhSkzM1N/+tOflJmZqUGDBnmFEknq0qWLrr/+ek2aNKnS7Y0dO1bjxo2r0J6ZmanQ0NA66QMAAKhdx48f14ABA1RYWCin01nlcj4/I9O2bVtt2rRJhYWFevPNNzVw4EBlZ2fXeHsjR45UWlqaZ7qoqEixsbHq0aPHSV+Ic5Hb7dby5cuVlJQku93u63JQTYzb6enWrZuvS5D065mY1NRUTZ06VS6Xq872cya/H1ERx5v/Kr+icio+DzJBQUG6+OKLJUmdOnXSunXr9Pzzz+u2226Ty+VSQUGBGjVq5Fk+Pz9fUVFRVW7P4XDI4XBUaLfb7fX2TVqf+25ljFv11GVoqAmXy1WnNfGeqBscb/6nuuPhdx+IV1ZWppKSEnXq1El2u11ZWVmeebm5udq3b58SEhJ8WCEAAPAXPj0jM3LkSPXs2VNxcXE6cuSIMjMztXLlSr3//vuKiIjQ4MGDlZaWpiZNmsjpdGr48OFKSEjgiSUAACDJx0Hm4MGDuvvuu3XgwAFFRESoQ4cOev/995WUlCRJmjp1qgICAtSnTx+VlJQoOTlZ06dP92XJAADAj/g0yMyaNeuk84ODg5Wenq709PSzVBEAALASv7tHBgAAoLoIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIIMgAAwLIa+LoAAED1de7cucbrrl+/vhYrAfyDT8/ITJw4UVdccYXCw8PVokUL3XLLLcrNzfVapri4WCkpKWratKnCwsLUp08f5efn+6hiAADgT3waZLKzs5WSkqLVq1dr+fLlcrvd6tGjh44dO+ZZJjU1VYsXL9aCBQuUnZ2t/fv369Zbb/Vh1QAAwF/49NLSsmXLvKbnzJmjFi1aaMOGDfrDH/6gwsJCzZo1S5mZmbrhhhskSRkZGYqPj9fq1at15ZVX+qJsAADgJ/zqHpnCwkJJUpMmTSRJGzZskNvtVmJiomeZdu3aKS4uTjk5OZUGmZKSEpWUlHimi4qKJElut1tut7suy/c75f2tb/22Osbt9AQFBfm6BEn/V4e/1FMZ3lMVcbz5r+qOic0YY+q4lmopKyvTzTffrIKCAn366aeSpMzMTA0aNMgrmEhSly5ddP3112vSpEkVtjN27FiNGzeuQntmZqZCQ0PrpngAAFCrjh8/rgEDBqiwsFBOp7PK5fzmjExKSoq2bdvmCTE1NXLkSKWlpXmmi4qKFBsbqx49epz0hTgXud1uLV++XEl
JSbLb7b4uB9XEuJ2ebt26+boESb+eiUlNTdXUqVPlcrl8XU6lsrOzfV2C3+F481/lV1ROxS+CzLBhw/Tuu+/q448/1vnnn+9pj4qKksvlUkFBgRo1auRpz8/PV1RUVKXbcjgccjgcFdrtdnu9fZPW575bGeNWPf4WGlwul9/VVI73U9U43vxPdcfDp08tGWM0bNgwLVy4UCtWrFCrVq285nfq1El2u11ZWVmettzcXO3bt08JCQlnu1wAAOBnfHpGJiUlRZmZmfrvf/+r8PBw5eXlSZIiIiIUEhKiiIgIDR48WGlpaWrSpImcTqeGDx+uhIQEnlgCAAC+DTIzZsyQJF133XVe7RkZGbrnnnskSVOnTlVAQID69OmjkpISJScna/r06We5UgAA4I98GmSq88BUcHCw0tPTlZ6efhYqAgAAVsKXRgIAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMtq4OsCAKBz586+LgGARXFGBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBZBBgAAWBafIwMAOKUz+ayf9evX12IlgDefnpH5+OOP1atXL8XExMhms2nRokVe840xGj16tKKjoxUSEqLExETt2rXLN8UCAAC/49Mgc+zYMV122WVKT0+vdP7kyZP1wgsvaObMmVqzZo0aNmyo5ORkFRcXn+VKAQCAP/LppaWePXuqZ8+elc4zxmjatGl68skn1bt3b0nS3LlzFRkZqUWLFun2228/m6UCAAA/5Lf3yOzdu1d5eXlKTEz0tEVERKhr167KycmpMsiUlJSopKTEM11UVCRJcrvdcrvddVu0nynvb33rt9XVx3ELCgrydQlnrLwP/tyXM3lPnUm//Pm9XB+PN6uo7pjYjDGmjmupFpvNpoULF+qWW26RJK1atUpXX3219u/fr+joaM9y/fr1k81m0/z58yvdztixYzVu3LgK7ZmZmQoNDa2T2gEAQO06fvy4BgwYoMLCQjmdziqX89szMjU1cuRIpaWleaaLiooUGxurHj16nPSFOBe53W4tX75cSUlJstvtvi4H1VQfx61bt26+LuGMBQUFKTU1VVOnTpXL5fJ1OZXKzs6u8bpnMkZnst+6Vh+PN6sov6JyKn4bZKKioiRJ+fn5Xmdk8vPz1bFjxyrXczgccjgcFdrtdnu9fZPW575bWX0aN3/9w18TLpfLb/tzJu+nM+mTFd7H9el4s4rqjofffiBeq1atFBUVpaysLE9bUVGR1qxZo4SEBB9WBgAA/IVPz8gcPXpUu3fv9kzv3btXmzZtUpMmTRQXF6eHH35YEyZMUOvWrdWqVSuNGjVKMTExnvtoAABA/ebTILN+/Xpdf/31nunye1sGDhyoOXPm6LHHHtOxY8c0dOhQFRQU6JprrtGyZcsUHBzsq5IBAIAf8WmQue6663Syh6ZsNpvGjx+v8ePHn8WqAACAVfjtPTIAAACnQpABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACW5dOvKAAAnD2dO3f2dQlAreOMDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCy+/RqoIzX9puGgoCA9/vjjtVxN9fDtyACshjMyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsggyAADAsvgcmTNwJp+5sX79+lqsBAD8l68+n+hMfs/Wt9/vVu6vJc7IpKenq2XLlgoODlbXrl21du1aX5cEAAD8gN8
Hmfnz5ystLU1jxozRxo0bddlllyk5OVkHDx70dWkAAMDH/D7ITJkyRUOGDNGgQYN0ySWXaObMmQoNDdXs2bN9XRoAAPAxv75HxuVyacOGDRo5cqSnLSAgQImJicrJyal0nZKSEpWUlHimCwsLJUmHDx+W2+2u1foCAmqeAw8dOlSLlVTO7Xbr+PHjOnTokOx2e53vD95q+v4ICAjw2bidyXu6visft4CAAF5HP1Gd37NV/Z7099/vtc0f+3vkyBFJkjHmpMv5dZD58ccfVVpaqsjISK/2yMhI7dy5s9J1Jk6cqHHjxlVob9WqVZ3UWFPNmjXzdQnwY5999pmvS0ANMG7+xVe/Z+vb7/e67u+RI0cUERFR5Xy/DjI1MXLkSKWlpXmmy8rKdPjwYTVt2lQ2m82HlZ19RUVFio2N1bfffiun0+nrclBNjJs1MW7WxLj5L2OMjhw5opiYmJMu59dBplmzZgoMDFR+fr5Xe35+vqKioipdx+FwyOFweLU1atSorkq0BKfTyQFqQYybNTFu1sS4+aeTnYkp59cXcoOCgtSpUydlZWV52srKypSVlaWEhAQfVgYAAPyBX5+RkaS0tDQNHDhQnTt3VpcuXTRt2jQdO3ZMgwYN8nVpAADAx/w+yNx222364YcfNHr0aOXl5aljx45atmxZhRuAUZHD4dCYMWMqXGqDf2PcrIlxsybGzfps5lTPNQEAAPgpv75HBgAA4GQIMgAAwLIIMgAAwLIIMgAAwLIIMhZ2+PBh3XHHHXI6nWrUqJEGDx6so0ePnnSd4uJipaSkqGnTpgoLC1OfPn0qfOBguUOHDun888+XzWZTQUFBHfSgfqqLcdu8ebP69++v2NhYhYSEKD4+Xs8//3xdd+Wcl56erpYtWyo4OFhdu3bV2rVrT7r8ggUL1K5dOwUHB+vSSy/VkiVLvOYbYzR69GhFR0crJCREiYmJ2rVrV112oV6qzXFzu916/PHHdemll6phw4aKiYnR3Xffrf3799d1N1BdBpb1xz/+0Vx22WVm9erV5pNPPjEXX3yx6d+//0nXuf/++01sbKzJysoy69evN1deeaW56qqrKl22d+/epmfPnkaS+emnn+qgB/VTXYzbrFmzzIgRI8zKlSvNnj17zKuvvmpCQkLMiy++WNfdOWe98cYbJigoyMyePdt88cUXZsiQIaZRo0YmPz+/0uU/++wzExgYaCZPnmy2b99unnzySWO3283WrVs9yzzzzDMmIiLCLFq0yGzevNncfPPNplWrVubnn38+W90659X2uBUUFJjExEQzf/58s3PnTpOTk2O6dOliOnXqdDa7hZMgyFjU9u3bjSSzbt06T9vSpUuNzWYz33//faXrFBQUGLvdbhYsWOBp27Fjh5FkcnJyvJadPn266datm8nKyiLI1KK6HrffevDBB831119fe8XXM126dDEpKSme6dLSUhMTE2MmTpxY6fL9+vUzN954o1db165dzV//+ldjjDFlZWUmKirK/POf//TMLygoMA6Hw8ybN68OelA/1fa4VWbt2rVGkvnmm29qp2icES4tWVROTo4aNWqkzp07e9oSExMVEBCgNWvWVLrOhg0b5Ha7lZiY6Glr166d4uLilJOT42nbvn27xo8fr7lz557RV7ujoroctxMVFhaqSZMmtVd8PeJyubRhwwav1zwgIECJiYlVvuY5OTley0tScnKyZ/m9e/cqLy/Pa5mIiAh17dr1pOOI6quLcatMYWGhbDZbvf8eP3/BXymLysvLU4sWLbzaGjRooCZNmigvL6/KdYKCgiocfJGRkZ51SkpK1L9/f/3zn/9UXFxcndRen9XVuJ1o1apVmj9/voYOHVorddc3P/74o0pLSyt8gvjJXvO8vLyTLl/+39PZJk5PXYzbiYqLi/X444+rf//+fMmknyDI+JknnnhCNpvtpD87d+6ss/2PHDlS8fHxuvPOO+tsH+ciX4/bb23btk29e/fWmDFj1KNHj7O
yT6A+cLvd6tevn4wxmjFjhq/Lwf/y++9aqm/+9re/6Z577jnpMhdeeKGioqJ08OBBr/ZffvlFhw8fVlRUVKXrRUVFyeVyqaCgwOtf9/n5+Z51VqxYoa1bt+rNN9+U9OtTFpLUrFkz/f3vf9e4ceNq2LNzm6/Hrdz27dvVvXt3DR06VE8++WSN+oJf3++BgYEVnuir7DUvFxUVddLly/+bn5+v6Ohor2U6duxYi9XXX3UxbuXKQ8w333yjFStWcDbGn/j6Jh3UTPlNo+vXr/e0vf/++9W6afTNN9/0tO3cudPrptHdu3ebrVu3en5mz55tJJlVq1ZVedc/qq+uxs0YY7Zt22ZatGhhHn300brrQD3SpUsXM2zYMM90aWmpOe+880560+hNN93k1ZaQkFDhZt9nn33WM7+wsJCbfWtZbY+bMca4XC5zyy23mPbt25uDBw/WTeGoMYKMhf3xj380l19+uVmzZo359NNPTevWrb0e4/3uu+9M27ZtzZo1azxt999/v4mLizMrVqww69evNwkJCSYhIaHKfXz00Uc8tVTL6mLctm7dapo3b27uvPNOc+DAAc8Pv3Rr7o033jAOh8PMmTPHbN++3QwdOtQ0atTI5OXlGWOMueuuu8wTTzzhWf6zzz4zDRo0MM8++6zZsWOHGTNmTKWPXzdq1Mj897//NVu2bDG9e/fm8etaVtvj5nK5zM0332zOP/98s2nTJq/jq6SkxCd9hDeCjIUdOnTI9O/f34SFhRmn02kGDRpkjhw54pm/d+9eI8l89NFHnraff/7ZPPjgg6Zx48YmNDTU/PnPfzYHDhyoch8EmdpXF+M2ZswYI6nCzwUXXHAWe3buefHFF01cXJwJCgoyXbp0MatXr/bM69atmxk4cKDX8v/5z39MmzZtTFBQkGnfvr157733vOaXlZWZUaNGmcjISONwOEz37t1Nbm7u2ehKvVKb41Z+PFb289tjFL5jM+Z/b4IAAACwGJ5aAgAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAc4BX3/9tWw2mzZt2lTlMi1bttS0adNqdb/33HOPbrnlllrdJmrmuuuu08MPP1zl/LFjx9b6l1OuXLlSNptNBQUFtbpdSbrrrrv0j3/844y3U51+P/HEExo+fPgZ7wu+QZDBWVfVL9w5c+Z4fbvzua464cPfPf/885ozZ45n+lR/TGvLnDlzZLPZZLPZFBgYqMaNG6tr164aP368CgsL63z/teVsvV515aqrrtKBAwcUEREhqfaO4c2bN2vJkiUaMWLEGW+rOh555BG98sor+uqrr87K/lC7CDLAaXK73b4uwW9ERET4LHw6nU4dOHBA3333nVatWqWhQ4dq7ty56tixo/bv319n+3W5XHW2basJCgpSVFSUbDZbrW73xRdfVN++fRUWFlbjbRhj9Msvv1Rr2WbNmik5OVkzZsyo8f7gOwQZ+K3yyxbPPvusoqOj1bRpU6WkpHgFiZKSEj3yyCM677zz1LBhQ3Xt2lUrV670zC//F+L777+v+Ph4hYWF6Y9//KMOHDjgta/Zs2erffv2cjgcio6O1rBhwzzzbDabZsyYoZtvvlkNGzbUhAkTdPHFF+vZZ5/12samTZtks9m0e/dur/V69uypkJAQXXjhhXrzzTc9y7dq1UqSdPnll8tms+m6667zzPuf//kfxcfHKzg4WO3atdP06dO99rV27VpdfvnlCg4OVufOnfX555+f9uu7b98+9e7dW2FhYXI6nerXr5/y8/O9lpkwYYJatGih8PBw3XfffXriiSe8TtP/9tLSPffco+zsbD3//POesyVff/21fvrpJ91xxx1q3ry5QkJC1Lp1a2VkZJx2vSey2WyKiopSdHS04uPjNXjwYK1atUpHjx7VY4895lmurKxMEydOVKtWrRQSEqLLLrvMaxwk6YsvvtBNN90kp9Op8PBwXXvttdqzZ49XH59
++mnFxMSobdu2Gj9+vH73u99VqKljx44aNWqU13rjxo1T8+bN5XQ6df/993uCUFWvlyRt27ZNPXv2VFhYmCIjI3XXXXfpxx9/9Ozn2LFjuvvuuxUWFqbo6Gg999xzp/36lZWVafz48Tr//PPlcDjUsWNHLVu2zGuZVatWqWPHjp732aJFi7zOIv720tLKlSs1aNAgFRYWevozduxYSdL06dPVunVrBQcHKzIyUn/5y1+qrKu0tFRvvvmmevXq5dX+6quvqnPnzgoPD1dUVJQGDBiggwcPeuaX17J06VJ16tRJDodDn376qWf+v//9b8XGxio0NFT9+vWrcOauV69eeuONN077dYQf8PGXVqIe6tatm3nooYcqtGdkZJiIiAjP9MCBA43T6TT333+/2bFjh1m8eLEJDQ01L730kmeZ++67z1x11VXm448/Nrt37zb//Oc/jcPhMF9++aVnm3a73SQmJpp169aZDRs2mPj4eDNgwADPNqZPn26Cg4PNtGnTTG5urlm7dq2ZOnWqZ74k06JFCzN79myzZ88e880335inn37aXHLJJV71jxgxwvzhD3/wWq9p06bm5ZdfNrm5uebJJ580gYGBZvv27cYYY9auXWskmQ8//NAcOHDAHDp0yBhjzGuvvWaio6PNW2+9Zb766ivz1ltvmSZNmpg5c+YYY4w5cuSIad68uRkwYIDZtm2bWbx4sbnwwguNJPP5559X+bpfcMEFnn6Vlpaajh07mmuuucasX7/erF692nTq1Ml069bNs/xrr71mgoODzezZs01ubq4ZN26ccTqd5rLLLvMao969extjjCkoKDAJCQlmyJAh5sCBA+bAgQPml19+MSkpKaZjx45m3bp1Zu/evWb58uXmnXfeqbLO6jjxvfJbDz30kAkPDze//PKLMcaYCRMmmHbt2plly5aZPXv2mIyMDONwOMzKlSuNMcZ89913pkmTJubWW28169atM7m5uWb27Nlm586dnj6GhYWZu+66y2zbts1s27bNfPvttyYgIMCsXbvWs9+NGzcam81m9uzZ47XebbfdZrZt22beffdd07x5c/P//t//O+nr9dNPP5nmzZubkSNHmh07dpiNGzeapKQkc/3113v29cADD5i4uDjz4Ycfmi1btpibbrrJhIeHV3pclRszZozX2E2ZMsU4nU4zb948s3PnTvPYY48Zu93uOXYKCwtNkyZNzJ133mm++OILs2TJEtOmTRuv99lHH31kJJmffvrJlJSUmGnTphmn0+npz5EjR8y6detMYGCgyczMNF9//bXZuHGjef7556usc+PGjUaSycvL82qfNWuWWbJkidmzZ4/JyckxCQkJpmfPnp755bV06NDBfPDBB2b37t3m0KFDZsyYMaZhw4bmhhtuMJ9//rnJzs42F198sdfvAGOM2bFjh5Fk9u7dW2Vt8E8EGZx1pxNkLrjgAs8fJGOM6du3r7ntttuMMcZ88803JjAw0Hz//fde2+nevbsZOXKkZ5uSzO7duz3z09PTTWRkpGc6JibG/P3vf6+yXknm4Ycf9mr7/vvvTWBgoFmzZo0xxhiXy2WaNWvmCRvl691///1e63Xt2tU88MADxhhj9u7dW2n4uOiii0xmZqZX21NPPWUSEhKMMcb8+9//Nk2bNjU///yzZ/6MGTNOK8h88MEHJjAw0Ozbt88z/4svvjCSPH+cu3btalJSUry2cfXVV1cZZIypfGx79eplBg0aVGVdNXGyIFP+WuTn55vi4mITGhpqVq1a5bXM4MGDTf/+/Y0xxowcOdK0atXKuFyuSrc3cOBAExkZaUpKSrzae/bs6RlLY4wZPny4ue6667zWa9KkiTl27JhXbWFhYaa0tNQYU/nr9dRTT5kePXp4tX377bdGksnNzTVHjhwxQUFB5j//+Y9n/qFDh0xISMhpBZmYmBjz9NNPey1zxRVXmAcffNBT64nvs5dffrnKIGNM5ePy1ltvGafTaYqKiqqs7bc
WLlxoAgMDTVlZ2UmXW7dunZFkjhw54lXLokWLKvQ7MDDQfPfdd562pUuXmoCAAHPgwAFPW2FhoZHkCbiwDi4twa+1b99egYGBnuno6GjP6eStW7eqtLRUbdq0UVhYmOcnOzvbc1lAkkJDQ3XRRRdVuo2DBw9q//796t69+0nr6Ny5s9d0TEyMbrzxRs2ePVuStHjxYpWUlKhv375eyyUkJFSY3rFjR5X7OXbsmPbs2aPBgwd79WnChAmePu3YsUMdOnRQcHBwlfs5lR07dig2NlaxsbGetksuuUSNGjXy1Jebm6suXbp4rXfidHU88MADeuONN9SxY0c99thjWrVqVZXLvv766179/uSTT057f8YYSfJc5jt+/LiSkpK8tjt37lzP67lp0yZde+21stvtVW7z0ksvVVBQkFfbkCFDNG/ePBUXF8vlcikzM1P33nuv1zKXXXaZQkNDPdMJCQk6evSovv322yr3tXnzZn300Ude9bZr106StGfPHu3Zs0cul0tdu3b1rNOkSRO1bdu2mq+QVFRUpP379+vqq6/2ar/66qu9xv/E91lNxj8pKUkXXHCBLrzwQt111116/fXXdfz48SqX//nnn+VwOCrcd7Nhwwb16tVLcXFxCg8PV7du3ST9eon0t048ViUpLi5O5513nmc6ISFBZWVlys3N9bSFhIRI0klrg39q4OsCUP84nc5KnywpKCjwPP1Q7sQ/LjabTWVlZZKko0ePKjAwUBs2bPAKO5K8bhKsbBvlf+zKf3mdSsOGDSu03Xfffbrrrrs0depUZWRk6LbbbvP6o1UTR48elSS9/PLLXn+oJFXoo1X07NlT33zzjZYsWaLly5ere/fuSklJqXCPkSTdfPPNXv3+7R+f6tqxY4ecTqeaNm3qeQrlvffeq7Ath8MhqXrvgcrGv1evXnI4HFq4cKGCgoLkdrtPeu9HdR09elS9evXSpEmTKsyLjo723INlFeHh4dq4caNWrlypDz74QKNHj9bYsWO1bt26Sm8Ub9asmY4fPy6Xy+UJj8eOHVNycrKSk5P1+uuvq3nz5tq3b5+Sk5Mr3Hxd2VhVx+HDhyVJzZs3r9H68B3OyOCsa9u2rTZu3FihfePGjWrTpk21t3P55ZertLRUBw8e1MUXX+z1ExUVVa1thIeHq2XLlsrKyqr2fsv96U9/UsOGDTVjxgwtW7aswr/GJWn16tUVpuPj4yXJ80u6tLTUMz8yMlIxMTH66quvKvSp/Obg+Ph4bdmyRcXFxVXu51Ti4+P17bffep0Z2L59uwoKCnTJJZdI+nWc1q1b57XeidMnCgoK8upPuebNm2vgwIF67bXXNG3aNL300kuVrh8eHu7V5+oGzXIHDx5UZmambrnlFgUEBOiSSy6Rw+HQvn37Krye5WejOnTooE8++eS0n0Zr0KCBBg4cqIyMDGVkZOj222+vUO/mzZv1888/e6ZXr16tsLAwz74re71+//vf64svvlDLli0r1NywYUNddNFFstvtWrNmjWedn376SV9++WW1a3c6nYqJidFnn33m1f7ZZ595jf/WrVtVUlLimV/T8W/QoIESExM1efJkbdmyRV9//bVWrFhR6TbKbybfvn27p23nzp06dOiQnnnmGV177bVq166d142+p7Jv3z6vJ9lWr16tgIAAr7NY27Ztk91uV/v27au9XfgHggzOugceeEBffvmlRowYoS1btig3N1dTpkzRvHnz9Le//a3a22nTpo3uuOMO3X333Xr77be1d+9erV27VhMnTtR7771X7e2MHTtWzz33nF544QXt2rVLGzdu1IsvvnjK9QIDA3XPPfdo5MiRat26daWXdxYsWKDZs2fryy+/1JgxY7R27VrPE1EtWrRQSEiIli1bpvz8fM9ZqnHjxmnixIl64YUX9OWXX2rr1q3KyMjQlClTJEkDBgyQzWbTkCFDtH37di1ZsqTSsxsnk5iYqEsvvVR33HGHNm7cqLVr1+ruu+9Wt27
dPKfmhw8frlmzZumVV17Rrl27NGHCBG3ZsuWkj9q2bNlSa9as0ddff60ff/xRZWVlGj16tP773/9q9+7d+uKLL/Tuu+96wtyZMMYoLy9PBw4c0I4dOzR79mxdddVVioiI0DPPPCPp12D0yCOPKDU1Va+88or27NnjGd9XXnlFkjRs2DAVFRXp9ttv1/r167Vr1y69+uqrXpcdqnLfffdpxYoVVQZZl8ulwYMHe8ZpzJgxGjZsmAICAqp8vVJSUnT48GH1799f69at0549e/T+++9r0KBBKi0tVVhYmAYPHqxHH31UK1as0LZt23TPPfd4tlldjz76qCZNmqT58+crNzdXTzzxhDZt2qSHHnpI0q/vs7KyMg0dOlQ7duzQ+++/73mfVfUeaNmypY4ePaqsrCz9+OOPOn78uN5991298MIL2rRpk7755hvNnTtXZWVlVV4Ka968uX7/+997PXEUFxenoKAgvfjii/rqq6/0zjvv6Kmnnqp2X4ODgzVw4EBt3rxZn3zyiUaMGKF+/fp5/YPnk08+0bXXXnva4Rl+wMf36KCeWrt2rUlKSjLNmzc3ERERpmvXrmbhwoVey5x4I6kxvz6R8tsna1wulxk9erRp2bKlsdvtJjo62vz5z382W7ZsMcZUfvPhwoULzYlv/ZkzZ5q2bdt6tjF8+HDPPEkVaiu3Z88eI8lMnjy5wjxJJj093SQlJRmHw2Fatmxp5s+f77XMyy+/bGJjY01AQIBXv15//XXTsWNHExQUZBo3bmz+8Ic/mLffftszPycnx1x22WUmKCjIdOzY0bz11lundbOvMb/eLH3zzTebhg0bmvDwcNO3b98KT4qMHz/eNGvWzISFhZl7773XjBgxwlx55ZWe+SeOUW5urrnyyitNSEiI5wmQp556ysTHx5uQkBDTpEkT07t3b/PVV19VWWd1lN/ELcnYbDYTERFhunTpYsaPH28KCwu9li0rKzPTpk3zjG/z5s1NcnKyyc7O9iyzefNm06NHDxMaGmrCw8PNtdde6/X00Ynvw9+69tprTfv27Su0l683evRo07RpUxMWFmaGDBliiouLT/p6GWPMl19+af785z+bRo0amZCQENOuXTvz8MMPe26APXLkiLnzzjtNaGioiYyMNJMnT67yJvpyJ97sW1paasaOHWvOO+88Y7fbzWWXXWaWLl3qtc5nn31mOnToYIKCgkynTp1MZmamkeR5ouvEm32NMeb+++83TZs2NZLMmDFjzCeffGK6detmGjdubEJCQkyHDh0qHAcnmj59utf7zBhjMjMzTcuWLY3D4TAJCQnmnXfeOemNxyf2e/r06SYmJsYEBwebv/zlL+bw4cNey7Vt29bMmzfvpHXBP9mM+d+bBQCctk8++UTdu3fXt99+q8jISK95NptNCxcuPKc+wj8pKUlRUVF69dVXfV2KXzDGqHXr1nrwwQeVlpbmNe+ee+5RQUGBFi1a5Jvi6sDrr7/u+ayYujxz8fPPP6tt27aaP3/+ad/IXhNLly7V3/72N23ZskUNGnDrqNUwYkANlJSU6IcfftDYsWPVt2/fCiHmXHD8+HHNnDlTycnJCgwM1Lx58/Thhx9q+fLlvi7NL/zwww964403lJeXp0GDBvm6nDoxd+5cXXjhhTrvvPO0efNmPf744+rXr1+dX34JCQnR3LlzvT4EsC4dO3ZMGRkZhBiLYtSAGpg3b54GDx6sjh07au7cub4up07YbDYtWbJETz/9tIqLi9W2bVu99dZbSkxM9HVpfqFFixZq1qyZXnrpJTVu3NjX5dSJvLw8jR49Wnl5eYqOjlbfvn319NNPn5V9//aTrutabTxtBt/h0hIAALAsnloCAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACW9f8BlV/r2FPZQCgAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the best model according to logs...\n", + "{'weights': 'weights/resnet56_cifar10', 'model_type': 'resnet56', 'kwargs': {'num_classes': 10, 'debug': False}, 'params': {'epochs': 100, 'batch_size': 256, 'momentum': 0.9, 'weight_decay': 0.256, 'weight_decay_bias': 0.004, 'ema_update_freq': 5, 'ema_rho': 0.9509900498999999, 'model_type': 'resnet56', 'kwargs': {'num_classes': 10, 'debug': False}}, 'run0': 0.9235000000000001, 'run1': 0.9194, 'run2': 0.9212, 'run3': 0.9204, 'run4': 0.8703000000000001, 'accuracy': [0.91096, 0.02037494539869983]}\n", + "\n", + "Average (5 runs): 91.096% +/- 2.037%\n", + "Best (idx 0): 0.924\n", + "resnet56 Mean Std. Percentage\n", + "conv 609.000 0.981 40.60\n", + "gelu 128.000 1.318 8.60\n", + "bootstrapping 732.000 3.258 48.80\n", + "residual 1.000 0.010 0.00\n", + "pool 27.000 0.089 1.80\n", + "linear 2.000 0.030 0.10\n", + "total 1499.000 4.475 100.00\n", + "rounded percent 99.900\n", + "logit res 0.000 0.013\n", + "\n", + "\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAj4AAAGwCAYAAACpYG+ZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA8EUlEQVR4nO3deViU9f7/8deAbMrizqKYWqnoSalMQ09ZiVInS6ujlWlqpqdCLe2Ueco1PaWVkh617OuSJeZp0Taz3I8lKmouJOKSpqlgaoArEHx+f/RjrkbAAGccmPv5uC4unc993595v+97hJf33DdjM8YYAQAAWICXuwsAAAC4Ugg+AADAMgg+AADAMgg+AADAMgg+AADAMgg+AADAMgg+AADAMqq4uwBXKygo0NGjRxUUFCSbzebucgAAQCkYY3T69GlFRETIy8t552k8PvgcPXpUkZGR7i4DAACUw+HDh1W/fn2nzefxwScoKEjS7zsuODjYzdUULy8vT9988406d+4sHx8fd5fjEp7eo6f3J3l+j/RX+Xl6j1brLzs7W5GRkfaf487i8cGn8O2t4ODgCh18qlatquDgYI98MUue36On9yd5fo/0V/l5eo9W7c/Zl6lwcTMAALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALCMKu4uAADcpXXr1va/+/r6avjw4erQoYNyc3P/dNvNmze7sjQALsIZHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBkEHwAAYBluDT75+fkaOXKkGjVqpICAAF199dV6+eWXZYyxr2OM0ahRoxQeHq6AgADFxsZq7969bqwaAABUVm4NPhMnTtTMmTP1n//8R6mpqZo4caImTZqkadOm2deZNGmSpk6dqrfeeksbN25UtWrVFBcXpwsXLrixcgAAUBlVceeTr1+/Xl27dtXdd98tSWrYsKEWLlyoTZs2Sfr9bE9CQoJeeuklde3aVZI0f/58hYaGasmSJXrooYfcVjsAAKh83Bp82rVrp1mzZmnPnj1q0qSJtm/frm+//VaTJ0+WJB04cEDp6emKjY21bxMSEqK2bdsqKSmp2OCTk5OjnJwc++Ps7GxJUl5envLy8lzcUfkU1lVR63MGT+/R0/uTPLNHX1/fIn//49ilVLb94InH72Ke3qPV+nNVnzbzxwtqrrCCggL961//0qRJk+Tt7a38/HxNmDBBI0aMkPT7GaH27dvr6NGjCg8Pt2/Xo0cP2Ww2LVq0qMicY8aM0dixY4uMJyYmqmrVqq5rBgAAOM25c+fUs2dPZWVlKTg42GnzuvWMz3//+18tWLBAiYmJatGihbZt26ZnnnlGERER6tOnT7nmHDFihIYNG2Z/nJ2drcjISHXu3NmpO86Z8vLytHz5cnXq1Ek+Pj7uLsclPL1HT+9P8sweO3ToYP+7r6+vhg4dqilTpig3N/dPt127dq0rS3M6Tzx+F/P0Hq3WX+E7Ns7m1uDz3HPP6YUXXrC/ZXXdddfpp59+0iuvvKI+ffooLCxMkpSRkeFwxicjI0PR0dHFzunn5yc/P78i4z4+PhX+hVIZarxcnt6jp/cneVaPxQWc3NzcUgWfyroPPOn4lcTTe7RKf67q0a13dZ07d05eXo4leHt7q6CgQJLUqFEjhYWFaeXKlfbl2dnZ2rhxo2JiYq5orQAAoPJz6xmfe+65RxMmTFCDBg3UokULff/995o8ebIee+wxSZLNZtMzzzyj8ePH69prr1W
jRo00cuRIRUREqFu3bu4sHQAAVEJuDT7Tpk3TyJEj9dRTT+n48eOKiIjQP/7xD40aNcq+zvPPP6+zZ89q4MCByszM1F//+lctW7ZM/v7+bqwcAABURm4NPkFBQUpISFBCQkKJ69hsNo0bN07jxo27coUBAACPxGd1AQAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAy6ji7gIA4HK0bt3a3SUAqEQ44wMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACzD7cHnyJEj6tWrl2rVqqWAgABdd9112rx5s325MUajRo1SeHi4AgICFBsbq71797qxYgAAUFm5Nfj8+uuvat++vXx8fPTVV19p165deuONN1SjRg37OpMmTdLUqVP11ltvaePGjapWrZri4uJ04cIFN1YOAAAqoyrufPKJEycqMjJSc+fOtY81atTI/ndjjBISEvTSSy+pa9eukqT58+crNDRUS5Ys0UMPPXTFawYAAJWXW4PPZ599pri4OHXv3l1r165VvXr19NRTT2nAgAGSpAMHDig9PV2xsbH2bUJCQtS2bVslJSUVG3xycnKUk5Njf5ydnS1JysvLU15enos7Kp/Cuipqfc7g6T16en9Sxe3R19fXqfOUdr6Kth/+TEU9fs7k6T1arT9X9WkzxhiXzFwK/v7+kqRhw4ape/fuSk5O1tNPP6233npLffr00fr169W+fXsdPXpU4eHh9u169Oghm82mRYsWFZlzzJgxGjt2bJHxxMREVa1a1XXNAAAApzl37px69uyprKwsBQcHO21etwYfX19ftW7dWuvXr7ePDRkyRMnJyUpKSipX8CnujE9kZKROnDjh1B3nTHl5eVq+fLk6deokHx8fd5fjEp7eo6f3J1XcHjt06OCUeXx9fTV06FBNmTJFubm5f7r+2rVrnfK8V0pFPX7O5Ok9Wq2/7Oxs1a5d2+nBx61vdYWHh6t58+YOY1FRUfr4448lSWFhYZKkjIwMh+CTkZGh6OjoYuf08/OTn59fkXEfH58K/0KpDDVeLk/v0dP7kypej6UJKWWdrzRzVqR9UBYV7fi5gqf3aJX+XNWjW+/qat++vdLS0hzG9uzZo6uuukrS7xc6h4WFaeXKlfbl2dnZ2rhxo2JiYq5orQAAoPJz6xmfoUOHql27dvr3v/+tHj16aNOmTZo1a5ZmzZolSbLZbHrmmWc0fvx4XXvttWrUqJFGjhypiIgIdevWzZ2lAwCASsitweemm27S4sWLNWLECI0bN06NGjVSQkKCHnnkEfs6zz//vM6ePauBAwcqMzNTf/3rX7Vs2TL7hdEAAACl5dbgI0ldunRRly5dSlxus9k0btw4jRs37gpWBQAAPJHbP7ICAADgSiH4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAA
AyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyhX8GncuLFOnjxZZDwzM1ONGze+7KIAAABcoVzB5+DBg8rPzy8ynpOToyNHjlx2UQAAAK5QpSwrf/bZZ/a/f/311woJCbE/zs/P18qVK9WwYUOnFQcAAOBMZQo+3bp1kyTZbDb16dPHYZmPj48aNmyoN954w2nFAQAAOFOZgk9BQYEkqVGjRkpOTlbt2rVdUhQAAIArlCn4FDpw4ICz6wAAAHC5cgUfSVq5cqVWrlyp48eP288EFZozZ85lFwYAAOBs5Qo+Y8eO1bhx49S6dWuFh4fLZrM5uy4AAACnK1fweeuttzRv3jz17t3b2fUAAAC4TLl+j09ubq7atWvn7FoAAABcqlzB5/HHH1diYqKzawEAAHCpcr3VdeHCBc2aNUsrVqxQy5Yt5ePj47B88uTJTikOAADAmcoVfHbs2KHo6GhJUkpKisMyLnQGAAAVVbmCz+rVq51dBwAAgMuV6xofAACAyqhcZ3xuv/32S76ltWrVqnIXBAAA4CrlCj6F1/cUysvL07Zt25SSklLkw0sBAAAqinIFnylTphQ7PmbMGJ05c+ayCgIAAHAVp17j06tXLz6nCwAAVFhODT5JSUny9/d35pQAAABOU663uu6//36Hx8YYHTt2TJs3b9bIkSOdUhgAAICzlSv4hISEODz28vJS06ZNNW7cOHXu3NkphQEAADhbuYLP3LlznV0HAACAy5Ur+BTasmWLUlNTJUktWrTQ9ddf75SiAAAAXKFcwef48eN66KGHtGbNGlWvXl2SlJmZqdtvv10ffPCB6tSp48waAQAAnKJcd3UNHjxYp0+f1g8//KBTp07p1KlTSklJUXZ2toYMGeLsGgEAAJyiXGd8li1bphUrVigqKso+1rx5c02fPp2LmwEAQIVVrjM+BQUF8vHxKTLu4+OjgoKCyy4KAADAFcoVfO644w49/fTTOnr0qH3syJEjGjp0qDp27Oi04gAAAJypXMHnP//5j7Kzs9WwYUNdffXVuvrqq9WoUSNlZ2dr2rRpzq4RAADAKcp1jU9kZKS2bt2qFStWaPfu3ZKkqKgoxcbGOrU4AAAAZyrTGZ9Vq1apefPmys7Ols1mU6dOnTR48GANHjxYN910k1q0aKF169a5qlYAAIDLUqbgk5CQoAEDBig4OLjIspCQEP3jH//Q5MmTnVYcAACAM5Up+Gzfvl133nlnics7d+6sLVu2XHZRAAAArlCm4JORkVHsbeyFqlSpol9++eWyiwIAAHCFMgWfevXqKSUlpcTlO3bsUHh4+GUXBQAA4AplCj5/+9vfNHLkSF24cKHIsvPnz2v06NHq0qWL04oDAABwpjLdzv7SSy/pk08+UZMmTTRo0CA1bdpUkrR7925Nnz5d+fn5evHFF11SKAAAwOUqU/AJDQ3V+vXr9eSTT2rEiBEyxkiSbDab4uLiNH36dIWGhrqkUAAAgMtV5t/cfNVVV2np0qU6ceKENm7cqA0bNujEiRNaunSpGjVqVO5CXn31VdlsNj3zzDP2sQsXLig+Pl61atVSYGCgHnjgAWVkZJT7OQAAgLWV6yMrJKlGjRq66aab1KZNG9WoUeOyikhOTtbbb7+tli1bOowPHTpUn3/+uT788EOtXbtWR48e1f33339ZzwUAAKyr3MHHWc6cOaNHHnlE77zzjkOAysrK0uzZszV58mTdcccduvHGGzV37lytX79eGzZscGPFAACgsirXZ3U5U3x8vO6++27FxsZq/Pjx9vEtW7YoLy/P4fO/mjVrpgYNGigpKUk333xzsfPl5OQoJyfH/jg7O1uSlJeXp7y8PBd1cXkK66qo9TmDp/fo6f1JFbdHX19fp85T2vkq2n74MxX1+DmTp/dotf5c1afNFF6h7AYffPCBJkyYoOTkZPn7++u2225TdHS0EhISlJiYqH79+jmEGEl
q06aNbr/9dk2cOLHYOceMGaOxY8cWGU9MTFTVqlVd0gcAAHCuc+fOqWfPnsrKyir2o7LKy21nfA4fPqynn35ay5cvl7+/v9PmHTFihIYNG2Z/nJ2drcjISHXu3NmpO86Z8vLytHz5cnXq1OmSvxm7MvP0Hj29P6ni9tihQwenzOPr66uhQ4dqypQpys3N/dP1165d65TnvVIq6vFzJk/v0Wr9Fb5j42xuCz5btmzR8ePHdcMNN9jH8vPz9b///U//+c9/9PXXXys3N1eZmZmqXr26fZ2MjAyFhYWVOK+fn5/8/PyKjPv4+FT4F0plqPFyeXqPnt6fVPF6LE1IKet8pZmzIu2Dsqhox88VPL1Hq/Tnqh7dFnw6duyonTt3Ooz169dPzZo10/DhwxUZGSkfHx+tXLlSDzzwgCQpLS1Nhw4dUkxMjDtKBgAAlZzbgk9QUJD+8pe/OIxVq1ZNtWrVso/3799fw4YNU82aNRUcHKzBgwcrJiamxAubAQAALsXtd3VdypQpU+Tl5aUHHnhAOTk5iouL04wZM9xdFgAAqKQqVPBZs2aNw2N/f39Nnz5d06dPd09BAADAo7j9FxgCAABcKQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGQQfAABgGVXcXQAAVEatW7cu97abN292YiUAyoIzPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDIIPgAAwDLcGnxeeeUV3XTTTQoKClLdunXVrVs3paWlOaxz4cIFxcfHq1atWgoMDNQDDzygjIwMN1UMAAAqM7cGn7Vr1yo+Pl4bNmzQ8uXLlZeXp86dO+vs2bP2dYYOHarPP/9cH374odauXaujR4/q/vvvd2PVAACgsqrizidftmyZw+N58+apbt262rJli2699VZlZWVp9uzZSkxM1B133CFJmjt3rqKiorRhwwbdfPPN7igbAABUUm4NPhfLysqSJNWsWVOStGXLFuXl5Sk2Nta+TrNmzdSgQQMlJSUVG3xycnKUk5Njf5ydnS1JysvLU15enivLL7fCuipqfc7g6T16en9Sxe3R19fXqfM4a75Lccc+rKjHz5k8vUer9eeqPm3GGOOSmcuooKBA9957rzIzM/Xtt99KkhITE9WvXz+HICNJbdq00e23366JEycWmWfMmDEaO3ZskfHExERVrVrVNcUDAACnOnfunHr27KmsrCwFBwc7bd4Kc8YnPj5eKSkp9tBTXiNGjNCwYcPsj7OzsxUZGanOnTs7dcc5U15enpYvX65OnTrJx8fH3eW4hKf36On9SRW3xw4dOjhlHl9fXw0dOlRTpkxRbm6uU+Ysydq1a106f3Eq6vFzJk/v0Wr9Fb5j42wVIvgMGjRIX3zxhf73v/+pfv369vGwsDDl5uYqMzNT1atXt49nZGQoLCys2Ln8/Pzk5+dXZNzHx6fCv1AqQ42Xy9N79PT+pIrXo7NDSm5ursuDjzv3X0U7fq7g6T1apT9X9ejWu7qMMRo0aJAWL16sVatWqVGjRg7Lb7zxRvn4+GjlypX2sbS0NB06dEg
xMTFXulwAAFDJufWMT3x8vBITE/Xpp58qKChI6enpkqSQkBAFBAQoJCRE/fv317Bhw1SzZk0FBwdr8ODBiomJ4Y4uAABQZm4NPjNnzpQk3XbbbQ7jc+fOVd++fSVJU6ZMkZeXlx544AHl5OQoLi5OM2bMuMKVAgAAT+DW4FOaG8r8/f01ffp0TZ8+/QpUBAAAPBmf1QUAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyD4AMAACyjQnw6OwBYSevWrcu97ebNm51YCWA9nPEBAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWQfABAACWUcXdBQBA69at3V0CAIvgjA8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMgg8AALAMfo8PAFQi5f2dR76+vho+fLiTqwEqH874AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAy6ji7gIAeIbWrVu7uwQA+FOc8QEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJZB8AEAAJbB7/EBAPypy/k9TZs3b3ZiJcDl4YwPAACwDIIPAACwDIIPAACwDK7xAS6htNc1+Pr6avjw4erQoYNyc3NdXJXrcC0GAE/HGR8AAGAZBB8AAGAZBB8AAGAZleIan+nTp+u1115Tenq6WrVqpWnTpqlNmzbuLqvS/l6Lylo3gMvnjuvQ3PU9h+91pWelfVXhz/gsWrRIw4YN0+jRo7V161a1atVKcXFxOn78uLtLAwAAlUyFDz6TJ0/WgAED1K9fPzVv3lxvvfWWqlatqjlz5ri7NAAAUMlU6Le6cnNztWXLFo0YMcI+5uXlpdjYWCUlJRW7TU5OjnJycuyPs7KyJEmnTp1SXl6eU+vz8ip/bjx58qT973l5eTp37pxOnjwpHx8fZ5R2Sc6quyyudI/OUtp95eXlpXPnzsnLy+uy9q+7Xer4/tkxrMx9S55zDEtSWfsry/eci1+j7vhe50qu/D5aEfbVxf2dPn1akmSMccr8dqYCO3LkiJFk1q9f7zD+3HPPmTZt2hS7zejRo40kvvjiiy+++OLLA74OHz7s1GxRoc/4lMeIESM0bNgw++OCggKdOnVKtWrVks1mc2NlJcvOzlZkZKQOHz6s4OBgd5fjEp7eo6f3J3l+j/RX+Xl6j1brzxij06dPKyIiwqnPU6GDT+3ateXt7a2MjAyH8YyMDIWFhRW7jZ+fn/z8/BzGqlev7qoSnSo4ONgjX8x/5Ok9enp/kuf3SH+Vn6f3aKX+QkJCnD5/hX6j19fXVzfeeKNWrlxpHysoKNDKlSsVExPjxsoAAEBlVKHP+EjSsGHD1KdPH7Vu3Vpt2rRRQkKCzp49q379+rm7NAAAUMlU+ODz4IMP6pdfftGoUaOUnp6u6OhoLVu2TKGhoe4uzWn8/Pw0evToIm/ReRJP79HT+5M8v0f6q/w8vUf6cw6bMc6+TwwAAKBiqtDX+AAAADgTwQcAAFgGwQcAAFgGwQcAAFgGwecKOXXqlB555BEFBwerevXq6t+/v86cOXPJbS5cuKD4+HjVqlVLgYGBeuCBB4r8Msfk5GR17NhR1atXV40aNRQXF6ft27e7spViuao/SZo3b55atmwpf39/1a1bV/Hx8a5q45Jc2aP0++fd1K9fXzabTZmZmS7o4NJc0d/27dv18MMPKzIyUgEBAYqKitKbb77p6lYkSdOnT1fDhg3l7++vtm3batOmTZdc/8MPP1SzZs3k7++v6667TkuXLnVYbozRqFGjFB4
eroCAAMXGxmrv3r2ubOFPObPHvLw8DR8+XNddd52qVaumiIgIPfroozp69Kir2yiRs4/hHz3xxBOy2WxKSEhwctWl54r+UlNTde+99yokJETVqlXTTTfdpEOHDrmqhT/l7B7PnDmjQYMGqX79+goICLB/eHmZOPUDMFCiO++807Rq1cps2LDBrFu3zlxzzTXm4YcfvuQ2TzzxhImMjDQrV640mzdvNjfffLNp166dffnp06dNzZo1Td++fc3u3btNSkqKeeCBB0xoaKjJzc11dUsOXNGfMca88cYbJiIiwixYsMDs27fPbN++3Xz66aeubKVEruqxUNeuXc1dd91lJJlff/3VBR1cmiv6mz17thkyZIhZs2aN2b9/v3nvvfdMQECAmTZtmkt7+eCDD4yvr6+ZM2eO+eGHH8yAAQNM9erVTUZGRrHrf/fdd8bb29tMmjTJ7Nq1y7z00kvGx8fH7Ny5077Oq6++akJCQsySJUvM9u3bzb333msaNWpkzp8/79JeSuLsHjMzM01sbKxZtGiR2b17t0lKSjJt2rQxN95445Vsy84Vx7DQJ598Ylq1amUiIiLMlClTXNxJ8VzR3759+0zNmjXNc889Z7Zu3Wr27dtnPv300xLndDVX9DhgwABz9dVXm9WrV5sDBw6Yt99+23h7e5fp5wLB5wrYtWuXkWSSk5PtY1999ZWx2WzmyJEjxW6TmZlpfHx8zIcffmgfS01NNZJMUlKSMcaY5ORkI8kcOnTIvs6OHTuMJLN3714XdVOUq/o7deqUCQgIMCtWrHBtA6Xgqh4LzZgxw3To0MGsXLnSLcHH1f390VNPPWVuv/125xVfjDZt2pj4+Hj74/z8fBMREWFeeeWVYtfv0aOHufvuux3G2rZta/7xj38YY4wpKCgwYWFh5rXXXrMvz8zMNH5+fmbhwoUu6ODPObvH4mzatMlIMj/99JNzii4DV/X3888/m3r16pmUlBRz1VVXuS34uKK/Bx980PTq1cs1BZeDK3ps0aKFGTdunMM6N9xwg3nxxRdLXRdvdV0BSUlJql69ulq3bm0fi42NlZeXlzZu3FjsNlu2bFFeXp5iY2PtY82aNVODBg2UlJQkSWratKlq1aql2bNnKzc3V+fPn9fs2bMVFRWlhg0burSnP3JVf8uXL1dBQYGOHDmiqKgo1a9fXz169NDhw4dd21AxXNWjJO3atUvjxo3T/Pnz5eXlnn+SruzvYllZWapZs6bzir9Ibm6utmzZ4lCXl5eXYmNjS6wrKSnJYX1JiouLs69/4MABpaenO6wTEhKitm3bXrJXV3FFj8XJysqSzWa74p936Kr+CgoK1Lt3bz333HNq0aKFa4ovBVf0V1BQoC+//FJNmjRRXFyc6tatq7Zt22rJkiUu6+NSXHUM27Vrp88++0xHjhyRMUarV6/Wnj171Llz51LXRvC5AtLT01W3bl2HsSpVqqhmzZpKT08vcRtfX98i33BCQ0Pt2wQFBWnNmjV6//33FRAQoMDAQC1btkxfffWVqlS5cr+U21X9/fjjjyooKNC///1vJSQk6KOPPtKpU6fUqVMn5ebmuqSXkriqx5ycHD388MN67bXX1KBBA5fUXhqu6u9i69ev16JFizRw4ECn1F2cEydOKD8/v8hvd79UXenp6Zdcv/DPsszpSq7o8WIXLlzQ8OHD9fDDD1/xD8R0VX8TJ05UlSpVNGTIEOcXXQau6O/48eM6c+aMXn31Vd1555365ptvdN999+n+++/X2rVrXdPIJbjqGE6bNk3NmzdX/fr15evrqzvvvFPTp0/XrbfeWuraCD6X4YUXXpDNZrvk1+7du132/OfPn1f//v3Vvn17bdiwQd99953+8pe/6O6779b58+cve35391dQUKC8vDxNnTpVcXFxuvnmm7Vw4ULt3btXq1evdspzuLvHESNGKCoqSr169XLJ/O7u749SUlLUtWtXjR49ukz/O8OVl5eXpx49esgYo5kzZ7q
7HKfYsmWL3nzzTc2bN082m83d5ThdQUGBJKlr164aOnSooqOj9cILL6hLly5lv/i3Aps2bZo2bNigzz77TFu2bNEbb7yh+Ph4rVixotRzVPjP6qrInn32WfXt2/eS6zRu3FhhYWE6fvy4w/hvv/2mU6dOKSwsrNjtwsLClJubq8zMTIf/UWdkZNi3SUxM1MGDB5WUlGR/iyQxMVE1atTQp59+qoceeqj8zcn9/YWHh0uSmjdvbl9ep04d1a5d22l3Kbi7x1WrVmnnzp366KOPJP1+55Ak1a5dWy+++KLGjh1bzs5+5+7+Cu3atUsdO3bUwIED9dJLL5Wrl9KqXbu2vL29i9w9V1xdhcLCwi65fuGfGRkZ9tdl4ePo6GgnVl86ruixUGHo+emnn7Rq1aorfrZHck1/69at0/Hjxx3OrObn5+vZZ59VQkKCDh486NwmLsEV/dWuXVtVqlRx+H4pSVFRUfr222+dWH3puKLH8+fP61//+pcWL16su+++W5LUsmVLbdu2Ta+//nqRt8lKVOqrgVBuhReObt682T729ddfl+rC0Y8++sg+tnv3bocLR6dOnWrCwsJMQUGBfZ28vDxTrVo1s2DBAhd1U5Sr+ktLSzOSHC5uPnnypPHy8jJff/21i7opnqt63Ldvn9m5c6f9a86cOUaSWb9+/RW9E8NV/RljTEpKiqlbt6557rnnXNfARdq0aWMGDRpkf5yfn2/q1at3yYsqu3Tp4jAWExNT5OLm119/3b48KyvL7Rc3O7NHY4zJzc013bp1My1atDDHjx93TeGl5Oz+Tpw44fBvbefOnSYiIsIMHz7c7N6923WNlMAVxy8mJqbIxc3dunX707szXcXZPWZlZRlJZunSpQ7rDBw40HTq1KnUdRF8rpA777zTXH/99Wbjxo3m22+/Nddee63Di/Hnn382TZs2NRs3brSPPfHEE6ZBgwZm1apVZvPmzSYmJsbExMTYl6empho/Pz/z5JNPml27dpmUlBTTq1cvExISYo4ePVrp+zPm91u8W7RoYb777juzc+dO06VLF9O8efMrfru+Ma7r8Y9Wr17t1tvZnd3fzp07TZ06dUyvXr3MsWPH7F+u/qH6wQcfGD8/PzNv3jyza9cuM3DgQFO9enWTnp5ujDGmd+/e5oUXXrCv/91335kqVaqY119/3aSmpprRo0cXezt79erVzaeffmp27Nhhunbt6vbb2Z3ZY25urrn33ntN/fr1zbZt2xyOV05OTqXvrzjuvKvLFf198sknxsfHx8yaNcvs3bvXTJs2zXh7e5t169Zd8f6McU2PHTp0MC1atDCrV682P/74o5k7d67x9/c3M2bMKHVdBJ8r5OTJk+bhhx82gYGBJjg42PTr18+cPn3avvzAgQNGklm9erV97Pz58+app54yNWrUMFWrVjX33XefOXbsmMO833zzjWnfvr0JCQkxNWrUMHfcccclbyV2FVf1l5WVZR577DFTvXp1U7NmTXPfffc53L5/Jbmqxz9yZ/BxRX+jR482kop8XXXVVS7vZ9q0aaZBgwbG19fXtGnTxmzYsMG+rEOHDqZPnz4O6//3v/81TZo0Mb6+vqZFixbmyy+/dFheUFBgRo4caUJDQ42fn5/p2LGjSUtLc3kfl+LMHguPb3FffzzmV5Kzj+HF3Bl8jHFNf7NnzzbXXHON8ff3N61atTJLlixxdRuX5Owejx07Zvr27WsiIiKMv7+/adq0qXnjjTcc3vn4MzZj/v9FBQAAAB6Ou7oAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAAIBlEHwAD3Dw4EHZbDZt27atxHUaNmyohIQEpz5v37591a1bN6fOifK57bbb9Mwzz5S4fMyYMU7/QNU1a9bIZrMpMzPTqfNKUu/evfXvf//7sucpTd8vvPCCBg8efNnPhcqB4IMrrqRv0PPmzXP4lG9PV5qwUtG9+eabmjdvnv3xn/3
wdZZ58+bJZrPJZrPJ29tbNWrUUNu2bTVu3DhlZWW5/Pmd5UrtL1dp166djh07ppCQEEnO+ze8fft2LV26VEOGDLnsuUrjn//8p9599139+OOPV+T54F4EH6CM8vLy3F1ChRESEuK2sBocHKxjx47p559/1vr16zVw4EDNnz9f0dHROnr0qMueNzc312VzVza+vr4KCwuTzWZz6rzTpk1T9+7dFRgYWO45jDH67bffSrVu7dq1FRcXp5kzZ5b7+VB5EHxQYRW+jfL6668rPDxctWrVUnx8vEPwyMnJ0T//+U/Vq1dP1apVU9u2bbVmzRr78sL/gX799deKiopSYGCg7rzzTh07dszhuebMmaMWLVrIz89P4eHhGjRokH2ZzWbTzJkzde+996patWoaP368rrnmGr3++usOc2zbtk02m0379u1z2O6uu+5SQECAGjdurI8++si+fqNGjSRJ119/vWw2m2677Tb7sv/7v/9TVFSU/P391axZM82YMcPhuTZt2qTrr79e/v7+at26tb7//vsy799Dhw6pa9euCgwMVHBwsHr06KGMjAyHdcaPH6+6desqKChIjz/+uF544QWHtw3++FZX3759tXbtWr355pv2szEHDx7Ur7/+qkceeUR16tRRQECArr32Ws2dO7fM9V7MZrMpLCxM4eHhioqKUv/+/bV+/XqdOXNGzz//vH29goICvfLKK2rUqJECAgLUqlUrh+MgST/88IO6dOmi4OBgBQUF6ZZbbtH+/fsdepwwYYIiIiLUtGlTjRs3Tn/5y1+K1BQdHa2RI0c6bDd27FjVqVNHwcHBeuKJJ+zBqaT9JUkpKSm66667FBgYqNDQUPXu3VsnTpywP8/Zs2f16KOPKjAwUOHh4XrjjTfKvP8KCgo0btw41a9fX35+foqOjtayZcsc1lm/fr2io6Ptr7MlS5Y4nKX841tda9asUb9+/ZSVlWXvZ8yYMZKkGTNm6Nprr5W/v79CQ0P197//vcS68vPz9dFHH+mee+5xGH/vvffUunVrBQUFKSwsTD179tTx48ftywtr+eqrr3TjjTfKz89P3377rX3522+/rcjISFWtWlU9evQocmbwnnvu0QcffFDm/YhKqPyfuQqUT4cOHczTTz9dZHzu3LkmJCTE/rhPnz4mODjYPPHEEyY1NdV8/vnnpmrVqmbWrFn2dR5//HHTrl0787///c/s27fPvPbaa8bPz8/s2bPHPqePj4+JjY01ycnJZsuWLSYqKsr07NnTPseMGTOMv7+/SUhIMGlpaWbTpk0On9gsydStW9fMmTPH7N+/3/z0009mwoQJpnnz5g71DxkyxNx6660O29WqVcu88847Ji0tzbz00kvG29vb7Nq1yxhjzKZNm4wks2LFCnPs2DFz8uRJY4wx77//vgkPDzcff/yx+fHHH83HH39satasaebNm2eMMeb06dOmTp06pmfPniYlJcV8/vnnpnHjxkaS+f7770vc73/8JOr8/HwTHR1t/vrXv5rNmzebDRs2mBtvvNF06NDBvv77779v/P39zZw5c0xaWpoZO3asCQ4ONq1atXI4Rl27djXGGJOZmWliYmLMgAEDzLFjx8yxY8fMb7/9ZuLj4010dLRJTk42Bw4cMMuXLzefffZZiXWWxsWvlT96+umnTVBQkPntt9+MMcaMHz/eNGvWzCxbtszs37/fzJ071/j5+Zk1a9YYY4z5+eefTc2aNc39999vkpOTTVpampkzZ47ZvXu3vcfAwEDTu3dvk5KSYlJSUszhw4eNl5eX2bRpk/15t27damw2m9m/f7/Ddg8++KBJSUkxX3zxhalTp47517/+dcn99euvv5o6deqYESNGmNTUVLN161bTqVMnc/vtt9uf68knnzQNGjQwK1asMDt27DBdunQxQUFBxf67KjR69GiHYzd58mQTHBxsFi5caHbv3m2ef/554+PjY/+3k5WVZWrWrGl69eplfvjhB7N06VLTpEkTh9f
Z6tWrjSTz66+/mpycHJOQkGCCg4Pt/Zw+fdokJycbb29vk5iYaA4ePGi2bt1q3nzzzRLr3Lp1q5Fk0tPTHcZnz55tli5davbv32+SkpJMTEyMueuuu+zLC2tp2bKl+eabb8y+ffvMyZMnzejRo021atXMHXfcYb7//nuzdu1ac8011zh8DzDGmNTUVCPJHDhwoMTa4BkIPrjiyhJ8rrrqKvsPMGOM6d69u3nwwQeNMcb89NNPxtvb2xw5csRhno4dO5oRI0bY55Rk9u3bZ18+ffp0Exoaan8cERFhXnzxxRLrlWSeeeYZh7EjR44Yb29vs3HjRmOMMbm5uaZ27dr2cFK43RNPPOGwXdu2bc2TTz5pjDHmwIEDxYaVq6++2iQmJjqMvfzyyyYmJsYYY8zbb79tatWqZc6fP29fPnPmzDIFn2+++cZ4e3ubQ4cO2Zf/8MMPRpL9h3nbtm1NfHy8wxzt27cvMfgYU/yxveeee0y/fv1KrKs8LhV8CvdFRkaGuXDhgqlatapZv369wzr9+/c3Dz/8sDHGmBEjRphGjRqZ3NzcYufr06ePCQ0NNTk5OQ7jd911l/1YGmPM4MGDzW233eawXc2aNc3Zs2cdagsMDDT5+fnGmOL318svv2w6d+7sMHb48GEjyaSlpZnTp08bX19f89///te+/OTJkyYgIKBMwSciIsJMmDDBYZ2bbrrJPPXUU/ZaL36dvfPOOyUGH2OKPy4ff/yxCQ4ONtnZ2SXW9keLFy823t7epqCg4JLrJScnG0nm9OnTDrUsWbKkSN/e3t7m559/to999dVXxsvLyxw7dsw+lpWVZSTZAzE8F291oUJr0aKFvL297Y/Dw8Ptp7d37typ/Px8NWnSRIGBgfavtWvX2t+mkKSqVavq6quvLnaO48eP6+jRo+rYseMl62jdurXD44iICN19992aM2eOJOnzzz9XTk6Ounfv7rBeTExMkcepqaklPs/Zs2e1f/9+9e/f36Gn8ePH23tKTU1Vy5Yt5e/vX+Lz/JnU1FRFRkYqMjLSPta8eXNVr17dXl9aWpratGnjsN3Fj0vjySef1AcffKDo6Gg9//zzWr9+fYnrLliwwKHvdevWlfn5jDGSZH/b8dy5c+rUqZPDvPPnz7fvz23btumWW26Rj49PiXNed9118vX1dRgbMGCAFi5cqAsXLig3N1eJiYl67LHHHNZp1aqVqlatan8cExOjM2fO6PDhwyU+1/bt27V69WqHeps1ayZJ2r9/v/bv36/c3Fy1bdvWvk3NmjXVtGnTUu4hKTs7W0ePHlX79u0dxtu3b+9w/C9+nZXn+Hfq1ElXXXWVGjdurN69e2vBggU6d+5cieufP39efn5+Ra4b2rJli+655x41aNBAQUFB6tChg6Tf37L9o4v/rUpSgwYNVK9ePfvjmJgYFRQUKC0tzT4WEBAgSZesDZ6hirsLgPUEBwcXe+dNZmam/e6QQhf/MLLZbCooKJAknTlzRt7e3tqyZYtDOJLkcFFkcXMU/nAs/Gb3Z6pVq1Zk7PHHH1fv3r01ZcoUzZ07Vw8++KDDD7nyOHPmjCTpnXfecfjBJqlIj5XFXXfdpZ9++klLly7V8uXL1bFjR8XHxxe5RkqS7r33Xoe+//jDqrRSU1MVHBysWrVq2e/S+fLLL4vM5efnJ6l0r4Hijv8999wjPz8/LV68WL6+vsrLy7vktSuldebMGd1zzz2aOHFikWXh4eH2a8gqi6CgIG3dulVr1qzRN998o1GjRmnMmDFKTk4u9sL42rVr69y5c8rNzbWHzbNnzyouLk5xcXFasGCB6tSpo0OHDikuLq7IxebFHavSOHXqlCSpTp065doelQdnfHDFNW3aVFu3bi0yvnXrVjVp0qTU81x//fXKz8/X8ePHdc011zh8hYWFlWqOoKAgNWzYUCtXriz18xb629/+pmrVqmnmzJlatmxZkf/tS9KGDRuKPI6KipIk+zf1/Px8+/LQ0FBFRET
oxx9/LNJT4cXQUVFR2rFjhy5cuFDi8/yZqKgoHT582OHMw65du5SZmanmzZtL+v04JScnO2x38eOL+fr6OvRTqE6dOurTp4/ef/99JSQkaNasWcVuHxQU5NBzaYNpoePHjysxMVHdunWTl5eXmjdvLj8/Px06dKjI/iw829WyZUutW7euzHfrValSRX369NHcuXM1d+5cPfTQQ0Xq3b59u86fP29/vGHDBgUGBtqfu7j9dcMNN+iHH35Qw4YNi9RcrVo1XX311fLx8dHGjRvt2/z666/as2dPqWsPDg5WRESEvvvuO4fx7777zuH479y5Uzk5Ofbl5T3+VapUUWxsrCZNmqQdO3bo4MGDWrVqVbFzFF48v2vXLvvY7t27dfLkSb366qu65ZZb1KxZM4cLm//MoUOHHO7027Bhg7y8vBzOkqWkpMjHx0ctWrQo9byonAg+uOKefPJJ7dmzR0OGDNGOHTuUlpamyZMna+HChXr22WdLPU+TJk30yCOP6NFHH9Unn3yiAwcOaNOmTXrllVf05ZdflnqeMWPG6I033tDUqVO1d+9ebd26VdOmTfvT7by9vdW3b1+NGDFC1157bbFvN3344YeaM2eO9uzZo9GjR2vTpk32O8bq1q2rgIAALVu2TBkZGfazYGPHjtUrr7yiqVOnas+ePdq5c6fmzp2ryZMnS5J69uwpm82mAQMGaNeuXVq6dGmxZ08uJTY2Vtddd50eeeQRbd26VZs2bdKjjz6qDh062N8qGDx4sGbPnq13331Xe/fu1fjx47Vjx45L3rrcsGFDbdy4UQcPHtSJEydUUFCgUaNG6dNPP9W+ffv0ww8/6IsvvrCHv8thjFF6erqOHTum1NRUzZkzR+3atVNISIheffVVSb8HqX/+858aOnSo3n33Xe3fv99+fN99911J0qBBg5Sdna2HHnpImzdv1t69e/Xee+85vA1Skscff1yrVq0qMfjm5uaqf//+9uM0evRoDRo0SF5eXiXur/j4eJ06dUoPP/ywkpOTtX//fn399dfq16+f8vPzFRgYqP79++u5557TqlWrlJKSor59+9rnLK3nnntOEydO1KJFi5SWlqYXXnhB27Zt09NPPy3p99dZQUGBBg4cqNTUVH399df211lJr4GGDRvqzJkzWrlypU6cOKFz587piy++0NSpU7Vt2zb99NNPmj9/vgoKCkp8a65OnTq64YYbHO7IatCggXx9fTVt2jT9+OOP+uyzz/Tyyy+Xuld/f3/16dNH27dv17p16zRkyBD16NHD4T9I69at0y233FLmsI1KyM3XGMGiNm3aZDp16mTq1KljQkJCTNu2bc3ixYsd1rn4wlljfr9j5493HuXm5ppRo0aZhg0bGh8fHxMeHm7uu+8+s2PHDmNM8RdbLl682Fz80n/rrbdM06ZN7XMMHjzYvkxSkdoK7d+/30gykyZNKrJMkpk+fbrp1KmT8fPzMw0bNjSLFi1yWOedd94xkZGRxsvLy6GvBQsWmOjoaOPr62tq1Khhbr31VvPJJ5/YlyclJZlWrVoZX19fEx0dbT7++OMyXdxszO8Xh997772mWrVqJigoyHTv3r3InTTjxo0ztWvXNoGBgeaxxx4zQ4YMMTfffLN9+cXHKC0tzdx8880mICDAfofMyy+/bKKiokxAQICpWbOm6dq1q/nxxx9LrLM0Ci9al2RsNpsJCQkxbdq0MePGjTNZWVkO6xYUFJiEhAT78a1Tp46Ji4sza9euta+zfft207lzZ1O1alUTFBRkbrnlFoe7sy5+Hf7RLbfcYlq0aFFkvHC7UaNGmVq1apnAwEAzYMAAc+HChUvuL2OM2bNnj7nvvvtM9erVTUBAgGnWrJl55pln7Bf8nj592vTq1ctUrVrVhIaGmkmTJpV400Chiy9uzs/PN2PGjDH16tUzPj4+plWrVuarr75y2Oa7774zLVu2NL6+vubGG280iYmJRpL9jreLL242xpgnnnjC1KpVy0g
yo0ePNuvWrTMdOnQwNWrUMAEBAaZly5ZF/h1cbMaMGQ6vM2OMSUxMNA0bNjR+fn4mJibGfPbZZ5e80PrivmfMmGEiIiKMv7+/+fvf/25OnTrlsF7Tpk3NwoULL1kXPIPNmP9/sQOAMlu3bp06duyow4cPKzQ01GGZzWbT4sWLPeojHTp16qSwsDC999577i6lQjDG6Nprr9VTTz2lYcOGOSzr27evMjMztWTJEvcU5wILFiyw/64eV54ZOX/+vJo2bapFixaV+cL98vjqq6/07LPPaseOHapShUtfPR1HGCiHnJwc/fLLLxozZoy6d+9eJPR4gnPnzumtt95SXFycvL29tXDhQq1YsULLly93d2kVwi+//KIPPvhA6enp6tevn7vLcYn58+ercePGqlevnrZv367hw4erR48eLn87KCAgQPPnz3f4pY2udPbsWc2dO5fQYxEcZaAcFi5cqP79+ys6Olrz5893dzkuYbPZtHTpUk2YMEEXLlxQ06ZN9fHHHys2NtbdpVUIdevWVe3atTVr1izVqFHD3eW4RHp6ukaNGqX09HSFh4ere/fumjBhwhV57j/+JnNXc8bdeKg8eKsLAABYBnd1AQAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAyyD4AAAAy/h/1Jo31vvVocwAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the best model according to logs...\n", + "{'weights': 'weights/resnet110_cifar10', 'model_type': 'resnet110', 'kwargs': {'num_classes': 10, 'debug': False}, 'params': {'epochs': 100, 'batch_size': 64, 'momentum': 0.9, 'weight_decay': 0.256, 'weight_decay_bias': 0.004, 'ema_update_freq': 5, 'ema_rho': 0.9509900498999999, 'model_type': 'resnet110', 'kwargs': {'num_classes': 10, 'debug': False}}, 'run0': 0.8956000000000001, 'run1': 0.8966000000000001, 'run2': 0.8966000000000001, 'run3': 0.896, 'run4': 0.8938, 'accuracy': [0.8957200000000001, 0.001032279032045122]}\n", + "\n", + "Average (5 runs): 89.572% +/- 0.103%\n", + "Best (idx 1): 0.897\n", + "resnet110 Mean Std. Percentage\n", + "conv 1160.000 1.197 41.60\n", + "gelu 234.000 2.097 8.40\n", + "bootstrapping 1366.000 5.036 49.00\n", + "residual 1.000 0.022 0.00\n", + "pool 27.000 0.099 1.00\n", + "linear 2.000 0.028 0.10\n", + "total 2790.000 6.853 100.00\n", + "rounded percent 100.100\n", + "logit res -0.000 0.021\n", + "\n", + "\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjIAAAGwCAYAAACzXI8XAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9kklEQVR4nO3deXhU5d3/8c8kzGQhmbCFJGgggMimEAWBuDQKgUgVsVBccAGKWDWCJLUqfZRNLC5VUBuk+kCQasBShYoFFIJElC3sqxEQBSUJCiRhMUuT+/dHf5mHIQtJSJg58H5d11z13Oec+3zPfGfIp2fOJDZjjBEAAIAF+Xi6AAAAgNoiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMsiyAAAAMtq4OkC6ltpaakOHz6s4OBg2Ww2T5cDAACqwRijEydOqEWLFvLxqfy6y0UfZA4fPqzIyEhPlwEAAGrh0KFDuvzyyytdf9EHmeDgYEn/fSKcTqeHq6me4uJiffbZZ+rXr5/sdruny8EZ6I13oi/eib54Lyv0Jj8/X5GRka6f45W56INM2cdJTqfTUkEmMDBQTqfTa19glyp6453oi3eiL97LSr05120h3OwLAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsy6NBJioqSjabrdwjISFBklRQUKCEhAQ1bdpUQUFBGjx4sHJycjxZMgAA8CIeDTIZGRnKyspyPZYvXy5JGjJkiCQpMTFRixcv1oIFC5Senq7Dhw9r0KBBniwZAAB4EY/+9evQ0FC35RdffFFt27ZVbGys8vLyNGvWLKWmpqp3796SpJSUFHXs2FHr1q1Tr169PFEyAADwIh4NMmcqKirSe++9p6SkJNlsNm3atEnFxcWKi4tzbdOhQwe1bNlSa9eurTTIFBYWqrCw0LWcn58v6b9/sry4uLh+T6KOlNVplXovJfTGO9EX70RfvJcVelPd2rwmyCxatEi5ubkaPny4JCk7O1sOh0ONGjVy2y4sLEzZ2dmVzjN16lRNmjSp3Phnn32mwMDAuiy53pV91AbvQ2+8E33xTvTFe3lzb06fPl2t7bwmyMyaNUv9+/dXixYtzmuecePGKSkpybWcn5+vyMhI9evXT06n83zLvCCKi4u1fPly9e3bV3a73dPl4Az0pn7ExsbWet/09HT64qXoi/eyQm/KPlE5F68IMt9//71WrFihjz76yDUWHh6uoqIi5ebmul2VycnJUXh4eKVz+fn5yc/Pr9y43W732mZVxoo1XyroTd0qKiqq9b5n9oG+eCf64r28uTfVrcsrfo9MSkqKmjdvrttuu8011q1bN9ntdqWlpbnGMjMzdfDgQcXExHiiTAAA4GU8fkWmtLRUKSkpGjZsmBo0+L9yQkJCNHLkSCUlJalJkyZyOp0aPXq0YmJi+MYSAACQ5AVBZsWKFTp48KB+97vflVs3bdo0+fj4aPDgwSosLFR8fLxmzJjhgSoBAIA38niQ6devn4wxFa7z9/dXcnKykpOTL3BVAADACrziHhkAAIDaIMgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADL8niQ+fHHH3X//feradOmCggI0NVXX62NGze61htjNH78eEVERCggIEBxcXHau3evBysGAADewqNB5vjx47rhhhtkt9u1dOlS7d69W6+
++qoaN27s2ubll1/WG2+8oZkzZ2r9+vVq2LCh4uPjVVBQ4MHKAQCAN2jgyYO/9NJLioyMVEpKimusdevWrv82xmj69Ol69tlnNXDgQEnS3LlzFRYWpkWLFumee+654DUDAADv4dEg8/HHHys+Pl5DhgxRenq6LrvsMj322GMaNWqUJOnAgQPKzs5WXFyca5+QkBD17NlTa9eurTDIFBYWqrCw0LWcn58vSSouLlZxcXE9n1HdKKvTKvVeSuhN/XA4HLXe98z3Nn3xLvTFe1mhN9WtzWaMMfVcS6X8/f0lSUlJSRoyZIgyMjL0xBNPaObMmRo2bJjWrFmjG264QYcPH1ZERIRrv7vuuks2m00ffPBBuTknTpyoSZMmlRtPTU1VYGBg/Z0MAACoM6dPn9bQoUOVl5cnp9NZ6XYeDTIOh0Pdu3fXmjVrXGNjxoxRRkaG1q5dW6sgU9EVmcjISP38889VPhHepLi4WMuXL1ffvn1lt9s9XQ7OQG/qR2xsbK33TU9Ppy9eir54Lyv0Jj8/X82aNTtnkPHoR0sRERHq1KmT21jHjh314YcfSpLCw8MlSTk5OW5BJicnR9HR0RXO6efnJz8/v3Ljdrvda5tVGSvWfKmgN3WrqKio1vue2Qf64p3oi/fy5t5Uty6PfmvphhtuUGZmptvYN998o1atWkn6742/4eHhSktLc63Pz8/X+vXrFRMTc0FrBQAA3sejV2QSExN1/fXX689//rPuuusubdiwQW+//bbefvttSZLNZtPYsWM1ZcoUtWvXTq1bt9Zzzz2nFi1a6M477/Rk6QAAwAt4NMhcd911WrhwocaNG6fJkyerdevWmj59uu677z7XNk899ZROnTqlhx9+WLm5ubrxxhu1bNky143CAADg0uXRICNJt99+u26//fZK19tsNk2ePFmTJ0++gFUBAAAr8PifKAAAAKgtggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsggwAALAsj//1awA4H927d5fD4dDTTz+t2NhYFRUVVXvfjRs31mNlAC4ErsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADLIsgAAADL8miQmThxomw2m9ujQ4cOrvUFBQVKSEhQ06ZNFRQUpMGDBysnJ8eDFQMAAG/i8SsynTt3VlZWluvx5ZdfutYlJiZq8eLFWrBggdLT03X48GENGjTIg9UCAABv0sDjBTRooPDw8HLjeXl5mjVrllJTU9W7d29JUkpKijp27Kh169apV69eFc5XWFiowsJC13J+fr4kqbi4WMXFxfVwBnWvrE6r1HspoTf1w+Fw1Mn+NZ2HPtYv3i/eywq9qW5tNmOMqedaKjVx4kS98sorCgkJkb+/v2JiYjR16lS1bNlSK1euVJ8+fXT8+HE1atTItU+rVq00duxYJSYmVjrnpEmTyo2npqYqMDCwvk4FAADUodOnT2vo0KHKy8uT0+msdDuPXpHp2bOn5syZo/bt2ysrK0uTJk3STTfdpJ07dyo7O1sOh8MtxEhSWFiYsrOzK51z3LhxSkpKci3n5+crMjJS/fr1q/KJ8CbFxcVavny5+vbtK7vd7ulycAZ6Uz9iY2PPa3+Hw6HExERNmzZNRUVF1d4vPT39vI6LqvF+8V5W6E3ZJyrn4tEg079/f9d/d+nSRT179lSrVq30j3/8QwEBAbWa08/PT35+fuXG7Xa71zarMlas+VJBb+pWTcLHueapyVz08MLg/eK9vLk31a3L4zf7nqlRo0a68sortW/fPoWHh6uoqEi5ubl
u2+Tk5FR4Tw0AALj0eFWQOXnypPbv36+IiAh169ZNdrtdaWlprvWZmZk6ePCgYmJiPFglAADwFh79aOnJJ5/UgAED1KpVKx0+fFgTJkyQr6+v7r33XoWEhGjkyJFKSkpSkyZN5HQ6NXr0aMXExFT6jSUAAHBp8WiQ+eGHH3Tvvffq6NGjCg0N1Y033qh169YpNDRUkjRt2jT5+Pho8ODBKiwsVHx8vGbMmOHJkgEAgBfxaJCZP39+lev9/f2VnJys5OTkC1QRAACwEq+6RwYAAKAmCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyvCbIvPjii7LZbBo7dqxrrKCgQAkJCWratKmCgoI0ePBg5eTkeK5IAADgVbwiyGRkZOhvf/ubunTp4jaemJioxYsXa8GCBUpPT9fhw4c1aNAgD1UJAAC8Ta2CTJs2bXT06NFy47m5uWrTpk2N5jp58qTuu+8+vfPOO2rcuLFrPC8vT7NmzdJrr72m3r17q1u3bkpJSdGaNWu0bt262pQNAAAuMg1qs9N3332nkpKScuOFhYX68ccfazRXQkKCbrvtNsXFxWnKlCmu8U2bNqm4uFhxcXGusQ4dOqhly5Zau3atevXqVeF8hYWFKiwsdC3n5+dLkoqLi1VcXFyj2jylrE6r1HspoTf1w+Fw1Mn+NZ2HPtYv3i/eywq9qW5tNQoyH3/8seu/P/30U4WEhLiWS0pKlJaWpqioqGrPN3/+fG3evFkZGRnl1mVnZ8vhcKhRo0Zu42FhYcrOzq50zqlTp2rSpEnlxj/77DMFBgZWuzZvsHz5ck+XgErQm7r19NNP18k8iYmJNdp+yZIldXJcVI33i/fy5t6cPn26WtvVKMjceeedkiSbzaZhw4a5rbPb7YqKitKrr75arbkOHTqkJ554QsuXL5e/v39NyqjSuHHjlJSU5FrOz89XZGSk+vXrJ6fTWWfHqU/FxcVavny5+vbtK7vd7ulycAZ6Uz9iY2PPa3+Hw6HExERNmzZNRUVF1d4vPT39vI6LqvF+8V5W6E3ZJyrnUqMgU1paKklq3bq1MjIy1KxZs5pX9v9t2rRJR44c0bXXXusaKykp0RdffKG//vWv+vTTT1VUVKTc3Fy3qzI5OTkKDw+vdF4/Pz/5+fmVG7fb7V7brMpYseZLBb2pWzUJH+eapyZz0cMLg/eL9/Lm3lS3rlrdI3PgwIHa7OamT58+2rFjh9vYiBEj1KFDBz399NOKjIyU3W5XWlqaBg8eLEnKzMzUwYMHFRMTc97HBwAA1lerICNJaWlpSktL05EjR1xXasrMnj37nPsHBwfrqquuchtr2LChmjZt6hofOXKkkpKS1KRJEzmdTo0ePVoxMTGV3ugLAAAuLbUKMpMmTdLkyZPVvXt3RUREyGaz1XVdkqRp06bJx8dHgwcPVmFhoeLj4zVjxox6ORYAALCeWgWZmTNnas6cOXrggQfqtJhVq1a5Lfv7+ys5OVnJycl1ehwAAHBxqNUvxCsqKtL1119f17UAAADUSK2CzEMPPaTU1NS6rgUAAKBGavXRUkFBgd5++22tWLFCXbp0KfcVqddee61OigMAAKhKrYLM9u3bFR0dLUnauXOn27r6uvEXAADgbLUKMp9//nld1wEAAFBjtbpHBgAAwBvU6orMLbfcUuVHSCtXrqx1QQAAANVVqyBTdn9MmeLiYm3dulU7d+4s98ckAQAA6kutgsy0adM
qHJ84caJOnjx5XgUBAABUV53eI3P//fdX6+8sAQAA1IU6DTJr166Vv79/XU4JAABQqVp9tDRo0CC3ZWOMsrKytHHjRj333HN1UhgAAMC51CrIhISEuC37+Pioffv2mjx5svr161cnhQEAAJxLrYJMSkpKXdcBAABQY7UKMmU2bdqkPXv2SJI6d+6sa665pk6KAgAAqI5aBZkjR47onnvu0apVq9SoUSNJUm5urm655RbNnz9foaGhdVkjAABAhWr1raXRo0frxIkT2rVrl44dO6Zjx45p586dys/P15gxY+q6RgAAgArV6orMsmXLtGLFCnXs2NE11qlTJyUnJ3OzLwAAuGBqFWRKS0tlt9vLjdvtdpWWlp53UQCsp3v37p4uAcAlqFYfLfXu3VtPPPGEDh8+7Br78ccflZiYqD59+tRZcQAAAFWpVZD561//qvz8fEVFRalt27Zq27atWrdurfz8fL355pt1XSMAAECFavXRUmRkpDZv3qwVK1bo66+/liR17NhRcXFxdVocAABAVWp0RWblypXq1KmT8vPzZbPZ1LdvX40ePVqjR4/Wddddp86dO2v16tX1VSsAAICbGgWZ6dOna9SoUXI6neXWhYSE6Pe//71ee+21OisOAACgKjUKMtu2bdOtt95a6fp+/fpp06ZN510UAABAddQoyOTk5FT4tesyDRo00E8//XTeRQEAAFRHjYLMZZddpp07d1a6fvv27YqIiDjvogAAAKqjRkHm17/+tZ577jkVFBSUW/fLL79owoQJuv322+usOAAAgKrU6OvXzz77rD766CNdeeWVevzxx9W+fXtJ0tdff63k5GSVlJTof/7nf+qlUAAAgLPVKMiEhYVpzZo1evTRRzVu3DgZYyRJNptN8fHxSk5OVlhYWL0UCgAAcLYa/0K8Vq1aacmSJTp+/Lj27dsnY4zatWunxo0b10d9AAAAlarVb/aVpMaNG+u6666ry1oAAABqpFZ/awkAAMAbEGQAAIBlEWQAAIBlEWQAAIBlEWQAAIBlEWQAAIBlEWQAAIBleTTIvPXWW+rSpYucTqecTqdiYmK0dOlS1/qCggIlJCSoadOmCgoK0uDBg5WTk+PBigEAgDfxaJC5/PLL9eKLL2rTpk3auHGjevfurYEDB2rXrl2SpMTERC1evFgLFixQenq6Dh8+rEGDBnmyZAAA4EVq/Zt968KAAQPcll944QW99dZbWrdunS6//HLNmjVLqamp6t27tyQpJSVFHTt21Lp169SrVy9PlAwAALyIR4PMmUpKSrRgwQKdOnVKMTEx2rRpk4qLixUXF+fapkOHDmrZsqXWrl1baZApLCxUYWGhazk/P1+SVFxcrOLi4vo9iTpSVqdV6r2U0JvKORwOjx+7pjXQx/rF+8V7WaE31a3NZsr+hLWH7NixQzExMSooKFBQUJBSU1P161//WqmpqRoxYoRbKJGkHj166JZbbtFLL71U4XwTJ07UpEmTyo2npqYqMDCwXs4BAADUrdOnT2vo0KHKy8uT0+msdDuPX5Fp3769tm7dqry8PP3zn//UsGHDlJ6eXuv5xo0bp6SkJNdyfn6+IiMj1a9fvyqfCG9SXFys5cuXq2/fvrLb7Z4uB2egN5WLjY312LEdDocSExM1bdo0FRUVVXu/8/m3BufG+8V7WaE3ZZ+onIvHg4zD4dAVV1whSerWrZsyMjL0+uuv6+6771ZRUZFyc3PVqFEj1/Y5OTkKDw+vdD4/Pz/5+fmVG7fb7V7brMpYseZLBb0pryYBoj5rqEkd9PDC4P3ivby5N9Wty+t+j0xpaakKCwvVrVs32e12paWludZlZmbq4MGDiomJ8WCFAADAW3j0isy4cePUv39/tWzZUidOnFBqaqpWrVqlTz/9VCEhIRo5cqSSkpLUpEkTOZ1OjR49WjExMXxjCQAASPJwkDly5IgefPBBZWVlKSQkRF26dNGnn36qvn37SpKmTZsmHx8fDR48WIWFhYq
Pj9eMGTM8WTIAAPAiHg0ys2bNqnK9v7+/kpOTlZycfIEqAgAAVuJ198gAAABUF0EGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYVgNPFwAAntK9e/da77tx48Y6rARAbXFFBgAAWJZHg8zUqVN13XXXKTg4WM2bN9edd96pzMxMt20KCgqUkJCgpk2bKigoSIMHD1ZOTo6HKgYAAN7Eo0EmPT1dCQkJWrdunZYvX67i4mL169dPp06dcm2TmJioxYsXa8GCBUpPT9fhw4c1aNAgD1YNAAC8hUfvkVm2bJnb8pw5c9S8eXNt2rRJv/rVr5SXl6dZs2YpNTVVvXv3liSlpKSoY8eOWrdunXr16lVuzsLCQhUWFrqW8/PzJUnFxcUqLi6ux7OpO2V1WqXeSwm9qZzD4fD4sS9kDbwGzo33i/eyQm+qW5vNGGPquZZq27dvn9q1a6cdO3boqquu0sqVK9WnTx8dP35cjRo1cm3XqlUrjR07VomJieXmmDhxoiZNmlRuPDU1VYGBgfVZPgAAqCOnT5/W0KFDlZeXJ6fTWel2XvOtpdLSUo0dO1Y33HCDrrrqKklSdna2HA6HW4iRpLCwMGVnZ1c4z7hx45SUlORazs/PV2RkpPr161flE+FNiouLtXz5cvXt21d2u93T5eAM9KZysbGxHju2w+FQYmKipk2bpqKiogtyzPT09AtyHCvj/eK9rNCbsk9UzsVrgkxCQoJ27typL7/88rzm8fPzk5+fX7lxu93utc2qjBVrvlTQm/IuVIA4Vw0Xqg76X328X7yXN/emunV5xdevH3/8cX3yySf6/PPPdfnll7vGw8PDVVRUpNzcXLftc3JyFB4efoGrBAAA3sajQcYYo8cff1wLFy7UypUr1bp1a7f13bp1k91uV1pammssMzNTBw8eVExMzIUuFwAAeBmPfrSUkJCg1NRU/etf/1JwcLDrvpeQkBAFBAQoJCREI0eOVFJSkpo0aSKn06nRo0crJiamwm8sAQCAS4tHg8xbb70lSbr55pvdxlNSUjR8+HBJ0rRp0+Tj46PBgwersLBQ8fHxmjFjxgWuFAAAeCOPBpnqfPPb399fycnJSk5OvgAVAQAAK/GKm30BAABqgyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsy2v+aCQAWEn37t1rve/GjRvrsBLg0sYVGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFn8Zl8AuMD4rcBA3eGKDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCyPBpkvvvhCAwYMUIsWLWSz2bRo0SK39cYYjR8/XhEREQoICFBcXJz27t3rmWIBAIDX8WiQOXXqlLp27ark5OQK17/88st64403NHPmTK1fv14NGzZUfHy8CgoKLnClAADAGzXw5MH79++v/v37V7jOGKPp06fr2Wef1cCBAyVJc+fOVVhYmBYtWqR77rmnwv0KCwtVWFjoWs7Pz5ckFRcXq7i4uI7PoH6U1WmVei8l9KZyDofD48f2ZA0XipVee7xfvJcVelPd2mzGGFPPtVSLzWb
TwoULdeedd0qSvv32W7Vt21ZbtmxRdHS0a7vY2FhFR0fr9ddfr3CeiRMnatKkSeXGU1NTFRgYWB+lAwCAOnb69GkNHTpUeXl5cjqdlW7n0SsyVcnOzpYkhYWFuY2HhYW51lVk3LhxSkpKci3n5+crMjJS/fr1q/KJ8CbFxcVavny5+vbtK7vd7ulycIaLvTexsbGeLqFWHA6HEhMTNW3aNBUVFXm6nHqVnp7u6RKq7WJ/v1iZFXpT9onKuXhtkKktPz8/+fn5lRu32+1e26zKWLHmS8XF2hurh4CioiLLn8O5WPF1d7G+Xy4G3tyb6tbltV+/Dg8PlyTl5OS4jefk5LjWAQCAS5vXBpnWrVsrPDxcaWlprrH8/HytX79eMTExHqwMAAB4C49+tHTy5Ent27fPtXzgwAFt3bpVTZo0UcuWLTV27FhNmTJF7dq1U+vWrfXcc8+pRYsWrhuCAQDApc2jQWbjxo265ZZbXMtlN+kOGzZMc+bM0VNPPaVTp07p4YcfVm5urm688UYtW7ZM/v7+nioZAAB4EY8GmZtvvllVffvbZrNp8uTJmjx58gWsCgAAWIXX3iMDAABwLgQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQQZAABgWQ08XQCAutW9e3dPlwAAFwxXZAAAgGURZAAAgGURZAAAgGURZAAAgGVxsy9QhbNvnHU4HHr66acVGxuroqKiKvfduHFjnR0X8LTavCbL3i9AfeKKDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCxu9oUlnM/Nr+dz0y3gbaz4XqjOzfGV4f2Lc+GKDAAAsCyCDAAAsCyCDAAAsCyCDAAAsCxu9j0PVrzpzpMutd9We6mdL7wfr8kLw1PPs6d+m7inf55Z4opMcnKyoqKi5O/vr549e2rDhg2eLgkAAHgBrw8yH3zwgZKSkjRhwgRt3rxZXbt2VXx8vI4cOeLp0gAAgId5fZB57bXXNGrUKI0YMUKdOnXSzJkzFRgYqNmzZ3u6NAAA4GFefY9MUVGRNm3apHHjxrnGfHx8FBcXp7Vr11a4T2FhoQoLC13LeXl5kqRjx46puLi4Tuvz8al9Djx69Gil64qLi3X69GkdPXpUdru91sfwNufzfJ2Pqp7rczm7Zh8fH50+fVo+Pj4eOx+UR1+8U1305Xzev55ihX/rzv45U18/z87HiRMnJEnGmKo3NF7sxx9/NJLMmjVr3Mb/+Mc/mh49elS4z4QJE4wkHjx48ODBg8dF8Dh06FCVWcGrr8jUxrhx45SUlORaLi0t1bFjx9S0aVPZbDYPVlZ9+fn5ioyM1KFDh+R0Oj1dDs5Ab7wTffFO9MV7WaE3xhidOHFCLVq0qHI7rw4yzZo1k6+vr3JyctzGc3JyFB4eXuE+fn5+8vPzcxtr1KhRfZVYr5xOp9e+wC519MY70RfvRF+8l7f3JiQk5JzbePWHyQ6HQ926dVNaWpprrLS0VGlpaYqJifFgZQAAwBt49RUZSUpKStKwYcPUvXt39ejRQ9OnT9epU6c0YsQIT5cGAAA8zOuDzN13362ffvpJ48ePV3Z2tqKjo7Vs2TKFhYV5urR64+fnpwkTJpT7iAyeR2+8E33xTvTFe11MvbEZc67vNQEAAHgnr75HBgAAoCoEGQAAYFkEGQAAYFkEGQAAYFkEGQ85duyY7rvvPjmdTjVq1EgjR47UyZMnq9zn7bff1s033yyn0ymbzabc3Nw6mRf/pzbPX0FBgRISEtS0aVMFBQVp8ODB5X6Jo81mK/eYP39+fZ6K5SUnJysqKkr+/v7q2bOnNmzYUOX2CxYsUIcOHeTv76+rr75aS5YscVtvjNH48eMVERGhgIAAxcXFae/
evfV5Cheluu7L8OHDy703br311vo8hYtSTfqya9cuDR48WFFRUbLZbJo+ffp5z+lRdfJHkVBjt956q+natatZt26dWb16tbniiivMvffeW+U+06ZNM1OnTjVTp041kszx48frZF78n9o8f4888oiJjIw0aWlpZuPGjaZXr17m+uuvd9tGkklJSTFZWVmuxy+//FKfp2Jp8+fPNw6Hw8yePdvs2rXLjBo1yjRq1Mjk5ORUuP1XX31lfH19zcsvv2x2795tnn32WWO3282OHTtc27z44osmJCTELFq0yGzbts3ccccdpnXr1vShBuqjL8OGDTO33nqr23vj2LFjF+qULgo17cuGDRvMk08+aebNm2fCw8PNtGnTzntOTyLIeMDu3buNJJORkeEaW7p0qbHZbObHH3885/6ff/55hUHmfOe91NXm+cvNzTV2u90sWLDANbZnzx4jyaxdu9Y1JsksXLiw3mq/2PTo0cMkJCS4lktKSkyLFi3M1KlTK9z+rrvuMrfddpvbWM+ePc3vf/97Y4wxpaWlJjw83Lzyyiuu9bm5ucbPz8/MmzevHs7g4lTXfTHmv0Fm4MCB9VLvpaKmfTlTq1atKgwy5zPnhcZHSx6wdu1aNWrUSN27d3eNxcXFycfHR+vXr/e6eS8VtXn+Nm3apOLiYsXFxbnGOnTooJYtW2rt2rVu2yYkJKhZs2bq0aOHZs+efe4/TX+JKioq0qZNm9yeUx8fH8XFxZV7TsusXbvWbXtJio+Pd21/4MABZWdnu20TEhKinj17Vjon3NVHX8qsWrVKzZs3V/v27fXoo4/q6NGjdX8CF6na9MUTc9Ynr//Nvhej7OxsNW/e3G2sQYMGatKkibKzs71u3ktFbZ6/7OxsORyOcn+YNCwszG2fyZMnq3fv3goMDNRnn32mxx57TCdPntSYMWPq/Dys7ueff1ZJSUm5394dFhamr7/+usJ9srOzK9y+rAdl/1vVNqhaffRFkm699VYNGjRIrVu31v79+/WnP/1J/fv319q1a+Xr61v3J3KRqU1fPDFnfSLI1KFnnnlGL730UpXb7Nmz5wJVgzLe0JfnnnvO9d/XXHONTp06pVdeeYUgg0vePffc4/rvq6++Wl26dFHbtm21atUq9enTx4OVwSoIMnXoD3/4g4YPH17lNm3atFF4eLiOHDniNv6f//xHx44dU3h4eK2PX1/zWl199iU8PFxFRUXKzc11uyqTk5NT5XPes2dPPf/88yosLLwo/tZJXWrWrJl8fX3LffOrquc0PDy8yu3L/jcnJ0cRERFu20RHR9dh9Rev+uhLRdq0aaNmzZpp3759BJlqqE1fPDFnfeIemToUGhqqDh06VPlwOByKiYlRbm6uNm3a5Np35cqVKi0tVc+ePWt9/Pqa1+rqsy/dunWT3W5XWlqaaywzM1MHDx5UTExMpTVt3bpVjRs3JsRUwOFwqFu3bm7PaWlpqdLS0ip9TmNiYty2l6Tly5e7tm/durXCw8PdtsnPz9f69eur7BP+T330pSI//PCDjh496hY4Ubna9MUTc9YrT99tfKm69dZbzTXXXGPWr19vvvzyS9OuXTu3r/n+8MMPpn379mb9+vWusaysLLNlyxbzzjvvGEnmiy++MFu2bDFHjx6t9ryoWm368sgjj5iWLVualStXmo0bN5qYmBgTExPjWv/xxx+bd955x+zYscPs3bvXzJgxwwQGBprx48df0HOzkvnz5xs/Pz8zZ84cs3v3bvPwww+bRo0amezsbGOMMQ888IB55plnXNt/9dVXpkGDBuYvf/mL2bNnj5kwYUKFX79u1KiR+de//mW2b99uBg4cyNeva6iu+3LixAnz5JNPmrVr15oDBw6YFStWmGuvvda0a9fOFBQUeOQcraimfSksLDRbtmwxW7ZsMREREebJJ580W7ZsMXv37q32nN6EIOMhR48eNffee68JCgoyTqfTjBgxwpw4ccK1/sCBA0aS+fzzz11jEyZ
MMJLKPVJSUqo9L6pWm7788ssv5rHHHjONGzc2gYGB5je/+Y3JyspyrV+6dKmJjo42QUFBpmHDhqZr165m5syZpqSk5EKemuW8+eabpmXLlsbhcJgePXqYdevWudbFxsaaYcOGuW3/j3/8w1x55ZXG4XCYzp07m3//+99u60tLS81zzz1nwsLCjJ+fn+nTp4/JzMy8EKdyUanLvpw+fdr069fPhIaGGrvdblq1amVGjRrllT8svV1N+lL279jZj9jY2GrP6U1sxvAdUAAAYE3cIwMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIAMAACyLIANcBL777jvZbDZt3bq10m2ioqI0ffr0Oj3u8OHDdeedd9bpnKidm2++WWPHjq10/cSJE+v8D2SuWrVKNptNubm5dTqvJD3wwAP685//fN7zVOe8n3nmGY0ePfq8jwXPIMjggqvsH9w5c+a4/QXpi111woe3e/311zVnzhzX8rl+mNaVOXPmyGazyWazydfXV40bN1bPnj01efJk5eXl1fvx68qFer7qy/XXX6+srCyFhIRIqrv38LZt27RkyRKNGTPmvOeqjieffFLvvvuuvv322wtyPNQtggxQQ8XFxZ4uwWuEhIR4LHw6nU5lZWXphx9+0Jo1a/Twww9r7ty5io6O1uHDh+vtuEVFRfU2t9U4HA6Fh4fLZrPV6bxvvvmmhgwZoqCgoFrPYYzRf/7zn2pt26xZM8XHx+utt96q9fHgOQQZeK2yjy3+8pe/KCIiQk2bNlVCQoJbkCgsLNSTTz6pyy67TA0bNlTPnj21atUq1/qy/4f46aefqmPHjgoKCtKtt96qrKwst2PNnj1bnTt3lp+fnyIiIvT444+71tlsNr311lu644471LBhQ02ZMkVXXHGF/vKXv7jNsXXrVtlsNu3bt89tv/79+ysgIEBt2rTRP//5T9f2rVu3liRdc801stlsuvnmm13r/vd//1cdO3aUv7+/OnTooBkzZrgda8OGDbrmmmvk7++v7t27a8uWLTV+fg8ePKiBAwcqKChITqdTd911l3Jycty2mTJlipo3b67g4GA99NBDeuaZZ9wu05/50dLw4cOVnp6u119/3XW15LvvvtPx48d13333KTQ0VAEBAWrXrp1SUlJqXO/ZbDabwsPDFRERoY4dO2rkyJFas2aNTp48qaeeesq1XWlpqaZOnarWrVsrICBAXbt2deuDJO3atUu33367nE6ngoODddNNN2n//v1u5/jCCy+oRYsWat++vSZPnqyrrrqqXE3R0dF67rnn3PabNGmSQkND5XQ69cgjj7iCUGXPlyTt3LlT/fv3V1BQkMLCwvTAAw/o559/dh3n1KlTevDBBxUUFKSIiAi9+uqrNX7+SktLNXnyZF1++eXy8/NTdHS0li1b5rbNmjVrFB0d7XqdLVq0yO0q4pkfLa1atUojRoxQXl6e63wmTpwoSZoxY4batWsnf39/hYWF6be//W2ldZWUlOif//ynBgwY4Db+97//Xd27d1dwcLDCw8M1dOhQHTlyxLW+rJalS5eqW7du8vPz05dffula/7e//U2RkZEKDAzUXXfdVe7K3YABAzR//vwaP4/wAh7+o5W4BMXGxponnnii3HhKSooJCQlxLQ8bNsw4nU7zyCOPmD179pjFixebwMBA8/bbb7u2eeihh8z1119vvvjiC7Nv3z7zyiuvGD8/P/PNN9+45rTb7SYuLs5kZGSYTZs2mY4dO5qhQ4e65pgxY4bx9/c306dPN5mZmWbDhg1m2rRprvWSTPPmzc3s2bPN/v37zffff29eeOEF06lTJ7f6x4wZY371q1+57de0aVPzzjvvmMzMTPPss88aX19fs3v3bmOMMRs2bDCSzIoVK0xWVpY5evSoMcaY9957z0RERJgPP/zQfPvtt+bDDz80TZo0MXPmzDHGGHPixAkTGhpqhg4danbu3GkWL15s2rRpYyS
ZLVu2VPq8t2rVynVeJSUlJjo62tx4441m48aNZt26daZbt25uf/32vffeM/7+/mb27NkmMzPTTJo0yTidTtO1a1e3Hg0cONAYY0xubq6JiYkxo0aNMllZWSYrK8v85z//MQkJCSY6OtpkZGSYAwcOmOXLl5uPP/640jqr4+zXypmeeOIJExwcbP7zn/8YY4yZMmWK6dChg1m2bJnZv3+/SUlJMX5+fmbVqlXGGGN++OEH06RJEzNo0CCTkZFhMjMzzezZs83XX3/tOsegoCDzwAMPmJ07d5qdO3eaQ4cOGR8fH7NhwwbXcTdv3mxsNpvZv3+/235333232blzp/nkk09MaGio+dOf/lTl83X8+HETGhpqxo0bZ/bs2WM2b95s+vbta2655RbXsR599FHTsmVLs2LFCrN9+3Zz++23m+Dg4ArfV2UmTJjg1rvXXnvNOJ1OM2/ePPP111+bp556ytjtdtd7Jy8vzzRp0sTcf//9ZteuXWbJkiXmyiuvdHudff7550aSOX78uCksLDTTp083TqfTdT4nTpwwGRkZxtfX16SmpprvvvvObN682bz++uuV1rl582YjqdxfwJ41a5ZZsmSJ2b9/v1m7dq2JiYkx/fv3d60vq6VLly7ms88+M/v27TNHjx41EyZMMA0bNjS9e/c2W7ZsMenp6eaKK65w+zfAGGP27NljJJkDBw5UWhu8E0EGF1xNgkyrVq1cP5CMMWbIkCHm7rvvNsYY8/333xtfX1/z448/us3Tp08fM27cONecksy+fftc65OTk01YWJhruUWLFuZ//ud/Kq1Xkhk7dqzb2I8//mh8fX3N+vXrjTHGFBUVmWbNmrnCRtl+jzzyiNt+PXv2NI8++qgxxpgDBw5UGD7atm1rUlNT3caef/55ExMTY4wx5m9/+5tp2rSp+eWXX1zr33rrrRoFmc8++8z4+vqagwcPutbv2rXLSHL9cO7Zs6dJSEhwm+OGG26oNMgYU3FvBwwYYEaMGFFpXbVRVZApey5ycnJMQUGBCQwMNGvWrHHbZuTIkebee+81xhgzbtw407p1a1NUVFThfMOGDTNhYWGmsLDQbbx///6uXhpjzOjRo83NN9/stl+TJk3MqVOn3GoLCgoyJSUlxpiKn6/nn3/e9OvXz23s0KFDRpLJzMw0J06cMA6Hw/zjH/9wrT969KgJCAioUZBp0aKFeeGFF9y2ue6668xjjz3mqvXs19k777xTaZAxpuK+fPjhh8bpdJr8/PxKazvTwoULja+vryktLa1yu4yMDCPJnDhxwq2WRYsWlTtvX19f88MPP7jGli5danx8fExWVpZrLC8vz0hyBVxYBx8twat17txZvr6+ruWIiAjX5eQdO3aopKREV155pYKCglyP9PR018cCkhQYGKi2bdtWOMeRI0d0+PBh9enTp8o6unfv7rbcokUL3XbbbZo9e7YkafHixSosLNSQIUPctouJiSm3vGfPnkqPc+rUKe3fv18jR450O6cpU6a4zmnPnj3q0qWL/P39Kz3OuezZs0eRkZGKjIx0jXXq1EmNGjVy1ZeZmakePXq47Xf2cnU8+uijmj9/vqKjo/XUU09pzZo1lW77/vvvu5336tWra3w8Y4wkuT7mO336tPr27es279y5c13P59atW3XTTTfJbrdXOufVV18th8PhNjZq1CjNmzdPBQUFKioqUmpqqn73u9+5bdO1a1cFBga6lmNiYnTy5EkdOnSo0mNt27ZNn3/+uVu9HTp0kCTt379f+/fvV1FRkXr27Onap0mTJmrfvn01nyEpPz9fhw8f1g033OA2fsMNN7j1/+zXWW3637dvX7Vq1Upt2rTRAw88oPfff1+nT5+udPtffvlFfn5+5e672bRpkwYMGKCWLVsqODhYsbGxkv77EemZzn6vSlLLli112WWXuZZjYmJUWlqqzMxM11hAQIAkVVkbvFMDTxeAS4/T6azwmyW5ubmubz+UOfuHi81mU2lpqST
p5MmT8vX11aZNm9zCjiS3mwQrmqPsh13ZP17n0rBhw3JjDz30kB544AFNmzZNKSkpuvvuu91+aNXGyZMnJUnvvPOO2w8qSeXO0Sr69++v77//XkuWLNHy5cvVp08fJSQklLvHSJLuuOMOt/M+84dPde3Zs0dOp1NNmzZ1fQvl3//+d7m5/Pz8JFXvNVBR/wcMGCA/Pz8tXLhQDodDxcXFVd77UV0nT57UgAED9NJLL5VbFxER4boHyyqCg4O1efNmrVq1Sp999pnGjx+viRMnKiMjo8IbxZs1a6bTp0+rqKjIFR5PnTql+Ph4xcfH6/3331doaKgOHjyo+Pj4cjdfV9Sr6jh27JgkKTQ0tFb7w3O4IoMLrn379tq8eXO58c2bN+vKK6+s9jzXXHONSkpKdOTIEV1xxRVuj/Dw8GrNERwcrKioKKWlpVX7uGV+/etfq2HDhnrrrbe0bNmycv9vXJLWrVtXbrljx46S5PpHuqSkxLU+LCxMLVq00LffflvunMpuDu7YsaO2b9+ugoKCSo9zLh07dtShQ4fcrgzs3r1bubm56tSpk6T/9ikjI8Ntv7OXz+ZwONzOp0xoaKiGDRum9957T9OnT9fbb79d4f7BwcFu51zdoFnmyJEjSk1N1Z133ikfHx916tRJfn5+OnjwYLnns+xqVJcuXbR69eoafxutQYMGGjZsmFJSUpSSkqJ77rmnXL3btm3TL7/84lpet26dgoKCXMeu6Pm69tprtWvXLkVFRZWruWHDhmrbtq3sdrvWr1/v2uf48eP65ptvql270+lUixYt9NVXX7mNf/XVV27937FjhwoLC13ra9v/Bg0aKC4uTi+//LK2b9+u7777TitXrqxwjrKbyXfv3u0a+/rrr3X06FG9+OKLuummm9ShQwe3G33P5eDBg27fZFu3bp18fHzcrmLt3LlTdrtdnTt3rva88A4EGVxwjz76qL755huNGTNG27dvV2Zmpl577TXNmzdPf/jDH6o9z5VXXqn77rtPDz74oD766CMdOHBAGzZs0NSpU/Xvf/+72vNMnDhRr776qt544w3t3btXmzdv1ptvvnnO/Xx9fTV8+HCNGzdO7dq1q/DjnQULFmj27Nn65ptvNGHCBG3YsMH1jajmzZsrICBAy5YtU05Ojusq1aRJkzR16lS98cYb+uabb7Rjxw6lpKTotddekyQNHTpUNptNo0aN0u7du7VkyZIKr25UJS4uTldffbXuu+8+bd68WRs2bNCDDz6o2NhY16X50aNHa9asWXr33Xe1d+9eTZkyRdu3b6/yq7ZRUVFav369vvvuO/38888qLS3V+PHj9a9//Uv79u3Trl279Mknn7jC3Pkwxig7O1tZWVnas2ePZs+ereuvv14hISF68cUXJf03GD355JNKTEzUu+++q/3797v6++6770qSHn/8ceXn5+uee+7Rxo0btXfvXv397393+9ihMg899JBWrlxZaZAtKirSyJEjXX2aMGGCHn/8cfn4+FT6fCUkJOjYsWO69957lZGRof379+vTTz/ViBEjVFJSoqCgII0cOVJ//OMftXLlSu3cuVPDhw93zVldf/zjH/XSSy/pgw8+UGZmpp555hlt3bpVTzzxhKT/vs5KS0v18MMPa8+ePfr0009dr7PKXgNRUVE6efKk0tLS9PPPP+v06dP65JNP9MYbb2jr1q36/vvvNXfuXJWWllb6UVhoaKiuvfZat28ctWzZUg6HQ2+++aa+/fZbffzxx3r++eerfa7+/v4aNmyYtm3bptWrV2vMmDG666673P4Pz+rVq3XTTTfVODzDC3j4Hh1cojZs2GD69u1rQkNDTUhIiOnZs6dZuHCh2zZn30hqzH+/kXLmN2uKiorM+PHjTVRUlLHb7SYiIsL85je/Mdu3bzfGVHzz4cKFC83ZL/2ZM2ea9u3bu+YYPXq0a52kcrWV2b9/v5FkXn755XLrJJnk5GTTt29f4+fnZ6KioswHH3zgts0
777xjIiMjjY+Pj9t5vf/++yY6Oto4HA7TuHFj86tf/cp89NFHrvVr1641Xbt2NQ6Hw0RHR5sPP/ywRjf7GvPfm6XvuOMO07BhQxMcHGyGDBlS7psikydPNs2aNTNBQUHmd7/7nRkzZozp1auXa/3ZPcrMzDS9evUyAQEBrm+APP/886Zjx44mICDANGnSxAwcONB8++23ldZZHWU3cUsyNpvNhISEmB49epjJkyebvLw8t21LS0vN9OnTXf0NDQ018fHxJj093bXNtm3bTL9+/UxgYKAJDg42N910k9u3j85+HZ7ppptuMp07dy43Xrbf+PHjTdOmTU1QUJAZNWqUKSgoqPL5MsaYb775xvzmN78xjRo1MgEBAaZDhw5m7NixrhtgT5w4Ye6//34TGBhowsLCzMsvv1zpTfRlzr7Zt6SkxEycONFcdtllxm63m65du5qlS5e67fPVV1+ZLl26GIfDYbp162ZSU1ONJNc3us6+2dcYYx555BHTtGlTI8lMmDDBrF692sTGxprGjRubgIAA06VLl3Lvg7PNmDHD7XVmjDGpqakmKirK+Pn5mZiYGPPxxx9XeePx2ec9Y8YM06JFC+Pv729++9vfmmPHjrlt1759ezNv3rwq64J3shnz/28WAFBjq1evVp8+fXTo0CGFhYW5rbPZbFq4cOFF9Sv8+/btq/DwcP3973/3dClewRijdu3a6bHHHlNSUpLbuuHDhys3N1eLFi3yTHH14P3333f9rpj6vHLxyy+/qH379vrggw9qfCN7bSxdulR/+MMftH37djVowK2jVkPHgFooLCzUTz/9pIkTJ2rIkCHlQszF4PTp05o5c6bi4+Pl6+urefPmacWKFVq+fLmnS/MKP/30k+bPn6/s7GyNGDHC0+XUi7lz56pNmza67LLLtG3bNj399NO666676v3jl4CAAM2dO9ftlwDWp1OnTiklJYUQY1F0DaiFefPmaeTIkYqOjtbcuXM9XU69sNlsWrJkiV544QUVFBSoffv2+vDDDxUXF+fp0rxC8+bN1axZM7399ttq3Lixp8upF9nZ2Ro/fryys7MVERGhIUOG6IUXXrggxz7zN13Xt7r4thk8h4+WAACAZfGtJQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFkEGQAAYFn/Dxu5nqQjJjuOAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "for model_type in model_types:\n", + "\n", + " logs = glob.glob(\"logs_no_overwrite/%s/%s/*.json\" % (dataset, model_type))\n", + "\n", + " weights = get_best_weights(dataset, model_type)\n", + "\n", + " agg_data, resolutions = get_relevant_data(logs)\n", + " \n", + " print_stats(model_type, agg_data, resolutions)\n", + " \n", + " resolutions = np.array(resolutions).flatten()\n", + " plot(data=resolutions, bins=30, xlabel=\"Unencrypted logits - Decrypted logits (arb)\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "afdbfa5a", + "metadata": {}, + "outputs": [], + "source": [ + "def plot(data, bins, xlabel):\n", + " plt.hist(data, bins=bins, color='black', alpha=0.8)\n", + " plt.xlabel(xlabel)\n", + " plt.ylabel(\"Count\")\n", + " plt.grid(True)\n", + " plt.show()\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "941eae94", + "metadata": {}, + "outputs": [], + "source": [ + "xlabel_dict = {\n", + " \"conv\" : \"Convolution + BN\",\n", + " \"gelu\" : \"GELU\",\n", + " \"bootstrapping\" : \"Bootstrapping\",\n", + " \"residual\" : \"Residual\",\n", + " \"pool\" : \"Avg Pool\",\n", + " \"linear\" : \"Linear\",\n", + " \"total\" : \"Total Time\"\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dac8c517", + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "for k, v in agg_data.items():\n", + " plot(v, 30, xlabel='%s Times (sec)'%xlabel_dict[k])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "19cee0f4", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + 
"nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/palisade_he_cnn/src/cnn_context.py b/palisade_he_cnn/src/cnn_context.py new file mode 100644 index 0000000..92f464b --- /dev/null +++ b/palisade_he_cnn/src/cnn_context.py @@ -0,0 +1,340 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import numpy as np +import math +import torch +from time import time +from collections import defaultdict +import palisade_he_cnn.src.he_cnn.utils as utils +import palisade_he_cnn.src.he_cnn.conv as conv +import palisade_he_cnn.src.he_cnn.pool as pool +import palisade_he_cnn.src.he_cnn.linear as linear + +from pyOpenFHE import CKKS as pal + +TIMING_DICT = defaultdict(list) + +DUPLICATED, IMAGE_SHARDED, CHANNEL_SHARDED = range(3) + + +def reset_timing_dict(): + global TIMING_DICT + TIMING_DICT.clear() + + +# an image is a PyTorch 3-tensor +def create_cnn_context(image, cc, publicKey, verbose=False): + # create these to encrypt + shard_size = cc.getBatchSize() + + if len(image.shape) != 3: + raise ValueError("Input image must be a PyTorch 3-tensor") + + # do we want to address rectangular images at some point...? 
+ if image.shape[1] != image.shape[2]: + raise ValueError("Non-square channels not currently supported") + + if not utils.is_power_of_2(image.shape[0]): + raise ValueError("Number of channels must be a power-of-two") + + if not utils.is_power_of_2(image.shape[1]): + raise ValueError("Image dimensions must be a power-of-two") + + mtx_size = image.shape[1] + num_channels = image.shape[0] + total_size = mtx_size * mtx_size * num_channels + num_shards = math.ceil(total_size / shard_size) + + if total_size <= shard_size: + duplication_factor = shard_size // total_size + else: + duplication_factor = 1 + + duplicated_image = np.repeat(image.numpy(), duplication_factor, axis=0).flatten() + shards = [] + for s in range(num_shards): + shard = cc.encrypt(publicKey, duplicated_image[shard_size * s: shard_size * (s + 1)]) + shards.append(shard) + + # return the cc and keys as well for decryption at the end + cnn_context = CNNContext(shards, mtx_size, num_channels, permutation=None, verbose=verbose) + + return cnn_context + + +def timing_decorator_factory(prefix=""): + def timing_decorator(func): + def wrapper_function(*args, **kwargs): + global TIMING_DICT + start = time() + res = func(*args, **kwargs) + layer_time = time() - start + self = args[0] + TIMING_DICT[prefix.strip()].append(layer_time) + if self.verbose: + print(prefix + f"Layer took {layer_time:.02f} seconds") + return res + + return wrapper_function + + return timing_decorator + + +class CNNContext: + r"""This class contains methods for applying network layers to an image.""" + + def __init__(self, shards, mtx_size, num_channels, permutation=None, verbose=False): + r"""Initializes the CNNContext object. 
We only needs shards and channel/matrix size to compute all other metadata.""" + + if permutation is None: + permutation = np.array(range(num_channels)) + + self.shards = shards + self.mtx_size = mtx_size + self.num_channels = num_channels + self.permutation = permutation + self.verbose = verbose + + self.compute_metadata() + + def compute_metadata(self): + # Shard information + self.num_shards = len(self.shards) + self.shard_size = self.shards[0].getBatchSize() + self.total_size = self.num_shards * self.shard_size + + # Channel information + self.channel_size = self.mtx_size * self.mtx_size + + # Duplication factor + self.duplication_factor = (self.total_size // self.channel_size) // self.num_channels + + if self.duplication_factor > 1: + self.shard_type = DUPLICATED + elif self.channel_size <= self.shard_size: + self.shard_type = IMAGE_SHARDED + else: + self.shard_type = CHANNEL_SHARDED + + # Channel and shard info + self.num_phys_chan_per_shard = self.shard_size // self.channel_size + self.num_phys_chan_total = self.num_shards * self.num_phys_chan_per_shard + self.num_log_chan_per_shard = self.num_phys_chan_per_shard // self.duplication_factor + self.num_log_chan_total = self.num_shards * self.num_log_chan_per_shard + + def print_metadata(self): + # shard information + print(f"num_shards: {self.num_shards}") + print(f"shard_size: {self.shard_size}") + print(f"total_size: {self.total_size}") + + # Channel information + print(f"channel_size: {self.channel_size}") + + # Duplication factor + print(f"duplication_factor: {self.duplication_factor}") + print(f"shard_type: {self.shard_type}") + + # Channel and shard info + print(f"num_phys_chan_per_shard: {self.num_phys_chan_per_shard}") + print(f"num_phys_chan_total: {self.num_phys_chan_total}") + print(f"num_log_chan_per_shard: {self.num_log_chan_per_shard}") + print(f"num_log_chan_total: {self.num_log_chan_total}") + + def decrypt_to_tensor(self, cc, keys): + # decrypt the shards + decrypted_shards = 
[cc.decrypt(keys.secretKey, shard) for shard in self.shards] + decrypted_output = np.concatenate(decrypted_shards) + + # reshape with possible duplication + duplicated_output = decrypted_output.reshape( + self.num_channels * self.duplication_factor, + self.mtx_size, + self.mtx_size + ) + + decrypted_deduplicated_output = duplicated_output[0 :: self.duplication_factor] + + return torch.from_numpy(decrypted_deduplicated_output) + + @timing_decorator_factory("Conv ") + def apply_conv(self, conv_layer, bn_layer=None, output_permutation=None, drop_levels=False): + pal.CNN.omp_set_nested(0) + # pal.CNN.omp_set_dynamic(0) + + # Get filters, biases + filters, biases = utils.get_filters_and_biases_from_conv2d(conv_layer) + + # Get batch norm info if one is passed in + if bn_layer: + scale, shift = utils.get_scale_and_shift_from_bn(bn_layer) + else: + scale = None + shift = None + + num_out_channels = filters.shape[1] + if output_permutation is None: + output_permutation = np.array(range(num_out_channels)) + elif len(output_permutation) != num_out_channels: + raise ValueError("output permutation is incorrect length") + + # TODO this should be a Compress() call + if drop_levels: + L = self.shards[0].getTowersRemaining() - 4 + for j in range(self.num_shards): + for i in range(L): + self.shards[j] *= 1.0 + + # Apply conv + new_shards = conv.conv2d( + ciphertext_shards=self.shards, + filters=filters, + mtx_size=self.mtx_size, + biases=biases, + permutation=self.permutation, + bn_scale=scale, + bn_shift=shift, + output_permutation=output_permutation + ) + + # Create new CNN Context + stride = conv_layer.stride + cnn_context = CNNContext(new_shards, self.mtx_size, num_out_channels, output_permutation, self.verbose) + if stride == (1, 1): + return cnn_context + elif stride == (2, 2): + return cnn_context.apply_pool(conv=False) + else: + raise ValueError("Unsupported stride: {stride}") + + @timing_decorator_factory("Pool ") + def apply_pool(self, conv=True): + 
pal.CNN.omp_set_nested(0) + # pal.CNN.omp_set_dynamic(0) + + # Apply pool + new_shards = pool.pool(self.shards, self.mtx_size, conv) + + # Get permutation + new_permutation = pool.get_pool_permutation(self.shards, self.num_channels, self.mtx_size) + new_permutation = pool.compose_permutations(self.permutation, new_permutation) + new_permutation = np.array(new_permutation) + + # Create new CNN Context + return CNNContext(new_shards, self.mtx_size // 2, self.num_channels, new_permutation, self.verbose) + + @timing_decorator_factory("Fused adaptive pool and linear ") + def apply_fused_pool_linear(self, linear_layer): + has_bias = hasattr(linear_layer, "bias") + return self.apply_linear(linear_layer, has_bias, pool_factor=self.mtx_size) + + @timing_decorator_factory("Bottleneck block ") + def apply_bottleneck(self, bottleneck_block, debug=False, gelu_params={}, bootstrap_params={}, bootstrap=True): + # Bottleneck block's forward pass is here: https://pytorch.org/vision/0.8/_modules/torchvision/models/resnet.html + + skip_connection = self + downsample_block = bottleneck_block.downsample + if downsample_block: + conv_downsample_layer = downsample_block[0] + bn_downsample_layer = downsample_block[1] + skip_connection = skip_connection.apply_conv(conv_downsample_layer, bn_downsample_layer) + + conv1_layer = bottleneck_block.conv1 + bn1_layer = bottleneck_block.bn1 + cnn_context = self.apply_conv(conv1_layer, bn1_layer) + + if not debug: + if bootstrap: cnn_context = cnn_context.apply_bootstrapping(**bootstrap_params) + cnn_context = cnn_context.apply_gelu(**gelu_params) + + conv2_layer = bottleneck_block.conv2 + bn2_layer = bottleneck_block.bn2 + cnn_context = cnn_context.apply_conv(conv2_layer, bn2_layer) + + if not debug: + if bootstrap: cnn_context = cnn_context.apply_bootstrapping(**bootstrap_params) + cnn_context = cnn_context.apply_gelu(**gelu_params) + + conv3_layer = bottleneck_block.conv3 + bn3_layer = bottleneck_block.bn3 + cnn_context = 
cnn_context.apply_conv(conv3_layer, bn3_layer, output_permutation=skip_connection.permutation) + + cnn_context = cnn_context.apply_residual(skip_connection) + + if not debug: + if bootstrap: cnn_context = cnn_context.apply_bootstrapping(**bootstrap_params) + cnn_context = cnn_context.apply_gelu(**gelu_params) + + return cnn_context + + # This operation doesn't return a CNNContext, that's returned by linear + @timing_decorator_factory("Linear ") + def apply_linear(self, linear_layer, bias=True, scale=1.0, pool_factor=1): + pal.CNN.omp_set_nested(0) + pal.CNN.omp_set_dynamic(1) + + linear_weights, linear_biases = utils.get_weights_and_biases_from_linear(linear_layer, + self.mtx_size, + bias, + pool_factor) + final_shard = linear.linear(self.shards, linear_weights, linear_biases, self.mtx_size, self.permutation, scale, + pool_factor) + + return final_shard + + @timing_decorator_factory("Square ") + def apply_square(self): + new_shards = [shard * shard for shard in self.shards] + + return CNNContext(new_shards, self.mtx_size, self.num_channels, self.permutation, self.verbose) + + @timing_decorator_factory("GELU ") + def apply_gelu(self, bound=10.0, degree=59): + """ + bound: + bound = an upper bound on the absolute value of the inputs. 
+ the polynomial approximation is valid for [-bound, bound] + degree: + degree of Chebyshev polynomial + """ + if self.num_shards < 8: + pal.CNN.omp_set_nested(1) + pal.CNN.omp_set_dynamic(1) + else: + pal.CNN.omp_set_nested(0) + pal.CNN.omp_set_dynamic(1) + + # TODO this can be absorbed into the BN + new_shards = [x * (1 / bound) for x in self.shards] + new_shards = pal.CNN.fhe_gelu(new_shards, degree, bound) + + return CNNContext(new_shards, self.mtx_size, self.num_channels, self.permutation, self.verbose) + + @timing_decorator_factory("Bootstrapping ") + def apply_bootstrapping(self, meta=False): + if self.num_shards < 8: + pal.CNN.omp_set_nested(1) + pal.CNN.omp_set_dynamic(1) + else: + pal.CNN.omp_set_nested(0) + pal.CNN.omp_set_dynamic(1) + + cc = self.shards[0].getCryptoContext() + if meta: + new_shards = cc.evalMetaBootstrap(self.shards) + else: + new_shards = cc.evalBootstrap(self.shards) + + return CNNContext(new_shards, self.mtx_size, self.num_channels, self.permutation, self.verbose) + + @timing_decorator_factory("Residual ") + def apply_residual(self, C2): + if len(self.permutation) != len(C2.permutation): + raise ValueError("Incompatible number of channels") + if self.mtx_size != C2.mtx_size: + raise ValueError("Incompatible matrix size") + if any([i != j for i, j in zip(self.permutation, C2.permutation)]): + raise ValueError("Incompatible permutations") + + new_shards = [i + j for i, j in zip(self.shards, C2.shards)] + return CNNContext(new_shards, self.mtx_size, self.num_channels, self.permutation, self.verbose) diff --git a/palisade_he_cnn/src/he_cnn/activations.py b/palisade_he_cnn/src/he_cnn/activations.py new file mode 100644 index 0000000..e77a617 --- /dev/null +++ b/palisade_he_cnn/src/he_cnn/activations.py @@ -0,0 +1,166 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +import math +from copy import copy + +SIN_COEFFS = [ + 0, + 9.99984594193494365437e-01, + 0, + -1.66632595072086745320e-01, + 0, + 8.31238887417884598346e-03, + 0, + -1.93162796407356830500e-04, + 0, + 2.17326217498596729611e-06, +] +COS_COEFFS = [ + 9.99971094606182687341e-01, + 0, + -4.99837602272995734437e-01, + 0, + 4.15223086250910767516e-02, + 0, + -1.34410769349285321733e-03, + 0, + 1.90652668840074246305e-05, + 0, +] +# you technically don't need the . to specify float division in python3 +LOG_COEFFS = [ + 0, + 1, + -0.5, + 1.0 / 3, + -1.0 / 4, + 1.0 / 5, + -1.0 / 6, + 1.0 / 7, + -1.0 / 8, + 1.0 / 9, + -1.0 / 10, +] +EXP_COEFFS = [ + 1, + 1, + 0.5, + 1.0 / 6, + 1.0 / 24, + 1.0 / 120, + 1.0 / 720, + 1.0 / 5040, + 1.0 / 40320, + 1.0 / 362880, + 1.0 / 3628800, +] +SIGMOID_COEFFS = [ + 1.0 / 2, + 1.0 / 4, + 0, + -1.0 / 48, + 0, + 1.0 / 480, + 0, + -17.0 / 80640, + 0, + 31.0 / 1451520, + 0, +] + + +def powerOf2Extended(cipher, logDegree): + res = [copy(cipher)] + for i in range(logDegree): + t = res[-1] + res.append(t * t) + return res + + +def powerExtended(cipher, degree): + res = [] + logDegree = int( + math.log2(degree) + ) # both python and C++ truncate when casting float->int + cpows = powerOf2Extended(cipher, logDegree) + + idx = 0 + for i in range(logDegree): + powi = pow(2, i) + res.append(cpows[i]) + + for j in range(powi - 1): + res.append(copy(res[j])) + res[-1] *= cpows[i] + + res.append(cpows[logDegree]) + + degree2 = pow(2, logDegree) + + for i in range(degree - degree2): + res.append(copy(res[i])) + res[-1] *= cpows[logDegree] + + return res + + +def polynomial_series_function(cipher, coeffs, verbose=False): + """ + Cipher is a CKKSCiphertext, coeffs should be array-like (generally either native list or numpy array) + """ + degree = len(coeffs) + + if verbose: + print("initial ciphertext level = {}".format(cipher.getTowersRemaining())) + + cpows = powerExtended(cipher, degree) # array of ciphertexts + + # cpows[0] == cipher, i.e. 
x^1 + res = cpows[0] * coeffs[1] # this should be defined + res += coeffs[0] + + for i in range(2, degree): + coeff = coeffs[i] + if abs(coeff) > 1e-27: + aixi = cpows[i - 1] * coeff + res += aixi + + if verbose: + print("final ciphertext level = {}".format(res.getTowersRemaining())) + + return res + + +""" +example: + +to approximate the sine function, do: + polynomial_series_function(c1, SIN_COEFFS) +""" + + +def sqrt_helper(cipher, steps): + a = copy(cipher) + b = a - 1 + + for i in range(steps): + a *= 1 - (0.5 * b) + + # there must be a better way to do this... + if i < steps - 1: + b = (b * b) * (0.25 * (b - 3)) + + return a + + +def sqrt(cipher, steps, upper_bound): + if upper_bound == 1: + return sqrt_helper(cipher, steps) + return sqrt_helper(cipher * (1 / upper_bound), steps) * math.sqrt(upper_bound) + + +def relu(cipher, steps, upper_bound): + x = cipher * cipher + + res = cipher + sqrt(x, steps, upper_bound) + return 0.5 * res diff --git a/palisade_he_cnn/src/he_cnn/conv.py b/palisade_he_cnn/src/he_cnn/conv.py new file mode 100644 index 0000000..3c90e2e --- /dev/null +++ b/palisade_he_cnn/src/he_cnn/conv.py @@ -0,0 +1,56 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import numpy as np +import math +from pyOpenFHE import CKKS as pal + +conv2d_cpp = pal.CNN.conv2d + + +def conv2d(ciphertext_shards, filters, mtx_size, biases, permutation=None, bn_scale=None, bn_shift=None, + output_permutation=None): + # if we're combining with a batch norm, fold the batch norm scale factor into the filters + # with sharded convolutions, filters are not duplicated or permuted in any way. 
+ scaled_filters = filters + if bn_scale is not None and bn_shift is not None: + scaled_filters = filters * bn_scale.reshape(1, -1, 1, 1) + + # if we're combining with a batch norm, fold the batch norm shift factor into the biases + shifted_biases = biases + if bn_scale is not None and bn_shift is not None: + shifted_biases = biases * bn_scale + bn_shift + + # all of this should happen somewhere in the CNNContext class + shard_size = ciphertext_shards[0].getBatchSize() + num_out_channels = filters.shape[1] + channel_size = mtx_size * mtx_size + if channel_size < shard_size: + channels_per_shard = shard_size // (mtx_size * mtx_size) + output_dup_factor = math.ceil(channels_per_shard / num_out_channels) + else: + output_dup_factor = 1 + + num_in_channels = filters.shape[0] + if permutation is None: + permutation = np.array(range(num_in_channels)) + + if output_permutation is None: + output_permutation = np.array(range(num_out_channels)) + + if len(permutation) != num_in_channels: + raise ValueError("incorrect number of input channels") + + if len(output_permutation) != num_out_channels: + raise ValueError("incorrect number of output channels") + + scaled_filters = scaled_filters[:, output_permutation, :, :] + shifted_biases = shifted_biases[output_permutation] + + # compute the convolution + conv_shards = conv2d_cpp(ciphertext_shards, scaled_filters, mtx_size, permutation) + + repeated_shifted_biases = np.repeat(shifted_biases, mtx_size * mtx_size * output_dup_factor) + for s in range(len(conv_shards)): + conv_shards[s] += repeated_shifted_biases[s * shard_size: (s + 1) * shard_size] + + return conv_shards diff --git a/palisade_he_cnn/src/he_cnn/linear.py b/palisade_he_cnn/src/he_cnn/linear.py new file mode 100644 index 0000000..ea2cfc7 --- /dev/null +++ b/palisade_he_cnn/src/he_cnn/linear.py @@ -0,0 +1,28 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +import numpy as np +from pyOpenFHE import CKKS as pal_ckks + +linear_cpp = pal_ckks.CNN.linear + + +def linear(channel_shards, weights, biases, mtx_size, permutation=None, scale=1.0, pool_factor=1): + shard_size = channel_shards[0].getBatchSize() + num_shards = len(channel_shards) + num_inputs = weights.shape[1] + channel_size = mtx_size * mtx_size + duplication_factor = max(shard_size // num_inputs, 1) + num_physical_channels_per_shard = shard_size // channel_size + num_physical_channels = num_physical_channels_per_shard * num_shards + num_logical_channels = num_physical_channels // duplication_factor + + if permutation is None: + permutation = np.array(range(num_logical_channels)) + + output = linear_cpp(channel_shards, weights * scale, mtx_size, permutation, pool_factor) + + # FO: if np.all(biases==0), then we do not need to compute biases*scale + num_out_activs = biases.shape[0] + output += np.pad(biases * scale, [(0, shard_size - num_out_activs)]) + + return output diff --git a/palisade_he_cnn/src/he_cnn/pool.py b/palisade_he_cnn/src/he_cnn/pool.py new file mode 100644 index 0000000..a8996e4 --- /dev/null +++ b/palisade_he_cnn/src/he_cnn/pool.py @@ -0,0 +1,72 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +from .utils import * +import math +from pyOpenFHE import CKKS as pal + +pool = pal.CNN.pool + + +def divide_chunks(l, n): + # looping till length l + for i in range(0, len(l), n): + yield l[i:i + n] + + +def interleave_lists(lists): + return [val for tup in zip(*lists) for val in tup] + + +def invert_permutation(P): + inverse_permutation = [0] * len(P) + for i, v in enumerate(P): + inverse_permutation[v] = i + return inverse_permutation + + +def compose_permutations(P1, P2): + if len(P1) != len(P2): + raise ValueError("permutations must have equal size") + permutation = [P1[P2[i]] for i in range(len(P1))] + return permutation + + +""" +metadata includes: + - the new channel permutation + - the duplication factor + - the new number of shards +""" + + +def get_pool_permutation(shards, num_channels, mtx_size): + initial_num_shards = len(shards) + shard_size = shards[0].getBatchSize() + channel_size = mtx_size * mtx_size + initial_num_physical_channels_per_shard = math.ceil(shard_size / channel_size) + num_physical_channels = initial_num_shards * initial_num_physical_channels_per_shard + initial_dup_factor = math.ceil(num_physical_channels / num_channels) + + # if we have channel sharding, then no permutation + if channel_size >= shard_size: + C = num_channels + P = list(range(C)) + return P + + if (initial_dup_factor > 1) and (initial_num_shards > 1): + raise ValueError("Should not have both duplication and shards at the same time") + + # if we have duplication, then no permutation + if initial_dup_factor > 1: + C = initial_num_physical_channels_per_shard // initial_dup_factor + P = list(range(C)) + return P + + C = initial_num_physical_channels_per_shard * initial_num_shards + I = list(range(C)) + I = list(divide_chunks(I, initial_num_physical_channels_per_shard)) + I = list(divide_chunks(I, 4)) + P = [interleave_lists(J) for J in I] + P = sum(P, start=[]) + + return np.array(P) diff --git a/palisade_he_cnn/src/he_cnn/upsample.py 
b/palisade_he_cnn/src/he_cnn/upsample.py new file mode 100644 index 0000000..9d95479 --- /dev/null +++ b/palisade_he_cnn/src/he_cnn/upsample.py @@ -0,0 +1,105 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import math +import numpy as np +from pyOpenFHE import CKKS as pal + +upsample_cpp = pal.CNN.upsample + + +def divide_chunks(l, n): + # looping till length l + for i in range(0, len(l), n): + yield l[i:i + n] + + +def interleave_lists(lists): + return [val for tup in zip(*lists) for val in tup] + + +def invert_permutation(P): + inverse_permutation = [0] * len(P) + for i, v in enumerate(P): + inverse_permutation[v] = i + return inverse_permutation + + +def compose_permutations(P1, P2): + if len(P1) != len(P2): + raise ValueError("permutations must have equal size") + permutation = [P1[P2[i]] for i in range(len(P1))] + return permutation + + +""" +metadata includes: + - the new channel permutation + - the duplication factor + - the new number of shards +""" + + +def get_upsample_permutation(shards, num_channels, mtx_size): + initial_num_shards = len(shards) + shard_size = shards[0].getBatchSize() + channel_size = mtx_size * mtx_size + initial_num_physical_channels_per_shard = math.ceil(shard_size / channel_size) + final_num_physical_channels_per_shard = math.ceil(shard_size / channel_size / 4) + num_physical_channels = initial_num_shards * initial_num_physical_channels_per_shard + initial_dup_factor = math.ceil(num_physical_channels / num_channels) + + # if we start with channel sharding, then no permutation + if channel_size >= shard_size: + P = list(range(num_channels)) + return P + + if (initial_dup_factor > 1) and (initial_num_shards > 1): + raise ValueError("Should not have both duplication and shards at the same time") + + # if we have duplication factor >= 4, then no permutation + if initial_dup_factor > 2: + P = list(range(num_channels)) + return P + + # if we have two-fold duplication + if initial_dup_factor 
== 2: + P = list(range(num_channels)) + if num_channels == 1: return P + P = P[::2] + P[1::2] + return P + + I = list(range(num_channels)) + I = list(divide_chunks(I, initial_num_physical_channels_per_shard)) + I = [list(divide_chunks(J, 4)) for J in I] + P = [interleave_lists(J) for J in I] + P = sum(P, start=[]) + + return np.array(P) + + +""" +This takes a permuted list of ciphertexts stored using channel sharding, +and it reorders them into the identity permutation. + +mtx_size and permutation refer to the values after upsampling, not of the input shards +""" + + +def undo_channel_sharding_permutation(shards, num_channels, mtx_size, permutation): + num_shards = len(shards) + shard_size = shards[0].getBatchSize() + channel_size = mtx_size * mtx_size + + if shard_size > channel_size: + raise ValueError("This function should only be called on a channel sharded image") + + num_shards_per_channel = channel_size // shard_size + + final_shards = [None for _ in range(num_shards)] + for i, x in enumerate(shards): + channel_idx = i // num_shards_per_channel + subshard_idx = i % num_shards_per_channel + correct_idx = permutation[channel_idx] * num_shards_per_channel + subshard_idx + final_shards[correct_idx] = x + + return final_shards diff --git a/palisade_he_cnn/src/he_cnn/utils.py b/palisade_he_cnn/src/he_cnn/utils.py new file mode 100644 index 0000000..dcbdd59 --- /dev/null +++ b/palisade_he_cnn/src/he_cnn/utils.py @@ -0,0 +1,228 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +import shutil +from pathlib import Path + +import numpy as np +import pyOpenFHE as pal +from pyOpenFHE import CKKS as pal_ckks +import numpy as np +from pathlib import Path + +serial = pal_ckks.serial + + +def is_power_of_2(x): + return x > 0 and x & (x - 1) == 0 + + +def next_power_of_2(n): + p = 1 + if n and not (n & (n - 1)): + return n + while p < n: + p <<= 1 + return p + + +def load_cc_and_keys(batch_size, mult_depth=10, scale_factor_bits=40, bootstrapping=False): + f = "{}-{}-{}-{}".format(batch_size, mult_depth, scale_factor_bits, int(bootstrapping)) + path = Path("serialized") / f + + P = (path / "PublicKey.bin").as_posix() + publicKey = pal_ckks.serial.DeserializeFromFile_PublicKey(P, pal_ckks.serial.SerType.BINARY) + P = (path / "PrivateKey.bin").as_posix() + secretKey = pal_ckks.serial.DeserializeFromFile_PrivateKey(P, pal_ckks.serial.SerType.BINARY) + + keys = pal_ckks.KeyPair(publicKey, secretKey) + cc = publicKey.getCryptoContext() + + P = (path / "EvalMultKey.bin").as_posix() + pal_ckks.serial.DeserializeFromFile_EvalMultKey_CryptoContext(cc, P, pal_ckks.serial.SerType.BINARY) + P = (path / "EvalAutomorphismKey.bin").as_posix() + pal_ckks.serial.DeserializeFromFile_EvalAutomorphismKey_CryptoContext(cc, P, pal_ckks.serial.SerType.BINARY) + + if bootstrapping: + cc.evalBootstrapSetup() + + return cc, keys + + +def save_cc_and_keys(cc, keys, path): + P = (path / "PublicKey.bin").as_posix() + assert pal_ckks.serial.SerializeToFile(P, keys.publicKey, pal_ckks.serial.SerType.BINARY) + P = (path / "PrivateKey.bin").as_posix() + assert pal_ckks.serial.SerializeToFile(P, keys.secretKey, pal_ckks.serial.SerType.BINARY) + P = (path / "EvalMultKey.bin").as_posix() + assert pal_ckks.serial.SerializeToFile_EvalMultKey_CryptoContext(cc, P, pal_ckks.serial.SerType.BINARY) + P = (path / "EvalAutomorphismKey.bin").as_posix() + assert pal_ckks.serial.SerializeToFile_EvalAutomorphismKey_CryptoContext(cc, P, pal_ckks.serial.SerType.BINARY) + + +def 
create_cc_and_keys(batch_size, mult_depth=10, scale_factor_bits=40, bootstrapping=False, save=False): + # We make use of palisade HE by creating a crypto context object + # this specifies things like multiplicative depth + cc = pal_ckks.genCryptoContextCKKS( + mult_depth, # number of multiplications you can perform + scale_factor_bits, # kindof like number of bits of precision + batch_size, # length of your vector, can be any power-of-2 up to 2^14 + ) + + print(f"CKKS scheme is using ring dimension = {cc.getRingDimension()}, batch size = {cc.getBatchSize()}") + + cc.enable(pal.enums.PKESchemeFeature.PKE) + cc.enable(pal.enums.PKESchemeFeature.KEYSWITCH) + cc.enable(pal.enums.PKESchemeFeature.LEVELEDSHE) + cc.enable(pal.enums.PKESchemeFeature.ADVANCEDSHE) + cc.enable(pal.enums.PKESchemeFeature.FHE) + + # generate keys + keys = cc.keyGen() + cc.evalMultKeyGen(keys.secretKey) + cc.evalPowerOf2RotationKeyGen(keys.secretKey) + + if bootstrapping: + cc.evalBootstrapSetup() + cc.evalBootstrapKeyGen(keys.secretKey) + + if save: + f = "{}-{}-{}-{}".format(batch_size, mult_depth, scale_factor_bits, int(bootstrapping)) + path = Path("serialized") / f + path.mkdir(parents=True, exist_ok=True) + save_cc_and_keys(cc, keys, path) + + return cc, keys + + +def get_keys(mult_depth, + scale_factor_bits, + batch_size, + bootstrapping): + try: + cc, keys = load_cc_and_keys(batch_size, + mult_depth=mult_depth, + scale_factor_bits=scale_factor_bits, + bootstrapping=bootstrapping) + except: + cc, keys = create_cc_and_keys(batch_size, + mult_depth=mult_depth, + scale_factor_bits=scale_factor_bits, + bootstrapping=bootstrapping, + save=True) + return cc, keys + + +def get_filters_and_biases_from_conv2d(layer): + filters = layer.weight.detach().numpy() + if hasattr(layer, "bias") and layer.bias is not None: + biases = layer.bias.detach().numpy() + else: + # without bias + # same as number of output channels (each bias is broadcast over the channel) + biases = np.zeros((filters.shape[0],)) + 
+ filters = filters.transpose(1, 0, 2, 3) + pad_to = next_power_of_2(filters.shape[0]) + + if pad_to is not None: + if filters.shape[0] < pad_to: + filters = np.concatenate( + [filters, np.zeros((pad_to - filters.shape[0],) + filters.shape[1:])] + ) + + return filters, biases + + +def get_scale_and_shift_from_bn(layer): + mu = layer.running_mean.detach().numpy() + var = layer.running_var.detach().numpy() + gamma = ( + layer.weight.detach().numpy() + ) # https://discuss.pytorch.org/t/getting-parameters-of-torch-nn-batchnorm2d-during-training/38913/3 + beta = layer.bias.detach().numpy() + eps = layer.eps + + sigma = np.sqrt(var + eps) # std dev + + # compute scale factor + scale = gamma / sigma + + # compute shift factor + shift = -gamma * mu / sigma + beta + + return scale, shift + + +# needs to know either number of channels or matrix size +def get_weights_and_biases_from_linear(layer, mtx_size, bias, pool_factor=1): + nout = layer.weight.size(0) + weights = layer.weight.detach().numpy() + num_channels = weights.shape[1] // (mtx_size * mtx_size) + weights = weights.reshape(nout, num_channels, mtx_size, mtx_size) + weights = weights.reshape(nout, -1) + weights = np.repeat(weights, pool_factor * pool_factor, axis=1) + + if bias: + biases = layer.bias.detach().numpy() + else: + biases = np.zeros(nout) + + return weights, biases + + +# Given a model and an input, get intermediate layer output +def get_intermediate_output(model, layer, inputs): + layer_name = "layer" + activation = {} + + def get_activation(name): + def hook(model, input, output): + activation[name] = output.detach() + + return hook + + layer.register_forward_hook( + get_activation(layer_name) + ) + _ = model(inputs) + return activation[layer_name] + + +def compare_accuracy(keys, cnn_context, unencrypted, name="block", num_digits=4): + A = decrypt_and_reshape(cnn_context, keys.secretKey, cnn_context.mtx_size) + B = unencrypted.detach().cpu().numpy()[0] + diff = np.abs(A - B[cnn_context.permutation]) + 
print(f"error in {name}:\nmax = {np.max(diff):.0{num_digits}f}\nmean = {np.mean(diff):.0{num_digits}f}") + + +def decrypt_and_reshape(cnn_context, secret_key, mtx_size): + cc = secret_key.getCryptoContext() + decrypted_output = [cc.decrypt(secret_key, ctxt) for ctxt in cnn_context.shards] + decrypted_output = np.hstack(decrypted_output) + num_out_chan = int(round(len(decrypted_output) / (mtx_size * mtx_size))) + decrypted_output = decrypted_output.reshape((num_out_chan, mtx_size, mtx_size)) + decrypted_output = decrypted_output[0:: cnn_context.duplication_factor] + + return decrypted_output + + +def serialize(cc, keys, ctxt): + path = Path("serialized") + path.mkdir(parents=True, exist_ok=True) + shutil.rmtree(path) + path.mkdir(parents=True, exist_ok=True) + + assert serial.SerializeToFile("serialized/CryptoContext.bin", ctxt, serial.SerType.BINARY) + assert serial.SerializeToFile("serialized/ciphertext.bin", ctxt, serial.SerType.BINARY) + assert serial.SerializeToFile("serialized/PublicKey.bin", keys.publicKey, serial.SerType.BINARY) + assert serial.SerializeToFile("serialized/PrivateKey.bin", keys.secretKey, serial.SerType.BINARY) + assert serial.SerializeToFile_EvalMultKey_CryptoContext(cc, "serialized/EvalMultKey.bin", serial.SerType.BINARY) + assert serial.SerializeToFile_EvalAutomorphismKey_CryptoContext(cc, "serialized/EvalAutomorphismKey.bin", + serial.SerType.BINARY) + + +if __name__ == "__main__": + cc, keys = get_keys(mult_depth=34, scale_factor_bits=59, batch_size=32 * 32 * 32, bootstrapping=True) + print(cc.getBatchSize()) + shard = cc.encrypt(keys.publicKey, [0.0 for _ in range(32768)]) + serialize(cc, keys, shard) diff --git a/palisade_he_cnn/src/small_model.py b/palisade_he_cnn/src/small_model.py new file mode 100644 index 0000000..dd98865 --- /dev/null +++ b/palisade_he_cnn/src/small_model.py @@ -0,0 +1,265 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchvision import datasets +import torchvision.transforms as transforms +from typing import Union, Tuple, List + + +class Square(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + return torch.square(x) + + +def moment(x: torch.Tensor, std: float, mean: float, deg: int = 4, eps: float = 1e-4) -> torch.Tensor: + N = x.shape[0] + return (1.0 / N) * torch.sum((x - mean) ** deg) / (std ** deg + eps) + + +def activation_helper(activation: str = 'gelu', + gelu_degree: int = 16): + if activation == 'relu': + return nn.ReLU() + elif activation == 'gelu': + return nn.GELU() + elif activation == 'polygelu': + raise ValueError("Not supported.") + elif activation == 'square': + return Square() + else: + return nn.ReLU() + + +def conv_block(in_ch: int, + out_ch: int, + activation: str = 'relu', + gelu_degree: int = 16, + pool: bool = False, + pool_method: str = 'avg', + kernel: int = 3, + stride: int = 1, + padding: Union[int, str] = 1): + layers = [nn.Conv2d(in_ch, + out_ch, + kernel_size=kernel, + stride=stride, + padding=padding), + nn.BatchNorm2d(out_ch), + activation_helper(activation, gelu_degree) + ] + if pool: + layers.append(nn.MaxPool2d(2, 2) if pool_method == 'max' else nn.AvgPool2d(2, 2)) + return nn.Sequential(*layers) + + +def get_small_model_dict(activation='gelu', + gelu_degree: int = 16, + pool_method: str = 'avg') -> nn.ModuleDict: + classifier = nn.Sequential(nn.Flatten(), + nn.Linear(8 * 8 * 128, 10)) + return nn.ModuleDict( + { + "conv1": conv_block(in_ch=1, + out_ch=64, + kernel=4, + pool=True, + pool_method=pool_method, + padding='same', + activation=activation, + gelu_degree=gelu_degree), + "conv2": conv_block(in_ch=64, + out_ch=128, + kernel=4, + pool=True, + pool_method=pool_method, + padding='same', + activation=activation, + gelu_degree=gelu_degree), + "conv3": conv_block(in_ch=128, + out_ch=128, + kernel=4, + pool=False, + padding='same', + 
activation=activation, + gelu_degree=gelu_degree), + "classifier": classifier + } + ) + + +class SmallModel(nn.Module): + + def __init__(self, activation='gelu', gelu_degree: int = 16, pool_method: str = 'avg'): + super(SmallModel, self).__init__() + self.model_layers = get_small_model_dict(activation=activation, gelu_degree=gelu_degree, + pool_method=pool_method) + self.n_bn_classes = self.count_instances_of_a_class() + + def count_instances_of_a_class(self, cls: nn.BatchNorm2d = nn.BatchNorm2d) -> int: + n_classes = 0 + for _, block in self.model_layers.items(): + for layer in block: + # Handle the nested case + if isinstance(layer, nn.Sequential): + for sublayer in layer: + if isinstance(sublayer, cls): + n_classes += 1 + # Handle the unnested case + else: + if isinstance(layer, cls): + n_classes += 1 + return n_classes + + def forward(self, x: torch.Tensor) -> torch.Tensor: + self.bn_outputs = {} # key=layer name, v=list of torch.Tensors + self.outputs = {} + + for name, block in self.model_layers.items(): + block_output, block_bn_output = self.block_pass(block, x) + self.bn_outputs[name] = block_bn_output + + # Residual Connection + if "res" in name: + x = x + block_output + # Normal + else: + x = block_output + + return x + + def block_pass(self, block: nn.Sequential, x: torch.Tensor) -> Tuple[torch.Tensor, List[torch.Tensor]]: + bn_output = [] + + # Iterate through a block, which may be nested (residual connections are nested) + for layer in block: + # Handle the nested case + if isinstance(layer, nn.Sequential): + for sublayer in layer: + x = sublayer(x) + if isinstance(sublayer, nn.BatchNorm2d): + bn_output.append(x) + + # Handlle the unnested case + else: + x = layer(x) + if isinstance(layer, nn.BatchNorm2d): + bn_output.append(x) + + self.outputs[layer] = x + + return x, bn_output + + # Must be called after forward method to set self.bn_outputs + def get_bn_loss_metrics(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + means, stds, skews, kurts 
= self.get_moments_by_layer() + + # Aggregating + loss_means = F.mse_loss(means, torch.zeros(self.n_bn_classes)) + loss_stds = F.mse_loss(stds, torch.ones(self.n_bn_classes)) + # loss_skews = F.mse_loss(skews, torch.zeros(self.n_bn_classes)) + loss_kurts = F.mse_loss(kurts, 3 * torch.ones(self.n_bn_classes)) + return loss_means, loss_stds, loss_kurts + + def get_moments_by_layer(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + means, stds = torch.zeros(self.n_bn_classes), torch.zeros(self.n_bn_classes) + skews, kurts = torch.zeros(self.n_bn_classes), torch.zeros(self.n_bn_classes) + + layer_index = 0 + for name, block in self.bn_outputs.items(): + + # Residual blocks are nested + for sublayer in range(0, len(block), 1): + dist = block[sublayer].flatten() + std, mean = torch.std_mean(dist) + skew = moment(dist, std, mean, deg=3) + kurt = moment(dist, std, mean, deg=4) + means[layer_index] = mean + stds[layer_index] = std + skews[layer_index] = skew + kurts[layer_index] = kurt + layer_index += 1 + return means, stds, skews, kurts + + def get_intermediate_layer_output(self, layer, inputs): + layer_name = "layer" + activation = {} + + def get_activation(name): + def hook(self, input, output): + print("calling hook") + activation[name] = output.detach() + + return hook + + layer.register_forward_hook( + get_activation(layer_name) + ) + _ = self(inputs) + return activation[layer_name] + +def train_small_model(): + DATA_DIR = "../data" + transform=transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)), + transforms.Pad(2) + ]) + BATCH_SIZE = 512 + train_kwargs = {'batch_size': BATCH_SIZE} + test_kwargs = {'batch_size': BATCH_SIZE} + dataset1 = datasets.MNIST(DATA_DIR, train=True, download=True, + transform=transform) + dataset2 = datasets.MNIST(DATA_DIR, train=False, + transform=transform) + + train_dl = torch.utils.data.DataLoader(dataset1,**train_kwargs) + val_dl = torch.utils.data.DataLoader(dataset2, 
**test_kwargs) + max_lr = 0.005 + weight_decay = 1e-5 + model = SmallModel() + optimizer = torch.optim.Adam(model.parameters(), + max_lr, + weight_decay = weight_decay) + EPOCHS = 13 + + for epoch in range(EPOCHS): + print(f"Epoch {epoch}") + + train_loss = 0 + bn_means, bn_stds, bn_kurts = 0,0,0 + N = 0 + + model.train() + + for i, (img, label) in enumerate(train_dl): + logit = model(img) + + # Model loss + loss = F.cross_entropy(logit,label) + + # Loss modifications + bn_mean, bn_std, bn_kurt = model.get_bn_loss_metrics() + loss += (bn_mean + bn_std + bn_kurt) + + loss.backward() + + # Save stuff + train_loss += loss.item() + bn_means += bn_mean.item() + bn_stds += bn_std.item() + bn_kurts += bn_kurt.item() + N += 1 + + optimizer.step() + optimizer.zero_grad() + + print("Saving model as %s" % "small_model.pt") + torch.save(model.state_dict(), "small_model.pt") + +if __name__ == "__main__": + train_small_model() diff --git a/palisade_he_cnn/src/small_model_inference.py b/palisade_he_cnn/src/small_model_inference.py new file mode 100644 index 0000000..92e1f07 --- /dev/null +++ b/palisade_he_cnn/src/small_model_inference.py @@ -0,0 +1,93 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +# export OMP_DISPLAY_ENV=TRUE +import os +from time import time + +import torch +import torchvision +import torchvision.transforms as transforms + +from cnn_context import create_cnn_context +from he_cnn.utils import * +from small_model import SmallModel + +np.set_printoptions(formatter={'float': lambda x: "{0:0.3f}".format(x)}) + +# create HE cc and keys +mult_depth = 30 +scale_factor_bits = 40 +batch_size = 32 * 32 * 32 # increased batch size b/c the ring dimension is higher due to the mult_depth + +# used for a small test of big shards +# batch_size = 128 + +# if using bootstrapping, you must increase scale_factor_bits to 59 +cc, keys = create_cc_and_keys(batch_size, mult_depth=mult_depth, scale_factor_bits=scale_factor_bits, + bootstrapping=False) + +# load the model +weight_file = "palisade_he_cnn/src/weights/small_model.pt" +print(os.getcwd()) +model = SmallModel(activation='gelu', pool_method='avg') +model.load_state_dict(torch.load(weight_file)) +model.eval() + +# load data +transform = transforms.Compose([transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)), + transforms.Pad(2)]) +validset = torchvision.datasets.MNIST(root="./data", download=True, transform=transform) +validloader = torch.utils.data.DataLoader(validset, batch_size=1, shuffle=True) + +total = 0 +correct = 0 +total_time = 0 + +for i, test_data in enumerate(validloader): + print(f"Inference {i + 1}:") + + x_test, y_test = test_data + + input_img = create_cnn_context(x_test[0], cc, keys.publicKey, verbose=True) + + start = time() + + layer = model.model_layers.conv1 + conv1 = input_img.apply_conv(layer[0], layer[1]) + act1 = conv1.apply_gelu() + pool1 = act1.apply_pool() + + layer = model.model_layers.conv2 + perm = np.random.permutation(128) # example of how to use an output permutation + conv2 = pool1.apply_conv(layer[0], layer[1], output_permutation=perm) + act2 = conv2.apply_gelu() + pool2 = act2.apply_pool() + + layer = model.model_layers.conv3 + conv3 = 
pool2.apply_conv(layer[0], layer[1]) + act3 = conv3.apply_gelu() + + layer = model.model_layers.classifier[1] + logits = act3.apply_linear(layer) + + logits_dec = cc.decrypt(keys.secretKey, logits)[:10] + logits_pt = model(x_test).detach().numpy().ravel() + + print(f"[+] decrypted logits = {logits_dec}") + print(f"[+] unencrypted logits = {logits_pt}") + + inference_time = time() - start + total_time += inference_time + total += 1 + + y_label = y_test[0] + correct += np.argmax(logits_dec) == y_label + + out_string = f""" + Count: {total} + Accuracy: {correct / total} + Average latency: {total_time / total:.02f}s + """ + + print(out_string) diff --git a/palisade_he_cnn/src/utils.py b/palisade_he_cnn/src/utils.py new file mode 100644 index 0000000..754ac57 --- /dev/null +++ b/palisade_he_cnn/src/utils.py @@ -0,0 +1,138 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import copy +from collections import OrderedDict, defaultdict +from typing import Dict, Callable + +import torch +import torchvision.transforms as tt +from torch.utils.data import DataLoader +from torchvision.datasets import ImageFolder + + +def pad_conv_input_channels(conv1): + conv1 = copy.deepcopy(conv1) + conv1.in_channels = 4 + data = conv1.weight.data + shape = list(data.shape) + shape[1] = 1 + padding = torch.zeros(*shape) + new_data = torch.cat((data, padding), 1) + conv1.weight.data = torch.Tensor(new_data) + return conv1 + + + +class PadChannel(object): + def __init__(self, npad: int=1): + self.n = npad + + def __call__(self, x): + _, width, height = x.shape + x = torch.cat([x, torch.zeros(self.n, width, height)]) + return x + +def patch_whitening(data, patch_size=(3, 3)): + # Compute weights from data such that + # torch.std(F.conv2d(data, weights), dim=(2, 3)) + # is close to 1. 
+ h, w = patch_size + c = data.size(1) + patches = data.unfold(2, h, 1).unfold(3, w, 1) + patches = patches.transpose(1, 3).reshape(-1, c, h, w).to(torch.float32) + + n, c, h, w = patches.shape + X = patches.reshape(n, c * h * w) + X = X / (X.size(0) - 1) ** 0.5 + covariance = X.t() @ X + + eigenvalues, eigenvectors = torch.linalg.eigh(covariance) + + eigenvalues = eigenvalues.flip(0) + + eigenvectors = eigenvectors.t().reshape(c * h * w, c, h, w).flip(0) + + return eigenvectors / torch.sqrt(eigenvalues + 1e-2).view(-1, 1, 1, 1) + +def get_cifar10_dataloader(batch_size, + data_dir: str='../../datasets/cifar10/', + num_workers: int=4): + stats = ((0.4914, 0.4822, 0.4465), + (0.2023, 0.1994, 0.2010)) + + train_tfms = tt.Compose([ + tt.RandomCrop(32,padding=4,padding_mode='reflect'), + tt.RandomHorizontalFlip(), + tt.ToTensor(), + tt.Normalize(*stats,inplace=True), + PadChannel(npad=1) + ]) + + val_tfms = tt.Compose([ + tt.ToTensor(), + tt.Normalize(*stats,inplace=True), + PadChannel(npad=1) + ]) + + train_ds = ImageFolder(data_dir+'train',transform=train_tfms) + val_ds = ImageFolder(data_dir+'test',transform=val_tfms) + + train_dl = DataLoader(train_ds, + batch_size, + pin_memory = True, + num_workers = num_workers, + shuffle = True) + val_dl = DataLoader(val_ds, + batch_size, + pin_memory = True, + num_workers = num_workers) + return train_dl, val_dl + + +def remove_all_hooks(model: torch.nn.Module) -> None: + for name, child in model._modules.items(): + if child is not None: + if hasattr(child, "_forward_hooks"): + child._forward_hooks: Dict[int, Callable] = OrderedDict() + elif hasattr(child, "_forward_pre_hooks"): + child._forward_pre_hooks: Dict[int, Callable] = OrderedDict() + elif hasattr(child, "_backward_hooks"): + child._backward_hooks: Dict[int, Callable] = OrderedDict() + remove_all_hooks(child) + +# Given a model and an input, get intermediate layer output +def get_intermediate_output(model): + activation = defaultdict(list) + + def get_activation(name): 
+ def hook(model, input, output): + x = output.detach() + activation[name].append(x) + + return hook + + BatchNorm_layers = [m for m in model.modules() if isinstance(m, torch.nn.BatchNorm2d)] + for i, b in enumerate(BatchNorm_layers): + b.register_forward_hook( + get_activation(f"bn_{i + 1}") + ) + return activation + + +def get_all_bn_activations(model, val_dl, DEVICE): + activation = get_intermediate_output(model) + + model.to(DEVICE) + model.eval() + + for img, label in (val_dl): + img, label = img.to(DEVICE), label.to(DEVICE) + out = model(img) + + remove_all_hooks(model) + + activation = {k:torch.cat(v) for k,v in activation.items()} + + return activation + + diff --git a/palisade_he_cnn/src/weights/small_model.pt b/palisade_he_cnn/src/weights/small_model.pt new file mode 100644 index 0000000..8e978dc Binary files /dev/null and b/palisade_he_cnn/src/weights/small_model.pt differ diff --git a/palisade_he_cnn/test.py b/palisade_he_cnn/test.py new file mode 100644 index 0000000..43fc5a8 --- /dev/null +++ b/palisade_he_cnn/test.py @@ -0,0 +1,248 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +import pytest +import torch +import numpy as np + +from src.cnn_context import create_cnn_context +from src.he_cnn.utils import * + +np.set_printoptions(formatter={'float': lambda x: "{0:0.3f}".format(x)}) + +class Info(): + def __init__(self, mult_depth = 30, scale_factor_bits = 40, batch_size = 32 * 32 * 32, max = 255, min = 0, h = 128, w = 128, channel_size = 3, ker_size = 3): + self.mult_depth = mult_depth + self.scale_factor_bits = scale_factor_bits + self.batch_size = batch_size + self.max = max + self.min = min + self.h = h + self.w = w + self.channel_size = channel_size + self.ker_size = ker_size + + rand_tensor = (max-min)*torch.rand((channel_size, h, w)) + min + self.rand_tensor = rand_tensor + + self.cc, self.keys = create_cc_and_keys(batch_size, mult_depth=mult_depth, scale_factor_bits=scale_factor_bits, bootstrapping=False) + + self.input_img = create_cnn_context(self.rand_tensor, self.cc, self.keys.publicKey, verbose=True) + +@pytest.fixture +def check1(): + return Info(30, 40, 32 * 32 * 32, 1, -1, 64, 64, 4, 3) + +@pytest.fixture +def check2(): + return Info(30, 40, 32 * 32 * 32, 1, -1, 64, 64, 1, 3) + +@pytest.fixture +def check3(): + return Info(30, 40, 32, 1, -1, 16, 16, 2, 3) + +def test_apply_conv2d_c1(check1) -> None: + class ConvLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, kernel_size): + super(ConvLayer, self).__init__() + self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size, padding="same") + + def forward(self, x): + x = self.conv(x) + return x + + model = ConvLayer(check1.channel_size, check1.channel_size, check1.ker_size) + model.eval() + layer = model.conv + + pt_conv = model(check1.rand_tensor) + pt_conv = torch.squeeze(pt_conv, axis=0).detach().numpy() + + conv1 = check1.input_img.apply_conv(layer) + dec_conv1 = conv1.decrypt_to_tensor(check1.cc, check1.keys).numpy().squeeze() + + assert np.allclose(dec_conv1, pt_conv, atol=1e-03), "Convolution result did not match between HE and PyTorch, 
failed image < shard" + +def test_apply_conv2d_c2(check2) -> None: + class ConvLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, kernel_size): + super(ConvLayer, self).__init__() + self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size, padding="same") + + def forward(self, x): + x = self.conv(x) + return x + + model = ConvLayer(check2.channel_size, check2.channel_size, check2.ker_size) + model.eval() + layer = model.conv + + pt_conv = model(check2.rand_tensor) + pt_conv = torch.squeeze(pt_conv, axis=0).detach().numpy() + + conv1 = check2.input_img.apply_conv(layer) + dec_conv1 = conv1.decrypt_to_tensor(check2.cc, check2.keys).numpy().squeeze() + + assert np.allclose(dec_conv1, pt_conv, atol=1e-03), "Convolution result did not match between HE and PyTorch, failed channel < shard" + +def test_apply_conv2d_c3(check3) -> None: + class ConvLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, kernel_size): + super(ConvLayer, self).__init__() + self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size, padding="same") + + def forward(self, x): + x = self.conv(x) + return x + + model = ConvLayer(check3.channel_size, check3.channel_size, check3.ker_size) + model.eval() + layer = model.conv + + pt_conv = model(check3.rand_tensor) + pt_conv = torch.squeeze(pt_conv, axis=0).detach().numpy() + + conv1 = check3.input_img.apply_conv(layer) + dec_conv1 = conv1.decrypt_to_tensor(check3.cc, check3.keys).numpy().squeeze() + + assert np.allclose(dec_conv1, pt_conv, atol=1e-03), "Convolution result did not match between HE and PyTorch, failed channel > shard" + + +def test_apply_pool_c1(check1) -> None: + class ConvLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, kernel_size): + super(ConvLayer, self).__init__() + self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size, padding="same") + + def forward(self, x): + x = self.conv(x) + return x + + model = ConvLayer(check1.channel_size, 
check1.channel_size, check1.ker_size) + model.eval() + layer = model.conv + + pt_conv = model(check1.rand_tensor) + pt_max_pool = torch.nn.AvgPool2d(2) + pt_pool = pt_max_pool(pt_conv) + pt_pool = pt_pool.detach().numpy() + + conv1 = check1.input_img.apply_conv(layer) + pool = conv1.apply_pool() + dec_pool = pool.decrypt_to_tensor(check1.cc, check1.keys).numpy() + + assert np.allclose(dec_pool, pt_pool, atol=1e-03), "Pooling result did not match between HE and PyTorch, failed image < shard" + +def test_apply_pool_c2(check2) -> None: + class ConvLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, kernel_size): + super(ConvLayer, self).__init__() + self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size, padding="same") + + def forward(self, x): + x = self.conv(x) + return x + + model = ConvLayer(check2.channel_size, check2.channel_size, check2.ker_size) + model.eval() + layer = model.conv + + pt_conv = model(check2.rand_tensor) + pt_max_pool = torch.nn.AvgPool2d(2) + pt_pool = pt_max_pool(pt_conv) + pt_pool = pt_pool.detach().numpy() + + conv1 = check2.input_img.apply_conv(layer) + pool = conv1.apply_pool() + dec_pool = pool.decrypt_to_tensor(check2.cc, check2.keys).numpy() + + assert np.allclose(dec_pool, pt_pool, atol=1e-03), "Pooling result did not match between HE and PyTorch, failed channel < shard" + +def test_apply_pool_c3(check3) -> None: + class ConvLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, kernel_size): + super(ConvLayer, self).__init__() + self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size, padding="same") + + def forward(self, x): + x = self.conv(x) + return x + + model = ConvLayer(check3.channel_size, check3.channel_size, check3.ker_size) + model.eval() + layer = model.conv + + pt_conv = model(check3.rand_tensor) + pt_max_pool = torch.nn.AvgPool2d(2) + pt_pool = pt_max_pool(pt_conv) + pt_pool = pt_pool.detach().numpy() + + conv1 = check3.input_img.apply_conv(layer) + pool = 
conv1.apply_pool() + dec_pool = pool.decrypt_to_tensor(check3.cc, check3.keys).numpy() + + assert np.allclose(dec_pool, pt_pool, atol=1e-03), "Pooling result did not match between HE and PyTorch, failed channel > shard" + +def test_apply_linear_c1(check1) -> None: + class LinearLayer(torch.nn.Module): + def __init__(self, input_size, output_size): + super(LinearLayer, self).__init__() + self.linear_one = torch.nn.Linear(input_size, output_size) + + def forward(self, x): + x = self.linear_one(x) + return x + + linear = LinearLayer(len(check1.rand_tensor.flatten()), check1.rand_tensor.shape[0]) + linear.eval() + pt_linear = linear(check1.rand_tensor.flatten()).detach().numpy() + + he_linear = check1.input_img.apply_linear(linear.linear_one) + dec_linear = check1.cc.decrypt(check1.keys.secretKey, he_linear)[0:check1.rand_tensor.shape[0]] + + assert np.allclose(dec_linear, pt_linear, atol=1e-03), "Linear result did not match between HE and PyTorch, failed image < shard" + +def test_apply_linear_c2(check2) -> None: + class LinearLayer(torch.nn.Module): + def __init__(self, input_size, output_size): + super(LinearLayer, self).__init__() + self.linear_one = torch.nn.Linear(input_size, output_size) + + def forward(self, x): + x = self.linear_one(x) + return x + + linear = LinearLayer(len(check2.rand_tensor.flatten()), check2.rand_tensor.shape[0]) + linear.eval() + pt_linear = linear(check2.rand_tensor.flatten()).detach().numpy() + + he_linear = check2.input_img.apply_linear(linear.linear_one) + dec_linear = check2.cc.decrypt(check2.keys.secretKey, he_linear)[0:check2.rand_tensor.shape[0]] + + assert np.allclose(dec_linear, pt_linear, atol=1e-03), "Linear result did not match between HE and PyTorch, failed channel < shard" + +def test_apply_gelu_c1(check1) -> None: + gelu = torch.nn.GELU() + pt_gelu = gelu(check1.rand_tensor) + + he_gelu = check1.input_img.apply_gelu() + dec_gelu = he_gelu.decrypt_to_tensor(check1.cc, check1.keys).numpy() + + assert np.allclose(dec_gelu, 
pt_gelu, atol=1e-03), "GELU result did not match between HE and PyTorch, failed image < shard" + +def test_apply_gelu_c2(check2) -> None: + gelu = torch.nn.GELU() + pt_gelu = gelu(check2.rand_tensor) + + he_gelu = check2.input_img.apply_gelu() + dec_gelu = he_gelu.decrypt_to_tensor(check2.cc, check2.keys).numpy() + + assert np.allclose(dec_gelu, pt_gelu, atol=1e-03), "GELU result did not match between HE and PyTorch, failed channel < shard" + +def test_apply_gelu_c3(check3) -> None: + gelu = torch.nn.GELU() + pt_gelu = gelu(check3.rand_tensor) + + he_gelu = check3.input_img.apply_gelu() + dec_gelu = he_gelu.decrypt_to_tensor(check3.cc, check3.keys).numpy() + + assert np.allclose(dec_gelu, pt_gelu, atol=1e-03), "GELU result did not match between HE and PyTorch, failed channel > shard" \ No newline at end of file diff --git a/palisade_he_cnn/training/models/resnet50.py b/palisade_he_cnn/training/models/resnet50.py new file mode 100644 index 0000000..e66c2f7 --- /dev/null +++ b/palisade_he_cnn/training/models/resnet50.py @@ -0,0 +1,75 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +import torch +import torchvision +import torchvision.transforms as transforms +from PIL import ImageFile +from tqdm import tqdm + +ImageFile.LOAD_TRUNCATED_IMAGES = True + + +# Set device +device = torch.device("cuda" if torch.cuda.is_available() else "cpu") +device_ids = [0, 1, 2, 3, 4, 5, 6, 7] + +# Set hyperparameters +num_epochs = 1 +batch_size = 128 +learning_rate = 0.001 + +# Initialize transformations for data augmentation +transform = transforms.Compose([ + transforms.Resize(256), + transforms.RandomHorizontalFlip(), + transforms.RandomVerticalFlip(), + transforms.RandomRotation(degrees=45), + transforms.ColorJitter(brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) +]) + +# Load the ImageNet Object Localization Challenge dataset +train_dataset = torchvision.datasets.ImageFolder( + root='~/ImageNet/ILSVRC/Data/CLS-LOC/train', + transform=transform +) + +train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=8) + +# Load the ResNet50 model +model = torchvision.models.resnet50(weights='DEFAULT') + +# Parallelize training across multiple GPUs +model = torch.nn.DataParallel(model, device_ids = device_ids) + +# Set the model to run on the device +model = model.to(f'cuda:{model.device_ids[0]}') + +# Define the loss function and optimizer +criterion = torch.nn.CrossEntropyLoss() +optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate) + +# Train the model... 
+for epoch in range(num_epochs): + for inputs, labels in tqdm(train_loader): + # Move input and label tensors to the device + inputs = inputs.to(f'cuda:{model.device_ids[0]}') + labels = labels.to(f'cuda:{model.device_ids[0]}') + + # Zero out the optimizer + optimizer.zero_grad() + + # Forward pass + outputs = model(inputs) + loss = criterion(outputs, labels) + + # Backward pass + loss.backward() + optimizer.step() + + # Print the loss for every epoch + print(f'Epoch {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}') + +print(f'Finished Training, Loss: {loss.item():.4f}') diff --git a/palisade_he_cnn/training/models/resnet9.py b/palisade_he_cnn/training/models/resnet9.py new file mode 100644 index 0000000..27ab2c4 --- /dev/null +++ b/palisade_he_cnn/training/models/resnet9.py @@ -0,0 +1,130 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import torch +import torch.nn as nn + + +class Scale(nn.Module): + def __init__(self, scale: float = 0.125): + super().__init__() + self.scale = scale + + def forward(self, x) -> torch.Tensor: + return self.scale * x + + +def conv_block( + in_ch: int, + out_ch: int, + kernel_size: int = 3, + stride: int = 1, + padding: str = "same", + pool: bool = False, + gelu: bool = False +): + layers = [nn.Conv2d(in_ch, + out_ch, + kernel_size=kernel_size, + stride=stride, + padding=padding), + nn.BatchNorm2d(out_ch) + ] + if pool: + layers.append(nn.AvgPool2d(2, 2)) + + if gelu: + layers.append(nn.GELU()) + return nn.Sequential(*layers) + + +class ResNet9(nn.Module): + def __init__(self, + c_in: int = 4, + c_out: int = 36, + num_classes: int = 10, + scale_out: float = 0.125): + super().__init__() + self.c_out = c_out + self.conv1 = nn.Conv2d(c_in, + c_out, + kernel_size=(3, 3), + padding="same", + bias=True) + self.conv2 = conv_block(c_out, + 64, + kernel_size=1, + padding="same", + pool=False, + gelu=True) + self.conv3 = conv_block(64, + 128, + kernel_size=3, + padding="same", + pool=True, + 
gelu=True) + self.res1 = nn.Sequential( + conv_block(128, + 128, + kernel_size=3, + padding="same", + pool=False, + gelu=True), + conv_block(128, + 128, + kernel_size=3, + padding="same", + pool=False, + gelu=True) + + ) + self.conv4 = conv_block(128, + 256, + kernel_size=3, + padding="same", + pool=True, + gelu=True) + self.conv5 = conv_block(256, + 512, + kernel_size=3, + padding="same", + pool=True, + gelu=True) + self.res2 = nn.Sequential( + conv_block(512, + 512, + kernel_size=3, + padding="same", + pool=False, + gelu=True), + conv_block(512, + 512, + kernel_size=3, + padding="same", + pool=False, + gelu=True) + ) + self.classifier = nn.Sequential( + nn.Flatten(), + nn.Linear(512 * 4 * 4, num_classes, bias=True), + Scale(scale_out) + ) + + def set_conv1_weights(self, + weights: torch.Tensor, + bias: torch.Tensor): + self.conv1.weight.data = weights + self.conv1.weight.requires_grad = False + self.conv1.bias.data = bias + self.conv1.bias.requires_grad = False + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.conv1(x) + x = self.conv2(x) + x = self.conv3(x) + res1 = self.res1(x) + x = x + res1 + x = self.conv4(x) + x = self.conv5(x) + res2 = self.res2(x) + x = x + res2 + return self.classifier(x) diff --git a/palisade_he_cnn/training/models/resnetN_multiplexed.py b/palisade_he_cnn/training/models/resnetN_multiplexed.py new file mode 100644 index 0000000..0bfa223 --- /dev/null +++ b/palisade_he_cnn/training/models/resnetN_multiplexed.py @@ -0,0 +1,321 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). 
+ +from pathlib import Path +from typing import Any, List, Optional, Type + +import numpy as np +import torch +import torch.nn as nn +from torch import Tensor + +# Low-Complexity deep convolutional neural networks on +# FHE using multiplexed parallel convolutions +# +# https://eprint.iacr.org/2021/1688.pdf +# +# Our implementation is not an exact 1-to-1 + +__all__ = [ + "resnet_test", + "resnet20", + "resnet32", + "resnet44", + "resnet56", + "resnet110", +] + +POOL = nn.AvgPool2d(2, 2) +BN_MOMENTUM = 0.1 + + +class Debug(nn.Module): + def __init__(self, filename="temp.txt", debug=False): + super().__init__() + self.filename = Path("debug") / filename + self.debug = debug + # print(self.debug, filename) + + def forward(self, x) -> torch.Tensor: + if self.debug: + data = x.detach().cpu().numpy().ravel() + np.savetxt(self.filename, data, fmt="%0.04f") + return x + + +class Scale(nn.Module): + def __init__(self, scale: float = 0.125): + super().__init__() + self.scale = scale + + def forward(self, x) -> torch.Tensor: + return self.scale * x + + +def conv_bn(inchan: int, + outchan: int, + kernel: int = 3, + stride: int = 1, + padding: str = "same", + filenames: list = ["temp.txt"], + debug: bool = False) -> nn.Sequential: + return nn.Sequential( + nn.Conv2d( + inchan, + outchan, + kernel_size=kernel, + stride=1, + padding=padding + ), + nn.BatchNorm2d(outchan, momentum=BN_MOMENTUM), + Debug(filenames[0], debug) + ) + + +def conv_bn_down(inchan: int, + outchan: int, + kernel: int = 3, + stride: int = 1, + padding: str = "same", + filenames: list = ["temp.txt", "temp.txt"], + debug: bool = False, ) -> nn.Sequential: + return nn.Sequential( + nn.Conv2d( + inchan, + outchan, + kernel_size=kernel, + stride=1, + padding=padding + ), + nn.BatchNorm2d(outchan, momentum=BN_MOMENTUM), + Debug(filenames[0], debug), + POOL, + Debug(filenames[1], debug) + ) + + +class BasicBlock(nn.Module): + def __init__( + self, + inchan: int, + outchan: int, + kernel: int = 3, + stride: int = 1, 
+ padding: str = "same", + activation: nn.Module = nn.GELU(), + downsample: Optional[nn.Module] = None, + prefix: str = "l1", + debug: bool = False + ) -> None: + super().__init__() + + # If a skip module is defined (defined as downsample), then our first block + # in series needs to also include a downsampling operation, aka pooling. + if downsample is not None: + self.conv_bn_1 = conv_bn_down( + inchan=inchan, + outchan=outchan, + kernel=3, + stride=1, + padding=padding, + filenames=["%s_bn1.txt" % prefix, + "%s_pool.txt" % prefix], + debug=debug + ) + + else: + self.conv_bn_1 = conv_bn( + inchan=outchan, + outchan=outchan, + kernel=3, + stride=1, + padding=padding, + filenames=["%s_bn1.txt" % prefix], + debug=debug + ) + + self.conv_bn_2 = conv_bn( + inchan=outchan, + outchan=outchan, + kernel=3, + stride=1, + padding=padding, + filenames=["%s_bn2.txt" % prefix], + debug=debug + ) + + self.gelu = activation + self.downsample = downsample + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv_bn_1(x) + out = self.gelu(out) + + out = self.conv_bn_2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + if self.downsample is not None: + out = self.gelu(out) + + return out + + +class ResNet(nn.Module): + def __init__( + self, + block: Type[BasicBlock], + layers: List[int], + num_classes: int = 10, + debug: bool = False + ): + super().__init__() + self.debug = debug + + self.conv_bn_1 = nn.Sequential( + nn.Conv2d(4, 16, kernel_size=3, stride=1, padding="same"), + nn.BatchNorm2d(16, momentum=BN_MOMENTUM), + Debug("l0_bn1.txt", debug=debug) + ) + self.gelu0 = nn.GELU() + self.gelu1 = nn.GELU() + self.gelu2 = nn.GELU() + self.gelu3 = nn.GELU() + + self.debug0 = Debug("l0_gelu.txt", debug) + self.debug1 = Debug("l1_gelu.txt", debug) + self.debug2 = Debug("l2_gelu.txt", debug) + self.debug3 = Debug("l3_gelu.txt", debug) + + self.layer1 = self._make_layer( + block=block, + inchan=16, + outchan=16, + 
nblocks=layers[0], + stride=1, + prefix="l1" + ) + self.layer2 = self._make_layer( + block, + inchan=16, + outchan=32, + nblocks=layers[1], + stride=2, # Triggers downsample != None, not a true stride + prefix="l2" + ) + self.layer3 = self._make_layer( + block, + inchan=32, + outchan=32, + nblocks=layers[2], + stride=2, # Triggers downsample != None, not a true stride + prefix="l3" + ) + self.classifier = nn.Sequential( + POOL, + nn.Flatten(), + nn.Linear(32 * 4 * 4, num_classes), + Scale() + ) + + def forward(self, x: Tensor) -> Tensor: + + x = self.conv_bn_1(x) + x = self.gelu0(x) + x = self.debug0(x) + + x = self.layer1(x) + x = self.gelu1(x) + x = self.debug1(x) + + x = self.layer2(x) + x = self.gelu2(x) + x = self.debug2(x) + + x = self.layer3(x) + x = self.gelu3(x) + x = self.debug3(x) + + return self.classifier(x) + + def _make_layer( + self, + block: Type[BasicBlock], + inchan: int, + outchan: int, + nblocks: int, + stride: int = 1, + prefix: str = "l1" + ) -> nn.Sequential: + + downsample = None + + if stride != 1: + downsample = conv_bn_down( + inchan=inchan, + outchan=outchan, + filenames=["%s_ds_bn1.txt" % prefix, + "%s_ds_pool.txt" % prefix], + debug=self.debug, + kernel=3, + stride=1, + padding="same" + ) + + layers = [] + for i in range(0, nblocks): + # Only need it for first iter + if i == 1: + downsample = None + + layers.append( + block( + inchan=inchan, + outchan=outchan, + kernel=3, + stride=stride, + padding="same", + activation=nn.GELU(), + downsample=downsample, + prefix=prefix + "_%s" % str(i), + debug=self.debug + ) + ) + + return nn.Sequential(*layers) + + +def _resnet( + block: Type[BasicBlock], + layers: List[int], + **kwargs: Any, +) -> ResNet: + return ResNet(block, layers, **kwargs) + + +def resnet_test(**kwargs: Any) -> ResNet: + return _resnet(BasicBlock, [1, 1, 1], **kwargs) + + +def resnet20(**kwargs: Any) -> ResNet: + return _resnet(BasicBlock, [3, 3, 3], **kwargs) + + +def resnet32(**kwargs: Any) -> ResNet: + return 
_resnet(BasicBlock, [5, 5, 5], **kwargs) + + +def resnet44(**kwargs: Any) -> ResNet: + return _resnet(BasicBlock, [7, 7, 7], **kwargs) + + +def resnet56(**kwargs: Any) -> ResNet: + return _resnet(BasicBlock, [9, 9, 9], **kwargs) + + +def resnet110(**kwargs: Any) -> ResNet: + return _resnet(BasicBlock, [18, 18, 18], **kwargs) diff --git a/palisade_he_cnn/training/optuna_params.py b/palisade_he_cnn/training/optuna_params.py new file mode 100644 index 0000000..66e9bc7 --- /dev/null +++ b/palisade_he_cnn/training/optuna_params.py @@ -0,0 +1,68 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +def get_optuna_params(model_type: str, dataset: str) -> dict: + params = {} + if dataset=='CIFAR10': + if model_type=='resnet20': + params["lr"] = 0.0016822249163093617 + params["lr_bias"] = 63.934695046801245 + params["momentum"] = 0.8484574950771097 + params["weight_decay"] = 0.11450934135118791 + elif model_type=='resnet32': + # 91.19: lr: 0.0013205254360784781, lr_bias: 61.138281101282544, momentum: 0.873508553678625, weight_decay: 0.26911634559915815 + # 91.1 : lr: 0.0013978655308274968, lr_bias: 70.43940111170473, momentum: 0.8611100787383372, weight_decay: 0.2604742590264777 + # 90.99: lr: 0.0019695910893940986, lr_bias: 60.930501987151686, momentum: 0.8831260271578129, weight_decay: 0.1456126229025426 + params["lr"] = 0.0013205254360784781 + params["lr_bias"] = 61.138281101282544 + params["momentum"] = 0.873508553678625 + params["weight_decay"] = 0.26911634559915815 + elif model_type=='resnet44': + # 91.49: 0.0017177668853317557 72.4258603207131 0.8353896320183106 0.16749858871622 + # 91.16: 0.0019608745758959625 67.9132255882833 0.8041541468923449 0.19517278422517992 + # 91.01: 0.0009350979452929332 71.95838038824016 0.858379476548086 0.06780300392316674 + params["lr"] = 0.0017177668853317557 + params["lr_bias"] = 72.4258603207131 + params["momentum"] = 0.8353896320183106 + params["weight_decay"] = 0.16749858871622 + elif 
model_type=='resnet56': + # 92.12: 0.0012022823706985977 71.31108702685964 0.8252747623136261 0.26463818739336625 + # 91.90: 0.0010850336892236205 55.20534833175523 0.8738224946147084 0.10705317777179325 + # 91.56: 0.0019151327847040805 63.38376732305882 0.9134938189630787 0.24446065595718675 + params["lr"] = 0.0012022823706985977 + params["lr_bias"] = 71.31108702685964 + params["momentum"] = 0.8252747623136261 + params["weight_decay"] = 0.26463818739336625 + elif model_type=='resnet110': + # 92.23: 0.001477698037686629 61.444988882569774 0.7241645867415002 0.23586225065185779 + # 92.17: 0.0017110807237653582 65.2511959805971 0.8078620231092996 0.19065715813207001 + # 92.16: 0.0015513227695282382 59.89497310126697 0.7355843250067341 0.13248840913478463 + params["lr"] = 0.001477698037686629 + params["lr_bias"] = 61.444988882569774 + params["momentum"] = 0.7241645867415002 + params["weight_decay"] = 0.23586225065185779 + else: + print("model_type and dataset are incorrectly specified. Returning resnet20 params.") + params["lr"] = 0.0016822249163093617 + params["lr_bias"] = 63.934695046801245 + params["momentum"] = 0.8484574950771097 + params["weight_decay"] = 0.11450934135118791 + else: + # only return resnet32 since CIFAR100 only needs this + if model_type=='resnet32': + # 65.09: 0.0018636209167742187 64.96657354785438 0.9186032548289501 0.15017464467868924 + # 64.70: 0.0017509006966116355 60.10884856596049 0.8921508582343675 0.10919043636429121 + # 64.49: 0.0015358614659175514 59.175398449172015 0.8553794786037812 0.20824545084283141 + params["lr"] = 0.0018636209167742187 + params["lr_bias"] = 64.96657354785438 + params["momentum"] = 0.9186032548289501 + params["weight_decay"] = 0.15017464467868924 + else: + # Default bag of trick params + params["lr"] = 0.001 + params["lr_bias"] = 64 + params["momentum"] = 0.9 + params["weight_decay"] = 0.256 + + print("Loading params for %s, %s" % (model_type, dataset)) + print(params) + return params diff --git 
a/palisade_he_cnn/training/train_resnet9.py b/palisade_he_cnn/training/train_resnet9.py new file mode 100644 index 0000000..3ea6c7e --- /dev/null +++ b/palisade_he_cnn/training/train_resnet9.py @@ -0,0 +1,308 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import argparse +import copy +import json +import time + +from palisade_he_cnn.training.models.resnet9 import ResNet9 +from palisade_he_cnn.training.utils.utils_dataloading import * +from palisade_he_cnn.training.utils.utils_kurtosis import * +from palisade_he_cnn.training.utils.utils_resnetN import ( + patch_whitening, update_nesterov, update_ema, label_smoothing_loss +) + + +def argparsing(): + parser = argparse.ArgumentParser() + parser.add_argument('-bs', '--batch', + help='Batch size', + type=int, + required=False, + default=512) + parser.add_argument('-e', '--epochs', + help='Number of epochs', + type=int, + required=False, + default=100) + parser.add_argument('-c', '--cuda', + help='CUDA device number', + type=int, + required=False, + default=0) + parser.add_argument('-r', '--nruns', + help='Number of training runs', + type=int, + required=False, + default=5) + parser.add_argument('-dataset', '--dataset', + help='CIFAR10 or CIFAR100', + type=str, + choices=['CIFAR10', 'CIFAR100'], + required=False, + default='CIFAR10') + parser.add_argument('-s', '--save', + help='Save model and log files', + type=bool, + required=False, + default=True) + + return vars(parser.parse_args()) + + +def train( + dataset, + epochs, + batch_size, + momentum, + weight_decay, + weight_decay_bias, + ema_update_freq, + ema_rho, + device, + dtype, + kwargs, + use_TTA, + seed=0 +): + lr_schedule = torch.cat([ + torch.linspace(0e+0, 2e-3, 194), + torch.linspace(2e-3, 2e-4, 582), + ]) + + lr_schedule_bias = 64.0 * lr_schedule + + kurt_schedule = torch.cat([ + torch.linspace(0, 1e-1, 2000), + ]) + + # Print information about hardware on first run + if seed == 0: + if device.type == "cuda": + 
print("Device :", torch.cuda.get_device_name(device.index)) + + print("Dtype :", dtype) + print() + + # Start measuring time + start_time = time.perf_counter() + + # Set random seed to increase chance of reproducability + torch.manual_seed(seed) + + # Setting cudnn.benchmark to True hampers reproducability, but is faster + torch.backends.cudnn.benchmark = True + + # Load dataset + if dataset == "CIFAR10": + train_data, train_targets, valid_data, valid_targets = load_cifar10(device, dtype) + else: + train_data, train_targets, valid_data, valid_targets = load_cifar100(device, dtype) + + train_data = torch.cat( + [train_data, torch.zeros(train_data.size(0), 1, train_data.size(2), train_data.size(3)).to(device)], dim=1) + valid_data = torch.cat( + [valid_data, torch.zeros(valid_data.size(0), 1, valid_data.size(2), valid_data.size(3)).to(device)], dim=1) + + temp = train_data[:10000, :, 4:-4, 4:-4] + weights = patch_whitening(temp) + + train_model = ResNet9(c_in=weights.size(1), + c_out=weights.size(0), + **kwargs).to(device) + train_model.set_conv1_weights( + weights=weights.to(device), + bias=torch.zeros(weights.size(0)).to(device) + ) + train_model.to(dtype) + + # Convert BatchNorm back to single precision for better accuracy + for module in train_model.modules(): + if isinstance(module, nn.BatchNorm2d): + module.float() + + # Collect weights and biases and create nesterov velocity values + weights = [ + (w, torch.zeros_like(w)) + for w in train_model.parameters() + if w.requires_grad and len(w.shape) > 1 + ] + biases = [ + (w, torch.zeros_like(w)) + for w in train_model.parameters() + if w.requires_grad and len(w.shape) <= 1 + ] + + # Copy the model for validation + valid_model = copy.deepcopy(train_model) + + print(f"Preprocessing: {time.perf_counter() - start_time:.2f} seconds") + + # Train and validate + print("\nepoch batch train time [sec] validation accuracy") + train_time = 0.0 + batch_count = 0 + best_acc = 0.0 + best_model = None + for epoch in range(1, 
epochs + 1): + start_time = time.perf_counter() + + # Randomly shuffle training data + indices = torch.randperm(len(train_data), device=device) + data = train_data[indices] + targets = train_targets[indices] + + # Crop random 32x32 patches from 40x40 training data + data = [ + random_crop(data[i: i + batch_size], crop_size=(32, 32)) + for i in range(0, len(data), batch_size) + ] + data = torch.cat(data) + + # Randomly flip half the training data + data[: len(data) // 2] = torch.flip(data[: len(data) // 2], [-1]) + + for i in range(0, len(data), batch_size): + # discard partial batches + if i + batch_size > len(data): + break + + # Slice batch from data + inputs = data[i: i + batch_size] + target = targets[i: i + batch_size] + batch_count += 1 + + # Compute new gradients + train_model.zero_grad() + train_model.train(True) + + # kurtosis setup + remove_all_hooks(train_model) + activations = get_intermediate_output(train_model) + + logits = train_model(inputs) + loss = label_smoothing_loss(logits, target, alpha=0.2) + + # kurtosis scheduler + kurt_index = min(batch_count, len(kurt_schedule) - 1) + kurt_scale = kurt_schedule[kurt_index] + + # kurtosis calculation and cleanup + remove_all_hooks(train_model) + activations = {k: torch.cat(v) for k, v in activations.items()} + loss_means, loss_stds, loss_kurts = get_statistics(activations) + loss += (loss_means + loss_stds + loss_kurts) * kurt_scale + + loss.sum().backward() + + lr_index = min(batch_count, len(lr_schedule) - 1) + lr = lr_schedule[lr_index] + lr_bias = lr_schedule_bias[lr_index] + + # Update weights and biases of training model + update_nesterov(weights, lr, weight_decay, momentum) + update_nesterov(biases, lr_bias, weight_decay_bias, momentum) + + # Update validation model with exponential moving averages + if (i // batch_size % ema_update_freq) == 0: + update_ema(train_model, valid_model, ema_rho) + + # Add training time + train_time += time.perf_counter() - start_time + + valid_correct = [] + for i in 
range(0, len(valid_data), batch_size): + valid_model.train(False) + + # Test time agumentation: Test model on regular and flipped data + regular_inputs = valid_data[i: i + batch_size] + logits = valid_model(regular_inputs).detach() + + if use_TTA: + flipped_inputs = torch.flip(regular_inputs, [-1]) + logits2 = valid_model(flipped_inputs).detach() + logits = torch.mean(torch.stack([logits, logits2], dim=0), dim=0) + + # Compute correct predictions + correct = logits.max(dim=1)[1] == valid_targets[i: i + batch_size] + valid_correct.append(correct.detach().type(torch.float64)) + + # Accuracy is average number of correct predictions + valid_acc = torch.mean(torch.cat(valid_correct)).item() + if valid_acc > best_acc: + best_acc = valid_acc + best_model = train_model + + print(f"{epoch:5} {batch_count:8d} {train_time:19.2f} {valid_acc:22.4f}") + + return best_acc, best_model + + +def main(): + args = argparsing() + + model_type = "resnet9" + cifar_dataset = args["dataset"] + save = args["save"] + weight_name = 'weights/%s_%s' % (model_type, cifar_dataset) + kwargs = { + "num_classes": 10 if cifar_dataset == 'CIFAR10' else 100, + "scale_out": 0.125 + } + + device = torch.device("cuda:%s" % args["cuda"] if torch.cuda.is_available() else "cpu") + dtype = torch.float32 + + # Configurable parameters + ema_update_freq = 5 + params = { + "dataset": cifar_dataset, + "epochs": args["epochs"], + "batch_size": args["batch"], + "momentum": 0.9, + "weight_decay": 0.256, + "weight_decay_bias": 0.004, + "ema_update_freq": ema_update_freq, + "ema_rho": 0.99 ** ema_update_freq, + "kwargs": kwargs, + "use_TTA": False + } + + log = { + "weights": weight_name, + "model_type": model_type, + "kwargs": kwargs, + "params": params + } + + nruns = args["nruns"] + + accuracies = [] + for run in range(nruns): + weight_name_seed = weight_name + "_run%d.pt" % run + + best_acc, best_model = train(**params, + device=device, + dtype=dtype, + seed=run) + accuracies.append(best_acc) + print("Best Run 
Accuracy: %1.4f" % best_acc) + log["run%s" % run] = best_acc + + if save: + print("Saving %s" % weight_name_seed) + torch.save(best_model.state_dict(), weight_name_seed) + + mean = sum(accuracies) / len(accuracies) + variance = sum((acc - mean) ** 2 for acc in accuracies) / len(accuracies) + std = variance ** 0.5 + print("Accuracy: %1.4f +/- %1.4f" % (mean, std)) + log["accuracy"] = [mean, std] + + if save: + with open("logs/logs_resnet9_%s.json" % cifar_dataset, 'w') as fp: + json.dump(log, fp) + + +if __name__ == "__main__": + main() diff --git a/palisade_he_cnn/training/train_resnetN.py b/palisade_he_cnn/training/train_resnetN.py new file mode 100644 index 0000000..64427b0 --- /dev/null +++ b/palisade_he_cnn/training/train_resnetN.py @@ -0,0 +1,354 @@ +# (c) 2021-2024 The Johns Hopkins University Applied Physics Laboratory LLC (JHU/APL). + +import argparse +import copy +import json +import time + +import torch.nn as nn + +from optuna_params import get_optuna_params +from palisade_he_cnn.training.utils.utils_dataloading import random_crop +from palisade_he_cnn.training.utils.utils_kurtosis import * +from palisade_he_cnn.training.utils.utils_resnetN import ( + get_model, update_nesterov, update_ema, label_smoothing_loss +) + +# training time augmentation +use_TTA = False + + +class EarlyStopper: + def __init__(self, patience=1, min_delta=0): + self.patience = patience + self.min_delta = min_delta + self.counter = 0 + self.max_accuracy = 0.0 + + def early_stop(self, accuracy): + if accuracy > self.max_accuracy: + self.max_accuracy = accuracy + self.counter = 0 + elif accuracy <= (self.max_accuracy + self.min_delta): + self.counter += 1 + if self.counter >= self.patience: + return True + return False + + +def argparsing(): + parser = argparse.ArgumentParser() + parser.add_argument('-n', '--nlayers', + help='ResNet model depth', + type=int, + choices=[20, 32, 44, 56, 110], + required=True) + parser.add_argument('-bs', '--batch', + help='Batch size', + type=int, + 
required=False, + default=256) + parser.add_argument('-e', '--epochs', + help='Number of epochs', + type=int, + required=False, + default=100) + parser.add_argument('-d', '--debug', + help='Debugging mode', + type=bool, + required=False, + default=False) + parser.add_argument('-c', '--cuda', + help='CUDA device number', + type=int, + required=False, + default=0) + parser.add_argument('-dataset', '--dataset', + help='CIFAR10 or CIFAR100', + type=str, + choices=['CIFAR10', 'CIFAR100'], + required=False, + default='CIFAR10') + parser.add_argument('-s', '--save', + help='Save model and log files', + type=bool, + required=False, + default=True) + + return vars(parser.parse_args()) + + +def train( + dataset, + epochs, + batch_size, + lr, + lr_bias, + momentum, + weight_decay, + weight_decay_bias, + ema_update_freq, + ema_rho, + device, + dtype, + model_type, + kwargs, + seed=0 +): + # Load dataset + if dataset == "CIFAR10": + train_data, train_targets, valid_data, valid_targets = load_cifar10(device, dtype) + else: + train_data, train_targets, valid_data, valid_targets = load_cifar100(device, dtype) + + train_data = torch.cat( + [train_data, torch.zeros(train_data.size(0), 1, train_data.size(2), train_data.size(3)).to(device)], dim=1) + valid_data = torch.cat( + [valid_data, torch.zeros(valid_data.size(0), 1, valid_data.size(2), valid_data.size(3)).to(device)], dim=1) + + N = int(len(train_data) / batch_size) # 50k / 256, now below is organized by epoch # + lr_schedule = torch.cat([ + torch.linspace(0.0, lr, N), + # torch.linspace(lr, lr, 2*N), + torch.linspace(lr, 1e-4, 3 * N), + torch.linspace(1e-4, 1e-4, 50 * N), + torch.linspace(1e-5, 1e-5, 25 * N), + torch.linspace(1e-6, 1e-6, 25 * N), + ]) + lr_schedule_bias = lr_bias * lr_schedule + + kurt_schedule = torch.cat([ + torch.linspace(0, 0, 10 * N), + torch.linspace(0.05, 0.05, 2 * N), + torch.linspace(0.1, 0.1, 200 * N), + ]) + # Print information about hardware on first run + if seed == 0: + if device.type == "cuda": 
+ print("Device :", torch.cuda.get_device_name(device.index)) + + print("Dtype :", dtype) + print() + + # Start measuring time + start_time = time.perf_counter() + + # Set random seed to increase chance of reproducability + torch.manual_seed(seed) + + # Setting cudnn.benchmark to True hampers reproducability, but is faster + torch.backends.cudnn.benchmark = True + + # Convert model weights to half precision + train_model = get_model(model_type, kwargs).to(device) + train_model.to(dtype) + + # Convert BatchNorm back to single precision for better accuracy + for module in train_model.modules(): + if isinstance(module, nn.BatchNorm2d): + module.float() + + # Collect weights and biases and create nesterov velocity values + weights = [ + (w, torch.zeros_like(w)) + for w in train_model.parameters() + if w.requires_grad and len(w.shape) > 1 + ] + biases = [ + (w, torch.zeros_like(w)) + for w in train_model.parameters() + if w.requires_grad and len(w.shape) <= 1 + ] + + # Copy the model for validation + valid_model = copy.deepcopy(train_model) + + # Patience: + early_stopper = EarlyStopper(patience=120, min_delta=0.001) # this is % + + # Testing non-SGD optimizer + optimizer = torch.optim.Adam(train_model.parameters(), lr=0.001) + + print(f"Preprocessing: {time.perf_counter() - start_time:.2f} seconds") + print("\nepoch batch train time [sec] validation accuracy") + + train_time = 0.0 + batch_count = 0 + best_acc = 0.0 + best_model = None + for epoch in range(1, epochs + 1): + start_time = time.perf_counter() + + # Randomly shuffle training data + indices = torch.randperm(len(train_data), device=device) + data = train_data[indices] + targets = train_targets[indices] + + # Crop random 32x32 patches from 40x40 training data + data = [ + random_crop(data[i: i + batch_size], crop_size=(32, 32)) + for i in range(0, len(data), batch_size) + ] + data = torch.cat(data) + + # Randomly flip half the training data + data[: len(data) // 2] = torch.flip(data[: len(data) // 2], [-1]) + 
+ for i in range(0, len(data), batch_size): + # Discard partial batches + if i + batch_size > len(data): + break + + # Slice batch from data + inputs = data[i: i + batch_size] + target = targets[i: i + batch_size] + batch_count += 1 + + # Compute new gradients + train_model.zero_grad() + train_model.train(True) + + # kurtosis setup + remove_all_hooks(train_model) + activations = get_intermediate_output(train_model) + + logits = train_model(inputs) + loss = label_smoothing_loss(logits, target, alpha=0.2) + + # kurtosis scheduler + kurt_index = min(batch_count, len(kurt_schedule) - 1) + kurt_scale = kurt_schedule[kurt_index] + + # kurtosis calculation and cleanup + remove_all_hooks(train_model) + activations = {k: torch.cat(v) for k, v in activations.items()} + loss_means, loss_stds, loss_kurts = get_statistics(activations) + loss += (loss_means + loss_stds + loss_kurts) * kurt_scale + + loss.sum().backward() + + lr_index = min(batch_count, len(lr_schedule) - 1) + lr = lr_schedule[lr_index] + lr_bias = lr_schedule_bias[lr_index] + + # Update weights and biases of training model + update_nesterov(weights, lr, weight_decay, momentum) + update_nesterov(biases, lr_bias, weight_decay_bias, momentum) + + # Update validation model with exponential moving averages + if (i // batch_size % ema_update_freq) == 0: + update_ema(train_model, valid_model, ema_rho) + + # Add training time + train_time += time.perf_counter() - start_time + + valid_correct = [] + for i in range(0, len(valid_data), batch_size): + valid_model.train(False) + regular_inputs = valid_data[i: i + batch_size] + logits = valid_model(regular_inputs).detach() + + if use_TTA: + flipped_inputs = torch.flip(regular_inputs, [-1]) + logits2 = valid_model(flipped_inputs).detach() + logits = torch.mean(torch.stack([logits, logits2], dim=0), dim=0) + + # Compute correct predictions + correct = logits.max(dim=1)[1] == valid_targets[i: i + batch_size] + + valid_correct.append(correct.detach().type(torch.float64)) + + # 
def main():
    """Entry point: train five seeded runs of the configured ResNet on
    CIFAR-10/100 with Optuna-tuned hyperparameters, then report and
    optionally persist weights and a JSON log of per-run accuracies."""
    args = argparsing()

    model_type = "resnet%s" % args["nlayers"]
    cifar_dataset = args["dataset"]
    save = args["save"]
    weight_name = 'weights/%s_%s' % (model_type, cifar_dataset)

    print("ResNet%s" % args["nlayers"])
    print("Weight file:", weight_name)

    kwargs = dict(
        num_classes=10 if cifar_dataset == 'CIFAR10' else 100,
        debug=args["debug"],
    )

    # Fall back to CPU when no GPU is visible.
    device = torch.device("cuda:%s" % args["cuda"] if torch.cuda.is_available() else "cpu")
    dtype = torch.float32

    # Hyperparameters previously found by the Optuna study for this model/dataset.
    optuna_params = get_optuna_params(model_type, cifar_dataset)

    # EMA decay is expressed per-update so it composes with the update frequency.
    ema_update_freq = 5
    params = dict(
        dataset=cifar_dataset,
        epochs=args["epochs"],
        batch_size=args["batch"],
        lr=optuna_params["lr"],
        lr_bias=optuna_params["lr_bias"],
        momentum=optuna_params["momentum"],
        weight_decay=optuna_params["weight_decay"],
        weight_decay_bias=0.004,
        ema_update_freq=ema_update_freq,
        ema_rho=0.99 ** ema_update_freq,
        model_type=model_type,
        kwargs=kwargs,
    )

    n_runs = 5
    log = dict(
        weights=weight_name,
        model_type=model_type,
        kwargs=kwargs,
        params=params,
    )

    run_accuracies = []
    for run in range(n_runs):
        weight_name_seed = weight_name + "_run%d.pt" % run

        # The run index doubles as the RNG seed so runs are reproducible.
        best_acc, best_model = train(**params,
                                     device=device,
                                     dtype=dtype,
                                     seed=run)
        run_accuracies.append(best_acc)
        print("Best Run Accuracy: %1.4f" % best_acc)
        log["run%s" % run] = best_acc

        if save:
            print("Saving %s" % weight_name_seed)
            torch.save(best_model.state_dict(), weight_name_seed)

    # Population mean / standard deviation over the runs.
    mean = sum(run_accuracies) / len(run_accuracies)
    variance = sum((a - mean) ** 2 for a in run_accuracies) / len(run_accuracies)
    std = variance ** 0.5
    print("Accuracy: %1.4f +/- %1.4f" % (mean, std))
    log["accuracy"] = [mean, std]

    if save:
        with open("logs/logs_resnet%s_%s.json" % (args["nlayers"], cifar_dataset), 'w') as fp:
            json.dump(log, fp)


if __name__ == "__main__":
    main()
def train(
    trial,
    dataset,
    epochs,
    batch_size,
    weight_decay_bias,
    ema_update_freq,
    ema_rho,
    device,
    dtype,
    model_type,
    kwargs,
    seed=0
):
    """Train one Optuna trial of a multiplexed ResNet on CIFAR-10/100.

    Hyperparameters (lr, lr_bias multiplier, momentum, weight decay) are
    sampled from `trial`. Reports validation accuracy after every epoch and
    raises TrialPruned when the pruner asks. Returns the best validation
    accuracy observed over all epochs.
    """
    # Print information about hardware on first run
    if seed == 0:
        if device.type == "cuda":
            print("Device :", torch.cuda.get_device_name(device.index))

        print("Dtype :", dtype)
        print()

    # Start measuring time
    start_time = time.perf_counter()

    # Set random seed to increase chance of reproducibility
    torch.manual_seed(seed)

    # Load dataset
    if dataset == "CIFAR10":
        train_data, train_targets, valid_data, valid_targets = load_cifar10(device, dtype)
    else:
        train_data, train_targets, valid_data, valid_targets = load_cifar100(device, dtype)

    # Append one all-zero channel (multiplexed models expect 4 input channels).
    # Fix: create the zeros directly on `device` with the data's dtype — the
    # original built float32 CPU tensors, forcing a host->device copy and
    # crashing torch.cat whenever `dtype` is not float32.
    def _pad_channel(x):
        pad = torch.zeros(x.size(0), 1, x.size(2), x.size(3),
                          device=x.device, dtype=x.dtype)
        return torch.cat([x, pad], dim=1)

    train_data = _pad_channel(train_data)
    valid_data = _pad_channel(valid_data)

    # Convert model weights to the requested precision
    train_model = get_model(model_type, kwargs).to(device)
    train_model.to(dtype)
    train_model.train()

    # Generate the optimizer hyperparameters for this trial.
    # NOTE: lr_bias is a *multiplier* applied to the weight schedule below,
    # hence its large 54-74 range.
    lr = trial.suggest_float("lr", 9e-4, 2e-3)
    lr_bias = trial.suggest_float("lr_bias", 54, 74)
    momentum = trial.suggest_float("momentum", 0.7, .99)
    weight_decay = trial.suggest_float("weight_decay", 0.01, .3)

    # Piecewise-linear learning-rate schedule indexed by batch count.
    N = int(len(train_data) / batch_size)
    lr_schedule = torch.cat([
        torch.linspace(0.0, lr, N),
        torch.linspace(lr, 1e-4, 3 * N),
        torch.linspace(1e-4, 1e-4, 50 * N),
        torch.linspace(1e-5, 1e-5, 25 * N),
        torch.linspace(1e-6, 1e-6, 25 * N),
    ])
    lr_schedule_bias = lr_bias * lr_schedule

    # Kurtosis regularization is ramped in after a warm-up period.
    kurt_schedule = torch.cat([
        torch.linspace(0, 0, 10 * N),
        torch.linspace(0.05, 0.05, 2 * N),
        torch.linspace(0.1, 0.1, 200 * N),
    ])

    # Convert BatchNorm back to single precision for better accuracy
    for module in train_model.modules():
        if isinstance(module, nn.BatchNorm2d):
            module.float()

    # Collect weights and biases and create nesterov velocity values
    weights = [
        (w, torch.zeros_like(w))
        for w in train_model.parameters()
        if w.requires_grad and len(w.shape) > 1
    ]
    biases = [
        (w, torch.zeros_like(w))
        for w in train_model.parameters()
        if w.requires_grad and len(w.shape) <= 1
    ]

    # Copy the model for validation (updated via EMA, never trained directly)
    valid_model = copy.deepcopy(train_model)

    print(f"Preprocessing: {time.perf_counter() - start_time:.2f} seconds")

    # Train and validate
    print("\nepoch batch train time [sec] validation accuracy")
    train_time = 0.0
    batch_count = 0
    best_acc = []
    for epoch in range(1, epochs + 1):
        start_time = time.perf_counter()

        # Randomly shuffle training data
        indices = torch.randperm(len(train_data), device=device)
        data = train_data[indices]
        targets = train_targets[indices]

        # Crop random 32x32 patches from 40x40 training data
        data = [
            random_crop(data[i: i + batch_size], crop_size=(32, 32))
            for i in range(0, len(data), batch_size)
        ]
        data = torch.cat(data)

        # Randomly flip half the training data
        data[: len(data) // 2] = torch.flip(data[: len(data) // 2], [-1])

        for i in range(0, len(data), batch_size):
            # Discard partial batches
            if i + batch_size > len(data):
                break

            # Slice batch from data
            inputs = data[i: i + batch_size]
            target = targets[i: i + batch_size]
            batch_count += 1

            # Compute new gradients
            train_model.zero_grad()
            train_model.train(True)

            # kurtosis setup: hook GELU inputs so their statistics can be
            # regularized alongside the classification loss
            remove_all_hooks(train_model)
            activations = get_intermediate_output(train_model)

            logits = train_model(inputs)
            loss = label_smoothing_loss(logits, target, alpha=0.2)

            # kurtosis scheduler
            kurt_index = min(batch_count, len(kurt_schedule) - 1)
            kurt_scale = kurt_schedule[kurt_index]

            # kurtosis calculation and cleanup
            remove_all_hooks(train_model)
            activations = {k: torch.cat(v) for k, v in activations.items()}
            loss_means, loss_stds, loss_kurts = get_statistics(activations)
            loss += (loss_means + loss_stds + loss_kurts) * kurt_scale

            loss.sum().backward()

            # Use distinct names so the sampled `lr`/`lr_bias` hyperparameters
            # are not clobbered by the per-step schedule values.
            lr_index = min(batch_count, len(lr_schedule) - 1)
            step_lr = lr_schedule[lr_index]
            step_lr_bias = lr_schedule_bias[lr_index]

            # Update weights and biases of training model
            update_nesterov(weights, step_lr, weight_decay, momentum)
            update_nesterov(biases, step_lr_bias, weight_decay_bias, momentum)

            # Update validation model with exponential moving averages
            if (i // batch_size % ema_update_freq) == 0:
                update_ema(train_model, valid_model, ema_rho)

        # Add training time
        train_time += time.perf_counter() - start_time

        correct = []
        valid_model.train(False)  # hoisted out of the loop — it is idempotent
        with torch.no_grad():
            for i in range(0, len(valid_data), batch_size):
                regular_inputs = valid_data[i: i + batch_size]
                logits = valid_model(regular_inputs).detach()

                if use_TTA:
                    # Test-time augmentation: average logits with the mirrored input
                    flipped_inputs = torch.flip(regular_inputs, [-1])
                    logits2 = valid_model(flipped_inputs).detach()
                    logits = torch.mean(torch.stack([logits, logits2], dim=0), dim=0)

                # Compute correct predictions
                temp = logits.max(dim=1)[1] == valid_targets[i: i + batch_size]
                correct.append(temp.detach().type(torch.float64))

        # Accuracy is average number of correct predictions
        accuracy = torch.mean(torch.cat(correct)).item()
        best_acc.append(accuracy)
        print(f"{epoch:5} {batch_count:8d} {train_time:19.2f} {accuracy:22.4f}")

        trial.report(accuracy, epoch)

        # Handle pruning based on the intermediate value.
        if trial.should_prune():
            raise optuna.exceptions.TrialPruned()

    return max(best_acc)
def argparsing():
    """Parse CLI options: CUDA device, dataset, and comma-separated lists of
    layer counts, epochs and batch sizes (one entry per model to train).

    NOTE: the list arguments are required, so their defaults are never used;
    they document the expected format (brackets optional, e.g. '[20,32]').
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--cuda',
                        help='CUDA device number',
                        type=int,
                        required=False,
                        default=0)
    parser.add_argument('-dataset', '--dataset',
                        help='CIFAR10 or CIFAR100',
                        type=str,
                        choices=['CIFAR10', 'CIFAR100'],
                        required=False,
                        default='CIFAR10')
    parser.add_argument('-n', '--nlayers',
                        help='List of layers in string format',
                        type=str,
                        required=True,
                        default='[20,32,44,56,110]')
    parser.add_argument('-e', '--epochs',
                        help='List of epochs in string format',
                        type=str,
                        required=True,
                        default='[100,100,100,100,100]')
    parser.add_argument('-bs', '--batch_size',
                        help='List of batch sizes in string format',
                        type=str,
                        required=True,
                        default='[256,256,256,256,64]')
    return vars(parser.parse_args())


def str2list(arg):
    """Parse a comma-separated integer list, tolerating surrounding brackets.

    Fix: the original did not strip '[' / ']', so the documented bracketed
    format (e.g. '[20,32]') crashed with `int('[20')`.
    """
    return [int(item) for item in arg.strip('[]').split(',') if item != '']


def main():
    """Launch one train_resnetN.py subprocess per configured model."""
    args = argparsing()
    layer = str2list(args["nlayers"])
    # Fix: these two assignments were swapped — epochs were parsed into
    # `batch` and batch sizes into `epoch`.
    epoch = str2list(args["epochs"])
    batch = str2list(args["batch_size"])

    dataset = args["dataset"]
    cuda = args["cuda"]

    for i in range(len(layer)):
        # Human-readable echo of the command about to run.
        cmd = "python3 train_resnetN.py -n %s -bs %s -e %s -dataset %s -c %d" \
              % (layer[i], batch[i], epoch[i], dataset, cuda)

        print("\n")
        print(cmd)
        subprocess.run(
            [
                "python3",
                "train_resnetN.py",
                "-n", "%s" % str(layer[i]),
                "-bs", "%s" % str(batch[i]),
                "-e", "%s" % str(epoch[i]),
                "-c", "%s" % str(cuda),
                "-dataset", dataset
            ],
            shell=False)


if __name__ == "__main__":
    main()
class PadChannel(object):
    """Transform that appends `npad` all-zero channels to a CHW tensor
    (used to pad RGB images to the 4 channels the multiplexed models expect)."""

    def __init__(self, npad: int = 1):
        self.n = npad

    def __call__(self, x):
        _, width, height = x.shape
        # Fix: match the input's dtype — the original always created float32
        # zeros, which makes torch.cat fail for half-precision inputs.
        x = torch.cat([x, torch.zeros(self.n, width, height, dtype=x.dtype)])
        return x


def get_gelu_poly_coeffs(degree, filename='gelu_poly_approx_params.txt'):
    """Load polynomial-GELU approximation coefficients for the given degree.

    The file holds one Python-literal coefficient list per line, ordered by
    degree (2, 4, 8, 16, 32). Unknown degrees fall back to degree 8.
    """
    with open(filename, 'r') as fp:
        params = [ast.literal_eval(line.rstrip('\n')) for line in fp]

    index_by_degree = {2: 0, 4: 1, 8: 2, 16: 3, 32: 4}
    if degree not in index_by_degree:
        print("Defaulting to deg8")
        return params[2]
    return params[index_by_degree[degree]]


def patch_whitening(data, patch_size=(3, 3)):
    # Compute weights from data such that
    # torch.std(F.conv2d(data, weights), dim=(2, 3))
    # is close to 1.
    h, w = patch_size
    c = data.size(1)
    patches = data.unfold(2, h, 1).unfold(3, w, 1)
    patches = patches.transpose(1, 3).reshape(-1, c, h, w).to(torch.float32)

    n, c, h, w = patches.shape
    X = patches.reshape(n, c * h * w)
    X = X / (X.size(0) - 1) ** 0.5
    covariance = X.t() @ X

    eigenvalues, eigenvectors = torch.linalg.eigh(covariance)

    # eigh returns ascending eigenvalues; flip so the largest come first.
    eigenvalues = eigenvalues.flip(0)
    eigenvectors = eigenvectors.t().reshape(c * h * w, c, h, w).flip(0)

    # 1e-2 regularizes near-zero eigenvalues before the inverse square root.
    return eigenvectors / torch.sqrt(eigenvalues + 1e-2).view(-1, 1, 1, 1)


def get_cifar10_dataloader(batch_size,
                           data_dir: str = '../../datasets/cifar10/',
                           num_workers: int = 4):
    """Build train/val DataLoaders over an ImageFolder layout of CIFAR-10
    (`<data_dir>/train`, `<data_dir>/test`) with standard augmentation and
    the extra zero channel appended."""
    # Per-channel CIFAR-10 mean / std.
    stats = ((0.4914, 0.4822, 0.4465),
             (0.2023, 0.1994, 0.2010))

    train_tfms = tt.Compose([
        tt.RandomCrop(32, padding=4, padding_mode='reflect'),
        tt.RandomHorizontalFlip(),
        tt.ToTensor(),
        tt.Normalize(*stats, inplace=True),
        PadChannel(npad=1)
    ])

    val_tfms = tt.Compose([
        tt.ToTensor(),
        tt.Normalize(*stats, inplace=True),
        PadChannel(npad=1)
    ])

    train_ds = ImageFolder(data_dir + 'train', transform=train_tfms)
    val_ds = ImageFolder(data_dir + 'test', transform=val_tfms)

    train_dl = DataLoader(train_ds,
                          batch_size,
                          pin_memory=True,
                          num_workers=num_workers,
                          shuffle=True)
    val_dl = DataLoader(val_ds,
                        batch_size,
                        pin_memory=True,
                        num_workers=num_workers)
    return train_dl, val_dl


def remove_all_hooks(model: torch.nn.Module) -> None:
    """Recursively clear forward, forward-pre and backward hooks from every
    sub-module of `model`. (The root module's own hooks are left alone,
    matching the call sites, which only ever hook child layers.)"""
    for name, child in model._modules.items():
        if child is not None:
            # Fix: these were `elif` branches — every nn.Module has all three
            # attributes, so only _forward_hooks was ever cleared and
            # pre-/backward hooks leaked.
            if hasattr(child, "_forward_hooks"):
                child._forward_hooks: Dict[int, Callable] = OrderedDict()
            if hasattr(child, "_forward_pre_hooks"):
                child._forward_pre_hooks: Dict[int, Callable] = OrderedDict()
            if hasattr(child, "_backward_hooks"):
                child._backward_hooks: Dict[int, Callable] = OrderedDict()
            remove_all_hooks(child)


# Given a model, hook every BatchNorm2d so its (detached, CPU) outputs are
# accumulated into the returned dict on each forward pass.
def get_intermediate_output(model):
    activation = defaultdict(list)

    def get_activation(name):
        def hook(model, input, output):
            x = output.detach().cpu()
            activation[name].append(x)

        return hook

    BatchNorm_layers = [m for m in model.modules() if isinstance(m, torch.nn.BatchNorm2d)]
    for i, b in enumerate(BatchNorm_layers):
        b.register_forward_hook(
            get_activation(f"bn_{i + 1}")
        )
    return activation


def get_all_bn_activations(model, val_dl, DEVICE):
    """Run the whole validation loader through `model` and return a dict of
    concatenated BatchNorm2d outputs keyed by 'bn_<i>'."""
    activation = get_intermediate_output(model)

    model.to(DEVICE)
    model.eval()

    # no_grad: outputs are discarded, so building an autograd graph
    # would only waste memory.
    with torch.no_grad():
        for img, label in val_dl:
            img, label = img.to(DEVICE), label.to(DEVICE)
            model(img)

    remove_all_hooks(model)

    activation = {k: torch.cat(v) for k, v in activation.items()}

    return activation
def load_cifar10(device, dtype, data_dir='./datasets/cifar10/'):
    """Load CIFAR-10 onto `device` as normalized NCHW tensors.

    Returns (train_data, train_targets, valid_data, valid_targets); training
    images are reflection-padded to 40x40 for later random cropping.
    """
    print("Loading CIFAR10")
    train = torchvision.datasets.CIFAR10(root=data_dir, download=True)
    valid = torchvision.datasets.CIFAR10(root=data_dir, train=False)

    train_data = preprocess_cifar10_data(train.data, device, dtype)
    valid_data = preprocess_cifar10_data(valid.data, device, dtype)

    train_targets = torch.tensor(train.targets).to(device)
    valid_targets = torch.tensor(valid.targets).to(device)

    # Pad 32x32 to 40x40
    train_data = nn.ReflectionPad2d(4)(train_data)

    return train_data, train_targets, valid_data, valid_targets


def load_cifar100(device, dtype, data_dir='./datasets/cifar100/'):
    """Load CIFAR-100 onto `device`; same contract as load_cifar10."""
    print("Loading CIFAR100")
    train = torchvision.datasets.CIFAR100(root=data_dir, download=True)
    valid = torchvision.datasets.CIFAR100(root=data_dir, train=False)

    train_data = preprocess_cifar100_data(train.data, device, dtype)
    valid_data = preprocess_cifar100_data(valid.data, device, dtype)

    train_targets = torch.tensor(train.targets).to(device)
    valid_targets = torch.tensor(valid.targets).to(device)

    # Pad 32x32 to 40x40
    train_data = nn.ReflectionPad2d(4)(train_data)

    return train_data, train_targets, valid_data, valid_targets


def random_crop(data, crop_size):
    """Take one shared random (crop_h, crop_w) crop from an NCHW batch.

    Fix: randint's upper bound is exclusive, so the original
    `randint(w - crop_w)` could never pick the maximal offset and raised
    outright when crop_size equalled the image size. The +1 makes every
    valid offset (including a full-size identity crop) reachable.
    """
    crop_h, crop_w = crop_size
    h = data.size(2)
    w = data.size(3)
    x = torch.randint(w - crop_w + 1, size=(1,))[0]
    y = torch.randint(h - crop_h + 1, size=(1,))[0]
    return data[:, :, y: y + crop_h, x: x + crop_w]


def preprocess_cifar10_data(data, device, dtype):
    """NHWC uint8 image array -> normalized NCHW tensor of `dtype` on `device`."""
    data = torch.tensor(data, device=device).to(dtype)

    # Normalize with per-channel CIFAR-10 statistics (0-255 scale).
    mean = torch.tensor([125.31, 122.95, 113.87], device=device).to(dtype)
    std = torch.tensor([62.99, 62.09, 66.70], device=device).to(dtype)
    data = (data - mean) / std

    # Permute data from NHWC to NCHW format
    data = data.permute(0, 3, 1, 2)

    return data


def preprocess_cifar100_data(data, device, dtype):
    """NHWC uint8 image array -> normalized NCHW tensor of `dtype` on `device`."""
    data = torch.tensor(data, device=device).to(dtype)

    # Normalize with per-channel CIFAR-100 statistics (0-255 scale).
    mean = torch.tensor([129.30, 124.07, 112.43], device=device).to(dtype)
    std = torch.tensor([68.17, 65.39, 70.42], device=device).to(dtype)
    data = (data - mean) / std

    # Permute data from NHWC to NCHW format
    data = data.permute(0, 3, 1, 2)

    return data
def get_intermediate_output(model):
    """Hook every GELU in `model` so the *input* of each activation is
    accumulated into the returned dict (keys 'GELU_<i>').

    The tensors are intentionally NOT detached: the kurtosis regularization
    loss backpropagates through them.
    """
    activations = defaultdict(list)

    def get_activation(name):
        def hook(model, input, output):
            # input is the hooked module's positional-args tuple.
            x = input[0]
            activations[name].append(x)

        return hook

    GELU_layers = [m for m in model.modules() if isinstance(m, torch.nn.GELU)]
    for i, b in enumerate(GELU_layers):
        b.register_forward_hook(
            get_activation(f"GELU_{i + 1}")
        )
    return activations


def remove_all_hooks(model: torch.nn.Module) -> None:
    """Recursively clear forward, forward-pre and backward hooks from every
    sub-module of `model` (root module's own hooks are left alone)."""
    for name, child in model._modules.items():
        if child is not None:
            # Fix: these were `elif` branches — every nn.Module has all three
            # attributes, so only _forward_hooks was ever cleared.
            if hasattr(child, "_forward_hooks"):
                child._forward_hooks: Dict[int, Callable] = OrderedDict()
            if hasattr(child, "_forward_pre_hooks"):
                child._forward_pre_hooks: Dict[int, Callable] = OrderedDict()
            if hasattr(child, "_backward_hooks"):
                child._backward_hooks: Dict[int, Callable] = OrderedDict()
            remove_all_hooks(child)


def moment(x: torch.Tensor, std: float, mean: float, deg: int = 4, eps: float = 1e-4) -> torch.Tensor:
    """Standardized central moment of degree `deg` of the 1-D tensor `x`.

    Computed in float64 for stability; `eps` guards against division by a
    zero standard deviation. deg=4 yields kurtosis (3 for a Gaussian).
    """
    x = x.double()
    temp = (x - mean) ** deg / x.shape[0]
    return torch.sum(temp) / (std ** deg + eps)


def get_statistics(activations):
    """MSE losses pushing per-layer activation statistics toward a standard
    Gaussian: mean -> 0, std -> 1, kurtosis -> 3.

    `activations` maps 'GELU_<i>' to an activation tensor; layers are
    processed in index order so losses line up deterministically.
    Returns (loss_means, loss_stds, loss_kurts).
    """
    n = len(activations)
    means = torch.zeros(n)
    stds = torch.zeros(n)
    kurts = torch.zeros(n)

    for layer_index, name in enumerate(sorted(activations.keys(), key=lambda x: int(x.split('_')[1]))):
        dist = activations[name]
        dist = dist.flatten()

        std, mean = torch.std_mean(dist)
        kurt = moment(dist, std, mean, deg=4)

        means[layer_index] = mean
        stds[layer_index] = std
        kurts[layer_index] = kurt

    loss_means = F.mse_loss(means, torch.zeros(n))
    loss_stds = F.mse_loss(stds, torch.ones(n))
    loss_kurts = F.mse_loss(kurts, 3 * torch.ones(n))

    return loss_means, loss_stds, loss_kurts
def get_best_weights(loc, dataset, model_type):
    """Scan `<loc><dataset>/logs/*.json` for this model's training log and
    return the path of the weight file from the highest-accuracy run.

    Raises ValueError when no log for `model_type` exists.
    """
    loc = '%s%s/' % (loc, dataset)
    log_file = None
    for log in glob.glob(loc + 'logs/*.json'):
        if model_type in log:
            log_file = log
            break

    if log_file is None:
        # Fix: the message previously listed unsupported "resnet9".
        raise ValueError("model_type must be resnet20, 32, 44, 56, or 110")

    with open(log_file) as f:
        contents = json.load(f)

    print("Finding the best model according to logs...")
    print(contents)

    # Logs always contain exactly five runs ("run0".."run4").
    runs = {"run%d" % i: contents["run%d" % i] for i in range(5)}
    mean, std = contents["accuracy"]
    accs = [contents["run%d" % i] for i in range(5)]
    idx = accs.index(max(accs))

    print("\nAverage (5 runs): %1.3f%% +/- %1.3f%%" % (100 * mean, 100 * std))
    print("Best (idx %d): %1.3f" % (idx, runs["run%d" % idx]))
    weight_file = loc + "weights/%s_%s_run%d.pt" % (model_type, dataset, idx)
    return weight_file


def num_params(model) -> int:
    """Total number of trainable parameters in `model` as a plain int."""
    # numel() avoids the numpy round-trip of the original (which returned
    # a numpy integer instead of int).
    return sum(p.numel() for p in model.parameters() if p.requires_grad)


def update_ema(train_model, valid_model, rho):
    # The trained model is not used for validation directly. Instead, the
    # validation model weights are updated with exponential moving averages.
    train_weights = train_model.state_dict().values()
    valid_weights = valid_model.state_dict().values()
    for train_weight, valid_weight in zip(train_weights, valid_weights):
        # Skip integer buffers such as BatchNorm's num_batches_tracked.
        if valid_weight.dtype in [torch.float16, torch.float32]:
            valid_weight *= rho
            valid_weight += (1 - rho) * train_weight


def update_nesterov(weights, lr, weight_decay, momentum):
    """One in-place Nesterov-momentum step over `weights`, a list of
    (parameter, velocity) pairs.

    NOTE: `gradient` aliases `weight.grad.data`, so the parameter's gradient
    buffer is intentionally mutated; callers zero gradients each step.
    """
    for weight, velocity in weights:
        if weight.requires_grad:
            gradient = weight.grad.data
            weight = weight.data

            # g <- -lr * (g + wd * w); v <- m*v + g; w <- w + g + m*v
            gradient.add_(weight, alpha=weight_decay).mul_(-lr)
            velocity.mul_(momentum).add_(gradient)
            weight.add_(gradient.add_(velocity, alpha=momentum))


def label_smoothing_loss(inputs, targets, alpha):
    """Per-sample cross entropy with label smoothing.

    Blends standard NLL with the mean negative log-probability (a uniform
    target), weighted (1 - alpha) / alpha. Returns shape (batch,) — callers
    reduce with .sum() or .mean().
    """
    log_probs = torch.nn.functional.log_softmax(inputs, dim=1, _stacklevel=5)
    kl = -log_probs.mean(dim=1)
    xent = torch.nn.functional.nll_loss(log_probs, targets, reduction="none")
    loss = (1 - alpha) * xent + alpha * kl
    return loss


def patch_whitening(data, patch_size=(3, 3)):
    """ZCA-style whitening filters from (h, w) patches of `data` (NCHW).

    Returns a (c*h*w, c, h, w) conv kernel whose output has ~unit variance.
    Duplicate of utils.patch_whitening, kept for import compatibility.
    """
    h, w = patch_size
    c = data.size(1)
    patches = data.unfold(2, h, 1).unfold(3, w, 1)
    patches = patches.transpose(1, 3).reshape(-1, c, h, w).to(torch.float32)
    n, c, h, w = patches.shape
    X = patches.reshape(n, c * h * w)
    X = X / (X.size(0) - 1) ** 0.5
    covariance = X.t() @ X
    eigenvalues, eigenvectors = torch.linalg.eigh(covariance)
    # Ascending from eigh -> flip so dominant directions come first.
    eigenvalues = eigenvalues.flip(0)
    eigenvectors = eigenvectors.t().reshape(c * h * w, c, h, w).flip(0)
    # 1e-2 regularizes near-zero eigenvalues before the inverse square root.
    return eigenvectors / torch.sqrt(eigenvalues + 1e-2).view(-1, 1, 1, 1)
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.9.0" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"}, + {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + 
{file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest 
(>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + +[[package]] +name = "mako" +version = "1.3.8" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"}, + {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = 
"nvidia-cublas-cu11" +version = "11.10.3.66" +description = "CUBLAS native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-nvrtc-cu11" +version = "11.7.99" +description = "NVRTC native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-runtime-cu11" +version = "11.7.99" +description = "CUDA Runtime native Libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cudnn-cu11" +version = "8.5.0.96" +description = "cuDNN runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, + {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "OpenFHE" +version = "1.0.5a0" +description = "Python wrapper for OpenFHE" +optional = false +python-versions = ">=3.10" +files = [ + {file = "OpenFHE-1.0.5a0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85c3bbf5e2a6fe89a3ab99630dcb8c56d78dded43d9b8305c67cd3141258d8ca"}, +] + +[package.dependencies] +numpy = "*" + +[package.source] +type = "file" +url = "../palisade-python/wheelhouse/OpenFHE-1.0.5a0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + +[[package]] +name = "optuna" +version = "3.6.1" +description = "A hyperparameter optimization framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "optuna-3.6.1-py3-none-any.whl", hash = "sha256:b32e0490bd6552790b70ec94de77dd2855057c9e229cd9f4da48fe8a31c7f1cc"}, + {file = "optuna-3.6.1.tar.gz", hash = "sha256:146e530b57b4b9afd7526b3e642fbe65491f7e292b405913355f8e438e361ecf"}, +] + +[package.dependencies] +alembic = ">=1.5.0" +colorlog = "*" +numpy = "*" +packaging = ">=20.0" +PyYAML = "*" +sqlalchemy = ">=1.3.0" +tqdm = "*" + +[package.extras] +benchmark = ["asv (>=0.5.0)", "botorch", "cma", "virtualenv"] +checking = ["black", "blackdoc", "flake8", "isort", "mypy", "mypy-boto3-s3", "types-PyYAML", "types-redis", "types-setuptools", "types-tqdm", "typing-extensions (>=3.10.0.0)"] +document = ["ase", "cmaes (>=0.10.0)", "fvcore", "lightgbm", "matplotlib (!=3.6.0)", "pandas", "pillow", "plotly (>=4.9.0)", "scikit-learn", "sphinx", "sphinx-copybutton", "sphinx-gallery", "sphinx-plotly-directive", "sphinx-rtd-theme (>=1.2.0)", "torch", "torchvision"] +optional = ["boto3", "cmaes (>=0.10.0)", "google-cloud-storage", "matplotlib (!=3.6.0)", "pandas", 
"plotly (>=4.9.0)", "redis", "scikit-learn (>=0.24.2)", "scipy", "torch"] +test = ["coverage", "fakeredis[lua]", "kaleido", "moto", "pytest", "scipy (>=1.9.2)", "torch"] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pillow" +version = "10.4.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = 
"pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file 
= "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = 
"pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = 
"pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "75.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, + {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", 
"importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] + +[[package]] +name = "sqlalchemy" +version = "2.0.36" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = 
"SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = 
"SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = 
"sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = 
"tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "torch" +version = "1.13.1" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "torch-1.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:fd12043868a34a8da7d490bf6db66991108b00ffbeecb034228bfcbbd4197143"}, + {file = "torch-1.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d9fe785d375f2e26a5d5eba5de91f89e6a3be5d11efb497e76705fdf93fa3c2e"}, + {file = "torch-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:98124598cdff4c287dbf50f53fb455f0c1e3a88022b39648102957f3445e9b76"}, + {file = "torch-1.13.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:393a6273c832e047581063fb74335ff50b4c566217019cc6ace318cd79eb0566"}, + {file = "torch-1.13.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:0122806b111b949d21fa1a5f9764d1fd2fcc4a47cb7f8ff914204fd4fc752ed5"}, + {file = "torch-1.13.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:22128502fd8f5b25ac1cd849ecb64a418382ae81dd4ce2b5cebaa09ab15b0d9b"}, + {file = "torch-1.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:76024be052b659ac1304ab8475ab03ea0a12124c3e7626282c9c86798ac7bc11"}, + {file = "torch-1.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ea8dda84d796094eb8709df0fcd6b56dc20b58fdd6bc4e8d7109930dafc8e419"}, + {file = "torch-1.13.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:2ee7b81e9c457252bddd7d3da66fb1f619a5d12c24d7074de91c4ddafb832c93"}, + {file = "torch-1.13.1-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:0d9b8061048cfb78e675b9d2ea8503bfe30db43d583599ae8626b1263a0c1380"}, + {file = "torch-1.13.1-cp37-none-macosx_11_0_arm64.whl", hash = "sha256:f402ca80b66e9fbd661ed4287d7553f7f3899d9ab54bf5c67faada1555abde28"}, + {file = "torch-1.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:727dbf00e2cf858052364c0e2a496684b9cb5aa01dc8a8bc8bbb7c54502bdcdd"}, + {file = "torch-1.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:df8434b0695e9ceb8cc70650afc1310d8ba949e6db2a0525ddd9c3b2b181e5fe"}, + {file = "torch-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5e1e722a41f52a3f26f0c4fcec227e02c6c42f7c094f32e49d4beef7d1e213ea"}, + {file = "torch-1.13.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:33e67eea526e0bbb9151263e65417a9ef2d8fa53cbe628e87310060c9dcfa312"}, + {file = "torch-1.13.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:eeeb204d30fd40af6a2d80879b46a7efbe3cf43cdbeb8838dd4f3d126cc90b2b"}, + {file = "torch-1.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:50ff5e76d70074f6653d191fe4f6a42fdbe0cf942fbe2a3af0b75eaa414ac038"}, + {file = "torch-1.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2c3581a3fd81eb1f0f22997cddffea569fea53bafa372b2c0471db373b26aafc"}, + {file = "torch-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:0aa46f0ac95050c604bcf9ef71da9f1172e5037fdf2ebe051962d47b123848e7"}, + {file = "torch-1.13.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6930791efa8757cb6974af73d4996b6b50c592882a324b8fb0589c6a9ba2ddaf"}, + {file = "torch-1.13.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e0df902a7c7dd6c795698532ee5970ce898672625635d885eade9976e5a04949"}, +] + +[package.dependencies] +nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\""} +nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} +nvidia-cuda-runtime-cu11 = 
{version = "11.7.99", markers = "platform_system == \"Linux\""} +nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\""} +typing-extensions = "*" + +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] + +[[package]] +name = "torchvision" +version = "0.14.1" +description = "image and video datasets and models for torch deep learning" +optional = false +python-versions = ">=3.7" +files = [ + {file = "torchvision-0.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb05dd9dd3af5428fee525400759daf8da8e4caec45ddd6908cfb36571f6433"}, + {file = "torchvision-0.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8d0766ea92affa7af248e327dd85f7c9cfdf51a57530b43212d4e1858548e9d7"}, + {file = "torchvision-0.14.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6d7b35653113664ea3fdcb71f515cfbf29d2fe393000fd8aaff27a1284de6908"}, + {file = "torchvision-0.14.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:8a9eb773a2fa8f516e404ac09c059fb14e6882c48fdbb9c946327d2ce5dba6cd"}, + {file = "torchvision-0.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:13986f0c15377ff23039e1401012ccb6ecf71024ce53def27139e4eac5a57592"}, + {file = "torchvision-0.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb7a793fd33ce1abec24b42778419a3fb1e3159d7dfcb274a3ca8fb8cbc408dc"}, + {file = "torchvision-0.14.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89fb0419780ec9a9eb9f7856a0149f6ac9f956b28f44b0c0080c6b5b48044db7"}, + {file = "torchvision-0.14.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a2d4237d3c9705d7729eb4534e4eb06f1d6be7ff1df391204dfb51586d9b0ecb"}, + {file = "torchvision-0.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:92a324712a87957443cc34223274298ae9496853f115c252f8fc02b931f2340e"}, + {file = "torchvision-0.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:68ed03359dcd3da9cd21b8ab94da21158df8a6a0c5bad0bf4a42f0e448d28cb3"}, + {file = "torchvision-0.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:30fcf0e9fe57d4ac4ce6426659a57dce199637ccb6c70be1128670f177692624"}, + {file = "torchvision-0.14.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0ed02aefd09bf1114d35f1aa7dce55aa61c2c7e57f9aa02dce362860be654e85"}, + {file = "torchvision-0.14.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a541e49fc3c4e90e49e6988428ab047415ed52ea97d0c0bfd147d8bacb8f4df8"}, + {file = "torchvision-0.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:6099b3191dc2516099a32ae38a5fb349b42e863872a13545ab1a524b6567be60"}, + {file = "torchvision-0.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5e744f56e5f5b452deb5fc0f3f2ba4d2f00612d14d8da0dbefea8f09ac7690b"}, + {file = "torchvision-0.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:758b20d079e810b4740bd60d1eb16e49da830e3360f9be379eb177ee221fa5d4"}, + {file = "torchvision-0.14.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:83045507ef8d3c015d4df6be79491375b2f901352cfca6e72b4723e9c4f9a55d"}, + {file = "torchvision-0.14.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:eaed58cf454323ed9222d4e0dd5fb897064f454b400696e03a5200e65d3a1e76"}, + {file = "torchvision-0.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:b337e1245ca4353623dd563c03cd8f020c2496a7c5d12bba4d2e381999c766e0"}, +] + +[package.dependencies] +numpy = "*" +pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" +requests = "*" +torch = "1.13.1" +typing-extensions = "*" + +[package.extras] +scipy = ["scipy"] + +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio 
(>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wheel" +version = "0.45.1" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248"}, + {file = "wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "857c88f26b1da7d498e6d9653ec6fd4e889b67115916268d2b88cec733497ae5" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..360df84 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,29 @@ +[tool.poetry] 
+name = "palisade_he_cnn" +version = "0.1.0" +description = "" +authors = [ + "Vikram Saraph ", + "Vivian Maloney ", + "Freddy Obrecht ", + "Kate Tallaksen ", + "Prathibha Rama " +] +readme = "README.md" +packages = [ + {include = "palisade_he_cnn"} +] + +[tool.poetry.dependencies] +python = "^3.10" +torch = "^1.13.1" +torchvision = "^0.14.1" +optuna = "^3.2.0" +joblib = "^1.3.2" +pytest = "^7.4.0" +openfhe = {path = "../palisade-python/wheelhouse/OpenFHE-1.0.5a0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"} + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api"