From 2a5e5f101ddae4d2fe953caadc8ba73ff5287649 Mon Sep 17 00:00:00 2001
From: ManosMpampis <93824600+ManosMpampis@users.noreply.github.com>
Date: Tue, 24 Jan 2023 12:50:08 +0200
Subject: [PATCH] Nanodet C API. Onnx and Libtorch (JIT) modules implementations and Libtorch Installation. (#352)

* Onnx and Jit tracing-scripting implementation with python and c api inference. Docs and tests have changed accordingly
* Implementation of libtorch and torch vision installation as part of tool installation.
* add license and file name correction
* style, inconclusive cppcheck fixes.
* Fixes for C API initialization errors
* Deleting unused code
* macos cppcheck -> C style fixes
* macos cppcheck -c style fixes
* c-style pointer declaration fixes
* c-style pointer declaration fixes
* cpp casting fixes
* bug fixes
* Update projects/python/perception/object_detection_2d/nanodet/export_torchscript.py
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update install_torch_c_api.sh
* Update dependencies/install_torch_c_api.sh
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update include/nanodet_c.h
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update include/nanodet_c.h
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update include/nanodet_c.h
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update include/nanodet_c.h
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update include/opendr_utils.h
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update include/target.h
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update nanodet.md
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update nanodet.md
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update Makefile
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update docs/reference/nanodet.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update nanodet.md
* Update projects/c_api/Makefile
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update projects/python/perception/object_detection_2d/nanodet/README.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update eval_demo.py
* Update projects/python/perception/object_detection_2d/nanodet/export_onnx.py
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update projects/python/perception/object_detection_2d/nanodet/README.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* Update projects/python/perception/object_detection_2d/nanodet/README.md
Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
* installing libtorch and vision with respect to the user's CUDA
* better explanations of variables in optimization runtimes
* small typing fixes
* Fix dependency
* Update dependencies.ini
deleting torchmetrics
* update docs for new data structures and utilities of c api
* new source, samples, utilities and test for onnx C API. The new scripts implement the feed forward of almost all the onnx models that the python api provides.
* bug fixes in onnx exporting.
* Revert "update docs for new data structures and utilities of c api"
This reverts commit 467c1f1e661462de46dd88a1762b23cbfaf7f4c8.
* Revert "bug fixes in onnx exporting."
This reverts commit fcd4f9c3a09d70deb51eec83ad1d469c7290ae27.
* Revert "Revert "bug fixes in onnx exporting.""
This reverts commit 953a193aa9a99522ac6069814a4b50f6837c0fac.
* Revert "new source, samples, utilities and test for onnx C API. The new scripts implement the feed forward of almost all the onnx models that the python api provides."
This reverts commit 617572d5d6d3e5d02004ece522f7d224d050bd61.
* Revert "bug fixes in onnx exporting."
This reverts commit fcd4f9c3a09d70deb51eec83ad1d469c7290ae27.
* revert last commits, change files for cpp styles as the wiki says and better directory construction for easier navigation and new tools implementation.
* change function name to be uniform with the others
* doc for c nanodet
* Added small JSON parser and installation script for easier navigation and universal use for future c api
* Update projects/c_api/README.md
Co-authored-by: Nikolaos Passalis
* Update dependencies/install_torch_c_api.sh
Co-authored-by: Nikolaos Passalis
* Update dependencies/install_torch_c_api.sh
Co-authored-by: Nikolaos Passalis
* Update install_torch_c_api.sh
no need for specified vision version in sh
* Update download_torch.py
no need for environment variables to be passed
* Update utilities.py
delete additional print from debugging
* Fixes
* Fixes of suggestions
Combine optimizations and inference in one script
* Fixes of suggestions
Add more JSON parser capabilities
Fix face recognition threshold not read from JSON
* Fixes of suggestions
Delete unused loggers.
* Fixes of suggestions
* added json parser in utilities
* unnecessary use of filesystem, easier implementation in embedded
* default directory for saves changed to `temp`
* change to warnings errors and exceptions
* Transfer warnings for jit and ort simultaneously loaded from _load to infer.
* Change naming style. All .cpp files have only CPP style naming and .c files have CPP style naming in CPP functions and C style in everything else.
* license test update
* Apply suggestions from code review
Co-authored-by: Kostas Tsampazis <27914645+tsampazk@users.noreply.github.com>
* Apply suggestions from code review
* fix bug to find cuda
* add automatic random colors
* better implementation of optimize
* add XMLBaseDataset option
* train bug fix
* train bug fix
* typo fix
* add changes and some better namings

Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
Co-authored-by: ad-daniel
Co-authored-by: Nikolaos Passalis
Co-authored-by: Kostas Tsampazis <27914645+tsampazk@users.noreply.github.com>
---
 Makefile | 2 +
 dependencies/download_torch.py | 125 ++++
 dependencies/install_rapidjson.sh | 17 +
 dependencies/install_torch_c_api.sh | 64 ++
 docs/reference/c-data-h.md | 12 +-
 docs/reference/c-face-recognition-h.md | 42 +-
 .../c-object-detection-2d-nanodet-jit-h.md | 47 ++
 docs/reference/c-opendr-utils-h.md | 65 +-
 docs/reference/c-target-h.md | 78 ++-
 docs/reference/nanodet.md | 289 --------
 docs/reference/object-detection-2d-nanodet.md | 410 ++++++++++++
 include/data.h | 4 +-
 include/face_recognition.h | 58 +-
 include/object_detection_2d_nanodet_jit.h | 84 +++
 include/opendr_utils.h | 54 +-
 include/target.h | 26 +-
 projects/c_api/Makefile | 17 +-
 projects/c_api/README.md | 1 +
 .../face_recognition/face_recognition_demo.c | 20 +-
 .../object_detection_2d/nanodet/README.md | 15 +
 .../nanodet/nanodet_jit_demo.c | 62 ++
 .../object_detection_2d/nanodet/README.md | 32 +-
 .../object_detection_2d/nanodet/eval_demo.py | 11 +-
 .../nanodet/inference_demo.py | 13 +-
 .../nanodet/inference_tutorial.ipynb | 616 +-----------------
 .../object_detection_2d/nanodet/train_demo.py | 12 +-
 src/c_api/Makefile | 11 +-
 src/c_api/README.md | 2 +-
 src/c_api/face_recognition.cpp | 329 +++++-----
 src/c_api/object_detection_2d_nanodet_jit.cpp | 362 ++++++++++
 src/c_api/opendr_utils.cpp | 120 +++-
 .../object_detection_2d/nanodet/README.md | 2 +-
 .../nanodet_EfficientNet_Lite0_320.yml | 2 +-
 .../nanodet_EfficientNet_Lite1_416.yml | 2 +-
 .../nanodet_EfficientNet_Lite2_512.yml | 2 +-
 .../RepVGG/nanodet_RepVGG_A0_416.yml | 2 +-
 .../Transformer/nanodet_t.yml | 7 +-
 .../config/legacy_v0.x_configs/nanodet_g.yml | 2 +-
 .../config/legacy_v0.x_configs/nanodet_m.yml | 2 +-
 .../legacy_v0.x_configs/nanodet_m_0.5x.yml | 2 +-
 .../legacy_v0.x_configs/nanodet_m_1.5x.yml | 2 +-
 .../nanodet_m_1.5x_416.yml | 2 +-
 .../legacy_v0.x_configs/nanodet_m_416.yml | 2 +-
 .../algorithm/config/nanodet_custom.yml | 125 ----
 .../config/nanodet_plus_m_1.5x_320.yml | 2 +-
 .../config/nanodet_plus_m_1.5x_416.yml | 2 +-
 .../algorithm/config/nanodet_plus_m_320.yml | 2 +-
 .../algorithm/config/nanodet_plus_m_416.yml | 2 +-
 .../algorithm/nanodet/data/batch_process.py | 31 +-
 .../nanodet/data/dataset/__init__.py | 6 +-
 .../nanodet/data/transform/pipeline.py | 20 +-
 .../algorithm/nanodet/data/transform/warp.py | 85 +--
 .../nanodet/evaluator/coco_detection.py | 15 +-
 .../algorithm/nanodet/inferencer/utilities.py | 66 +-
 .../nanodet/model/arch/nanodet_plus.py | 8 +-
 .../nanodet/model/arch/one_stage_detector.py | 6 +-
 .../nanodet/model/backbone/custom_csp.py | 5 +-
 .../model/backbone/efficientnet_lite.py | 6 +-
 .../nanodet/model/backbone/ghostnet.py | 14 +-
 .../nanodet/model/backbone/mobilenetv2.py | 13 +-
 .../nanodet/model/backbone/repvgg.py | 3 +-
 .../nanodet/model/backbone/resnet.py | 6 +-
 .../nanodet/model/backbone/shufflenetv2.py | 9 +-
 .../algorithm/nanodet/model/fpn/fpn.py | 20 +-
 .../algorithm/nanodet/model/fpn/ghost_pan.py | 14 +-
 .../algorithm/nanodet/model/fpn/pan.py | 17 +-
 .../algorithm/nanodet/model/fpn/tan.py | 7 +-
 .../algorithm/nanodet/model/head/gfl_head.py | 189 ++++--
 .../nanodet/model/head/nanodet_head.py | 44 +-
 .../nanodet/model/head/nanodet_plus_head.py | 143 ++--
 .../nanodet/model/head/simple_conv_head.py | 12 +-
 .../nanodet/model/loss/gfocal_loss.py | 2 +
 .../algorithm/nanodet/model/loss/iou_loss.py | 1 +
 .../algorithm/nanodet/model/module/conv.py | 26 +-
 .../algorithm/nanodet/model/module/nms.py | 48 +-
 .../nanodet/model/module/transformer.py | 2 +
 .../nanodet/algorithm/nanodet/trainer/task.py | 31 +-
 .../algorithm/nanodet/util/box_transform.py | 5 +-
 .../algorithm/nanodet/util/check_point.py | 27 +-
 .../nanodet/algorithm/nanodet/util/logger.py | 13 +-
 .../nanodet/algorithm/nanodet/util/path.py | 4 +-
 .../nanodet/dependencies.ini | 6 +-
 .../nanodet/nanodet_learner.py | 348 +++++---
 tests/Makefile | 19 +-
 tests/sources/c_api/test_face_recognition.c | 66 +-
 tests/sources/c_api/test_nanodet.c | 85 +++
 tests/sources/c_api/test_opendr_utils.c | 8 +-
 .../nanodet/test_nanodet.py | 45 +-
 88 files changed, 2787 insertions(+), 1850 deletions(-)
 create mode 100644 dependencies/download_torch.py
 create mode 100755 dependencies/install_rapidjson.sh
 create mode 100755 dependencies/install_torch_c_api.sh
 create mode 100644 docs/reference/c-object-detection-2d-nanodet-jit-h.md
 delete mode 100644 docs/reference/nanodet.md
 create mode 100644 docs/reference/object-detection-2d-nanodet.md
 create mode 100644 include/object_detection_2d_nanodet_jit.h
 create mode 100644 projects/c_api/samples/object_detection_2d/nanodet/README.md
 create mode 100644 projects/c_api/samples/object_detection_2d/nanodet/nanodet_jit_demo.c
 create mode 100644 src/c_api/object_detection_2d_nanodet_jit.cpp
 delete mode 100644 src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml
 create mode 100644 tests/sources/c_api/test_nanodet.c

diff --git a/Makefile b/Makefile
index 7c4ca70dc7..dffd1227b5 100644
--- a/Makefile
+++ b/Makefile
@@ -37,6 +37,8 @@ install_compilation_dependencies:
 	@+echo "#"; echo "# * Install Compilation Dependencies *"; echo "#"
 	@+cd dependencies; ./install.sh compilation
 	@+cd dependencies; ./install_onnx.sh
+	@+cd dependencies; ./install_rapidjson.sh
+	@+cd dependencies; ./install_torch_c_api.sh
 	@+make --silent -C src/opendr/control/mobile_manipulation $(TARGET) OPENDR_HOME="$(OPENDR_HOME)";
 	@+make --silent -C src/opendr/control/single_demo_grasp $(TARGET) OPENDR_HOME="$(OPENDR_HOME)";

diff --git a/dependencies/download_torch.py b/dependencies/download_torch.py
new file mode 100644
index 0000000000..3cf7496eef
--- /dev/null
+++ b/dependencies/download_torch.py
@@ -0,0 +1,125 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import argparse
+import glob
+from urllib.request import urlretrieve
+import os
+import warnings
+
+
+def search_on_path(filenames):
+    for p in os.environ.get('PATH', '').split(os.pathsep):
+        for filename in filenames:
+            full = os.path.join(p, filename)
+            if os.path.exists(full):
+                return os.path.abspath(full)
+    return None
+
+
+def get_cuda_path():
+    nvcc_path = search_on_path(('nvcc', 'nvcc.exe'))
+    if nvcc_path is not None:
+        cuda_path_default = os.path.normpath(os.path.join(os.path.dirname(nvcc_path), '..'))
+        _cuda_path = cuda_path_default
+    elif os.path.exists('/usr/local/cuda'):
+        _cuda_path = '/usr/local/cuda'
+    else:
+        _cuda_path = None
+
+    return _cuda_path
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--cuda_path", help="Path to installed cuda", type=str, default=None)
+    parser.add_argument("--opendr_device", help="Target device for installation",
+                        type=str, choices=["gpu", "cpu"], default="gpu")
+    parser.add_argument("--torch_version", help="Specifies LibTorch version to be installed", type=str, default="1.9.0")
+    args = parser.parse_args()
+
+    COMPATIBILITY_VERSIONS = {
+        "1.13.1": "0.14.1",
+        "1.13.0": "0.14.0",
+        "1.12.0": "0.13.0",
+        "1.11.0": "0.12.0",
+        "1.10.2": "0.11.3",
+        "1.10.1": "0.11.2",
+        "1.10.0": "0.11.1",
+        "1.9.1": "0.10.1",
+        "1.9.0": "0.10.0",
+    }
+
+    warnings.simplefilter("error")
+
+    TORCH_VERSION = args.torch_version
+    VISION_VERSION = COMPATIBILITY_VERSIONS[TORCH_VERSION]
+
+    CUDA_VERSION = None
+    DEVICE = None
+    # Find Device
+    if args.opendr_device == "gpu":
+        try:
+            if args.cuda_path is None:
+                CUDA_PATH = get_cuda_path()
+            else:
+                CUDA_PATH = args.cuda_path
+            version_file_type = glob.glob(f"{CUDA_PATH}/version*")
+            if version_file_type[0].endswith('.txt'):
+                version_file = open(f"{CUDA_PATH}/version.txt", mode='r')
+                version_line = version_file.readlines()
+                version_line = version_line[0].replace(".", "")
+                CUDA_VERSION = version_line[13:16]
+            elif version_file_type[0].endswith('.json'):
+                version_file = open(f"{CUDA_PATH}/version.json", mode='r')
+                version_dict = json.load(version_file)
+                CUDA_VERSION = version_dict["cuda"]["version"]
+                CUDA_VERSION = CUDA_VERSION.replace(".", "")
+                CUDA_VERSION = CUDA_VERSION[:3]
+            else:
+                warnings.warn("\033[93m No CUDA version file found.")
+            DEVICE = f"cu{CUDA_VERSION}"
+        except Exception:
+            warnings.warn("\033[93m No CUDA installation found.\n"
+                          "Please install CUDA or specify CUDA path with export CUDA_PATH=/path/to/your/cuda.")
+            exit()
+    else:
+        DEVICE = "cpu"
+
+    # Download Libtorch
+    try:
+        file_url_libtorch = f"https://download.pytorch.org/libtorch/{DEVICE}/" \
+                            f"libtorch-cxx11-abi-shared-with-deps-{TORCH_VERSION}%2B{DEVICE}.zip"
+
+        DOWNLOAD_DIRECTORY = "libtorch.zip"
+
+        urlretrieve(file_url_libtorch, DOWNLOAD_DIRECTORY)
+
+    except Exception:
+        warnings.warn("\033[93m No LibTorch found for your specific device and torch version.\n"
+                      "Please choose another version of torch or install a different version of CUDA.\n"
+                      "Please refer to https://download.pytorch.org/whl/torch_stable.html")
+        exit()
+    # Download Vision
+    try:
+        file_url_vision = f"https://github.com/pytorch/vision/archive/refs/tags/" \
+                          f"v{VISION_VERSION}.tar.gz"
+        DOWNLOAD_DIRECTORY = "vision.tar.gz"
+        urlretrieve(file_url_vision, DOWNLOAD_DIRECTORY)
+    except Exception:
+        warnings.warn("\033[93m No torchvision found for your specific torch version.\n"
+                      "Please refer to https://github.com/pytorch/vision for more information.")
+        exit()
+
diff --git a/dependencies/install_rapidjson.sh
b/dependencies/install_rapidjson.sh
new file mode 100755
index 0000000000..a60d44132b
--- /dev/null
+++ b/dependencies/install_rapidjson.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+if [ ! -d /usr/local/include/rapidjson ]; then
+
+  VERSION="1.1.0"
+
+  wget https://github.com/Tencent/rapidjson/archive/refs/tags/v${VERSION}.tar.gz --quiet
+  tar zxf v${VERSION}.tar.gz
+  cd rapidjson-${VERSION}
+  sudo mkdir -p /usr/local/include/rapidjson
+  sudo mv include/rapidjson/* /usr/local/include/rapidjson
+  cd ..
+  rm -rf rapidjson-${VERSION}
+  rm -rf v${VERSION}.tar.gz
+
+
+fi
diff --git a/dependencies/install_torch_c_api.sh b/dependencies/install_torch_c_api.sh
new file mode 100755
index 0000000000..3d56b5fb2b
--- /dev/null
+++ b/dependencies/install_torch_c_api.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+if [[ -z "$TORCH_VERSION" ]];
+then
+  echo "Torch version not defined, version 1.9.0 will be installed."
+  echo "For a specific Torch version please define TORCH_VERSION with 'export TORCH_VERSION=x.x.x'"
+  TORCH_VERSION="1.9.0"
+fi
+
+if [ ! -f /usr/local/lib/libtorchvision.so ]; then
+  TORCH_DIRECTORY="/usr/local/libtorch"
+
+  if [[ "$OPENDR_DEVICE" == "gpu" ]]
+  then
+    echo "Downloading and installing LibTorch and torchvision (gpu support) ..."
+    GPU="on"
+    DEVICE="cu"${CUDA_VERSION}
+    CUDA_COMPILER="/usr/local/cuda/bin/nvcc"
+  else
+    echo "Downloading and installing LibTorch and torchvision (cpu-only) ..."
+    GPU="off"
+    DEVICE="cpu"
+  fi
+
+  # Find CUDA version and download torch and vision
+  echo "Downloading LibTorch and torchvision ..."
+  # Make sure that we can download files
+  if [[ -z "$CUDA_PATH" ]];
+  then
+    python3 ./download_torch.py --opendr_device "$OPENDR_DEVICE" --torch_version "$TORCH_VERSION"
+  else
+    python3 ./download_torch.py --opendr_device "$OPENDR_DEVICE" --torch_version "$TORCH_VERSION" --cuda_path "$CUDA_PATH"
+  fi
+  echo "Downloading Libtorch and torchvision done."
+
+  # TORCH INSTALLATION
+  unzip -qq libtorch.zip
+  cd libtorch
+
+  sudo mkdir -p ${TORCH_DIRECTORY}
+  sudo cp -r ./* ${TORCH_DIRECTORY}
+  cd ..
+
+  # TORCH VISION INSTALLATION
+  tar zxf vision.tar.gz
+  mv vision-* vision
+  cd vision
+  sudo mkdir -p build
+  cd build
+  sudo cmake .. -DCMAKE_CUDA_COMPILER=${CUDA_COMPILER} -DCMAKE_PREFIX_PATH=${TORCH_DIRECTORY} -DWITH_CUDA=${GPU}
+  sudo make
+  sudo make install
+  cd ../..
+
+  # CLEAN
+  sudo rm -rf libtorch
+  sudo rm -rf libtorch.zip
+
+  sudo rm -rf vision
+  sudo rm -rf vision.tar.gz
+
+  sudo ldconfig
+
+fi
diff --git a/docs/reference/c-data-h.md b/docs/reference/c-data-h.md
index 20b5e27b8e..35e8f0539a 100644
--- a/docs/reference/c-data-h.md
+++ b/docs/reference/c-data-h.md
@@ -3,24 +3,24 @@
 The *data.h* header provides definitions of OpenDR data types that can be used in the C API of OpenDR.

-### struct *opendr_image_t*
+### struct *OpendrImageT*
 ```C
-struct opendr_image {
+struct OpendrImage {
   void *data;
 };
-typedef struct opendr_image opendr_image_t;
+typedef struct OpendrImage OpendrImageT;
 ```
-The *opendr_image_t* structure provides a data structure for storing OpenDR images.
+The *OpendrImageT* structure provides a data structure for storing OpenDR images.
 Every function in the C API receiving images is expected to use this structure.
 Helper functions that directly convert images into this format are provided in *opendr_utils.h*.

-The *opendr_image_t* structure has the following field:
+The *OpendrImageT* structure has the following field:

 #### `void *data` field
 A pointer where image data are stored.
-*opendr_image_t* is using internally OpenCV images (*cv::Mat*) for storing images.
+*OpendrImageT* internally uses OpenCV images (*cv::Mat*) for storing images.
 Therefore, only a pointer to the memory location of the corresponding *cv::Mat* is stored.
 Please note that the user is not expected to directly manipulate these data without first converting them into OpenCV data type or using the corresponding functions provided in *opendr_utils.h*.
diff --git a/docs/reference/c-face-recognition-h.md b/docs/reference/c-face-recognition-h.md
index 1ea2e5822d..bf1be372aa 100644
--- a/docs/reference/c-face-recognition-h.md
+++ b/docs/reference/c-face-recognition-h.md
@@ -3,62 +3,62 @@
 The *face_recognition.h* header provides function definitions that can be used for accessing the OpenDR face recognition tool.

-### Struct *face_recognition_model_t*
+### Struct *FaceRecognitionModelT*
 ```C
-struct face_recognition_model {
+struct FaceRecognitionModel {
   ...
 };
-typedef struct face_recognition_model face_recognition_model_t;
+typedef struct FaceRecognitionModel FaceRecognitionModelT;
 ```
-The *face_recognition_model_t* structure keeps all the neccesary information that are required by the OpenDR face recognition tool (e.g., model weights, normalization information, database for person recognition, ONNX session information, etc.).
+The *FaceRecognitionModelT* structure keeps all the necessary information that is required by the OpenDR face recognition tool (e.g., model weights, normalization information, database for person recognition, ONNX session information, etc.).

-### Function *load_face_recognition_model()*
+### Function *loadFaceRecognitionModel()*
 ```C
-void load_face_recognition_model(const char *model_path, face_recognition_model_t *model);
+void loadFaceRecognitionModel(const char *modelPath, FaceRecognitionModelT *model);
 ```
- Loads a face recognition model saved in the local filesystem (*model path*) in OpenDR format.
+ Loads a face recognition model saved in the local filesystem (*modelPath*) in OpenDR format.
 This function also initializes a CPU-based ONNX session for performing inference using this model.
 The pre-trained models should follow the OpenDR conventions.
 The Python API can be used to train and export an optimized OpenDR model that can be used for inference using the C API.

-### Function *free_face_recognition_model()*
+### Function *freeFaceRecognitionModel()*
 ```C
-void free_face_recognition_model(face_recognition_model_t *model);
+void freeFaceRecognitionModel(FaceRecognitionModelT *model);
 ```
 Releases the memory allocated for a face recognition model (*model*).

-### Function *infer_face_recognition()*
+### Function *inferFaceRecognition()*
 ```C
-opendr_category_target_t infer_face_recognition(face_recognition_model_t *model, opendr_image_t *image);
+OpendrCategoryTargetT inferFaceRecognition(FaceRecognitionModelT *model, OpendrImageT *image);
 ```
 This function performs inference using a face recognition model (*model*) and an input image (*image*).
 The function returns an OpenDR category structure with the inference results.
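+For illustration, a minimal usage sketch of the calls above is shown below; the model and image paths are hypothetical, the *loadImage()*/*freeImage()* helpers come from *opendr_utils.h*, and error handling is omitted.
+```C
+#include "face_recognition.h"
+#include "opendr_utils.h"
+
+int main() {
+  // Load a model exported in OpenDR format with the Python API (hypothetical path)
+  FaceRecognitionModelT model;
+  loadFaceRecognitionModel("./face_recognition_model", &model);
+
+  // Read an image from disk into the OpenDR image type (hypothetical path)
+  OpendrImageT image;
+  loadImage("./test_face.jpg", &image);
+
+  // Run inference; the returned category holds the person id and the confidence
+  OpendrCategoryTargetT category = inferFaceRecognition(&model, &image);
+
+  // Release the resources when done
+  freeImage(&image);
+  freeFaceRecognitionModel(&model);
+  return 0;
+}
+```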
-### Function *decode_category_face_recognition()*
+### Function *decodeCategoryFaceRecognition()*
 ```C
-void decode_category_face_recognition(face_recognition_model_t *model, opendr_category_target_t category, char *person_name);
+void decodeCategoryFaceRecognition(FaceRecognitionModelT *model, OpendrCategoryTargetT category, char *personName);
 ```
 Returns the name of a recognized person by decoding the category id into a string (this function uses the information from the built person database).

-### Function *build_database_face_recognition()*
+### Function *buildDatabaseFaceRecognition()*
 ```C
-void build_database_face_recognition(const char *database_folder, const char *output_path, face_recognition_model_t *model);
+void buildDatabaseFaceRecognition(const char *databaseFolder, const char *outputPath, FaceRecognitionModelT *model);
 ```
 Builds a face recognition database (containing images for persons to be recognized).
-This function expects the *database_folder* to have the same format as the main Python toolkit.
-The function calculates the features of the person that are contained in the database and it stores it into a binary file that can be then loaded to perform inference (*output_path*).
-A loaded face recongition model should be provided (*model*), since this model will be used for the feature extraction process.
+This function expects the (*databaseFolder*) to have the same format as the main Python toolkit.
+The function calculates the features of the persons contained in the database and stores them into a binary file that can then be loaded to perform inference (*outputPath*).
+A loaded face recognition model should be provided (*model*), since this model will be used for the feature extraction process.

-### Function *load_database_face_recognition()*
+### Function *loadDatabaseFaceRecognition()*
 ```C
-void load_database_face_recognition(const char *database_path, face_recognition_model_t *model);
+void loadDatabaseFaceRecognition(const char *databasePath, FaceRecognitionModelT *model);
 ```
-Loads an already built database (*database_path) into a face recognition model (*model*).
+Loads an already built database (*databasePath*) into a face recognition model (*model*).
 After this step, the model can be used for performing inference.
diff --git a/docs/reference/c-object-detection-2d-nanodet-jit-h.md b/docs/reference/c-object-detection-2d-nanodet-jit-h.md
new file mode 100644
index 0000000000..d875834c3d
--- /dev/null
+++ b/docs/reference/c-object-detection-2d-nanodet-jit-h.md
@@ -0,0 +1,47 @@
+## C_API: object_detection_2d_nanodet_jit.h
+
+
+The *object_detection_2d_nanodet_jit.h* header provides function definitions that can be used for accessing the OpenDR object detection 2D Nanodet tool.
+
+### Struct *NanodetModelT*
+```C
+
+struct NanodetModel {
+  ...
+};
+typedef struct NanodetModel NanodetModelT;
+```
+The *NanodetModelT* structure keeps all the necessary information that is required by the OpenDR object detection 2D Nanodet tool (e.g., model weights, normalization information, etc.).
+
+
+### Function *loadNanodetModel()*
+```C
+void loadNanodetModel(char *modelPath, char *device, int height, int width, float scoreThreshold, NanodetModelT *model);
+```
+Loads a Nanodet object detection model saved in the local filesystem (*modelPath*) in OpenDR format.
+This function also initializes a JIT network on the specified (*device*) for performing inference using this model.
+The pre-trained models should follow the OpenDR conventions.
+The Python API can be used to train and export an optimized OpenDR model that can be used for inference using the C API.
+
+### Function *freeNanodetModel()*
+```C
+void freeNanodetModel(NanodetModelT *model);
+```
+Releases the memory allocated for an object detection 2D Nanodet model (*model*).
+
+
+### Function *inferNanodet()*
+```C
+OpendrDetectionVectorTargetT inferNanodet(NanodetModelT *model, OpendrImageT *image);
+```
+This function performs inference using an object detection 2D Nanodet model (*model*) and an input image (*image*).
+The function returns an OpenDR detection vector structure with the inference results.
+
+
+### Function *drawBboxes()*
+```C
+void drawBboxes(OpendrImageT *image, NanodetModelT *model, OpendrDetectionVectorTargetT *detectionsVector);
+```
+This function draws the given detections (*detectionsVector*) onto the input image (*image*) and then shows the image on screen.
+The (*model*) keeps all the necessary information.
+
diff --git a/docs/reference/c-opendr-utils-h.md b/docs/reference/c-opendr-utils-h.md
index 4e76a24258..baf3f4e19e 100644
--- a/docs/reference/c-opendr-utils-h.md
+++ b/docs/reference/c-opendr-utils-h.md
@@ -3,20 +3,67 @@
 The *opendr_utils.h* header provides function definitions of OpenDR helpers (e.g., for creating OpenDR images).

-### Function *load_image()*
+### Function *jsonGetKeyString()*
 ```C
-void load_image(const char *path, opendr_image_t *image);
+const char* jsonGetKeyString(const char *json, const char *key, const int index);
 ```
-The *load_image()* function allows for reading an images from the local file system (*path*) into an OpenDR image data type.
-A pointer (*image*) to an OpenDR *opendr_image_t* should be provided.
-This function allocates memory during each function call, so be sure to use the *free_image()* function to release the allocated resources, when the corresponding image is no longer needed.
+The *jsonGetKeyString()* function reads a JSON string from the pointer (*json*) and returns the value of a key with pointer (*key*) as a string.
+If the value is an array, it will return only the (*index*) value of the array.
+If it fails, it returns ("").
+### Function *jsonGetKeyFloat()*
+```C
+float jsonGetKeyFloat(const char *json, const char *key, const int index);
+```
+The *jsonGetKeyFloat()* function reads a JSON string from the pointer (*json*) and returns the value of a key with pointer (*key*) as a float.
+If the value is an array, it will return only the (*index*) value of the array.
+If it fails, it returns (*0.0f*).
+
+### Function *jsonGetKeyFromInferenceParams()*
+```C
+float jsonGetKeyFromInferenceParams(const char *json, const char *key, const int index);
+```
+The *jsonGetKeyFromInferenceParams()* function reads a JSON string from the pointer (*json*) and returns the value of a key with pointer (*key*) in the *inference_params* section as a float.
+If the value is an array, it will return only the (*index*) value of the array.
+If it fails, it returns (*0.0f*).
+---

-### Function *free_image()*
 ```C
-void free_image(opendr_image_t *image);
+void loadImage(const char *path, OpendrImageT *image);
 ```
-The *free_image()* function releases the memory allocated for an OpenDR image structure (*image*).
-A pointer (*image*) to an OpenDR *opendr_image_t* should be provided.
+### Function *loadImage()*
+The *loadImage()* function loads an image from the local file system (*path*) into an OpenDR image data type.
+A pointer (*image*) to an OpenDR *OpendrImageT* should be provided.
+This function allocates memory during each function call, so be sure to use the *freeImage()* function to release the allocated resources, when the corresponding image is no longer needed.
+
+### Function *freeImage()*
+```C
+void freeImage(OpendrImageT *image);
+```
+The *freeImage()* function releases the memory allocated for an OpenDR image structure (*image*).
+A pointer (*image*) to an OpenDR *OpendrImageT* should be provided.
+
+---
+### Function *initDetectionsVector()*
+```C
+void initDetectionsVector(OpendrDetectionVectorTargetT *detectionVector);
+```
+The *initDetectionsVector()* function initializes the data of an OpenDR detection vector structure (*detectionVector*) with zero values.
+A pointer (*detectionVector*) to an OpenDR *OpendrDetectionVectorTargetT* should be provided.
+
+### Function *loadDetectionsVector()*
+```C
+void loadDetectionsVector(OpendrDetectionVectorTargetT *detectionVector, OpendrDetectionTargetT *detection,
+                          int vectorSize);
+```
+The *loadDetectionsVector()* function stores the given OpenDR detection target structures (*detection*) in the memory allocated for (*vectorSize*) OpenDR detection structures.
+A pointer (*detectionVector*) to an OpenDR *OpendrDetectionVectorTargetT* should be provided.
+
+### Function *freeDetectionsVector()*
+```C
+void freeDetectionsVector(OpendrDetectionVectorTargetT *detectionVector);
+```
+The *freeDetectionsVector()* function releases the memory allocated for an OpenDR detection vector structure (*detectionVector*).
+A pointer (*detectionVector*) to an OpenDR *OpendrDetectionVectorTargetT* should be provided.
diff --git a/docs/reference/c-target-h.md b/docs/reference/c-target-h.md
index 6f748759e8..d8303cb58c 100644
--- a/docs/reference/c-target-h.md
+++ b/docs/reference/c-target-h.md
@@ -3,20 +3,20 @@
 The *target.h* header provides definitions of OpenDR targets (inference outputs) that can be used in the C API of OpenDR.

-### struct *opendr_category_target_t*
+### struct *OpendrCategoryTargetT*
 ```C
-struct opendr_category_target{
+struct OpendrCategoryTarget{
   int data;
   float confidence;
 };
-typedef struct opendr_category_target opendr_category_target_t;
+typedef struct OpendrCategoryTarget OpendrCategoryTargetT;
 ```
-The *opendr_category_target_t* structure provides a data structure for storing inference outputs of classification models.
+The *OpendrCategoryTargetT* structure provides a data structure for storing inference outputs of classification models.
 Every function in the C API that outputs a classification decision is expected to use this structure.

-The *opendr_category_target_t* structure has the following field:
+The *OpendrCategoryTargetT* structure has the following fields:

 #### `int data` field

@@ -25,3 +25,71 @@
 A numerical id of the category to which the input object belongs.

 #### `float confidence` field

 The decision confidence (a value between 0 and 1).
+
+
+### struct *OpendrDetectionTargetT*
+```C
+struct OpendrDetectionTarget {
+  int name;
+  float left;
+  float top;
+  float width;
+  float height;
+  float score;
+};
+typedef struct OpendrDetectionTarget OpendrDetectionTargetT;
+```
+
+
+The *OpendrDetectionTargetT* structure provides a data structure for storing inference outputs of detection models.
+Every function in the C API that outputs a detection decision is expected to use this structure or a vector of this structure.
+
+The *OpendrDetectionTargetT* structure has the following fields:
+
+#### `int name` field
+
+A numerical id of the category to which the input object belongs.
+
+#### `float left` field
+
+A numerical value that corresponds to the X value of the top-left point of a detection.
+
+#### `float top` field
+
+A numerical value that corresponds to the Y value of the top-left point of a detection.
+
+#### `float width` field
+
+A numerical value that corresponds to the width of a detection.
+
+#### `float height` field
+
+A numerical value that corresponds to the height of a detection.
+
+#### `float score` field
+
+The decision score (a value between 0 and 1).
+
+
+### struct *OpendrDetectionVectorTargetT*
+```C
+struct OpendrDetectionVectorTarget {
+  OpendrDetectionTargetT *startingPointer;
+  int size;
+};
+typedef struct OpendrDetectionVectorTarget OpendrDetectionVectorTargetT;
+```
+
+
+The *OpendrDetectionVectorTargetT* structure provides a data structure for storing multiple inference outputs of detection models.
+Every function in the C API that outputs a detection decision is expected to use this or an *OpendrDetectionTargetT* structure.
+
+The *OpendrDetectionVectorTargetT* structure has the following fields:
+
+#### `OpendrDetectionTargetT *startingPointer` field
+
+A pointer to multiple OpenDR detection targets.
+
+#### `int size` field
+
+A numerical value that represents the number of OpenDR detection structures that are stored.
diff --git a/docs/reference/nanodet.md b/docs/reference/nanodet.md
deleted file mode 100644
index 765f210673..0000000000
--- a/docs/reference/nanodet.md
+++ /dev/null
@@ -1,289 +0,0 @@
-## nanodet module
-
-The *nanodet* module contains the *NanodetLearner* class, which inherits from the abstract class *Learner*.
-
-### Class NanodetLearner
-Bases: `engine.learners.Learner`
-
-The *NanodetLearner* class is a wrapper of the Nanodet object detection algorithms based on the original
-[Nanodet implementation](https://github.com/RangiLyu/nanodet).
-It can be used to perform object detection on images (inference) and train All predefined Nanodet object detection models and new modular models from the user.
-
-The [NanodetLearner](../../src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py) class has the
-following public methods:
-
-#### `NanodetLearner` constructor
-```python
-NanodetLearner(self, model_to_use, iters, lr, batch_size, checkpoint_after_iter, checkpoint_load_iter, temp_path, device,
-               weight_decay, warmup_steps, warmup_ratio, lr_schedule_T_max, lr_schedule_eta_min, grad_clip)
-```
-
-Constructor parameters:
-
-- **model_to_use**: *{"EfficientNet_Lite0_320", "EfficientNet_Lite1_416", "EfficientNet_Lite2_512", "RepVGG_A0_416",
-  "t", "g", "m", "m_416", "m_0.5x", "m_1.5x", "m_1.5x_416", "plus_m_320", "plus_m_1.5x_320", "plus_m_416",
-  "plus_m_1.5x_416", "custom"}, default=plus_m_1.5x_416*\
-  Specifies the model to use and the config file that contains all hyperparameters for training, evaluation and inference as the original
-  [Nanodet implementation](https://github.com/RangiLyu/nanodet). If you want to overwrite some of the parameters you can
-  put them as parameters in the learner.
-- **iters**: *int, default=None*\
-  Specifies the number of epochs the training should run for.
-- **lr**: *float, default=None*\
-  Specifies the initial learning rate to be used during training.
-- **batch_size**: *int, default=None*\
-  Specifies number of images to be bundled up in a batch during training.
-  This heavily affects memory usage, adjust according to your system.
-- **checkpoint_after_iter**: *int, default=None*\
-  Specifies per how many training iterations a checkpoint should be saved.
-  If it is set to 0 no checkpoints will be saved.
-- **checkpoint_load_iter**: *int, default=None*\
-  Specifies which checkpoint should be loaded.
-  If it is set to 0, no checkpoints will be loaded.
-- **temp_path**: *str, default=''*\
-  Specifies a path where the algorithm looks for saving the checkpoints along with the logging files. If *''* the `cfg.save_dir` will be used instead.
-- **device**: *{'cpu', 'cuda'}, default='cuda'*\
-  Specifies the device to be used.
-- **weight_decay**: *float, default=None*\
-- **warmup_steps**: *int, default=None*\
-- **warmup_ratio**: *float, default=None*\
-- **lr_schedule_T_max**: *int, default=None*\
-- **lr_schedule_eta_min**: *float, default=None*\
-- **grad_clip**: *int, default=None*\
-
-#### `NanodetLearner.fit`
-```python
-NanodetLearner.fit(self, dataset, val_dataset, logging_path, verbose, seed)
-```
-
-This method is used for training the algorithm on a train dataset and validating on a val dataset.
-
-Parameters:
-
-- **dataset**: *ExternalDataset*\
-  Object that holds the training dataset.
-  Can be of type `ExternalDataset`.
-- **val_dataset** : *ExternalDataset, default=None*\
-  Object that holds the validation dataset.
-  Can be of type `ExternalDataset`.
-- **logging_path** : *str, default=''*\
-  Subdirectory in temp_path to save log files and TensorBoard.
-- **verbose** : *bool, default=True*\
-  Enables the maximum verbosity and the logger.
-- **seed** : *int, default=123*\
-  Seed for repeatability.
-
-#### `NanodetLearner.eval`
-```python
-NanodetLearner.eval(self, dataset, verbose)
-```
-
-This method is used to evaluate a trained model on an evaluation dataset.
-Saves a txt logger file containing stats regarding evaluation.
-
-Parameters:
-
-- **dataset** : *ExternalDataset*\
-  Object that holds the evaluation dataset.
-- **verbose**: *bool, default=True*\
-  Enables the maximum verbosity and logger.
-
-#### `NanodetLearner.infer`
-```python
-NanodetLearner.infer(self, input, thershold, verbose)
-```
-
-This method is used to perform object detection on an image.
-Returns an `engine.target.BoundingBoxList` object, which contains bounding boxes that are described by the left-top corner and
-its width and height, or returns an empty list if no detections were made of the image in input.
-
-Parameters:
-- **input** : *Image*\
-  Image type object to perform inference on it.
- - **threshold**: *float, default=0.35*\
-  Specifies the threshold for object detection inference.
-  An object is detected if the confidence of the output is higher than the specified threshold.
-- **verbose**: *bool, default=True*\
-  Enables the maximum verbosity and logger.
-
-#### `NanodetLearner.save`
-```python
-NanodetLearner.save(self, path, verbose)
-```
-
-This method is used to save a trained model with its metadata.
-Provided with the path, it creates the "path" directory, if it does not already exist.
-Inside this folder, the model is saved as *"nanodet_{model_name}.pth"* and a metadata file *"nanodet_{model_name}.json"*.
-If the directory already exists, the *"nanodet_{model_name}.pth"* and *"nanodet_{model_name}.json"* files are overwritten.
-
-Parameters:
-
-- **path**: *str, default=None*\
-  Path to save the model, if None it will be the `"temp_folder"` or the `"cfg.save_dir"` from learner.
-- **verbose**: *bool, default=True*\
-  Enables the maximum verbosity and logger.
-
-#### `NanodetLearner.load`
-```python
-NanodetLearner.load(self, path, verbose)
-```
-
-This method is used to load a previously saved model from its saved folder.
-Loads the model from inside the directory of the path provided, using the metadata .json file included.
-
-Parameters:
-
-- **path**: *str, default=None*\
-  Path of the model to be loaded.
-- **verbose**: *bool, default=True*\
-  Enables the maximum verbosity and logger.
-
-#### `NanodetLearner.download`
-```python
-NanodetLearner.download(self, path, mode, model, verbose, url)
-```
-
-Downloads data needed for the various functions of the learner, e.g., pretrained models as well as test data.
-
-Parameters:
-
-- **path**: *str, default=None*\
-  Specifies the folder where data will be downloaded. If *None*, the *self.temp_path* directory is used instead.
-- **mode**: *{'pretrained', 'images', 'test_data'}, default='pretrained'*\
-  If *'pretrained'*, downloads a pretrained detector model from the *model_to_use* architecture which was chosen at learner initialization.
-  If *'images'*, downloads an image to perform inference on. If *'test_data'* downloads a dummy dataset for testing purposes.
-- **verbose**: *bool, default=False*\
-  Enables the maximum verbosity and logger.
-- **url**: *str, default=OpenDR FTP URL*\
-  URL of the FTP server.
-
-
-#### Tutorials and Demos
-
-A tutorial on performing inference is available.
-Furthermore, demos on performing [training](../../projects/perception/object_detection_2d/nanodet/train_demo.py),
-[evaluation](../../projects/perception/object_detection_2d/nanodet/eval_demo.py) and
-[inference](../../projects/perception/object_detection_2d/nanodet/inference_demo.py) are also available.
-
-
-
-#### Examples
-
-* **Training example using an `ExternalDataset`.**
-
-  To train properly, the architecture weights must be downloaded in a predefined directory before fit is called, in this case the directory name is "predefined_examples".
-  Default architecture is *'plus-m-1.5x_416'*.
-  The training and evaluation dataset root should be present in the path provided, along with the annotation files.
-  The default COCO 2017 training data can be found [here](https://cocodataset.org/#download) (train, val, annotations).
-  All training parameters (optimizer, lr schedule, losses, model parameters etc.) can be changed in the model config file
-  in [config directori](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/config).
-  You can find more informations in [config file detail](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/config/config_file_detail.md).
-  For easier use, with NanodetLearner parameters user can overwrite the following parameters:
-  (iters, lr, batch_size, checkpoint_after_iter, checkpoint_load_iter, temp_path, device, weight_decay, warmup_steps,
-  warmup_ratio, lr_schedule_T_max, lr_schedule_eta_min, grad_clip)
-
-  **Note**
-
-  The Nanodet tool can be used with any PASCAL VOC or COCO like dataset. The only thing is needed is to provide the correct root and dataset type.
-
-  If *'voc'* is choosed for *dataset* the directory must look like this:
-
-  - root folder
-    - train
-      - Annotations
-        - image1.xml
-        - image2.xml
-        - ...
-      - JPEGImages
-        - image1.jpg
-        - image2.jpg
-        - ...
-    - val
-      - Annotations
-        - image1.xml
-        - image2.xml
-        - ...
-      - JPEGImages
-        - image1.jpg
-        - image2.jpg
-        - ...
-
-  On the other hand if *'coco'* is choosed for *dataset* the directory must look like this:
-
-  - root folder
-    - train2017
-      - image1.jpg
-      - image2.jpg
-      - ...
-    - val2017
-      - image1.jpg
-      - image2.jpg
-      - ...
-    - annotations
-      - instances_train2017.json
-      - instances_val2017.json
-
-  You can change the default annotation and image directories in [dataset](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py)
-
-  ```python
-  import argparse
-
-  from opendr.engine.datasets import ExternalDataset
-  from opendr.perception.object_detection_2d import NanodetLearner
-
-
-  if __name__ == '__main__':
-      parser = argparse.ArgumentParser()
-      parser.add_argument("--dataset", help="Dataset to train on", type=str, default="coco", choices=["voc", "coco"])
-      parser.add_argument("--data-root", help="Dataset root folder", type=str)
-      parser.add_argument("--model", help="Model that config file will be used", type=str)
-      parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
-      parser.add_argument("--batch-size", help="Batch size to use for training", type=int, default=6)
-      parser.add_argument("--lr", help="Learning rate to use for training", type=float, default=5e-4)
-      parser.add_argument("--checkpoint-freq", help="Frequency in-between checkpoint saving and evaluations", type=int, default=50)
-      parser.add_argument("--n-epochs", help="Number of total epochs", type=int, default=300)
-      parser.add_argument("--resume-from", help="Epoch to load checkpoint file and resume training from", type=int, default=0)
-
-      args = parser.parse_args()
-
-      if args.dataset == 'voc':
-          dataset = ExternalDataset(args.data_root, 'voc')
-          val_dataset = ExternalDataset(args.data_root, 'voc')
-      elif args.dataset == 'coco':
-          dataset = ExternalDataset(args.data_root, 'coco')
-          val_dataset = ExternalDataset(args.data_root, 'coco')
-
-      nanodet = NanodetLearner(model_to_use=args.model, iters=args.n_epochs, lr=args.lr, batch_size=args.batch_size,
-                               checkpoint_after_iter=args.checkpoint_freq, checkpoint_load_iter=args.resume_from,
-                               device=args.device)
-
-      nanodet.download("./predefined_examples", mode="pretrained")
-      nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True)
-      nanodet.fit(dataset, val_dataset)
-      nanodet.save()
-  ```
-
-* **Inference and result drawing example on a test image.**
-
-  This example shows how to perform inference on an image and draw the resulting bounding boxes using a nanodet model that is pretrained on the COCO dataset.
-  Moreover, inference can be used in all images in a folder, frames of a video or a webcam feedback with the provided *mode*.
-  In this example first is downloaded a pre-trained model as in training example and then an image to be inference.
-  With the same *path* parameter you can choose a folder or a video file to be used as inference. Last but not least, if 'webcam' is
-  used in *mode* the *camid* parameter of inference must be used to determine the webcam device in your machine.
-
-  ```python
-  import argparse
-  from opendr.perception.object_detection_2d import NanodetLearner
-
-  if __name__ == '__main__':
-      parser = argparse.ArgumentParser()
-      parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
-      parser.add_argument("--model", help="Model that config file will be used", type=str)
-      args = parser.parse_args()
-
-      nanodet = NanodetLearner(model_to_use=args.model, device=args.device)
-
-      nanodet.download("./predefined_examples", mode="pretrained")
-      nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True)
-      nanodet.download("./predefined_examples", mode="images")
-      boxes = nanodet.infer(path="./predefined_examples/000000000036.jpg")
-  ```
\ No newline at end of file
diff --git a/docs/reference/object-detection-2d-nanodet.md b/docs/reference/object-detection-2d-nanodet.md
new file mode 100644
index 0000000000..009e1358d5
--- /dev/null
+++ b/docs/reference/object-detection-2d-nanodet.md
@@ -0,0 +1,410 @@
+## nanodet module
+
+The *nanodet* module contains the *NanodetLearner* class, which inherits from the abstract class *Learner*.
+
+### Class NanodetLearner
+Bases: `engine.learners.Learner`
+
+The *NanodetLearner* class is a wrapper of the Nanodet object detection algorithms based on the original
+[Nanodet implementation](https://github.com/RangiLyu/nanodet).
+It can be used to perform object detection on images (inference) and to train all predefined Nanodet object detection models, as well as new modular models defined by the user.
+
+The [NanodetLearner](../../src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py) class has the
+following public methods:
+
+#### `NanodetLearner` constructor
+```python
+NanodetLearner(self, model_to_use, iters, lr, batch_size, checkpoint_after_iter, checkpoint_load_iter, temp_path, device,
+               weight_decay, warmup_steps, warmup_ratio, lr_schedule_T_max, lr_schedule_eta_min, grad_clip)
+```
+
+Constructor parameters:
+
+- **model_to_use**: *{"EfficientNet_Lite0_320", "EfficientNet_Lite1_416", "EfficientNet_Lite2_512", "RepVGG_A0_416",
+  "t", "g", "m", "m_416", "m_0.5x", "m_1.5x", "m_1.5x_416", "plus_m_320", "plus_m_1.5x_320", "plus_m_416",
+  "plus_m_1.5x_416", "custom"}, default=m*\
+  Specifies the model to use and the config file that contains all hyperparameters for training, evaluation and inference as in the original
+  [Nanodet implementation](https://github.com/RangiLyu/nanodet). If you want to overwrite some of the parameters you can
+  put them as parameters in the learner.
+- **iters**: *int, default=None*\
+  Specifies the number of epochs the training should run for.
+- **lr**: *float, default=None*\
+  Specifies the initial learning rate to be used during training.
+- **batch_size**: *int, default=None*\
+  Specifies number of images to be bundled up in a batch during training.
+  This heavily affects memory usage, adjust according to your system.
+- **checkpoint_after_iter**: *int, default=None*\
+  Specifies per how many training iterations a checkpoint should be saved.
+  If it is set to 0 no checkpoints will be saved.
+- **checkpoint_load_iter**: *int, default=None*\
+  Specifies which checkpoint should be loaded.
+  If it is set to 0, no checkpoints will be loaded.
+- **temp_path**: *str, default=''*\
+  Specifies a path where the algorithm saves the checkpoints along with the logging files. If *''* the `cfg.save_dir` will be used instead.
+- **device**: *{'cpu', 'cuda'}, default='cuda'*\
+  Specifies the device to be used.
+- **weight_decay**: *float, default=None*\
+- **warmup_steps**: *int, default=None*\
+- **warmup_ratio**: *float, default=None*\
+- **lr_schedule_T_max**: *int, default=None*\
+- **lr_schedule_eta_min**: *float, default=None*\
+- **grad_clip**: *int, default=None*\
+
+#### `NanodetLearner.fit`
+```python
+NanodetLearner.fit(self, dataset, val_dataset, logging_path, verbose, logging, seed, local_rank)
+```
+
+This method is used for training the algorithm on a train dataset and validating on a val dataset.
+
+Parameters:
+
+- **dataset**: *object*\
+  Object that holds the training dataset.
+  Can be of type `ExternalDataset` or `XMLBasedDataset`.
+- **val_dataset** : *object, default=None*\
+  Object that holds the validation dataset.
+  Can be of type `ExternalDataset` or `XMLBasedDataset`.
+- **logging_path** : *str, default=''*\
+  Subdirectory in temp_path to save log files and TensorBoard.
+- **verbose** : *bool, default=True*\
+  Enables verbosity.
+- **logging** : *bool, default=False*\
+  Enables the maximum verbosity and the logger.
+- **seed** : *int, default=123*\
+  Seed for repeatability.
+- **local_rank** : *int, default=1*\
+  Needed if training on multiple machines.
+
+#### `NanodetLearner.eval`
+```python
+NanodetLearner.eval(self, dataset, verbose, logging, local_rank)
+```
+
+This method is used to evaluate a trained model on an evaluation dataset.
+Saves a txt logger file containing stats regarding evaluation.
+
+Parameters:
+
+- **dataset** : *object*\
+  Object that holds the evaluation dataset.
+  Can be of type `ExternalDataset` or `XMLBasedDataset`.
+- **verbose**: *bool, default=True*\
+  Enables verbosity.
+- **logging**: *bool, default=False*\
+  Enables the maximum verbosity and logger.
+- **local_rank** : *int, default=1*\
+  Needed if evaluating on multiple machines.
+
+#### `NanodetLearner.infer`
+```python
+NanodetLearner.infer(self, input, threshold, nms_max_num)
+```
+
+This method is used to perform object detection on an image.
+Returns an `engine.target.BoundingBoxList` object, which contains bounding boxes that are described by the top-left corner and
+their width and height, or returns an empty list if no detections were made on the input image.
+
+Parameters:
+- **input** : *object*\
+  Object of type *engine.data.Image* to perform inference on.
+- **threshold**: *float, default=0.35*\
+  Specifies the threshold for object detection inference.
+  An object is detected if the confidence of the output is higher than the specified threshold.
+- **nms_max_num**: *int, default=100*\
+  Determines the maximum number of bounding boxes that will be retained following non-maximum suppression (NMS).
+
+#### `NanodetLearner.optimize`
+```python
+NanodetLearner.optimize(self, export_path, verbose, optimization, nms_max_num)
+```
+
+This method is used to perform JIT or ONNX optimizations and save a trained model with its metadata.
+If a model is not present in the location specified by *export_path*, the optimizer will save it there.
+If a model is already present, it will load it instead.
+Inside this folder, the model is saved as *nanodet_{model_name}.pth* for JIT models or *nanodet_{model_name}.onnx* for ONNX, along with a metadata file *nanodet_{model_name}.json*.
+
+Note: In ONNX optimization, the output model executes the original model's feed forward method.
+The user must create their own pre- and post-processing in order to use the ONNX model in the C API.
+In JIT optimization, the output model performs the feed forward pass and post-processing.
+To use the C API, it is recommended to use JIT optimization as shown in the [example of OpenDR's C API](../../projects/c_api/samples/object_detection/nanodet/nanodet_jit_demo.c).
+
+Parameters:
+
+- **export_path**: *str*\
+  Path to save or load the optimized model.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity.
+- **optimization**: *str, default="jit"*\
+  It determines what kind of optimization is used; possible values are *jit* or *onnx*.
+- **nms_max_num**: *int, default=100*\
+  Determines the maximum number of bounding boxes that will be retained following non-maximum suppression (NMS).
+
+#### `NanodetLearner.save`
+```python
+NanodetLearner.save(self, path, verbose)
+```
+
+This method is used to save a trained model with its metadata.
+Provided with the path, it creates the *path* directory, if it does not already exist.
+Inside this folder, the model is saved as *nanodet_{model_name}.pth* and a metadata file *nanodet_{model_name}.json*.
+If the directory already exists, the *nanodet_{model_name}.pth* and *nanodet_{model_name}.json* files are overwritten.
+If optimization is performed, the optimized model is saved instead.
+
+Parameters:
+
+- **path**: *str, default=None*\
+  Path to save the model, if None it will be `"temp_folder"` or `"cfg.save_dir"` from the learner.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity and logger.
+
+#### `NanodetLearner.load`
+```python
+NanodetLearner.load(self, path, verbose)
+```
+
+This method is used to load a previously saved model from its saved folder.
+Loads the model from inside the directory of the path provided, using the metadata .json file included.
+If optimization is performed, the optimized model is loaded instead.
+
+Parameters:
+
+- **path**: *str, default=None*\
+  Path of the model to be loaded.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity.
+
+#### `NanodetLearner.download`
+```python
+NanodetLearner.download(self, path, mode, model, verbose, url)
+```
+
+Downloads data needed for the various functions of the learner, e.g., pretrained models as well as test data.
+
+Parameters:
+
+- **path**: *str, default=None*\
+  Specifies the folder where data will be downloaded. If *None*, the *self.temp_path* directory is used instead.
+- **mode**: *{'pretrained', 'images', 'test_data'}, default='pretrained'*\
+  If *'pretrained'*, downloads a pretrained detector model from the *model_to_use* architecture which was chosen at learner initialization.
+  If *'images'*, downloads an image to perform inference on. If *'test_data'* downloads a dummy dataset for testing purposes.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity.
+- **url**: *str, default=OpenDR FTP URL*\
+  URL of the FTP server.
+
+
+#### Tutorials and Demos
+
+A Jupyter notebook tutorial on performing inference is [available](../../projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb).
+Furthermore, demos on performing [training](../../projects/python/perception/object_detection_2d/nanodet/train_demo.py),
+[evaluation](../../projects/python/perception/object_detection_2d/nanodet/eval_demo.py) and
+[inference](../../projects/python/perception/object_detection_2d/nanodet/inference_demo.py) are also available.
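+Since JIT optimization is the recommended route for the C API, the sketch below illustrates the C-side call sequence against the functions declared in *object_detection_2d_nanodet_jit.h*; the model path, input size and threshold are placeholders, and error handling is omitted.
+```C
+#include "object_detection_2d_nanodet_jit.h"
+#include "opendr_utils.h"
+
+int main() {
+  // Load a JIT-optimized model produced by NanodetLearner.optimize() (hypothetical path)
+  NanodetModelT model;
+  loadNanodetModel("./jit/nanodet_m", "cpu", 320, 320, 0.35, &model);
+
+  // Read an input image (hypothetical path)
+  OpendrImageT image;
+  loadImage("./000000000036.jpg", &image);
+
+  // Run inference, then draw the detections and show the image on screen
+  OpendrDetectionVectorTargetT detections = inferNanodet(&model, &image);
+  drawBboxes(&image, &model, &detections);
+
+  // Release the resources when done
+  freeDetectionsVector(&detections);
+  freeImage(&image);
+  freeNanodetModel(&model);
+  return 0;
+}
+```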
+
+
+
+#### Examples
+
+* **Training example using an `ExternalDataset`**
+
+  To train properly, the architecture weights must be downloaded into a predefined directory before `fit` is called; in this case, the directory name is "predefined_examples".
+  The default architecture is *'m'*.
+  The training and evaluation dataset root should be present in the path provided, along with the annotation files.
+  The default COCO 2017 training data can be found [here](https://cocodataset.org/#download) (train, val, annotations).
+  All training parameters (optimizer, lr schedule, losses, model parameters, etc.) can be changed in the model config file
+  in the [config directory](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/config).
+  You can find more information in the [corresponding documentation](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/config/config_file_detail.md).
+  For easier usage of the NanodetLearner, you can overwrite the following parameters:
+  (iters, lr, batch_size, checkpoint_after_iter, checkpoint_load_iter, temp_path, device, weight_decay, warmup_steps,
+  warmup_ratio, lr_schedule_T_max, lr_schedule_eta_min, grad_clip)
+
+  **Note**
+
+  The Nanodet tool can be used with any PASCAL VOC- or COCO-like dataset, by providing the correct root and dataset type.
+
+  If *'voc'* is chosen for *dataset*, the directory must look like this:
+
+  - root folder
+    - train
+      - Annotations
+        - image1.xml
+        - image2.xml
+        - ...
+      - JPEGImages
+        - image1.jpg
+        - image2.jpg
+        - ...
+    - val
+      - Annotations
+        - image1.xml
+        - image2.xml
+        - ...
+      - JPEGImages
+        - image1.jpg
+        - image2.jpg
+        - ...
+
+  On the other hand, if *'coco'* is chosen for *dataset*, the directory must look like this:
+
+  - root folder
+    - train2017
+      - image1.jpg
+      - image2.jpg
+      - ...
+    - val2017
+      - image1.jpg
+      - image2.jpg
+      - ...
+    - annotations
+      - instances_train2017.json
+      - instances_val2017.json
+
+  You can change the default annotation and image directories in [the *build_dataset* function](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py).
+  This example assumes the data has been downloaded and placed in the directory referenced by `data_root`.
+  ```python
+  from opendr.engine.datasets import ExternalDataset
+  from opendr.perception.object_detection_2d import NanodetLearner
+
+
+  if __name__ == '__main__':
+      data_root = "/path/to/dataset/root"  # root folder structured as described above
+      dataset = ExternalDataset(data_root, 'voc')
+      val_dataset = ExternalDataset(data_root, 'voc')
+
+      nanodet = NanodetLearner(model_to_use='m', iters=300, lr=5e-4, batch_size=8,
+                               checkpoint_after_iter=50, checkpoint_load_iter=0,
+                               device="cpu")
+
+      nanodet.download("./predefined_examples", mode="pretrained")
+      nanodet.load("./predefined_examples/nanodet_m", verbose=True)
+      nanodet.fit(dataset, val_dataset)
+      nanodet.save()
+
+  ```
+
+* **Inference and result drawing example on a test image**
+
+  This example shows how to perform inference on an image and draw the resulting bounding boxes, using a nanodet model that is pretrained on the COCO dataset.
+  In this example, a pretrained model is downloaded and inference is performed on an image that can be specified with the *path* parameter.
+
+  ```python
+  from opendr.perception.object_detection_2d import NanodetLearner
+  from opendr.engine.data import Image
+  from opendr.perception.object_detection_2d import draw_bounding_boxes
+
+  if __name__ == '__main__':
+      nanodet = NanodetLearner(model_to_use='m', device="cpu")
+      nanodet.download("./predefined_examples", mode="pretrained")
+      nanodet.load("./predefined_examples/nanodet_m", verbose=True)
+      nanodet.download("./predefined_examples", mode="images")
+      img = Image.open("./predefined_examples/000000000036.jpg")
+      boxes = nanodet.infer(input=img)
+
+      draw_bounding_boxes(img.opencv(), boxes, class_names=nanodet.classes, show=True)
+  ```
+
+* **Optimization, inference and result drawing example on a test image**
+
+  This example shows how to perform optimization on a pretrained model, then run inference on an image and finally draw the resulting bounding boxes, using a nanodet model that is pretrained on the COCO dataset.
+  In this example, ONNX optimization is used, but JIT can also be used by changing *optimization* to *jit*.
+  The optimized model will be saved in the `./onnx/nanodet_m/` directory.
+  ```python
+  from opendr.engine.data import Image
+  from opendr.perception.object_detection_2d import NanodetLearner, draw_bounding_boxes
+
+
+  if __name__ == '__main__':
+      nanodet = NanodetLearner(model_to_use='m', device="cpu")
+      # The pretrained model and the test image are assumed to have been downloaded,
+      # e.g., with the download calls of the previous example
+      nanodet.load("./predefined_examples/nanodet_m", verbose=True)
+
+      # First read an OpenDR image from your dataset and run the optimizer:
+      img = Image.open("./predefined_examples/000000000036.jpg")
+      nanodet.optimize("./onnx/nanodet_m/", optimization="onnx")
+
+      boxes = nanodet.infer(input=img)
+
+      draw_bounding_boxes(img.opencv(), boxes, class_names=nanodet.classes, show=True)
+  ```
+
+
+#### Performance Evaluation
+
+In terms of speed, the performance of Nanodet is summarized in the tables below (in FPS).
+The speed is measured from the start of the forward pass until the end of post-processing.
+ +For PyTorch inference: + +| Method {input} | RTX 2070 | TX2 | NX | +|-----------------------------|----------|-------|-------| +| Efficient Lite0 {320} | 48.63 | 9.38 | 14.48 | +| Efficient Lite1 {416} | 43.88 | 7.93 | 11.07 | +| Efficient Lite2 {512} | 40.51 | 6.44 | 8.84 | +| RepVGG A0 {416} | 33.4 | 9.21 | 12.3 | +| Nanodet-g {416} | 51.32 | 9.57 | 15.75 | +| Nanodet-m {320} | 48.36 | 8.56 | 14.08 | +| Nanodet-m 0.5x {320} | 46.94 | 7.97 | 12.84 | +| Nanodet-m 1.5x {320} | 47.41 | 8.8 | 13.98 | +| Nanodet-m {416} | 47.3 | 8.34 | 13.15 | +| Nanodet-m 1.5x {416} | 45.62 | 8.43 | 13.2 | +| Nanodet-plus m {320} | 41.9 | 7.45 | 12.01 | +| Nanodet-plus m 1.5x {320} | 39.63 | 7.66 | 12.21 | +| Nanodet-plus m {416} | 40.16 | 7.24 | 11.58 | +| Nanodet-plus m 1.5x {416} | 38.94 | 7.37 | 11.52 | + +For JIT optimization inference: + +| Method {input} | RTX 2070 | TX2 | NX | +|-----------------------------|----------|-------|-------| +| Efficient Lite0 {320} | 69.06 | 12.94 | 17.78 | +| Efficient Lite1 {416} | 62.94 | 9.27 | 12.94 | +| Efficient Lite2 {512} | 65.46 | 7.46 | 10.32 | +| RepVGG A0 {416} | 41.44 | 11.16 | 14.89 | +| Nanodet-g {416} | 76.3 | 12.94 | 20.52 | +| Nanodet-m {320} | 75.66 | 12.22 | 20.67 | +| Nanodet-m 0.5x {320} | 65.71 | 11.31 | 17.68 | +| Nanodet-m 1.5x {320} | 66.23 | 12.46 | 19.99 | +| Nanodet-m {416} | 79.91 | 12.08 | 19.28 | +| Nanodet-m 1.5x {416} | 69.44 | 12.3 | 18.6 | +| Nanodet-plus m {320} | 67.82 | 11.19 | 18.85 | +| Nanodet-plus m 1.5x {320} | 64.12 | 11.57 | 18.26 | +| Nanodet-plus m {416} | 64.74 | 11.22 | 17.57 | +| Nanodet-plus m 1.5x {416} | 56.77 | 10.39 | 14.81 | + +For ONNX optimization inference: + +| Method {input} | RTX 2070 | +|-----------------------------|-----------| +| Efficient Lite0 {320} | 33.12 | +| Efficient Lite1 {416} | 16.78 | +| Efficient Lite2 {512} | 10.35 | +| RepVGG A0 {416} | 27.89 | +| Nanodet-g {416} | 103.22 | +| Nanodet-m {320} | 98.73 | +| Nanodet-m 0.5x {320} | 144.46 | +| Nanodet-m 1.5x {320} | 75.82 | +| Nanodet-m {416} | 73.09 | +| Nanodet-m 1.5x {416} | 51.30 | +| Nanodet-plus m {320} | 51.39 | +| Nanodet-plus m 1.5x {320} | 39.65 | +| Nanodet-plus m {416} | 39.17 | +| Nanodet-plus m 1.5x {416} | 28.55 | + +Finally, we measure the performance on the COCO dataset, using the corresponding metrics: + +| Method {input} | coco2017 mAP | +|-----------------------------|--------------| +| Efficient Lite0 {320} | 24.4 | +| Efficient Lite1 {416} | 29.2 | +| Efficient Lite2 {512} | 32.4 | +| RepVGG A0 {416} | 25.5 | +| Nanodet-g {416} | 22.7 | +| Nanodet-m {320} | 20.2 | +| Nanodet-m 0.5x {320} | 13.1 | +| Nanodet-m 1.5x {320} | 23.1 | +| Nanodet-m {416} | 23.5 | +| Nanodet-m 1.5x {416} | 26.6 | +| Nanodet-plus m {320} | 27.0 | +| Nanodet-plus m 1.5x {320} | 29.9 | +| Nanodet-plus m {416} | 30.3 | +| Nanodet-plus m 1.5x {416} | 34.1 | + \ No newline at end of file diff --git a/include/data.h b/include/data.h index 274067ab54..f366b8f20e 100644 --- a/include/data.h +++ b/include/data.h @@ -24,10 +24,10 @@ extern "C" { /*** * OpenDR data type for representing images */ -struct opendr_image { +struct OpendrImage { void *data; }; -typedef struct opendr_image opendr_image_t; +typedef struct OpendrImage OpendrImageT; #ifdef __cplusplus } diff --git a/include/face_recognition.h b/include/face_recognition.h index 3af615219b..43eed4afe4 100644 --- a/include/face_recognition.h +++ b/include/face_recognition.h @@ -24,84 +24,84 @@ extern "C" { #endif -struct face_recognition_model { +struct FaceRecognitionModel { // ONNX session objects - 
  void *onnx_session;
+  void *onnxSession;
   void *env;
-  void *session_options;
+  void *sessionOptions;
 
   // Sizes for resizing and cropping an input image
-  int model_size;
-  int resize_size;
+  int modelSize;
+  int resizeSize;
 
   // Statistics for normalization
-  float mean_value;
-  float std_value;
+  float meanValue;
+  float stdValue;
 
   // Recognition threshold
   float threshold;
 
   // Feature dimension
-  int output_size;
+  int outputSize;
 
   // Database data
   void *database;
-  int *database_ids;
-  char **person_names;
+  int *databaseIds;
+  char **personNames;
 
   // Number of persons in the database
-  int n_persons;
+  int nPersons;
   // Number of features vectors in the database
-  int n_features;
+  int nFeatures;
 };
-typedef struct face_recognition_model face_recognition_model_t;
+typedef struct FaceRecognitionModel FaceRecognitionModelT;
 
 /**
- * Loads a face recognition model saved in OpenDR format
- * @param model_path path to the OpenDR face recongition model (as exported using OpenDR library)
+ * Loads a face recognition model saved in OpenDR format.
+ * @param modelPath path to the OpenDR face recognition model (as exported using OpenDR library)
  * @param model the loaded model
  */
-void load_face_recognition_model(const char *model_path, face_recognition_model_t *model);
+void loadFaceRecognitionModel(const char *modelPath, FaceRecognitionModelT *model);
 
 /**
- * This function perform inference using a face recognition model and an input image
+ * This function performs inference using a face recognition model and an input image.
  * @param model face recognition model to be used for inference
  * @param image OpenDR image
  * @return OpenDR classification target containing the id of the recognized person
  */
-opendr_category_target_t infer_face_recognition(face_recognition_model_t *model, opendr_image_t *image);
+OpendrCategoryTargetT inferFaceRecognition(FaceRecognitionModelT *model, OpendrImageT *image);
 
 /**
  * Builds a face recognition database (containing images for persons to be recognized). This function expects the
- * database_folder to have the same format as the main Python toolkit.
- * @param database_folder folder containing the database
- * @param output_path output path to store the binary database. This file should be loaded along with the face
+ * databaseFolder to have the same format as the main Python toolkit.
+ * @param databaseFolder folder containing the database
+ * @param outputPath output path to store the binary database. This file should be loaded along with the face
  * recognition model before performing inference.
  * @param model the face recognition model to be used for extracting the database features
  */
-void build_database_face_recognition(const char *database_folder, const char *output_path, face_recognition_model_t *model);
+void buildDatabaseFaceRecognition(const char *databaseFolder, const char *outputPath, FaceRecognitionModelT *model);
 
 /**
  * Loads an already built database into the face recognition model. After this step, the model can be used for
  * performing inference.
- * @param database_path path to the database file
+ * @param databasePath path to the database file
  * @param model the face recognition model to be used for inference
  */
-void load_database_face_recognition(const char *database_path, face_recognition_model_t *model);
+void loadDatabaseFaceRecognition(const char *databasePath, FaceRecognitionModelT *model);
 
 /**
- * Returns the name of a recognition person by decoding the category id into a string
+ * Returns the name of a recognized person by decoding the category id into a string.
  * @param model the face recognition model to be used for inference
  * @param category the predicted category
- * @param person_name buffer to store the person name
+ * @param personName buffer to store the person name
  */
-void decode_category_face_recognition(face_recognition_model_t *model, opendr_category_target_t category, char *person_name);
+void decodeCategoryFaceRecognition(FaceRecognitionModelT *model, OpendrCategoryTargetT category, char *personName);
 
 /**
- * Releases the memory allocated for a face recognition model
+ * Releases the memory allocated for a face recognition model.
  * @param model model to be de-allocated
  */
-void free_face_recognition_model(face_recognition_model_t *model);
+void freeFaceRecognitionModel(FaceRecognitionModelT *model);
 
 #ifdef __cplusplus
 }
diff --git a/include/object_detection_2d_nanodet_jit.h b/include/object_detection_2d_nanodet_jit.h
new file mode 100644
index 0000000000..72a0288659
--- /dev/null
+++ b/include/object_detection_2d_nanodet_jit.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2020-2023 OpenDR European Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C_API_NANODET_H
+#define C_API_NANODET_H
+
+#include "opendr_utils.h"
+#include "target.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct NanodetModel {
+  // JIT C++ class holder
+  void *network;
+
+  // Device to be used
+  char *device;
+  int **colorList;
+  int numberOfClasses;
+
+  // Recognition threshold
+  float scoreThreshold;
+
+  // Model input size
+  int inputSizes[2];
+
+  // Keep ratio flag
+  int keepRatio;
+};
+typedef struct NanodetModel NanodetModelT;
+
+/**
+ * Loads a nanodet object detection model saved in libtorch format.
+ * @param modelPath path to the libtorch nanodet model (as exported using OpenDR)
+ * @param device the device that will be used for inference
+ * @param height the height of model input
+ * @param width the width of model input
+ * @param scoreThreshold confidence threshold
+ * @param model the model to be loaded
+ */
+void loadNanodetModel(char *modelPath, char *device, int height, int width, float scoreThreshold, NanodetModelT *model);
+
+/**
+ * This function performs inference using a nanodet object detection model and an input image.
+ * @param model nanodet model to be used for inference
+ * @param image OpenDR image
+ * @return OpenDR detection vector target containing the detections of the recognized objects
+ */
+OpendrDetectionVectorTargetT inferNanodet(NanodetModelT *model, OpendrImageT *image);
+
+/**
+ * Releases the memory allocated for a nanodet object detection model.
+ * @param model model to be de-allocated
+ */
+void freeNanodetModel(NanodetModelT *model);
+
+/**
+ * Draws the bounding boxes of the detections on the given image.
+ * @param image image that has been used for inference
+ * @param model nanodet model that has been used for inference
+ * @param detectionsVector output of the inference
+ */
+void drawBboxes(OpendrImageT *image, NanodetModelT *model, OpendrDetectionVectorTargetT *detectionsVector);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // C_API_NANODET_H
diff --git a/include/opendr_utils.h b/include/opendr_utils.h
index 1df1dc8af5..3b07d48868 100644
--- a/include/opendr_utils.h
+++ b/include/opendr_utils.h
@@ -18,23 +18,71 @@
 #define C_API_OPENDR_UTILS_H
 
 #include "data.h"
+#include "target.h"
 
 #ifdef __cplusplus
 extern "C" {
 #endif
 
 /**
- * Reads an image from path and saves it into OpenDR an image structure
+ * JSON parser for OpenDR model files.
+ * @param json a string of the JSON file
+ * @param key the key whose value will be extracted from the JSON file
+ * @param index the index to choose the value if it is an array, otherwise it is not used
+ * @return the value of the key as a string
+ */
+const char *jsonGetKeyString(const char *json, const char *key, const int index);
+
+/**
+ * JSON parser for OpenDR model files.
+ * @param json a string of the JSON file
+ * @param key the key whose value will be extracted from the JSON file
+ * @param index the index to choose the value if it is an array, otherwise it is not used
+ * @return the value of the key as a float
+ */
+float jsonGetKeyFloat(const char *json, const char *key, const int index);
+
+/**
+ * JSON parser for OpenDR model files from the inference_params key.
+ * @param json a string of the JSON file
+ * @param key the key whose value will be extracted from inference_params
+ * @param index the index to choose the value if it is an array, otherwise it is not used
+ * @return the value of the key as a float
+ */
+float jsonGetKeyFromInferenceParams(const char *json, const char *key, const int index);
+
+/**
+ * Reads an image from path and saves it into an OpenDR image structure.
 * @param path path from which the image will be read
 * @param image OpenDR image data structure to store the image
 */
-void load_image(const char *path, opendr_image_t *image);
+void loadImage(const char *path, OpendrImageT *image);
 
 /**
 * Releases the memory allocated for an OpenDR image structure
 * @param image OpenDR image structure to release
 */
-void free_image(opendr_image_t *image);
+void freeImage(OpendrImageT *image);
+
+/**
+ * Initializes an empty detection vector.
+ * @param detectionVector OpenDR OpendrDetectionVectorTarget structure to be initialized
+ */
+void initDetectionsVector(OpendrDetectionVectorTargetT *detectionVector);
+
+/**
+ * Loads an OpenDR detection target list.
+ * @param detectionVector OpenDR OpendrDetectionVectorTarget structure to be loaded
+ * @param detection a pointer to the first OpenDR detection target in a vector
+ * @param vectorSize the size of the vector
+ */
+void loadDetectionsVector(OpendrDetectionVectorTargetT *detectionVector, OpendrDetectionTargetT *detection, int vectorSize);
+
+/**
+ * Releases the memory allocated for a detection vector structure.
+ * @param detectionVector OpenDR detection vector target structure to release
+ */
+void freeDetectionsVector(OpendrDetectionVectorTargetT *detectionVector);
 
 #ifdef __cplusplus
 }
diff --git a/include/target.h b/include/target.h
index c93888606a..8313e14565 100644
--- a/include/target.h
+++ b/include/target.h
@@ -23,11 +23,33 @@ extern "C" {
 /***
 * OpenDR data type for representing classification targets
 */
-struct opendr_category_target {
+struct OpendrCategoryTarget {
  int data;
  float confidence;
 };
-typedef struct opendr_category_target opendr_category_target_t;
+typedef struct OpendrCategoryTarget OpendrCategoryTargetT;
+
+/***
+ * OpenDR data type for representing detection targets
+ */
+struct OpendrDetectionTarget {
+  int name;
+  float left;
+  float top;
+  float width;
+  float height;
+  float score;
+};
+typedef struct OpendrDetectionTarget OpendrDetectionTargetT;
+
+/***
+ * OpenDR data type for representing a structure of detection targets
+ */
+struct OpendrDetectionVectorTarget {
+  OpendrDetectionTargetT *startingPointer;
+  int size;
+};
+typedef struct OpendrDetectionVectorTarget OpendrDetectionVectorTargetT;
 
 #ifdef __cplusplus
 }
diff --git a/projects/c_api/Makefile b/projects/c_api/Makefile
index 3b1b567086..771c6fe63b 100644
--- a/projects/c_api/Makefile
+++ b/projects/c_api/Makefile
@@ -33,7 +33,10 @@ OPENDR_LD = -L$(OPENDR_HOME)/lib -lopendr
 
 all: download demos
 
-demos: $(BUILD_DIR)/face_recognition_demo
+demos: face_recognition nanodet
+
+face_recognition: $(BUILD_DIR)/face_recognition_demo
+nanodet: $(BUILD_DIR)/object_detection_2d/nanodet_jit_demo
 
 download:
	@+if [ -a $(DATA_DIR) ] ; \
@@ -47,6 +50,11 @@ download:
		$(MV) opendrdata.csd.auth.gr/perception/face_recognition/test_data/images $(DATA_DIR)/database; \
		$(WGET) ftp://opendrdata.csd.auth.gr/perception/face_recognition/optimized_model/*; \
		$(MV) opendrdata.csd.auth.gr/perception/face_recognition/optimized_model $(DATA_DIR)/optimized_model; \
+		$(MKDIR_P) $(DATA_DIR)/object_detection_2d/nanodet; \
+		$(WGET) ftp://opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/images/*; \
+		$(MV) opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/images $(DATA_DIR)/object_detection_2d/nanodet/database; \
+		$(WGET) ftp://opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/optimized_model/*; \
+		$(MV) opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/optimized_model $(DATA_DIR)/object_detection_2d/nanodet/optimized_model; \
		$(RM) -r opendrdata.csd.auth.gr; \
	fi;
 
@@ -55,8 +63,13 @@ $(BUILD_DIR)/face_recognition_demo:
	@+echo "Building face recognition demo..."
	$(CC) $(CFLAGS) -o $(BUILD_DIR)/face_recognition_demo samples/face_recognition/face_recognition_demo.c $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD)
 
+$(BUILD_DIR)/object_detection_2d/nanodet_jit_demo:
+	$(MKDIR_P) $(BUILD_DIR)/object_detection_2d
+	@+echo "Building nanodet object detection demo..."
+	$(CC) $(CFLAGS) -o $(BUILD_DIR)/object_detection_2d/nanodet_jit_demo samples/object_detection_2d/nanodet/nanodet_jit_demo.c $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD)
+
 clean:
	@+echo "Cleaning C API demo binaries and temporary files..."
-	@+$(RM) $(BUILD_DIR)/*
+	@+$(RM) -rf $(BUILD_DIR)/*
	@+$(RM) -rf $(DATA_DIR)
	@+echo "Done!"
diff --git a/projects/c_api/README.md b/projects/c_api/README.md
index 62dd65bea7..3f289ccf75 100644
--- a/projects/c_api/README.md
+++ b/projects/c_api/README.md
@@ -10,4 +10,5 @@ Make sure that you have downloaded the necessary resources before running the de
 ## Supported tools
 Currently, the following tools are exposing a C API:
 1. Face recognition
+2. Object detection 2D Nanodet
diff --git a/projects/c_api/samples/face_recognition/face_recognition_demo.c b/projects/c_api/samples/face_recognition/face_recognition_demo.c
index d6adf1488c..c19f11ff60 100644
--- a/projects/c_api/samples/face_recognition/face_recognition_demo.c
+++ b/projects/c_api/samples/face_recognition/face_recognition_demo.c
@@ -22,32 +22,32 @@
 
 int main(int argc, char *argv[]) {
   // Create a face recognition model
-  face_recognition_model_t model;
+  FaceRecognitionModelT model;
 
   // Load a pretrained model (see instructions for downloading the data)
-  load_face_recognition_model("data/optimized_model", &model);
+  loadFaceRecognitionModel("data/optimized_model", &model);
 
   // Build and load the database
-  build_database_face_recognition("data/database", "data/database.dat", &model);
-  load_database_face_recognition("data/database.dat", &model);
+  buildDatabaseFaceRecognition("data/database", "data/database.dat", &model);
+  loadDatabaseFaceRecognition("data/database.dat", &model);
 
   // Load an image and performance inference
-  opendr_image_t image;
-  load_image("data/database/1/1.jpg", &image);
+  OpendrImageT image;
+  loadImage("data/database/1/1.jpg", &image);
   if (!image.data) {
     printf("Image not found!");
     return 1;
   }
-  opendr_category_target_t res = infer_face_recognition(&model, &image);
+  OpendrCategoryTargetT res = inferFaceRecognition(&model, &image);
 
   // Free the image resources
-  free_image(&image);
+  freeImage(&image);
 
   // Get the prediction and decode it
   char buff[512];
-  decode_category_face_recognition(&model, res, buff);
+  decodeCategoryFaceRecognition(&model, res, buff);
   printf("Predicted category %d (folder name: %s) with confidence %f\n", res.data, buff, res.confidence);
 
   // Free the model resources
-  free_face_recognition_model(&model);
+  freeFaceRecognitionModel(&model);
 
   return 0;
 }
diff --git a/projects/c_api/samples/object_detection_2d/nanodet/README.md b/projects/c_api/samples/object_detection_2d/nanodet/README.md
new file mode 100644
index 0000000000..cf27c62956
--- /dev/null
+++ b/projects/c_api/samples/object_detection_2d/nanodet/README.md
@@ -0,0 +1,15 @@
+# OpenDR C API Nanodet Demo
+
+C API implementation of nanodet models for inference.
+To run the demo, the downloaded model can be used, or a model can be exported with JIT optimization from the Python implementation; see [Nanodet optimization](../../../../../docs/reference/object-detection-2d-nanodet.md#nanodetlearneroptimize).
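+
+For reference, a minimal sketch of such an export from the Python API is shown below (it follows the optimization example of the Python documentation; the export directory `./jit/nanodet_m` is an arbitrary choice for illustration):
+
+```python
+from opendr.perception.object_detection_2d import NanodetLearner
+
+if __name__ == '__main__':
+    nanodet = NanodetLearner(model_to_use='m', device="cpu")
+    nanodet.download("./predefined_examples", mode="pretrained")
+    nanodet.load("./predefined_examples/nanodet_m", verbose=True)
+    # Export a JIT-optimized model (plus its metadata .json) that the C demo can load
+    nanodet.optimize("./jit/nanodet_m", optimization="jit")
+```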
+
+After installation, the demo can be run from the projects/c_api directory with:
+```sh
+./build/object_detection_2d/nanodet_jit_demo ./path/to/your/model.pth device_name{cpu, cuda} ./path/to/your/image.jpg height width
+```
+
+Or with the downloaded model and image with:
+
+```sh
+./build/object_detection_2d/nanodet_jit_demo ./data/object_detection_2d/nanodet/optimized_model/nanodet_m.pth cuda ./data/object_detection_2d/nanodet/database/000000000036.jpg 320 320
+```
diff --git a/projects/c_api/samples/object_detection_2d/nanodet/nanodet_jit_demo.c b/projects/c_api/samples/object_detection_2d/nanodet/nanodet_jit_demo.c
new file mode 100644
index 0000000000..e384a6224f
--- /dev/null
+++ b/projects/c_api/samples/object_detection_2d/nanodet/nanodet_jit_demo.c
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2020-2023 OpenDR European Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include "object_detection_2d_nanodet_jit.h"
+#include "opendr_utils.h"
+
+int main(int argc, char **argv) {
+  if (argc != 6) {
+    fprintf(stderr,
+            "usage: %s [model_path] [device] [images_path] [input_sizes].\n"
+            "model_path = path/to/your/libtorch/model.pth \ndevice = cuda or cpu \n"
+            "images_path = \"xxx/xxx/*.jpg\" \ninput_sizes = height width.\n",
+            argv[0]);
+    return -1;
+  }
+
+  NanodetModelT model;
+
+  int height = atoi(argv[4]);
+  int width = atoi(argv[5]);
+  printf("start init model\n");
+  loadNanodetModel(argv[1], argv[2], height, width, 0.35, &model);
+  printf("success\n");
+
+  OpendrImageT image;
+
+  loadImage(argv[3], &image);
+  if (!image.data) {
+    printf("Image not found!");
+    return 1;
+  }
+
+  // Initialize the OpenDR detection vector target
+  OpendrDetectionVectorTargetT results;
+  initDetectionsVector(&results);
+
+  results = inferNanodet(&model, &image);
+
+  drawBboxes(&image, &model, &results);
+
+  // Free the memory
+  freeDetectionsVector(&results);
+  freeImage(&image);
+  freeNanodetModel(&model);
+
+  return 0;
+}
diff --git a/projects/python/perception/object_detection_2d/nanodet/README.md b/projects/python/perception/object_detection_2d/nanodet/README.md
index 92c456c235..53bb4fc074 100644
--- a/projects/python/perception/object_detection_2d/nanodet/README.md
+++ b/projects/python/perception/object_detection_2d/nanodet/README.md
@@ -1,18 +1,28 @@
 # NanoDet Demos
 
-This folder contains minimal code usage examples that showcase the basic functionality of the NanodetLearner 
+This folder contains minimal code usage examples that showcase the basic functionality of the NanodetLearner
 provided by OpenDR. Specifically the following examples are provided:
 1. inference_demo.py: Perform inference on a single image in a directory. Setting `--device cpu` performs inference on CPU.
-2. eval_demo.py: Perform evaluation on the `COCO dataset`, implemented in OpenDR format. The user must first download
-   the dataset and provide the path to the dataset root via `--data-root /path/to/coco_dataset`.
-   Setting `--device cpu` performs evaluation on CPU.
+   Setting the config file for the specific model is done with `--model "model name"`.
+   Inference will use optimization (ONNX or JIT) if specified via `--optimize onnx` or `--optimize jit`.
+   If optimization is used, an optimized model will first be exported and then inference will be performed.
+
+   For ONNX, it is recommended to install the `onnxsim` dependency with `pip install onnxsim` in OpenDR's virtual environment, to obtain smaller and better optimized models.
 
-3. train_demo.py: Fit learner to dataset. PASCAL VOC and COCO datasets are supported via `ExternalDataset` class.
-   Provided is an example of training on `COCO dataset`. The user must set the dataset type using the `--dataset`
+   If the user plans to use the C API, JIT optimization is preferred, since it performs the same post-processing of the output
+   and produces exactly the same detections as the Python API.
+
+2. eval_demo.py: Perform evaluation on the `COCO dataset`, implemented in OpenDR format. The user must first download
+   the dataset and provide the path to the dataset root via `--data-root /path/to/coco_dataset`.
+   Setting `--device cpu` performs evaluation on CPU.
+
+3. train_demo.py: Fit learner to dataset. PASCAL VOC and COCO datasets are supported via the `ExternalDataset` class.
+   An example of training on the COCO dataset is provided. The user must set the dataset type using the `--dataset`
    argument and provide the dataset root path with the `--data-root` argument. Setting the config file for the specific
-   model is done with `--model "wanted model name"`. Setting `--device cpu` performs training on CPU. Additional command
-   line arguments can be set to overwrite various training hyperparameters from the provided config file, and running
-   `python3 train_demo.py -h` prints information about them on stdout.
-
+   model is done with `--model "model name"`. Setting `--device cpu` performs training on CPU. Additional command
+   line arguments can be set to overwrite various training hyperparameters from the provided config file; run `python3 train_demo.py -h` to print information about them on stdout.
+   Example usage:
-   `python3 train_demo.py --model plus-m_416 --dataset coco --data-root /path/to/coco_dataset`
\ No newline at end of file
+   `python3 train_demo.py --model m --dataset coco --data-root /path/to/coco_dataset`
+
+4. inference_tutorial.ipynb: A simple Jupyter tutorial on using the Nanodet tool for inference.
\ No newline at end of file
diff --git a/projects/python/perception/object_detection_2d/nanodet/eval_demo.py b/projects/python/perception/object_detection_2d/nanodet/eval_demo.py
index 21ff430b94..c2e049efe3 100644
--- a/projects/python/perception/object_detection_2d/nanodet/eval_demo.py
+++ b/projects/python/perception/object_detection_2d/nanodet/eval_demo.py
@@ -20,15 +20,16 @@
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset", help="Dataset to evaluate on", type=str, default="coco", choices=["voc", "coco"])
     parser.add_argument("--data-root", help="Dataset root folder", type=str)
-    parser.add_argument("--model", help="Model that config file will be used", type=str)
+    parser.add_argument("--model", help="Model for which a config file will be used", type=str, default="m")
     parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
 
     args = parser.parse_args()
 
-    val_dataset = ExternalDataset(args.data_root, 'coco')
+    val_dataset = ExternalDataset(args.data_root, args.dataset)
 
     nanodet = NanodetLearner(model_to_use=args.model, device=args.device)
 
-    nanodet.download("./predefined_examples", mode="pretrained")
-    nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True)
-    nanodet.eval(val_dataset)
+    nanodet.download("./predefined_examples", mode="pretrained", verbose=False)
+    nanodet.load("./predefined_examples/nanodet_{}".format(args.model), verbose=False)
+    nanodet.eval(val_dataset, verbose=False)
diff --git a/projects/python/perception/object_detection_2d/nanodet/inference_demo.py b/projects/python/perception/object_detection_2d/nanodet/inference_demo.py
index ae2b7b5748..351f00fd15 100644
--- a/projects/python/perception/object_detection_2d/nanodet/inference_demo.py
+++ b/projects/python/perception/object_detection_2d/nanodet/inference_demo.py
@@ -21,14 +21,23 @@
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
-    parser.add_argument("--model", help="Model that config file will be used", type=str, default='m')
+    parser.add_argument("--model", help="Model for which a config file will be used", type=str, default="m")
+    parser.add_argument("--path", help="Path to the image that is used for inference", type=str,
+                        default="./predefined_examples/000000000036.jpg")
+    parser.add_argument("--optimize", help="If specified, determines the optimization to be used (onnx, jit)",
+                        type=str, default="", choices=["", "onnx", "jit"])
     args = parser.parse_args()
 
     nanodet = NanodetLearner(model_to_use=args.model, device=args.device)
 
     nanodet.download("./predefined_examples", mode="pretrained")
     nanodet.load("./predefined_examples/nanodet_{}".format(args.model), verbose=True)
     nanodet.download("./predefined_examples", mode="images")
-    img = Image.open("./predefined_examples/000000000036.jpg")
+
+    img = Image.open(args.path)
+
+    if args.optimize != "":
+        nanodet.optimize("./{}/nanodet_{}".format(args.optimize, args.model), optimization=args.optimize)
+
     boxes = nanodet.infer(input=img)
 
     draw_bounding_boxes(img.opencv(), boxes, class_names=nanodet.classes, show=True)
diff --git a/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb
index 96af81257c..23c6eb80b0 100644
---
a/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb +++ b/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb @@ -25,36 +25,16 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "b6f3d99a-b702-472b-b8d0-95a551e7b9ba", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n", - "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/gluoncv/__init__.py:40: UserWarning: Both `mxnet==1.8.0` and `torch==1.9.0+cu111` are installed. You might encounter increased GPU memory footprint if both framework are used at the same time.\n", - " warnings.warn(f'Both `mxnet=={mx.__version__}` and `torch=={torch.__version__}` are installed. '\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "model size is 1.5x\n", - "init weights...\n", - "Finish initialize NanoDet-Plus Head.\n" - ] - } - ], + "outputs": [], "source": [ "from opendr.perception.object_detection_2d import NanodetLearner\n", "\n", - "model=\"plus_m_1.5x_416\"\n", + "model=\"m\"\n", "\n", - "nanodet = NanodetLearner(model_to_use=model, device=\"cuda\")" + "nanodet = NanodetLearner(model_to_use=model, device=\"cpu\")" ] }, { @@ -77,7 +57,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "8a680c28-8f42-4b4a-8c6e-2580b7be2da5", "metadata": {}, "outputs": [], @@ -98,510 +78,10 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "e12f582b-c001-4b9d-b396-4260e23139f6", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Model name: plus_m_1.5x_416 --> ./predefined_examples/nanodet_plus_m_1.5x_416/plus_m_1.5x_416.json\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:root:No param aux_fpn.reduce_layers.0.conv.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.conv.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.0.bn.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.0.bn.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.0.bn.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.0.bn.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.0.bn.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param 
aux_fpn.reduce_layers.1.conv.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.conv.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.1.bn.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.1.bn.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.1.bn.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.1.bn.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.1.bn.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.2.conv.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.2.conv.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.2.bn.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.2.bn.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.2.bn.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.2.bn.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.2.bn.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.2.bn.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.2.bn.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.2.bn.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.reduce_layers.2.bn.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.2.bn.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.0.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.0.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param 
aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.primary_conv.1.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.0.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.0.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost1.cheap_operation.1.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.0.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.0.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param 
aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.primary_conv.1.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.0.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.0.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.ghost2.cheap_operation.1.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.0.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.0.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.bias.\u001b[0m\n", - 
"INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.1.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.2.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.2.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.running_mean.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.running_var.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.running_var.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.num_batches_tracked.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.0.blocks.0.shortcut.3.num_batches_tracked.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.0.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.0.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.1.weight.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.1.weight.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.1.bias.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.1.bias.\u001b[0m\n", - "INFO:root:No param aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.1.running_mean.\n", - "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param 
aux_fpn.top_down_blocks.1.blocks.0.ghost1.primary_conv.1.running_mean.\u001b[0m\n",
[Elided deleted-output lines: the same "INFO:root:No param <name>." message repeated for every remaining aux_fpn.* and aux_head.* weight (ghost blocks, shortcuts, downsamples, extra_lvl_in_conv/extra_lvl_out_conv, cls_convs, reg_convs, gfl_cls, gfl_reg, scales), each stored twice in the notebook output, once plain and once ANSI-colored. A hedged sketch of the cell that produced this output follows the hunk.]
-    "INFO:root:Loaded model weight from ./predefined_examples/nanodet_plus_m_1.5x_416\n",
-    "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mLoaded model weight from ./predefined_examples/nanodet_plus_m_1.5x_416\u001b[0m\n"
-    ]
-   }
-  ],
+  "outputs": [],
   "source": [
    "nanodet.load(path=load_model_weights, verbose=True)"
   ]
@@ -616,7 +96,7 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 4,
+  "execution_count": null,
   "id": "9efba6eb-5235-4e31-a002-1bcb6e311704",
   "metadata": {},
   "outputs": [],
@@ -630,33 +110,10 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 5,
+  "execution_count": null,
   "id": "9f083566-3d57-4db6-baa5-0fefdf8fa8ea",
   "metadata": {},
-  "outputs": [
-   {
-    "data": {
-     "text/plain": [
-      ""
-     ]
-    },
-    "execution_count": 5,
-    "metadata": {},
-    "output_type": "execute_result"
-   },
-   {
-    "data": {
-     "image/png": [Elided: a single multi-kilobyte base64 string, the Matplotlib v3.5.2 figure stored in this deleted cell output; the payload is truncated in the source.]
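For context, and not as part of the patch itself: the hunks above only clear stored outputs from the notebook's `nanodet.load(...)` cell. Below is a minimal sketch of that cell, assuming the OpenDR `NanodetLearner` API; the import path, constructor argument, and the model-variant string are inferred from the checkpoint directory name and are assumptions, not taken from this patch.

```python
# Hedged sketch of the notebook cell whose stored output is cleared above.
# Assumes the OpenDR NanodetLearner API; "plus_m_1.5x_416" is inferred from
# the checkpoint directory name and may not be the exact variant string.
from opendr.perception.object_detection_2d import NanodetLearner

nanodet = NanodetLearner(model_to_use="plus_m_1.5x_416")
load_model_weights = "./predefined_examples/nanodet_plus_m_1.5x_416"

# verbose=True logs one "No param <name>." line for each model weight missing
# from the checkpoint; in the deleted output above that is every
# aux_fpn.*/aux_head.* parameter, since those auxiliary modules are used only
# during training and the published inference checkpoint omits them. The run
# ends with "Loaded model weight from ./predefined_examples/nanodet_plus_m_1.5x_416".
nanodet.load(path=load_model_weights, verbose=True)
```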
"iVBORw0KGgoAAAANSUhEUgAAAMsAAAD8CAYAAADZhFAmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Z6xlWXbfCf7W3vucc+993oSPyEgT6W1l+SKrKJJFSqIIseUIanpkBjOjLy1gHDAjzJf5KqCBAWYwGCOgGyN2qyVREtmkqCqaIllFsrxlmsrMykgXGd68ePaac/beaz6sfe6LLFZmFWjUMUAeMirjvXjm3nP22nut//r//0tUlfev96/3rx9+uf+pX8D71/vX/79c7wfL+9f71494vR8s71/vXz/i9X6wvH+9f/2I1/vB8v71/vUjXu8Hy/vX+9ePeP2lBIuI/DUReUVEzovIP/vL+B3vX+9f/7kv+Yvus4iIB74H/AxwEfg68PdV9bt/ob/o/ev96z/z9ZdxsnwEOK+qr6tqC/wb4Bf+En7P+9f713/WK/wl/MxTwNt3fHwR+Oh7fcNgeVmXjhyZfywi9t/ysaIIgpR/c3L4d9DylYqqfaTl75R/VVXyHR/PvwVQBRHK1ysiggI52w/LqvPvt98J4gTK73KCvZ7ymu88qeevQUDu+J39986/vvwMJ3e+a+b3QOlfx+HPVVXekRPoO9/3ne/t+++r3vH1/RdWRJy397XYVDgRbu9PyDHjvGNhWLGxuIgTd8czsStrZne8R8r58LV93+vR8nDe8b6/7z7NH0z5xDs+zeHrFYTvz4j0+75Hvu93AGj+vo9RuOO+CjAdj4lt+86HUK6/jGD5kS4R+SfAPwFY2Nzk7/zX/zVOIKktwKEPVN6TcqLLGYctSAc03rFU19TBzRdNcI4uKwddR8rQpURVBTIw6yLTlOmSPa0qeFLOAHQxA0LOGeTwIeQEk5iYxsQsJjTZom6Cp6k8qkrMMKg8o9ojAuM20nYJ7xziIKdMlxXnPEEcKgBKzkpMGRXwInjnCN4RRPDeHpoTR3AOsN+z13ZMJh1dhpSVnJQYlZwzKkLO9tD7oJovlDve0zxWy3rUbO8dzRwPOywMFMnCg0eW+ejZE/w/f+cbjG9P8aI8+ugx/umnP8HK6Ag5JyCXRZZREl999evcPNin6+x+zbpE22ViTHQx0cVM0vJ6s5KyotnuRcq2Ockdrz1rRlFyVMSV95ft+wVIOdlO0G9qOYHappqylkDAniuC5oSIkNU2xJwSqhlRSCmX3wff/I3Pvuua/csIlkvAmTs+Pl0+945LVf8F8C8Ajpx7QBXw3kPWcnrYDUg5I+IQgZgyIuAVYs6Q7MY23lM7h2pm5D3ZCyl4u7HOoy4zjQlHpgqBKgiqni4DGYIPzLpImxMxZergqYLgg6NJnmmXiCmhKLUPLNQVPjgOZi1kxffbt4otThFb7MGRY8I5QcRyXucCHRGnQi6Lm5xxTsjOkZISnODLaZMVZrEjxhLMGVJSNJcbKRaUdhrO1847TmcXPDllcllsqWzgwXlElJSgTcIoK2jm9mTG2miEDw7nhdRFdsYtu5Mxy8OymeRsiy4r3nuevf9DpBQtdHIipsg0dqSszLqWNiYmaUbXRtqUmcaOWdsxix1tTEzbjpgS01lHlxMpKzEl21hiIqVk7zPbexXV+UnWH/sxJgDc/KRQUky2SWV7zd5ePSD281SR8l8FxL97ZfKXESxfBx4UkfuwIPkl4H/2Xt/gRFisAk0IFvkI3lkguOxAbOd3IYDaYqacQpQ3mQHvBHGh7FaZDoeIMPAeapjmDGpflxOgGXGOREIEGh9wksquLvOUrw5+HqhV6ANTCU1FzJaGxKwkmOc9OWeCc/jg5qeXE0Fyst2/3/GVcvqlkioKbUzMRAg+E1NmPI1M20hSIXYJ57w9WCe4LKizHdMyGFvEKKgo4hyC3BFIJd0rp1xKtuC6BMSMVIH9WYd3wqgJTEKgG0+ZtImdyZhTZaFpSYudt3tc+xpfj2zxlTMsZdsoNGt5XSDiSoqWcXKYAivl1EGJ5R7lnIk50sZEl+zPLEXaLtKVz086C7bJbGb/3kWmbccsRrqYaduOlEsQxkgCUpeIXSTnbL9T7YQj2/16t+svPFhUNYrIPwV+G/DAf6uqL77X9whiN1XtiK3KQk2qVM5TeU/MEUWoJNhuKkLt7OPKOXuEJcF3KMF7glemyW5ApaCuHNsIs2zHsPeB2EbbwXLCe0cdLFjsDQnee9TbjqbZdrfgHOo9KopXR9RI0gwozjmc2ILKyRaqYgVEyhHnHd45HH3NIsSc7QSLma6zlKGvy1QhRktdcrZFmFRxTgjOkRSi7RwA5V7aDelPGO/tvStSvkzAKaL2cUtNzi1OIWaljYmVuuKWjC2Y2sjN/Ql2pHmc2EkevCcle70xTXHiyOWERSCVe5XVNgdxYvWbOtvkkh2R4hziSioqubwPZ5tk2SxVFBFLTb335JTKBqHz1xJTn27l8ppSOU2VLkdsM2oZzyJJk21MMTJLieks8uKv//q7rtO/lJpFVT8DfOZH/XoB3Lzihi7bg3diN0/Ebrgg1N7RpUQQR+M9Hj3cncoOZbWOoiq0KbHXRUpNbvWEKt4J5HIzUTuZvLfCsKTywTsklBqBsnAFomr5upIKZMULDEJFTInK2e6ZxRZuU1co0HYRRCxtKydcTNDGhCq0baKdRVv4YnWY6zeOlHHe246bLCBVLSVLKZNSLsEBsSspGxl1jlAKIe9cSf0s8L048EqSTNSAZsWJBd72rOPY8iJvXt3BIcRZ5Or2Lil1VFVFjKksZrvvbTzguTe/jQO8D7ZpOU8VKpwTahdAPFmhCjWVDzhxeBFC+XsIVdloPK48H4eQEERcWSN5Xl+CpaMignOBjD1zVUt3wWpC55VKhAG1BWzdsDa0n2VBb+vNO8/yoHnXdfo/WYF/5yUCA+9tt8iZaYw456i8wzl7kXYTwSP4UJW6VHHBoznPA8ayCzulskLlHANvu9g0RlLKBG8/KzuhwpEy2NlkKURGDWjIGUthS36LWA3UJZJYEe5E6USZtpk2Wu0BWDGrlGJSmXWRruTGOTLf+VJSYrIdfzqzE07KiaACmbITi6Vzin0eVZy3955zxntXUthDBJBSUyRsV9asiHi886W8st/rnUNdYJJqarF7d313j6Ori4Ag3tHNOq7vT5ilDu+z/QxLPAvAUjNuE/uzsW1+zqOa52iTqBBztGJe9bD2MHiRGDPeCZW31wdW44lzOOepgyd4A1K8eOqqxjtH7QODuib4QBV8+beKKlTkZJmDc4alOnF4H+aYmqXwqV+FfwpN/P7r7ggWhBAcPb7rnaMOgarshnam6Dy9cOUUEedAsVQgZzq1OkYKTisoToRRCHRZSc52ZF/SHsFg4UqssGtLXUK2oHElpbKdyxuogO3enWZmbTK0J1tgWqblmHaZmJLVVEBWoW2zBUZMpBRJZYGrKqmkiikZiiNlIYsKQixQ6TyDtEVYAhjstTtxSI+COZ3D3s4JMd
OdpRtvIQZzy7577IusH2/wXH/5JgjfzDSJI7WDBoY0FaL+RuCilZir2t9G6/JuLp9BXbpFjsr5ISYtjtlQ9qfVLckrvOC16a9bco4MYt2w+ur3cPauNi3rTBfQ9OpN3RbBohnEbSSLUhSfoHQwaY4/uTSJb446DWWTaJnwXOT1uWWgWePzYOX78vk/wcjzg37/+Za5t38QTrF+SDAmpXCBgdjimUSlFel0hdQ3OGQI265Ayj7KqKuM4Ff1HttykpAKli11OQqt9BBcqNkbLLJ97mq+89U3SeFpy5MLi9VYImw6loESVpXVZs9kcGVdjjo4BLDZL7E3GPYxnp1vAHCeDMQKQ8nOlP5kEVwec9/jKKB+vji/zxNoJ6KR4FJQhsEXpTLJaTaPBxDmA7mMpUjLNidTl63po2gnUMmcrbE+n/Ltv/SFPCHzyg59EnC/9mQI1l8KeUF5nwCg5mi2AyzwhV+qmpSPLrNxwxJ0pKSZmXUfblcnWOaPpsKsfi81Sl5KdPimZYQVqzjCl/9KfTjEXn7liuzTHo9/luitqFoCclMm0QxZrlhvzsJ3FTNtlkgqxywyDEfZWdiYcczWXqgrxjsYFPnj6GW5Nt/nNC9/mzNbr/PjJJxmORjQlGKw4LPBxWSQo9KO4g/dk70lF8SviCL5hMp2ZfSRm9iBis2MKE8z6HEXoZ7CT8PDR+/nSjQs8eu111gcPAQGRjASQ7K3p6b0Z2IWKrp3ZruVMSOZK0SlqhfpiPWQvd4XpUPoJKRbSpUdcJsbOtDelJ9EPnhWB8UB4a5T43mrDsyce4joLbFybUkdjHuMEqcskMszmVQdG/ccdQsKGIzl0psjIaiCyqSRdFq7oPp/91h/xyRPHefiRDyLiSF2CTvDRZAUKuCzGi4vWj3ELzjQzTc/29mbAMM1IIxw9cZLd75zn6tPLdNJrVYzi4p3MaxSgmFgUjUyxxe1Hjb+jAVn8zfp1oJKw2TTvvkbvjmARwfvKUI2YaeqKVoRZVhabYB12EZabmga4//XrVMMFXBtIYoW2OsfRhTU+/chPcf7mG/zyq1/gI0fu5+ljDxFq66bbIg3kkisb8QOiQHRiTcdiMoG3Tn5AmGG9ACl2oBYXpc9AgUAVhETwtiU+du4j/PY3P8MvbhwnbKyjUgaT9tJjMeDBS0WrM3LqiuFCLAIw6MdSj1zNm9wmi2k+ymgaUhcLq1bLCHGM3uIcNIG9Bce3hhO+2d3kufNvIM7zzThGU+LEcJGPPXQ/H4qLrF0+IIReSyPkOpmxxYGlYCJi/ZdYGre9wyMY7KvKq2zzx9/+In/9zAOcuv+xogY1rlkSoBFcU5H2OlKr5R5bCpHbwhJXKdPVBJcgV47kMn5Qcfrcvbz83GV+58Q9xWnUcuJCEjeuWQFscjb3mNILwHgdRYN/Bwdszg3rETeUnejfdZneFcGiqsxiYtEFWlX2u0gssGwIjsXKs1oFBsFMvo9d26MLQ0jeSILe7Ik8VpM8cvQBTq+c5DuXnuPFl3+fT596mpNrx41Bq7HQzzMZD05tjF4u9j6F5gIeYvGcKs2ErptRV7UxWVVLelUQtwJtOhfwdKw1S9T3Pc3X3vg2nxj9BDpwEI3aQmm8StHr1HVthSk9vHOI1mR1dAo30r5R62O0hVvkLBICTjOpzfb3piYD367H/Nvt1wnZ8fqLbzObzqBt2X7pZUIIXF5a5vnX3+KLH3yMjz94mge2Wo4cKGs+4IOQ9xVaRwhGsZfWVqNzFZmIThLSWeP4Ob3Kd9/8Ln/r4adZ3ThVoFmF1npaQtm924xJrYFi0tHz3RBBs6WAUinaKLJeoSOTDoxCzSePr/HG1g7fWjt2SFUp7PBe0zJnGEdLjecOMRj7QctpY30n5lB1H/sxv3tlclcEC9gGsHvQWld6wc0hZAHzBUumka4mUwZbU243Q3uTPeO0NJhsJIFjdbDAJ+/7GJd3rvObF7/DPTfe5MdOPsbyaMGabr7/PlM6Omxoq4FjZmbhqgqXPTqbgXOE0Fgjsd+0ipXOfIZjr5PAU/vEI8fP8cWbF7j/xlucPPtwQabMPikhmBZejZKSCs9r7gOMfW3l2HDLLLJutj9VIGmhmTcVlObt+WHmzbMNTy2ts/+9t/h/X/wTtrb3OHVmg8m1a7jBkNxO8YMRUjn80gKxa3nx+e/y/Deew08OaHA8c/o0//CJD+NONTTXpyzOomlwghBS4YqJwIGSJPOlg7e4sv82/8XTH2CxWUfHIAPmUL0RKrGTJtj7QRJaOyRavebVlWkBGTdwsOiQpQoZmQumC5Yera6u8re3r7J1sM3rC6sAJTE8lBH3JYf39rMLmjz/vFCygAKEWDtND7U/717f3x3BYjuoMp3FIt5SKi+sDAKu8tTeM3A2JdhfuMlUK8ZqlP1MfzIkehNpV+ZzIMqp1eMcXfwZXrp5nn91/g/4xPo5njr5EKGp7WSQXKTMziSxItTB04rSKkZE1NooFwjB1+RUuv1OikzY7JU092pJh/eeAZknH/wov/udz/H3N8/gl0YQjfAoGK9L1aj3xa4SChVGgid3HU4CTXDEypSSmWxpnFMInltM+dIR5SXpkKajenKZ720dkC8ecGJjiXZnh8c/8jRN5RjfvIVbaNCVVQZLQ3b2p9bRLmKyg2vbfOmN82wPHEtH1jm4dJMHhqv80ur91ONICg7djUi2OuH3tr8H8RY//9BTjLplg5A9ED2iDu2skLcSooAiteAqm+qWRXFNqbM8MHCw7q34r+SQxRzVdDa148yZk/zS6xf53OYqrCwWNa3QKTaJrDRkWw0EEUQTs3aM9xVV1dhYdjJ70wld7ieFCaOqwqG8Ud3lJ4t1Za0gm8w6umh2NdMuszqqzTEyZ5YHFXuvXGDLrSI4fAhFb2IWQ0lNXRh8MSvAkBjvPE8fe4j7V0/xjQvf5MXnfoefOvUMx9aOUNUO9YFQmQCs13EHb/T/SVJ6spSolh3T03UdvrIGocGT4IP1eUSMnOlE2Byt8Nbph/nGG8/xscc+TPJFFSmYzqVPAETo4ozQNAhurl3pJdV7VWaskUUprJfgcZXndw+u8N1Z4tFHz5F9hbpMHjpSUsZZcYMFbu8fsHF0ndMffQoqRzUacN/mBq9+903UCdcvbXEwnrK5ssREj3Njb5fL5y/SrA15+43XuP/H1vjY4grN2OI5xsRnb7/Eet3x0bPPULUDtLPuf2ooMLtDklLGHRTEqzxtZ+hZWKqsXvBiDjFLpuNhpjAtqVHlYVrGZniD1x999B5W3rrJsY+cw48aVE1inHPi6o1XmY23uefeZxEN3N69wMFBy8b6JqPBBqqJyeQms7ZlOFyhrpbw1chEe5L53VHzruv0rgiWeb6YkhloJyuoZwJ7s0iXMoPK045nrF86gPUjlsrkhO9/gID3wZjCiJH9nENcMaiLmbXBEj/5wI9zYecy/+Nr3+Sha8t8/OzTLCwuIWmKa6rC4q3ICJIyQzFtRHbWcReBRIcnELsZtdT0qAxaBomm3mpUqZzy5JlH+fyN3+LcjSusbxw3zlRV
uuRir7OqGqbxwAADZxqaTiOT2DFOkUuzCS+0V/m4Xyd5mAZlp5nyfNpnbyexu3uLLnf4oeOt515k5623GCyMaEon/+KtdW7vHTBaHnHsyArfuHidsydOMlqoOL66wivPv8KlV1/jYO+A2HWkyYzujX1ElX+rn+Xg1GM8uXyGtWrE7958kfsXHE8efxyv5h0sjSc7kFSc/0XQxqhMkoXcJvzQI85Kc9cYD0yWgnkfdBmZWpGfCy2Fg4wsCbmMLZRsFBw/VY6fXOXa73+TYz/7EXwdyBpxrmJ58Tg3Z7s4PFU1YmXpBJtr9+FcU6xeAynDja03OX70QQbNOl5qQObtgHe77opggdLR9uZCkkStllAYzyKzKMyS0ly4zENh2YRM9k1QuEnOmV+x9NBhMbYga3GNsZtQ+8B962fYXDjCd6+8xL9+6ff51LFHeeDkA9RAGFZmGiE6PzVqJ0xjz4PyQEDp8M7TdS2uqvDF1E7Ke3GocbZiosnw0JnH+a2L3+CXVv4qrrFutgZKP8VGt+EwfpcPRmAUYVA30FU8tHEvqwRy5bh4JvBHeo2r12+xtAzpxg2+/pnXeOwTz/D8577AzsUrLDgh7d5iHBPiKkazDjfb43bbccPVyHDIef8Co8UFzj37BPc+8wgn7j/NC5//Em88/yKxPbQxPX/+PP/yxi0+cu4xTh45zhOLnifOPILbr9BGCpJUWM9q6VMeKGHR21i+oMZCLv0fXweoFZYOB6uSiwo2Wx3na28/e2ZpJ0GNChMEjRCiY3NtxK0/fJ7Nv/KEETkls7SwTNM8RkwtMe5ze+cii4snGTRrttGlGddvvs3aymkWF04iYuCKc44US5PnXa67JFgKEoLOi9ukRZvuSn6rysYr11laPG4LDEvdsrMzXgoUbM70BldGNZ2Im/sQGxvYoyw3Qz586glurR7nj954nvMvXOaT9z/Jmj+Kr5jziIQyck+tydnPUxcxvpikSNfZSATvjMBnVD0jbIbgCcD9x+7l7e2L/Mnll3j2wafnrz/lOE/LfF2ToonfpMgOUowk4NTqJrK1xcubkZfPerrrjr39LSY720z3ZjCZ8uoff4PZ/hSycR+Hi2sokfHBBE3K7Vu71HXNvedWWHnwYTbuOc1bL7zKdz73RWR5mZ/4ax/jQ3/jp2mnB7z98vcKUmQp7e2dbb708nMc273Bxgee4b5lYZQENzNrpf7YdZUFhLoyunvBbJd8ELS2HocsWe+GaHSj3s3eMoKMGxX4u42l+SkFLlckGh0qo9RLFWt14uaXv8PqRx6l1SkOoQrmooNUbK4/CMCsO0DV7Kaqeo2VlZOlDh3PTcdTStZ8fpfrLgkWi2YT9RyOLsup2OkoVNt7PHwguKHBw1nKdGIxBMoVVrHrN4as1kCTXJAP5gxZX8zd6ipwdPkYP/vkJq9tXeDfvPZlPnbzLE/e8zDVqME1TemKC8NQoRTZQI5WnHuP4qmyErspUtXgglmCpnQ43aquiLPEh+/7MF947re4b+sMy2troOArj2ZDYYITcyVxDueq0s2H16+9yeXxda6erHnTX0VfjjQp8ca3X8GLErsxIQyYTj2pmzI9GANKCI7BwgDnLEWdzSKaE5fevMb+rR2qreucPPsgjz7zONcu3eK7X36Rh598iE/8nb/Fl//9r/HWy69YZ10Nat872Cdcucq1pUvciMucHByh8YUjF0olP/JIdQiBJ1V87dCm2L52Cd2PxY6Jud4nJzPDyykj0TY9jSW9K26RGpy5ysQEtcepox55Vr3j4KUtNj9ogRFzoq6BnimL2scFRn7g7FG0t60SS+cVpfY2bezdrrskWAwC9IghTCguFZsayWQHZ9+6zvrqMq3D3qh4UlLqEEgYGc4XKNfUe5nMHQo6EXwRTWlxYDFXfqGqGp44/gCnVo7xrbe+xcvf+Rw/de8znLjnDKDUzpC3KJlJ4XGZy0lv+empa0dKHUTTfVgQJ1A7gUIQBtKwfuRefuX1L/OPnvo0wQ9LXSVFgOZQFWLXUg2CgRiu4sETp3np4nUu59v4SzfYvrnH7uUbdNMD2mRo3KTbp6ob68+4TOw69vZ2yUR87VlZrqgniclEIEeyJvauXaJKLTe2T3LsyUc498R9fPtrLzLohvzU//xv84Vf/U+8/q3nS3fcdvjtg31+6/WXadZX+dmNI1SVx2WMU6b2JFEx7hiCi8m4ZouO1NqwWukKEzz1ngZFgOWxfou39IvaQav4gUd9oUlUIMlDHcqodc+gFoQJs9evsbtZ8Wtf+yJ18IirqH3g733skwYl97w5FfrhTi9cPM+4i0xiy4mVNd6rZrkruGE9DSXdQc2JvauiKlWnfODWGBktkDG2qi88r1hMGlSFTrToGDJCplIrOCnEO+cczhvMrF4sZfKumFbAarPEjz/449z7xEf59SvP85XvfJVu94A863BJGbqKOtji7pstnhIwYqzn1HVmqFAKdyj1kmZmsePBow8Q6yXevPBaWWBFgOYDqKMZjoiFmkGxZJo08KJENh+4h4MxHOyMGW9vE9sZMbZMJ2P7c7CPdzY3M2lmMjlge2uHbjwl5My9x9YZDQNdGwkqbC4PqHXC1ve+yzd+7Xe48uLbPP3hRzl7/yncwoD1Zz7AvU89OqfZ9IX3NCvP3XiTG7JLbgrtBWMPaCwZgSrSKG4zkJuCIiagLX2PlJGpOb3kg4xO1dxhJmqjyDuQCUjtyQkbmdGKDVrtFagz4EYLe5k6Q7V9k+biDpPxlBtb25zeWOPU5gZRu7lGv79yNhDp0s5t9mctp9eO8r0bl+a6/x90/dBgEZEzIvIHIvJdEXlRRP435fPrIvK7IvJq+e9a+byIyP9dRM6LyHMi8uwP+x3GDzx0O8+lLrBRII5TV29x3/Iq0dkMRxviqJh6KiOaCC7jRQutXnGa6f8v5Whs+ULPpn9YSGE5G2UlkahD4OzaCX766Z/h7eWGf/+Nz3Ll9fPkyYQqdwzKOIoeD+2n4BrJxaTKB9OJ9S/E0pfUdSRxjJqGI0urfOyxT/DVq+eZTg7IKsUEDyN0hlC62uYr1Hn4k9FtpIl89/kLXD6A/du7qCZSiiCHDOpYRnWHuqb29fzEmk0m3Li+RRsTg8aRupbdvX0uXjlgY2WJB04uc24t89JX/pCXvvk8a8sLnDh+lJH3PP3pT7KwvnrIZk6Z4M0e6sXLb5rzTSO4oSPXGb8YkKHDbXhYC9aEbJW8ZyeOnSgG8VNoNK42oqcWu1fK5ieh5985XChMZ8X8AKYJ2Y/oQUIPIu0sc+XFt3jhf/hVxt/5Hvtb2zxx8l5+7MHHCK5i7iVd/hha6dhcWkIkc3b9BJ+473GTefxZgwWL9/+Dqj4GfAz4r0TkMQ6nfz0I/F75GN45/eufYNO/3vOy5aY207xnANt2hlPlY9d3aNbXMWdNQ14SNpjUozgycdbikgWMFHSs74ZTUrBeYKqa8U7n/CIRG4ZjtbnxigaDIR+/71nOPf0pPrt1ni986/PMdvdocqRBCWKDkCRUBlk7wHtCCNRVRdYO45kbHyk4GwP
nvGNzcY3Bmft57sVvQLRmp6OYWqhQDYdWl2XltekW37j8Frev3eL2fiRub5Mme0xnE2LqTH7sPCFUiDPfrGpQ0Sw0ZaSccaIGg4Z7jq3hkzIc1OzsTLi5c8Arb25Rec89xxZ55MwiF776Nf7g3/06Ny5c4oMffRRf1UgzxFc2isMFz2hpAWmEb1+4wPb+2Nxd2mwO/OuBcKQ2Ov5uRg+S9U1ahXHZ3b2RWXt+WZnebfXKICCN0Zi0cTDwuJGfT6PmIBqxM8E+ke/evMpvfPPb/Mpn/5Dnrlzk7GNneObUMdzSIr/6tT/ixQuv4uabGkUpm8spKQwELly+QttN0DvskX7Q9aN4HV+hDCNS1T0ReQkbUPQLwF8pX/Yv+XNM/9LyP0aIywQf5o4dx27d5ul6ga4Fl4wl64tXq2CjCRQzv86YVY+ixK41X1zNeAnW0U86Jz7Gsjh747ti12B5bWEZO+c5uXKUzWd+hucufZdf+cZn+dSpJzhx9kFSFehH4RlLTObjwr2YOm88G1M3A6rKxir4nl6fM4+cfIjfv/Ab3HfhDY4+8Mg8/8wYzWfSdkglnN+/xc3tXSY5wP4O4fZ1Yjcr5GMDBNQF8yBDaKdTmtpDjoX+oUzbyPWbu7zw6hWauibGPU4e3yC1kVu3D+jayOpyxWA04qmHj3NzZ58X/+grPPPpT7F6bJWcYbC0gteOHBMxRt547Qr3bG6yJ2NOLK3BquKXgnmLTY13ZfJ/8x4jqKVR4nBeiWTbHMWZE2jjzN62MXg45Wwj+ryZWNBmcgU344Tzr1/ltWvX6dKMM0fW+NDZE2wujew51pm1/QUq3aZLaoxz6VX4zOe79DNfjq0e59TRm9wa7/Cty6+z307+7MFy51XG5X0A+Cp/zulfdw4zqtePzMU6PSyMOJwqn3zrFgsnT3Jztzv07K1MX6/OkR34TKlLIr6q8ZqsT6G5zDSB4CrQVFi9ghTjNjOtKEYWRdIrWJqWMUBqEGo+fPZpbh25ly+8/FXOfvMSH3j0o/ilJWKxNw3OHocWH+Hga4aDftx2Mvo8NmRIxbGxuMbmmYf4/Gtf52+duQc3bOx0E6PCe/F0uePrty+xe+MWadoi031SOyPFznhUZSSG98HAkJToNLK/P0G8UFc2szKJ0HYtF6/eZmFYkbqO1aWaDz5yjr1xy7Vbe8QOxuNZ8RtXtq7e5Cuf+wpP/sQHWFpc4GCr45HHH2Y6HvP6+bdZGDaEOnB9vM+5pvRH2owmOzhEfEEPTbGqiaJSzUavKcpYdUDjcNmR1KxjUbVnnIXZQcvl69u8fPESl3a3WawrHjq6yS88+QgLqVBjAD0wB85J7Dh+zymqSzuM6obHTt9f5AW2AXrvD8VhKCujJdoc2Bkf8NF7HpxT/f9cwSIii8B/AP63qrp7aK8Jf5bpX3cOMxqdPXc4Bij3dHPl5M1dPlwNEBeYFF8n7735HIsZghscWP4iYnMGtTK3xhRx3uyAUteZXVLhX6WccRiNQrwhK64MVe3tVBFLhQQTS20urPMzz3yaFy59l1977rM8vHmGhx/4EDoIuFCRNZGyo+2mDJrGds3kSDninTezB2eAgMPx9L1P8OuXXuaVl7/Dox/8BE4cXbYmXAiBtydbXN/fY3b7FjqbIqMl4nTHJg+XvtFgNER8jZMK383outY2ic7smkZNw3BoXmWTWaSNCZzjxq19NjcXeHDhOBsnNvmDP/4Gb18fM51MWV5paNUT93Z54+svM5tOqGrP0eOrXHhtQtfNePDZR7l9aY9Lezu8ef0yr79+kXvvu5fjssRSM7SzVosL5CTjh95MABNI5W1qWJetCaygNUYrQph0Ha9fvskrb11i7+CAIyuLPHbsKH/l/nuoxCMpk6dWJ8aY8INgiz9l0iSytnyEY3urXN/d4V/90W+zPBjwqceeZX15nZRiqYMEUTv17l0/zgtvvsyj99wPd6zrP1OwiEhVAuVfqeqvlk//uad/HUYOxbi67O5ArZlPv3GNhTOnGc8y06QgpjBMKF6AAh1Gyg3AUYVCOOy5VyKmVc95rhXJKZOzfQ+hKi4o7pAUWVR9PRMVBzl1dto5x+MnH+Pk0hG+9NpXuPT13+VjT3yCZnWVflT3oO/P9MRIsnX6vb0OXzaDzYVVHrzvSf741ec5e+MhRpubZi3kHD5UvD3Zo4sd0kbUBzRHSxFLeuXF40NDqEckbYkoqDVJTQRlkOhDmxv8nZ99lstbB/zhN17n2q0dFOUPvnyejz52Dw8/eY6PPnmW+uVLvHwhMpsqPndcv3rFrI9wzA5mvPbqJa5dvEI3bTn/3Td45OwZbsV9fu/y68zijFdfeoFfeuBD5NTYQCfnUK/QqdnU9t5l3rKDKJGUFKk929sHnN++ycsX3qKdzrj/5BE+fvYU666hqjzGlLS0WQcgtRCnidRGvDO/ARVB05R6oeGRM/eQrlwwh1PvmaaOHOO8wO/19yKOx0/dz7GVVcQJC9WfgxtWZtv/N8BLqvp/veOffgOb+vXP+dPTv/6piPwbbKz3D53+1VNEUEPFUHj87Rt8YGUVqWrasRVdSU2brhnzrXKmIQ/ii2bBzce/+dxrQhTV4pbvzI0yKkiaEXNC2mzjuLPRY7IUTyqKU4raaFdVCGJzGH3wHFs9zt985ud4/tqr/NqffJYPnH2K+048QNXUhN5EzwYkULmKTjtybBGp0OBNE4Py+PGH+dL+bb7y3a/yUz/x81B6TaJA3dgm4D2aOrSdYB5oAAYWOO9xweNlSFUFWhGEgzLoyZNi4uLNPW7vz3jq4dMc3Vxla3dMmkWGiyPSZMru1i5n7ruPlbVF6vAKr1/bw6fMkw9v8o3nLzEZT9Gu5ebl6+ztHSACbTtlGDzjWUuD1R63DvaZxBkrS8vkccLXFH24QCNzvbjN1oRbt/d56cpFXrn4Nvrm6zx1z1n+2sc/yOrCAl5topdE6/Fo3TM0jGSaY8LXoTSjhTTNuEVTumpwfOjBx/jA/Q9b6uv+9DK/MzPKCmujVUSE4N89JH6Uk+XHgH8APC8i3ymf+z/zFzj9q8/fbOgmrI4n/PzVPQYP3E92nv3Y9T1+9tspXioG3nozmntjBkfGZqD4oowyBXExMSiAgfawofe4GC0VyxGJkeyM9yWSzW29IG0+daWQp7ABIuIrKlfx9KnH2Fja4LMv/wHfu36Bv/rEpwgrK/b7y+mmmgmhMsFSSrgYUWd2RhtLy5w9cpbXdv6Eh994jTMPPkTOkEQ5SC2pmxkbgGzBVgDMHtnr57l4F9DsqAdY0EzG1FHJLrI7nfHL//FrHFlfYXF5xIn1RU4dXWRxEDh1/33s3trj2L2nEe149qlz7H71Za7vzbi5PWZzrWE6mbDXKtvb22SF4WgEInztxZc4vnaEh+47w/54SiRyI8+Qg21ONasGZjTevMiikINybXrA869e4NWLlxiK8NjJo/yMDhjt7HLs+Dny8jIyMY2/quIGgTSOVstk20hzF2HkobbFnZ2SfZECJAqy6ebDsHqdSy4TBe6cT/lOt/6e3feDrx8FDf
tj3r2t+Rc2/cuXwj7kxN985TInjx1Hs9B2iWlnKVNwxbDOFygwW+/Ehq56khhrNcXeYd12PMm9ztpEXeRcxk4ESw9ywAWly0Zn8Zjjex2sp5OlN2Irw5VEUC1DcpxwfGGTv/v4z/Hazbf4zW9/jk+d+yDHT54pPSHbDJzzuMqRU0dKLZIDVDXeOU5tHGPaPsznX/kaf+/kSZrFJUKhXeh0YovAC5rN7rXvqAffUFUDQtUYfCsO7x3J28jx2HXENEG6GTu7O8ymU45O11gLNcMTDQvDisFowPL6kIOtXUYrmxxVOHf/cc5/8RVu77XcvLVHbCOSzSXFB8fy8ojNzQ2q4Ni+ucP5q1e5evUaXddx8do1zq4d5Z/9zN9mWNeknLi8t8vzb7/N629fZBBnPHbfWX7pIx9geXFIGmd2Z5dpmnuJN64Q3L0kzP1GADotREpBZhQipX1OR4JOxVK+WCgZk444mZW1WGjiZTeW4qI/F3vdQZrMmgk/4AS687o76C4CqkLlhI+8dZln3QCaGhVhf9IV50dFc2TkbCKYxYIRFTWneaNQMY5RKjWJQ02n3mvUXQDReZGc1eDcLFpSapnruGMXyUIZFa6EYnqhUmakiLMF7Byry+t8eGmFpYUlPvPKl3ni5tt86PGPQDUkVAYgiEaCBlIXic4mZokLHF3Y4NroJjunTvP8i9/igx/7CZz3dLE1czjN5BhxsaXECYINKjUDjUKFL2Iyj+Cqipw7BurxwxGIMhwt8MxT9/PUoydZHi2wuOCoa1g7uU53MCN3HcPFNe47dZRzp29waXvGwd4BB7enRhVyZlO2uDigbVuapmHt2Do7e/sctDNi11F1M7QRrk72eO38Vc6/9RqDSeTc6aP84rNPsLK0YsI2zchMSZOINgF34hT58m2YtkDxKaCgaMV8L6d0OKjJCZJKNuHLqRCEXMNsMmGYZ3OpcY5mcOgKQmfeYWWep6bi0p9RUWLq3nWZ3h3BoiCi3L+9z9+4PqY+dQLnbGfcP5iSMgQxyv6wsiBSMeq9z4mIjaroj11VCxPnwBUUTXt2Mlhw2hZth6/zOBvRhVJG8blgwys0I11nNJvCdxI1aoZ4VyBcIaiNj3vk6P0sVkN+75Uv8vZXfpOffvKvsLqxYe+zvI5QN6Q4ZdZNaGSA9zWn1o9Ti+NPXvgaD2/dYnnzGAKkrkVTi+aW+UxrozwQmiE4RyKVee7MZyc6VRZrh4RATMqsSxy0kWs7U67uJ964tUUlmSfOZRY3VhhuHKfb3UaccPTYUZ55aJv40iUOjq2hs8SN6zeoRyOawYBRM0AFLl3fxyE0o4ql1VVm4ymnTq6zsVbz5T/+Ix5cP8rffeYZFheWjDRaO7RyRfot4D1ZWnIHcmQNfW4fmc2Q0ciallHRBFqZB4AUIZk22IY37ax31glUnpQyOhrwW1/4DHvfaXACbYz0jqYx2emT+55WcdrPKRfkM3Nja+tdl+ldESyVJk5Ptvi7O7usHF1mYX1AVQ3Y356ZEXhZkKgynbXUdWVQLBnnwqEfcQkMQecM3n6mydwoocDTodBcbEQEfYgUkl15Yc7ksU4UlxIp5jJhWPCqtDEjvsJJgUUVHImzK5v8zSd+km9efYlf/87v8Il7nuah+x4tfAmDv70bMW3HdGmKiLI5XOLK9jXOPvwkX37uG3z6J/8639u5hhRnfCdAqKHt7GRxgZQj+3tb4GFhMKKqa3A1znuOLnhWfOJ7b16k7SJ7kxlpPOZL39jlxfMXePjcGT746Ak2NxYtrVWol5bI430GC0s8/NBZnMscXa74T5ev40WIbaQZKDd3JxzsbuPCgMFogHZKCBVuwdOMRtx78hSfPneOSp25WmalH0DkFBuAVNtg2Pago5soceSoRovozgxZWUSymFm7NydPHZsc21UFTWu8mfp5u6dS3P4HSzWzNybcTDNraJbmo7mIqlFpbOsjxkKK9cXsT97bGPyuCJYkjvH+mFffOM+lzQ3qnR2OrC4wbqdc3N9hezJjFEbUrmGS4MzSOnW2xl1yRojzviBMmoCMOo9TrGnpbdZiP3cFOXR17E30nDf9hXfGCkDyfCxfn7J5BxSrpDZOaXPLUITswMUSqMGTRTi2uMrHzj7NK4NVXth6ize++gafeupTjJaWEFWyKIN6xKwdM5nsoc2A06tHub2/w7fDazz3+gu8fvsG5A7VZDVTcCWQHRCZ7m9TVYHBwgDVMSJmqudwbKwu8eGHV3n75g63r92kIzOoFxjVQ549d4a//hMPcnRpxLBtYRrJ9RgJAYJRd0bLqzz80H0sL4744pdeoV2u2W9tXMPk1jazyZi1zQGjxUVOnjyCCkz3D2inE2LKdopo8VXzd3gg52yEymmh3+cy47IO5JNLpFd3CfcdNXZx0a6IQqqt/MaVA3Zq6Vcqt8NVjlQLjQuMsmN/0hHKwLDcN+OEMlZCmR50dF1ksFCXzKbIQt7DOOyuCBYVIW4c58GTkNqGvFXh9h1pzzGctOztjLk9vcJePGA/TXjVAcETQkMzWGBUL7A0XGJ1sETjBoxCw8B5AwQKwuYKOKCqZo0abES08/0MdDe/YeRM5b0NK02mf9Hi/IITJCcUx7AekcvJlwGfi0kcjlx5VqoFHj/5AMNBzaSd8T9+/TN88v4Pcfbs/XOmQN0MmUw6ptMDFkaLXExjPvLUx/j3z/8hk9bETLnYLdFP1xLF+wGLy6u266aO6XRKSjBaaPjZD9zLEw+fxI/3+S9/4VP869/+Ort7ExoRji0NObc6ZDELDRB8hbaRNJ4QGn9HPZxplha555F1fvKT1/jlX/5tFpuGrS4z68zRf/fWDpNpy2w6YXllico7Yuy4deEmacNqwFw6974qnmvRUEwJ3tQLpfcSHXB0QH5jgiwGc4UhkNqEi4IvgaViLI6k5m3mxaHenquLih9VbAxHVHnHTjMUjzdPBCc4MWMUFRiOBvggheDZo5fvvk7vimABmLrA8uk1TqSaWTVgMBAuXRmSB4usnDqNy5AQ8x6eTplNJkxnB0z2dhjv7LNz7TrX4pSxtkQHoWloRgssDJdZH22wPlhisV6gCTXDUFmjzEFyVoOoKwbilGMfg3mhMASUQlcxVMWHymBLdD6r0OguGRMumcpz0TU8unkvr968wMee/DhfevU7XLx5kY8+9VH8YGikm+Ei7Wyf1LWcWj7GbpwwOnEGffMV8JUVtQnzas4eJ8ri0jK+HtC1LdPxlNhNqetMqGYcH1U8+sgJ0MjJg5YQP0SatlRdy8bygNGwpqkDlQSca3D1EJJND5Yq2WwaoJ21qHecWBlS+yFh5lhZHDJtp1ZzVI7J/pgbbWRvZ9/QtbpiZX1AisX7wIkxiZP1wCSIeahVAA4/EFxlUHkaOlxONiJ8QRAXcElNYelB22RFeiGtUovpk5LaaD4BGTiObaxRX99mmnuCbi7wsTCdduSoDAZVGZtn/bVD4753v+6SYBGSCLcbz9F9oakdvglUoZ1/xUHXcnM24fTqOs1ghcHKMivI3OfJ50K/bzva2Zh2csD+/g47t
7fYvfoqV7oJ49zRVko9HDEarbCxuMrGaJ214RpLw0Ua8Ub4owSGmumEGbYK6ix4pl1XQAZDynqKTN8QRYwI6AtMXYvw8No9vHL7Ah9/7CO8fu1NfuOLn+FTT/4YR4+exHsYLC7TTsasDYZcvHqda+NtiBOTTYfajPR6A3FnaeVscsB0vE/XTgElZYyHnRSdTQkrCywPFvn4jy0xvnWDrfPXOdiP7McpPozQkbOhT6Em01kKq7ZJSBUIvuLmpYu8+eoNfvpDD/PW7SnfevUNSNEcdQTI0M4mjIYDJvsHDNaXObWxSRhWkBy57cgDT+XN/V+qCinDgySYLVKOkJ3SBcEfH5CuHOCfWkWjLfScLWD8yJstkmNuvRqCx1X2/a5x+MXA0ZPrLFx+gwNRxAb0ICJMJzbRYDBowGWqwhbpdUfeHw56/UHXXRIsoOK46pUnBsH0Dd4aS7kU7LupZZJaM/tWy1d93+kWAQ8+ezR4mtGA0cYmqwhngYTSdR20M8bbt5lsbbGzs8XOlQu8mp5ngpIHA8LCMiura2wsrnNseYPVapkGIbiAVN702UmZTCcsDEeEYB0ZFSCbjNiFCkGt6Vg0MkbIFB5dP83re9e4/+g95M3TfPY7n+cjZx7lsYceR52nHozQOOPMyhHkxsu44HA+UC2u0LUVVZwS9/bwocY74WB3hy62hODxfshoaQ3vPO000+7uExaGuMYT2xk33rzB5Qu3uL0baRZqUjTNvxslkt6mWjAvMlGPDxVSNWVKc0dVK3vTlis3d02Q1hlx0heVpyuanTp4NpeWqYcDCB4GnmoUSCTG047RaMFsYh1WTDqHL2P5kkAOEBfBXdvGhXUkdbDgzLSiEB/90KET83fzlSd32QzSwXpabWZ9dZnN7LjmjBqUNdPNDOwZjnp9sUHSzlkQG/z+XqFytwRLYdpe66w4902YD8bpCmW/9hVnVkY2QMiZPZEWc2xVxYeerWyFoBYNSe+Q7+uKatDQLC+zefZeTuVkE4bbGeO9fSbb24x3dtm+covd9jpvM2PPJdqq5dSRezi+fprNxXUWQ80sRxbnxEjmJhY9KtePsbYzypO9Fri549zSUS7u3yII/PzH/yp/9L2vcvmbt/jU45+gWqjIzrPsKz5y9AyfjxN28x4km9+epnuAjeigzH9ZXF4zTY8bUfmaELzVBr5BFjZhuosLDr+5xm/8d58np0DdNDx071GeTB0nfWb52BJ5YgVwGA2tqYkjVBUSPM2w4siZdQa3D+huz+anZ7+ygjpi6vBOuXzlOv9h9lXaD8KzJ+9Fq4L+ZRgfjFk8smDDmDol73XUgwYqU8bOOkErIXYdQydkMbGYdECxYs8pkasyXqOkzao280UPEoygXqh4aG2TV3evk3JmVpz2B01dCn0xy19nGUGKCV88Fv7cRMq/9KsssitZ8Ml0GYhx55zzZIRRMzCaffEpVu8o+IoZv7XdfDBn5f2852KzSYyXFAuNvkdGcghUdWBxYcTi8WPGM0od2iWmBxOm4wMO0gE7N66y8/J5LuYxW01kFpRHTz3ByeVjHFlYYxhqcy8pZn8O+30+Wz2jYiTOrNbRP7mwxo3929zYv8lfe+yTPHf5VX71a7/JTz/+STbX10gID6wc4dXRAftff4Vc5rCEklWnlHC+YmXtiHHfYsC7Ci9Q4fn6y5d56PiQlKZcvXWA7E9Zv2eD5WPH+d7blxgcTDl4dcz+ZMwHZi1nZhNWji9QF8M670d4V5PVvNw8cPvmLm9fvkbS4necbHaOaYksJZ21JsA78cg6v/Xcd3hw4whrg2XEOUIt7N86YPn4klnOTpM1T0cBV5xyyEKuPXWXcd483HJO4CG1nRnu5YyLGRFPGkdkKdhcGW/9G+0yaa/j9OoGC1tXuamZGCNNXRVXzEJzwZA2V7h3OClUorscDeupB7ed6cPrYnvTZiGXoaLOlYGixcmlt0lyZMQlE3gppBTpklmoxpxNKKaZEKxzbwFkaZ/53rp5o880LRVu4Bk1NcO1ZTZEyPc+BEmZ3t7i0tULXNm6BK9d4IXqbQ4WPUuDBdYXVjm+tMGxhVWWBiN8odf06aSIlLTFaqAjS2tU011e3XqLp07ez+nVE3zupa/yzLFzPHzmfgbVEveePsnLX36BpNA4OHn8FK+9sWcLSAQvDTmBq4INHHeZpoKt3W2+9Z032frqd3l1e4+feeocZ556mP/yH/4k/+KX/5A3Lr/NpNvne1cTkzThI3KWcyNhbVCZVEHL1ABfcfHyNr/2u99ia5zY7WaIE7MZ6t9fbwABiPdk53jxtTdYWVnitctXeKpeoG5kPvFZJoJ6eybaOGQaqRcCs30bY1c5h3OZFEGGNa6bkcTGUOQuwSQRluq5C06eKm6pQqfRastkUPHmiU1OvOa5kiKV98U3zp6/D8G+t8hBfHGoid17T/66K4JFgRACB9Ezbac0zsbQdW0k51QgP2c+T2Vib/Aej9CpGUUIGadK5R2arHFoebWAllkcZd6IUzuGyQpOSSplsG5hHfccMMrkKCylG22u8eDGGg/6p6FNtLf3uHX7Jlcnu+zsHfDSlYu8kKa40QJH1o5yZv0kRxfXGFWNoTXR7F1BCWQ2Bss0dc3L19/kgbWT/M1nPsWX33yRm698k6cffpKGilOPneXW1W3yxbe5tH0V1LwDPMLA13RFFgCJPNtD85Td8QGXrggf/uA57pu2nDt3FGXC6XuW+N/97/8Gv/Efv8Uff/VbXB/vMr02Y315yD0PbNJ1NkrQp2Q0Hm+S4rd3x+xOOxyeugqEYCO2RTD/r7IYV1dWOHVymYsXtljxQ17Yvsn6wVFOs8zV67c4eXyd3Fod4Ye1+Rt7oVoa0u3Z2LsYHVXtYJyR9RqGCYrBoRdHDOYDYFPECs49y4hC7CJePTJy1N7z0MoRXty+RBIteiWrcw39Mj6HanERKuTK99B+3R3BAtYMiiEwyZHFlNDkmHaJmFJZDI4qFB5XLu6HYt5gZhdaxjj0PZVsFBD63knxuPClphC1o0Ryb8pXpq0kCtco2785jw8BNCFA7ky77b1nsLnGmePrnBahjZGt2S5Xr17i4NJV8stv8l3O87WhsLCxyonNM5xaOcbGYMl2z1yjCqu+4uFVz6u3L3Jm5SifeuBJ3ti6zhe++xUGKw/y2OPneHX3Oa5m6+QjzBWHdQhITjYFazYlx5aDwRAWVnklKc9/4TuMlhp+YXPA6Pgiy5VnbWmNf/BLn+Lcfev8q3//OcbTfV66eJUP7d3L4kZDlRqTdGvGaWZ5bZngAynPcM4xm3XFCsr6Us1wSDubEBTUVdy8NWV1Y50jawMWFis+/+qLPLt6kntPHqceNEXhCnka8bURJOvlAQdXdwiNJ83MODFvj5EjjWXarkxEzhlXmzpWvDns+8YM/qIo0jgDU5KSu8SDZ86wefsKN4qZnAWCZSoixVMOYT7064es0bsiWASrqzpxjHPEAZMuMu5Mo1p7X4YFGeXeO6Pa95Y4pltxVGWj0TIOrde8a/kFZdapzZnXhIqb+41lESRH05tk
g2k9hcmqNo5NySTJeIKNiVAzdHNi6cOx4TrH7t+ku+9hLm/fYHdrh3pvit/e4eall/ly/CbThZqN48c5c+QU96wcZzFU1GHIYxsP8NrO27Sp5d4lAxJeDYml4YAwTqwub3B9clAo/4nZbMJwo0ZnMyTPaNspqpnkA9XCiElnPZLl5WV2X7nN81+/zHh/z9L+QcX52ZS9W7fpJLMjFbe3x5ySdRs710V8SiRtOX5sg2ZQw545N/oQ5szdFDtLcauKpdUlJARGaysQO4b1iIVRg6hj8+gaq4tGYaHYfwmO3Jp3wmB1hKtqUjT1avKeOGupQkUeWI2iQJ5ZM5lxNC29F3RmyJwEqxWzYoIzhNWlZR4bLvOH3W7JXsr0gb6IL5LyXuN7mK794OuuCBagLGxlmqwxNJ10xOLqCBkfTF+iakpJVajEWKPOyVy70jNwq+DNF6znBukd3K/C0bLubjFP1DyXE6vYbmUbuJpvsRp7GQQXBMr8R1Hr7HvvrWGZEs5VPLB5mrR+ituzHW4c7HDaVTzROabXb3PryiXeevVrPOdbqvUVTp44wz3rx7hv9QwX966zP7nO6vIybmVA3N1iMtlnEDwj35BiS0dib7xP5WEo0CabAJbFEFvxDpmA7s64ePUttuKE+0+ucd/jZ3nwYydolpd58T9+m91Ll6momIowkwxBaLuOhUUzAREPo4Wa4aCysRgwd5Ppae5aJginrAwb4e03LrA4GjA6eZrj62vcDi3LNOYWWheulmBpFFY3+CzUC0PSrCPTkRRS1xlqFQJ5UCGCubQ4gaWiQi0FvRNX3HScmVtMIm6xwonwgXvv5xsvf4dpYwOvRCg+CRZsIg7RTAiOqjQ23+26O4Kl3/2dIzlHl+HG1oyZZIKvSNnET8aqdWiZSBy1s9qkUEycWrNKnLObIVLMCoxa4so8FUHMJb/8ehubZ3a93uW5hVJWJSpUBVSw6b7eNDRaejxaRGVZ5yrL3lXMezg6XOHIaIVxnHJjususWeTEuQ/xsHji7T1uX7nC1dev8K0XXuKg8awc3wTvWNUjXHmtY2l5gaObx7hw/mWG9f+Puv8KtixL7/vA31pru+PP9em9KdtdXdVdVd2NtgAIEARAAiRASgyNaEaaeZqJUExIoaeZh5mY0MOEhiEpYmYeKBdD0WlEkaBg2Wi0r+7q8i6r0pt783pz7N57mXn41jlZgNAFkEIoEjsqI7PSXHPOMt/3//6mwaQc4ZTQ9w+Pdlhu9tBlwGcBawOZSWAwZrqxgcoSmkstzl84y6/+1S/S7CW8+fYt3vr+27z67i1OtLo8+8wTvHXjNvujUnwL6hg0CwRfoxNFmmZRhuvF2TFOvhWONElRJsFWlsHhCKMM3lpG0ymTqmK0PSBfzmWomyqBjAGVG1RNDKUNFAs5R/cn4AP11FMNK0HNnBPhRRUk6dh5OciUUPKNMrjgxd3SgnagChMl4YHVM6s8cavFG0wjL1DLAFN4ubjgyVJDmuo5UvaTnsdjs8Q+g5jUNNgdczAsUV5UiWk0pxBNZPQujk1ucFZeTMycch+CmCV4H6soArmOzobxYxhl4k0ReWIw90kWQ2xRYRrjUEHjvTinZIlCEsEVM/2Miqelj9HT3svcBx3m3l0NnXKuvYwNjv3xgFvTQ4p2yupzT3LSPIufVAy2t9jdeMitjXVu33+APdFhpxX7J2MYj0dYJVHUSmvubW6weLZFI8uoXYryJdQlk819eieXWDy5RrPfQrkJ/+o7b/CtV97n/vou3TznU9kS/5svfY7VX32CV19bYefBDkmR4/xUBo2JmEBMjo44it7JxqSY1FCXNc1Wg3a7IDEpSSNnsH+ISQ0nTixxuDvEoHnj/Ts83VmRQOJUofMEZyuRhE9dRAs1ITc01loc3duT98dDtT/GjUt0M5fm29RgvYRBOaHrh8oLvT83UQouGTG+ErhZ2UCiEl48dY53774vbkCRmR6XHc1GTpKoiPL5Px8NfvDi31VVFUkayLIAZWBqS6w2FCTMzmwdCW+yRmXCr7wXxxZl5t5bAjnH0io2xzOqv0doDkbJ7QFK7FeVpFeFoMQWKGrenYLMpOhZ7hpimI0C54SCkURDi6C8uIjYWP4Jrity2GBZyBss5m0qHLujI8Z2ykLWZPHUCr3jSxybXGJ0NGB3sMcrgzvc3bpHOZ5S1iOstXLye8+0rri585AnT5+laRzDumRwtMOgHtNpLzMcH3Djzk22tveo6hqF5kyzw988dp6nz55j8ReeRzU9L75wnneUJWs2cUqsZ0XupnjzzevsHY4AOQx86aKTSk2Wtbly9TQ7+4d0mmscHAwZHg05e3aVcjDm8GhM1l1ltH1E7hoEZUmnAdPJxWdNAzagnRwGWbOg3J+ICGvq8IcVOs+iF7KWG4TIEA7gLeLJMKlko4QEP7JoiyQt54FgHKfPnuT0+m1u+SmzUzFoTaORkaTyXlqnsOHPAzdMCbqknSPLDEU7Y6kF+wMnlIYAxuQi2tHiRplqOdFrJye3hBYZCDEbRTFPvoUo3w7So0hhBuKY7h85IzJjskotG0PlAVFkeh+HX5HKrSKdX3yymMPVSaIIJFKKIeiPEJY1KhjpKazA3+3OMsEH9soRdw730SrQSzJUmrK6sMwTiWOvHAupUcZHj/qFENg+2GO53+NYv0/pHZu727jU8OG1W9TOEoKilxZ8/vQJTk0NT+xrTk5zWi+dJjvZxVb7GA3PPHcek2ZoQpxnOY4OB/zuH7wpZnmRj0bs36rKsr97yMO76xRZymBQUk+nTHXBw51DmklOq13w7vYm1x/u4WtHUnvalQIdhDHeLEh6DZpLbfIkIRxNCaUlQZGPFeb1B/Qv9UkichkSg5pKr+iH0Q7KgPIK08nQRuMzxAVnKj2o8h7TzPn82YvcuvmOGJckhjQ3gq76SJ8MkMxyf37C83hsFojUXk9bp+g0o91WFGnKuJahklAsNEZpdPCieotuIRGunxmIIROzSJKT90YYWkHcI1WkOwTnxI9YRTPxoEhTYQwIm0QUfR4p53TwouAziUReREIfPojReJDfcz766RrpZZhh/AFQicwEbEldOcpqyqSaMpyMOZoM2Z0esV7ustZeZK8asuOGBOWZ1mO8ltr8401oIHD9/j067Ta9/iKV97hkSM+0Ge8egqs5F1qw7tjyJUkjofeFZRbPJFSjPYwJpM0GKaJrl4lTINiKt1+/xsHBhDQ1VLWdf06lFM45RuMJH15fR0XWhFKKuioppzWu32Z1qccvP/Uci2kbHzxpEIN2r4VlXFvHdFxSplDZmvFCzvZ4xHQ8paws1+8PybY0eiROmB5FsBbqgKk9ygWMFttfowJFntNcWMIstmi2C/KppjhMyEYFp8k4phvsJp4sVyJHDw7rFJWtyZL0E5t7eEw2ywwSzn2g6Q2J0RSFopMZhpXHu0Cio6mFh6A1RkczbqVwXkI35baYCzJmHcXc0QWto/GERFSgxXg6UzM5sTiIKASOTKKJho66F+fzyD+LoiYlV7dPxJJHh6jMDIpQ15R1yWQ04Gh
4yHAy4mAyYDAZMvKWUjvSNCPJM7rtDp1mk4Vek9XlPs9xjuAsD+sjbo13uJfskxYtDiYHcmgE5iZ7WmnKuuatDz/khSefZHVpicoFKBq02z3KhwP2bMmWLml3GgyX+zRcibq9wYmzlkYjQzcKdJZE0ZvCW8t4NMbVDq0NWWYYDMMfs5gUIWiCd1ReKPmpEgh4PKxgMVBOxtRFCzuuSdKMIkujqjSgkpSQFtJfJALQ7JcNJqMxSmmWr66QLzUwqSaMKnwi0/mytNgjS02g9JaqqplMapyHybRkOCnZGY+xlcVVDhcc9dYu+lxOttKEEL3cnPSnYgYePrFfgcdks6BkQaZVRRpE9psWKb1Gwua4FAWgEqTKOwhe4N+ZIEvpEN0pJVFKR42KNkkEBWaNf4i3hUwoEyNS0+CEFjOLiRBHVh17ooiceIVyNmaop+Km4muq6ZRyMmQ8GnA42GdnuMdROWZCjU5Tkjyn3enQ7/RYOnaOi60m7bRJI0lIFELmDDZazQaCrTGpke9rmLAxPmAyGpE3GqRHKbUXQwcg5m8KvD4pp7x98wbPXrzM2vIS+1NL2mrQ6rWp64qqHBNszShUPNgf0384oN9vEpyl02jMTQhdNWUyGKHTnMtXz5J/6z2KIp1rdj5uJSTzLanzjYJmnkHlCKnY5C532nhXs3e0TVlCq78qcdsajJPbSSVGQosQQCYtEqYTg/eOMLb41KLXMpQPmIElJJrcGYpCUDDvEoLJCSqalvQVYSLm4SEaVpAEPuw0+HBhQJopvHJIMLWK9KNZxa0/cZk+HpsF2Sw95yjSdEYmpShyjJqKYjpI7odBunsV5Id4exmZ6kf9gkHcEK0T9q/RaoYainWql59dPJlnIToQsCGQqciPco4QAvW0ppxM2R1sc/tgg9wbDoZH7JUDqsRTNBp0mj16nR6Lxy9xvtmhmxc0dCqcJhVZyAF8sDjnObKllGKuFlQuEb9iH6CaWnYP9tmZHrDrjlhZWMMHT6PRxo0PcUG4WbOUqlkPMxgO+ejBPa6cOUu/mTDyDm80WhkIKVPn0DpB6ZQ8KSgnliQxVOORaNs12PGUuqro9RYxObQbBa1GQWoGVCHMB5JA1KwLOTHNCzq9HguNJp12i3c+ukky1ZzsnsC0EgEGrEQNGh0zcmovHoZpDDBKAlmREdQEZWA6qsVvbZqgEoXarsW0op8TyiDvm5ME45AYIVRWTj5eECOLkAdcHrgWjvB5ImWgE2ssrWT24iL51ds/Dw1+kMHgYl2R5gWhttHMW8WsQkiNnpvwEfuL4JhnmejgJYEqDszwRL1LfCHMrAk3c4h3RucHxHLIe+oylk6He6xPNhkcDhlUE6okoFsFdZLy1PELnM5Oc1ZrMpPON6fSQYzAgcPphJEWOrsKAeVd5Gt6DAqTGIxOKLIWiTckOQQHdzbu8s76Da4sH+dnj19iPJ1wOK347Wqb5YU1srTgYLCLdzZ6pT0i/4UQ2NrZJk9Szp06RcMYyuhmj05I8xbtbotxrbj14Ag/rTh+vI3WjqQQSbGzjvbKEkqnNBoFSwttHu4dkmWJ+CQTT2Q1M/hTGAP9TotOv81S0QVt8M4zrIb4FNI8EYCkFPvUMPUSTZ4ncqsYMZHQXoNRJJnwq+3U4VseN3KYtoHKSn9YaGikMtjUSpzHDajUEKZaDAkz8VFQWnN7fZN7S5rRqGJ7c58kMTLEdpAXaXQzDTgH9SeQKR+PzRLpKKuTUiSuka9VVyWpFqq+mOPNSqjZgDDI9DzeNjCboshGkEWs8EqGWFpBsE4adedwtmZ4eMTg6IC9wz22yyMOqiFkCc1Oi36zxbnjn2Kh3aOTFyRKMXQVnazAaJEBOIR9EIjkTedJQLzKghezCeQ000BaFKA0iU6R/HZNsI7rD2/y2r2PWOn2+GsvfJkkwGR0xFGwfHD0kCd7p2glKd/LbmCD5+hwhywrSCkYjg/nzorBee5vbJBkGSfX1lBJwtgHlBeG9uBozATH2CjKaZMksaS5QxsRSa2dWyFJM5K8SZIVLK/0aDzYodHIGYym840ib1ug2ShYOr5GU2vW725w5YUTWO/o9boUaU7lAkXEW1Q7lcMjkzmLGIpoofjH298oyY8ZVzVOgzOgKy92R8dahLtHqHGFLxKpMLIAaQLOSc/hPW4swIs2isp43pzuMeynODztTpMsT0U8GEOgQqRQWefFV/knPH8ar+MC+BaIvwHwT0MI/2el1HngHwJLwI+BfyeEUCmlcuC/AV4AdoG/HkK4/YmfJIAhcLp00BBbUx8FVEZBPXNDTB/ZsgpFPAjCNDu9dfTBjaleGkGOVAA3mTI43Odof5f9/V22JgcMlcMXCa1On+Vjy1zsXmCh0aWVNkiVQruakEhEhIm3hrKx+Y8EPIz0Pyp4XC2bo/ZSvomCT6F1RpqaONVPohRaqBoP7t3hlbvv0e73+fLl5zjRX6CyJde3NhkZx92Hd1kwBae6fd483GDiJqwsrIJz5EWTqi5Rk8M4/CRO3wN37t3FACePHyfJEybAtKpwpXiQVQns70856ID2NdNqysJKk6UTXXk9VQIqYWVpgYV2wV4rZ2cvEg6jQ47Whsl4gvY1vaUFxqMu3VaLw9GQ5V6fJy6cZ+zGdJOGsIhTORjE10lccVQCTiPivbGFVNFcauIOxtS1pXQ1SZpKgdHL0F2D3xzLTb7WAqeoS0tSRJMLleDGDp0nkMK9Bw+50/XY2pKZhLRrRCAXPcOUkuGxUmouM/433ixIct/XQwjD6Kb/HaXUbwL/AfCfhhD+oVLq/wX8XSTl6+8C+yGES0qpvwH8J8Bf/6RPoBQkwXPSRQWg9XNqBV4UcihPDJkQZr2SWLgsyO0h9BT559o66vGQg719dg+32B0ecOBGTFJN3mqzvLrM2f5Fltt9ulkDk0jDrrSaC4lCjNYTVoAHlaK0Jk/FycUrLZ/MeXHxt06sYAlyi8VEMIBEG8TxL9rAljXr9+/yys23od/mS8+/yLFGj93xgNce3CLLM071l7m9cZdO0eD54+e4frhDd6FNo26RmpTDvIVWMBjuxdP+EZTjg+SkXL97F0zGycUeK4kmFJrKKFKTsNjMWWwZcBV37x8xKSvGVY/F1R6N9jJpI+B9zcJin24rp9NsUeQZVS1iKo3QYIbVgAf3NvGl5eqVcxxb7HE0GPPF0+e50FsRcmquUJkEqaixfG26I1JtPKjaoVJkg9ayETsn25RHFeWwpNyvyFcLmHroNVE3BnBviFntiGWvVbG0FqM81TTgAnVlefdgl6OlSE2Kor+6cvNIRkJ0rYzSh/9FE/zoXTyM/5vGHwH4OvBvx9//r4H/S9wsfzn+GuCfAv+5UkqFT0qJAfp1zbJOcC7gywqVpdEpRXa7ZKcIC1gFuWFMCBgfKKdjRoMD9g632dzfYlAOGCtP0mrR7iywdOw8z7eXWGi2yZJMUA8VjSiCkAAJwhgLZtbvK2kag4oQcwDvSLTGWofTFuUsMfGeEAKJSSUqT8vMQciHQusQ83HF3sMNfvTBj9lP4aWnP8X55WPsTC
a8vn2XPHguLizTThrcWX/AzcEmP//Ec9za2+LU4irh4CGtZhPnA847xqOBZLq4MAPI5LWKrxkhcDAeY7KUlgos54aVtsZ6x+7wkAdHnuAcqZHDpkZxeeSoyilpOSBRXZrtFlmSkKcpjSLD2QkuSoqn0yk6ppxtPNyG2nO0NWD/8JDV85cJ5ZSFlUXpC3V0EW2n4AJ+UktKWAgCVDg9l0AYLbOt5nKTvJVztHXI6KMhxXKDxkJBZSuKpYJZiptwjeIg2kMwGqU9+0dD7qY11kucYpCuXjiGQfSnc3aA87El+snL9E+bz2KQUusS8F8AN4CDIEU3PEr3go8lf4UQrFLqECnVdv7Ix5wnf+VLq5ytpmR1pKfM8x8jadhLqeODx1nLcDpgf3LA9nCHg+kRpQo0sgYLrR5LFy5zudGhkzRJ0zTeSpo0MSglTpVBRXluBAFCRNe8Eq6ZibEXs3Qt56w0Td4Lpdt7vI29kjaYRIiG2hhmvGftA1YHEiQLcbS7y+vvvs5td8jzVz/FV5eOsT0e8s7uQxaygqvdBRppjtIZ6xsP+MH6NX7pMy9xd7DHQqvDiVaP77//JovLHcblGGdrev0ldnbW0UYyGK2t50iVwLwpWd7AoRjWNYPRCFdPsOUUG2qy1JCnCY0sIU8z9Khid3fEyukJRTUhSRv0em2SPCUvDO1mg8FgAnF+5WOiWbtoUtYlu4dH7B8e0skKru9s8qmdbZa6PZgoIbhmYsGLkhgIj0MlCCtjIonQKlM4DSpVeBw6ES+0EYF6UGEHFfrkAtnJDtoGlJv5YIPKhGvmERnF/bv77GQR4CCWjh6UCtRWKpfaelScsYSPIX3/xpslhOCA55RSfeB/AJ740/y7P+FjzpO/uheuhMuTIbZyMK7QzRzjPKm2BHfA3e1dNid7HNQjUIZW0WaxtcjpY5d4pmhRJBkmMSTzmIE46ETNN59DIGUbe5yZpEz5iFGZmROyvLGEmUDMzmc0Wmo1cfKPWS/iI2ZmfJboPQY+iCpvurfH2++8zoejLS5ffpJfXP0su9MBtwZ7nGj1OZ31sHUV6egNjnZ3+eaNN/j5F77A1uiQdpJzor3AYDTGKbjoOtw2UmtPx0M8nnarTbPZ4eHm/fjqSq/WbHcj/UcRTIINJVolEqrqQSHolFGJACo2cOP2DkvLbfJGg6y5SG+hj7PQ6zTptZusbx0gvLjoH+wtdTnmWL/PE+fPsVQ0+dkrT7O00CNVmRBKQ0A5HQmzAZWn2GklgAuihTcNadhDJa99MEBtCUqRdnP6CzK8lCExkGjwchMJFUeBCQQr1KTaWu6NjnDtWC6H2AdrhcPjrPTBzsuczdlI+f+zcqQMIRwopX4f+DzQV0ol8Xb5eLrXLPnrvlIqAXpIo/8THwVcSjVj7ZimA47UPoNRyX4d2AxDbJ5wrHWOUzpluWjRSFLxGNazTBeZys+4Y8FL6UR0QRG6torolEYnEswKjuCiTaufvQturn1x6lF1E0IgqEQ2SyI3z8xR3ykZZBKHdsF76vGI6++9zRsbH7F89ixfeOKL1FiO6gnnuqvkWlPbCVVVkuVNEgzleMxvvf09Pv+p5xjXJUYrTrV7KK24vn6Xpf4CF0+cwe48oLZlTBDzdNoLdFodtrbWY+0tup9Go0mSJVTBkZsEkzXwlZilH1to8sxTa3gM77/3gCq63dzfOeLczpjV0zXO1aR5A5PmTKsxvU6TxeUl9nf3hCoU+zlnLXuH+3xwx3F8eZnfeKfkbKfHF5/8FN2OmIKrQqJYfSU6epUpvFPo6hEbIcR+EefFj82IZ5hAjkBDCa1fq5jsFgeJNkTABzDSu45cye1yxCSCHtZa/CyKJJI8ghfQxvmZiIN59Pu/0WZRSq0AddwoDeBnkab994G/hiBi/y5/OPnr3wW+H//8G39SvxLKKe+8+w52ktBWx2nmTRabq5zpNWgsJmw9HLN9WFJ5R2Fiz6GNwJ0wN8mGQB0USWQWRxJD/LWmCopURa6Wj5SLEAih/tiAT7QaIjOdpYARE6FU/E9mNUprfLxpZjT6alqyceNDXr35Fm5lgavPPU8rLWg3G6wWbQwZ3pdUbopCkWctjEmwkym/++PvcPHKRZTWTMoJFxdWkGg+w53DLU6fOM1SZwG/8wDvLU9efIYPb1/DqIQ0SUmShKquSNKcJCtIi6YI3LQYfZjEYMg5f7zDX/rqZc4/cYass8gbr37IN37/dQbjkrTZYjKtGR2N6a5U5A3Nl7/8aT58+wMGhyN2RiXkTUxdc+nYMTY2H7K1vUW3aPDpCxfZ2D9kEjzPX346uufruV9xqL0oFesAw4DWAdIIeVspk6gtOjXSzCtIcoNPAhjRq5g0Wq1mRij7cdP40hFSBdOAShTbW/vs51LWK61IkzweeLP5kBBCRRUrcLiwM34yIvanuVmOA/917Fs08I9DCL+hlHoP+IdKqf8r8DoSpUf8+b9VSl0H9oC/8Sd+hrzBzktf5K+XFalNSRfbAlx4EWkZrxlOLLqOpY/WcxMCtBArZ40aXkoEibmL6EaQhjGL10TwTti1Sk4XAgQjQUYBsVYyKJIkiRvHxTnNTFSm53MBHTej94G9e3f54XuvsJE5Lly+wPmF4xxrL9ErmnF8byX70U5lTpM10Chc6fneGz+gsbbAamuRSTngXGdRvjdlGBwOGQdPJ80pjMHkOVmWstRtsbZ0nOWFBe5vblLkHYomJGmBVxqdtyBUcsHGEjIvcs6cXqTRFPtSnWpe+KlnOXdugXffuMPNuztMpp7D/TGLkzFJy3L16UucOnmMh7evU3QN//zVfTqdFl0NP/XTLzOclLx3/SP2D3c5n3dZy1vkJkWXQSLH83gQGTBWSROfGkgE5nZTJ8NJrYW3ZT2qKeiZF71FHC8o/FRe72DF8FCnCp+J/Sp4Qd6ahp3xFJfnKD/zlkNY6TBfF0pJqZ4YLcxq9Um5X386NOwtJM77j/7+TeDFP+b3p8Cv/Ukf948+P8zbXDZHfPHAQWlRaTI/BVrNhIU8Y8tGgzc8Oqh4+sswygSZjsv+EXd8Nd8w4tAyE3WJeIw5OuKdE2/eJJH6V6dChUcM7dSsl1FK6CIotLKRm6SoDnZ594M3+XC6xfKp0/zs8fOc6qzQyAqC9fGaD1hb4b0lSXISk0nNbWveeOd1dnJ46eQZhtMJZ1qL8nUaqctv3r9Lr93h2MIyIUiGPBp67RZLvS6Hg0NKX9PuL4rxh6sxaSFDz/g12nhqBmU4mljKIBZKkKBNRndpgZXlbR5upxwMJjx4cMTqyQHtFTl9m4tLnG0a0obhjWuv8vJXrmKnmnd/+BorrYSvvniFpGiQ6MBi2qZOB/izTUySgVWEkYdxQE2lbHZ4tJN+0hgNqXDOQqSc+IlEsOsikYNTam6UcoQgEgcPQnydit+xzhTeRPeJRp9cD6mGR4JkKogC8nkVgVIYZC4nnDc9H27/cc9jM8G3SvHPTIuzjUNO7I9Q7baA1DqQppqlhZydYUkVPBrpEZwWdIMgcdguwogzzy5PRIWQU8Mjw0+lYlgr4
h6DD6RpDklOgotYj5RyKuYTKi1T3plZeKIVo8GQH916k63DXc6cOMMvH3+epaJDoomfKYh5QnDUVYl3FVmSSz0e4eSb16/xwXSLLzzzHOPpiNNFH+tqskQUgtYqXj+4w9WTZ+k321jvmDrHdDrhe2+8QekcHk2Sp6AMJpFhoc8KJoMxjUaKDzVpls5vwINBzXAgHgfeCaKVtbocP3uM999/yM7+lHLiOXPmgOWzU1TRRgNJ0aPX7/HSpS69TNNYXuP0yl+iHK3T6RSsrfVp9zKU8FAEStcJSqWCGLpAqCx6q8TvVDCGUMnBRRAPAJUkBOtIcpn2C7M63kIREFAuiO+bCrEPQUJqgxPZ99Ry/vQ5lneHDEZiVmE+rlWZVRjxdhHvhT8Mv/9xzyePLP9XfLQxHCUp/zgrqIzDjSaiXfAeyppmO6VdJOBlgTuYh68SB5Jyo3i8dXKixvLMEPsUkAFjCNS2JqDJsiZ50ZLTxoeImclG0dqg5V/LK+uExq9cYOP6dX7nld/lQTnghQvP8oUzz7CcdSIhL956lcXWJbaaErwnTRooLTcKtWPj9h2+v/ERzz35LLW3nGwuYKNJ4M7gkHt7W7y1cZ09UzFNAr916zX+H6/8Bt9fv4FzgbF1JFmDxCQEG30BvGz0qk6onGG5W/Dc5SVOLRo5NDwcjmpuPxgyPBoT4jBTpy3ay2ucOrMMPjAaVzy4d4CryvmiVSqh0erx3MvPc//d61QNyBqwFla4+uRpev0GwUm8oPBbEhne6kSQd6OhkaFOt0g+1UWfaaCbCUkmxD2TiTOlCtGSNXoRx0kKfmohLolghHyLRqLax5ZQCXu5OpjQPbnGybVjcbAs/8Y5P2/qZ6MJCeiNxosRKPpJz2Nxs8jiVGg01xsdvjme8jPjCmyKjwTKTGv6nYyDSY1VQpCcHwSzenU+9XeSDTJP8Qrzl8B6OWWSNEPJPRMbeSealRBN9rSJNkmCjgWksRzsbvPjD37Mbup4+dMvcbK7TOlKiapLYoZ6cBBqHA5bi89WlhaiedGaYC37W5t886Mf88SnnyVTmuPNNluTHQb1gKwLa+c7HD/RYeuNfZLDlO/c/4CqdjI9zxqAwpjkEWtaXkiJ+1YKVpf5hc+/wNdOeooErl17wP7QcePBiMPhhAfbQ9Y3DugvL5LlbdA5WaPD6UtnyF+5g68sB7tjyvGEvDdTSgZc0KycOsVi6xr2/fd48OQXWQv3KIdLpA0bXVcinUVpFEksbTRKSXmrlNBc1LlAaI1x1wdgNb52qFRLZg4e48HZWGNPxLPaJULF91ZiPQjiKsksrdgGpgcVvZUepzkm72EcB0jvEhshHalQscT2zPQsj3kZFniUceIJ/F6vx1PlDqdEGyccIhXodFKyPUPpBC+P+NScPaxUzFM0wttSeNkwKBIj2vxgVNxI0uPM6JcSMeEIypBqE+cIMcRVGexoxPu33uLDvbucP/0EX42Z9yZJUJVAkrN5TFAe6yoZumlNlhVRiy+isNH+Pv/qze+zfOUMvUbBUjNnr3mfsy8U9FeOkzUL0kYTpRNOXzzGT//Ms1z/cJPX377Dq2/fYTipSFJDkmekafYoGyZqdC6e6rH28qf4t758BTO8TkBjkoyDgwPOnGixd2g5OJwy3BtzuLNH0eqRZ01Im6ycPMnx430e3N2jnlrKyXSOGqE0WjmyRoPPfP15br79IbffvUX/9Ao/vvGALzy3TChFyuxCQDkHaSoUeB15WAGUEeqQAsIx8a+2H+xDDaGYwfIaXwdMpnAOcA5fWUwrFdh/YuXvaB/zRRU6FYf/2gaSToMTrKIxYqSOi3MWjXNO5mRezbU5f0IFBjwmmwWIxaPcMOMs51+0GvxvD0ekmUGkuJ4iVzQKxWDkY9ScisPGOO2P/KC57jF+TOci4hVhwTrCw+nMgX1mHk4gDWIOPhN/WWvZvHuTV++9R2tlgb/w6a+z2OzOsXoftRw4ETwJFG3xzqKTjDwpkC0d0EFRTUu+++orcKLHhaXjtIqMB+ltLp5vkWQKnRh0koNKZSakE/oLLV743Bmee+EMn3r9NP/l//f7jI4a5M0mRZ4RKidlHp7nLyzy1c+f4Z5uoydDgpcMlNUza2R5Tqs7Znl5LJtcJWRpSjkekjS6mDSl6CzxqRee5OG97xFsYHI0loNESSy6s2IVtbjUYeFrL/JslXO4t8fe9vtUdgU4hdIJJknFj7q2okXBiwPkzAQ83taoBLXUhs4AdizK+ijoU+gIE4eJmB5qYyRbMngUJsq1gZCACfg6oL2oN02esmwWKRo549EE5wJVVdFoNVHK4GdrgpmllagwP4kc9lhsFrHQtHMVng/wQb/Pmwf3+GzlxBVba3QKvXbKwchGxoWwjjUyeQ1ONCNWGYxJgWjEF+typUJ0gJHgIa9nvUkasyhVnO2LSGm4u81rH77JXhF4+bkXOdNakn+LzCysQZLIainVXLDSdOJI0hyjEuZpSyFgy5LXXv0h97uOL52/zGqnx+88eI0Ll5uUDppeUUc3SOXEa1io/k5M9KxFuynH1zqsb3ja3QbNLKep4eRimxefP8OplYR3fniT/X6fzXsjwsEGzX6L1lKHbr9F3soZHqYkKbR7fXSSRmOHKSoUKN3g3JOX6H7jDdxowv76LqeeFR9iRYBQEuwQBegkp1U02bh+m+zMz7O1+21W+nsEuyyLUcfEaA0Q4m3thCSrIu8vIEPDBcPDf/Bdjr1wlfTUogwqy0BIJCIvVIowcdKrFGJMojIIQcv8xgf581zjkwkkgWbSYKHbYzgUlnJWFHGkQOzDvPQ/8aBU2vz5QMP0LPuRqEEJit9Z7vPU/ohG3kMlMhjstDPMXknlHN55MZWLA0SdCKwb73yBfkOI/sYBokpvZgg963u0lzcvRI8vN6m49tE7vH20ztMXnuSrq2dIk2QONUOQk87rmFPP3IoJZYXmITRaQXB8QDnPtTde5/XyIV96+kWurp7nGw/eYKce0xsmrI5r8kxTtDJcXeGtRZkKHVNE66pkOpnwze/fQhlNt93g5z5/lroMXD69xJVnLpIkCjc9olJ3GI9HDEcJlJbBg306gxHtfoskS+l2mxwcjjjcG7J8fDU6bIKtDzG6otFu8uSzl3nz22+wc3+PMC3xlLipIEvytRVoZNh57OJZ/tWr73Hh8kvc3LvF5eVFVlSNd45gDM5ZggqkkWAKeq7lmSGOxaklrp9p88o/+wOeX1vh3MvPYo710HUgJBrVEHTR6BCdRh1ojZ86qB06MQQjQUnDbEpZjWnkXY6fOMmDwyHKGHSWQyL8PZWlBG0wKsXkGSrJ8cag/6ff+4nL9PHYLEGIcEHF3iU2ZdvdPj/aO+SrKprraWgWmmaimNRWmtug8NECSSyMZqiG1MhSFiGnmAoSYqoUabQ0ElujePWHwNaDO/zg3ru0Vlf45ctfo1s0pGqI8KIM61X8ml0k33mcryEoEpPhIoSpiDandc29mzf48f59vviZl/nU6csc1Ie8P9ykDorN3Qmr3ZRWrhkNp+hEk2QZ2ntsrXCV
ZVpN+eFbG9zeGLC23KbVanLpVJt+r0mrWWCrQ4n0RdNutSBpcPHiAqNdzXQ0jopSCYGypaW/2Gd/64j33r/DxctnKHIvuSs6oJTnuS8/y9bdhxzt7THZ26e1phkPHtLuHwOdYbIWqBwCNHotPnfxDHW9z7Buc2OSsLDYIqmHzH3doozAe4v4NQOIM0vwoBPDl/7iC9zSLd6+u8Xr//K7fKa3zNkXL5GcXpq/V8FoQmnlxh6FOG/T4IKQL52n1m0Gk0NaRZfzz3yWd/rnUNqQphlaablJY4WgdTJfA4LIFT9xmT4emwXm1pkhyJBKkBfFd5cXeXH/gMZST67LLLDcyRhMHFYbaq8wTqbxDkjipH1mTgES/pMYcXbRRLcWxYyOKnDj0YAfXX+TDV3zhade4ExvhURJT6Jm/ZSbMaEFnlbxBrOuQilFEo01QFLBjILgHPs7W7xy8y2e/sxzPHfmCkrB7cMtJq4iEBhMavbHNculpVFZ0kmFLq3MhhRUleXh1oDvvfkQVRjyImNpZYl3bh5yYnHC2kKLbqdB0WoKN62Zs5M1CFqTNYTMaKOptzEGjNTs/bUlqu0B7775EU8+c4VWMwekVyqaGT/96y9z/ftvih/Y8ICiuYTO2iidorTMUCSRAI6fO8mNDwZw6NnfPGDy2VMsZk2cmwLIfEobOeC8jXwwcVCQ6awmPdZmsd3hp84XjE6d5N2NLV77nR/xbL/L+ecvUpxdxVdBynUlpbfyCuU0KhciJXWgffwkB8M9jvXPcLLfI92bypA3JiXIyhCL3hAip1Dum4/hpv/z5/HYLOqRx5ZQpeO0KHj2ul3ePjzgpTIQCkOWaNLU08wSDkobWb5GjO1CgDC7MWY5g1J6ETl6CplFOGfJ0gRfW27eucmPd25z6eRZ/tqxM2RJio7hR0ppvHNz1xlnrRhMIxT12pckKsGoFEm+DfPNGqxl52Cfb733KheffZoXzl+NzFjHvcG2vGU+MK0t49IyqSxlZXGHI9JEVJ+1dQwnNd989QFH44pOvyHoXqZ4eFQymtZUteeYdfScp2gUjK2ludyTXi0Rs78i9oNBiaHhrF9YWe6xcf0u1996n9MXTtNdXiVtdEFnNHs9rnz+KoO9TTBLZM0+6JSgErltXS0LUGmc95y59CS/+Y9+yHt31jl59TRLa03xXfMOHUttIUaWJLoApeIhKRsnaWY0z/eYvrdHR2u+eO40w9PH+GB7h7e/8xZPfL/g4pNnyc6soDup2CelCq+9vKch4CvP4tnj3Js8RGnFaiMnVYFKOExi2Djj3io+Nm/5pAmLPI/HZgGBC5XQ2mcsaW0MjsB3+z2e25uQpwvY4OkvF+R5yv3NEXsTG+eFXhSJyO2kg+gcrLUIX09Hx8ogE/+g2N/Z4ZW771M1Cv7CMy+x1Gih0xjPRiRNOglJ8jaa8yUSV4Gy1HVNmuQkZsYhC7NuFuUsVVnx+x+8QtlPeebUBWZZ9rWrOXCT+aDT1Y7J2FJbCSpNEubWTpV1vHXrgFsPhyRZSpYZQeJ0Qu1hf+poDhythiXNK/Hz1YrVRoPaDoRLlyVoE6n6yLRbmxSFBgXPf+l5nPfUk4rB3jZa7aN0QFGjVcrC2jkpT5QmOI8y0aRdC/09BIHXdWL46qdOcOPD6yzpI7Q+Jo4xtoJo7K4U6DSfJxeEMIP9Ba7PznSx94+gFEOJdsj43PGT2LMneH9rj9/48DoX3r3F05+5THFplaANOojPgvZg60DR6zM+uEttS5rK0E00u7XHe0+aJBHUkRttFjGRKEPl/hwkf4HcJnPtg2L+RqLgQavDrd0hV2uLKVK085ilnDNaw/qA/UmN0dE6PLKBnfeRLlJTmCTOYFw0raj54MFNrg12eO7UBc4vrpIZIWgqM0uCcuBAu0cDLKFvB3xt8a4mSYW6okAc9GOaWKhrcIEPPngHn6f8lee/QFIYgpFSpLQVg2pKcFIiKgWH45LDYUpRaIqQMLU1QWvWdye8eWMPnWqanYwQPZanoxJrxd526jzjaU0xTUCVYsYQET+ltMgZ0pmeZAaCCC1mJoBLlSLPi4iOpYTIf7PVdO6ppo1GWG4qHg7xDQohTtMDZ04v8ZmTKVuHlrMrSug9WkRVosR2aO2x9UQGnd6IhVUEQ7JjbcpmhkrBVRblFFQSj/7s2hKXlrrc2j/iW9fucOH2Dmc+d4HiRBvlwY8qVJ5R9Ntkwwaj6Yheq8dqnrBn69g9RXg4zGZ7YS4r1p9Mjn98NoueXcd4HlWWiLGAMvxoocPVwwmqEM8g4xytVsLJ5SZ+a8iojtBkABMCznm0D7SzDB0DgJQL7Ozv8YPN6yx2+vylq5+lkaakxmCNFmAhBJlNRK6Qi+VDqC3eeryv8cGRpjkmz0VY5r2YW4cIQfrA+tY6b4zX+eXP/yyLnV7UiHuJY7ATahweH2P5xJy89FDWHo3DB8X+sOKH729T28CZk11ccDgVvZ9ViLJYEXGOK0+jdKAk/co6T1VWGFVHqDWRBDNm7iwaTCI8uZjxSITfPVFx6EGb4hENPhYtWps5kKKU0IF8kHzOvJGRWcvR5gGTM6dIjNwa4hHgQZl4CCoINd5PQUmIkEJ0+clyg+rWIcHHwaVWqKhpKUzKMyvLVAsLbAwO+cZb1+jfb/P0pRO0mwUmyzBpQr/d52B0QKfZ5nSr4IOxiHp9HB0opQSIUYI2Ou/nngk/6XksNotiRmaTG2XG51HEeDQVuN5us7/7kCU/IzfKsK3dS1ktG9zfHlMHkZkqPEmQpCcTDTBsafnx+nW2/ZSXz1zmWBRV6STBRpwdwNVOjLxVpHjE8sopT6AmBEeaFdLLRB8tPROX+UAIjuHBIb9z+xU+/+QLrLS64ASuVji8r9mZHFLVlpm/WSBgnWNS1kynYgVbWcebH+4ymjhOrTbpNRXTSlM5z8ONDXa3N1g6tUjwEl09LD1mUKJNQlk5ClsxnZSkiSPJUgwOb4NkrSglG0fp6PkrX4e4nDD3D5jpOySPcUYj1HNlqELhnCVJUkKkFqWZ4fz506we7+OqfSj6cZQifjtSjsmGC64m2CnapDhbYhKD8w6z0oC7AxHZWR/b1wClvKfW1hirOJG2WDnV4mE55huvfcjJboPL58/QMtAqOmzsbHBm9SzHikwOS63EmF0RpcgBVPx6YB5B8pOex4ZICUDk78iAKNbE0XJnaBTXco2vKrwWm1ZvaxLv6HcN3dwQXEAFJwGsBIyHUAfu72zyG3ffoGg1+MVLn2Ktu4iKTbz3AeVkDhIbp3iSRhmqD/iyxk1LQnDkaYMkSSNQ8MgWNv6K6XDMd979IU9d/BRPHD+PN0ryL2NYjFaKremR3JixrFMRyZuUjknlGZWWd24f8XB/zLHlBmeONUlUIFGK/d0xNz5a5+H6HjsbeyilqerAwbhmUAaORhXDYUVd1gwHU6pKbhhXC6tAUPQZzUPo6YC8zknCfFYYnEDtopsGFEonKGVi+RSjCHWCs7XcHMGjNbz0F7/M2YtnGO6OcD6JqlQvtyuzBABp7kM9wldDlJ+ilQPv0P1
CZmKZmdtb4aP2JfjYhymUSUiAtbzg+V6Xav+Q7964zvW7t2nlbfaGRxilWclziliaai2McRVC7DVnxZmfVf0/8Xksbpa44hBUJKoT44ufJDGkKEn4sFPwhUGNzxKBL2xA20DmPd3ccnBU4YOekwmHwxGvbtwktHJ++tzTLDYaOG1QJoHIPpYSQgRCotmPIiAF1LLxXF2jlQSx+rmPsvQAc6aq97hatCnqxAovnrmKSTMZ4MW3wFuHcyVbk0O5vSIRcIbxW6UZWcftOwNubw7otTOunurgnWSi1NayvrEvKc7BUU+nuNoytpbaJJTVhNJ6BoOadDjETgcsWQl61UrRaDaEKqLSef0uRjDicqMwhHR2+iMb3Ft0msfSWPpI70USoYIiBBu5eJL7qKNzjtGaxaUlDg8n9LvipqKjpFeBhBhZmI5GJKlBp4VIpdMGqqPQzYRQh0jNVxLB7RSu9jJkDqCTiIANHQfDKatf/yIvf+WzjMoxoCiritpZullC2yQcOmFP15EbFt9A4azFQ+uTupbHY7NEJCyoRzs70ULH9yHE5kux0Wwy3tmlUWUiErJx4OgszUJTJIHaS39y7eFDro12eW7tFOe7C+JbZZK5p62UzuLvq0hlKWgDQegY1OIJVnsrZhhJSphlq6kZ/VK+bu8k2fijDz/gXlHzK09/EZOlc6QnhOiWGBylrdivJzLzUYo6yKa1taPynvubI9a3hxSZ4eq5PkorqtJTVRZrPXXt4qITDYv1QQbhtcPViofTMcORJVscsbs/BNMizxLywpBYcbKfFbxGS58Wd5N8T3GAGyL9aDYw9rM+QwnV3ZhYLs+PYh9Ls2gcbiythTaD+7vU7TbGD0R5SpSCqxR0RlV5fF2SFBVJIulEKlGo3OCHYoKujI9EvECSi5LSVw6scAkf+kPUX7pCcXmFuwe3aKQNTvXPkaU5k2pKMytYzBP2htNoKhIPh1g+q9n3lPx5yGdRcvK5OXV61qvIm2fiMOkgMRwoRz4pMUUKlUN5YZo2Q8rJdot3723xysEGvbzgZ09cptVuCcM13gISNSGOw0QPMI0nUU42pRYUKVgr6V1pgkoTfJJIME6UKBtPJG4qQlWzu7PNqwf3+IWXv0SW5aB1nEMEtErwvsb5momtmNhabJ2i9Y6O/fbe/pSDwxGpMVw61aOVasrKUteOsrKMp1UsUzVJkpJkqZy0qZYNo8B7zdSLXv1wWLI3yOg0LVlakhjZIEprTFZgrUNJ9JWUvWo2gNCxVxEj8Tm1PURUCyFhhuDm86bZ6RyCjQeI3IbNVoN33z/g2SsBR4wlNAlaJShtMGnOcHefRq+GYAm+llClpsEfxl5SK3zlHgm4UlnwlavYbRnO/vW/QrPfwuhUykSk5Oq1uuwe7dFaOclaprkOsqdjeekj3SnEUt9/grMLPC49SxA0RSbronmXK1Ixo96jFNYY9tsFKliCFYM4X9YwsbA3waZD1tN9rnYWeGn1LK12E0wibGMtkCdReWeiQYHRwhXyM8jHeaicIE15Kiex9IGR0Uqc3IuJdrCWyXDIN2+8xeeeeY6V7qKc2HUkewZZTM5LQz90FZWLiyY20yEEqtJycDhGK82JlRZFpqSHKS2DaUVC4OUnz/CVz15kcUGUi3mS0G1lzFz0k1QL5aPyZGlKVcNg7DgYVkzGNeW0pqpK6mpCPTqaG5nLfHJGZZ99XQna5KAMzlrReyDsBblN3PymIc5vtEx9mUm/Q/D0ei2yQjGtFb4uIc7PZyTGyioGh2NmkRezBREaWrInMw2FkWiKSJHx3mO15XYDDj97Bp9pHu7vsnO0w97RFre2buKCZam7wMOdbZTSHG80mCUlPwKQmMcmCvXmf6EG/3+NR9CXqJcn6qGDNF1GJ49M95RhJ09gWIK3grWOPd6V7F7WGNXjL3e6jMea3SPLtPYYLXn3PmhxntRGHAudNKMuuvQrhCBIkCbSpIbEiP6bOMCaUTtmDb0KCl9XfP+DNzl56SJPrJ0Ss+ta5HxBzYRQHmdrkiRhHGwsAQX2xckCnEwrjFb0+wVFppmUlkrDpKoZjWo+s7bIX/jSkySdBX7tr36Rb33nbbav3+bXXzzBOzuOb7//AJ9IKK2ylsHdQ6oQGE5qBhPDKFc0i4Q002RpxmBnh6W8SZI35zMH572Y38VNo7WUxtpolE7irORjiWcgSFmkJwU/I14IDBsCBDynTzX5YDjgmbwk06Bmpw8QgiLNUumlnBwKLniSZsLYCkOA2goDI5EN7ZTn7nTKsV/5Gjc2b3Nt/TrHF4+zub+FNpp+u8O0HrHQXeDd2zdBBY43G2Ra4+KGmJmby+xJxSrmz8mcZUaUm+U5KnzkK7q4kaRE2GukGF3jBjV2NOVoVTFqaJbrJp20SehB0XHkLc/GdsXISsRDpFbOZxMqcoEc8aR0Du1lw+gkkSBXGTTIv7Qyh5ApttinUjveufYuo8UGP3PuKjrmX4ZK8gzxsnCcr9FRnVlGKYBz0f0wzBpjaDQzcqMpS4Gkg3dUtWM89bz57h5PLL/Pxa8+z2K/za/84sscbD9LsvOAF59e4/qDQzaGQ6ySQdvBnX2qXs0kUxweaXpFQmtqyXJNntV0ji0RvMW5Gh1LMWOSjzW8Uc6rtMxJYq0o/8ZFqYKK8xEV9UNBND3iRhh3TmCh06Y3PmLoUnq2FgKjSQk4UhNotxsQe0bva4GmE03aSYW+5zRMPd4GQhJYH1me/rf/Gnm34NTqcWEPaB03sAjVtDYY7TgajbHW0U00Da2ZSDU5+9Kij9is4vjkQuvxKMOQul1FdxYfZCEnCGyo1aNJ8UGWEnIY+zH3jtW4qefUMKet8nhaeZI0JWukpIkIe2YfEzXLnvR4JREDzOrWIGiVSVIh2IHMWAjinxtzO7yroa6htjx4cIv33CE/+/RzZEUWM2N8BMeirVIQUwiCDCurshRAIDw6IGYZlwb5QmrrKWtH7QNlVRN8xlY55fe+8xE3v/Ma1XCPgGfh+DLNSxfQyvGlz12hrqWk0kZTVTW1c9R1YFw5DiYVg6llPHVU1svNHRwqxKAlb4FZLqaC2VQ9xBofI+TJ+D4I41qg6ICPwAhx4yeEMNtksnHONjMSk+Nn1BdfiTVUKfMVuW7lWPPeoVJFXVmYRvmwk/5lY/OA/teeZmyO2B9ts3V4l/3RJntHGwynuxyNd9gbbnAw3iIxhsQYpnVFQ2n6uZk7TkrlNft1iDfNn4ubRfI5IHpDgbBkZ9oTxAzPGMPW/oi74wckHk4Ol2h224RM0sJsCEwnnqNBzf7hhMoFfBALnVmZN1uczskcIThRVzrv0JnBKyXOMf4RO1mFAEk0ZnVyMw339/n+1i2+/PyLdBtdaRZ9nHEnCd45fIy/I0QTch9kamxEvSlzB/DekQRD7Ty1h0SJ4blGYR2EYCirkpsu4ZXrI1auDkgafYIbYoomwY44e6bDcrPJbjnFNBJ8qrC1p64c47HisEjoNBydpmcyqUhSTZJmWFeDEg8zYoCsWM/K0gjoiJQZVBwmWl9JIy+hIBHNFEKl1o
qAASVuklppfFDkzUX8cIC3u/Kaa4VSnulwSKubE3ygno5RLaH9ewLKgFc+zn00R8MJ2YtPsXbxDEZJlGAjbZClBZUtSXQa2QXRZ0wZ+s02B4MjWssrLKeG9amdrwFm5X9chTOd3k96HpubZfZTcLJIjDGiqxbKEYkxmP0dtnYPeGvhBGuXL1CkKeW04nB/zMONI65f2+XmzQPWt48Y14HKIadgeMQqddbL5xCyFz7IsExF4ViQaSjGKFTkhwlVzBNqS7COejrh27fe5uqlJzjdX5HN5HyUqmqcq+X70QqnHCbPZACKOMTLsI85XKsTSUx23jOtLTYErPVUVY3zGussSbuNXjnG5v6Uw/UB2JEg2DoDU2Co+PynL+Gcx6eK9HRPjAg1lLVlMqmZlDXD4YQ6bqK6rGU+5GqsreICkqEjcZGjFVrFqA+EJR1CwNYl5XiIq0txA42LrjzaJ7haylXixzNGTCU4lMGjkhu6mk4Y7B+JRRKQZnkk02qSViYLP2pPbLBsZ4rl589zMDhkWE7ZHe5QuZL94QEHo212h9vsDXfQmDhHgeOrK2wf7BLQHG8UmNirqFjNBITXN3MJ+qTnsbhZZg1+QG7j+fWIfP3aRCi5v0hjcZnvBs/BwRHPfPQQExSlynHRNiRJU7w2kASCMqBnzh5yZ6mYoeKdJ3grNXeknOvooesUaASOjR45wpuyHqzjjQ/fwayu8umT52TiraI7iAuoVCQCM1BCozHezP3HEpWKMVwMi7V1TWKEcuJ9wCUKrxNq78CDCwobAs3FBVRlefJijxNPHkMnBcpkaJORdteojh7w9JU+9zdP8t33bpOcy2FfELs6KGofIrpmmE5rms1MlIzeiREeM6RollHyqHfxnlnnPr+dUZqb737I2cunafQXCTi8Tsi6SwI7BzAmRYUgSkkCSdrE+m0pZQkMdgfUEyucVeuir5uev45MBVQJmWJr+5DFX3qe/fEBrbwBQKdokZkC66bkaYeAppEVAkAgI4C1hSXeuf0RRimOFWk0t9DzMszMcHv1SUoWef7UmyXat74KPAgh/OKfdfLXvHcg6k+Y1ZKzEVoQtaJSmJBwbXGB+199guO//SZnbCDJcpI0pQqSiR6cdHFe63m2ZJwSErxo2lFgklhfw9wONi6VR19eEHsknOf+1jo3Tc2vXLwSB3Mx8Zgg7iLOih4fR7BywupE6OchxvzNYvxUIqWCsx4dhGBoK0eVGMnQNApXBZG9BrBecf/2IUd3t+ifj0G1mbj866xFGOzyC1+7CgquHRwQnKOsFEmRUpaWcW0Z10Z0M6Ulz4WTJnmc0dDOGDFJl4+KIkqBldiAuJjYTPCcvnyKpMhRXlxxCA4VvQ+EgOEJuLjJNEnW5+hwk04jxdYj7t9apxxbxpOStFkQ6hprSpklFQm0FEwV49GUxnOf4uLlZyJXbzZQFkpSkbTnTISP9x2BQL/dZX94iAuOxTQlVwiHMJIp1exfBZGEqE+4Xf51yrD/I/D+x/7/P0GSvy4B+0jiF3ws+Qv4T+Pf++RHyRdi5h3vH/njWGOKvxhRIgvT1UXu//oXeG8tYVgO5CZCWBrWh7ka0rsaCeNUMlsI0g8lM01HBACCjhTuiMKpmfYFBbVlMDjiu1u3+JknnqORNJlFrAU1G6DG00kH8DOOmsH5Wm4VrSmS9NHB4OPUO85aAnKDit2sDO8IosPx3mOrikY7Z7AzpJ6WmKSQJtvbODPxqGrCz3/1Gc4tdOZmcpWtqerAZFwznVpGgynjSUVZ18IZi/JomXPITTv/oR/NN6Q5Fv6dszXGKHGhd9LDGKOR+GELyqGIPmIU1PTZT9YYrXyR9TsNqsmUrXt7TCuHLWf5m9GzWgV0bjB5Qgie/Upx8gtP4eTLoLYlEWqUnkMrxuUEsfJ1jMshtS0JIVDEwa11nlaW0kuj/W6crkHkIUaO2Cc9f9owo1PAXwL+b8B/oGT7fZ0/w+QvYhk2y6j/w+iE/Kwjn2eWH6kUuEbO4Bc+y0c/usbp1x+w2l9Gp0LwC8GRGBNziBwKj9MBVIjqxpn8OH6KuE91kHJrFkMQvMfWNd+8/TbPXXmatU5fLkMR7uOUzIOEjKci7czJ7a6NANQRUGgluQQPOYcKGh+czCkigq0iNI0O2ErAAqU106MhL5zr8df+rRfJ8wXpG8ZD4cE1m4BGmYBKNCY1/OzLz/L+2w/YPByhlOSVlLViOK4pUkU6GJPlhkRDmqcYZYBUoFsFkMp7EAKzdC2llZSQGvBWFqd1Ej5kUmxdQoTcTZJhVYYyPUqdkWoJSe2mhrdZRK1/yGB/QLayiK0cNiJ0II22Dw5faIY7Y+xTpzksB9y5e5/jC2t8583XWVte4PTqcW48uENRpAzLilaRcWr1FPuDPbSyNLKCp09/mmbR5HA8YKndY8lojrQnRFZHxFlRGvzcR+6Pf/60Zdj/E/gPgU78/yX+DJO/mqtrCHwsqJWffRPxzUpnE/MZaTHOSTyKJNLopy8/ye2lNpNvfMjZhTVQYpANjzZgjSUoQxaFYrX3UZMvpgUqCEUjRGjX+4gMOMePbr1L79gaTy4dExJg7GW8B50ZGULGhRWsOJcITYdIwRCTt0JnJEpjI/+KWUmhgrAJggxKU22EFJomVJMpWZJyuDNl94M7nPjMssyCGg3QDfz4IZ6ATjLRpriaXqfg3/ubP8v/+7/9HfYnY5zzjCYW4z1ppkkSRSMbk+qAMYpmVxN0ggmKJNOCaAWN1vJjZmColGiFAo66roS9qzTKVDK49A6nctZtizNNicxoxllMZh0NNN1eh8M7clDV1mGdipJtcezRmQEsOk94OJ2w208wO7vsHOzTyJqcOn4M7yu0URxfXMMkmjPNJllq6Dd6nFhco64l2cwHz2J3kY3tbZY6PVaLlFsTCY8KYZZ+7eUwiHOaf+PNopT6RWArhPBjpdRX/6S//6d9Pp78tXzlydBMpCn0MC891Wy6ijSVOjyaLM8ippV3UqY4j79ymu1+m/pfvMa5xqJQXXA442RxayP2RfjoYxybc+/iFFv4UKmXnkXmD45b63e4p2t+5ewVTJ7io3zAV2LHE2yY0/VBjPl0ojAmjVoQEU2ZJKGpCwz60ZwlqPlk3Mc3KngFVmOVJ5QVtqohSbm7fci7r63T6Re0T56G9iqJKfDeM7xzg8ZiD91ZkGa7rjh2rMPf+htf4x/8zjuoNKXbzun2miyfXKORaB4Gx77xLATNKZvSTwSlcs7Gqb2gg9HhWRAvpSBY6smY6bhEEUitFUd+5SnTBX5rqPnRR2O+eHaHJ5Y6nC8KGlpRjCy7/91rPP2rT3Hz6gs0br8tacUuxOStiKVE+cC0qTj5i1/hwqkFFlo9nj5/Ka4dD2jev3uLRpGztrhMajKG0wHbg20SlXJ84QRJ5JKdWlnmo3t3ePr8JU42G4S9ITpEqkvsThWIS/8nrNk/zc3yReCXlVK/ABRAF/h7/Bkmf+VacaZImDrP0HomHizEvA4X+xQT3Th8/CYRb+M4M0kiMc6vLjD4tZf56J//kCt6gXoWa
WA0KcI9s37W6AeMBhN8DCMgWuv4mCYVOBoO+M7eXX7h2c9S5AUecZDxwvAEIyo+HyzKJARrpYcxGS5YKW/CDAKADEMjSRlWE8Bg0sDMG9jFDxmUp6wmJGnBdDIlzTKcqzG5wRrFrffX6e8c0ezcZPHcSY5u3WXz+ianP21IGm0p3RKZj5w9d4yfetHzP75yl/bSImqhwwelZWoVV5cWMEXObpoySBJeai/w7uSQZ3WBDVOqqqaTGqEIxfcgOABHNR5zuDPAB2g0C8o7nvGrO1z6313hyW7NXeP4zfeG/Lba59xKyv/hieOMXrnF1nc/ZPj+Ov2vXqB/5UX89vv4ELC1lGGi/Re42JzocO3WQwav3sT5miQ31HVNq9nh/IlTvPbh+7TbTbL1Gzx1+gnWD3aYTA/otRr02h06podSiqVunx8O3kZrxVKWSq5ocPHzyHjCeYcO+g8BO//amyWE8B8D/zFAvFn+TyGEv6mU+if8GSV/aSVBQ8pEt0evGDuPDTFaIKJlejbtVo8YoxFCi9VmQIdA3esQfvXzfPDPvs8l26XQBUlkGc90FwCJ1nhn8TqJdXiE2gMoo6gmJd+49y4vXX6S1U4fQpyJzHqZZEZzj+xjL2iRVoYEUeQJgqLFFUZJ7mWhU0yiI3w9+yYUaRxmosDkGltKGalTDdYzHU3ZPBzz45v3GY9rjqWOl547TkOnJFZJ/1Bb0jQQdAIqRZmMl55a4/c/3GKrdGx/eMjmzpT+023OnzjOvnU82e2y0JCIc5u22TMNFnSP10aHvEzO9IMHZH1DfjwH5fDWMT0ac7A5xj00lDe9mHsnhhvZ9zj5t15i62DK/u4h1bRm77blXkcz+v3XcV1PubmNu91g/4vPsDK6hY7olKQfSMmmtaHTatLOKlbXznIwOCQxOd1mQbfdpdvo8BdfXqTdaILyZCbn3LGTc/BkNpgMIdBqNCmtxXlPN89oZ4ZR7SJwFPBBzeH7P4ue5Y97/iP+jJK/omKUqbc4ZdBE+DcOWmbYvo83QQizXkN6mlmzrwI4ZJLuWw3qX/0iN/7HH3Cl1Og8QSmhlCdKzx0kZ0gU1omqJfZLPsBr6zdZWFnmiaWTEBtCfBABmH6kpAzeRZaBmFTPciUh4JSeN80qyPCxmzVZL/fndBfgEWQZbzwfPLV1QuZ0FhVqKMfc2tNc/MzTnFxb4L3vvc7DwwkXEg9pxnj3CFM00I2O6My1RqmENAv88qe7/P3vvsW71wsY5Xz6dIu7oyn9ImfqLHcmY042mjybN3i3qjhWNPlqq48yio039ln/l2+RHitoXGnRutTAbeecal3Au5rhdJtxe0SeZozv7/P+2w9499ZDnPMUnRZZt8u/Osz49a88z+Cfvk+9ZAkPjvhUOyFc/SzT9TdEGFdLxqfzFkxCkhqef+oSmD7jejIfNlZuitYeV084GI1p5B3WD9bpFz1KO0YE3IFEG/Iko99cIU1SDkcDFto9+olmXMdaIt7mgjx/Mjj8rxvA+k3gm/HXN/kzSv4KiK+T9Zo6yI0SP1gcGoUYI23mNGvhJsmkWYzBZeMYZQg4rIfQzFF/5SWu/9MfcHUKqpFHqncgszKQlKGjoFheRSawMazvbLLhJ/zSmecxqcH6GJUXTadC3Gh+xo8KQeDUNGrUXUSSnGjIhfYh9JbFtCXljFfz7182WJjD2JVzpGlKXVYQNKoc4csRu2qJZ579NEvHVvlqt02/2iCdwODGbVSAajgmX7FxluQwyqKD4/LJNk+dPcY2ms03R3z43S16icPljp87fYxO3mAjzymyBnmi+eCVW/CjDRaeO4EfTAmpYbpXcvTNIw5+NGU5aXDmdJvt/+gLmN0pvX/0DgufOUXv+VME7XmqTnk4lgOp14CWfkD2pTGr559h8kHJ5jfeJ98c0HvqPLfvv09WSYNv64osswRnCTphMNzgo8P79JsdNvZ3OLdyEhumeG/IEx1tdT2dtClJayGQpRnNTPzVUAqlDcf6C+wcHLDY7nEsz9mIWS4z5FWImBFA+gnPYzHBD3F+Mnu8D9EMTkobhZIT1sdsjjjkUvE0F9wvzBt9Zsle3uMbBdWvvMSH//33uOpWMWiMmbmRxCtfKSpb472jleUcDYb8YPsOX7/8NFmaYWdDLK1xLtr+zBC5qHiUYCXQKpppRNmwyKMFQZIbDRa1CNIkOs/FwWhsbGPp4AlYLOBQPlBPS2rrOLG8TNFZoN3sceKpS7QRh8j704rxvT2KpQ7eVphMSKVhlmGJ5+go59xCzkhvcfKk4sJSm+WVPpdOHGecJKzlBYXWnAjw6r/8LvbmDtUHm5h2RnGpA6VHbSiWioLTv/ocjbTBcDhE9XNaFzrs/9b77P7uexQXFvmLv/4kP1w/5OaHE55/puAXFzSJvYs5eUjnzAnaF15msDfm2/sVx1ev4Ic3wQWJLHQ1Oi3wKtBtNXiqtUyaNDm5cJzUJFhXo03KaDoRBoJKSIs2w8kIdI2znp1yRJoYFltraBTHlpd5sPWQK6fEwEIdjFBGz0GAmeTgk4aSj8dmCYEqQO1n5m2xgVbRsEIJYVxo13LKP2ILh/nHeORxbKLJdwwh6rTwv/Q57vzTVzjfXROVojFoHyTjxSiSoMS4zjr+4N4HPHfiLAvNluSMzGyaIp8sRIoLsfxy0TlfJRqPJtUpwcr/E2qCjZNxJ5ugo3KM1jgfSX3xpFBa3C/nF6sKJIXBlxUBj0ka9Lt9unlByxSkuoNWY1SwnP35L7D147fxgxKqEvJSPoYG3ISjvRG7m4ds4mg/t8io1eStsuTsoGTwYJsqJIzQPNHp89Jih0tfeJr9++/itqZkz3VprTWZbE5wd4Z4D4vPnKEIhhN/7zsc/7svk//cVXYvdXjwcIMPd7b43n9/nfsfbKKOFL/1/gK7VwuurB5wvhOoeJ/F9lfZI+Xh4SLLqo2zjqr25M6TOEkMMFmK1oEHD25wc29Ep1HEEEPPan8RrVLKesJip8e0ntJMC3pFh1E1pZW35kPqEDyrvUXevH6doGA5z4QdoaLB+6wvloX0E9fp47FZkIk7RHr4jA4SiCewwHzWB6yz0VVkhv07DHFfzWTB6tHHCN6D0dRLfUZfucreN2+y3F8kiTLYaHMvFBtjePXuR7S7HS4urko/4RyodJ5HSaTOKIX0Lg68r0FHDb8OeGqUCYQgsLIXOy+pi4Pj8PAwfm8KFUM/QywFQ1QYokUam6RGvh/nUL4kKVIu9nqstVqocgK6gXMVClh65jLTjQciIbAl3llMZjBace3BkPWBZeP+EaoVyC4ams2C3/mDmwzKQCiamEbOajfHnMxoPNzHfdYw+eER2fdKOmf6dNodWo0+VWk5vL7JzuubHAwPWUhHvLa+zjfu32NjY8zGfs3TT5zlK195kf5Bi9+8dsDPfr5kp3eSC9kNmsGjkuN096e8urdP4+oq9WGBsx5b1SS5JWtERSVwbLHJ8so5pnVNM2/SzHK0NiQmnfsVB0QBqxR0GhCCkgMq0oy67TZlXRKcp2MMLSPmIImRG17NGOaP+80CkEV1
pAvSNyRzIU4g0UosVFU0OyA25Uri7GQRygYJhGg7pOaT59mt4584y4Nbm/Q2xiRFC0XApKmUVkZxd2eTDTvi5889OzcE1xE8CEEyW1Sk44TgI9Uc3EzsFctJkXeJnaj0ObIhZ9/bvZ1dzEKCqmSmK4fZo9tUzSgFUb2YFvk8KatG084aZImhGk4JupRBZHyt0qU+uujgS4eiwk5LlA78q+895P4HR5hTHdaOJ6y24Gh0xKefafOjV44oncWkLc6cWOCpL57m/n/+Hfx7+7QuL1Pd2+Pg+gY7+UPM2MuJfu0exz93guWvHWe7nfKRarJ88Rlu6IrmcYNZzmmvFhxQoxYWeOUh/FLf84E7y1nVpat6qHM5K124tltxRWfU1ZS6tqTWYutKSuYko50rdNFFqwLRLP3PF7SKszgQJMz7SMKM731qUhSGwWREr93hiWbKfg0HVkwdGzow8vF1/wnPY0HRD8DUOSrvcPGbUzMoOFK9xYUx9ilqpuN2EPk9PjghTMYFrtWjhFoJeYpS4K99mtv1QLy0vKd24pU8HAx5dfc2X7vwFEWaRZd3gaRrF/2ugo9CLuLQVLyUfYgalhghpZlNV6PV0NxAUFGOp+w5x8m0G5vLGdQZdRWR0PiI9ymAQpKnuFbBIE24tXebo9Em5XSf6mgL6omQNk2KyVso3UAXTQEavMX5gF0w9K8UrLgBJ97d5YQv+OLZ05zoNam8lQRg63lvt+Q/e22HlX/vS6y+eJns+pSTT12i31yh316k312gMBnu5g6vnyn4veM5v+Vr8m6fnz95jMS00Cbn3pFm/SNLt2841XW8+aDkt14rcRue//tvD/jtD9/kqNqjAv6HP3iPddcgmATrAt5KiloIdj4wxk0I+LnkeYZizofTcafMNseMdQAzeyY4sbLIztEBCsVinkrceZhZIM2JLz9xnT42N4sIvmShiToxPJqwhrg+4xBSRhNybYaP68VDmL+IegYa6NmUVpARXxSUX3uSvd98n+X+EoSA9ZbvrN/khTOX6KRZHFDKV+KcFaa+in0LSM+Em9PstdGo2srnCvJDIzQK52pQcVrsHNtHByz2eyy0U25Mt6JxhlBcZltqNvXHAqlM+b0Hk+WYPOPN6x9i1xZYq8Z0Jtt0T3qSVk8YvzqTWy/2exBwrmZtWKPTNqM7Q8rRlO3fex977yGHCwuEyVgYC2NF9XDKGwdb/I7+kL/yt0F/7TRJr4l9O3B454Ci22b5hGHpxB5v3Rnw/NXz3M/lFvSlo17fpBxNyUPJOwdDPlI1nzrXpJFWrDzRY7l9mX/nqbM82N+naGmaWcrzX3yW4+FNlDFR1epxtsKkKUoLgBPcAJIes0rBfwz2n2VFzg7ZMDtkwyN4OITASrfP7fV1Lhw/RaJTtqKVlgKGHycH/oTnsdgsMr7wc3O7j5dZLnghPMKMMIbyfj6MFPpF/P34nUpjL4TL4MUPaqZWCABXzrD95l06OyNMXvDeYIf+Yo8z7R5iNSeeYUaL04w3CuflbUh1MkfSpM8QzQoKETH5QDCzNykabQRFsOKHfONgj0vnTpG2IGyGqM6b2YeKGE0WuECbRiu8kfQyW0OoKu7c3mWyv8slE1je3+OCtzRXK5JeP9LMRYeC99hyilcGN7VMbz3EjiYE5zna2aZWQ9LPKJJ6QF1Pcbbm3IVF/v2f3uHpsyWNZpveZzxlWKJ/5SXSac3WP36LpZe3cGqLrz/o8cZm4HubD/G3R3zrjuWkCjwxHtPtNbC15npngVtVj//wa4E/0F0We56xanFtfcBXOo7ivS2mNw/x5zSYgKslGjyzLvLtpJdTYSxsDmb0p0c3ycf7jBkZU4R2Zq6IVRERe+ujGxBgMUux3mFMNof+9exU/gnPY7FZgD+80EOcO8QN42fNb3xhTIzUE4QK4XrN7ECj630IQPSgDt5FMwJBUqwC9dPPsv1ffpvy6IgdP+Bra1elH3IVtVXCI4scMufc/LTyXpKIjTbo4DCJIlXRfywSD4NzhBC9jUNkVgWP9Z4tar7Q7VNrS25SplEI5YkOKy6+2d4TjAAYSYhMZB+oBkeU4wOK0GTLwEfvbKA9nCo9XQJJuy20mzhncc5DkrLnNEeTCmXGqNSxeukUJz5zkvcCPHP5DPevaaZ5l5d++STPntqkqe6Bqgl+ws1ywqvD+zxhU9J3bpJ/5gKts4ETjTbYJpPuCb6VOVafbfK3tw9IXrlPs7VM43Oneem1e1zfsbz50UmOPd3jTm2ptePL55u8Oxzy2UsrLBWWRjWU0hlRpQqp0qGMR5sEfAVuhE66kcPn5+jnzId5dnDOUDBr7aP5iYJ+u8u4muCCpZtmLOYpVSx7BTAKM/HqH/s8NpsFJTF3PjbBykWT6tlMQytUmLF445yEj8PF0kPEbp9ZtEA8h0AJ7dvMXtDlHrvPneT+v/weL527QKhqKj+RFN80QesgfVIEC1T8eC74uLQ9nW5GURSUR3VE5aSHEa16mPsIx0uQg+mIZrNBYRIyY+ikDSZVKfoXIoITXUpMopntbxfz4VEwHQ6ZTkZMCnC9JttTz7feuMdzgwmXphWLZ5YxnSYmTdEEsnaXoFLWVgr29TJpdobljw7pTFKa1y0/8/IZfrNZsXKh4v57G7z99g7/4O0Rp5LXGVaQqSFVXUF2hqr7KTbXpmz+k7dZ6xfkxzL6p3f4tYtn2N2fcubDdfZ+eIO6tqi9I7IPdyjHI/be2sRt93jzRznbexP2dIunP3eBlaWME8mE3lLKdHNISERG7J2PpVgteTmk8v65IUG15ofmxzfJrJchMiVmr/oM5AFp8hNlGE2m9Jodjuc5e2WF0Xo+v/ukJv6x2CyKGdLwyG9W2g0prWaO5zO4ddanBHgU3Bqv0NlJotUsvEcGgzq6vAcgNVr6h1XFiX//F9i9scPRvT16JfS6HdJ2iyTStkkkP8R7R0K0aQpyQ7WaGa2mweaa/AiyZsZwZ8RwXOI8BOeEfxZqggvcP9zndLsnsd86YSnvsDXalzt1PtgM8fuVGGytRXWZaKGbKzvFlRPKKqXX7fPCCydZv7XPGx9tMh1NeCY4+qeXCO2CYBJMmlHXgcHuHt2pmNdtJ0fcGpe4Ow8Y3n2Hg7LCtwxpq8Hed3v8d/uan//CFX75p1Z4r3GK02bM+XafbX+cYy9mnAuB4f09dj7YYGdzlzfe+za9D7aoPzxka7lBcSpncm9CdesBo9IyCQ6zr/jsV89iV89zzy3yleNdbr55j8PxhIPBkI6pIddYa4Wt7UTSEJwjaMm1CW5Ikh2TEvtjtkViKRXf+zgoho+thZk1LbDU67O5t0Ov1aGjFUcaGgoyLUbnn7QhHovNAkhpFenysu6Fhk0IJJFi8uiGFKVbMnfc1zEUSBys/LyfiUnFkc0qlPzYGHrHsSeukjUaqGfO46ua/Z0jtq+tk97YojN2LLbaFO2m9AGIfZVSTnB5FzAmwRhxpDGdnERD1s9IdGA4KglGmnrrxHPs/mjET60dE71OgNWsxfs8yr/UxmDrShAxL+WI9WAS8VMmeAqjmKB
-      [... remainder of the base64-encoded PNG data omitted: the first inline matplotlib output (the rendered input image), deleted from the notebook by this patch ...]",
-      "text/plain": [
-       "..."
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "%matplotlib inline\n", "import cv2\n", @@ -678,32 +135,10 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "6cab7dae-8892-4a16-ad03-651fa3bb20ee", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "forward time: 0.030s | decode time: 0.004s | " - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)\n", - " return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)\n", - "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/torch/nn/functional.py:3609: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n", - " warnings.warn(\n", - "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/numpy/core/fromnumeric.py:3474: RuntimeWarning: Mean of empty slice.\n", - " return _methods._mean(a, axis=axis, dtype=dtype,\n", - "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/numpy/core/_methods.py:189: RuntimeWarning: invalid value encountered in double_scalars\n", - " ret = ret.dtype.type(ret / rcount)\n" - ] - } - ], + "outputs": [], "source": [ "boxes = nanodet.infer(input=img)" ] @@ -722,33 +157,10 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "d7129fe6-a198-4196-b35f-93ba41e50031", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAMsAAAD8CAYAAADZhFAmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Z7BlWXLfi/1yrbX3Pudcb8p3dVV1V3s7PR7AzBDAACRBBEELgY+iUUiiQiIVchESpS/6oC+MYMSLkIIhQ+lRIt6jA0HgAQQGbgAMzHiL7mk3Ve2qy9trj9l7rZX6kGufWzOYbsyD4StG9J7pqGvOPWeblSsz//nPf4qq8t7x3vHe8Ucf7r/vE3jveO/4z+V4z1jeO947vsfjPWN573jv+B6P94zlveO943s83jOW9473ju/xeM9Y3jveO77H48/EWETkL4jIqyJyXkT+8Z/FZ7x3vHf8pz7kT7vOIiIe+BbwI8BF4MvA31LVl/5UP+i9473jP/HxZ+FZPgScV9XXVbUF/i3wE38Gn/Pe8d7xn/QIfwbveQJ4+67vLwIffrc/GCwv69KhQ/PvRcT+Ld8riiBI+Z2Tg69ByysVVftOy9eU36oq+a7v538CqIII5fWKiKBAzvZmWXX+9/aZIE6gfJYT7HzKOd/tqefnICB3fWb/t/PXl/dwcvdVM78HSn8eB++rqnxbTKDfft13X9t33le96/X9Cysiztt1LTYVToQ7exNyzDjvWBhWbCwu4sTd9UzsyJrZGe+Scj44t+84Hy0P59uu+zvu0/zBlB982485OF9B+M6ISL/jb+Q7PgNA83d8j8Jd91WA6XhMbNtvfwjl+LMwlu/pEJF/APwDgIXNTf76P/2nOIGktgCHPlB5T8qJLmcctiAd0HjHUl1TBzdfNME5uqzsdx0pQ5cSVRXIwKyLTFOmS/a0quBJOQPQxQwIOWeQg4eQE0xiYhoTs5jQZIu6CZ6m8qgqMcOg8oxqjwiM20jbJbxziIOcMl1WnPMEcagAKDkrMWVUwIvgnSN4RxDBe3toThzBOcA+Z7ftmEw6ugwpKzkpMSo5Z1SEnO2h90Y1Xyh3XdPcVst61GzXjmaOhm0WBopk4aFDy3z41DH+77/+FcZ3pnhRHnvsCP/ok9/HyugQOScgl0WWURJfPPdlbu7v0XV2v2Zdou0yMSa6mOhiJmk536ykrGi2e5GybU5y17lnzShKjoq4cn3Z/l6AlJPtBP2mlhOobaopazEE7LkiaE6ICFltQ8wpoZoRhZRy+Tz46i/+yjuu2T8LY7kEnLzr+/vKz77tUNV/DvxzgENnH1QFvPeQtXgPuwEpZ0QcIhBTRgS8QswZkt3Yxntq51DNjLwneyEFbzfWedRlpjHhyFQhUAVB1dNlIEPwgVkXaXMipkwdPFUQfHA0yTPtEjElFKX2gYW6wgfH/qyFrPh++1axxSliiz04ckw4J4hYzOtcoCPiVMhlcZMzzgnZOVJSghN88TZZYRY7YizGnCElRXO5kWJGad5wvna+zTu74Mkpk8tiS2UDD84joqQEbRJGWUEzdyYz1kYjfHA4L6Qusj1u2ZmMWR6WzSRnW3RZ8d7z3AMfIKVoppMTMUWmsSNlZda1tDExSTO6NtKmzDR2zNqOWexoY2LadsSUmM46upxIWYkp2cYSEyklu85s1yqqc0/Wu/0YEwBu7imUFJNtUtnO2dvZA2Lvp4qUfxUQ/86ZyZ+FsXwZeEhEzmBG8lPAf/Fuf+BEWKwCTQhm+QjemSG47EBs53chgNpipnghykVmwDtBXCi7VabDISIMvIcapjmD2utyAjQjzpFIiEDjA05S2dVlHvLVwc8NtQq9YSqhqYjZwpCYlQTzuCfnTHAOH9zcezkRJCfb/fsdXyneL5VQUWhjYiZC8JmYMuNpZNpGkgqxSzjn7cE6wWVBne2YFsHYIkZBRRHnEOQuQyrhXvFyKdmC6xIQM1IF9mYd3gmjJjAJgW48ZdImtidjTpSFpiUsdt7uce1rfD2yxVd8WMq2UWjWcl4g4kqIlnFyEAIrxeugxHKPcs7EHGljokv23yxF2i7SlZ9POjO2yWxmv+8i07ZjFiNdzLRtR8rFCGMkAalLxC6Sc7bPVPNwZLtf73T8qRuLqkYR+UfArwEe+Beq+uK7/Y0gdlPVXGxVFmpSpXKeyntijihCJcF2UxFqZ99XztkjLAG+QwneE7wyTXYDKgV1xW0jzLK5Ye8DsY22g+WE9446mLHYBQnee9TbjqbZdrfgHOo9KopXR9RI0gwozjmc2ILKyRaqYglEyhHnHd45HH3OIsSczYPFTNdZyNDnZaoQo4UuOdsiTKo4JwTnSArRdg6Aci/thvQexnu7dkXKywScImrft9Tk3OIUYlbamFipK27J2Iypjdzcm2AuzePEPHnwnpTsfGOa4sSRi4dFIJV7ldU2B3Fi+Zs62+SSuUhxDnElFJVcrsPZJlk2SxVFxEJT7z05pbJB6PxcYurDrVzOKRVvqnQ5YptRy3gWSZpsY4qRWUpMZ5EXf+EX3nGd/pnkLKr6KeBT3+vrBXDzjBu6bA/eid08EbvhglB7R5cSQRyN93j0YHcqO5TlOoqq0KbEbhcpObnlE6p4J5DLzUTNM3lviWEJ5YN3SCg5AmXhCkTV8roSCmTFCwxCRUyJytnumcUWblNXKNB2EUQsbCseLiZoY0IV2jbRzqItfLE8zPUbR8o4723HTWaQqhaSpZRJKRfjgNiVkI2MOkcoiZB3roR+ZvheHHglSSZqQLPixAxva9ZxZHmRN69u4xDiLHJ1a4eUOqqqIsZUFrPd9zbu8/ybX8cB3gfbtJynChXOCbULIJ6sUIWaygecOLwIoXwdQlU2Go8rz8chJAQRV9ZInueXYOGoiOBcIGPPXNXCXbCc0HmlEmFAbQZbN6wN7b3M6G29eedZHjTvuE7/e0vw7z5EYOC97RY5M40R5xyVdzhnJ2k3ETyCD1XJSxUXPJrz3GAsujAvlRUq5xh428WmMZJSJnh7r+yECkfKYL7JQoiMGtCQMxbClvgWsRyoSySxJNyJ0okybTNttNwDsGRWKcmkMusiXYmNc2S+86WkxGQ7/nRmHk6KR1CBTNmJxcI5xX6OKs7bteec8d6VEPYAAaTkFAnblTUrIh7vfEmv7HO9c6gLTFJNLXbvru/scnh1ERDEO7pZx/W9CbPU4X2297DAswAsNeM2sTcb2+bnPKp5jjaJCjFHS+ZVD3IPgxeJMeOdUHk7P7AcT5zDOU8dPMEbkOLFU1c13jlqHxjUNcEHquDL7yqqUJGTRQ7OGZbqxOF9mGNqFsKnfhX+ITTxO497w1gQQnD0+K53jjoEqrIbmk/ReXjhihcR50CxUCBnOrU8RgpOKyhOhFEIdFlJznZkX8IewWDhSiyxa0teQjajcSWksp3LG6iA7d6dZmZtMrQnm2FapOWYdpmYkuVUQFahbbMZRkykFEllgasqqYSKKRmKI2UhiwpCLFDpPIK0RVgMGOzcnTikR8GczmFv54SYbXGGEOx955B7nzMIOQT2ZI
Ul3UXEcXV3j0c21hARQgjEWeLW7oRZN2NULZnxFWTJeyFnz51uma9fnNEEpXJK7YVaHMGDSMZLwGnGi3kElZI/ZkPNlIx3roSbZePSHrxQUkrzHLW/HzEpOWWqyuNFqGvzSrV3ZLK59/LzyjnEeSoRhnVFypmmrmy9VYa+TtrJO67Te8NYSh0il8SzEj9HmFLK4B1Byk2kQIgIPkeiEzQq05yZZfMsTXaEAgAkiisWYaEKOOeYJUs8HYrDkSvIMVOp7eizmOk04cRDtM+cdpE26YF3UObnU/mApkRCmc0MuRIs5LEQqYRKBa5NqmgWugKDKkLukgUZauEWGVw2Y00ZckpIQfzAwjskz+9hLmCFqpSF3IeNglgCQ0qJUDnLG0qtIidb9E4gl3OuBK7s7PHB+47g1BZjN5uxN+3Yne6zsXSYnBMiSs5tOZ+aplnl69dvmUcs3rB8Ej4ERC2ctmVsniQ48GJhLKIElNph32um8ormyLDyoAlPpms7QxAVuthBEoYDWGg803HCeQNHVO19U874qSfnRC7f9/fHPLyzUoIqO+PpO67Te8JYgHmsnEs8P0nJ4EGF1HYs1RXeiX2fFRXoYqQrCeksJgspBJrg8CUsS1htxoswCB6XM11MJKDLGdTRJctJklpu0BbUTLFQSwVmUdmddkzbOC8iqgrOQUqdJdkx00VbmFqSz1xqCCmbN8vF+2RRwJHVMP4+ZAre8h7V4tVymj9YwM4rK5mM81LqJWr5SNmRgTnM5BCSZjOQEvYJiojlhOrs3z5UyepQOnb2ZlReOLaxyFuTDp0os0ni1t4u92/aZtN7QqsdJU6vr+AddFlw3kEWspTNIGvZDHpD8khy5GjnBnavnBScV7UYv32CLwhcKdHii9fxYuAQ04TvpIAvSiXmwRxK5cqWJBlPJghULuMNg0NzpnKAZvJ3FIXvPu4ZY8kqjGNkHBNZhUnbEby3RZlhPyrDUBI/A0CZxMysS0y7iBNPwGoVbUy4kn8kNWTMObECYUkCY1ZiSXZz2WkyB/BvCK48EOhSZtx2TGYd05ktbCclfPSOmWarC5TCG07w4tCSdGdVYvFG/WLud7Ky6eO9oTypQJmUHV0K2uPKwnFygMzlZOfMvBJdQivta+Eyz4EAYk6WrBfjdC7PC3VkC9mSOtQ52tiSJXDk8DJv39lDxoG2TVzb2bMCX2gQqXHazUPiI4sNo6ZiZxr7VNzCQ1EUNw8NLT+wDVJKQi6ai6Fo2Yz8POz0HGDMZtCeViNeHAlHp5aLkQTFobGEsti1eZiXJEQOakSW+hUEEVsL4/TOJnGPGIs9wJnCuNM5Vp5zS11ZssfU0VSOQWUG1EYl9os02S7SKiiOjNUipGT8VVLEQdvDtcV7xVL4MpTGvElULcl5xleBuvLEWWTWKbNW6bo8TwmdCCEw90Kq9nA0lqRaIeUI2K6pJUzMUophlAfnxIynFNcEmZ9jQXZth82At53QBd/X1uYh1QGdpdSeosGo6ikvlGK89rmueF1x9vddVpK3hQuO2+Mph5cW54BDjpGrO/vEFPGuLiBCMWCExabh0aOLvHFnUvKJTMIq6jHbNaRcDGGem1kYhLh5dV6Kqbn5fVBb9AU2NqTMHVTinbMibwnncjavA2qGxgFabktC5veoR8JUbV28S35/bxhLlzN3ph3TbDDqeNaVmyt0bb+7ZqadsE1LcGLuUsuDBirXx6AJQZi2EVWDf1ux5e3dQb4hqoi3sEvQubHk8mA0BAY1kJX9Wcf+pLUkvdxoESFT6hsFWtaC0OX5wsjz3avfUYHy5Czvct42A4NtbTH0iz5rxokFIF4cSS20cyGgapwt0VIiFHcQOjohluTelr07COsoiwpLeVRAe+qNZpx05hm958buPidWl/DBUzUVcZa4vTdj0o6pwsAQuOwBByUf+d9/4lm7LzmR1ELemC20ncXELGYmMbHXtqgIe7OOqMr+tLWiq8KkSySEaZuYdJFZl5ilxCwpbWEytNEZTSUbtJxzLrUc8E4tnRPoUS4pz8l5h2YL3yz96zeQUiR9l3V6TxhLzsqdSTfnCXUZZm0kJcV7RwgeRIvrFnCOphiJiJtXe+dkulKRVRVaTbYjiTDpLBcIBR7OM+NtxWRJao7mVapgBc0uKpPccmtnyngaEawIidruGHMmqVX8c7JCYZ9U52yJuiE4Ugr7ZZfLZXGXJyN3wcG21dvPfF9NFlvg4svfF9jZEmnFqYVxsYR2ThxOe++T+zVzsItiOZyqWshW0gRHohKDv504ru3s8MTRw1TB0QZHO+24s9cy6SasFmhYtc8H7FyHVSjvZ9V4rfPc81HyItE831hc8XY9oVRKLiiihTUhlpijpFIOUDWgpY1d2aSE/a5l1uXyTGAaM23CaDQKsxTZm0Wm0cCaWcrMusikNSOcdYkuZV5/Fx7+PWEsSZVPfvkXeeziN0EcyXnUBfJkbLvswjIudRBn/OsP/g+4cvg0w7pC1HaKlC0niFnnhL1+MWoJB4qpIQizmMjJEl9L6czxp9zH04prEyF4xpPEeBpLUGAP0ztBJRsAkRQVLcTMEv/fFQZRPIYi84Q8Z0O+fAEPYtnppNQ4kL5OpFTmA+YEQGNFzzFpfOHNGRHRfmzr0q7fvAlzxMd5j9PCqeoXRtmFbSH2xUvlzrRlaWh5yH4AplbZ355OOZqToV1y916sZYe2e2vejHkBzEmpvTjL5wwwc4Xm0odfPQpamNjOW25YjL0v0C7WrtyvHtDIxpvLkeDrOWp48Bqdb7h9oth7fEXpohWM/+Z/tfCO6/SeMBaAM0sjPvjQw3DqYRAPzQBuXYXRIhw9BTcvk1/9Br/c7nOpLPhB8CzUgWEVaJNyc39KFGX+/Eq831PPY++2s9B2xj9K5WZ754k9BSZ5Q6Wy0uWC4WfmYZVzDhc8zpnXSIVTZEZhib6UCrp5g4O6hveuhG22sERL3iCG5Ihz83+dZjPCHvEpu2qh/JW31bnX0H5nLuCG9B4M+50hVGW3LtCAU7uemBNIwMBZC+X2ZhEFVheH3Nka09Ixm0Vu7u3z0OFUwjt7bc65FCKVrpsCgnN18fzprkVr1XhxoNmIkvaNzqk9qnkOaBzkYjr/3rnvpOibe1QSTsL87w1lvAtG1x75sAUizuFSuddByubynwEaFgZDWD8El16HqobNE7B9E0KA88+DCFI3PHn1FdbShJQyg+BZGgQa54gIW9OW8bQlFgqEwLz3JBeKvhZ300YjLuaUy7pzpHzAyaqCZzgwGDd2PdUkzxesc71hmHfpd35FSV0qhDydo00HHC1PjPHgfcrunOlZxTrnQhmPSg7soiBpfZ7Wh6W9cQDzcKbvO+kNKWku4WgfyheaRz7oS1FV1v2YQZUsTEMZfXmXj12/xH23t2j3powuDVn56g1GNy/hfW1kSZFy74phqs6pNQdeReaIVP8aVzwYcBfh0vhjWjaJ+XX1fyNmBL2XP7jmsiGVmLIP0XtcsCdyHry2nJtCt36I8emHy8/eeY3+qbcV/3GOzdNn9JuPTLm5epSJOmLZb
RyZUFWGzswD71wapeyCnZRwBgpLVedJ+hyV78OrsrPlgjr1P7ebf7D7GwXjbhdO+Sz5ttdnPajoz2O+/l3uuq19cv9tEQvMd7oeMOgdBlkZpCnTMDx4cX963x71HCA88p0vvOtP5yjZXc7orrBmDjmLUJGoXZq/38ZoCChX7uyR24R4WF4ccP/GGt7X82s+uKa+peKucLE/z75/5q5ruNt47r5BPTBy9w8MxTpYCv2G85039u4crTewg2vk29aGm03w27d4+//y36CDIX/1r/xtXnj+xe/qXu4Jz5KzEsXzzz75j7hSr7A77fAqLC7ULA8CjfcomVnhXnnnCAizriOEwGJdkRTaZCzSaTRIuO+FmRZWsRUIpVArLKzq+q+TxbQiDu+lJLlyVzztqII7iHfLUki9G3eGTHUx9fta6ZcwsqZzvjzTQt7LEGM02gcHbFx1gsvK0s3X2Tn0IBotlGDOgraE1luLZoGb+9/1OU/ZNIV5iFf+lJLLH9QeROZ5g4iwImOODsZ4b4TT504e5cTyAv/f3/4a0ztTglMeevQo/4sf+ggrw0OIQEpWNdesZE1UoTlossqGRErvLSlGWmhE89CzzysASqipJccCY230oVWP/NmGZyEXHFBkLD8prQQq3J23iLPzNSRPqa5d4Mg//V8SXEBDzbvhYfeEsSjmCSYxQeNYGlQMqsCw9jTBM/C+7BIRxZqtupjZnSVcp7Qpz+FkIzsqAV9CJcsTnBNia3SHUDlSVAjBKsyqZcdhXg1XpRAX7SH1CXBPDa9KobPFCJQpG9sYIBfDc87ymVA6OhFruNJckKFCX3G4AkoomjIL4wn3Xb9Is3E/bwK7EojzZq8+dOsLmiX+xhAzFDNAVxAvu8GW95S3mIflJZbvuysFmJWeGR9skV7f3efJo4fsnogQ28j+tGNvNmFlWAy1LFrLX3zhpNmn9XmM8esOQthcOlURmVP85/mCCOSEOIOl5zkPfWjVQyjlMu7yjJTXaGkJMCPO5XW27/iynuxfu3vQd1W+c6R1TxiLkRUhRes9GFTe6iYoXYoHrl4giGPSRcatta86p3PP3rt+h1AFY351MVM5RyLjakddil/RZWad7VJabpKqwbOxUNXBkkDFzk3UAWmObnXJmrNyKuly6MMtC7mCd4grFBctl9E/CwGSziHjAGxMZzx1fZeNN26x3U34n7zPsz2b8srWTV5OmfPNiOvNiD1RoneloFcMolxHj5i5PozMd+VNfejYd13NfSbzludOPV3M1Mn6h3amM5YGNcNBxbRuaSctk1niznif+9YEEY9zOl9odyf88O2h3t2F07vzjd6wDCks0HVhoFvx9mBz6L1IH4c57+bv04fZ86p87zFx83MypFBAjARb07/fwbm903FPGIsvuP6ormjrQBMcDmt2spAizaFZRZh2VhEOpb8jJvMMvsSjrnKMqoouF2ZwXxkujVIqgvdGZ/FtLBCqtRHngqR0XYfjrkKfF5JGNIkZKPZZMVvY4zAqfr9QqzpYjcWIFITKz5OZlKzd1QUhJOXE9h0+fGWbp7dhkYa90Sqf3rtO9/qEjUHg+9MGH3AdW3v7dLdvsXdknberhvPecUGFGwn2cqYjsV5XjCPGZsgHMHRvOL60K8wX8V3UD8QWf1Z7LQLT6YyMsjxs2KkmiBO6tuP6zj7pmLVH9+/n5xHAATLWHz2Uq5q/rQ0BDugnwDy/SIUzdmBs5X3sTUo7c4+Y3QV2zD9T5zmp8+4uozQjE7H2jz6/EueL17rHwzChiFRUnq5yVJV1vGW12Nx2RMtJVIVh7Rmq7SjTLpVKSd+LIFRiRa0uRpwTRoOalBLjmErFXih1Z0NyRCxfAYZ1bdAw2EIru78RB/twwVF5z6yL3+a1nZTOSe/w3tF1hUVQ8h9x1vFIVtZy5NSVGzx7ZZuHpjULfhG/EKDyhBgJE9jZHlPfHtC1ETfwrDPA1Y58oeNMM+UTZFItzIYVO3XFtZi4uTvm1TThijgmoaKraqbBkX1Fi4KGApeXnEcPDActhNJkdRAfO6JmtsczNhYHXCp5UtdGbu6OiWlGHSp6mr5zQtdFvA9MplulrdgbNb/3gFhoBpbvuT4UtZVrzzqO2R9fpaqWGAw2S14Ucd5g4b5J7YD3Zt+r2obYb1oi+ofWvoEy9nk9z9A8vsxD0Xc67gljAUBgUHu6QWXNWupRlXkDmCmEQFXafmPpgGxzJnY9HdsZ7bv0pijWZVkHqOoK3zlms8Rsluiy0kWjxnjncK503WUleAjDmmnboSVG9k5oCs1EpTSiBUFi8VglEfcOuMtQs2JFy5ipXeb+7V0evniDx2/POOYWGdSHIIALFblwuFyuWK1G7MeW9bZBOyVphwSLsfMswwhyFGRBGLVKw5Qj3hbiR7VinynTPMbvZbqRoYQaA9NBZtZURi1KgfEgMK0C06qidcKgqlhzFWF/j6+f/xb56ce5vrvLifUlvvnWDWMKzCLXtsfE3NHMcxYKo9mhmtjav8bt3Wtl43FztO+ASWwIRM5FqERz0SWwVmnNLUlvgbtWCq/293VVWc6GKyCBwztv3ZkIwVmuOs/TROZ5jhMzUnNinpwVpwnomdj/GXDDwBLUhdrTDGsG3s89haI0zlOPrK99ljLTlBh3wrjLpfYgJt5QksQuJXI2trBmJXihdo7aCdFbPaWNmSzW94IWgp+zJLQO5j1CwSBLqcNcvwt2XgUFTXUPAffV7FzqBWZgQWB5MuHUlRs8fG2Xs7OKtdEy1eIalATWaCDWrZkTOKcsULPVTTieRpbvRkMYsmSyy4TsaacRnx0pKHhBl8z7LcTAaGERp44YWlwV0KmhTlaoDLgAWln8kvdK05smRBOa4JXXz7Px0BqLzTIXdvY4srpq3jh4Uhu5vTdl3M1YaA7CHduaE20cW+JeCrnFaRi62MVCyTcLU1FSsp6bXO7xpMggWUHTaFA9QtbnKylZwbavEknZmGZtx6C2NmVrP3ZzrNzWk8wNQ0RYuHWNj7cTvvraV9DhIpPZ+B3X6D1kLMowBFrvi2iB3fxQ6A49Uuqlr5ZHai8sDmpjIXex5C3W3DSNCbJQ1Z5hFahEqJyj9hYGbU9soQxqq7wbNV7wXqlKGDUqMW5Wq8rPUqa0PSABame7UxczEgyaRqASYWU65eTNLU5eus2pfeX44hrD4XFYdPMrtv87k98pgbcko+uMpGYrTrB+CxBf4veWOUXGNZ7sbcE5EeIsE4K3vKwVclRyAr0drfgYlDzOyK5BvT3ooEnJswQ1OENWuJDhQ4cf4FCzxOFLW9xcWCm8PCGNM/uTjp3xPpuLxq/LRSrKOU+gRvwq//ULbxLVNinvoArmkWtvm4grPLTgHbWAl2xiJVjoGpyxiL3rxTsw/YSyiQneIHTUIgJxVKFC1aFOiiSVMdp9iQZSafxLGksbgPEFJ92UKdmYDO9w3DPGAkKblL3SizLwngVfyHqYi3eFNKlZGVaBQQhsonjx3BzPjIhZCk6N93R1pinidXGOwWdCEPMeWPtpqDx1CGgWxGNdePOGKCW4
QJcz+0VMIpOt30UcXZ3w4tE2s7SzxfHbd7jvxi6nJspmtcDC4BDVRoU6V+o4vrBeewO13c/USyiKK7DkGt6abUMoHKZou3+uewgV0iDB1Kr8OSpEoSMiXpAguCBzsT+dKa4q/e+t6ad12V6bukwQj7ZK7DLTQeLqoYZDbhF3I3JotMSdr53nRMqc90JM1hF6e2/MmcMGfYvzoNHagNUxCBXbbc3VXaPMqIjVPkrOZ3mSPyjKOofg0V48jz5/sAatUDQGnPjCgdO5BoIT8KXjMkhlv/PWBxMcpGT/BmeRQV1+58gc2XN8MApfveaZNZ5JfOcVek8Yi2Jkyr02sjvrCAchZ2nKAY9SOwtvgigDH1gIAUUZx8TaoKJNmXFhj1bOMXQWC3ffFhYJC02Yg49NcIyqwKAKiNquN9sZM1oalodoNPz9NpJrTxsVCY4mdQz2djl55TbL17fY2G453lasDkeMBseIrgMsV0oOXIGlKehNSiXXkUzQUhtI2YzJRRZ8xfZsgtRW+9GedZ0A79Ap6FTwlZuHLObaHPVCILuEBFc8j8BQyF2GZE1ZvcCcZjO2mKxAGhrHH0yu8PTp+yzZbg16feS+o/zVt67wy7vXeZ5MahPXdnZJqcNXlTVxlX5G55SFZsj964tc3r1jifucJS4H1XcMHTOCaemYLF5WcwktVTD+nfSZBdah6mgLgKA5I/EAklaxup2hmQfKPKWiWxadyTOdmgQmSfj8Vce4qtjt7nE0zFCIvmUYwPrgUTMKJ0WmSIz4F8TjxUK3SYzsdREtnKGUrdvRetZNjC6qFiUQoWlqhjmXOoqw1tQMQlGRcULtPO3ONrp9nZXNw6TpDD+dMrmxS7y9Q70zpr61z+oks5JqlqsFFupN/EJAh9lK40WlJM5aq7+40r3XPyiv4Cw/EYTsLH5z7kCaqa5q9lJrWsPi0LIra1c8ZFaDbZ0atleq/M5D7CIuutIegO04ycJESUYaTVi9RWKh/5Tq/yxFvsUWf3/jafLY8henAjcSZw+d4CcXhmy+9gbPx8z1rX0jQ5bmOuvMNIMH5f6VAV8QAziMuVD0AYoWmKK4UrD1BeLucx+rvXnmrXYCc1Zy/7cicymk3ggVa9IxGmsqrdkHCjl53lRmxercs6Id8/bmdzruGWNRjObexmTypXiquwiDXc5kEZxC7a1eMkmZScrsdalIsh60iqacyNkbmlYgyarIsi5JxVIV2OsitRNDv7y1JQ+9Z7SyzNv/zb/no+tnGOwKca8jd+CrQBUanKwgweObGpxHnZ2/9hwT+84MJUacenKpATgniHrwheyYDq5fNVnNyXkWXEP2SvZ2L1DbnbPMEyMoMDiSkcpCGM2KpNKNGdW8SRYk90hxSf4w9C/naJ67LLYL7S1OnNhklBxKNPRKIE8szznWrPATzzzB0dfe4O2b20y7KU2zbGhW8XB9Xef7Tx+hrirTO0iJNlOUJU1bYZaN/aBqTX9RhBjtOadkvSkxuyI8opAgkgvXzihDmdJsR4/i27M2Mm1hDxTqTv+7UmGY7yP0T+zb1Nv/8HFPGIuUXb5NJiHkpcfhlSmROni8Cl2hPSQs8fc50SarkLddwqq3uRiIFSn7m9HlzNY0UYl1VVa+MgmkovXrnGlMBTJ7v/kGcnWBz109z59feJRaPTIISBNwlTPlSXHmMZwl3zmDeIeSyV1GgsNVgdx1SBUIIRjFJpfCWS4hWV8QVNPDSmWXq6oKQeg0miRSX40PHqmcJfXlwWbBvFYGFYO2KUYj6iyU6ntJVJFUGtRMN8rkiDThKuGb09t8cv1Z6GwRuqS4RpCBQ2cZHWeWVio+/tgjnL9zgwtffplHPrJMNajufqKIOE6trHBmfeOuWpQWkW8rUDrniLHD+WAMCcF2/54bVkKomKJpl3WJnpCS5loGyaRz1Z5xzCZzO4upUKgis9ihmM6DiDPWhZpAycINx/JXK/7y48eZ1kP+X4M/QQ++iPwL4MeB66r6ZPnZOvDvgNPAm8BPquodscDw/wr8GDAG/r6qfu2P/Az6vVhoqsp2+yKql1SZxkwQKxRab7uYCkzxNsM6mGYvgkig1dI4hEUnOZcbmRJJhOQ9SKaqAkHUEu9sZzH70tssXthl4/hDfOHq83y9vcb7V++j33EOGMQOleLixSHewoXclkp1LiJ4pXKdreepVJ5Dqa5bPtAffVEslcUyCA1TF2liKIVRZ7F84bKpsSQLmpbn54XYt1rg5p7SIyKWxxTj8n1bgEKsYFcmTBcch/yQPDV0S4OgjYOoyKwIZex3VFQ8sXqU7b0Jr//Mr3P0Y8+xdOrovGjbFxHtnHojKUzveb0Fgq9QtPD3TK7I9wBAKZoSTC1ThnUpRJbcpIiPqZTqvPbUfz8nS6aUTGDDaUE767mmshMhLCdWm4r/4vFT5OESPzca/PGNBfj/Af8M+Om7fvaPgd9U1X8iNgbvHwP/B+AvAg+V/z4M/D/4I2azwN1FcOunBqiDs0q8s12ky4lQItE2Wtw6dI6BN9WW/cIjqoPHJWupTTkXtUctFXQbS5GTxcy1y+ANEXMK3VffYOEPbjIajPBNzYfue5bffuOznJyucnhptfCTnKFNrtBXpDdzAyJcCJYflJwA74ldpKodSC+fkJHKIQQLk/q+XrFNgyKwN6RiN7eshYUS1tlUACNnBsRDTvHg73wxOA90BSAJnja1pvIopW8Gex3OwtrUdYQsfJMbPHfyFNJlYz9Xght4JAjMcmFmCzK1fIAIywsDFjYrLv/2V7ixtsj9P/Qh6pWFQmJNTKdbpi2mffeqKwvZlxDSpFf7509J3p1zhvCVFZIKu1hKTc1YyJ6sHZo7+k5YJ3adItDN1MJmf6AqkpOFltJPPCg1GOfcPPd/p+OPNBZV/V0ROf0dP/4J4M+Vr/8l8BnMWH4C+Gk1gs4XRGRVRI6p6pU/6nPQg5uizvS/QhGhqCuPxGiM0SJy1OvdDpzDA1MxYqMviXoTBDQwkYg6SNGS255zNos65zM1TqhefIsjX77J4sJ6EQdvGA1GfOTMc/zHb32Fv9d8yPhdQS3nUMCLIVzOmQE6MYw/RRy2uzkgdh0RIVTgqworyBV0yBW5JxwajTdtiWhiOYzYSmNO+nUDCGpvhoqad0CRIi+j2dnfOrO9HEubsSR846GgPNJPVnIeyaXQ54TolDd1hx9deRqdKFJZDShrwk1NjC8X8UADEpzlSAqudpxYWaUdKBd+5tcYPnqWIx96BBcCbbvNeHqzeH7TF1NlXhKYc740o5QW6aKlJtq3EZTXiRQBj17CyjZT7zzOeZwLJhoohcypGe8qKO9LAZKcDwWidgxuXWU9zrh081ukwQJd/NMX2TtylwFcBY6Ur7/b1K8TwLsaS85lX3C+rCPHrAweaoIwAEKweSomdJAJCLU35coQPBEIRAZVmKt3iEDd1FRdpHHKPpGUrbnMOUFLBd+9+jarX7zG6vImVVMVtM2AgdOrx3nzyDE+d/s8nzj8cHm45aE7j1NfCmaOvpfCFnCfuGdcUx9AtaUQaSX7XktLrCf
dMlYb6uOE5WbErb07aOjZtZb4UjlDbzJIQQlTTqiz/n6dZEh2Ljka4uSCtQHQK5rYypkTCd/ydzi9sE6dnTV5jXz5nLL7Jmz8R+2gomwSxgwgJmTkaEaO+08fZfvmFV77V2+y+ZGnGNxfxlCIGbMh3KbEYv60eP6e5Nizk+fMYeb1NeuN6fUOCqtYoW1nBo9jRceYlKqqyCkR1dBEk4XNpVZTyJrA0s5Nutjy9vW3iIMhbZy94zr9Eyf4qqoivVTC937I3ZO/1jeAAdM2MkuKiKkGuiZQFYBi0PdyO4/Hai2VE9psuH4tjuGgxqnifJg/CBAGEug0MwqOaRHX9mJQtb52keXPX+DY6nEG3hJx70NJIG1xffT4U/z8C5/m7PYdTixtQmO7rKopSJIFTaDO8gfNGRUr1mUVwtATx1NTaFHFqyXqlAWTUkKSzomAWjSvlusB5/MegnHLfF1ZnaRLVE5IgCuV86QJIaM+obEU77ANAXUkQILVcpzz5K60FYiiDr42vsSPnXwGnWVkyYAL36rx0nypnlcCQXC1QeEuGNSvKUPtkDbDTFhlgeWFEbe//jK3vh6pn1ogHFqwxV5g3n4KgNDrhum8uJaiaR73Nam+cSu4MG/2EnFktHQbBLtWBS+BqiyaDJBl3gLuvUOSSUjFZIIjRnEyrWMpslTvdPxxjeVaH16JyDHgevn59zT1C0Dvmvy1ceqMqnbMivKjoyIHVyZl2aIIAgNvs1h8cbG90qNPmSYY1OxLWDZvNxahjZGUlcZnUmVhGlkZv36Z9jOvcWrlGMOqxle1RYPOl3F1Clmpteb7znyAn//WZ/mfjj7KUIem3VXCRAkeXM9oLTmMs3jIidVeXBPolcK14Pk5drhQ4WpPbg3NsxdY+LEYRux2e1aLcEWhsnDgEgVAKNG6QKlsmxiEw5FzwtVVKXZaj0iyDreiYm+h2m2mqIsclUWoBVnwaFsY0gEkKm5oFHYZeFLvIWLBbAeCVoLbBt2PMHDoOLGxtMhqEG5/7Tbj5ibh8Q3CoRFJMzFGBIcLFaZIfVBnCcUYXDjoVbmbxt9z9UwxtCi+9IqWRe1GVMzrzntazEh9eU/nzSv1RmmOvo8Ovvvh3vE37378IvD3ytd/D/iFu37+d8WOjwDb30u+0mPksRAfu5jpYiaW3ds7G0MQREBTzywHtcFHoyrQeGdhGcbNagpxshYYBKu31IXYOHAeuXiT/KmXeWjjBMNQ4Z3NFPG+MsWTAjN6ZzvO8aXDrGye5Leuv4RGy6KDqyztLmIIUuotztuD8k2Dq4ItMufpYkev4qjRKuwGkR6EQ1JYh945hr4yzawUD4QlSjsuSckx2vsZgcoWTgfSFXWj4NFoijX4vmuytN9W5i5UhOdnl3j/5nFTwRmVvg4vUAEBNIIkIXvIXpHaMHklQ8jIwMGekvYS0kLeSUinMAY3ThxqVjlZH2fxa2O6336bfH3/gE7Uw9kluUcNLeylXPvGsV4tB+mbBW1Ru0LJR+1r16dkTgpqZk14vQabES5tMwv+rn4luVuF4bsf3wt0/G+wZH5TRC4C/2fgnwA/IyL/Y+At4CfLyz+FwcbnMej4f/RHvX95fkCpi7gimt276vJ7c5lKwNGEgC/zBr0X6kK8DCKG4qDWpai223stFxoqGlUm5y8y+dSLnF4+TOVq2+Gqar4rWeGOIsBt+UOoan7g1Pv45Rd/k7fuXOSBQ/ejwejnOWY0WOKYSZg0litVbYdExbsK71pyjnhvPR5JekFvYxw4+lZYT9JMJaHom0Ukhr7AgPrS2x7My2hXRNTpk+Eyo7KAGDbDRoFsg5CqAz3gmWTenl3nzx96Am1M0DCV3EAE8tgoMqnLyFKFNg5fCfH2DERxazV5u8ONDaqO0VgDrraCq7UwgJtFVtwKa2GVnT/Y4Uq6wIthQrdeExrriqxsNoXpstnqK4u5b0GGujJDimXOTg999/JSMSWaysJo721d+NC3D5cRHNE04UQcK/sTUs7c2Z2QZgf50B/LWFT1b73Dr374u7xWgX/4R73nH/q7/mSCJzhH48Wq+KXSnIsSYSWuDCKyeoyWFtRKjM7gy26SSk2lL3ZqAQEkK9uvvMX+r73CgyvHEfEmOicGLMx1EEqY1yvpVCGgXaRpGj7ywAf49ed/i7+3sEbjF3HOoMmkGbSIMxR+U+6ihQzWb0SoGmIZUgoR7TC6RQlBrPHKQhtRoQoVPgzo2hmNCxZWeW8xpjNKCVnn50kWEhEvAaJCLK8vr8lk4145h4gV+F6b3uTBwYi6q5FFb16zK4u8XzceaIzAKOJJ260tQCucoPsGnPiVhtA5dJrQVsnO+HCSsEJqUnRPWcgLPLSwzHHfcms2QTaGyJHATJXzb1zj9P0b1MGXvaGEWTnTpWgsdCe0rTEeY84lRLPX9vB2yqZamWJHLKWClC0HzerY2R5ThQoZz4gxc/nmLhM3KcXt737cExV8sBi08o6FKjCsHMPKl6m9fTGQeXswShl7DQFnk8H6kdiuZ/MW1yoyz18uf+1VJp/5Fg+uniT4AWmeXFISbi0LvXQ9AiZrCp0o4jyHlzY5dOoJfv/iN/jh0x9BqNCmCFlErPiFkS17dMeKgBmnDsmYYLfzBdakVPL78MHq8qmI1vlQE+OMprbaRcZanIm51HtsITkRg9XF3h/t+VJWyFRkHp7Q8+dUeXH3Lf7So48ilaAukzohjzuDhsHg8QUPVVloKRm/K0EeAbcikgt7eJqgLddbCczUrt8JrrNCqaggHWhKLFaBRb9EPqfceOkWv3XjW7zvLz2LoExmMxaGg7nRWPJU2wiNnBlWwbh/mu4qcpqOtVI8tffkHOkV/OeLR226AgKDG5mmDjx59jixHjD6NibCtx9/3JzlT/mw2zFoAkuDiuU6sFR5RlWgKi2oph5pveXTZNO25rlOSfTbZOLTYB6o9t4GuIrnymdfYPIbr3Lf4ibZCSoZ762rLgTriVFfxCa8K5TyXnFEUG/oi3jhsaOPcm5YcenqaxCTLdxkBD1RVwaM2n9amsr6OoH4YJCv86gv1zXvSXdzLpZzjqgdg2rEjDHiivI8fa3A9zsMYdAgdQV1CTe8IHVZ7I7Cwi3gg+sHQiXuMMGzz6HhIesRaoFxwgVHdor6UuDsE4GRsYpRRWuFNlt/zUzRiHHRVHFRrCWg7hep/Y1ERbtSfJ1l0iSRZ4lrN2/x/LnX+PFHn+JIGDBqGpYXFqz7sdcg09KFyQGC1Tf+WRRW5mR6X3qf+h4YX3JBe49eBms+NVk5AA7eNWO5R4ylRyC6roxuLmJ5ZMUBtXM0wR/g42rjEfoJw9OcmcRMq9AWlCcrtFmZtR1vfOqL8MWrnD18P1U9smnBoY9I7AGkGCH3bh96WnlWpZuL/DlEAoMm8P4HP8pv7rzO5M5t6BIak9FvSi0jzaI9zMLCdU2Fai4jEToKzRK8HCiU+MLK9c4QMoWlhQV22j3QfuiQK9O8QJNCUlIXDd1SK+hKADTj6lJzKrJNzBeFDWb9g723+MDh07imRscZnWVjMgegFCw12H
J0Euw6tYR9I4EppFk2JnQWiFhk2GZcB2ls4EXusk0va223011rB3CV8MruTX71xhs89+STHGpWGL45LuMoZL46DhL9TCrTh02owmBze60ra6iFArJosYbeKLQAOLnkQXPVTphPPn63494wlnKSKcMswe1Zx+1px06XaDGqSuM8C8EzdI6h9wy9DeV0IjTe03jP0DtDurBmr9l4wuv/7vdZPDfh2PIRnKupq4YgwWJdF8liwnCoqdGHAjH28G+m8NGyJf4GNjjWF1ZYPft+vnjlBfJsZjBqXwzNYuP1pgZL9dKhPtSIq0yIL3WoJKPeO6PJmD2awaSyUy6ERcZde0DzoCwSMTaAVbrVEv+ccCq47HDq8XWNqwPU3moIQrk2RyuRm7tXOXPkFLqbyZWharnKBq+G0qqN7eDduCVPIswyeaAwVjTafygmBhj7GCiTY8JpwleGrGm2LlM8UDs0wOeuXeCFrWv85ENPs6ENeZoZ7oPfntGrtBjp0sTRba3IXYhW8bO9llhBFRVraaagXv0xl2gSg6JT6Yo0TYAyUe5dnMs9YSwACHMqtxPHLKtpF7exeIuS09Q1i3XDQh0Yec8gGCBQ9cgZjoSwd+UGF/7F73B8K7C5vElVD8F7HIHgKjxVkSrq5tCiOBBRyynU+i7Ue1vIXnCGoRo6FjyPHjnL+bVFrl56DZcV7RK5zB3x3iO+5AjRcH5XFg7BG+wrBiZoEJKkudpikGBjJJxnoR6y07XlAReMSAQ0k8sMGqOX5yKyV5jMoRRxC+qDCBp8mYCceT3e5v7RIqNqxRgCQQsxsWik1c5252Ce0nnLt1KMMFF0J5MnGVdyulx6ZyQqUgdkUBQ4xRBMTYk8tPOLKL965VVu64S/9uCTDFuHdAI7mUaWqF+9eVfF3lgJffXeGtbsSfdIqdnnQVXflDEtL+snqfUtIIZIWw6X6WtjlPTynZEwuEeMpW/eGQSb2V6XXm0zmI7dLjHNWqjYuYwzMI/kORBw02x07ttffZndf/NlzvpNVkdrOFcZ6uUrW8SlbuNlQExKTJGkidwl26HLDZUSktmNtTZiE1GysKYKnqcf+AC/Pb7I7M4NnHocgi+exDmTRrJEvAASPlA1NTFbYTAntUY1KSiaSKm221z3xWrIfkjgtCyKsnibCtd4RErIAfNYI6cSjokDf0DNJ5qwnKsCr9y5wLMPPGIhVxAIli9l664r0rUHITIzQ50cArtKnhY2NWXxJtDOmsWkUzRmcnDleSVCZeziPen4t5e+wcrigB87dha/E60BLakxASIsbA/w+7PCDu7nYZYalnMl/+gvN9sIjx4IyiZkkXOf7Ms8clFk7rEPjEgOjE3ePQ67N4wFSj+8mCAF1siVSnPXrUnL9WnLlcmU3c6S+1ZhEhNdyWKD98hkwtX/8Fnq37nEmYUTDKoG72xks4leFHeNoSfeB4KvSbkz8YRg6illoklJqHWu1tJBoY/Yw3ASWG2WWH7wab506ZtoNzXxhwIPi7g5/NpL+Zg8rC8Tt+I8d54n9r7Hmu0hD0LNnmRw2bS5hJILHVSdzeEYv2xem9ASsqVon+3tP3HC7XaPkPfYXDmGtJaDpFi4AFXxsB5L4gvsLLWgwfpZSCDZNAy0K2lxNo01UcizCEmQztjT4gw+vrM35V+e+yJPHT7Kx9ZPIa0BAfMOSRF0khkurSIvXrVrUr2LWW2boYkv9iGYGU7qjeQur1GgGboYjd6CdVZ2Mc3XwV36MPMemXc67gnouG/nLPkqk84SuR4CnsbMLClD70haelZSQsVRkxg6x/TFN9n69Euc9BuMllcRX+PrYA1avRh3LwEavHGkegQKJeauFPRMME6yoGVXdijZBWICL4XfpGr9IEF46PAD/N7Nt3n44jmOP/g0yRl9vJia1T+6zgqmzhG0MiONHS7USHZY4lIGMXlf+kE8NZFJzhb+9CLZIZNdILho8Hdy83gd1HrineCCcan6llnJ1pT24vZFnjl8Py7bPckx4xaCtcTEhFSmUVBVBjJoyoi3hrscszWTBWekzwLKqTNGsraKerH+7wozgKS8uXeHT735Aj929gkeaNbn3DqpHFRm6A6PNuBaz2hH6MYdjKp5rljK9HNipkHFOveq8yBKbQOyJj3r2xdsdF8+gFbuYjyXHqL+vd7huCc8Sx93xmT9Il3KRIU2KZMuMZkltiYtN8czro5nXL25xZ02sdVGZtfusPWvf5fml8/zaH2MhcGCSSkFK6I5tQ5EFTnQ/c194ldURVwguIY0S6jGwoKWkvT3aEvGq9FUHIZqJTWiYuUr3vfgh/jN8dtMbl/GZzFIuXRUilrbbsgFkXJCVdVWt2hb68loW+J4Qu66uWK8qBBcRWoqcm6NCFh58L6oW3pyDKYco0Z/QUwGKGuyPMJZETN1EfUmEXVp5woPrp0h7Ubok+VOUW/ySoV4Ra6w9uUib5b2s4Vm2bxtaiNC0U+uejqJ1Vhc4w2CbzMv3LnKp859g5964jlONavELho4kDK5SfY3ThCvc9nYpfXjyPNXrZ1Yeg9iXkC1DMotoRTSq/cwz1WyGOUn3VViOPjajC8nm8zW11/6toF3Ou4Jz1JS1EJ1sIJjygc1hT7xmiVlEpXxZEJ+8ypn9idsfP42zWARv7qBqxtwFb6QL7VXP8yA9Ibh5oU5+rVbWR9E4zyzOIUukrziCBba+EI1wXbPvnelH8et6lgeLrL00HN8+eUX+P7RGrowQETxg9oq17kk8KlFRJGYaGdTdDLGDYeICrUP5OCtPyZb6OEIuKohasSZJAyuEAQJGNpGJnvzYGbXuVTs0xwa78OWC91tTo4GDPySCZp3yWSQhHnjlKYSNk4VJVoPjwTL6bqiltKV9y5jQDQZHN2PuVBV0izy2duv88alt/j7z/0AC74xFMfbOA5XeTNGsLHcpYlJgCrULG8NuX1zTNocWSt0yVHmELiWjoMi15rLaOdcgICcM8Hb7E9NaV64hAME7MDL9ITUdzaXe8JYtJi+aTlZQ08sXqCpnCXFzvKFgNCurtG+eoOXP/8HrHSBZxceoi4NQCknVC0Z7xPU3CNcvp8JAk77Ia12i8Sb2F3lBrRtR5UyKqlocqWCvJRBPeqKvA7GVxGTXnpo4wyf3bjA2cvnOXL2cYtRYiRnY9CKJg597mcYXXkZvUvYoc9WVRwXf+gf0m6etCIQBgnX9QKT3DII3ti9wWbVi/NobCnZLxnMo3pb9M6Xwa2KhXCqvHznTX705EPgFd8I2TtyZUox0usDdGXBDC1MkyzobkK7ZAblseJiLei+1VFc5XCNQ4uuWRT45QvPw/4t/s4HP4GXYHy0BNqabK7WildjVOuAu5Aqe83a+lG2v/Iq3SdPk10/GqPw3aSImZRwzECZDEVet2cpx5RKB6QchG3aT49m/n4Wgb4z1QXuEWNJBeqbxV7v1sbYxZhNrKISBsETs/UkzDI8cKNluPwwF6d3+Pc3XuTD8X4eXTuFD8HabxVcjiQ1w/NVjXjTGfMlF7Fyl1pdIGWorFJZ1TXaRSRa8NtX31VL4
usNnhQ9GI8n3lGr8vSDH+A3vvbr/K07J6gObR4wAtQKic1f+FEWNv48HDoCt27CYIDevo0sjIjf+AOkzYWuMq+DMhg0zPZnNjbDh2KoDrQ3YtBcpqGVh9+DWOLEakAi7MYWaffZHB21rszGG+w7E7QyBEscyKKhbCZEjOUhyfQGSCBDkOBIs4TripHlA5bAvov84stf5b484+Mf/EHrLM1W4ZfGlc+yTCIna06TCOqxxe4FzQKzzObiCS6/eov80BqpiHSIiHkLcUAv4XtXJV57T3MANYtQph+7uRdRDDmkv4cuzNP973bcE8Yyd4ki1B5i4fuFJtB1md1JZOotT6iDZ4hy37hl2izwxMoGZ+VBXrj5Kt98/fN84vBjHF09DGKkui4n6qYpjVgl8zNBYaOIAJoSLil51s3h21BVtF2LL5i+NRiGeV3DFQJkCcRKQl2xMVpm+ewzfOHcV/m+hU/gl4ZYFO1xknDjbdJrr1qO8fpruAfOQk7ojRvkmzdY5zW6wWKB7OzefGz/DmvjjqXBivXuh0CBxazrsNQ4+mq2FHgVKdCvGEEzpCl/abLD8v6OxUqhGFd5nTo9oNyQLeGzWMVo+ncZcLYPxKUSuMRc2qwdN6e3+MFbN3jwwbO43/rZg3vfGZ8tF0aElPytJBsl1C1TnVG0cixUwmDrDvHUot0z7R3pgXZYXyvpx7zb13cbDvMJa5TNuP+6Ho+Rb+3w8L/8BXJVE8Z/+m3Ff/qHwnTWMRLHcmMatrOYabtMUiF2mWEwwt7K9oQjruZSVSHe0bjA++97llvTLX7pwtc5eft1fuD4UwxHI5q6PkBQevi43wUV+lHcwXuy96TS8SviCL5hMp2ZfCQm9iBis2MKE8zqHKXRz2An4ZHDD/C5Gxd47NrrrA8eBgIiGR/3kZ/5/3D99AfRekiuz6KXldhFQjiMrh41QmYpsCk2aiE4x53pdUb1AtKEuSE4Z8U+zWravZjqjROx3aYsyPHI8fqwY9Issrz8IC47Ni+NqUpdBG+QcblwC90ah7aJ3BX4OPb2WyDi2ljEOWLeJylX0i4X3zrH+x56iPrwafCGSKVU0HBfVroYXp4jRmmqiqcOB0aiTkwXbRBYObPKras3uH2kJpmSO5QkPfQzYdADljmU2N42A196fXov0oMsqkqTrR60Mxiyeuk69c7eOy7Re8RY+pjBRNaauqIVYZaVxSZYhV2E5aamAR54/TrVcAHXBpJYoq3OcXhhjU8++kOcv/kGP33ud/jQoQd45sjDhNqq6Za3BLKUGB3jnkWB6MSKjkVkAm95TkCYFaREihyo2UWpM1AgUAUhEbxBSY+f/RC/9tVP8ZMbRwkb60VR0oxs5+kfoRstm2i3wGS6hw+eUA0Pdna1duO27bg+2eHN8Fl+9OgPUC0vFgkkUyoxkTwbSeEw4T9xDmkCOwuOrw0nfLW7yfPn30CcZ21xFU2JY8/cx0ceeIAPxEXWLu8TGltY4sUQKhF0O5Pa4q3UED6jtQhuwRs3rVNwcK67w+9/47P8xY/9BPGBx+lqhzi1XphUOhIbIe12aFdgWC9zcQlV8MPiGQYl5/TAosMvV2ie8tXXLvPrx+4vSqNWSSkkce4eaJazqcfQ54OYlGsqdP5+uTlxHNu5yv9x5xz/7Pv/Bh/4+lfpvnLuHVfpPWEsxSninKdVZa+LxALLhuBYrDyrVbAKvwhHru3ShSEkbyRBb/JEHqsPPHr4Qe5bOc43Lj3Pi6/8Fp888QzH144ag1ZjoZ9nMh6c2hi9XOR9Cs0FvC2OnJBSTOi6GXVVG5NVtYRXfYJZGrdcwNOx1ixRn3mGL73xdb5v9Al0YA1XgEGhhesFSl3XpBxL/lTuSR+aqqNTuJH2jFofoxUOSzuLhIDTTGqzfd3UZODr9Zh/t/U6ITtef/FtZtMZtC1bL79CCIHLS8u88PpbfPb9j/PRh+7jwdsth/aVNR/wQch7Cq0jBKPYS2ur0bmKTEQnCemscPy8XuWlN1/irz7yDKsbJwo0q9BaTUsMskPbjLVaA0Wko+e7IZanaMxIpWijyHqFjqx1YBRqPnZ0jTdub/O1tSOFqqKllbiXjeWAYRwtNJ4rxFCEUfrk3wmSoYo1SYWbsWEnh4NazXc57gljAdsAdvZntHsdbsHNIWQB0wUrU4GryZTB7Sl3mqF52Z5xWkJeG0ngWB0s8LEzH+Hy9nV+6eI3uP/Gm3z/8cdZHi1Y0c33f2edjg41yFoNIE4Krqpw2aOzGThHCI0VEvtNS2yLn89w7Psk8NQ+8ejRs3z25gUeuPEWx089UpTiMdi3H+CT1SgpqfC85jrAmFFVjg23zCLrJvtTBVJJ7KWpoBRvzw8zb55qeHppnb1vvcX/8+IfcHtrlxMnN5hcu4YbDMntFD8YIZXDLy0Qu5YXX3iJF77yPH6yT4Pj2fvu4+8++UHciYbm+pTFWbQenCCEVLhiIrCvJMl8bv8truy9zV955n0sNuvoGGTAHKo3QiUWVgW7HiShtUOiwfBeXZkWkHEDB4sOWaqQkalgumDF0dXVVf7a1lVu72/x+sIqYLlTr9UwdyQUHeNCXepBELAgzXQVeyTUfuE4yHPe6bg3jMWCTKazyP7E6AyVF1YGAVd5au8ZOEdOCX/hJlOtGKtR9jO9Z0j0ItKuzOdAlBOrRzm8+CO8fPM8/+r8b/N962d5+vjDhKY2zyC5tDI7a4kVoQ6eVpRWMSKi1ka5QAi+JqdS7XdS2oRNXklz3y3p8N4zIPPUQx/mN77xaf7W5klqX4phqmX6Qg+DWveiyVZazUOCJ3cdTmzGZqysUzKTDV1zCsFziymfO6S8LB3SdFRPLfOt2/vki/sc21ii3d7miQ89Q1M5xjdv4RYadGWVwdKQ7b2pVbRLM9n+tS0+98Z5tgaOpUPr7F+6yYPDVX5q9QHqcSQFh+5EJBsV6De3vgXxFj/+8NOMumXyLJvHix5Rh3ZqPfy5JNoOqAVX2VS3LIorOZh4YOBg3VufVyUHLOao1mdTO06ePM5PvX6RT2+uwspi6aYVOsUmkZWCbKuBIIJoYtaO8b6iqhqCWO65O53QZeG4rxhWjg8cGfLQcm1iJu9w3BvG0h8iTGYdXTS5mmmXWR3VphiZM8uDit1XL3DbrSI4g4lV6dRg3FQGfwZfxArwSDb27jNHHuaB1RN85cJXefH5X+eHTjzLkbVDVLVDfSBU1gDW93EHb/T/SVJ6spQUNE2cp+s6fGUFQksercZBTogYOdOJsDla4a37HuErbzzP959+uFxoXxA1ycD+2rs4IzQNgpv3rvQt1btVZqyRRSmsl+Bxlec39q/w0izx2GNnyb5CXSYPHSkp46y4wQJ39vbZOLzOfR9+GipHNRpwZnODcy+9iTrh+qXb7I+nbK4sMdGj3Njd4fL5izRrQ95+4zUe+P41PrK4QjM2e44x8St3Xma97vjwqWep2gHaWfU/NVircXZIUsq4A0q6hyETBjWHpcpQNS+mELNkfTzMFKZlD608TMvYDG8Kmo89dj8rb93kyIfO4kcNqtZinHPi
6o1zzMZb3H/6OUQDd3YusL/fsrG+yWiwgWpiMrnJrG0ZDldYuO3Z+PWa//nTZ6ivnue/fBc5pHvKWPqxZylZQj0T2J1FumTjvtvxjPVL+7B+yIbw5ISHOczqfTCmMKUe4CzJdN5BzKwNlvjBB3+AC9uX+W9f+yoPX1vmo6eeYWFxCUlTXFMVFm9FRpCUGYr1RmTnSKV4nOjwBGI3o5YaMHkm+kGiqZcaVSqnPHXyMT5z41d55MYVjhSvYsBZgXedo6oapnHfAANnPTSdRiaxY5wil2YTvtle5aN+neRhGpTtZsoLaY/d7cTOzi263OGHjreef5Htt95isDCiKZX8i7fWubO7z2h5xJFDK3zl4nVOHTvOaKHi6OoKr77wKpfOvcb+7j6x60iTGd0be4gq/05/hf0Tj/PU8knWqhG/cfNFHlhwPHX0CbyadrA03uqzqSj/i6BNgbOzkNuEH3rEWWruGuOByVIw7YMuI1NT1cmFlsJ+RpaEXMYWSjYKjp8qR4+vcu23vsqRH/0Qvg5kjThXsbx4lJuzHRyeqhqxsnSMzbUzONcUqddAynDj9pscPfwQi/M5kwGRdzeHe8hYDlpFk6jlEgrjWWQWhVlSmguXeTgsH4ihFQpHP47bKdZ+q1aocWK0FlcQF4DaB86sn2Rz4RAvXXmZf/Pyb/HxI4/x4PEHqYEwrEw0QnTuNWonTGPPg/JAQOnwztN1La6q8EXUztDX0gDrHMREk+Hhk0/wq+d+i8ey6RWTSheic6jY6DYcxu/ywcoOIgzqBrqKhzdOs0ogV46LJwO/p9e4ev0WS8uQbtzgy596jce/71le+PTvsH3xCgtOSDu3GMeEuIrRrMPNdrnTdtxwNTIcct5/k9HiAmefe5LTzz7KsQfu45uf+RxvvPAisT2oN5w/f55/eeMWHzr7OMcPHeXJRc+TJx/F7VVoI3PAQpMl8EQlD5Sw6G0sX1BjIZsYghFca4Wlg8Gq5NIFmy2P87W3955Z2ElQo8IEQSOE6NhcG3Hrd19g8889aUROySwtLNM0jxNTS4x73Nm+yOLicQbNmm10acb1m2+ztnIfiwvHceMieSdp3gz2Tsc9Yiwy/7efGJu09Ka7Et+qsvHqdZYWj5aimcXB2ZmPlwIFmzK9kASiWp+Im+sQGxvYoyw3Qz544klurR7l9954gfPfvMzHHniKNX8YXzHnEQll5J6a3m8/T13E+GKSIl1nIxG8MwKflcyMsBmCJwAPHDnN9q3TTM5/Bfr6saqhYAXy9HVNitb8JqXtIMVIAk6sbiK3b/PKZuSVU57uumN37zaT7S2muzOYTDn3+19htjcFK7wzXFxDiYz3J2hS7tzaoa5rTp9dYeWhR9i4/z7e+uY5vvHpzyLLy3ziL3yED/ylH6ad7vP2K98qpEYLae9sb/G5V57nyM4NNt73LGeWhVES3MzqJL3bdZUZhLoyunvBUEAfxKgwArJktRqioqkgWloKqCnjRsYWp42lL8feDxSJpouQUeqlirU6cfPz32D1Q4/R6hSHUAVT0UEqNtcfAmDW7aNqclNVvcbKynFyiqQ8Rcm03R4hjnm3BrB7xFjK4WyZ2WLEGqPKjMVqa5dH9gU3DPNqvyv/pZ4rlK01GCg95DYxy5AP5gxZX8Td6ipwePkIP/rUJq/dvsC/fe3zfOTmKZ66/xGqUYNrGlN9FGEYKpTSNpCjJefeo3iqrMRuilQ1uGAjE1I6mG5VV8RZ4gP3P0v1lf+W6d4+vlTpfeXRbATE4MRUSZzDuapIHsHr197k8vg6V4/XvOmvoq9EmpR44+uv4kWJ3ZgQBkynntRNme6PASUEx2BhgIFvgdksojlx6c1r7N3aprp9neOnHuKxZ5/g2qVbvPT5F3nkqYf5vr/+V/n8z/48b73yqjFx1aD23f09wpWrXFu6xI24zPHBIZpSKyGUTH7kkeoAAk+q+NqhTZF97RK6F4scE/N+n5xMDC+nXGhGeU7JMVkqhwZnqjIxQe1x6qhHnlXv2H/5NpvvN8OIOVHXQM+URe37Aq48eOowWmSrqmoHQQhhRFMtvCsidk8YS7++pSR/GcUlilxNJjs49dZ11leXaR12oeJJSalDIGEiFr5Auda9l8nc1UEnYtSVbCzhMpYFj1BVDU8efZATK0f42ltf45VvfJofOv0sx+4/CaiRNMURJTMp5EdTOSnjsvHUtSOlDqL1fZgRJ1DzQKHE3ePc8fOvfZ6fWPkxgh/OqSnWgGYcsti1VINgIIareOjYfbx88TqX8x38pRts3dxl5/INuuk+bTI0btLtUdWN1WdcJnYdu7s7ZCK+9qwsV9STxGQikCNZE7vXLlGllhtbxzny1KOcffIMX//Siwy6IT/0P/xr/M7P/TKvf+2FA7KpCFv7e/zq66/QrK/yoxuHqCqPyxjzV8G4MYXsieBiMq7ZoiO1NqhKusIET72mgSm2WKt9YR/PsomQt4ofeNQXmkQFkjzUoYxa9wxqQZgwe/0aO5sVP/+lz1IHj7iK2gf+5kc+Nq/iG3FS6Ic7vXXzIisp8vuvvciDd6zh7J2Oe6KfpTfmlA/w8NirKqpSdcr7bo2R0QIZUCni2pi6i1PrrOxESx9DRshUagknhXjnnMN5g5nVi4VM3hXRClhtlviBh36A009+mF+48gJf+MYX6Xb2ybMOl5Shq6hD4V2VYounGIxYq3LqOiMvlsQdSr5U5jVWvmLHVbx54bWywEoDmg+gjmY4IhYdZ4ok06SBFyWy+eD97I9hf3vMeGuL2M6IsWU6Gdt/+3t4Z3Mzk2Ymk322bm/TjaeEnDl9ZJ3RMNC1kaDC5vKAWifc/tZLfOXnf50rL77NMx98jFMPnMAtDFh/9n2cfvoxWz5ykHhPs/L8jTe5ITvkxvJGwNRkYokIVJFGcZuB3BQUMQFtCUJTRqaK7kbyfkanauowE7VR5B3IBKT25ISNzGjFBq32Hagz4EYLu5k6Q7V1k+biNpPxlBu3t7hvY40TmxtE7eYUov4wxnnm2u4OTjz3rR3hzmT/XdfpH2ksInJSRH5bRF4SkRdF5H9Vfr4uIr8hIufKv2vl5yIi/zcROS8iz4vIc3/UZ8xPplRzc8kLbBSI48TVW5xZXiU6m+FoQxwV657KiCaCy/hCkhRVnGb6/6UcEaxhaM676ttMCxHPRhAk6hA4tXaMH37mR3h7ueFnv/IrXHn9PHkyocodgzKOosdD+ym4RnKxVuX96cTqF2LhS+o6kjiGVU3tAx9+9KN88ep5ppN9GzeRewqNM3EMJ2jRFeo8/MHoDtJEXnrhApf3Ye/ODqqJlCKIB28UmVhGdYe6pvb13GPNJhNuXL9NGxODxpG6lp3dPS5e2WdjZYkHjy9zdi3z8hd+l5e/+gJrywscO3qYkfc888mPsbC+Omcza8oEb/JQL15+05RvGsENHbnO+MWADB1uw8NasCJkq+Rd8zjmUXIZLyJQGVwsfR9NxF6nJrph/DuHC5RhrZgewDQhexHdT+h+pJ1lrrz4Ft/81z/H+BvfYu/2Fk8eP833P/Q4wVXMtaT
Lf4ZWOlYXFlDgzPoxHtw49iczFsze/3eq+jjwEeAfisjjHEz/egj4zfI9fPv0r3+ATf/6ng5XyIGuMGVBcKp85Po2zfo6pqxpyEvCBpN6FEcmzlpcMoORgo711XBKCNY3mKpmvNM5v0jEemEsNzde0WAw5KNnnuPsMx/nV26f53e+9hlmO7s0OdKgBDGqv4TKIGsHeE8IgbqqyNoZVbcwg4OzMXAiwubiGoOTD/D8i1+BaMVO6+NxoEI1HFpelpXXprf5yuW3uHPtFnf2InFrizTZZTqbEFNn7cfOE0KFONPNqgYVzUJTRsoZJ2owaLj/yBo+KcNBzfb2hJvb+7z65m0q77n/yCKPnlzkwhe/xG//+1/gxoVLvP/Dj1lrQzPEVzaKwwXPaGkBaYSvX7jA1t7YaP6t9bfIeiAcqiGC7GR0P1ndpFUYl93dG5m1H1RSpndbvjIISGM0Jm0cDDxu5OfTqNmPRuRMsEfkpZtX+cWvfp2f+ZXf5fkrFzn1+EmePXEEt7TIz33p93jxwjncfFMzpDIXjpgi1PSBe2dTF+ZJwR8+vhet4yuUYUSquisiL2MDin4C+HPlZf+SP4XpX6qUnSvMFTuO3LrDM/UCXQsumT6vL1qtgo0mUEz8OmNSPYoSu9Z0cTVb45ETg2sL8TGWxdkL3/X8tKyKFpaxc57jK4fZfPZHeP7SS/zMV36Fj594kmOnHiJV1jdD8YYJmY8L92LdeePZmLoZUFU2VsGV1eE08+jxh/mtC7/ImQtvcPjBR+fxZ8ZoPpO2Qyrh/N4tbm7tMMkB9rYJd64Tu9mcYZtTsj4MSShCO53S1B5yLPQPZdpGrt/c4ZvnrtDUNTHucvzoBqmN3LqzT9dGVpcrBqMRTz9ylJvbe7z4e1/g2U9+nNUjq+QMg6UVvHbkmIgx8sZrV7h/c5NdGXNsaQ1WFb8UTFtsarwra/837TGCWhglDueVSMZJGSzkBWmcyds2Bg+nnG1EnzcRC9pMruBmnHD+9au8du06XZpx8tAaHzh1jM2lkT3HOrO2t0ClW3RJjXEufRc+8/kuPdXlyOpR2pS4ubfNlZsX37UB7L9Tgl/G5b0P+CJ/wulfdw8zatY2QIbzUQBW6HU4VT721i0Wjh/n5k53oNlbWX+9Optj7kvbYswRX9V4TVan0FxmmkBwFWgqMwUFKcJtJlpRhCycs3gYM9qMebtBqPngqWe4deg0v/PKFzn11Uu877EP45eWiEXeNDh7HFp0hIOvGQ76cdvJRlf0PCTxbCyusHnyYT7z2pf5qyfvxw2b4nlAyXjxdLnjy3cusXPjFmnaItM9Ujsjxc54VGUkhvfBwJCU6DSytzdBvFBXNrMyidB2LRev3mFhWJG6jtWlmvc/epbdccu1W7vEDsbjWdEbV25fvckXPv0FnvrE+1haXGD/dsejTzzCdDzm9fNvszBsCHXg+niPs02pj7QZTaU7QHxBD8EVFpCoWqOdFxCTjVIHNA6XHUlNOhZVe8ZZmO23XL6+xSsXL3FpZ4vFuuLhw5v8xFOPspAKNQbQfVPgnMSOo/efoLq0zahuePy+B8qcJNsAfaH0S2FrLNRDVJWt/V3ed/zMvB/mT2QsIrII/Afgf62qO3dDbH+c6V93DzNaPnlG0TJ+OVuDECjHb+7wwWqAuMCk6Dp5703nWEwQ3ODA8oWIzRnUytQaU8R5kwNKXWdySYV/lXLGYTQK8YasuDJUtZdTRSwUEgQXPJsL6/zIs5/km5de4uef/xUe2TzJIw9+AB0EXKjImkjZ0XZTBk1ju2ZypBxN/qhvTHJG9Hvm9JP8wqVXePWVb/DY+78PJ44uWxEuhMDbk9tc39tlducWOpsioyXidNsmD5e60WA0RHyNkwrfzei61jaJzuSaRk3DcGhaZZNZpI3W+Hbj1h6bmws8tHCUjWOb/Pbvf4W3r4+ZTqYsrzS06om7O7zx5VeYTSdUtefw0VUuvDah62Y89Nxj3Lm0y6Xdbd68fpnXX7/I6TOnOSpLLDXW8NYLkedJxg+9iQAmkMrb1LAuWxFYQetyYxAmXcfrl2/y6luX2N3f59DKIo8fOcyfe+B+KvFIyuSp5YkxJvwg2OJPmTSJrC0f4sjuKtd3tvlXv/drLA8GfPzx51hfXielWPIgazvoYqTJyktvfYvje7vctaz/0PE9GYuIVMVQ/pWq/lz58Z94+tfccO7+rIIg1Zr55BvXWDh5H+NZZpoUxOSFEooXoECHkXIDcFShEA577pUIEqrSOmyTanPK5Gx/Q6iKCoo7IEXGvvPOkBcc5NSZt3OOJ44/zvGlQ3zutS9w6cu/wUee/D6a1VX6Ud2Dvj7TEyPJdF2LV51frKJsLqzy0Jmn+P1zL3DqxsOMNjdNWsg5fKh4e7JLFzukjagPaI4WIpbwyovHh4ZQj0jaElFQK5LaJN/IJLY8vLnBX//R57h8e5/f/crrXLu1jaL89ufP8+HH7+eRp87y4adOUb9yiVcuRGZTxeeO61evmPQRjtn+jNfOXeLaxSt005bzL73Bo6dOcivu8ZuXX2cWZ5x7+Zv81IMfIKfGBjo5h3qFTq2fp9cu8xYdRImkpEjt2dra5/zWTV658BbtdMYDxw/x0VMnWHcNVeWtbyZa2KwDkFqI00RqI94FQ+JE0DSlXmh49OT9pCsXTOHUe6apI8c4T/D73ntxjlEz4ONPfpDmxu/Nhej/WMYi5kL+K+BlVf0v7/rVL2JTv/4Jf3j61z8SkX+LjfX+I6d/lfB73ouAwhNv3+B9K6tIVdOOLY5MmgzGz5hulZPSHetLz4Kbj3/zue8JUVSLWr4zNcqoIGlGzAlps43jzkaPyVI0qShKKWqjXVUhiM1h9MFzZPUof/nZH+OFa+f4+T/4Fd536mnOHHuQqqkJvYieDUigchWdduRoI6hNJswU6Z84+gif27vDF176Ij/0iR8HxKrWCtSNbQLeo6lD2wmmgQZgfTzOe1zweBlSVYFWBGG/DHrypJi4eHOXO3sznn7kPg5vrnJ7Z0yaRYaLI9Jkys7tHU6eOcPK2iJ1eJXXr+3iU+apRzb5yguXmIynaNdy8/J1dnf3EYG2nTIMnvGspcFyj1v7e0zijJWlZfI44WssJhsINFZD66F6Ubh1Z4+Xr1zk1Ytvo2++ztP3n+IvfPT9rC4s4NUmekm0Go/WPUPDSKY5JnwdSjFaSNOMW7ROVw2ODzz0OO974BEE6zH6Luv6275eG61SB2u+e6fje/Es3w/8HeAFEflG+dn/iT/F6V+9Z3Fii2R1POHHr+4yePABsvPsxa68Rthrp3ipGHhrEdai9m4i3jYDxZfOKOsgLuIEBTDQHjb0HhejhWI5IjGSnfG+RLKprRekzaeuJPIUNkBEfEXlKp458TgbSxv8yiu/zbeuX+DPP/lxwsqKfX7xbqqZEKoinQR03VyaZ2NpmVOHTvHa9h/wyBuvcfKhh8kZkij7qSV1M2MDkMmxRQqA2SN7/TwX7wKaHfUAM5rJmDoq2UV2pjN++j9+iUPrKywujzi2vsiJw4ssDgInHjjDzq
1djpy+D9GO554+y84XX+H67oybW2M21xqmkwm7rbK1tUVWGI5GIMKXXnyZo2uHePjMSfbGUyKRG3mG7G9xolm1DbDxpkUWhRyUa9N9Xjh3gXMXLzEU4fHjh/kRHTDa3uHI0bPk5WVkomhPTxoE0jhaLpNtI81dhJGHWgwMckr2pRUgUZBNNx+G1fe55DJR4O75lG4uHF6iiHc5vhc07Pd557Lmn8r0r/7Ns4DXxF9+9TLHjxxFs9B2iWlnIVNwRbCuSBqRrXZiQ1c9SYy1mmKvsG47nuTSqSfW1EXOZexEsPAgB1wwcYuUMx5TfK+D1XSy6HwWiC9QtGoZkuOEowub/I0nfozXbr7FL33903z87Ps5evxkqQnZZuCcx1cNYH0XGjuoarxznNg4wrR9hM+8+iX+5vHjNItLhMKG1enEFoEXNJvca19RD76hqgaEqjH4VhzeO5K3keOx64hpgnQztne2mU2nHJ6usRZqhscaFoYVg9GA5fUh+7d3GK1scljh7ANHOf/ZV7mz23Lz1i6xjUi2HnYfHMvLIzY3N6iCY+vmNuevXuXq1Wt0XcfFa9c4tXaYf/wjf41hXZNy4vLuDi+8/Tavv32RQZzx+JlT/NSH3sfy4pA0zuzMLtM0p4k3rhDcaRKdJfkAnRYipSAzCpHSfqYjQacmcNEr8TDpiJNZWYuFJl52Yykq+n2C3/Pzyrr9z6T5CzOYWjwffusaz7kBNDUqwt6kK8qPiubIyNlEMLMFIypqTvNCoWIco1RyEofa9Cff7yQBROdJclYTasiiJaSWeR937CJZKKPClVBEL1TKjBRxtoCdY3V5nQ8urbC0sMSnXv08T958mw888SGohoSqBxB6I1ei2sQscYHDCxtcG91k+8R9vPDi13j/Rz6B854utuaBNJNjxMV2rsMg2KBSE9AoVPjSTOYRXFWRc8dAPX44AlGGowWeffoBnn7sOMujBRYXHHUNa8fX6fZn5K5juLjGmROHOXvfDS5tzdjf3Wf/ztSoQs4EXxYXB7RtS9M0rB1ZZ3t3j/12Ruw6qm6GNsLVyS6vnb/K+bdeYzCJnL3vMD/53JOsLK1YY5tmZKakSUSbgDt2gnz5DkxboMhHUVC0Ir6XUzoY1OQESSWa8MYikCDkGmaTCcM8m7ca52gCh64gdKbQb5tsavdQzYwnWzDb/c+FSCk8sLXLj12fUZ84hnO2M+7tT0kZghhlf1iZEakY9d7nRMRGVfRuV9XMxDlwBUXTnp1sH2Xfl91LnMfZiC6UMorPBRteoRnpOqPZFL6TqFEzTA/MCp9BbXzco4cfYLEa8puvfpa3v/BL/PBTf47VjQ2gbHAKoR4QUWbdhEYGeF9zYv0otTj+4Jtf4pHbt1jePIIAqWvR1KK5ZT7TWgFxhGYIzpFIRt5U5rMTnSqLtckmxaTMusR+G7m2PeXqXuKNW7epJPPk2czixgrDjaN0O1uIEw4fOcyzD28RX77E/pE1dJa4cf0G9WhEMxgwagaowKXreziEZlSxtLrKbDzlxPF1NtZqPv/7v8dD64f5G88+y+LCkpFGa4dWrrR+C3hPlpbcgRxaQ5/fQ2YzZDQqijKKJtDKNACkNJJpg214085qZ51A5Ukpo6MBv/o7n2L3Gw1OoI1xbgAxmffp4eGclY39Lf5n29v8v//Dz/L4y+eZtu07rtB7wliCZurU8Vd2bzE8fJyF9QFVNWBva2ZC4P2Mc1Wms5a6rvDO267iwoEecTEMQecM3n6myVwoAQupQqG52IgIehMpJLtyYs7aY50oLiVSzGXCsOBVaWNGfIWTAosqOBKnVjb5y0/+IF+9+jK/8I1f5/vuf4aHzzxm/TZiRdW6HjFtx3RpioiyOVziytY1Tj3yFJ9//it88gf/It/avoYUZXwnQKih7Up4HUg5srd7GzwsDEZUdQ2uxnnP4QXPik98682LtF1kdzIjjcd87is7vHj+Ao+cPcn7HzvG5saihbUK9dISebzHYGGJRx4+hXOZw8sVv3z5Ol6E2EaagXJzZ8L+zhYuDBiMBminhFDhFjzNaMTp4yf45NmzVOqgttpTP4DIKSbcUdtg2Ha/o5soceSoRovo9gxZWUSymFi7N1V/Hdv8GVcVNK3x5Gk2YT7Emv4cDJZqZm9MuJlmVtAsxUex7NSoNLb1EWNmNG3JOTOZdmZM77ZO/wzW/n/nI4m1oJ678AazY0q9vc2h1QXG7ZSLe9tsTWaMwojaNUwSnFxap85WuEvOCHHe+6K6koCMOo9TU1e1QhTzuSuIFNE8h2RjDztv/RfeGSsAyfOxfH3I5h1QpJLaOKXNLUMRsgNXVB8leLIIRxZX+cipZ3h1sMo3b7/FG198g08++DT0tWSBQT1i1o6ZTHbRZsB9q4e5s7fN18NrPP/6N3n9zg3IHaqmDUZwxZAdEJnubVFVgcHCANUxIsHQMRwbq0t88JFV3r65zZ1rN+nIDOoFRvWQ586e5C9+4iEOL40Yti1MI7kem3hfMOrOaHmVRx4+w/LiiM9+7lXa5Zq91sY1TG5tMZuMWdscMFpc5PjxQ6jAdG+fdjohpmxeRIuumr9LAzlnI1ROC/0+lxmXdSAfXyKd2yGcOWzs4tK7IgqpLnfOFQc7tfArldvhKkeqhcYFRtmxN+kIZWBY7otxQhkroUz3O7ousuhn1mQ4aZnOunkO892Oe8JYFBuP8Mjhh7jOEfLtCrfnSLuO4aRld3vMnekVduM+e2nCOQcETwgNzWCBUb3A0nCJ1cESjRswCg0DZ2PCjW9FUaUvyXHGhChc0QPuJwlLWYw5U3lvw0qT9b9oUX7BCZITimNYj8jF82XA5zISHEeuPCvVAk8cf5DhoGbSzvilr/8GT3XTwlezkLBuhkwmHdPpPgujRS6mMR96+iP87Au/y6S1ZqZc5Jbop2uJ4v2AxeVV23VTx3Q6JSUYLTT86PtO8+Qjx/HjPf72T3ycf/NrX2Znd0IjwpGlIWdXhyxmoQGCr9A2ksYTQuPvyoczzdIi9z+6zg9+7Bo//dO/xmLTcLvLzDpT9N+5tc1k2jKbTlheWaLyjhg7bl24SdqwHDCXyr2viuZaNBRTgrfuhVJ7iQ44PCC/MUEWg6nCEEhtwkXBF8NSMRZHUtOh9uJQb8/VRcWPKjaGI6q8bd4MxeNNE8EJTkwYRQWGowE+dfQBupO+1/W7H/eEsQAkB2v3rbBZrzKrBgwGwqUrQ/JgkZUT9+EyJATJGZ1OmU0mTGf7THa3GW/vsX3tOtfilLG2RAehaWhGCywMl1kfbbA+WGKxXqAJNcNQWaHMQXKWg6iTIh5R3D5lJAGFIaCWVDsxVMWHymBLdD6r0OguGWtcsi7PRdfw2OZpzt28wINPfZT0+u/ztee/xGPPfQw/GBrpZrhIO9sjdS0nlo+wEyeMjp1E33wVfGVJbcK0mrPHibK4tIyvB3Rty3Q8JXZT6joTqhlHRxWPPXoMNHJ8vyXED5CmLVXXsrE8YDSsaepAJQHnGlw9hGTTg6VKpl0MtLMW9Y5jK0NqPyTMHCuLQ6bt1HKOyjHZG
3Ojjexu7xm6VlesrA9IsWgfODEmcbIamARBB85mt+DwA8FVBpWnocPlZCPCFwRxAZfUOiw9aGvC5FpIq9QGlphCZjCofuA4srFGfX2LaZFsdZILfCxMpx05KoNBZWPzyvoLIfxngoYJdBnu1I61LDS1wzeBKhwkW/tdy83ZhPtW12kGKwxWllmhdOkBPhf6fdvRzsa0k3329rbZvnObnavnuNJNGOeOtlLq4YjRaIWNxVU2RuusDddYGi7SiDfCH8Uw1EQnTLBVUGfGM+26AjIYUtZTZPqCKGJEQF9g6lqER9bu5+0rL+FDYHsAv/jZT/Hxp76fw4eP4z0MFpdpJ2PWBkMuXr3OtfEWxIm1TYfahPRK45lzFlbOJvtMx3t07RRQUsZ42EnR2ZSwssDyYJGPfv8S41s3uH3+Ovt7kb04xYcROnI29CnUZDoLYdU2CakCwVfcvHSRN8/d4Ic/8Ahv3ZnytXNvQIqmqCNAhnY2YTQcMNnbZ7C+zImNTcKwguTIbUceeCpfJglUFZKzeeBgskg5QnZKFwR/dEC6so9/ehWNttBzNoPxI2+ySK7w/jQTgsdV9veucfjFwOHj6yxcfoN9UVPFLCPbp5NITspg0ICzyQcSC/XFWevzux33hrFgnYyXnXK4Dtbf4K2wlEvCvpNaJqklJ+ulcF5KezAlawafPRo8zWjAaGOTVYRTQELpug7aGeOtO0xu32Z7+zbbVy5wLr3ABCUPBoSFZVZW19hYXOfI8gar1TINQnABqbxNNU7KZDphYTgiBKvIqADZ2ohdqBAUdSadlEiFkCk8vHrM/j16muMnh/zKNz7Dh04+xuMPP4E6Tz0YoXHGyZVDyI1XcMHhfKBaXKFrK6o4Je7u4kONd8L+zjZdbAnB4/2Q0dIa3nnaaabd2SMsDHGNJ7Yzbrx5g8sXbnFnJ9Is1KRoPf9ulEh6h2rBtMhEPT5USNWUKc0dVa3sTluu3NyxhrTOiJO+dHm60rNTB8/m0jL1cADBw8BTjQKJxHjaMRotEEIhT2bTSvNlLF8SyAHiIrhrW7iwjqQOFpyJVhTiox86dGL6br7yNu4ilOBJQdvM+uoym9lxzRk1KGummxnYMxz1/cUGSbuCsIXqnWku/XHPGAsiXI8KIeObMB+M02Wj7Ne+4uTKyKrgzuSJNMU53dqHnq1siaCWHhJKf4yvK6pBQ7O8zOap05zIySYMtzPGu3tMtrYYb++wdeUWO+113mbGrku0VcuJQ/dzdP0+NhfXWQw1sxxZdN7AN8dcxKJH5fox1uajPNkXXbPUEcRZ+0DV8OMf/fP83re+yOWv3uLjT3wf1UJFdp5lX/Ghwyf5TJywk3ch2fz2NN0FTDKKokS/uLxmPT1uROVrQrA5LOobZGETpju44PCba/zif/0ZcgrUTcPDpw/zVOo47jPLR5bIE0uAw2hoRU0coaqQ4GmGFYdOrjO4s093Zzb3nn0ME9QRU4d3yuUr1/kPsy/Svh+eO34arYz86jOM98csHlqwYUydknc76kEDlXXGzjpBKyF2HUNnIueaQTqgSLHnlMhVET8vYbOqorNkvTMjqBcqHl7b5NzOdVLOzJKNXRw0Nb0Mvysomytiya6/mD8pkfI/xSHAlQw+WV8GUjSnnScjjJqB0eyLTrF6R8FXTPit7eaDOSvv5zWXrEWgzQmx0Oh7ZCSHQFUHFhdGLB49Yjyj1KFdYro/YTreZz/ts33jKtuvnOdiHnO7icyC8tiJJzm+fIRDC2sMQ23qJUXsz2Gf57PlMyq5KFoaneL4whqXU+TG3k3+wuMf4/nL5/i5L/0SP/zEx9hcXyMhPLhyiHOjffa+/CrGSDMBpowJhjtfsbJ2yLhvMeBdhReo8Hz5lcs8fHRISlOu3tpH9qas37/B8pGjfOvtSwz2p+yfG7M3GfO+WcvJ2YSVowvURbDO+xHe1WS1AT8euHNzh7cvXyNp0TtOkdznBMFC0llrDXjHHl3nV5//Bg9tHGJtsIw4R6iFvVv7LB9dMsnZabLi6SjgilIOWci1p+4yzpuGW87J1PjbzgT3csbFjIgnjSOyFGz+jLf6jXaZtNtx3+oGC7evclMzMUaauirESSnrzZA21xeuiorMux33hLH0p3jHBboUqYvsTZuFXIaKOlcGihYll14myZERl6zBS21EQ5dMQjXmbI1imgnBKvdmQKBl8KqUCj5K6WmpcAPPqKkZri2zIUI+/TAkZXrnNpeuXuDK7Uvw2gW+Wb3N/qJnabDA+sIqR5c2OLKwytJghC/0mj6cFBEqVxltRzyHlpaopjucu/0WTx9/gPtWj/Hpl7/Is0fO8sjJBxhUS5y+7zivfP6bJIXGwfGjJ3jtjV1bQCJ4aWzUTBVs4LjLNBXc3tnia994k9tffIlzW7v8yNNnOfn0I/ztv/uD/POf/l3euPw2k26Pb11NTNKED8kpzo6EtUFlTAMtUwN8xcXLW/z8b3yN2+PETjdDnJjMUH99vQAEIN6TnePF195gZWWJ1y5f4el6gbqR+cRnmQja6zw3DplG6oXAbM+4cpVzOJdJEWRY47oZSUxrLXcJJomwVM9VcPJUcUsVOo2WWyaDijePbXLsNc+VFKm8L7px9vx9CPa3/ezQwhlM39Gn/53HPWEsPbK97x3TWUvjHBmhayM5pwL5Odudy8Te4D0eoVMTihAyTpXK29wQ70qS7QTUlR6PZExlNTdMVnBKUimDdQvruOeAYdXejIV0o801HtpY4yH/DLSJ9s4ut+7c5Opkh+3dfV6+cpFvpilutMChtcOcXD/O4cU1RlWD1NbzYnuaJ+DYGCzT1DWvXH+TB9eO85ef/Tiff/NFbr76VZ555CkaKk48fopbV7fIF9/m0tZV0FzeQRj4mq60BUAiz3bRPGVnvM+lK8IH33+WM9OWs2cPo0y47/4l/jf/27/EL/7Hr/H7X/wa18c7TK/NWF8ecv+Dm3RdQnI2754jzltL8ds7Y3amNtKirgIh2IhtEUz/qyzG1ZUVThxf5uKF26z4Id/cusn6/mHuY5mr129x/Og6ubU8wg9r0zf2QrU0pNuNpJyI0VHVDsYZWa9hmKAIHHpxxGA6AESj6aNYTqMQu4hXj4wctfc8vHKIF7cukUQL3cjyXCdSOi9MXcZAjXgXqfK7H/eEsfSepfOBSY4spoQmx7RLxJTKYnBUofC4clE/FNMGM7lQW+TS11SyUUDoaydF48KXnELUXInkXpSvTFtJFK5Rtt85jw8BNCFA7qx323vPYHONk0fXuU+ENkZuz3a4evUS+5eukl95k5c4z5eGwsLGKsc2T3KmangERbzHS40qrPqKR1Y95+5c5OTKYT7+4FO8cfs6v/PSFxisPMTjT5zl3M7zXM1WyUeYdxzWISA5kbKhXzm27A+GsLDKq0l54Xe+wWip4Sc2B4yOLrJcedaW1vg7P/Vxzp5Z51/97KcZT/d4+eJVPrB7msWNhio11tKtGaeZ5bVlgg+kPMM5x2zWFSkoq0s1wyHtbEJQUFdx89aU1Y11Dq0NWFis+My5F3lu9Tinjx+l
HjSlwxXyNOJrI0jWywP2r24TGk+amXBi3hojhxqLXF2Z7pUzrrbuWPGmsO8bE/iLZQBTBpvi1iUeOnmSzTtXuOF6siT0obtI0ZQTY633g4vf7bgnjKU/OhHGucMBky4y7qxHtfZ+PqItq3kN7Qt7874VR1U2Gi0zKr2YcalgYYujoCDexjaIm+uNZREkR0QqNBtM6ylMVs02jZdMkown2JgINUE3JxY+HBmuc+SBTbozj3B56wY7t7epd6f4rW1uXnqFr89u8dxkj98592XWjj/I/StHWQwVdRjy+MaDvLb9Nm1qOb1kQMK5kFgaDgjjxOryBtcn+4Xyn5jNJgw3anQ2Q/KMtp2imkk+UC2MmHRWI1leXmbn1Tu88OXLjPd2LewfVJyfTdm9dYdOMttScWdrzAlZR1MmdRGfEklbjh7ZoBnUsDs2Tx3CnLmbYmchblWxtLqEhMBobQVix7AesTBqEHVsHl5jddEoLBT5L8GRW9NOGKyOcFVNita9mrwnzlqqUJEHlqMokGdWTGYcrZfeCzozZE6C5YpZsYYzhNWlZR4fLvO73Y7lfKFMH+hzkzlIUTzNAWbxXY97w1ikx7od0zLKbDrpiEXVETI+WH+JqnVKqkIljqwWpvW9Kz0DtwredMF6bpDexf1yAH11t4gnap63E6vYbmUbuJpusRp7GQQXBMTEKkStsu+9t5udEs5VPLh5H2n9BHdm29zY3+Y+V3Fkd4/hL36RzetTXn/zSzzvW6r1FY4fO8n960c4s3qSi7vX2ZtcZ3V5GbcyIO7cZjLZYxA8I9+QYktHYne8R+VhKND2E8DEEFvxDpmA7sy4ePUtbscJDxxf48wTp3joI8dolpd58T9+nZ1Ll6momIowkwxBaLuOhUUTAREPo4Wa4aCysRgwV5PpKe2aMzFnUlaGjfD2GxdYHA0YHb+Po+tr3An/f+r+LNiyLL3vw35rrT2d+dw55zmz5u7qqu6q6m70CIAAMZEASZAUQxJpWrL9YkcoFFLoxdaDHQ49OGSGpAjLD2RIClMUSZkiCBpAA2g0eq7u6prnynm4N+88nHHvvQY/fOucTJDo6pYJORI7KiMr8+a995y791rr+/7ff6jokotbaBa5Wgopo5C+wXhF1mrgyhpPjQvg6lpQqyTBFylKIS4tWkEnqlBjQ6+Vjm46WswtJhbdTtFK8alzF3jl/deZ5hJ4pRTRJ0EW26yXSY0mTT4ePn40Fku8gtI4rak9bO+VlMqTmBTnRfwkrFpNUIZAwIZaepNIMdFBhlVKa/lhRDFZxKfipD0afD9EbJDYPDH5M9rPLZR8CNgAaQQVMAqNEQ1NTB8mRFGZD3OV5cxVzBhYbfRYafYY2ylDNyJkhuOffY5zzT52f8D+xgb3r2/w6tvvMcoNvWPLYDT9sMLGtZpOt8Xq8hq3r75PI2swKUc4JfT9w6Mdlps9dBnwWcDaQGYSGIyZbmygsoTmUovzF87yG3/l8zR7CW+8dYM3v/cWr7xzgxOtLs88/ThvXrvJ/qgU34JawAM5oWt0okjTLMpwvTg7+hhxjiNNUpRJsJVlcDjCKIO3ltF0yqSqGG0PyJdzGeqmSiBjQOUGVSPpzzpQLOQc3Z2AD9RTTzWsBDVzDkWASnIscV42MiWUfKMMLnhxt7SgHajCREl4YPXMKo/faPE608gL1DLAFF6uMLSVIs9NDH798dcjslhmL1EgyMHumINhifKiSkyjOYVUl9G7ODa5wVn5YWLmlPsQxCzB+1hFEch1dDaMX8MoE0+KyBODuU+yGGKLCtMYJ7n3XpxTskRJUrHUdsKWj7ul9xKz573MfdBh7t3V0Cn91iKJEq3NjckeRTtl9dknOGmewU8qBttb7G7c58bGOjfv3sOe6LDTiv2TMYzHI2x0e1dac2dzg8WzLRpZRu1SlC+hLpls7tM7ucTiyTWa/RbKTfjDb7/ON19+j7vru3TznE9kS/w7X/gMq7/xOK+8usLOvR2SIsf5aUxeFvrH5OiIo+idbEyKSQ11WdNsNWi3CxKTkjRyBvuHmNRw4sQSh7tDDJrX37vFU50VEkCnCp0nOFuJJHzqIlqoCbmhsdbi6M6e3B8P1f4YNy7RzVyMFE0ticg2OvYYRai80PtzE6XgkhHjK4GblQ0kKuGFU+d45/Z74gYUmemzpy7PskhfSuChDfRPux6RxSIvUSuoqookDWRZgDIwtSVWGwoSZnu2FrRPNgoZQaK8F8cWZebeWwI5x9IqNsczqr8HMYZQcnqAEvtVJelVISixBYqad6cgMyl6lruGGGajwDmhYCTR0CIoLy4iNpZ/guvKrg0s5A06zQUqHLujI8Z2ykLWZPHUCr3jSxybXGJ0NGB3sMfLg1vc3rpDOZ5S1iOstbLze8+0rri+c58nTp+laRzDumRwtMOgHtNpLzMcH3Dt1nW2tveo6hqF5kyzw986dp6nzp5j8ZeeQzU9Lzx/nreVJWs2cUqsZ0XupnjjjavsHYqtqfcOX7ropFKTZW2uPHaanf1DOs01Dg6GDI+GnD27SjkYc3g0JuuuMto+IncNgrKk04Dp5OKzpgEb0E42g6xZUO5PCCpgpw5/WKHzLHohazlBQE61QEw7hjCpZKGEBD+yaAuhdpAHgnGcPnuS0+s3ueGnzHbFoDWNRkZSTgCwzs+//o+7HpHFIpcCssxQtDOWWrA/cEJpCGBMLqIdLW6UqZYdvXaz7HbJUyHEbBSFzFDiQvSKqCIEKcxAKelY5s6IzJisooKULyKLLkli5LaJDv8+RI9jHX2ymMPVSaIIJFKKIehPYKa4VJgkBxISY2h3lgk+sFeOuHW4j1aBXpKh0pTVhWUeTxx75VhIjTI+etAvhMD2wR7L/R7H+n1K79jc3calhg8/uEHtLCEoemnBZ0+f4NTU8Pi+5uQ0p/XiabKTXWy1j9Hw9LPnMWmGJsR5luPocMDv//EbYpYX+WjE/q2qLPu7h9y/vU6RpQwGJfV0ylQX3N85pJnktNoF72xvcvX+Hr52JLWnXSnQQRjjzYKk16C51CZPEsLRlFBaEhT5WGFeu0f/Up8kIpchMaip9Ip+GO2gDCivMJ0MbTQ+Q1xwptKDKu8xzZzPnr3Ijetvi3FJYkhzI+hqOW9rZaP7mOfzkVgsav67p61TdJrRbiuKNGVcy1BJKBYS+ayDF6FOdAuJcP1D2F/EJ5GHWBKmo9cu8qBpI7EQXgmtxuNRQZGmwhgQNoko+jxSzungRcFnEom8iIQ+fBCj8SB/53z00zXSyzDD+OPwLvEKay115SirKZNqynAy5mgyZHd6xHq5y1p7kb1qyI4bEpRnWo/xWmrzhwfNgcDVu3fotNv0+otU3uOSIT3TZrx7CK7mXGjBumPLlySNhN7nllk8k1CN9jAmkDYbpIiuXSZOgWAr3nrtAw4OJqSpoartg/ulFM45RuMJH15dl4GrkT6hrkrKaY3rt1ld6vFrTz7LYtrGB08axKDda2EZ19YxHZeUKVS2ZryQsz0eMR1PKSvL1btDsi2NHokTpkcRrIU6YGqPcgGjA1oFjAo
UeU5zYQmz2KLZLsinmuIwIRsVnCbjmG6wm3iyXIkcPThmc0jvAx+7UnhEFgtEBCsEmt6QGE1RKDqZYVh5vAskWrLUg4egNUZHM26lcF5CN+W0UPM3HTuKuaMLMVkMJKICLcbTmZrJicVBRCFwZBJ3Gh11L87nkX8WRU1KieN9IpY8OkRlZlCEuqasSyajAUfDQ4aTEfZoixOTEf/sO7/DXp6RphlJntFtd+g0myz0mqwu93mWcwRnuV8fcWO8w51kn7RocTA5kE0jMDfZ00pT1jVvfvghzz/xBKtLS1QuQNGg3e5R3h+wZ0u2dEm702C43KfhStTNDU6ctTQaGbpRoLMkit4U3lrGozGudmhtyDLDYBj+FAM6RQia4B2VF0p+qgQCHg8rWAyUkzF10cKOa5I0o8jSqCoNqCQlpIX0F4kANPtlg8lojFKa5cdWyJcamFQTRhU+kel8WVrskaUmUHpLVdVMJjXOw2RaMpyU7IzH2MriKocLjnprF30uJ1tpQohebg7sQxvsxwm/4FFZLEoe8qSuSIMh+EBapPQaCZvjUhSASpAq7yB4gX9ngiylQ3SnlEQpHTUq2iQRFJg1/iGeFjKhTIxITYMTWswsJkIcWXXsiSJy4hXK2Zihnoqbiq+pplPKyZDxaMDhYJ+d4R5H5ZgJNTpNSfKcdqdDv9PjePck+dsFX33pS5jOIolCyJzBRqvZQLA1JjXyvoYJG+MDJqMReaNBepRSezF0ANFguAivT8opb12/xjMXL7O2vMT+1JK2GrR6beq6oirHBFszChX39sf07w/o95sEZ+k0GnMTQldNmQxG6DTn8mNnyb/5LkWRzjU7D1sJyXxLtmajoJlnUDlCKja5y5023tXsHW1TltDqr0rctgbj5HRSiZHQIgSQSYuE6cTgvSOMLT616LUM5QNmYAmJJneGohAUzLuEYHKCiqYlfUWYiHl4iIYVJIEPOw0+XBiQZgqvHBJMLZsekRiq/jxM8GdX1zuKtJiRSSmKHKOmopgOkvthIpdHBfkl3l5GpvpRv2CQN26dsH+NVjPUEKU12svvLu7MsxAdCNgQyFTkRzlHCIF6WlNOpuwOtrl5sEHuDQfDI/bKAVXiKRoNOs0evU6PxeOXON/s0M0LGjoVTpOK1krDA7TSOOsZTUusLaldLahcIn7FPkA1tewe7LMzPWDXHbGysIYPnkajjRsf4oKN9zd2YLGHGQyHfHTvDlfOnKXfTBh5hzcarQyElKlzaJ2gdEqeFJQTS5IYqvFItO0a7HhKXVX0eouYHNqNglajIDUDqhDmA0mQh0082xRpXtDp9VhoNOm0W7z90XWSqeZk9wSmlQgwYCVq0MQo9lB78TBMY4BREsiKjKAmKAPTUU2SpPhpgkoUarsW04p+TiiD3DcnCcYhMUKorJx8vSBGFiEPuDzwQTjC54mUgc5H7mR0BArygHn356jBX6oq0nyJUNto5q1iViGkRgt9JSJRKsQ8j5hlooOXBKqZp7An6l3iZNfMmnAzh3hRD8DC4IQXVZexdDrcY32yyeBwyKCaUCUB3Sqok5Qnj1/gdHaas1qTmXS+OJUOYgQOHE4njLTQ2VUIKO9ojI9w3jGcDrCpweiEImuReEOSQ3Bwa+M2b69f48rycX7++CXG0wmH04rfq7ZZXlgjSwsOBrt4Z6NX2gPX9xACWzvb5EnKuVOnaBhDGd3s0Qlp3qLdbTGuFTfuHeGnFcePt9HakRQiKXbW0V5ZQumURqNgaaHN/b1DsiwRn2RmO/LM4E9hDPQ7LTr9NktFF7TBO8+wGuJTSPNEAJJS7FPD1Es0eZ7IqWLEREJ7DUaRZMKvtlOHb3ncyGHaBior/WGhoZHKYFMrcR43oFJDmGoxJMzER0Fpzc31Te4saUajiu3NfZLEyBDbQa6mOOfZ2T7k6GD46GvwAVCK1UmJLoSGrZBmMdVC1RdzvFkJNRsQBpmex9MGZlMUWQjyECu8kiGWVhCsk0bdOZytGR4eMTg6YO9wj+3yiINqCFlCs9Oi32xx7vgnWGj36OQFiVIMXUUnKzBaZAAO5rkeRiuc8yQgXmXBi9kEspvlzpFqw6mFVUJ7Gclv1wTruHr/Oq/e+YiVbo+/+vwXSQJMRkccBcv7R/d5oneKVpLy3ewaNniODnfIsoKUguH4cE4vD85zd2ODJMs4ubaGShLGPqC8MLQHR2MmOMZGUU6bJIklzR3aiEhq7dwKSZqR5E2SrGB5pUfj3g6NRs5gNJ0vFJAStdkoWDq+RlNr1m9vcOX5E1jv6PW6FGlO5QJFxFtUO5XNI5M5ixiKaKH4x9PfKMmPGVc1ToMzoCsvdkfHWoTbR6hxhS8SqTCyAGkCzskJ4T1uLMCLNorKeN6Y7jHspzg87U6TLE9FPGg0rVKMCfuLXRr384+l6f80XscF8E0QfwPgn4YQ/k9KqfPAPwKWgB8B/3YIoVJK5cB/CzwP7AJ/PYRw8yd+H+85PRVWsYrDI43UwvXMDTF9YMtKIMblyRJRQaBcYSfLYtMIcqQCuMmUweE+R/u77O/vsjU5YKgcvkhodfosH1vmYvcCC40urbRBqhTa1YREgnRMPDWUjc0/SnZsI/2PCh5Xy+KovZRvouBTaJ2RpobUVkLhMClOCVXj3p1bvHz7Xdr9Pl+8/Cwn+gtUtuTq1iYj47h9/zYLpuBUt88bhxtM3ISVhVVwjrxoUtUlanIYh5/E6Xvg1p3bGODk8eMkecIEmFYVrhQPsiqB/f0pBx3QvmZaTVlYabJ0ois/T5WASlhZWmChXbDXytnZkwM5RIccrQ2T8QTta3pLC4xHXbqtFoejIcu9Po9fOM/YjekmDWERp7IxiK+TuOKoRPwXTKIJYwupornUxB2MqWtL6WqSNJUCo5ehuwa/OZaTfK0FTlGXlqSIJhcqwY0dOk8ghTv37nOr67G1JTMJadeIQM5LyatnC18rkXF8zPXTnCwl8NUQwjC66X9bKfU7wH8A/OchhH+klPp/AH8XSfn6u8B+COGSUupvAP8Z8Nd/0jdJveNktCXC+jm1Au9jEpgnhkzENymxcFmQ00PoKfLp2jrq8ZCDvX12D7fYHR5w4EZMUk3earO8uszZ/kWW2326WQOTSMOutJoLiUKM1hNWgAeVorQmT8XJxSst38x5cfG3TqxgCXKKxUQwgEQbUWymomdhWrO+cZ2Xr78F/TZfeO4FjjV67I4HvHrvBlmecaq/zM2N23SKBs8dP8fVwx26C20adYvUpBzmLbSCwXDvoUaV+PORnJSrt2+DyTi52GMl0YRCUxlFahIWmzmLLQOu4vbdIyZlxbjqsbjao9FeJm0EvK9ZWOzTbeV0mi2KPKOqRUylERrMsBpw784mvrQ8duUcxxZ7HA3GfP70eS70VoScmitUJkEqaiyvTXdEqo0HVTtUiizQWhZi52Sb8qiiHJaU+xX5agFTD70m6toA7gwxqx2x7LUqltYxsqRpwAXqyvLOwS5HS5GaFEV/deXiTEU24RAC1rlok/RvsFiid/Fw9kzHXwH4KvBvxb//b4D/NC6Wvx
T/H+CfAv+lUkqFn4DLpSh6JmfqAr6sUFkanVIEoZDsFGEBqyAnjAkB4wPldMxocMDe4Tab+1sMygFj5UlaLdqdBZaOnee59hILzTZZkgnqoaIRRRASIEEYY8HM+n0lTWNQEWIO4B2J1ljrcNqinCUm3hNCIDGpROXFODxpIIXWEbzMB+q64ve/+/vcb7d58alPcH75GDuTCa9t3yYPnosLy7STBrfW73F9sMkvPv4sN/a2OLW4Sji4T6vZxPmA847xaCCZLi78CbrsjPRICByMx5gspaUCy7lhpa2x3rE7POTekSc4R2pks6lRXB45qnJKWg5IVJdmu0WWJORpSqPIcHaCi2zd6XSKjilnG/e3ofYcbQ3YPzxk9fxlQjllYWVRNggdXUTbKbiAn9SSEhaCABVOzyUQRstsq7ncJG/lHG0dMvpoSLHcoLFQUNmKYqlgluImXKM4iPYQjEZpz/7RkNtpjfUSpxikqxeOYRD9aRUdfKrKUdX+Y2ctP20+i0FKrUvAfwVcAw6CFN3wIN0LHkr+CiFYpdQhUqrt/Ctfc5781VhchpBjfZyHzPMf43TVS6njg8dZy3A6YH9ywPZwh4PpEaUKNLIGC60eSxcuc7nRoZM0SdM0nkrCKFVKnCqDivLcCAKEiK55JVwzE2MvZulazllpmrzoH5T3eBt7JW0wiRANtTHMeM/aB6wOJEgW4mh3l2tvfI8TdcXZx5/gs8fPsz0e8vbufRaygse6CzTSHKUz1jfu8f31D/jVT73I7cEeC60OJ1o9vvfeGywudxiXY5yt6fWX2NlZRxvJYLT2gUmcwLwpWd7AoRjWNYPRCFdPsOUUG2qy1JCnCY0sIU8z9Khid3fEyukJRTUhSRv0em2SPCUvDO1mg8FgAnF+5WOiWbtoUtYlu4dH7B8e0skKru5s8omdbZa6PZhIjqfKxIIXJTEQHodKEFbGRBKhVaZwGlSq8Dh0Il5oIwL1oMIOKvTJBbKTHbQNKDfzwQaVCdfMIzKKu7f32ckiwBHhYe1BqUBtpXJxThjsNkLN/0aZkvGhd8CzSqk+8M+Ax3+az/sJX3Oe/LVw7mIIlFSVRY0rdDPHOE+qLcEdcHt7l83JHgf1CJShVbRZbC1y+tglni5aFEmGSUR9aOYaa7FknS0+h0DKNvY4M0mZEh4/3sxMC+TGEmYCMTuf0Wip1cTJP2a9iI+YiXD3zHsMfBBV3nRvj7fefo0PR1s8dfIEWZajtOHGYI8TrT6nsx62jr1M0uBod5dvXHudX3z+c2yNDmknOSfaCwxGY5yCi67DTSP8qOl4iMfTbrVpNjvc37wbf7rSqzXb3Uj/UQSTYEOJVomEqnpQCDpllNBwahu4dnOHpeU2eaNB1lykt9DHWeh1mvTaTda3DhBeXPQP9pa6HHOs3+fx8+dYKpr8/JWnWFrokapMCKUhoFw0htABlafYaSWAC0KTNw1p2EMlP/tggNoSlCLt5vQXZHgpQ2Ig0eDlJBIqjgITCFaoSbW13Bkd4dqxXA6xD9YKh8dZKcFslGJUkwpb1f/mJ8tDD/iBUuqPgM8CfaVUEk+Xh9O9Zslfd5VSCdBDGv2PuUQJN1GWYTrgSO0zGJXs14HNMMTmCcda5zilU5aLFo0kFY/hONWfkXtm3LHgpXQiuqAIXVtFdEqjEwlmBUdw0abVz+6Cm2tfnHpQ3YQQCCqRxZLIyTNz1HdKBpnEoV3wnno84uq7b/H6xkcsnz3L5x7/PGa8j1KK8901VLtHbSdUVUmWN0kwlOMxv/vWd/nsJ55lXJcYrTjV7qG04ur6bZb6C1w8cQa7c4/alpio6Oy0F+i0OmxtrUdHTNH9NBpNkiyhCo7cJJisga/ELP3YQpOnn1zDY3jv3XtU0e3m7s4R53bGrJ6uca4mzRuYNGdajel1miwuL7G/uydUodjPOWvZO9zn/VuO48vL/PbbJWc7PT7/xCfodsQUXBUSxeor0dGrTOGdQlcP2Agh9os4L35sRjzDBHIEGkpo/VrFZDeZl2FDBHwAI73ryJXcLEdMIuhhrcXPokgiySP4QLMc4b1nPJxQlfXHrZWfCg1bAeq4UBrAzyNN+x8BfxVBxP5d/mTy178LfC9+/Os/qV8J1RRbl7x5sIVvtGnmTRabq5zpNWgsJmzdH7N9WFJ5R2Fiz6GNwJ0wN8mGQB0USWQWS2cT4v9rqqBIVeRq+Ui5CIEQ6ocGfKLVCIHII5OzIjFC4Zb/ZFajtMbHk2ZGo6+mJRvXPuSV62/iVhZ47NnnaKUF7WaDE3lKqg2J9kzcFIUiz1oYk2AnU37/R9/m4pWLKK2ZlBMuLqwg0XyGW4dbnD5xmqXOAn7nHt5bnrj4NB/e/ACjEtIkJUkSqmizlGQFadEUgZsWow+TGAw55493+OUvX+b842fIOou8/sqHfP2PXmMwLkmbLSbTmtHRmO5KRd7QfPGLn+TDt95ncDhiZ1RC3sTUNZeOHWNj8z5b21t0iwafvHCRjf1DJsHz3OWnonu+nvsVh9qLUrEOMAxoHSCNkLeVMonaolMjzbyCJDf4JIARvYpJhW+nMiOU/bhofOkIqYJpQCWK7a199vMY86EVaZLHDW82HxJ6S9NM0VrR6rbIG/nHroWf5mQ5Dvw3sW/RwD8OIfy2Uupd4B8ppf7PwGtIlB7x9/9OKXUV2AP+xk/6BioryFuar3zycVy+RrrYFuDCi0jLeM1wYtF1LH20npsQoIVYOaNx46VEkJi72XRWGsYsHhPBO2HXKtldCBCMBBkFxFrJoKKlp5ZZSWzYRVSm53MBHRej94G9O7f5wbsvs5E5Lly+wPmF4xxrL9ErmrJgBzsEAtaWmLyJyRpoFK70fPf179NYW2C1tcikHHCusyjvTRkGh0PGwdNJcwpjMHlOlqUsdVusLR1neWGBu5ubFHmHoglJWuCVRuctCJUcsLGEzIucM6cXaTTFvlSnmud/5hnOnVvgnddvcf32DpOp53B/zOJkTNKyPPbUJU6dPMb9m1cpuobfemWfTqdFV8PP/OxLDCcl7179iP3DXc7nXdbyFrlJ0WWQyPE8bkQGjFXSxKcGEoG53dTJcFJr4W1Zj2oKeuZFbwFxnuan8vMOVgwPdarwmZJSGC/IW9OwM57i8hzlZ95yCCsd5s+FimWgvDhmbc3/74slhPAmEuf9r/79deCFP+Xvp8Bf+0lf91+9ygAfNA2P1w5Ki0qT+S7QaiYs5BlbNhq84dFBxd1fhlEmIMO/MDOsiAYWkcPk8XNRl4jHmKMj3jnx5k0SqX91KlR4iH64sZdRSugiKLSykZukqA52eef9N/hwusXyqdP8/PHznOqs0MgKghW0TROobQUhiL9w0pSa29a8/vZr7OTw4skzDKcTzrQW5XUaqcuv371Nr93h2MKywJx40NBrt1jqdTkcHFL6mnZ/Ee+DGN6lhQw942u0Xh7GoAxHE0sZxEIJErTJ6C4tsLK8zf3tlIPBhHv3jlg9OaC9Io1vc3GJs01D2jC8/sErvPSlx7BTzTs/eJWVVsKXX7hCUjRIdGAxbVOnA/zZJ
ibJwCrCyMM4oKZSNjs82kk/aYyGVDhnIYbf+olEsOsikY1Tam6UcoSghToDQnydit+xzhRewkSh0SfXQ6rhkSCZCqKAfF5FoBRJIshhYnSUGP/469GY4CtwSvHPTYt24jmxP0K12wJS60CaapYWcnaGJVXwaKRHcFrQDYLEYbsII848uzwRFUKGTx4wSEJWnP9jo61HmuaQ5CS4iPVIKadiPqHSMqGfmYUnWjEaDPnhjTfYOtzlzIkz/Nrx51gqOsjPP4KVJkBw1FWJdkKCNCbBRTj5+tUPeH+6xeeefpbxdMTpoo91NVkiCkFrFa8d3OKxk2fpN9tY75g6x3Q64buvv07pHB5NkqegDCaRYaHPCiaDMY1Gig81aZbOT8CDQc1wIB4H3gmilbW6HD97jPfeu8/O/pRy4jlz5oDls1NU0UYDSdGj1+/x4qUuvUzTWF7j9MovU47W6XQK1tb6tHsZSngoAqXrBKVSQQxdIFQWvVXidyoYQ6gi/hTEA0AlCcE6klym/cKsjqdQPAmUC+L7pkLsQ5CQ2uBE9j21nD99juXdIYORmFUYY/7E8wax1Y1/FoT04+csH0+z/P/zdWRS/nFWUBmHG01Eu+A9lDXNdkq7SMDLA+5gHr5KHEjKieLx1smOGsszQ+xTQAaMIVDbmoAmy5rkRUt2Gx8iZqbiCWDQ8tlEwbYsHBfYuHqVr738+9wrBzx/4Rk+d+ZplrOO8NBsPPUqi61LbDUleE+aFMyyMKkdGzdv8b2Nj3j2iWeoveVkcwEbTQJ3Bofc2dvizY2r7JmKaRL43Ruv8n97+bf53vo1nAuMrSPJGiQmIdjoC+BloVd1QuUMy92CZy8vcWrRyKbh4XBUc/PekOHRmBCHmTpt0V5e49SZZfCB0bji3p0DXFXOH1qlEhqtHs++9Bx337lK1YCsAWthhceeOE2v3yA4iRcUfksiw1udCPJuNDQy1OkWySe66DMNdDMhyYS4ZzJxplQhWrIaNRdkKcQ+ifhIBCPSCTQS1T62hErYy9XBhO7JNU6uHYuDZfkc56S6CN7NRxNeypP5739maNj/UteMzKiU5mqjwzfGU35uXIFN8ZFAmWlNv5NxMKmxSgiS80NzVq/Op/5OskHmKV4P0HPrZZdJ0gwl50xs5J1oVkI02dMm2iQJOhaQxnKwu82P3v8Ru6njpU++yMnuMqUrJaouiQ1icBBqHA5bi89Wlhbouowf9+xv3ecbH/2Ixz/5DJnSHG+22ZrsMKgHZF1YO9/h+IkOW6/vkxymfPvu+1S1k+l51mB2Qs1Z0/KDlLhvpWB1mV/67PN85aSnSOCDD+6xP3RcuzficDjh3vaQ9Y0D+suLZHkbdE7W6HD60hnyl2/hK8vB7phyPCHvzZSSARc0K6dOsdj6APveu9x74vOshTuUwyXSho2uK5HOojSKJDYCGqWkvFVKaC7qXCC0xrirA7Aiu1aplswcPMaDs7HGnohntUuEiu+txHoQgjCRZ2nFNjA9qOit9DjNMbmHcRwgvUtshHSkQs2blBm89uOf00discwe5RA8jsAf9Ho8We5wSrRxwiFSgU4nJdszlE7w8ohPzdnDSsU8RSO8LYWXBYMiMaLND0bFhSQ9zox+KRETjqAMqTZxjhBDXJXBjka8d+NNPty7zfnTj/PlmHlvkgRVCSQ5m8cE5bGukqGb1mRZEbX48t1GB/v84duvs3zlDL1GwVIzZ695l7PPF/RXjpM1C9JGE6UTTl88xs/+3DNc/XCT1966xStv3WI4qUhSQxIFZPNsmKjRuXiqx9pLn+BvfvEKZniVgMYkGQcHB5w50WLv0HJwOGW4N+ZwZ4+i1SPPmpA2WTl5kuPH+9y7vUc9tZSTKbP0M5RGK0fWaPCprz7H9bc+5OY7N+ifXuFH1+7xuWeXCaVImV0IKOcgTUUKoYlSbVBGqEMKCMfEv9q+vw81hGIGy2t8HTCZwjnAOXxlMa1UYP+JlX+jfcwXVehUHP5rG0g6DU6wisZEW1YX5ywa55zMybya0/Jnposfdz0Si2V2ifhGM85y/kWrwf/6cESaGVAJKniKXNEoFIORlFdeqThsjNP+yA+a6x6V0Cyci4hXFPfUER5OZw7sM/NwAmkQc/CZ+Mtay+bt67xy511aKwv8hU9+lcVmd47V+6jlwIngSaBoK3agSUaeFMiSFnqNqy3f/dEP4PwKF5aO0yoy7qU3uXi+RZIpdGLQSQ4qlZmQTugvtHj+M2d49vkzfOK10/yD/9f3GB01yJtNijwjVE7KPDzPXVjky589wx3dRk+GBC8ZKKtn1sjynFZ3zPLyWBa5SsjSlHI8JGl0MWlK0VniE88/wf073yXYwORoLBuJklh0Z8UqanGpw8JXXuCZKudwb4+97feo7ApwCqUTTJKKH3VtRYuCjw6Q0QQ8ntaoBLXUhs4AdizK+ijoU+gIE4eJmEloYyRbMngUJsq1gZCACfg6oL2oN02esmwWKRo549EE5wJVVdFoNVHK4GfPRJyp+QgIfdz1SCwWsQOVnkPHXf/9fp83Du7w6cqJK7bW6BR67ZSDkY2MC2Eda4L8UJ1YrFplMCYFohFfrMuVCtEBRoKHvJ71JmnMolRxti8ipeHuNq9++AZ7ReClZ1/gTGtJPheZWViDJJHVUqq5YKXpxJGkOUYlzNOWQiDUFaWr2Vo2PHf+MqudHl+79yoXLjcpHTS9oo5ukMqJ17BQ/Z2Y6FmLdlOOr3VY3/C0uw2aWU5Tw8nFNi88d4ZTKwlv/+A6+/0+m3dGhIMNmv0WraUO3X6LvJUzPExJUmj3+ugkjcYOU1QoULrBuScu0f3667jRhP31XU49Iz7EigChJNih3K8kp1U02bh6k+zML7K1+y1W+nsEuywPo46J0RogxNPaCUlWRd5fQIaGC4b7//A7HHv+MdJTizKoLAMhkYi8UCnCxEmvUogxicogBC3zGx/k47nGJxNIAs2kwUK3x3AoLOWsKOJIgdiHeVw8WWwIOD7+eiQWy/yKQi1JtlV8bbnPk/sjGnkPlchgsNPOMHsllXN458VULg4QdSKwbjzzBfoNIfobB4gqvZkh9Kzv0V5uXogeX25S8cFHb/PW0TpPXXiCL6+eIU2SOdQMQXY6r2NOPXMrJpQVmofQaAXB8QHlPDffeZfj3vPZC09xbvU8X7/3Ojv1mN4wYXVck2eaopXh6gpvLcpU6JgiWlcl08mEb3zvBspouu0Gv/DZs9Rl4PLpJa48fZEkUbjpEZW6xXg8YjhKoLQM7u3TGYxo91skWUq32+TgcMTh3pDl46vRYRNsfYjRFY12kyeeucwb33qdnbt7hGmJp8RNBVmS11agkWHnsYtn+cNX3uXC5Re5vneDy8uLrKga7xzBGJyzBBVII8FUMlViBEdEHItTS1w90+bl/+mPeW5thXMvPYM51kPXgZBoVEPQRaNDdBp1oDV+6qB26MQQjAQlDbMpZTWmkXc5fuIk9w6HKGPQWQ6J8PdUlhK0IRkdoja+S+OJ58lchjKv/NjH85FZLIHosBEjoA2K7W6fH+4d8mUV
zfU0NAtNM1FMaivNbVD4aIEkFkY8JHuNM5WA7GIqSIipUqTR0khsjeLRHwJb927x/Tvv0Fpd4dcuf4Vu0ZCqwQhBUob1ciIFL+WPMJdrCIrEZLgIYSqizWldc+f6NV7buskXspQrx8+zbQe8N9ykDorN3Qmr3ZRWrhkNp+hEk2QZ2ntsrXCVZVpN+cGbG9zcGLC23KbVanLpVJt+r0mrWWCrQ4n0RdNutSBpcPHiAqNdzXQ0jopSCYGypaW/2Gd/64h337vFxctnKHIvuSs6oJTn2S8+w9bt+xzt7THZ26e1phkP7tPuHwOdYbIWqBwCNHotPnPxDHW9z7Buc22SsLDYIqmHzH3doozAe4v4Nctd15EEqRPDF/7i89zQLd66vcVr//I7fKq3zNkXLpGcXprfq2A0obRyYo9CnLdpcEHIl85T6zaDySGtosv5pz/N2/1zKG1I0wyttJyksULoH9xHv/5PaF98mmJzf3YM/qnXI7JY4sMdOULG6Ii8KL6zvMgL+wc0lnrSV2SB5U7GYOKw2lB7hXEyjXdAEiftM3MKkPCfxIiziya6tShmdFSBG48G/PDqG2zoms89+TxneiskSnoSFXsf3IwJLfC0iieYdZUYbkRjDZBUMKMgOMf+zhYvX3+Txx9/nHznBwDcPNxi4ioCgcGkZn9cs1xaGpUlnVTo0spsSEFVWe5vDfjuG/dRhSEvMpZWlnj7+iEnFiesLbTodhoUraZw05o5O1mDoDVZQ8iMNpp6G2PASM3eX1ui2h7wzhsf8cTTV2g1c0B6paKZ8bO/+RJXv/eG+IENDyiaS+isjdIpSssMRRIJ4Pi5k1x7fwCHnv3NAyafPsVi1sS5KYDMp7SRDc7beK/FQUGms5r0WJvFdoefOV8wOnWSdza2ePVrP+SZfpfzz12kOLuKr4KYZigpvZVXKKdRuRApqQPt4yc5GO5xrH+Gk/0e6d5UhrwxKUGeDLHoDWE2no72pR9zPVJzltmiEbsi2b33ul3e0gFVBgiaLElJ04RmJso4vAwVEzVDNGL/o2Y5g1J6ETl6Jg4yXW0lE9F5bly/ym+9/zKttTX+6jOf41x/mSRNIDWoJJEYgxBQPuCqOi5EoaiX9QiFxqgsspA1iRZpQLCWnf1dvv7uK1x85imeP/cY4ubouDPYlhvnA9Npzbi0TCpLWVkOD0cMByOGgzGHewO2t4/4xit3ORpXtNqFoHuZ4v5RyUfrI+5sDdjdFzfIcloytpZmt4dXMdc+TSiaDUxiJJXZmHm/sLLcoz4ccfXN99jb2sROxyg06Ixmr8eVzz6GNVMwBVmzDzolqEQqAVdDPDWc95y59ASvXCv5g+9e5cZAoZImJmmAzqQ30lrybtw0njayQapIck2aGc3zPTSajk75/LnTfO5nnubeWpPf+vabvPmPvs3kzbvYvVK8j9HoVEMh8yVvA77yLK4dZzwZoLRitZGTqojm4YX8OVsUakZvUQ/9+vHXI3KySPmklRJ7ovhetDE4At/p93h2b0KeLmCDp79ckOcpdzdH7E1snBd6USQiZZgOonOw1iJ8PR0dK4NM/INif2eHl2+/R9Uo+AtPv8hSoyVqRhVBZQXKSUiSt9GcL5G4CpSlrmvSJCcxMw5ZmB/jylmqsuKP3n+Zsp/y9KkL4GTOYl3FgZvMB52udkzGltpKUGmSMLd2qqzjzRsH3Lg/JMlSsszIPqITSXieOpoDR6thSfNK/Hy1YrXRoLYDWcBZIo7x8axVxkgMHRoUPPeF53DeU08qBnvbaLWP0gFFjVYpC2vnMFkBShOcR5lo0q6F/h6CwOs6MXz5Eye49uFVlvQRWh8TxxhbQTR2Vwp0ms+TC0KYwf4C12dnuti7R1DKPtgOGZ85fhJ79gTvbe3x2x9e5cI7N3jqU5cpLq0StEEH8VnQHmwdKHp9xge3qW1JUxm6iWa39njvSZMkgjpyosXigCwRL7GPux6RxSLXLPJBZkN6vtjvtTrc2B3yWG0xRYp2HrOUc0ZrWB+wP6kxOlqHRzaw8z7SRWoKk8Tdz0XTipr3713ng8EOz566wPnFVTIjBE1lBBTAO3Cg3YMBlvciG/a1xbuaJM2jJas48auYJhbqGlzg/fffxucpf/m5z5EURlJ2gamtGFRTgpMSUSk4HJccDlOKQlOEhKmtCVqzvjvhjWt76FTT7GSE6LE8HZVYK/a2U+cZT2uKaQKqFDOGiPgppUXOkM70JDMQRGgxMwFcqhR5XkR0LCVE/putpnNPNW00wnJTcXOINyiEOE0PnDm9xKdOpmwdWs6uKLIkR2tRNIoS26G1x9YTGXR6IxZWEQzJjrUpmxkqBVdZlFNQSTz6M2tLXFrqcmP/iG9+cIsLN3c485kLFCfacrKMKlSeUfTbZMMGo+mIXqvHap6wZ+vYPcV5SnzN3sfsF++kMvmY6xFaLJGsKFgFM2M8fMArww8XOjx2OEEV4hlknKPVSji53MRvDRnVEZoMYELAOY/2gXaWoWMAkHKBnf09vr95lcVOn19+7NM00pTUGKyREsGEILOJSL5zKnrg1hZvPd7X+OBI0xyT5yIs817MrYMgZcoH1rfWeX28zq999udZ7PTmGnGAkZ1Q4/D4GMsn5uSlh7L2aBw+KPaHFT94b5vaBs6c7OKCw6no/Ry5TEpAPsaVp1E6UJJ+ZZ2nKiuMqiPUmjxwio+IFCYRnlzMeCTC756oOPSgTfGABh9JIlqbOZCilNCBfJB8zryRkVnL0eYBkzOnSIycGlL6eFAm3moFocb7KSiJ2VOILj9ZblDdOCT4OLjUChU1LYVJeXplmWphgY3BIV9/8wP6d9s8dekE7WaByTJMmtBv9zkYHdBptjndKnh/LKJeH0cHSikJtIqLfZZR+nHXI7FYHlSKYno34/MoYjyaClxtt9nfvc+Sn5EbZdjW7qWslg3ubo+pg8hMFZ4kSNKTiQYYtrT8aP0q237KS2cucyyKqnSSYONAEsDVToy8VaR4xPLKKU+gJgRHmhUopaVnIjqERHg6BMfw4JCv3XyZzz7xPCutLrgYLx4VNrvTI6raMvM3C4hhwqSsmU7FCrayjjc+3GU0cZxabdJrKqaVpnKe+xsb7G5vsHRqkRCjq4elxwxKtEkoK0dhK6aTkjRxJFmKweFtkKwVFSMWlI6ev/I60GFOAdEq5rDEoNIHeYt6rgxVKJyzJElKiNSiNDOcP3+a1eN9XLUPRT+OUsRvR8oxWXDB1QQ7FbcbW2ISg/MOs9KA2wMR2VlxlQw+QCn31NoaYxUn0hYrp1rcL8d8/dUPOdltcPn8GVoGWkWHjZ0Nzqye5ViRyWapFSrEhR4BgpniU6FiX/Pjr0emwZ814zpGRSgda+JouTM0ig9yja8qvBabVm9rEu/odw3d3BBcQAUnAawEjIdQB+7ubPLbt1+naDX4lUufYK27iEpScYv3AeVkDiJPQOxV4sOifcCXNW5aEoIjTxskibi0iAwgUlzilH46HPPtd37Akxc/wePHz+ONkvzLWMIpYGt6KCfm7O8ikjcpHZPKMyotb9884v7
+mGPLDc4ca5KoQKIU+7tjrn20zv31PXY29lBKU9WBg3HNoAwcjSqGw4q6rBkOplSVnDCuFlaBoOhqHkg0d2FUMZNlNisMTtAh0U0DCqUTlDKxfIpRhDrB2VpOjiA9wIt/8YucvXiG4e4I55OoSvVyujJLAJB5WqhH+GqI8lO0EsBG9wuZiWVmbm+Fj9qX4GMfplAmIQHW8oLnel2q/UO+c+0qV2/fpJW32RseYZRmJc8pYmmqtTDGVQgkJnmw0RHv/8dcj8TJ8jAIIUe7FjZs8OLxFAIqSfiwU/C5QY3PEhnz24C2gcx7urnl4KjCBz0nEw6HI17ZuE5o5fzsuadYbDRw2qBMApF9LCWECIREsx+RNAXUsvBcXaOVBLH6uY+yjnSJSFX1HleLNkWdWOGFM49h0kwGePENeuuw3rM1OZLTKxIBZwE7VmlG1nHz1oCbmwN67YzHTnXwTjJRamtZ39iXFOfgqKdTXG0ZW0ttEspqQmk9g0FNOhxipwOWrAS9aqVoNBtCFVHpvH4XIxhxuVEYQjrb/ZEF7i06zWNpLH2k9yKJUEERgo1cPKn9dXTOMVqzuLTE4eGEflfcVHSU9CqQECML09GIJDXotBCpdNpAdRS6mRDqEKn5CmWU+CPXXobMAXQi/aUfOg6GU1a/+nle+tKnGZVjQFFWFbWzdLOEtkk4dMKeriM37MHiiNy+Pxd6lhmSpx/EKSRa6Pg+iKuLDoqNZpPxzi6NKhORkI0DR2dpFpoiCdRe+pMP7t/ng9Euz66d4nx3QXyrTBIp9D6WzuLvq0jlUdAGgtAxqMUTrPZWzDCSlDDLVlNq/rIFCpZk448+fJ87Rc2vP/V5TJbOkZ4QokDNiwHffj0lpJJ5WAdZtLZ2VN5zd3PE+vaQIjM8dq6P0oqq9FSVxVpPXbv40ImGxfogg/Da4WrF/emY4ciSLY7Y3R+CaZFnCXlhSKw42UcYBROh3Lia5D3FAW7wVmYyQYaGftZnKKG6GxPL5fnz5WNpFo3DjaW10GZwd5e63cb4gShPiVJwlYLOqCqPr0uSoiJJJJ1IJQqVG/xQ9D/KRIsiH0hyUVL6yoFVEBT3/SHql69QXF7h9sENGmmDU/1zZGnOpJrSzAoW84S94TSaisTNYTbpROHdnxNuGDOujp9N3me9itw8E4dJB4nhQDnySYkpUqgcygvTtBlSTrZbvHNni5cPNujlBT9/4jKtdksYrvEUkKiJgEeM76yVvMpEOVmUWlCkYK2kd6Uyp/BJIsE4UaJs/Iy4qQhVze7ONq8c3OGXXvoCWZaD1gQn5YlWCd7XOG/xwVN5iw/ChhVxE6Bhb3/KweGI1BgunerRSjVlZalrR1lZxtMqlqmaJElJslR22lTLglHgvWbqRa9+OCzZG2R0mpYsLUmMLBClNSYrsNahEuHQKR39xkJA+hKJBtQ6eUBtDxHVQkiYIbj5vElFm9AQ7HyWpJSi2WrwznsHPHMl4IixhCZBqwSlDSbNGe7u0+jVECzB1xKq1DT4w9hLaoWv3AMBVyoPfOUqdluGs3/9L9PstzA6lTIRKbl6rS67R3u0Vk6ylmmugqxp9YApEmaUJ22EQfIx16PRs8zmKrE+Feel2YBRzXcAawz77QIVLMGKQZwva5hY2Jtg0yHr6T6PdRZ4cfUsrXYTTCJsY63nyIdWRkKKvERNBHScqygxE6ucIE15KjtxQMKJ/cyNX8oRZx3BWibDId+49iafefpZVrqLsmPXkewZ5GFy3s5pOA/oOMxlz1VpOTgco5XmxEqLIlPSw5SWwbQiIfDSE2f40qcvsrggysU8Sei2MmYu+kmqhfJRebI0paphMHYcDCsm45pyWlNVJXU1oR4dzY3MZT45o7LPXleCNjkog7OWmaNWiJsYuPlJQ5zfaKPnG9/MLqnXa5EVimmt8HUJcX4egsd7qKxicDhmFnkxeyBCQ0v2ZKahMBJNESky3nusttxswOGnz+Azzf39XXaOdtg72uLG1nVcsCx1F7i/s41SmuONBrOk5AcAEnMpsVJ+LrP4cdcjcrJET+DYaBljZNXjZX4yM91Thp08gWEJ3grWOvZ4V7J7WWNUj7/U6TIea3aPLNPaY7Tk3fugxXlSG3EsdNKMuujSrxCCIEGaSJMaEiP6b+IAa0btmDX0Kih8XfG999/g5KWLPL52Ssyua5HzBTUTQnmcrUkjYGGi474KgJMHcDKtMFrR7xcUmWZSWioNk6pmNKr51Noif+ELT5B0Fvhrf+XzfPPbb7F99Sa/+cIJ3t5xfOu9e/hEQmmVtQxuH1KFwHBSM5gYRrmiWSSkmSZLMwY7OyzlTZK8OZ85OO/F/C4uGq1l9qGNRukkzkoeSjxD+Hw60pOCj4iw9xiTyDgDz+lTTd4fDng6L8k0qNnuA4SgSLNUeiknm4ILnqSZMLa1nFi1FQZGIgvaKc/t6ZRjv/4Vrm3e5IP1qxxfPM7m/hbaaPrtDtN6xEJ3gXduXgcVON5skGmNi8yOuZG6l6WtH/Jm+HHXI7FY1EO/J1pFDo+PfEUXG24pEfYaKUbXuEGNHU05WlWMGprlukknbRJ6UHQcecuzsV0xsl6UcvE7zGYTKs5zHHGndA7tZcHoJJEgVxk0yGdaH71ytTT9Xtiub3/wDqPFBj937jF0zL8MleQZ4uXBcb4W18gkm5+ULnrsEmaNMTSaGbnRlKVA0sE7qtoxnnreeGePx5ff4+KXn2Ox3+bXf+UlDrafIdm5xwtPrXH13iEbwyFWKdHZ39qn6tVMMsXhkaZXJLSmlizX5FlN59gSwVucq9GxFDNGpttSUkU5r9IyJ4m1onyOi1IFFecjKuqHgmh6xI0wrpzAQqdNb3zE0KX0bC2zNJMScKQm0G43IPaM3tcCTSeatJMKiOg0TD3eBkISWB9Znvq3/ip5t+DU6nFhD2gdF7BQW7Q2GO04Go2x1tFNNA2tmUg1OXtpc3kIPFhAP+56NMowwvyXRDDKg5wgZZlWDybFB1lKyGHsx9w5VuOmnlPDnLbK427lSdKUrJGSJuJXNfuaqFn2pMcrhXXSXAv8K2iVSdJ4wiEzFoL458akYe9qqGuoLffu3eBdd8jPP/UsWZHFzBgfwbFoqxTEFIIQZpupAAIzlHo214jZmASxFi1rR+0DZVUTfMZWOeUPvv0R17/9KtVwj4Bn4fgyzUsX0Mrxhc9coa6lpNJGU1U1tXPUdWBcOQ4mFYOpZTx1VNbHet2hgqQVBG+BWS6mgtlUPcQaHyPkyXgfhHFtZ3ctAiPEhZ8QwmyRycI528xITI6fUV98Bd7iSpmvCOIi25r3DpUq6srCNMqHnfQvG5sH9L/yFGNzxP5om63D2+yPNtk72mA43eVovMPecIOD8RaJMSTGMK0rGkrTz828LxYQM8wXyIMYjR9/PRIny3wX0mpuCm29f6A9QczwjDFs7Y+4Pb5H4uHkcIlmt03IUggi4JlOPEeDmv3DCZUL+CAWOnI6PdBdOydzhOBksu68Q2cGr5Q4x/gH7GQVAiTRmNXJyTTc3+d7Wzf44nMv0G10ZYLt44
w7SfDO4WP8HUFUkt67uKtGv2UfF6B3JMFQO0/tIVFieK5RWAchGMqq5LpLePnqiJXHBiSNPsENMUWTYEecPdNhudlkt5xiGgk+VdjaU1eO8VhxWCR0Go5O0zOZVCSpJkkzrKtBGUzWgBggK9az8mgEdETKDCoOE62vpJGXUJA4nhJWr9aKgAElbpJaaXxQ5M1F/HCAt7vyM9cKpTzT4ZBWNyf4QD0do1pC+/cElAGvfJz7aI6GE7IXnmTt4hmMkijBRtogSwsqWwqBVRtm8mWjDP1mm4PBEa3lFZZTw/rUzp8BHj5VZoyRj7kenZMlNm/ByUNijBFdtVCOSIzB7O+wtXvAmwsnWLt8gSJNKacVh/tj7m8ccfWDXa5fP2B9+4hxHagcsguGmeY+ymKdyGTxIlH13qGicCzINBRjFCryw4Qq5gm1JVhHPZ3wrRtv8dilxzndX5HF5HyUqmqcq+VtaYVTDpNn6CR5UA8rgXtncK1OJDHZec+0ttgQsNZTVTXOa6yzJO02euUYm/tTDtcHYEeCeuoMTIGh4rOfvIRzHp8q0tM9MSLUUNaWyaRmUtYMhxPquIjqspb5kKuxtooPkAwdiQ85WqFVjPpArJRCCNi6pBwPcXUpbqDxwSuP9gmulnKV+PWMEVMJDmXwqOSErqYTBvtHYpEEpJmECRmtSVqZPPhRe2KDZTtTLD93noPBIcNyyu5wh8qV7A8POBhtszvcZm+4g8bEOQocX11h+2CXgOZ4o8DMT5CIQj40BhCrpR9/PRony2xqjgyfmKFG8UPaRCi5v0hjcZnvBM/BwRFPf3QfExSlynHRNiRJU7w2kASCMqBnzh5GKuuYoeKdJ3grNXeknOvooesUaASOJdq84qMXr3W8/uHbmNVVPnnynEy8lfRCwQVUKqKyGSih0Rg/MxGPu7UnlmwBW9ckRign3gdcovA6ofYOPLigsCHQXFxAVZYnLvY48cQxdFKgTIY2GWl3jeroHk9d6XN38yTfefcmybkc9gWxq4Oi9iGia4bptKbZzETJ6J0Y4RHm92CGOs16F++Zde7z0xmluf7Oh5y9fJpGf5GAw+uErLsksHMAY1JUCKKUJJCkTazfnlP7B7sD6okVzqp10ddNz3+OTAVUCZlia/uQxV99jv3xAa28AUCnaJGZAuum5GmHgKaRFQJAICOAtYUl3r75EUYpjhVpNLfQD5Vhs0dQzZ+3H3f91Isl2re+AtwLIfzKn2nyV1wcOsKOJvKQZtN8kJ0gmIgmhYQPFhe4++XHOf57b3DGBpIsJ0lTqiCZ6MFJF+e1nmdLxikhwYumHQUmifU1M6Vm/N4PHcoz/QrOc3drneum5tcvXomDuZh4TBB3EWdFj48jWNlhdWIiVOoePIwKVCKlgrMeHYRgaCtHlRjJ0DQKVwVMnqECWK+4e/OQo9tb9M9L6UkmLv86axEGu/zSVx4DBR8cHBCco6wUSZFSlpZxbRnXRnQzpSXPhZMmeZzR0M4YMUmXr4oiSoGV2IC4mNhM8Jy+fIqkyFFeXHEIDhW9D4SA4Qm4uMg0Sdbn6HCTTiPF1iPu3linHFvGk5K0WRDqGmtKmSUVCbQUTBXj0ZTGs5/g4uWnI1dvNlAWSlKRtOdMhIfPhkCg3+6yPzzEBcdimpIrhEMYyZQzprEOEhP/cUfL/5wy7P8AvPfQn/8zJPnrErCPJH7BQ8lfwH8e/91PuFScPen5KfMnPqpmqIX8O5HIwnR1kbu/+TneXUsYlgPiM4j3km8+U0N6V8MMR9eC8Rstjokq+rT6EMS/OO6w0vwKWqVQUFsGgyO+s3WDn3v8WRpJUxayFusj+ZwIPeoAfsZRMzhfo9CkemZVPnOjiVPvOGsJyAkqdrMyvCOIDsd7j60qGu2cwc6QelpikkKabG/jzMSjqgm/+OWnObfQmZvJVbamqgOTcc10ahkNpownFWVdC2csyqNlziEn7fyXfjDfkOZY+HfO1hijCN7hnPQwxmgkftiCciiijxgFNX32kzVGK59n/VaDajJl684e08phy1n+ZvSsVgGdG0yeEIJnv1Kc/NyTYigRoLYlEWqUXlQrxuUEsfJ1jMshtS0JIVDEwa11nlaW0kuj/W6crs3uhf7XH7t/7fppw4xOAb8M/F+A/0AJdPBV/qySv2YoIw8y6tVDTODZ7zo+bLP8SKXANXIGv/RpPvrhB5x+7R6r/WV0KgS/EByJMTGHyKHwOB1ABRKViOYjomGzlaYQHhPWxbwWeahtXfONm2/x7JWnWOv0pcYV4T5OyTxIJGMq0s6cGMppIwC1c0IJjygYkdLug5M5RUSwVYSm0QFbCVigtGZ6NOT5cz3+6t98gTxfkL5hPBQeXLMJaJQJqERjUsPPv/QM7711j83DEUpJXklZK4bjmiJVpIMxWW5INKR5ilEGSAW6VQCp3IM45YbInfJCEcJbeTitk/Ahk2LrEiLkbpIMqzKU6VHqjFRLSGo3NbzFImr9Qwb7A7KVRWzlsBGhA+kdfHD4QjPcGWOfPM1hOeDW7bscX1jj22+8xtryAqdXj3Pt3i2KImVYVrSKjFOrp9gf7KGVpZEVPHX6kzSLJofjAUvtHktGc6Q9IbI6krg5m2hE8nEr5qctw/7vwH8EdOKfl/gzTP5qLa8CDbQS04kY/Te/WelsYj4jLcY5iUeRRBr99KUnuLnUZvL1Dzm7sAZKDLKB+QKssQRlyKJQrPY+avITOb2CUDRChHa9j8iAc/zwxjv0jq3xxNIxIQHGXsZ70JkkLIf4YAUrziUmyg2EguGIzn/xqJSZBbOSQgVhEwQZlKbaCCk0TagmU7Ik5XBnyu77tzjxqWWZBTUaoBv48X08AZ1kok1xNb1Owb/3t36e//q/+xr7kzHOeUYTi/GeNNMkiaKRjUl1wBhFs6sJOsEERZJpQbSCFlaFjvoPL5Jc56S8qutK2LtKo0wlg0vvcCpn3bY405TIjGacxWTW0UDT7XU4vCUbVW0d1om8wVlx7NGZASw6T7g/nbDbTzA7u+wc7NPImpw6fgzvK7RRHF9cwySaM80mWWroN3qcWFyjriXZzAfPYneRje1tljo9VouUGxMJj5pTjQI8CK/6N1gsSqlfAbZCCD9SSn35J/37n/Z6OPlr9dKVoKhIFHOeFCGCRgJZSKMfHkyWZxHTyjspU5zHXznNdr9N/S9e5VxjUaguOJxx8nBrI/ZF+OhjHJtzL5CXjnyo1EvPIvMHx431W9zRNb9+9gomT/FRPuArseMJNszp+iDGfDpRGJNGLYgAkybaNs28EWSBqvlkfGZMHbwCq7HKE8qYSJWk3N4+5J1X1+n0C9onT0N7lcQUeO8Z3rpGY7GH7ixIs11XHDvW4W//ja/wD7/2NipN6bZzur0myyfXaCSa+8GxbzwLQXPKpvQTQamcs3FqL+hgdHgWxEspCJZ6MmY6LlEEUmvFkV95ynSB3x1qfvjRmM+f3eHxpQ7ni4KGVhQjy+5//ypP/caTXH/seRo335K0YhdwVkrSEGdUSmmmTcXJX/kSF04tsNDq8dT5S/HZ8YDmv
ds3aBQ5a4vLpCZjOB2wPdgmUSnHF06QRC7ZqZVlPrpzi6fOX+Jks0HYG6JDZJfHymIOjP0bniyfB35NKfVLQAF0gb/Hn2HyV64VqVasphqrAxMPFmJeh4t9ioluHJ55HHMQurZzjiQS4/zqAoO/9hIf/dYPuKIXqGeRBkaTIqIm62eNfhATi+Dn6mux1vExTSpwNBzw7b3b/NIzn6bICzxydHtheIIRFZ8PFmUSgrXSw5gMF6yUNyEOKBHIOo86GjCYNDDzBnbxSwblKasJSVownUxJswznakxusEZx4711+jtHNDvXWTx3kqMbt9m8usnpTxqSRltKt0TmI2fPHeNnXvD885dv015aRC10eL+0TK3isaUFTJGzm6YMkoQX2wu8MznkGV1gw5SqqumkRihC8R4EB+CoxmMOdwb4AI1mQXnLM35lh0v/mys80a25bRy/8+6Q31P7nFtJ+d8/fpzRyzfY+s6HDN9bp//lC/SvvIDffg8fAraOZhJatimlNOZEhw9u3GfwynWcr0lyQ13XtJodzp84xasfvke73SRbv8aTpx9n/WCHyfSAXqtBr92hY3oopVjq9vnB4C20VixlqeSKBjfbAqRPDf7j1slPt1hCCP8J8J8AxJPlPwwh/C2l1D/hzyj5a9ZcFYmmlSiMV4ydx4YYLRBhPh0bfR0fVgXEdxtdiwM6BOpeh/Abn+X9/+l7XLJdCl2QRJbxTHcBkGiNdxavk1iHz/oJUEZRTUq+fucdXrz8BKudvoAQiZ4jZSGZ0dwj+9gLWqSVIUEUeYKgaJy1MVVBU6gEY3yEr2dvQpHGYSYKTK6xpZSROtVgPdPRlM3DMT+6fpfxuOZY6njx2eM0dEpilfQPtSVNA0EnoFKUyXjxyTX+6MMttkrH9oeHbO5M6T/V5vyJ4+xbxxPdLgsNiTi3aZs902BB93h1dMhL5Ezfv0fWN+THc1AObx3TozEHm2PcfUN53Yu5d2K4ln2Xk3/7RbYOpuzvHlJNa/ZuWu50NKM/eg3X9ZSb27ibDfY//zQroxvinDNTnkbyrNaGTqtJO6tYXTvLweCQxOR0mwXddpduo8NffGmRdqMJypOZnHPHTjJjZMwGkyEEWo0mpbU47+nmGe3MMKrdAwYFMsf7CU5I/0Zzlv+YP6PkrxlVeuIsTmk0Ef6NwPcM2/fxJAhh1mtITzNr9lUAh0zSfatB/Ruf59o//z5XSo3OE5QS6DZReu4gOUOisE5ULbFf8gFeXb/Owsoyjy+dhNgQ4oMIwPQDJWWIk3kVxKR6lisJAaf0vGlWBBJt6BUt7tjB/GYBD3hJ8cTzwVNbJ2ROZ1GhhnLMjT3NxU89xcm1Bd797mvcP5xwIfGQZox3jzBFA93oCDFQa5RKSLPAr32yy9//zpu8c7WAUc4nT7e4PZrSL3KmznJrMuZko8kzeYN3qopjRZMvt/ooo9h4fZ/1f/km6bGCxpUWrUsN3HbOqdYFvKsZTrcZt0fkacb47j7vvXWPd27cxzlP0WmRdbv84WHGb37pOQb/9D3qJUu4d8Qn2gnhsU8zXX8db51Iup2XIadJSFLDc09eAtNnXE/mw8bKTdHa4+oJB6MxjbzD+sE6/aJHaccoxAM70YY8yeg3V0iTlMPRgIV2j36iGdeRyOlDHKjGSL2PeU7/5wawfgP4Rvz/6/wZJX/Fth0XFFU8UeIXm6MUEiNt5jRr4SbJpFmMwWXhGGUIOKyH0MxRf/lFrv7T7/PYFFQjj1TvQGZlIClDR0GxvIpMYGNY39lkw0/41TPPYVKD9TEqT1ZrdE5R4kUs71vg1DRq1F1EkpyPTB49/5yltE0oB1FRFVFAY+aRfT5IPnuaptRlBUGjyhG+HLGrlnj6mU+ydGyVL3fb9KsN0gkMrt1EBaiGY/IVG2dJDqMsOjgun2zz5NljbKPZfGPEh9/Zopc4XO74hdPH6OQNNvKcImuQJ5r3X74BP9xg4dkT+MGUkBqmeyVH3zji4IdTlpMGZ0632f6PP4fZndL7H95m4VOn6D13iqA9T9Yp98eyIfUa0NL3yL4wZvX800zeL9n8+nvkmwN6T57n5t33yCpp8G1dkWWW4CxBJwyGG3x0eJd+s8PG/g7nVk5iwxTvDXmio62up5M2JWktBLI0o5k15JlQCqUNx/oL7BwcsNjucSzP2YhZLkrHsUX02X7kJ/gB/kR35X0QUwUjpY1CyQ7rYzZHHHKpuJujxIJo1ugzS/byHt8oqH79RT78H7/LY24Vg8aYmRvJA2lAZWu8d7SynKPBkO9v3+Krl58iSzPsbIilJTddx5+qivQPrdRDVO9ophFlwyKPjgiSk51sQTdFW04QO6MkedDYxtLBE7BYwKF8oJ6W1NZxYnmZorNAu9njxJOXaCMOkXenFeM7exRLHbytMJmQSkOosHaKwXN0lHNuIWektzh5UnFhqc3ySp9LJ44zThLW8oJCa04EeOVffgd7fYfq/U1MO6O41IHSozYUS0XB6d94lkbaYDgcovo5rQsd9n/3PXZ//12KC4v8xd98gh+sH3L9wwnPPV3wKwuaxN7GnDykc+YE7QsvMdgb8639iuOrV/DD6+CCQOyuRqcFXgW6rQZPtpZJkyYnF46TmgTrarRJGU0nwkBQCWnRZjgZga5x1rNTjkgTw2JrDY3i2PIy97buc+WUGFiogxHKaFJjHjrNP54f9mgsloiCWC+DLx2bdeKQUgKeQqRdyy7/gC0cEaT5PECgWjH5jiFEnRb+Vz/DrX/6Mue7a/jgccagfZCMF6NIghLjOuv44zvv8+yJsyw0W5IzEhG5EPlkIVJciOWXi875KtF4NKlOCVb+TKgJNk7G4/vtqByjNc7beXkGAmh455gfrCqQFAZfVgQ8JmnQ7/bp5gUtU5DqDlqNUcFy9hc/x9aP3sIPSqhKyMXQL2jATTjaG7G7ecgmjvazi4xaTd4sS84OSgb3tqlCwgjN450+Ly52uPS5p9i/+w5ua0r2bJfWWpPJ5gR3a4j3sPj0GYpgOPH3vs3xv/sS+S88xu6lDvfub/Dhzhbf/R+vcvf9TdSR4nffW2D3sYIrqwec7wQq3mOx/WX2SLl/uMiyauOso6o9ufMkThIDTJaideDevWtc3xvRaRQxxNCz2l9Eq5SynrDY6TGtpzTTgl7RYVRNaeWt+ZA6BM9qb5E3rl4lKFjOM2FHqID1krygQviJE/pHY7E89PvM3YXYg8gOLDCf9QHrbHQVmWH/DkNcVzNZ8LxLj1Nyo6mX+oy+9Bh737jOcn+RJMpgo829UGyM4ZXbH9Hudri4uCqHnXOg0nkeJZE6oxTSuzjwvgYdNfw64KlRJhCCwMpe7LyYQQOHR4fyerVCSVKTTK/j9CgEMfYzSpOkRt6PcyhfkhQpF3s91lotVDkB3cC5CgUsPX2Z6cY9kRDYEu8sJjMYrfjg3pD1gWXj7hGqFcguGprNgq/98XUGZSAUTUwjZ7WbY05mNO7v4z5tmPzgiOy7JZ0zfTrtDq1Gn6q0HF7dZOe1TQ6GhyykI15dX+frd++wsTFmY7/mqcfP
8qUvvUD/oMXvfHDAz3+2ZKd3kgvZNZrBo5LjdPenvLK3T+OxVerDAmc9tqpJckvWiIpK4Nhik+WVc0zrmmbepJnlaG1ITDr3Kw6IAlYp6DQgBLGpDUHSEbrtNmVdEpynYwwtI+YgaewVU6PnJMsfdz0Si2V2pUbNG/VEzdZ5INFKLFRVNDsgNuVK4uw0RPhVfmhEt5TZ5Hl26vjHz3Lvxia9jTFJ0UIRMGkqpZVR3N7ZZMOO+MVzz8wNwXUED0KQzBYV4ZMQfKSag5uJvSIdR5JexE5U+hxZkC72One29zAnElQlM105SR6cpmpGKYjqxbTI50lZNZp21iBLDNVwStClDCLjzypd6qOLDr50KCrstETpwB9+9z533z/CnOqwdjxhtQVHoyM++XSbH758ROksJm1x5sQCT37+NHf/y2/j392ndXmZ6s4eB1c32MnvY8ZedvQP7nD8MydY/spxttspH6kmyxef5pquaB43mOWc9mrBATVqYYGX78Ov9j3vu7OcVV26qoc6l7PShQ92K67ojLqaUteW1FpsXUnJnGS0c4UuumhVIKKgf/2hVqiHwJKIej40KE5NisIwmIzotTs83kzZr0EdCZi0YjxN/fFw2CNB0ZcGP1D5+EDNiIuEuSJPXBhjn6JmOm4H0e/JByeEyfiAa/UgoVZCnqIU+Cuf5GY9EC8t76mdeCUPB0Ne2b3JVy48SZFm0eVdIOnaRb+r4KOQizg0FS9lH6KGRTBu0Rh6eWdKI47kyA0trWU/OE6m3UjrmUGdUVsRCY0PeJ8CKCR5imsVDNKEG3s3ORptUk73qY62oJ4IadOkmLyF0g100RSgwVucD9gFQ/9KwYobcOKdXU74gs+fPc2JXpPKW0kAtp53d0v+i1d3WPn3vsDqC5fJrk45+eQl+s0V+u1F+t0FCpPhru/w2pmCPzie87u+Ju/2+cWTx0hMC21y7hxp1j+ydPuGU13HG/dKfvfVErfh+b/+3oDf+/ANjqo9KuCf/fG7rLsGwSRYF/BWUtRCsPOBMW5CwM8lzzMUcz6cnlHt4/MzYx3AzJ4JTqwssnN0gEKxmKcSdx6n9jOQ6eOuR+hkERq5jQpDFRtneUjjm4lDSBlNSC8zcxHU+sEPLwTRxMSOW75GREZ8UVB+5Qn2fuc9lvtLEALWW769fp3nz1yik2ZxQCkqSeesMPUjABFAeibcnGavjUbVNtKm5ZdGy2nialBSRjpnqQkcW1oma3e4Nt2KxhlCcZktqdnUHwukMuX3HkyWY/KMN65+iF1bYK0a05ls0z3pSVo9YfzqTE69GSmVgHM1a8ManbYZ3RpSjqZs/8F72Dv3OVxYIEzGwlgYK6r7U14/2OJr+kP+8t8B/ZXTJL0m9q3A4a0Dim6b5ROGpRN7vHlrwHOPneduLqegLx31+iblaEoeSt4+GPKRqvnEuSaNtGLl8R7L7cv820+e5d7+PkVL08xSnvv8MxwPb6CMiapWj7MVJk1RWgCc4AaQ9JhVCv4h2H+WFTnbZMNsk42Mj9m8ZaXb5+b6OheOnyLRKVs2sOCFNrXrYOF/wTnLn/k1Y5A+XGa54IXwCHNOgppxrMKMPxb/Ph7P0tgL4TJ4L1ELMyoJwJUzbL9xm87OCJMXvDvYob/Y40y7h1jNiWeY0eI0443CebkNqU7mSJr0GaJZQSEiJh8IZnaTotFGUKKFceL0/8TKcUatjLAZYopeNDaNYjR5wAXaNFrhjaSX2RpCVXHr5i6T/V0umcDy/h4XvKW5WpH0+iK0ijoUvMeWU7wyuKlleuM+djQhOM/Rzja1GpJ+SpHUA+p6irM15y4s8u//7A5PnS1pNNv0PuUpwxL9Ky+STmu2/vGbLL20hVNbfPVej9c3A9/dvI+/OeKbtywnVeDx8Zhur4GtNVc7C9yoevxHXwn8se6y2POMVYsP1gd8qeMo3t1iev0Qf06DCbhaosEz6yLfTno5FcbC5mBGf3pwkjysnZ+RMT3i3DNTxKqIiL350TUIsJilsunNvZp/8vP5SCyWWRk2K0ck0MjMF4yfNb/xB2NE3YMgVBAQEz6lowY+llBED+rgxWCBiKRYBepnn2H7H3yL8uiIHT/gK2uPST/kKmqrhEcWOWTOuflu5b0kERtt0MFhEkWqov9YJB4G5whBRmPy2jUqCqeKJOF4q81ekpGblGkUQnmiw4qLN9t7ghEAIwmRiewD1eCIcnxAEZpsGfjo7Q20h1Olp0sgabeFdhPnLM55SFL2nOZoUqHMGJU6Vi+d4sSnTvJugKcvn+HuB5pp3uXFXzvJM6c2aao7oGqCn3C9nPDK8C6P25T07evkn7pA62zgRKMNtsmke4JvZo7VZ5r8ne0Dkpfv0mwt0/jMaV589Q5XdyxvfHSSY0/1uFVbau344vkm7wyHfPrSCkuFpVENpXRGVKlCqnQo49EmAV+BG6GTbuTw+Tn6OfNhnm2cMxTMWjs/cVDQb3cZVxNcsHTTjMU8pdBCMWon4rr0yEPHca+IJm8RBXPRpHo209AKFWYs3jgn4WG4WHqI2O0zixaYfW2U0L5nAp+w3GP32ZPc/Zff5cVzFwhVTeUnkuKbJmgt5hkmggUqfj0XfHy0PZ1uRlEUlEd1ROWkhxGtengQG43cwGk9nevuW0lOJ20wqUrRvxARnOhSYhLNbH27mA+PgulwyHQyYlKA6zXZnnq++fodnh1MuDStWDyzjOk0MWmKJpC1uwSVsrZSsK+XSbMzLH90SGeS0rxq+bmXzvA7zYqVCxV3393grbd2+IdvjTiVvMawgkwNqeoKsjNU3U+wuTZl85+8xVq/ID+W0T+9w1+7eIbd/SlnPlxn7wfXqGuL2jsi+3CHcjxi781N3HaPN36Ys703YU+3eOozF1hZyjiRTOgtpUw3h4REZMTe+ViK1ZKXQyr3zw0JqjXfNB9eJLNeRlBUPf+pz0AekCY/UYbRZEqv2eF4nuO0QMb9xER29I+/HonFMruUCsyqKmk3Ykk2i0SLi2bWp8yh5gA6NjaznUSrWXiPDAZ1dHkPCEyIc2yvKk78+7/E7rUdju7s0Suh1+2QtlskIaIuiZmbSyREm6YgJ1SrmdFqGmyuyY8ga2YMd0YMxyXOQ3BO+GehJrjAvYM9rngnfVBQLOUdtkb7coPmg80Q36/EYGstqstEC91c2SmunFBWKb1un+efP8n6jX1e/2iT6WjC08HRP71EaBcEk2DSjLoODHb36E7FvG47OeLGuMTdusfw9tsclBW+ZUhbDfa+0+O/39f84ueu8Gs/s8K7jVOcNmPOt/ts++MceyHjXAgM7+6x8/4GO5u7vP7ut+i9v0X94SFbyw2KUzmTOxOqG/cYlZZJcJh9xae/fBa7ep47bpEvHe9y/Y07HI4nHAyGdEwNucZaK2xtJ7n1wTmCloi94IYk2TGpQtQDbCqEh+59HBTDQ8/CzJoWWOr12dzbodfq0NGKgZYSuK0DxUP0oz/tesQWi4kDIilMmCFBkWLy4H2I0i2ZO+7rGAokDlZ+3s/EpOLIZhVKfmwMvePY44+RNRqop8/
jq5r9nSO2P1gnvbZFZ+xYbLUp2k3pAxD7KqWczEtcwJgEY5Co1U5OoiHrZyQ6MByVBCPzEevEc+zq4SE/i5ojXatZi/d4kH+pjcHWlSBiXsoR68Ek4qdM8BRGMUHhrOP25oSiqjl3cRlzqsPurT3u3tonbxiSukm22MeVJbv7AZc0yJuBRq/B4PEVbtzdgvsT7Nij0kVOPH+G4pNLLNlFDm8l3Gzk3J+kPJYN+UevBwZugxeW3+G51ePcmtQMdy2LpxZZLVd46oufYuN769z74AeEJCc51LTKBtnxFurumCQoVl94EvPZFYb1NocfJXzrrqZd9BibMUqlTGqHmda0mslcbJe6mYpT7GE1Nd5XKFPE3PrZhin1g3Mzj4AHPLsZpCw+AppG3mT3cACnAkt5xpETxvnYeabezwfCf9r1SCyW2SLw3olVqtGiN4irPlGBVAkS5ZAdwyBDwkRHl30jTopeoKcH/Y6XJjrRs51bGt9Z1qJIUzUmzwgnlshOL8OXHNO9ATffuYn5YJNFl7HQ74pVkvfUVtSGriqpCPKarULlCcZDnmkaJiP1QQy7HewMRywuNWgmCd1GgmsYLveW+ebGByJuDQFlZoNJ6VmsFTsgLGgfUCYjyQzBycempePm+pB7m0cs55rVxSb3D0Y0Nht0HPSKHBLLjdsjhtMBOm9RT0uuB8flT1+hvdfmtS1N0m1jWwVP9vrs5iMW7hyyf/0Ob2lYWkv5TLNkbHv83rc0/a/mfPF8h7rj0TTYHijuTgv0uwcsXTlJefcQpTTZlRWq/THFZ/rsfbDJvbfusfLVJYZplxPNParhNY7qPq/dH9NuT1jWFamGVGsSLbGEzopJuwueBGRU4IYoXaAQfzRhbs809ZENEiHmJBqRhCBleG1r3r5xg1/87GcJStHLMopENsIUFWXfP/56JBaLXIFMQdPE0kk/oIHM9StAoSDV0gimQNNIr5KZZJ4pabSE4gRglqPofKAMgalzSLaW7N5zCBri0BDQmnytR+vYs/Alz+HNDXZ+eI3W+oCFVlPc65OE6bQkwWC0fKdq4DHekcXItTQ4gg4UIeWWn/Kp08skQGviCKbmTNqiSFJGdopSYH0UnsrAheDUXP4aCDQ7XTqdFtv3d8A6qkmJLWuOKks59mzsONpJYPn8CpOdEflyl8yk3Ng5YtDPWer2mO5t4SbQO50yGRwSbh+gTWCSaF55HZyuCCQ89lSXhWNtOH6FF9dGfMee4O88luGOXmW9bHJnuMXeuMP6RkEj7JO5HXZvXMOWNbnRHBv0SJcNrx15rueWylV89Y2XCafPchha/IXFk1y7X/LlZ89w7d2XGfqaDE+WaPJCDBZdbamrCp2khBADXO0RpEtz1neYD4wrKmspq5KqrplMS8qqZFqXTKYlg/GQd2/cwVnFxRPHpWJB0dYKo6Cbahr6z0kZplC0Ek2eaFyAkXdY4gxCBVIdKJQmVSIWS4KnlRhyJbr9TGtMrDmDj9avRIqJknSCLEDbGHyQabmL31n8c8FqRRWPYoMiURBSTe/yCfSVk0w3D1j/7rvkV7dZ6/cYZym+SkiTnDyFwkg5QCUZjcp6lNe44Bl34OxE/IKJ4UZtMlYbLe6Ma6yzsad6AAz44HCiUMaHhMnwiNNFh047o93M0S5Edq5nYmPOpUlo9htsbw/oj6YsNLu8/+EmZTng4GCHUB6RjHq8/k9exg4rLh5fYGnqWWt1WF5apFt1mXYXeX35HPcnBr9zyG99OOCNO+9jjhy6BerUIsniCt2lPomr0JsJV37heW5ffpzjd7Y4aBRkT5/kq493+YVMcWBT/j/rJa8dOP76asrmaMCZ05pfa1qaBq5ah60dR76iUUjIrDYOkziCc8y3ShUox3v80fevsr51wGA8YjiZivGGi2G7xpBnKY0ip9ks6LSbtJpNGkXBp598ik9deYw8zVAKRu4hw3N+smnFI7FYZrqOEDxl8A80+BFO1UEa9NnlfZhTUByChth5qpWe17JKRe2+95GA9+D6E1ErUrnNNTJBaxItJ9bMy9coSI8t0P8rP0PYPWLnG2+ze3WdlU6P46t98iTD+kDmNUkjQZcls6p6WNfkbUNzR0ADZx2hdqhMc7bR4/ZkX/QyhOi2LyAAWAI5rnI4VRE87B6OaWQ5Z1d73FsfoTQ4a7He4r1lamFcOfaPSk5ZMQYvrSJLEsqsw1o3oT+Ge2aJ5754nhe2h5jtAe2ixdLKErvfv0ExWufZ995j83iL7+g208UVTPc8nbMd/s6LJygXjnC0WA2W9dEeVh/RLTLeKTWvnT5PraHdyElbmno0wAxHPDMZ8gev7PLb7x1w+cJJ/qtX4GdMYG/0LsYIEFLXjvGkot1OKXzAxcwUbx1al7igyDNNqzBcunCGTrtNM89pNZu0Gg0aWU6WZmito6RYzQEgpaWnIW6edfDcHA5IfHQKmpXwH3M9EovFIA9qoQ2T8MA9EkA/NJy0Sqb8yisKAzWeQikKjwTgRERJhpLRbTLEyT2iFHZxJSrm1tdUEXGec8hw1F7mOdbNBqACFqRaUyx1OP7XPsd0/YD7X3+D8Y07XDl/nMWikMVloxrTBUKi2HYTFknFqTGSMIPS6BC4nPf5VhC7MltJ/R1iUxusxyUzyx+g2cKS4KNh33hcoYPHIRkz2mi81hxNSo6f7JE1MrYPLQObM9grWThpuVNqmq0uS/f3SF95g1uJ5swnzhKSgvvfuU6mNIkvODHIuBI6fPYXniL9hTP0WopaH3F7O+X+Vc2TqymH0/s8d+wqt6YnaTW3eK4z5VtXp0x39jk8aPGt64pGmtHo9jlM2+TH1hh0TrN6ZZHFU57ldERxo2TrzvsYHXCKeR5NiPa5zkWPN7K5SeEXnrtCkp+Scj082CAfuNEwR8tmyljlZWZlQ6B0U66PRgysoxe/xtRD5T+e8vJILJZZLF1uArlRaA91gASZkjuEkChzSPm72ikSIE9UNNET87SJ97gZ1QXxMA5aS5wakuKrIqSYKPHbtUEe0ERFr+XICLBO6MIzwziDpnKWymi0U4SVNu3f/Bzl9S32v3+d4+0C7EPWSk2Dm1h2korHBrmcNEqOMKXA156zRZ92mjNyFeK35SNkauOuWgltw2gmw4rNG5tkjYLXB7vousa5OnooAyEwKWu+9+odvvD8GTq9Nh/tTal1m+6VE2xPj1jIapaX2/RKS/vmPv20weknzpBXgXvvHjByFhoO5R0HrqK3cZXhPrx2Y4fTdoM/utagmipeLUvyXpeLX97k99+Z8hXu8akVhek+w/3Dx9htrNC8sshnz7XI0gyvDGd2S/6Ld/Y4t9bjXKpI7JT3N3bF4E5LIy5DWSVSbWbqRRn6JqmRiqA+oE7XmNQWi5JF5hyl95TWUXoxVSdudpJQHAmrMVC2RlEHpOwGSgIT9+cADZvJimsvqsJMB3KlwEOl5E1lWpMi+hOjNEZrMg2Z0WQoITkqTRXkiHWxbKv9A7eQEDwBhffSs+j5Uw2p0hgdOWAzFC2IgZ64gCiBggNY/2Cu45QmnF/h/plVsm
sbXBpU+L0JJhPd/Li0qDzQHCRoU8cTTcchmqKjUi60F3nzcBOtFWUpnsneB8k2jkYKKsmoRweEfAGdKcDh5jGCIqfGeYyChXaD4yttms0G03LI4qJH+QHj/QFZohmHmrH1BFuzv7fJ+//P36Ygod1IMT5BLaSsbx2yFzzjV7c5PfmAv/RzF9hd/hSdUxmfMob/97+4Ro8pd3ZP8fmnOzx7JZDmJS/lU+7YFlO7y70y4xv3hixMFNeyKZnTPOZrdjdGtDs5dQ1vvP4axxqB3GjGvsZZGI0rWk2J5yCIxNog5ogeD8Hyzu49blU5MfZKvDMjN+8BwdKLY06ARInjpEJ+XqJ81ZSIw2XtFH62If+Y65FYLLNLEzDBY3REKRLF1AdskMXTTCVnUseHWagwgRqonJfffcArJZ5gWjLhZ73JvH4N4kUmYECA4OWUCXEaTIj6GDllZouN4KOToQw8Rf4cQBnGOvDOlRNsjCc8ezBk8WCCPqw52N5nOS1QlcfXtfRZkT4Tac083Vjh7aMdbDwRvVcxm14GsFpB0Whh3QRvazG/UDKwsxERcs7N3W46zZRmMwel2N66gxrdx+Uncb5JHnKszljf3CBT0NA5zUZKSAJ1J8U1DO3VDv0rZ/lw2CNdXuTJ51LKJ1Z5+dWUC2tDfuapghf/V5/hcHKD7X3H0W3Dy7/b4MxnavT0OrvbP2DqFvGt5/jHGzkj1eVvfOUkv7C0yOb5mgZwUJf0rWeh08PZbZIkIcUwrTzOBWorGZo5zP2mlSAdeBynsgn3bAPnha6i5r2J/Hs9I9wioEkdoCbmiapZ7F+MTYy0kJ/gq/LoLBbjPWfvfUh9tEWipMTSSrhfXrgO84lsQMXeQ+yDnA/UyCIJIUT3dx2Hf1EuKisDo1XcYQCCJGUR+xWQh1SJUTXzRflgV0qUIpnlTiLGCNIPyfcNAa5ZR1FPWCoseXWDc2UbVVWocoT2FcXRR1C3mb2KZ03Fu/6AcTWitKX4CSMx1955Gq0WRTKmTCfY4S4N3ZJTsK7xtgY8PlhwjjQxXK736K2n5NMBq/tbNJpNrl/fZWEyoD0dkBwUXOhC3smopoFRo6DRX+AXnuqw2GnRXeyis4I/vKd5dX+IWk94rlmzsT3h1xf2cD9aZDr5EecWx6xmT3CiNeC9dMDBDw5Z9AXPXfos/ZVjeDTNoz2+cXMD9foGd441OZ3kGBRdAsPhlMcWE/beu8uiy2MuDbR9TrfOaY0aNNoNsiJHJRkmyQFH8BUNlfPEtMOWS5kxxIE5B1Dc4dR8OP3g/+W+z5ZFY28TNXP8+TjcmEdksQSlWT/3NM997e8/aOxh/jDNhk0zlCs8BPfBgx1B/SvH6IyuHfv4+HcPf5wZi11ex+zvZ/Xt/OOzxRaRu/hvH2aqhj/xiYqZTG1t/5C82RC+WelQYUgx+QNCSOevYZnA3x0cUA5HeC9WQ0maym6ZgKo17MopaidjTJ3OZ0/zdxnLNa0Vnau3ad5LMWnKr+4cSskRUtlNc9C12OB6G3BFmzQxrDmF/VEtMRfO4ULgLzr4igWrU86uZfw7ZYZ+BZomY2NUsR/ABLjQGHFp6CiHDu8DCZpisU3ZSfhlpfmK95y4m4HScWPx5NpgR2OO+RptLepWhRLCAyhIU0OaSOKYNiZqUuSJ8JGUuqI0A2dwYbZYHszjPu7P/8pdY/3iJ/FJwsc2LDwii8UrxW/9zf8jRZwz6AjVhhjbYGJ6lIllVxUCNggIEGJdGyJvy89OiNiTCFIiGL1zD/yLRZYc5qfF/MGPsXUBScCaJYUpFImwyEm0wYZAhRzjM5fX+c1R8VSsp9x9/ypnnngcbRKKvR1+4f3/HV/73/49pgsL8jWVID7vfOObfPDN7zAeHmCylKLZikbphqyVSzlpMnY/eIPMtGk0G8LSTXMUDl9NCMHSaeT8zCfP8cyTxyn6K/yDf/Yt9g438aoFyXFqUoYHUxINhVbcuF+TGMVLp7qYeooJ0i/lzYITJxaZFIv8znqT//AvXeS//cEhF19c4ImFPu9fm/DR+oR7u0f8p1+BwSt7DL+/zmK3Qwia/b9wls6lRe66imeaXWzeJNUJyiRUSpMNS9b/+A2+3znkQqPJ/rUfUuQJh4OayimOrbRYWe3Q7BY0Wk2anS7KZCg83o1ROsEkTW5M23w4bca5mmwcDxuto6KzaJipZ+U2RS6H9LAE0Ga+Ef6465FYLEop0CZGAYibvfMBlRh8lOZaL7oWpYIERCtJyvLMTJ2Z152yw+rYDD4UW5Ho+b9xM+p28GI+OcfkQ4zI9pGMKUZ/RikqZnkes3pYzQZCoE0kIMlNCgSshROXr+CLHBT4KD0OSos4K0hTafGEIqcCkkaXNM3nN9BkiQxnXQ0akt4y06MdAoqsWZAq8VKrVZAeRxtUkhISw7DUoCHvdPBJl/1Jl+37lv3dkqWFjONrmrTlGKk299KT/JXPrtDNE/r9Ju1ukzRNGdHgj795yN97w5O026wsdni222V0yXE4GGP3Kv7+Hzie6TXpX1ri1tMnqfImj53qkbdaPJtqOiohmIx7dcnpNKOBxpuArVKGWZu182dYv/4e1FMsmsHEYg5Kmr0GeRCftso7jIHUGFQssmpvOdmsueVg7NNIJYybZSSEBaLRiDLzj+nZCcWDRTSD9D/ueiQWCyFQe7AI6pQgU3nvBP5TSkztrI+quTB7k1KumUio9PFrCWFS9pmZ9gEEMZktAj+fDAfQDx3B8UST9FofF6pDRTInMDdJmFPEQ8xBnL8WZEdrNkhMwsy+9UHpJAzqJBFrV6MU9aREqYSsSEFFrlhiZOG6WuruckxrdQk72MNWE4IW50ZtZpR1Q7NRkGcGQsr2xpTRsMQbzbjSWHuIcSXt3HK4u011qFCrPZ574RyNaYd3yg6/erGN8Y4qwGg6ZXi4z2NZxfc/2GdlSVF9sMiN7hJP9zocrtXsbxp+/kSHVlbyR50Cn1WMEsMPX9lGhz0WmprLxzIWu4GnXcH3RzWbvub5EsqTOcZOKIPj2PnH2fjoVWrnqZxna29K1hjSbBe0u1oIpkBicqwV00KdZKTKcTYZ8X7dEzWpEcGePDMmiumiI89sM8RH0EdmafVMuvznoQwDIplQGmkX9QgeYevK8ahA6Xljr+IxOnOgB6GoqJkbZPBzDoOJjUk0RoqsgOhwaR54EcdDQ0434qIgxGmwn6NnkoYrDIPZUGUmY54pNEGQGR9z9mbJeAIdz/T2guwliQwos1ZDvJZtLQtBQXAOh4UgkRl6MqJz+jy7Vz8gQzEFKAypUWRZSrfToL/QpFQ93nz1R+xUHdpZwu3dA5pZwepqg3GlIWkwmAY6mWXwwTc5Co7r17t852tdmARWLnV54uwK/VaLy8tdfuHpK/hGyrFOh4Et0cHy/nCH3fsj/v5N6Br4antM/eYW9xuW6kdXKcdTmo+fRH/yAt/f9fz9TYXvdOmsLPDUp9ao748Y+yF7RwNOnjnP1bdfJVEarQyHo4r7W2M6nSFZbmh2GwQc3pXCrjAa0eiXnGjAzapiQhIzZ
[... base64-encoded PNG data and figure text of the notebook's rendered detection output (deleted by this patch) omitted ...]
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "from opendr.perception.object_detection_2d import draw_bounding_boxes\n", "\n", diff --git a/projects/python/perception/object_detection_2d/nanodet/train_demo.py b/projects/python/perception/object_detection_2d/nanodet/train_demo.py index 9c659fd888..4057901326 100644 --- a/projects/python/perception/object_detection_2d/nanodet/train_demo.py +++ b/projects/python/perception/object_detection_2d/nanodet/train_demo.py @@ -22,7 +22,7 @@ parser = argparse.ArgumentParser() parser.add_argument("--dataset", help="Dataset to train on", type=str, default="coco", choices=["voc", "coco"]) parser.add_argument("--data-root", help="Dataset root folder", type=str) - parser.add_argument("--model", help="Model that config file will be used", type=str) + parser.add_argument("--model", help="Model for which a config file will be used", type=str, default="m") parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) parser.add_argument("--batch-size", help="Batch size to use for training", type=int, default=6) parser.add_argument("--lr", help="Learning rate to use for training", type=float, default=5e-4) @@ -34,18 +34,14 @@ args = parser.parse_args() - if args.dataset == 'voc': - dataset = ExternalDataset(args.data_root, 'voc') - val_dataset = ExternalDataset(args.data_root, 'voc') - elif args.dataset == 'coco': - dataset = ExternalDataset(args.data_root, 'coco') - val_dataset = ExternalDataset(args.data_root, 'coco') + dataset = ExternalDataset(args.data_root, args.dataset) + val_dataset = ExternalDataset(args.data_root, args.dataset) nanodet = NanodetLearner(model_to_use=args.model, iters=args.n_epochs, lr=args.lr, batch_size=args.batch_size, checkpoint_after_iter=args.checkpoint_freq, checkpoint_load_iter=args.resume_from, device=args.device) nanodet.download("./predefined_examples", mode="pretrained") - nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True) + nanodet.load("./predefined_examples/nanodet_{}".format(args.model), verbose=True) nanodet.fit(dataset, val_dataset) nanodet.save() diff --git a/src/c_api/Makefile b/src/c_api/Makefile index 1ce21bbaad..ac4aaf6df1 100644 --- a/src/c_api/Makefile +++ b/src/c_api/Makefile @@ -25,11 +25,15 @@ BUILD_DIR = $(OPENDR_HOME)/build LIB_DIR = $(OPENDR_HOME)/lib CFLAGS = -fPIC -INC = -I/usr/local/include/onnxruntime/ `pkg-config --cflags opencv4` +INC = -I/usr/local/include/onnxruntime/ -I/usr/local/include/rapidjson/ `pkg-config --cflags opencv4` LD = -L/usr/lib/ -L/usr/local/lib/ -lstdc++ -lm -lonnxruntime `pkg-config --libs opencv4` -lboost_filesystem OPENDR_INC = -I$(OPENDR_HOME)/include OPENDR_LD = -L$(OPENDR_HOME)//lib -lopendr +LIBTORCH_DIR = /usr/local/libtorch +TORCHSCRIPT_INC = -I$(LIBTORCH_DIR)/include -I$(LIBTORCH_DIR)/include/torch/csrc/api/include +TORCHSCRIPT_LD = -L$(LIBTORCH_DIR)/lib -L$(LIBTORCH_DIR)/share -ltorchvision -ltorch + all: libopendr libopendr: $(OPENDR_HOME)/lib/libopendr.so @@ -39,9 +43,10 @@ $(OPENDR_HOME)/lib/libopendr.so: @+$(MKDIR_P) $(BUILD_DIR) $(CPP) $(CFLAGS) -c opendr_utils.cpp -o $(BUILD_DIR)/opendr_utils.o $(INC) $(OPENDR_INC) $(CPP) $(CFLAGS) -c face_recognition.cpp -o $(BUILD_DIR)/opendr_face_recognition.o $(INC) $(OPENDR_INC) - @$(MKDIR_P) $(LIB_DIR) - $(CPP) $(CFLAGS) -o $(OPENDR_HOME)/lib/libopendr.so $(BUILD_DIR)/opendr_utils.o $(BUILD_DIR)/opendr_face_recognition.o $(LD) 
--shared
+	$(CPP) $(CFLAGS) -c object_detection_2d_nanodet_jit.cpp -o $(BUILD_DIR)/opendr_nanodet_jit.o $(INC) $(OPENDR_INC) $(TORCHSCRIPT_INC)
+	@$(MKDIR_P) $(LIB_DIR)
+	$(CPP) $(CFLAGS) -o $(OPENDR_HOME)/lib/libopendr.so $(BUILD_DIR)/opendr_utils.o $(BUILD_DIR)/opendr_face_recognition.o $(BUILD_DIR)/opendr_nanodet_jit.o $(LD) $(TORCHSCRIPT_LD) --shared

clean:
	@+echo "Cleaning C API binaries and temporary files..."
	@+$(RM) $(BUILD_DIR)/*
diff --git a/src/c_api/README.md b/src/c_api/README.md
index 1377f753a7..6ff33bacea 100644
--- a/src/c_api/README.md
+++ b/src/c_api/README.md
@@ -3,7 +3,7 @@
 ## Description
 This module contains a C API that can be used for performing inference on models trained using the Python API of OpenDR.
-Therefore, to use the C API you should first use the Python API to train a model and then export it to ONNX format using the `optimize()` method.
+Therefore, to use the C API you should first use the Python API to obtain a pretrained or a newly trained model and then export it to ONNX or JIT format using the `optimize()` method.
 ## Setup
diff --git a/src/c_api/face_recognition.cpp b/src/c_api/face_recognition.cpp
index 54d2690c52..3461ea5dca 100644
--- a/src/c_api/face_recognition.cpp
+++ b/src/c_api/face_recognition.cpp
@@ -20,7 +20,6 @@
 #include
 #include
 #include
-#include
 #include
 #include
 #include
@@ -36,133 +35,113 @@
 /**
  * Helper function for preprocessing images before feeding them into the face recognition model.
  * This function follows OpenDR's face recognition pre-processing pipeline, which includes the following:
- * a) resizing the image into resize_target x resize_target pixels and then taking a center crop of size model_input_size,
- * and b) normalizing the resulting values using mean_value and std_value
+ * a) resizing the image into resizeTarget x resizeTarget pixels and then taking a center crop of size modelInputSize,
+ * and b) normalizing the resulting values using meanValue and stdValue
  * @param image image to be preprocessed
  * @param data pre-processed data in a flattened vector
- * @param resize_target target size for resizing
- * @param model_input_size size of the center crop (equals the size that the DL model expects)
- * @param mean_value value used for centering the input image
- * @param std_value value used for scaling the input image
+ * @param resizeTarget target size for resizing
+ * @param modelInputSize size of the center crop (equals the size that the DL model expects)
+ * @param meanValue value used for centering the input image
+ * @param stdValue value used for scaling the input image
  */
-void preprocess_face_recognition(cv::Mat *image, std::vector<float> &data, int resize_target = 128, int model_input_size = 112,
-                                 float mean_value = 0.5, float std_value = 0.5) {
+void preprocessFaceRecognition(cv::Mat *image, std::vector<float> &data, int resizeTarget = 128, int modelInputSize = 112,
+                               float meanValue = 0.5, float stdValue = 0.5) {
   // Convert to RGB
   cv::Mat img;
   cv::cvtColor(*image, img, cv::COLOR_BGR2RGB);
   // Resize and then get a center crop
-  cv::resize(img, img, cv::Size(resize_target, resize_target));
-  int stride = (resize_target - model_input_size) / 2;
-  cv::Rect myROI(stride, stride, resize_target - stride, resize_target - stride);
+  cv::resize(img, img, cv::Size(resizeTarget, resizeTarget));
+  int stride = (resizeTarget - modelInputSize) / 2;
+  cv::Rect myROI(stride, stride, resizeTarget - stride, resizeTarget - stride);
   img = img(myROI);
   // Scale to 0...1
-  cv::Mat out_img;
-  img.convertTo(out_img, CV_32FC3, 1 / 255.0);
+  cv::Mat outImg;
+  img.convertTo(outImg, CV_32FC3, 1 / 255.0);

   // Unfold the image into the appropriate format
   // This is certainly not the most efficient way to do this...
   // ... and is probably constantly leading to cache misses
   // ... but it works for now.
-  for (unsigned int j = 0; j < model_input_size; ++j) {
-    for (unsigned int k = 0; k < model_input_size; ++k) {
-      cv::Vec3f cur_pixel = out_img.at<cv::Vec3f>(j, k);
-      data[0 * model_input_size * model_input_size + j * model_input_size + k] = (cur_pixel[0] - mean_value) / std_value;
-      data[1 * model_input_size * model_input_size + j * model_input_size + k] = (cur_pixel[1] - mean_value) / std_value;
-      data[2 * model_input_size * model_input_size + j * model_input_size + k] = (cur_pixel[2] - mean_value) / std_value;
+  for (unsigned int j = 0; j < modelInputSize; ++j) {
+    for (unsigned int k = 0; k < modelInputSize; ++k) {
+      cv::Vec3f curPixel = outImg.at<cv::Vec3f>(j, k);
+      data[0 * modelInputSize * modelInputSize + j * modelInputSize + k] = (curPixel[0] - meanValue) / stdValue;
+      data[1 * modelInputSize * modelInputSize + j * modelInputSize + k] = (curPixel[1] - meanValue) / stdValue;
+      data[2 * modelInputSize * modelInputSize + j * modelInputSize + k] = (curPixel[2] - meanValue) / stdValue;
     }
   }
 }

-/**
- * Very simple helper function to parse OpenDR model files for face recognition
- * In the future this can be done at library level using a JSON-parser
- */
-std::string json_get_key_string(std::string json, const std::string &key) {
-  std::size_t start_idx = json.find(key);
-  std::string value = json.substr(start_idx);
-  value = value.substr(value.find(":") + 1);
-  value.resize(value.find(","));
-  value = value.substr(value.find("\"") + 1);
-  value.resize(value.find("\""));
-  return value;
-}
-
-void load_face_recognition_model(const char *model_path, face_recognition_model_t *model) {
+void loadFaceRecognitionModel(const char *modelPath, FaceRecognitionModelT *model) {
   // Initialize model
-  model->onnx_session = model->env = model->session_options = NULL;
-  model->database = model->database_ids = NULL;
-  model->person_names = NULL;
+  model->onnxSession = model->env = model->sessionOptions = NULL;
+  model->database = model->databaseIds = NULL;
+  model->personNames = NULL;
   model->threshold = 1;

   // Parse the model JSON file
-  std::string model_json_path(model_path);
-  std::size_t split_pos = model_json_path.find_last_of("/");
-  split_pos = split_pos > 0 ? split_pos + 1 : 0;
-  model_json_path = model_json_path + "/" + model_json_path.substr(split_pos) + ".json";
+  std::string modelJsonPath(modelPath);
+  std::size_t splitPos = modelJsonPath.find_last_of("/");
+  splitPos = splitPos > 0 ? splitPos + 1 : 0;
+  modelJsonPath = modelJsonPath + "/" + modelJsonPath.substr(splitPos) + ".json";

-  std::ifstream in_stream(model_json_path);
-  if (!in_stream.is_open()) {
+  std::ifstream inStream(modelJsonPath);
+  if (!inStream.is_open()) {
     std::cerr << "Cannot open JSON model file" << std::endl;
     return;
   }
+  std::string str((std::istreambuf_iterator<char>(inStream)), std::istreambuf_iterator<char>());
+  const char *json = str.c_str();

-  std::string str;
-  in_stream.seekg(0, std::ios::end);
-  str.reserve(in_stream.tellg());
-  in_stream.seekg(0, std::ios::beg);
-  str.assign((std::istreambuf_iterator<char>(in_stream)), std::istreambuf_iterator<char>());
-
-  std::string basepath = model_json_path.substr(0, split_pos);
-  split_pos = basepath.find_last_of("/");
-  split_pos = split_pos > 0 ?
split_pos + 1 : 0; - basepath.resize(split_pos); + std::string basepath = modelJsonPath.substr(0, splitPos); + splitPos = basepath.find_last_of("/"); + splitPos = splitPos > 0 ? splitPos + 1 : 0; + if (splitPos < basepath.size()) + basepath.resize(splitPos); // Parse JSON - std::string onnx_model_path = basepath + json_get_key_string(str, "model_paths"); - std::string model_format = json_get_key_string(str, "format"); + std::string onnxModelPath = basepath + jsonGetKeyString(json, "model_paths", 0); + std::string modelFormat = jsonGetKeyString(json, "format", 0); // Parse inference params - std::string threshold = json_get_key_string(str, "threshold"); - ; - if (!threshold.empty()) { - model->threshold = std::stof(threshold); - } + float threshold = jsonGetKeyFromInferenceParams(json, "threshold", 0); + model->threshold = threshold; // Proceed only if the model is in onnx format - if (model_format != "onnx") { + if (modelFormat != "onnx") { std::cerr << "Model not in ONNX format." << std::endl; return; } Ort::Env *env = new Ort::Env(ORT_LOGGING_LEVEL_WARNING, "opendr_env"); - Ort::SessionOptions *session_options = new Ort::SessionOptions; - session_options->SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED); + Ort::SessionOptions *sessionOptions = new Ort::SessionOptions; + sessionOptions->SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED); - Ort::Session *session = new Ort::Session(*env, onnx_model_path.c_str(), *session_options); + Ort::Session *session = new Ort::Session(*env, onnxModelPath.c_str(), *sessionOptions); model->env = env; - model->onnx_session = session; - model->session_options = session_options; + model->onnxSession = session; + model->sessionOptions = sessionOptions; // Should we pass these parameters through the model json file? 
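For reference, a rough sketch of the model JSON file that loadFaceRecognitionModel() and the new jsonGetKey* helpers assume, written here as a C++ comment; the file name and the concrete values are illustrative assumptions, not taken from this patch:

// face_recognition_model.json (hypothetical example; values are placeholders)
// {
//   "model_paths": ["face_recognition.onnx"],   // read via jsonGetKeyString(json, "model_paths", 0)
//   "format": "onnx",                           // read via jsonGetKeyString(json, "format", 0)
//   "inference_params": {"threshold": 0.83}     // read via jsonGetKeyFromInferenceParams(json, "threshold", 0)
// }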
- model->model_size = 112; - model->resize_size = 128; - model->mean_value = 0.5; - model->std_value = 0.5; - model->output_size = 128; + model->modelSize = 112; + model->resizeSize = 128; + model->meanValue = 0.5; + model->stdValue = 0.5; + model->outputSize = 128; } -void free_face_recognition_model(face_recognition_model_t *model) { - if (model->onnx_session) { - Ort::Session *session = static_cast(model->onnx_session); +void freeFaceRecognitionModel(FaceRecognitionModelT *model) { + if (model->onnxSession) { + Ort::Session *session = static_cast(model->onnxSession); delete session; } - if (model->session_options) { - Ort::SessionOptions *session_options = static_cast(model->session_options); - delete session_options; + if (model->sessionOptions) { + Ort::SessionOptions *sessionOptions = static_cast(model->sessionOptions); + delete sessionOptions; } if (model->env) { @@ -170,8 +149,8 @@ void free_face_recognition_model(face_recognition_model_t *model) { delete env; } - if (model->database_ids) { - delete[] model->database_ids; + if (model->databaseIds) { + delete[] model->databaseIds; } if (model->database) { @@ -179,139 +158,139 @@ void free_face_recognition_model(face_recognition_model_t *model) { delete database; } - if (model->person_names) { - for (int i = 0; i < model->n_persons; i++) - delete[] model->person_names[i]; - delete[] model->person_names; + if (model->personNames) { + for (int i = 0; i < model->nPersons; i++) + delete[] model->personNames[i]; + delete[] model->personNames; } } -void ff_face_recognition(face_recognition_model_t *model, opendr_image_t *image, cv::Mat *features) { - Ort::Session *session = static_cast(model->onnx_session); +void ffFaceRecognition(FaceRecognitionModelT *model, OpendrImageT *image, cv::Mat *features) { + Ort::Session *session = static_cast(model->onnxSession); if (!session) { std::cerr << "ONNX session not initialized." << std::endl; return; } // Prepare the input dimensions - std::vector input_node_dims = {1, 3, model->model_size, model->model_size}; - size_t input_tensor_size = model->model_size * model->model_size * 3; + std::vector inputNodeDims = {1, 3, model->modelSize, model->modelSize}; + size_t inputTensorSize = model->modelSize * model->modelSize * 3; // Get the input image and pre-process it - std::vector input_tensor_values(input_tensor_size); - cv::Mat *opencv_image = static_cast(image->data); - if (!opencv_image) { + std::vector inputTensorValues(inputTensorSize); + cv::Mat *opencvImage = static_cast(image->data); + if (!opencvImage) { std::cerr << "Cannot load image for inference." 
<< std::endl;
    return;
  }
-  preprocess_face_recognition(opencv_image, input_tensor_values, model->resize_size, model->model_size, model->mean_value,
-                              model->std_value);
+  preprocessFaceRecognition(opencvImage, inputTensorValues, model->resizeSize, model->modelSize, model->meanValue,
+                            model->stdValue);

   // Set up input/output names
   Ort::AllocatorWithDefaultOptions allocator;
-  std::vector<const char *> input_node_names = {"data"};
-  std::vector<const char *> output_node_names = {"features"};
+  std::vector<const char *> inputNodeNames = {"data"};
+  std::vector<const char *> outputNodeNames = {"features"};

-  // Setup the input tensor
-  auto memory_info = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
-  Ort::Value input_tensor =
-    Ort::Value::CreateTensor<float>(memory_info, input_tensor_values.data(), input_tensor_size, input_node_dims.data(), 4);
-  assert(input_tensor.IsTensor());
+  // Set up the input tensor
+  auto memoryInfo = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
+  Ort::Value inputTensor =
+    Ort::Value::CreateTensor<float>(memoryInfo, inputTensorValues.data(), inputTensorSize, inputNodeDims.data(), 4);
+  assert(inputTensor.IsTensor());

   // Feed-forward the model
-  auto output_tensors =
-    session->Run(Ort::RunOptions{nullptr}, input_node_names.data(), &input_tensor, 1, output_node_names.data(), 1);
-  assert(output_tensors.size() == 1 && output_tensors.front().IsTensor());
+  auto outputTensors =
+    session->Run(Ort::RunOptions{nullptr}, inputNodeNames.data(), &inputTensor, 1, outputNodeNames.data(), 1);
+  assert(outputTensors.size() == 1 && outputTensors.front().IsTensor());

   // Get the results back
-  float *floatarr = output_tensors.front().GetTensorMutableData<float>();
-  cv::Mat cur_features(cv::Size(model->output_size, 1), CV_32F, floatarr);
+  float *floatarr = outputTensors.front().GetTensorMutableData<float>();
+  cv::Mat curFeatures(cv::Size(model->outputSize, 1), CV_32F, floatarr);

   // Perform l2 normalization
-  cv::Mat features_square = cur_features.mul(cur_features);
-  float norm = sqrt(cv::sum(features_square)[0]);
-  cur_features = cur_features / norm;
-  memcpy(features->data, cur_features.data, sizeof(float) * model->output_size);
+  cv::Mat featuresSquare = curFeatures.mul(curFeatures);
+  float norm = sqrt(cv::sum(featuresSquare)[0]);
+  curFeatures = curFeatures / norm;
+  memcpy(features->data, curFeatures.data, sizeof(float) * model->outputSize);
 }

-void build_database_face_recognition(const char *database_folder, const char *output_path, face_recognition_model_t *model) {
+void buildDatabaseFaceRecognition(const char *databaseFolder, const char *outputPath, FaceRecognitionModelT *model) {
   using namespace boost::filesystem;
-  std::vector<std::string> person_names;
-  std::vector<int> database_ids;
-  cv::Mat database(cv::Size(model->output_size, 0), CV_32F);
+  std::vector<std::string> personNames;
+  std::vector<int> databaseIds;
+  cv::Mat database(cv::Size(model->outputSize, 0), CV_32F);

-  path root_path(database_folder);
-  if (!exists(root_path)) {
+  path rootPath(databaseFolder);
+  if (!exists(rootPath)) {
     std::cerr << "Database path does not exist." << std::endl;
    return;
  }

-  int current_id = 0;
-  for (auto person_path = directory_iterator(root_path); person_path != directory_iterator(); person_path++) {
+  int currentId = 0;
+  for (auto personPath = directory_iterator(rootPath); personPath != directory_iterator(); personPath++) {
     // For each person in the database
-    if (is_directory(person_path->path())) {
-      path cur_person_path(person_path->path());
-      person_names.push_back(person_path->path().filename().string());
+    if (is_directory(personPath->path())) {
+      path curPersonPath(personPath->path());
+      personNames.push_back(personPath->path().filename().string());

-      for (auto cur_img_path = directory_iterator(cur_person_path); cur_img_path != directory_iterator(); cur_img_path++) {
-        opendr_image_t image;
-        load_image(cur_img_path->path().string().c_str(), &image);
+      for (auto curImgPath = directory_iterator(curPersonPath); curImgPath != directory_iterator(); curImgPath++) {
+        OpendrImageT image;
+        loadImage(curImgPath->path().string().c_str(), &image);

-        cv::Mat features(cv::Size(model->output_size, 1), CV_32F);
-        ff_face_recognition(model, &image, &features);
+        cv::Mat features(cv::Size(model->outputSize, 1), CV_32F);
+        ffFaceRecognition(model, &image, &features);

-        free_image(&image);
+        freeImage(&image);
         database.push_back(features.clone());
-        database_ids.push_back(current_id);
+        databaseIds.push_back(currentId);
       }
-      current_id++;
+      currentId++;
     } else {
       continue;
     }
   }

-  if (current_id == 0) {
+  if (currentId == 0) {
     std::cerr << "Cannot open database files." << std::endl;
     return;
   }

   // Make the array continuous
-  cv::Mat database_out = database.clone();
+  cv::Mat databaseOut = database.clone();

-  std::ofstream fout(output_path, std::ios::out | std::ios::binary);
+  std::ofstream fout(outputPath, std::ios::out | std::ios::binary);
   if (!fout.is_open()) {
     std::cerr << "Cannot open database file for writing." << std::endl;
     return;
   }

   // Write number of persons
-  int n = person_names.size();
+  int n = personNames.size();
   fout.write(reinterpret_cast<char *>(&n), sizeof(int));
   for (int i = 0; i < n; i++) {
     // Write the name of the person (along with its size)
-    int name_length = person_names[i].size() + 1;
-    fout.write(reinterpret_cast<char *>(&name_length), sizeof(int));
-    fout.write(person_names[i].c_str(), name_length);
+    int nameLength = personNames[i].size() + 1;
+    fout.write(reinterpret_cast<char *>(&nameLength), sizeof(int));
+    fout.write(personNames[i].c_str(), nameLength);
   }
-  cv::Size s = database_out.size();
+  cv::Size s = databaseOut.size();
   fout.write(reinterpret_cast<char *>(&s.height), sizeof(int));
   fout.write(reinterpret_cast<char *>(&s.width), sizeof(int));
-  fout.write(reinterpret_cast<char *>(database_out.data), sizeof(float) * s.height * s.width);
-  fout.write(reinterpret_cast<char *>(&database_ids[0]), sizeof(int) * s.height);
+  fout.write(reinterpret_cast<char *>(databaseOut.data), sizeof(float) * s.height * s.width);
+  fout.write(reinterpret_cast<char *>(&databaseIds[0]), sizeof(int) * s.height);
   fout.flush();
   fout.close();
 }

-void load_database_face_recognition(const char *database_path, face_recognition_model_t *model) {
+void loadDatabaseFaceRecognition(const char *databasePath, FaceRecognitionModelT *model) {
   model->database = NULL;
-  model->database_ids = NULL;
+  model->databaseIds = NULL;

-  std::ifstream fin(database_path, std::ios::out | std::ios::binary);
+  std::ifstream fin(databasePath, std::ios::in | std::ios::binary);
   if (!fin.is_open()) {
     std::cerr << "Cannot load database file (check that file exists and you have created the database)."
<< std::endl; @@ -319,50 +298,50 @@ void load_database_face_recognition(const char *database_path, face_recognition_ } int n; fin.read(reinterpret_cast(&n), sizeof(int)); - char **person_names = new char *[n]; + char **personNames = new char *[n]; for (int i = 0; i < n; i++) { - person_names[i] = new char[512]; + personNames[i] = new char[512]; // Read person name - int name_length; - fin.read(reinterpret_cast(&name_length), sizeof(int)); - if (name_length > 512) { + int nameLength; + fin.read(reinterpret_cast(&nameLength), sizeof(int)); + if (nameLength > 512) { std::cerr << "Person name exceeds max number of characters (512)" << std::endl; return; } - fin.read(person_names[i], name_length); + fin.read(personNames[i], nameLength); } int height, width; fin.read(reinterpret_cast(&height), sizeof(int)); fin.read(reinterpret_cast(&width), sizeof(int)); - float *database_buff = new float[height * width]; - int *features_ids = new int[height]; - fin.read(reinterpret_cast(database_buff), sizeof(float) * height * width); - fin.read(reinterpret_cast(features_ids), sizeof(int) * height); + float *databaseBuff = new float[height * width]; + int *featuresIds = new int[height]; + fin.read(reinterpret_cast(databaseBuff), sizeof(float) * height * width); + fin.read(reinterpret_cast(featuresIds), sizeof(int) * height); fin.close(); cv::Mat *database = new cv::Mat(cv::Size(width, height), CV_32F); - memcpy(database->data, database_buff, sizeof(float) * width * height); - delete[] database_buff; + memcpy(database->data, databaseBuff, sizeof(float) * width * height); + delete[] databaseBuff; model->database = database; - model->database_ids = features_ids; - model->person_names = person_names; - model->n_persons = n; - model->n_features = height; + model->databaseIds = featuresIds; + model->personNames = personNames; + model->nPersons = n; + model->nFeatures = height; } -opendr_category_target_t infer_face_recognition(face_recognition_model_t *model, opendr_image_t *image) { - cv::Mat features(cv::Size(model->output_size, 1), CV_32F); - opendr_category_target_t target; +OpendrCategoryTargetT inferFaceRecognition(FaceRecognitionModelT *model, OpendrImageT *image) { + cv::Mat features(cv::Size(model->outputSize, 1), CV_32F); + OpendrCategoryTargetT target; target.data = -1; target.confidence = 0; // Get the feature vector for the current image - ff_face_recognition(model, image, &features); + ffFaceRecognition(model, image, &features); if (!model->database) { std::cerr << "Database is not loaded!" 
<< std::endl; @@ -370,27 +349,27 @@ opendr_category_target_t infer_face_recognition(face_recognition_model_t *model, } cv::Mat *database = static_cast(model->database); // Calculate the distance between the extracted feature vector and database features - cv::Mat features_repeated; - cv::repeat(features, model->n_features, 1, features_repeated); - cv::Mat diff = features_repeated - *database; + cv::Mat featuresRepeated; + cv::repeat(features, model->nFeatures, 1, featuresRepeated); + cv::Mat diff = featuresRepeated - *database; diff = diff.mul(diff); - cv::Mat sq_dists; - cv::reduce(diff, sq_dists, 1, CV_REDUCE_SUM, CV_32F); + cv::Mat sqDists; + cv::reduce(diff, sqDists, 1, CV_REDUCE_SUM, CV_32F); cv::Mat dists; - cv::sqrt(sq_dists, dists); + cv::sqrt(sqDists, dists); - double min_dist, max_dist; - cv::Point min_loc, max_loc; - cv::minMaxLoc(dists, &min_dist, &max_dist, &min_loc, &max_loc); + double minDist, maxDist; + cv::Point minLoc, maxLoc; + cv::minMaxLoc(dists, &minDist, &maxDist, &minLoc, &maxLoc); - target.data = model->database_ids[min_loc.y]; - target.confidence = 1 - (min_dist / model->threshold); + target.data = model->databaseIds[minLoc.y]; + target.confidence = 1 - (minDist / model->threshold); return target; } -void decode_category_face_recognition(face_recognition_model_t *model, opendr_category_target_t category, char *person_name) { - if (category.data >= model->n_persons) +void decodeCategoryFaceRecognition(FaceRecognitionModelT *model, OpendrCategoryTargetT category, char *personName) { + if (category.data >= model->nPersons) return; - strcpy(person_name, model->person_names[category.data]); + strcpy(personName, model->personNames[category.data]); } diff --git a/src/c_api/object_detection_2d_nanodet_jit.cpp b/src/c_api/object_detection_2d_nanodet_jit.cpp new file mode 100644 index 0000000000..4fa8e37940 --- /dev/null +++ b/src/c_api/object_detection_2d_nanodet_jit.cpp @@ -0,0 +1,362 @@ +// Copyright 2020-2023 OpenDR European Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "object_detection_2d_nanodet_jit.h" + +#include +#include +#include +#include +#include + +/** + * Helper class holder of c++ values and jit model. 
+ */ +class NanoDet { +private: + torch::DeviceType mDevice; + torch::jit::script::Module mNetwork; + torch::Tensor mMeanTensor; + torch::Tensor mStdTensor; + std::vector mLabels; + +public: + NanoDet(torch::jit::script::Module network, torch::Tensor meanValues, torch::Tensor stdValues, torch::DeviceType device, + std::vector labels); + ~NanoDet(); + + torch::Tensor preProcess(cv::Mat *image); + torch::jit::script::Module network() const; + torch::Tensor meanTensor() const; + torch::Tensor stdTensor() const; + std::vector labels() const; + std::vector outputs; +}; + +NanoDet::NanoDet(torch::jit::script::Module network, torch::Tensor meanValues, torch::Tensor stdValues, + torch::DeviceType device, const std::vector labels) { + this->mDevice = device; + this->mNetwork = network; + this->mMeanTensor = meanValues.clone().to(device); + this->mStdTensor = stdValues.clone().to(device); + this->mLabels = labels; +} + +NanoDet::~NanoDet() { +} + +/** + * Helper function for preprocessing images for normalization. + * This function follows the OpenDR's Nanodet pre-processing pipeline for color normalization. + * Mean and Standard deviation are already part of NanoDet class when it is initialized. + * @param image, image to be preprocessed + */ +torch::Tensor NanoDet::preProcess(cv::Mat *image) { + torch::Tensor tensorImage = torch::from_blob(image->data, {image->rows, image->cols, 3}, torch::kByte); + tensorImage = tensorImage.toType(torch::kFloat); + tensorImage = tensorImage.to(this->mDevice); + tensorImage = tensorImage.permute({2, 0, 1}); + tensorImage = tensorImage.add(this->mMeanTensor); + tensorImage = tensorImage.mul(this->mStdTensor); + + return tensorImage; +} + +/** + * Getter for jit model + */ +torch::jit::script::Module NanoDet::network() const { + return this->mNetwork; +} + +/** + * Getter for tensor with the mean values + */ +torch::Tensor NanoDet::meanTensor() const { + return this->mMeanTensor; +} + +/** + * Getter for tensor with the standard deviation values + */ +torch::Tensor NanoDet::stdTensor() const { + return this->mStdTensor; +} + +/** + * Getter of labels + */ +std::vector NanoDet::labels() const { + return this->mLabels; +} + +/** + * Helper function to calculate the final shape of the model input relative to size ratio of input image. + */ +void getMinimumDstShape(cv::Size *srcSize, cv::Size *dstSize, float divisible) { + float ratio; + float srcRatio = ((float)srcSize->width / (float)srcSize->height); + float dstRatio = ((float)dstSize->width / (float)dstSize->height); + if (srcRatio < dstRatio) + ratio = ((float)dstSize->height / (float)srcSize->height); + else + ratio = ((float)dstSize->width / (float)srcSize->width); + + dstSize->width = static_cast(ratio * srcSize->width); + dstSize->height = static_cast(ratio * srcSize->height); + + if (divisible > 0) { + dstSize->width = std::max(divisible, ((int)((dstSize->width + divisible - 1) / divisible) * divisible)); + dstSize->height = std::max(divisible, ((int)((dstSize->height + divisible - 1) / divisible) * divisible)); + } +} + +/** + * Helper function to calculate the warp matrix for resizing. 
+ */ +void getResizeMatrix(cv::Size *srcShape, cv::Size *dstShape, cv::Mat *Rs, int keepRatio) { + if (keepRatio == 1) { + float ratio; + cv::Mat C = cv::Mat::eye(3, 3, CV_32FC1); + + C.at(0, 2) = -srcShape->width / 2.0; + C.at(1, 2) = -srcShape->height / 2.0; + float srcRatio = ((float)srcShape->width / (float)srcShape->height); + float dstRatio = ((float)dstShape->width / (float)dstShape->height); + if (srcRatio < dstRatio) { + ratio = ((float)dstShape->height / (float)srcShape->height); + } else { + ratio = ((float)dstShape->width / (float)srcShape->width); + } + + Rs->at(0, 0) *= ratio; + Rs->at(1, 1) *= ratio; + + cv::Mat T = cv::Mat::eye(3, 3, CV_32FC1); + T.at(0, 2) = 0.5 * dstShape->width; + T.at(1, 2) = 0.5 * dstShape->height; + + *Rs = T * (*Rs) * C; + } else { + Rs->at(0, 0) *= (float)dstShape->width / (float)srcShape->width; + Rs->at(1, 1) *= (float)dstShape->height / (float)srcShape->height; + } +} + +/** + * Helper function for preprocessing images for resizing. + * This function follows OpenDR's Nanodet pre-processing pipeline for shape transformation, which includes + * finding the actual final size of the model input if keep ratio is enabled, calculating the warp matrix and finally + * resizing and warping the perspective of the input image. + * @param src, image to be preprocessed + * @param dst, output image to be used as model input + * @param dstSize, final size of the dst + * @param warpMatrix, matrix to be used for warp perspective + * @param keepRatio, flag for targeting the resized image size relative to input image ratio + */ +void preprocess(cv::Mat *src, cv::Mat *dst, cv::Size *dstSize, cv::Mat *warpMatrix, int keepRatio) { + cv::Size srcSize = cv::Size(src->cols, src->rows); + const float divisible = 0.0; + + // Get new destination size if keep ratio is enabled + if (keepRatio == 1) { + getMinimumDstShape(&srcSize, dstSize, divisible); + } + + getResizeMatrix(&srcSize, dstSize, warpMatrix, keepRatio); + cv::warpPerspective(*src, *dst, *warpMatrix, *dstSize); +} + +/** + * Helper function to determine the device of jit model and tensors. 
+ */ +torch::DeviceType torchDevice(char *deviceName, int verbose = 0) { + torch::DeviceType device; + if (std::string(deviceName) == "cuda") { + if (verbose == 1) + printf("to cuda\n"); + device = torch::kCUDA; + } else { + if (verbose == 1) + printf("to cpu\n"); + device = torch::kCPU; + } + return device; +} + +void loadNanodetModel(char *modelPath, char *device, int height, int width, float scoreThreshold, NanodetModelT *model) { + // Initialize model + model->inputSizes[0] = width; + model->inputSizes[1] = height; + + model->scoreThreshold = scoreThreshold; + model->keepRatio = 1; + + const std::vector labels{ + "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", + "truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", + "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", + "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", + "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", + "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", + "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", + "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", + "chair", "couch", "potted plant", "bed", "dining table", "toilet", "tv", + "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", + "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", + "teddy bear", "hair drier", "toothbrush"}; + + int **colorList = new int *[labels.size()]; + for (int i = 0; i < labels.size(); i++) { + colorList[i] = new int[3]; + } + // seed the random number generator + std::srand(1); + for (int i = 0; i < labels.size(); i++) { + for (int j = 0; j < 3; j++) { + colorList[i][j] = std::rand() % 256; + } + } + + // mean and standard deviation tensors for normalization of input + torch::Tensor meanTensor = torch::tensor({{{-103.53f}}, {{-116.28f}}, {{-123.675f}}}); + torch::Tensor stdValues = torch::tensor({{{0.017429f}}, {{0.017507f}}, {{0.017125f}}}); + + // initialization of jit model and class as holder of c++ values. + torch::DeviceType initDevice = torchDevice(device, 1); + torch::jit::script::Module network = torch::jit::load(modelPath, initDevice); + network.eval(); + + NanoDet *detector = new NanoDet(network, meanTensor, stdValues, initDevice, labels); + + model->network = static_cast(detector); + model->colorList = colorList; + model->numberOfClasses = labels.size(); +} + +void ffNanodet(NanoDet *model, torch::Tensor *inputTensor, cv::Mat *warpMatrix, cv::Size *originalSize, + torch::Tensor *outputs) { + // Make all the inputs as tensors to use in jit model + torch::Tensor srcHeight = torch::tensor(originalSize->height); + torch::Tensor srcWidth = torch::tensor(originalSize->width); + torch::Tensor warpMat = torch::from_blob(warpMatrix->data, {3, 3}); + + // Model inference + *outputs = (model->network()).forward({*inputTensor, srcWidth, srcHeight, warpMat}).toTensor(); + *outputs = outputs->to(torch::Device(torch::kCPU, 0)); +} + +OpendrDetectionVectorTargetT inferNanodet(NanodetModelT *model, OpendrImageT *image) { + NanoDet *networkPTR = static_cast(model->network); + OpendrDetectionVectorTargetT detectionsVector; + initDetectionsVector(&detectionsVector); + + cv::Mat *opencvImage = static_cast(image->data); + if (!opencvImage) { + std::cerr << "Cannot load image for inference." 
+    return detectionsVector;
+  }
+
+  // Preprocess image and keep values as input in jit model
+  cv::Mat resizedImg;
+  cv::Size dstSize = cv::Size(model->inputSizes[0], model->inputSizes[1]);
+  cv::Mat warpMatrix = cv::Mat::eye(3, 3, CV_32FC1);
+  preprocess(opencvImage, &resizedImg, &dstSize, &warpMatrix, model->keepRatio);
+  torch::Tensor input = networkPTR->preProcess(&resizedImg);
+  cv::Size originalSize(opencvImage->cols, opencvImage->rows);
+
+  torch::Tensor outputs;
+  ffNanodet(networkPTR, &input, &warpMatrix, &originalSize, &outputs);
+
+  std::vector<OpendrDetectionTargetT> detections;
+  // Postprocessing, find which outputs have better score than threshold and keep them.
+  for (int label = 0; label < outputs.size(0); label++) {
+    for (int box = 0; box < outputs.size(1); box++) {
+      if (outputs[label][box][4].item<float>() > model->scoreThreshold) {
+        OpendrDetectionTargetT detection;
+        detection.name = label;
+        detection.left = outputs[label][box][0].item<float>();
+        detection.top = outputs[label][box][1].item<float>();
+        detection.width = outputs[label][box][2].item<float>() - outputs[label][box][0].item<float>();
+        detection.height = outputs[label][box][3].item<float>() - outputs[label][box][1].item<float>();
+        detection.score = outputs[label][box][4].item<float>();
+        detections.push_back(detection);
+      }
+    }
+  }
+
+  // Put vector detection as C pointer and size
+  if (static_cast<int>(detections.size()) > 0)
+    loadDetectionsVector(&detectionsVector, detections.data(), static_cast<int>(detections.size()));
+
+  return detectionsVector;
+}
+
+void drawBboxes(OpendrImageT *image, NanodetModelT *model, OpendrDetectionVectorTargetT *detectionsVector) {
+  int **colorList = model->colorList;
+
+  std::vector<std::string> classNames = (static_cast<NanoDet *>(model->network))->labels();
+
+  cv::Mat *opencvImage = static_cast<cv::Mat *>(image->data);
+  if (!opencvImage) {
+    std::cerr << "Cannot load image for inference." << std::endl;
+    return;
+  }
+
+  cv::Mat imageWithDetections = (*opencvImage).clone();
+  for (size_t i = 0; i < detectionsVector->size; i++) {
+    const OpendrDetectionTarget bbox = (detectionsVector->startingPointer)[i];
+    float score = bbox.score > 1 ? 1 : bbox.score;
+    if (score > model->scoreThreshold) {
+      cv::Scalar color = cv::Scalar(colorList[bbox.name][0], colorList[bbox.name][1], colorList[bbox.name][2]);
+      cv::rectangle(imageWithDetections,
+                    cv::Rect(cv::Point(bbox.left, bbox.top), cv::Point((bbox.left + bbox.width), (bbox.top + bbox.height))),
+                    color);
+
+      char text[256];
+
+      sprintf(text, "%s %.1f%%", (classNames)[bbox.name].c_str(), score * 100);
+
+      int baseLine = 0;
+      cv::Size labelSize = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.4, 1, &baseLine);
+
+      int x = (int)bbox.left;
+      int y = (int)bbox.top;
+      if (y < 0)
+        y = 0;
+      if (x + labelSize.width > imageWithDetections.cols)
+        x = imageWithDetections.cols - labelSize.width;
+
+      cv::rectangle(imageWithDetections, cv::Rect(cv::Point(x, y), cv::Size(labelSize.width, labelSize.height + baseLine)),
+                    color, -1);
+      cv::putText(imageWithDetections, text, cv::Point(x, y + labelSize.height), cv::FONT_HERSHEY_SIMPLEX, 0.4,
+                  cv::Scalar(255, 255, 255));
+    }
+  }
+
+  cv::imshow("image", imageWithDetections);
+  cv::waitKey(0);
+}
+
+void freeNanodetModel(NanodetModelT *model) {
+  if (model->network) {
+    NanoDet *networkPTR = static_cast<NanoDet *>(model->network);
+    delete networkPTR;
+  }
+
+  for (int i = 0; i < model->numberOfClasses; i++) {
+    delete[] model->colorList[i];
+  }
+  delete[] model->colorList;
+}
diff --git a/src/c_api/opendr_utils.cpp b/src/c_api/opendr_utils.cpp
index 55249bc728..42563e58b9 100644
--- a/src/c_api/opendr_utils.cpp
+++ b/src/c_api/opendr_utils.cpp
@@ -20,18 +20,124 @@
 #include
 #include
-void load_image(const char *path, opendr_image_t *image) {
-  cv::Mat opencv_image = cv::imread(path, cv::IMREAD_COLOR);
-  if (opencv_image.empty()) {
+#include <cstring>
+#include <rapidjson/document.h>
+#include <vector>
+
+float jsonGetKeyFromInferenceParams(const char *json, const char *key, const int index) {
+  rapidjson::Document doc;
+  doc.Parse(json);
+  if ((!doc.IsObject()) || (!doc.HasMember("inference_params"))) {
+    return 0.0f;
+  }
+  const rapidjson::Value &inferenceParams = doc["inference_params"];
+  if ((!inferenceParams.IsObject()) || (!inferenceParams.HasMember(key))) {
+    return 0.0f;
+  }
+  const rapidjson::Value &value = inferenceParams[key];
+  if (value.IsArray()) {
+    if (value.Size() <= index) {
+      return 0.0f;
+    }
+    if (!value[index].IsFloat()) {
+      return 0.0f;
+    }
+    return value[index].GetFloat();
+  }
+  if (!value.IsFloat()) {
+    return 0.0f;
+  }
+  return value.GetFloat();
+}
+
+const char *jsonGetKeyString(const char *json, const char *key, const int index) {
+  rapidjson::Document doc;
+  doc.Parse(json);
+  if ((!doc.IsObject()) || (!doc.HasMember(key))) {
+    return "";
+  }
+  const rapidjson::Value &value = doc[key];
+  if (value.IsArray()) {
+    if (value.Size() <= index) {
+      return "";
+    }
+    if (!value[index].IsString()) {
+      return "";
+    }
+    return value[index].GetString();
+  }
+  if (!value.IsString()) {
+    return "";
+  }
+  return value.GetString();
+}
+
+float jsonGetKeyFloat(const char *json, const char *key, const int index) {
+  rapidjson::Document doc;
+  doc.Parse(json);
+  if ((!doc.IsObject()) || (!doc.HasMember(key))) {
+    return 0.0f;
+  }
+  const rapidjson::Value &value = doc[key];
+  if (value.IsArray()) {
+    if (value.Size() <= index) {
+      return 0.0f;
+    }
+    if (!value[index].IsFloat()) {
+      return 0.0f;
+    }
+    return value[index].GetFloat();
+  }
+  if (!value.IsFloat()) {
+    return 0.0f;
+  }
+  return value.GetFloat();
+}
+
+void loadImage(const char *path, OpendrImageT *image) {
+  cv::Mat opencvImage = cv::imread(path, cv::IMREAD_COLOR);
+  if (opencvImage.empty()) {
     image->data = NULL;
   } else {
-    image->data = new cv::Mat(opencv_image);
+    image->data = new cv::Mat(opencvImage);
   }
 }
 
-void free_image(opendr_image_t *image) {
+void freeImage(OpendrImageT *image) {
   if (image->data) {
-    cv::Mat *opencv_image = static_cast<cv::Mat *>(image->data);
-    delete opencv_image;
+    cv::Mat *opencvImage = static_cast<cv::Mat *>(image->data);
+    delete opencvImage;
   }
 }
+
+void initDetectionsVector(OpendrDetectionVectorTargetT *detectionVector) {
+  detectionVector->startingPointer = NULL;
+
+  std::vector<OpendrDetectionTargetT> detections;
+  OpendrDetectionTargetT detection;
+
+  detection.name = -1;
+  detection.left = 0.0;
+  detection.top = 0.0;
+  detection.width = 0.0;
+  detection.height = 0.0;
+  detection.score = 0.0;
+
+  detections.push_back(detection);
+
+  loadDetectionsVector(detectionVector, detections.data(), static_cast<int>(detections.size()));
+}
+
+void loadDetectionsVector(OpendrDetectionVectorTargetT *detectionVector, OpendrDetectionTargetT *detection, int vectorSize) {
+  freeDetectionsVector(detectionVector);
+
+  detectionVector->size = vectorSize;
+  int sizeOfOutput = (vectorSize) * sizeof(OpendrDetectionTargetT);
+  detectionVector->startingPointer = static_cast<OpendrDetectionTargetT *>(malloc(sizeOfOutput));
+  std::memcpy(detectionVector->startingPointer, detection, sizeOfOutput);
+}
+
+void freeDetectionsVector(OpendrDetectionVectorTargetT *detectionVector) {
+  if (detectionVector->startingPointer != NULL)
+    free(detectionVector->startingPointer);
+}
diff --git a/src/opendr/perception/object_detection_2d/nanodet/README.md b/src/opendr/perception/object_detection_2d/nanodet/README.md
index 409e07a847..777a4eb0e6 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/README.md
+++ b/src/opendr/perception/object_detection_2d/nanodet/README.md
@@ -10,4 +10,4 @@ Large parts of the implementation are taken from [Nanodet Github](https://github
 Usage
 ------
 - For VOC and COCO like datasets, an ```ExternalDataset``` with the root path and dataset name (```voc```, ```coco```) must be passed to the fit function.
-- The ```temp_path``` folder is used to save checkpoints during training.
\ No newline at end of file
+- The ```temp``` folder is used to save checkpoints during training.
\ No newline at end of file diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml index cdddc320cb..d47708a05f 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml @@ -5,7 +5,7 @@ # AP_small = 0.079 # AP_m = 0.243 # AP_l = 0.406 -save_dir: ./workspace/efficient0_320 +save_dir: ./temp/efficient0_320 check_point_name: EfficientNet_Lite0_320 model: arch: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml index a189662a77..859dbe00e1 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml @@ -5,7 +5,7 @@ # AP_small = 0.122 # AP_m = 0.321 # AP_l = 0.432 -save_dir: ./workspace/efficient1_416_SGD +save_dir: ./temp/efficient1_416_SGD check_point_name: EfficientNet_Lite1_416 model: arch: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml index 20664fe7ca..a4248e7eda 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml @@ -5,7 +5,7 @@ # AP_small = 0.152 # AP_m = 0.342 # AP_l = 0.481 -save_dir: ./workspace/efficientlite2_512 +save_dir: ./temp/efficientlite2_512 check_point_name: EfficientNet_Lite2_512 model: arch: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml index 8a0d8debeb..fa93e55896 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml @@ -1,4 +1,4 @@ -save_dir: ./workspace/RepVGG_A0_416 +save_dir: ./temp/RepVGG_A0_416 check_point_name: RepVGG_A0_416 model: arch: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml index a8c312cd61..aa1986f0c3 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml 
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml
@@ -6,7 +6,7 @@
 # AP_m = 0.214
 # AP_l = 0.364
-save_dir: ./workspace/nanodet_t
+save_dir: ./temp/nanodet_t
 check_point_name: t
 model:
   arch:
@@ -54,7 +54,7 @@ model:
 data:
   train:
     input_size: [320,320] #[w,h]
-    keep_ratio: True
+    keep_ratio: False
    pipeline:
      perspective: 0.0
      scale: [0.6, 1.4]
@@ -69,7 +69,8 @@ data:
       normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]]
   val:
     input_size: [320,320] #[w,h]
-    keep_ratio: True
+    keep_ratio: False # feature_hw: [20,20]. The size for the position embedding is hard-coded and cannot take arbitrary values,
+                      # so please use images with the standard ratio, or change this value accordingly if you want to keep the ratio.
    pipeline:
      normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]]
 device:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml
index 0d09c335ab..8d2ae3cd91 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml
@@ -3,7 +3,7 @@
 # Flops = 4.2B
 # Params = 3.8M
 # COCO pre-trained weight link: https://drive.google.com/file/d/10uW7oqZKw231l_tr4C1bJWkbCXgBf7av/view?usp=sharing
-save_dir: ./workspace/nanodet_g
+save_dir: ./temp/nanodet_g
 check_point_name: g
 model:
   arch:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml
index 876168e7ad..7bd0d075ab 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml
@@ -1,5 +1,5 @@
 #Config File example
-save_dir: ./workspace/nanodet_m
+save_dir: ./temp/nanodet_m
 check_point_name: m
 model:
   arch:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml
index 2a38388336..c067a1535f 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml
@@ -5,7 +5,7 @@
 # AP_small = 0.036
 # AP_m = 0.119
 # AP_l = 0.232
-save_dir: ./workspace/nanodet_m_0.5x
+save_dir: ./temp/nanodet_m_0.5x
 check_point_name: m_0.5x
 model:
   arch:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml
index a54268f70a..90c2c34d3b 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml
@@ -5,7 +5,7 @@
 # AP_small = 0.069
 # AP_m = 0.235
 # AP_l = 0.389
-save_dir: ./workspace/nanodet_m_1.5x
+save_dir: ./temp/nanodet_m_1.5x
 check_point_name: m_1.5x
 model:
   arch:
diff --git
a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml index b8274403b1..b6332a5aa1 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml @@ -5,7 +5,7 @@ # AP_small = 0.098 # AP_m = 0.277 # AP_l = 0.420 -save_dir: ./workspace/nanodet_m_1.5x_416 +save_dir: ./temp/nanodet_m_1.5x_416 check_point_name: m_1.5x_416 model: arch: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml index eb30de1e0d..bd8b4e2907 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml @@ -5,7 +5,7 @@ # AP_small = 0.082 # AP_m = 0.240 # AP_l = 0.375 -save_dir: ./workspace/nanodet_m_416 +save_dir: ./temp/nanodet_m_416 check_point_name: m_416 model: arch: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml deleted file mode 100644 index bf58986a48..0000000000 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml +++ /dev/null @@ -1,125 +0,0 @@ -# nanodet-plus-m-1.5x_416 -# COCO mAP(0.5:0.95) = 0.341 -# AP_50 = 0.506 -# AP_75 = 0.357 -# AP_small = 0.143 -# AP_m = 0.363 -# AP_l = 0.539 -save_dir: ./workspace/nanodet_plus_m_1.5x_416/test_training -check_point_name: plus_m_1.5x_416_default -model: - weight_averager: - name: ExpMovingAverager - decay: 0.9998 - arch: - name: NanoDetPlus - detach_epoch: 10 - backbone: - name: ShuffleNetV2 - model_size: 1.5x - out_stages: [2,3,4] - activation: LeakyReLU - fpn: - name: GhostPAN - in_channels: [176, 352, 704] - out_channels: 128 - kernel_size: 5 - num_extra_level: 1 - use_depthwise: True - activation: LeakyReLU - head: - name: NanoDetPlusHead - num_classes: 80 - input_channel: 128 - feat_channels: 128 - stacked_convs: 2 - kernel_size: 5 - strides: [8, 16, 32, 64] - activation: LeakyReLU - reg_max: 7 - norm_cfg: - type: BN - loss: - loss_qfl: - name: QualityFocalLoss - use_sigmoid: True - beta: 2.0 - loss_weight: 1.0 - loss_dfl: - name: DistributionFocalLoss - loss_weight: 0.25 - loss_bbox: - name: GIoULoss - loss_weight: 2.0 - # Auxiliary head, only use in training time. 
- aux_head: - name: SimpleConvHead - num_classes: 80 - input_channel: 256 - feat_channels: 256 - stacked_convs: 4 - strides: [8, 16, 32, 64] - activation: LeakyReLU - reg_max: 7 -data: - train: - input_size: [416,416] #[w,h] - keep_ratio: False - pipeline: - perspective: 0.0 - scale: [0.6, 1.4] - stretch: [[0.8, 1.2], [0.8, 1.2]] - rotation: 0 - shear: 0 - translate: 0.2 - flip: 0.5 - brightness: 0.2 - contrast: [0.6, 1.4] - saturation: [0.5, 1.2] - normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] - val: - input_size: [416,416] #[w,h] - keep_ratio: False - pipeline: - normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] -device: - gpu_ids: [0] - workers_per_gpu: 10 - batchsize_per_gpu: 12 #96 -schedule: - resume: 0 - optimizer: - name: AdamW - lr: 0.000125 - weight_decay: 0.05 - warmup: - name: linear - steps: 500 - ratio: 0.0001 - total_epochs: 300 - lr_schedule: - name: CosineAnnealingLR - T_max: 300 - eta_min: 0.00005 - val_intervals: 10 -grad_clip: 35 -evaluator: - name: CocoDetectionEvaluator - save_key: mAP -log: - interval: 50 - -class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', - 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', - 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', - 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', - 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', - 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', - 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', - 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', - 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', - 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', - 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', - 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', - 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', - 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml index 3dcd1a2973..c2a4a8bdc1 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml @@ -5,7 +5,7 @@ # AP_small = 0.102 # AP_m = 0.309 # AP_l = 0.493 -save_dir: ./workspace/nanodet_plus_m_1.5x_320 +save_dir: ./temp/nanodet_plus_m_1.5x_320 check_point_name: plus_m_1.5x_320 model: weight_averager: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml index 5a76789b50..f999d0c985 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml @@ -5,7 +5,7 @@ # AP_small = 0.143 # AP_m = 0.363 # AP_l = 0.539 -save_dir: ./workspace/nanodet_plus_m_1.5x_416 +save_dir: ./temp/nanodet_plus_m_1.5x_416 check_point_name: plus_m_1.5x_416 model: weight_averager: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml index e4b5f58f9c..ee4b5235bc 100644 --- 
a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml
@@ -5,7 +5,7 @@
 # AP_small = 0.083
 # AP_m = 0.278
 # AP_l = 0.451
-save_dir: ./workspace/nanodet_plus_m_320
+save_dir: ./temp/nanodet_plus_m_320
 check_point_name: plus_m_320
 model:
   weight_averager:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml
index 61a536ad7d..cd8ea9186b 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml
@@ -5,7 +5,7 @@
 # AP_small = 0.106
 # AP_m = 0.322
 # AP_l = 0.477
-save_dir: ./workspace/nanodet_plus_m_416
+save_dir: ./temp/nanodet_plus_m_416
 check_point_name: plus_m_416
 model:
   weight_averager:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py
index f84170a275..4b0c910d0f 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py
@@ -27,11 +27,38 @@ def stack_batch_img(
         img_widths.append(img.shape[-1])
     max_h, max_w = max(img_heights), max(img_widths)
     if divisible > 0:
-        max_h = (max_h + divisible - 1) // divisible * divisible
-        max_w = (max_w + divisible - 1) // divisible * divisible
+        max_h = torch.div((max_h + divisible - 1), divisible, rounding_mode='trunc') * divisible
+        max_w = torch.div((max_w + divisible - 1), divisible, rounding_mode='trunc') * divisible
 
     batch_imgs = []
     for img in img_tensors:
         padding_size = [0, max_w - img.shape[-1], 0, max_h - img.shape[-2]]
         batch_imgs.append(F.pad(img, padding_size, value=pad_value))
     return torch.stack(batch_imgs, dim=0).contiguous()
+
+
+def divisible_padding(
+    img_tensor: torch.Tensor, divisible: torch.Tensor = torch.tensor(0), pad_value: float = 0.0
+) -> torch.Tensor:
+    """
+    Helper function which uses only the pytorch api, for scripting and tracing.
+    Args:
+        img_tensor (torch.Tensor): image tensor to be padded
+        divisible (torch.Tensor): multiple that height and width are rounded up to; 0 disables padding
+        pad_value (float): value to pad with
+
+    Returns:
+        torch.Tensor.
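+
+    Illustrative example (shapes assumed, not from a real run): a (3, 300, 280)
+    image padded up to multiples of 32 becomes a (1, 3, 320, 288) batch:
+        >>> x = torch.zeros(3, 300, 280)
+        >>> divisible_padding(x, divisible=torch.tensor(32)).shape
+        torch.Size([1, 3, 320, 288])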
+ """ + assert divisible >= 0 + + img_heights = img_tensor.shape[-2] + img_widths = img_tensor.shape[-1] + + if divisible > 0: + img_heights = torch.div((img_heights + divisible - 1), divisible, rounding_mode='trunc') * divisible + img_widths = torch.div((img_widths + divisible - 1), divisible, rounding_mode='trunc') * divisible + + padding_size = [0, img_widths - img_tensor.shape[-1], 0, img_heights - img_tensor.shape[-2]] + batch_img = F.pad(img_tensor, padding_size, value=pad_value) + return batch_img.unsqueeze(0) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py index b68b60e389..6c40da7117 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py @@ -17,7 +17,7 @@ import copy from opendr.engine.datasets import ExternalDataset - +from opendr.perception.object_detection_2d.datasets import XMLBasedDataset from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset.coco import CocoDataset from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset.xml_dataset import XMLDataset @@ -54,5 +54,9 @@ def build_dataset(cfg, dataset, class_names, mode, verbose=True): if verbose: print("ExternalDataset loaded.") return dataset + elif isinstance(dataset, XMLBasedDataset): + dataset = XMLDataset(img_path=dataset.abs_images_dir, ann_path=dataset.abs_annot_dir, mode=mode, + class_names=dataset.classes, **dataset_cfg) + return dataset else: raise ValueError("Dataset type {} not supported".format(type(dataset))) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py index 24acdb1880..2b9cfe6d32 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py @@ -13,30 +13,12 @@ # limitations under the License. import functools -import warnings from typing import Dict, Tuple from torch.utils.data import Dataset from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.color import color_aug_and_norm -from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import ShapeTransform, warp_and_resize - - -class LegacyPipeline: - def __init__(self, cfg, keep_ratio): - warnings.warn( - "Deprecated warning! Pipeline from nanodet v0.x has been deprecated," - "Please use new Pipeline and update your config!" 
-        )
-        self.warp = functools.partial(
-            warp_and_resize, warp_kwargs=cfg, keep_ratio=keep_ratio
-        )
-        self.color = functools.partial(color_aug_and_norm, kwargs=cfg)
-
-    def __call__(self, meta, dst_shape):
-        meta = self.warp(meta, dst_shape=dst_shape)
-        meta = self.color(meta=meta)
-        return meta
+from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import ShapeTransform
 
 
 class Pipeline:
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py
index 6ffd1b66d3..29fa91d242 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py
@@ -14,10 +14,11 @@
 import math
 import random
-from typing import Dict, Optional, Tuple
+from typing import Tuple
 
 import cv2
 import numpy as np
+import torch
 
 
 def get_flip_matrix(prob=0.5):
@@ -93,7 +94,8 @@ def get_shear_matrix(degree):
 def get_translate_matrix(translate, width, height):
     """
-
+    :param width: image width, scales the random horizontal translation
+    :param height: image height, scales the random vertical translation
     :param translate:
     :return:
     """
@@ -136,60 +138,31 @@ def get_resize_matrix(raw_shape, dst_shape, keep_ratio):
     return Rs
 
 
-def warp_and_resize(
-    meta: Dict,
-    warp_kwargs: Dict,
-    dst_shape: Tuple[int, int],
-    keep_ratio: bool = True,
-):
-    # TODO: background, type
-    raw_img = meta["img"]
-    height = raw_img.shape[0]  # shape(h,w,c)
-    width = raw_img.shape[1]
-
-    # center
-    C = np.eye(3)
-    C[0, 2] = -width / 2
-    C[1, 2] = -height / 2
-
-    # do not change the order of mat mul
-    if "perspective" in warp_kwargs and random.randint(0, 1):
-        P = get_perspective_matrix(warp_kwargs["perspective"])
-        C = P @ C
-    if "scale" in warp_kwargs and random.randint(0, 1):
-        Scl = get_scale_matrix(warp_kwargs["scale"])
-        C = Scl @ C
-    if "stretch" in warp_kwargs and random.randint(0, 1):
-        Str = get_stretch_matrix(*warp_kwargs["stretch"])
-        C = Str @ C
-    if "rotation" in warp_kwargs and random.randint(0, 1):
-        R = get_rotation_matrix(warp_kwargs["rotation"])
-        C = R @ C
-    if "shear" in warp_kwargs and random.randint(0, 1):
-        Sh = get_shear_matrix(warp_kwargs["shear"])
-        C = Sh @ C
-    if "flip" in warp_kwargs:
-        F = get_flip_matrix(warp_kwargs["flip"])
-        C = F @ C
-    if "translate" in warp_kwargs and random.randint(0, 1):
-        T = get_translate_matrix(warp_kwargs["translate"], width, height)
+def scriptable_warp_boxes(boxes, M, width, height):
+    """
+    Warp boxes using only the pytorch api, so the function can be scripted and traced for optimization.
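+
+    Illustrative example (values assumed, not from a real run): a pure translation
+    by (+10, +20) pixels shifts a box by the same offsets:
+        >>> boxes = torch.tensor([[0., 0., 10., 10.]])
+        >>> M = torch.tensor([[1., 0., 10.], [0., 1., 20.], [0., 0., 1.]])
+        >>> scriptable_warp_boxes(boxes, M, 100, 100)
+        tensor([[10., 20., 20., 30.]])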
+ """ + n = boxes.shape[0] + if n: + # warp points + xy = torch.ones((n * 4, 3), dtype=torch.float32) + xy[:, :2] = boxes[:, [0, 1, 2, 3, 0, 3, 2, 1]].reshape( + n * 4, 2 + ) # x1y1, x2y2, x1y2, x2y1 + M = torch.transpose(M, 0, 1).float() + xy = torch.mm(xy, M) # transform + xy = (xy[:, :2] / xy[:, 2:3]).reshape(n, 8) # rescale + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + xy = torch.cat((x.min(1).values, y.min(1).values, x.max(1).values, y.max(1).values)).reshape(4, n) + xy = torch.transpose(xy, 0, 1).float() + # clip boxes + xy[:, [0, 2]] = xy[:, [0, 2]].clip(0, width) + xy[:, [1, 3]] = xy[:, [1, 3]].clip(0, height) + return xy else: - T = get_translate_matrix(0, width, height) - M = T @ C - # M = T @ Sh @ R @ Str @ P @ C - ResizeM = get_resize_matrix((width, height), dst_shape, keep_ratio) - M = ResizeM @ M - img = cv2.warpPerspective(raw_img, M, dsize=tuple(dst_shape)) - meta["img"] = img - meta["warp_matrix"] = M - if "gt_bboxes" in meta: - boxes = meta["gt_bboxes"] - meta["gt_bboxes"] = warp_boxes(boxes, M, dst_shape[0], dst_shape[1]) - if "gt_masks" in meta: - for i, mask in enumerate(meta["gt_masks"]): - meta["gt_masks"][i] = cv2.warpPerspective(mask, M, dsize=tuple(dst_shape)) - - return meta + return boxes def warp_boxes(boxes, M, width, height): @@ -217,7 +190,7 @@ def warp_boxes(boxes, M, width, height): def get_minimum_dst_shape( src_shape: Tuple[int, int], dst_shape: Tuple[int, int], - divisible: Optional[int] = None, + divisible: int = 0, ) -> Tuple[int, int]: """Calculate minimum dst shape""" src_w, src_h = src_shape diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py index c408d996a6..797a5e7cbb 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py @@ -26,6 +26,7 @@ from pycocotools.cocoeval import COCOeval from tabulate import tabulate +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util import mkdir logger = logging.getLogger("NanoDet") @@ -74,7 +75,7 @@ def results2json(self, results): json_results.append(detection) return json_results - def evaluate(self, results, save_dir): # rank=-1 + def evaluate(self, results, save_dir, rank=-1): results_json = self.results2json(results) if len(results_json) == 0: warnings.warn( @@ -87,9 +88,15 @@ def evaluate(self, results, save_dir): # rank=-1 for key in self.metric_names: empty_eval_results[key] = 0 return empty_eval_results - # json_path = os.path.join(save_dir, "results{}.json".format(rank)) - json_path = os.path.join(save_dir, "results.json") - json.dump(results_json, open(json_path, "w")) + if rank > 0: + json_path = os.path.join(save_dir, "results{}.json".format(rank)) + else: + json_path = os.path.join(save_dir, "results.json") + + mkdir(rank, save_dir) + with open(json_path, 'w') as f: + json.dump(results_json, f) + coco_dets = self.coco_api.loadRes(json_path) coco_eval = COCOeval( copy.deepcopy(self.coco_api), copy.deepcopy(coco_dets), "bbox" diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py index b20b891d58..c4683d8543 100644 --- 
a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py
@@ -14,23 +14,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
 import torch
+import torch.nn as nn
 
-from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.batch_process import stack_batch_img
-from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.collate import naive_collate
+from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.batch_process import divisible_padding
 from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform import Pipeline
 from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.arch import build_model
 
-image_ext = [".jpg", ".jpeg", ".webp", ".bmp", ".png"]
-video_ext = ["mp4", "mov", "avi", "mkv"]
-
 
-class Predictor(object):
-    def __init__(self, cfg, model, device="cuda"):
+class Predictor(nn.Module):
+    def __init__(self, cfg, model, device="cuda", nms_max_num=100):
+        super(Predictor, self).__init__()
         self.cfg = cfg
         self.device = device
-
+        self.nms_max_num = nms_max_num
         if self.cfg.model.arch.backbone.name == "RepVGG":
             deploy_config = self.cfg.model
             deploy_config.arch.backbone.update({"deploy": True})
@@ -41,9 +38,33 @@ def __init__(self, cfg, model, device="cuda"):
 
         self.model = model.to(device).eval()
 
+        for para in self.model.parameters():
+            para.requires_grad = False
+
         self.pipeline = Pipeline(self.cfg.data.val.pipeline, self.cfg.data.val.keep_ratio)
+        self.traced_model = None
+
+    def trace_model(self, dummy_input):
+        self.traced_model = torch.jit.trace(self, dummy_input)
+        return True
+
+    def script_model(self, img, height, width, warp_matrix):
+        preds = self.traced_model(img, height, width, warp_matrix)
+        scripted_model = self.postprocessing(preds, img, height, width, warp_matrix)
+        return scripted_model
 
-    def inference(self, img, verbose=True):
+    def forward(self, img, height=torch.tensor(0), width=torch.tensor(0), warp_matrix=torch.tensor(0)):
+        if torch.jit.is_scripting():
+            return self.script_model(img, height, width, warp_matrix)
+        # In tracing (JIT and ONNX optimization) we must run the pipeline before the graph: it needs
+        # cv2, which is installed with the cxx11 ABI, while torch is built with the pre-cxx11 ABI.
+        meta = {"img": img}
+        meta["img"] = divisible_padding(meta["img"], divisible=torch.tensor(32))
+        with torch.no_grad():
+            results = self.model.inference(meta)
+        return results
+
+    def preprocessing(self, img):
         img_info = {"id": 0}
         height, width = img.shape[:2]
         img_info["height"] = height
@@ -51,19 +72,16 @@ def inference(self, img, verbose=True):
         meta = dict(img_info=img_info, raw_img=img, img=img)
         meta = self.pipeline(None, meta, self.cfg.data.val.input_size)
         meta["img"] = torch.from_numpy(meta["img"].transpose(2, 0, 1)).to(self.device)
-        meta = naive_collate([meta])
-        meta["img"] = stack_batch_img(meta["img"], divisible=32)
-        with torch.no_grad():
-            results = self.model.inference(meta, verbose)
-        return meta, results
+        _input = meta["img"]
+        _height = torch.tensor(height)
+        _width = torch.tensor(width)
+        _warp_matrix = torch.from_numpy(meta["warp_matrix"])
+
+        return _input, _height, _width, _warp_matrix
 
 
-def get_image_list(path):
-    image_names = []
-    for maindir, subdir, file_name_list in os.walk(path):
-        for filename in file_name_list:
-            apath = os.path.join(maindir, filename)
-            ext = 
os.path.splitext(apath)[1] - if ext in image_ext: - image_names.append(apath) - return image_names + def postprocessing(self, preds, input, height, width, warp_matrix): + meta = {"height": height, "width": width, 'img': input, 'warp_matrix': warp_matrix} + meta["img"] = divisible_padding(meta["img"], divisible=torch.tensor(32)) + res = self.model.head.post_process(preds, meta, nms_max_num=self.nms_max_num) + return res diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py index 518c0af01b..a400ecff29 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py @@ -42,15 +42,15 @@ def forward_train(self, gt_meta): fpn_feat = self.fpn(feat) if self.epoch >= self.detach_epoch: aux_fpn_feat = self.aux_fpn([f.detach() for f in feat]) - dual_fpn_feat = ( + dual_fpn_feat = [ torch.cat([f.detach(), aux_f], dim=1) for f, aux_f in zip(fpn_feat, aux_fpn_feat) - ) + ] else: aux_fpn_feat = self.aux_fpn(feat) - dual_fpn_feat = ( + dual_fpn_feat = [ torch.cat([f, aux_f], dim=1) for f, aux_f in zip(fpn_feat, aux_fpn_feat) - ) + ] head_out = self.head(fpn_feat) aux_head_out = self.aux_head(dual_fpn_feat) loss, loss_states = self.head.loss(head_out, gt_meta, aux_preds=aux_head_out) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py index e1ce7a650e..425a0a6154 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py @@ -15,6 +15,7 @@ import torch import torch.nn as nn +from typing import Dict from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone import build_backbone from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn import build_fpn from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head import build_head @@ -43,11 +44,10 @@ def forward(self, x): x = self.head(x) return x - def inference(self, meta, verbose=True): + def inference(self, meta: Dict[str, torch.Tensor]): with torch.no_grad(): preds = self(meta["img"]) - results = self.head.post_process(preds, meta) - return results + return preds def forward_train(self, gt_meta): preds = self(gt_meta["img"]) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py index 17cd08402e..ef8c1cd368 100755 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py @@ -52,6 +52,7 @@ def __init__( activation=activation, ) + @torch.jit.unused def forward(self, x): x = self.in_conv(x) x1 = self.mid_conv(x) @@ -96,6 +97,7 @@ def __init__( activation=activation, ) + @torch.jit.unused def forward(self, x): x = self.in_conv(x) x1 = self.res_blocks(x) @@ -145,13 +147,14 @@ def __init__( self.stages.append(stage) self._init_weight() + @torch.jit.unused def forward(self, x): output = [] 
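+        # Collect only the feature maps of the stages listed in out_stages; they are
+        # returned as a list (not a tuple) so the output type stays scripting-friendly.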
        for i, stage in enumerate(self.stages):
             x = stage(x)
             if i in self.out_stages:
                 output.append(x)
-        return tuple(output)
+        return output
 
     def _init_weight(self):
         for m in self.modules():
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py
index 9cd6e41baf..6d2f6d4d55 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py
@@ -123,7 +123,8 @@ def __init__(
         )
         self._relu = act_layers(activation)
 
-    def forward(self, x, drop_connect_rate=None):
+    @torch.jit.unused
+    def forward(self, x, drop_connect_rate: float = None):
         """
         :param x: input tensor
         :param drop_connect_rate: drop connect rate (float, between 0 and 1)
@@ -148,7 +149,7 @@ def forward(self, x, drop_connect_rate=None):
         if self.id_skip and self.stride == 1 and self.input_filters == self.output_filters:
             if drop_connect_rate:
                 x = drop_connect(x, drop_connect_rate, training=self.training)
-            x += identity  # skip connection
+            x = x + identity  # skip connection
         return x
 
 
@@ -246,6 +247,7 @@ def __init__(
             self.blocks.append(stage)
         self._initialize_weights(pretrain)
 
+    @torch.jit.unused
     def forward(self, x):
         x = self.stem(x)
         output = []
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py
index 2e8f59f001..923c8acb27 100644
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py
@@ -10,7 +10,6 @@
 """
 import logging
 import math
-import warnings
 
 import torch
 import torch.nn as nn
@@ -41,7 +40,7 @@ def _make_divisible(v, divisor, min_value=None):
     new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
     # Make sure that round down does not go down by more than 10%.
     if new_v < 0.9 * v:
-        new_v += divisor
+        new_v = new_v + divisor
     return new_v
 
 
@@ -195,6 +194,7 @@ def __init__(
                 nn.BatchNorm2d(out_chs),
             )
 
+    @torch.jit.unused
     def forward(self, x):
         residual = x
 
@@ -213,7 +213,7 @@ def forward(self, x):
 
         # 2nd ghost bottleneck
         x = self.ghost2(x)
-        x += self.shortcut(residual)
+        x = x + self.shortcut(residual)
         return x
 
 
@@ -224,7 +224,6 @@ def __init__(
         out_stages=(4, 6, 9),
         activation="ReLU",
         pretrain=True,
-        act=None,
     ):
         super(GhostNet, self).__init__()
         assert set(out_stages).issubset(i for i in range(10))
@@ -262,11 +261,6 @@ def __init__(
         # ------conv+bn+act----------# 9 1/32
 
         self.activation = activation
-        if act is not None:
-            warnings.warn(
-                "Warning! act argument has been deprecated, " "use activation instead!"
- ) - self.activation = act # building first layer output_channel = _make_divisible(16 * width_mult, 4) @@ -317,7 +311,7 @@ def forward(self, x): x = self.blocks[i](x) if i in self.out_stages: output.append(x) - return tuple(output) + return output def _initialize_weights(self, pretrain=True): print("init weights...") diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py index 19fcae379e..a08f4abb38 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py @@ -1,7 +1,6 @@ from __future__ import absolute_import, division, print_function -import warnings - +import torch.jit import torch.nn as nn from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers @@ -79,7 +78,6 @@ def __init__( out_stages=(1, 2, 4, 6), last_channel=1280, activation="ReLU", - act=None, ): super(MobileNetV2, self).__init__() # TODO: support load torchvison pretrained weight @@ -89,11 +87,6 @@ def __init__( input_channel = 32 self.last_channel = last_channel self.activation = activation - if act is not None: - warnings.warn( - "Warning! act argument has been deprecated, " "use activation instead!" - ) - self.activation = act self.interverted_residual_setting = [ # t, c, n, s [1, 16, 1, 1], @@ -154,6 +147,7 @@ def build_mobilenet_stage(self, stage_num): stage = nn.Sequential(*stage) return stage + @torch.jit.unused def forward(self, x): x = self.first_layer(x) output = [] @@ -162,8 +156,7 @@ def forward(self, x): x = stage(x) if i in self.out_stages: output.append(x) - - return tuple(output) + return output def _initialize_weights(self): for m in self.modules(): diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py index fa30508f13..c6c090276f 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py @@ -176,6 +176,7 @@ def _make_stage(self, planes, num_blocks, stride): self.cur_layer_idx += 1 return nn.Sequential(*blocks) + @torch.jit.unused def forward(self, x): x = self.stage0(x) output = [] @@ -184,7 +185,7 @@ def forward(self, x): x = stage(x) if i in self.out_stages: output.append(x) - return tuple(output) + return output def repvgg_model_convert(model, deploy_model, save_path=None): diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py index cbd84f7546..d4cdacb0b7 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, division, print_function +import torch.jit import torch.nn as nn import torch.utils.model_zoo as model_zoo @@ -99,8 +100,6 @@ def fill_fc_weights(layers): for m in layers.modules(): if isinstance(m, nn.Conv2d): nn.init.normal_(m.weight, std=0.001) - # torch.nn.init.kaiming_normal_(m.weight.data, 
nonlinearity='relu') - # torch.nn.init.xavier_normal_(m.weight.data) if m.bias is not None: nn.init.constant_(m.bias, 0) @@ -161,6 +160,7 @@ def _make_layer(self, block, planes, blocks, stride=1): return nn.Sequential(*layers) + @torch.jit.unused def forward(self, x): x = self.conv1(x) x = self.bn1(x) @@ -173,7 +173,7 @@ def forward(self, x): if i in self.out_stages: output.append(x) - return tuple(output) + return output def init_weights(self, pretrain=True): if pretrain: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py index 013f22a8c1..75a322f179 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py @@ -14,11 +14,11 @@ def channel_shuffle(x, groups): # type: (torch.Tensor, int) -> torch.Tensor - batchsize, num_channels, height, width = x.data.size() - channels_per_group = num_channels // groups + batchsize, num_channels, height, width = x.size() + channels_per_group = int(num_channels/groups) # reshape - x = x.view(batchsize, groups, channels_per_group, height, width) + x = x.view([batchsize, groups, channels_per_group, height, width]) x = torch.transpose(x, 1, 2).contiguous() @@ -173,6 +173,7 @@ def __init__( self.stage4.add_module("conv5", conv5) self._initialize_weights(pretrain) + @torch.jit.unused def forward(self, x): x = self.conv1(x) x = self.maxpool(x) @@ -182,7 +183,7 @@ def forward(self, x): x = stage(x) if i in self.out_stages: output.append(x) - return tuple(output) + return output def _initialize_weights(self, pretrain=True): print("init weights...") diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py index 4549c7409e..f373f9f5d6 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py @@ -1,6 +1,6 @@ # Modification 2020 RangiLyu # Copyright 2018-2019 Open-MMLab. - +import torch.jit # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -15,6 +15,8 @@ import torch.nn as nn import torch.nn.functional as F +from torch import Tensor +from typing import List from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import xavier_init @@ -72,7 +74,8 @@ def init_weights(self): if isinstance(m, nn.Conv2d): xavier_init(m, distribution="uniform") - def forward(self, inputs): + @torch.jit.unused + def forward(self, inputs: List[Tensor]): assert len(inputs) == len(self.in_channels) # build laterals @@ -84,17 +87,10 @@ def forward(self, inputs): # build top-down path used_backbone_levels = len(laterals) for i in range(used_backbone_levels - 1, 0, -1): - laterals[i - 1] += F.interpolate( + laterals[i - 1] = laterals[i - 1] + F.interpolate( laterals[i], scale_factor=2, mode="bilinear" ) # build outputs - outs = [ - # self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels) - laterals[i] - for i in range(used_backbone_levels) - ] - return tuple(outs) - - -# if __name__ == '__main__': + outs = [laterals[i] for i in range(used_backbone_levels)] + return outs diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py index 76e043179c..cf03e3fb4e 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py @@ -13,6 +13,8 @@ # limitations under the License. import torch import torch.nn as nn +from torch import Tensor +from typing import List from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.ghostnet import GhostBottleneck from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule, DepthwiseConvModule @@ -65,6 +67,7 @@ def __init__( ) self.blocks = nn.Sequential(*blocks) + @torch.jit.unused def forward(self, x): out = self.blocks(x) if self.use_res: @@ -118,7 +121,7 @@ def __init__( conv = DepthwiseConvModule if use_depthwise else ConvModule # build top-down blocks - self.upsample = nn.Upsample(**upsample_cfg) + self.upsample = nn.Upsample(**upsample_cfg, align_corners=False) self.reduce_layers = nn.ModuleList() for idx in range(len(in_channels)): self.reduce_layers.append( @@ -198,12 +201,13 @@ def __init__( ) ) - def forward(self, inputs): + @torch.jit.unused + def forward(self, inputs: List[Tensor]): """ Args: - inputs (tuple[Tensor]): input features. + inputs (List[Tensor]): input features. Returns: - tuple[Tensor]: multi level features. + List[Tensor]: multi level features. 
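+            (Illustrative note: with the settings used by the configs in this patch,
+            e.g. in_channels=[176, 352, 704], out_channels=128 and num_extra_level=1,
+            three backbone maps go in and four maps with 128 channels come out, one
+            per stride in [8, 16, 32, 64].)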
""" assert len(inputs) == len(self.in_channels) inputs = [ @@ -241,4 +245,4 @@ def forward(self, inputs): ): outs.append(extra_in_layer(inputs[-1]) + extra_out_layer(outs[-1])) - return tuple(outs) + return outs diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py index c12482f294..8bb2114b76 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py @@ -13,7 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +import torch.jit import torch.nn.functional as F +from torch import Tensor +from typing import List from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.fpn import FPN @@ -61,7 +64,8 @@ def __init__( ) self.init_weights() - def forward(self, inputs): + @torch.jit.unused + def forward(self, inputs: List[Tensor]): """Forward function.""" assert len(inputs) == len(self.in_channels) @@ -74,8 +78,8 @@ def forward(self, inputs): # build top-down path used_backbone_levels = len(laterals) for i in range(used_backbone_levels - 1, 0, -1): - laterals[i - 1] += F.interpolate( - laterals[i], scale_factor=2, mode="bilinear" + laterals[i - 1] = laterals[i - 1] + F.interpolate( + laterals[i], scale_factor=2.0, mode="bilinear" ) # build outputs @@ -84,11 +88,10 @@ def forward(self, inputs): # part 2: add bottom-up path for i in range(0, used_backbone_levels - 1): - inter_outs[i + 1] += F.interpolate( + inter_outs[i + 1] = inter_outs[i + 1] + F.interpolate( inter_outs[i], scale_factor=0.5, mode="bilinear" ) - outs = [] - outs.append(inter_outs[0]) + outs = [inter_outs[0]] outs.extend([inter_outs[i] for i in range(1, used_backbone_levels)]) - return tuple(outs) + return outs diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py index 42efd128b9..b079dde44f 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py @@ -15,6 +15,8 @@ import torch import torch.nn as nn import torch.nn.functional as F +from torch import Tensor +from typing import List from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init @@ -92,7 +94,8 @@ def init_weights(self): elif isinstance(m, nn.Conv2d): normal_init(m, 0.01) - def forward(self, inputs): + @torch.jit.unused + def forward(self, inputs: List[Tensor]): assert len(inputs) == len(self.in_channels) # build laterals @@ -118,4 +121,4 @@ def forward(self, inputs): laterals[1] + mid_lvl, laterals[2] + F.interpolate(mid_lvl, size=laterals[2].shape[2:], mode="bilinear"), ] - return tuple(outs) + return outs diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py index e26e083b37..79e215ce0e 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py +++ 
b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py @@ -5,6 +5,8 @@ import torch.distributed as dist import torch.nn as nn import torch.nn.functional as F +from torch import Tensor +from typing import List, Dict, Tuple from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util import ( bbox2distance, @@ -13,7 +15,8 @@ multi_apply, ) -from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import warp_boxes +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import warp_boxes,\ + scriptable_warp_boxes from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.gfocal_loss\ import DistributionFocalLoss, QualityFocalLoss from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.iou_loss import GIoULoss, bbox_overlaps @@ -62,6 +65,11 @@ def forward(self, x): offsets from the box center in four directions, shape (N, 4). """ shape = x.size() + if torch.jit.is_scripting(): + x = F.softmax(x.reshape(shape[0], shape[1], 4, self.reg_max + 1), dim=-1) + x = F.linear(x, self.project.type_as(x)).reshape(shape[0], shape[1], 4) + return x + x = F.softmax(x.reshape(*shape[:-1], 4, self.reg_max + 1), dim=-1) x = F.linear(x, self.project.type_as(x)).reshape(*shape[:-1], 4) return x @@ -185,13 +193,11 @@ def init_weights(self): normal_init(self.gfl_cls, std=0.01, bias=bias_cls) normal_init(self.gfl_reg, std=0.01) - def forward(self, feats): - if torch.onnx.is_in_onnx_export(): - return self._forward_onnx(feats) + def forward(self, feats: List[Tensor]): outputs = [] - for x, scale in zip(feats, self.scales): - cls_feat = x - reg_feat = x + for idx, scale in enumerate(self.scales): + cls_feat = feats[idx] + reg_feat = feats[idx] for cls_conv in self.cls_convs: cls_feat = cls_conv(cls_feat) for reg_conv in self.reg_convs: @@ -371,7 +377,8 @@ def target_assign( ): """ Assign target for a batch of images. 
-        :param batch_size: num of images in one batch
+        :param cls_preds: class predictions for the images in one batch
+        :param reg_preds: bbox predictions for the images in one batch
         :param featmap_sizes: A list of all grid cell boxes in all image
         :param gt_bboxes_list: A list of ground truth boxes in all image
         :param gt_bboxes_ignore_list: A list of all ignored boxes in all image
@@ -405,8 +412,6 @@ def target_assign(
         if gt_labels_list is None:
             gt_labels_list = [None for _ in range(batch_size)]
         # target assign on all images, get list of tensors
-        # list length = batch size
-        # tensor first dim = num of all grid cell
         (
             all_grid_cells,
             all_labels,
@@ -449,6 +454,7 @@ def target_assign(
             num_total_neg,
         )
 
+    @torch.no_grad()
     def target_assign_single_img(
         self, grid_cells, num_level_cells, gt_bboxes, gt_bboxes_ignore, gt_labels
     ):
@@ -480,7 +486,7 @@ def target_assign_single_img(
         label_weights = grid_cells.new_zeros(num_cells, dtype=torch.float)
 
         if len(pos_inds) > 0:
-            pos_bbox_targets = pos_gt_bboxes
+            pos_bbox_targets = pos_gt_bboxes.float()
             bbox_targets[pos_inds, :] = pos_bbox_targets
             bbox_weights[pos_inds, :] = 1.0
             if gt_labels is None:
@@ -505,6 +511,7 @@ def target_assign_single_img(
         )
 
     def sample(self, assign_result, gt_bboxes):
+        """Sample positive and negative bboxes."""
         pos_inds = (
             torch.nonzero(assign_result.gt_inds > 0, as_tuple=False)
             .squeeze(-1)
@@ -527,11 +534,63 @@ def sample(self, assign_result, gt_bboxes):
             pos_gt_bboxes = gt_bboxes[pos_assigned_gt_inds, :]
         return pos_inds, neg_inds, pos_gt_bboxes, pos_assigned_gt_inds
 
-    def post_process(self, preds, meta):
+    def post_process(self, preds, meta: Dict[str, Tensor], mode: str = "infer", nms_max_num: int = 100):
+        """Prediction results postprocessing. Decode bboxes and rescale
+        to original image size.
+        Args:
+            preds (Tensor): Prediction output.
+            meta (dict): Meta info.
+            mode (str): Determines whether the batch- and numpy-based "eval" path or the
+                tensor-only "infer" path (used for scripting) is taken.
+            nms_max_num (int): Maximum number of bounding boxes retained after the nms.
+        """
+        if mode == "eval" and not torch.jit.is_scripting():
+            # Evaluation uses batches and numpy, while inference tries to use
+            # tensors exclusively, for better optimization during scripting.
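+            # "eval" returns a per-image dict of per-class results (see _eval_post_process),
+            # while "infer" returns a single zero-padded (num_classes, max_count, 5) tensor.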
+ return self._eval_post_process(preds, meta) + + cls_scores, bbox_preds = preds.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 + ) + results = self.get_bboxes(cls_scores, bbox_preds, meta["img"], nms_max_num=nms_max_num) + (det_bboxes, det_labels) = results + + det_bboxes[:, :4] = scriptable_warp_boxes( + det_bboxes[:, :4], + torch.linalg.inv(meta["warp_matrix"]), meta["width"], meta["height"] + ) + + # constant output of model every time for tracing + if torch.jit.is_scripting(): + max_count = nms_max_num + else: + _, frequencies = torch.unique(det_labels, return_counts=True) + max_count = frequencies[torch.argmax(frequencies)].item() + + det_result = torch.zeros((self.num_classes, max_count, 5)) + for i in range(self.num_classes): + inds = det_labels == i + det = torch.cat(( + det_bboxes[inds, :4], + det_bboxes[inds, 4:5] + ), + dim=1 + ) + + pad = det.new_zeros((max_count - det.size(0), 5)) + det = torch.cat([det, pad], dim=0) + det_result[i] = det + return det_result + + def most_common_tensor(self, tensor): + _, frequencies = torch.unique(tensor, return_counts=True) + max_count = frequencies[torch.argmax(frequencies)].item() + return max_count + + def _eval_post_process(self, preds, meta): cls_scores, bbox_preds = preds.split( [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 ) - result_list = self.get_bboxes(cls_scores, bbox_preds, meta) + result_list = self.get_bboxes(cls_scores, bbox_preds, meta["img"], mode="eval") det_results = {} warp_matrixes = ( meta["warp_matrix"] @@ -576,67 +635,106 @@ def post_process(self, preds, meta): det_results[img_id] = det_result return det_results - def get_bboxes(self, cls_preds, reg_preds, img_metas): + def get_bboxes(self, cls_preds, reg_preds, input_img, mode: str = "infer", nms_max_num: int = 100): """Decode the outputs to bboxes. Args: cls_preds (Tensor): Shape (num_imgs, num_points, num_classes). reg_preds (Tensor): Shape (num_imgs, num_points, 4 * (regmax + 1)). - img_metas (dict): Dict of image info. - + input_img (Tensor): Input image to net. + mode (str): Determines if it uses batches and numpy or tensors for scripting. + nms_max_num (int): Determines the maximum number of bounding boxes that will be retained following the nms. Returns: results_list (list[tuple]): List of detection bboxes and labels. 
""" device = cls_preds.device b = cls_preds.shape[0] - input_height, input_width = img_metas["img"].shape[2:] + input_height, input_width = input_img.shape[2:] input_shape = (input_height, input_width) featmap_sizes = [ - (math.ceil(input_height / stride), math.ceil(input_width) / stride) + (int(math.ceil(input_height / stride)), int(math.ceil(input_width / stride))) for stride in self.strides ] # get grid cells of one image mlvl_center_priors = [] for i, stride in enumerate(self.strides): - y, x = self.get_single_level_center_point( - featmap_sizes[i], stride, torch.float32, device + proiors = self.get_single_level_center_priors( + b, featmap_sizes[i], stride, torch.float32, device ) - strides = x.new_full((x.shape[0],), stride) - proiors = torch.stack([x, y, strides, strides], dim=-1) - mlvl_center_priors.append(proiors.unsqueeze(0).repeat(b, 1, 1)) + mlvl_center_priors.append(proiors) center_priors = torch.cat(mlvl_center_priors, dim=1) dis_preds = self.distribution_project(reg_preds) * center_priors[..., 2, None] bboxes = distance2bbox(center_priors[..., :2], dis_preds, max_shape=input_shape) - scores = cls_preds.sigmoid() + cls_preds = cls_preds.sigmoid() + # add a dummy background class at the end of all labels + if torch.jit.is_scripting() or mode == "infer": + # for faster inference and jit scripting in most common cases we do not try to go through for statement + score, bbox = cls_preds[0], bboxes[0] + padding = score.new_zeros(score.shape[0], 1) + score = torch.cat([score, padding], dim=1) + + return multiclass_nms(bbox, score, score_thr=0.05, nms_cfg=dict(iou_threshold=0.6), max_num=nms_max_num) + result_list = [] for i in range(b): # add a dummy background class at the end of all labels # same with mmdetection2.0 - score, bbox = scores[i], bboxes[i] + score, bbox = cls_preds[i], bboxes[i] padding = score.new_zeros(score.shape[0], 1) score = torch.cat([score, padding], dim=1) results = multiclass_nms( bbox, score, score_thr=0.05, - nms_cfg=dict(type="nms", iou_threshold=0.6), - max_num=100, + nms_cfg=dict(iou_threshold=0.6), + max_num=nms_max_num, ) result_list.append(results) return result_list - def get_single_level_center_point( - self, featmap_size, stride, dtype, device, flatten=True + def get_single_level_center_priors( + self, + batch_size: int, + featmap_size: Tuple[int, int], + stride: int, + dtype: torch.dtype, + device: torch.device, + flatten: bool = True ): + """Generate centers of a single stage feature map. + Args: + batch_size (int): Number of images in one batch. + featmap_size (tuple[int]): height and width of the feature map + stride (int): down sample stride of the feature map + dtype (obj:`torch.dtype`): data type of the tensors + device (obj:`torch.device`): device of the tensors + flatten (bool): flatten the x and y tensors + Return: + priors (Tensor): center priors of a single level feature map. """ - Generate pixel centers of a single stage feature map. 
- :param featmap_size: height and width of the feature map
- :param stride: down sample stride of the feature map
- :param dtype: data type of the tensors
- :param device: device of the tensors
- :param flatten: flatten the x and y tensors
- :return: y and x of the center points
+ x, y = self.get_single_level_center_point(featmap_size, stride, dtype, device, flatten)
+ strides = x.new_full((x.shape[0],), stride)
+ priors = torch.stack([x, y, strides, strides], dim=-1)
+ return priors.unsqueeze(0).repeat(batch_size, 1, 1)
+
+ def get_single_level_center_point(
+ self,
+ featmap_size: Tuple[int, int],
+ stride: int,
+ dtype: torch.dtype,
+ device: torch.device,
+ flatten: bool = True
+ ):
+ """Generate pixel centers of a single stage feature map.
+ Args:
+ featmap_size (tuple[int]): height and width of the feature map
+ stride (int): down sample stride of the feature map
+ dtype (obj:`torch.dtype`): data type of the tensors
+ device (obj:`torch.device`): device of the tensors
+ flatten (bool): flatten the x and y tensors
+ Return:
+ x, y (Tuple[Tensor, Tensor]): x and y of the center points.
 """
 h, w = featmap_size
 x_range = (torch.arange(w, dtype=dtype, device=device) + 0.5) * stride
@@ -645,7 +743,7 @@ def get_single_level_center_point(
 if flatten:
 y = y.flatten()
 x = x.flatten()
- return y, x
+ return x, y
 def get_grid_cells(self, featmap_size, scale, stride, dtype, device):
 """
@@ -681,20 +779,3 @@ def grid_cells_to_center(self, grid_cells):
 cells_cx = (grid_cells[:, 2] + grid_cells[:, 0]) / 2
 cells_cy = (grid_cells[:, 3] + grid_cells[:, 1]) / 2
 return torch.stack([cells_cx, cells_cy], dim=-1)
-
- def _forward_onnx(self, feats):
- """only used for onnx export"""
- outputs = []
- for x, scale in zip(feats, self.scales):
- cls_feat = x
- reg_feat = x
- for cls_conv in self.cls_convs:
- cls_feat = cls_conv(cls_feat)
- for reg_conv in self.reg_convs:
- reg_feat = reg_conv(reg_feat)
- cls_pred = self.gfl_cls(cls_feat)
- reg_pred = scale(self.gfl_reg(reg_feat))
- cls_pred = cls_pred.sigmoid()
- out = torch.cat([cls_pred, reg_pred], dim=1)
- outputs.append(out.flatten(start_dim=2))
- return torch.cat(outputs, dim=2).permute(0, 2, 1)
diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py
index 01eac4146e..2e50867a21 100755
--- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py
+++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py
@@ -14,6 +14,8 @@
 import torch
 import torch.nn as nn
+from torch import Tensor
+from typing import List
 from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule, DepthwiseConvModule
 from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init
@@ -135,15 +137,14 @@ def init_weights(self):
 normal_init(self.gfl_reg[i], std=0.01)
 print("Finish initialize NanoDet Head.")
- def forward(self, feats):
- if torch.onnx.is_in_onnx_export():
- return self._forward_onnx(feats)
+ @torch.jit.unused
+ def forward(self, feats: List[Tensor]):
 outputs = []
- for x, cls_convs, reg_convs, gfl_cls, gfl_reg in zip(
- feats, self.cls_convs, self.reg_convs, self.gfl_cls, self.gfl_reg
- ):
- cls_feat = x
- reg_feat = x
+ for idx, (cls_convs, reg_convs, gfl_cls, gfl_reg) in enumerate(zip(
+ self.cls_convs, self.reg_convs, self.gfl_cls, self.gfl_reg
+ )):
+
cls_feat = feats[idx] + reg_feat = feats[idx] for cls_conv in cls_convs: cls_feat = cls_conv(cls_feat) for reg_conv in reg_convs: @@ -155,31 +156,6 @@ def forward(self, feats): bbox_pred = gfl_reg(reg_feat) output = torch.cat([cls_score, bbox_pred], dim=1) outputs.append(output.flatten(start_dim=2)) + outputs = torch.cat(outputs, dim=2).permute(0, 2, 1) return outputs - - def _forward_onnx(self, feats): - """only used for onnx export""" - outputs = [] - for x, cls_convs, reg_convs, gfl_cls, gfl_reg in zip( - feats, self.cls_convs, self.reg_convs, self.gfl_cls, self.gfl_reg - ): - cls_feat = x - reg_feat = x - for cls_conv in cls_convs: - cls_feat = cls_conv(cls_feat) - for reg_conv in reg_convs: - reg_feat = reg_conv(reg_feat) - if self.share_cls_reg: - output = gfl_cls(cls_feat) - cls_pred, reg_pred = output.split( - [self.num_classes, 4 * (self.reg_max + 1)], dim=1 - ) - else: - cls_pred = gfl_cls(cls_feat) - reg_pred = gfl_reg(reg_feat) - - cls_pred = cls_pred.sigmoid() - out = torch.cat([cls_pred, reg_pred], dim=1) - outputs.append(out.flatten(start_dim=2)) - return torch.cat(outputs, dim=2).permute(0, 2, 1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py index 5d853d5ecf..5e82255e39 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py @@ -3,10 +3,13 @@ import numpy as np import torch import torch.nn as nn +from torch import Tensor +from typing import List, Tuple, Dict from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util\ import bbox2distance, distance2bbox, multi_apply -from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import warp_boxes +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp \ + import warp_boxes, scriptable_warp_boxes from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.gfocal_loss \ import DistributionFocalLoss, QualityFocalLoss from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.iou_loss import GIoULoss @@ -133,15 +136,10 @@ def init_weights(self): normal_init(self.gfl_cls[i], std=0.01, bias=bias_cls) print("Finish initialize NanoDet-Plus Head.") - def forward(self, feats): - if torch.onnx.is_in_onnx_export(): - return self._forward_onnx(feats) + def forward(self, feats: List[Tensor]): outputs = [] - for feat, cls_convs, gfl_cls in zip( - feats, - self.cls_convs, - self.gfl_cls, - ): + for idx, (cls_convs, gfl_cls) in enumerate(zip(self.cls_convs, self.gfl_cls)): + feat = feats[idx] for conv in cls_convs: feat = conv(feat) output = gfl_cls(feat) @@ -166,7 +164,7 @@ def loss(self, preds, gt_meta, aux_preds=None): batch_size = preds.shape[0] input_height, input_width = gt_meta["img"].shape[2:] featmap_sizes = [ - (math.ceil(input_height / stride), math.ceil(input_width) / stride) + (math.ceil(input_height / stride), math.ceil(input_width / stride)) for stride in self.strides ] # get grid cells of one image @@ -361,17 +359,58 @@ def sample(self, assign_result, gt_bboxes): pos_gt_bboxes = gt_bboxes[pos_assigned_gt_inds, :] return pos_inds, neg_inds, pos_gt_bboxes, pos_assigned_gt_inds - def post_process(self, preds, meta): - """Prediction results post processing. 
Decode bboxes and rescale
+ def post_process(self, preds, meta: Dict[str, Tensor], mode: str = "infer", nms_max_num: int = 100):
+ """Prediction results postprocessing. Decode bboxes and rescale
 to original image size.
 Args:
 preds (Tensor): Prediction output.
 meta (dict): Meta info.
+ mode (str): Determines if it uses batches and numpy or tensors for scripting.
+ nms_max_num (int): Determines the maximum number of bounding boxes that will be retained following the nms.
 """
+ if mode == "eval" and not torch.jit.is_scripting():
+ # Inference does not use batches and tries to use
+ # tensors exclusively for better optimization during scripting.
+ return self._eval_post_process(preds, meta)
+
 cls_scores, bbox_preds = preds.split(
 [self.num_classes, 4 * (self.reg_max + 1)], dim=-1
 )
- result_list = self.get_bboxes(cls_scores, bbox_preds, meta)
+ results = self.get_bboxes(cls_scores, bbox_preds, meta["img"], nms_max_num=nms_max_num)
+ (det_bboxes, det_labels) = results
+
+ det_bboxes[:, :4] = scriptable_warp_boxes(
+ det_bboxes[:, :4],
+ torch.linalg.inv(meta["warp_matrix"]), meta["width"], meta["height"]
+ )
+
+ # constant output of model every time for tracing
+ if torch.jit.is_scripting():
+ max_count = nms_max_num
+ else:
+ _, frequencies = torch.unique(det_labels, return_counts=True)
+ max_count = frequencies[torch.argmax(frequencies)].item()
+
+ det_result = torch.zeros((self.num_classes, max_count, 5))
+ for i in range(self.num_classes):
+ inds = det_labels == i
+ det = torch.cat((
+ det_bboxes[inds, :4],
+ det_bboxes[inds, 4:5]
+ ),
+ dim=1
+ )
+
+ pad = det.new_zeros((max_count - det.size(0), 5))
+ det = torch.cat([det, pad], dim=0)
+ det_result[i] = det
+ return det_result
+
+ def _eval_post_process(self, preds, meta):
+ cls_scores, bbox_preds = preds.split(
+ [self.num_classes, 4 * (self.reg_max + 1)], dim=-1
+ )
+ result_list = self.get_bboxes(cls_scores, bbox_preds, meta["img"], mode="eval")
 det_results = {}
 warp_matrixes = (
 meta["warp_matrix"]
@@ -395,7 +434,7 @@ def post_process(self, preds, meta):
 )
 for result, img_width, img_height, img_id, warp_matrix in zip(
- result_list, img_widths, img_heights, img_ids, warp_matrixes
+ result_list, img_widths, img_heights, img_ids, warp_matrixes
 ):
 det_result = {}
 det_bboxes, det_labels = result
@@ -416,59 +455,72 @@ def post_process(self, preds, meta):
 det_results[img_id] = det_result
 return det_results
- def get_bboxes(self, cls_preds, reg_preds, img_metas):
+ def get_bboxes(self, cls_preds, reg_preds, input_img, mode: str = "infer", nms_max_num: int = 100):
 """Decode the outputs to bboxes.
 Args:
 cls_preds (Tensor): Shape (num_imgs, num_points, num_classes).
 reg_preds (Tensor): Shape (num_imgs, num_points, 4 * (regmax + 1)).
- img_metas (dict): Dict of image info.
-
+ input_img (Tensor): Input image to net.
+ mode (str): Determines if it uses batches and numpy or tensors for scripting.
+ nms_max_num (int): Determines the maximum number of bounding boxes that will be retained following the nms.
 Returns:
 results_list (list[tuple]): List of detection bboxes and labels.
""" device = cls_preds.device b = cls_preds.shape[0] - input_height, input_width = img_metas["img"].shape[2:] + input_height, input_width = input_img.shape[2:] input_shape = (input_height, input_width) featmap_sizes = [ - (math.ceil(input_height / stride), math.ceil(input_width) / stride) + (int(math.ceil(input_height / stride)), int(math.ceil(input_width / stride))) for stride in self.strides ] # get grid cells of one image - mlvl_center_priors = [ - self.get_single_level_center_priors( - b, - featmap_sizes[i], - stride, - dtype=torch.float32, - device=device, + mlvl_center_priors = [] + for i, stride in enumerate(self.strides): + proiors = self.get_single_level_center_priors( + b, featmap_sizes[i], stride, torch.float32, device ) - for i, stride in enumerate(self.strides) - ] + mlvl_center_priors.append(proiors) + center_priors = torch.cat(mlvl_center_priors, dim=1) dis_preds = self.distribution_project(reg_preds) * center_priors[..., 2, None] bboxes = distance2bbox(center_priors[..., :2], dis_preds, max_shape=input_shape) - scores = cls_preds.sigmoid() + cls_preds = cls_preds.sigmoid() + # add a dummy background class at the end of all labels + if torch.jit.is_scripting() or mode == "infer": + # for faster inference and jit scripting in most common cases we do not try to go through for statement + score, bbox = cls_preds[0], bboxes[0] + padding = score.new_zeros(score.shape[0], 1) + score = torch.cat([score, padding], dim=1) + + return multiclass_nms(bbox, score, score_thr=0.05, nms_cfg=dict(iou_threshold=0.6), max_num=nms_max_num) + result_list = [] for i in range(b): # add a dummy background class at the end of all labels # same with mmdetection2.0 - score, bbox = scores[i], bboxes[i] + score, bbox = cls_preds[i], bboxes[i] padding = score.new_zeros(score.shape[0], 1) score = torch.cat([score, padding], dim=1) results = multiclass_nms( bbox, score, score_thr=0.05, - nms_cfg=dict(type="nms", iou_threshold=0.6), - max_num=100, + nms_cfg=dict(iou_threshold=0.6), + max_num=nms_max_num, ) result_list.append(results) return result_list def get_single_level_center_priors( - self, batch_size, featmap_size, stride, dtype, device + self, + batch_size: int, + featmap_size: Tuple[int, int], + stride: int, + dtype: torch.dtype, + device: torch.device, + flatten: bool = True ): """Generate centers of a single stage feature map. Args: @@ -477,6 +529,7 @@ def get_single_level_center_priors( stride (int): down sample stride of the feature map dtype (obj:`torch.dtype`): data type of the tensors device (obj:`torch.device`): device of the tensors + flatten (bool): flatten the x and y tensors Return: priors (Tensor): center priors of a single level feature map. 
""" @@ -484,27 +537,9 @@ def get_single_level_center_priors( x_range = (torch.arange(w, dtype=dtype, device=device)) * stride y_range = (torch.arange(h, dtype=dtype, device=device)) * stride y, x = torch.meshgrid(y_range, x_range) - y = y.flatten() - x = x.flatten() + if flatten: + y = y.flatten() + x = x.flatten() strides = x.new_full((x.shape[0],), stride) proiors = torch.stack([x, y, strides, strides], dim=-1) return proiors.unsqueeze(0).repeat(batch_size, 1, 1) - - def _forward_onnx(self, feats): - """only used for onnx export""" - outputs = [] - for feat, cls_convs, gfl_cls in zip( - feats, - self.cls_convs, - self.gfl_cls, - ): - for conv in cls_convs: - feat = conv(feat) - output = gfl_cls(feat) - cls_pred, reg_pred = output.split( - [self.num_classes, 4 * (self.reg_max + 1)], dim=1 - ) - cls_pred = cls_pred.sigmoid() - out = torch.cat([cls_pred, reg_pred], dim=1) - outputs.append(out.flatten(start_dim=2)) - return torch.cat(outputs, dim=2).permute(0, 2, 1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py index b3d4d95ff7..5a8e1a737a 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py @@ -1,6 +1,9 @@ import torch import torch.nn as nn +from torch import Tensor +from typing import List + from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.scale import Scale @@ -83,11 +86,12 @@ def init_weights(self): normal_init(self.gfl_cls, std=0.01, bias=bias_cls) normal_init(self.gfl_reg, std=0.01) - def forward(self, feats): + @torch.jit.unused + def forward(self, feats: List[Tensor]): outputs = [] - for x, scale in zip(feats, self.scales): - cls_feat = x - reg_feat = x + for idx, scale in enumerate(self.scales): + cls_feat = feats[idx] + reg_feat = feats[idx] for cls_conv in self.cls_convs: cls_feat = cls_conv(cls_feat) for reg_conv in self.reg_convs: diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py index af0b4251c2..b089a8d1f4 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py @@ -101,6 +101,7 @@ def __init__(self, use_sigmoid=True, beta=2.0, reduction="mean", loss_weight=1.0 self.reduction = reduction self.loss_weight = loss_weight + @torch.jit.unused def forward( self, pred, target, weight=None, avg_factor=None, reduction_override=None ): @@ -151,6 +152,7 @@ def __init__(self, reduction="mean", loss_weight=1.0): self.reduction = reduction self.loss_weight = loss_weight + @torch.jit.unused def forward( self, pred, target, weight=None, avg_factor=None, reduction_override=None ): diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py index 7ee9d324a3..73809580da 100644 --- 
a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py @@ -449,6 +449,7 @@ def __init__(self, eps=1e-6, reduction="mean", loss_weight=1.0): self.reduction = reduction self.loss_weight = loss_weight + @torch.jit.unused def forward( self, pred, diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py index 693e6fd0fe..0e55d157b6 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py @@ -115,9 +115,10 @@ def __init__( # Use msra init by default self.init_weights() + @torch.jit.unused @property def norm(self): - if self.norm_name: + if self.norm_name is not None: return getattr(self, self.norm_name) else: return None @@ -131,13 +132,14 @@ def init_weights(self): if self.with_norm: constant_init(self.norm, 1, bias=0) - def forward(self, x, norm=True): + @torch.jit.unused + def forward(self, x, norm: bool = True): for layer in self.order: if layer == "conv": x = self.conv(x) - elif layer == "norm" and norm and self.with_norm: + elif layer == "norm" and (norm is not None) and (self.with_norm is not None) and (self.norm is not None): x = self.norm(x) - elif layer == "act" and self.activation: + elif layer == "act" and (self.activation is not None): x = self.act(x) return x @@ -211,7 +213,6 @@ def __init__( # norm layer is after conv layer _, self.dwnorm = build_norm_layer(norm_cfg, in_channels) _, self.pwnorm = build_norm_layer(norm_cfg, out_channels) - # build activation layer if self.activation: self.act = act_layers(self.activation) @@ -230,12 +231,17 @@ def init_weights(self): constant_init(self.dwnorm, 1, bias=0) constant_init(self.pwnorm, 1, bias=0) - def forward(self, x, norm=True): + def forward(self, x): for layer_name in self.order: - if layer_name != "act": - layer = self.__getattr__(layer_name) - x = layer(x) - elif layer_name == "act" and self.activation: + if layer_name == "depthwise": + x = self.depthwise(x) + elif layer_name == "pointwise": + x = self.pointwise(x) + elif layer_name == "dwnorm" and (self.dwnorm is not None): + x = self.dwnorm(x) + elif layer_name == "pwnorm" and (self.pwnorm is not None): + x = self.pwnorm(x) + elif layer_name == "act" and (self.activation is not None): x = self.act(x) return x diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py index e5fa3e216c..abb97a62ca 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py @@ -1,9 +1,15 @@ import torch from torchvision.ops import nms +from typing import Dict def multiclass_nms( - multi_bboxes, multi_scores, score_thr, nms_cfg, max_num=-1, score_factors=None + multi_bboxes, + multi_scores, + score_thr: float, + nms_cfg: Dict[str, float], + max_num: int = -1, + score_factors: torch.Tensor = torch.empty(0) ): """NMS for multi-class bboxes. @@ -13,7 +19,7 @@ def multiclass_nms( contains scores of the background class, but this will be ignored. score_thr (float): bbox threshold, bboxes with scores lower than it will not be considered. 
- nms_thr (float): NMS IoU threshold + nms_cfg (dictionary): dictionary of the type and threshold of IoU max_num (int): if there are more than max_num bboxes after NMS, only top max_num will be kept. score_factors (Tensor): The factors multiplied to scores before @@ -40,20 +46,19 @@ def multiclass_nms( bboxes = torch.masked_select( bboxes, torch.stack((valid_mask, valid_mask, valid_mask, valid_mask), -1) ).view(-1, 4) - if score_factors is not None: + if not (score_factors.numel() == 0): scores = scores * score_factors[:, None] scores = torch.masked_select(scores, valid_mask) - labels = valid_mask.nonzero(as_tuple=False)[:, 1] + + # for scripting + labels = torch.tensor(0).to(valid_mask.device).long() + torch.nonzero(valid_mask, out=labels) + # labels = valid_mask.nonzero(as_tuple=False)#[:, 1] + labels = labels[:, 1] if bboxes.numel() == 0: bboxes = multi_bboxes.new_zeros((0, 5)) labels = multi_bboxes.new_zeros((0,), dtype=torch.long) - - if torch.onnx.is_in_onnx_export(): - raise RuntimeError( - "[ONNX Error] Can not record NMS " - "as it has not been executed this time" - ) return bboxes, labels dets, keep = batched_nms(bboxes, scores, labels, nms_cfg) @@ -65,7 +70,7 @@ def multiclass_nms( return dets, labels[keep] -def batched_nms(boxes, scores, idxs, nms_cfg, class_agnostic=False): +def batched_nms(boxes, scores, idxs, nms_cfg: Dict[str, float], class_agnostic: bool = False): """Performs non-maximum suppression in a batched fashion. Modified from https://github.com/pytorch/vision/blob /505cd6957711af790211896d32b40291bea1bc21/torchvision/ops/boxes.py#L39. @@ -94,27 +99,32 @@ def batched_nms(boxes, scores, idxs, nms_cfg, class_agnostic=False): tuple: kept dets and indice. """ nms_cfg_ = nms_cfg.copy() - class_agnostic = nms_cfg_.pop("class_agnostic", class_agnostic) if class_agnostic: boxes_for_nms = boxes else: max_coordinate = boxes.max() offsets = idxs.to(boxes) * (max_coordinate + 1) boxes_for_nms = boxes + offsets[:, None] - nms_cfg_.pop("type", "nms") - split_thr = nms_cfg_.pop("split_thr", 10000) - if len(boxes_for_nms) < split_thr: - keep = nms(boxes_for_nms, scores, **nms_cfg_) + split_thr = nms_cfg_.pop("split_thr", 10000.0) + if boxes_for_nms.shape[0] < split_thr: + keep = nms(boxes_for_nms, scores, nms_cfg_["iou_threshold"]) boxes = boxes[keep] scores = scores[keep] else: total_mask = scores.new_zeros(scores.size(), dtype=torch.bool) for id in torch.unique(idxs): - mask = (idxs == id).nonzero(as_tuple=False).view(-1) - keep = nms(boxes_for_nms[mask], scores[mask], **nms_cfg_) + mask = (idxs == id) + mask_out = torch.tensor(0).to(mask.device).long() + torch.nonzero(mask, out=mask_out) + mask = mask_out.view(-1) + # mask = (idxs == id).nonzero(as_tuple=False).view(-1) + keep = nms(boxes_for_nms[mask], scores[mask], nms_cfg_["iou_threshold"]) total_mask[mask[keep]] = True - keep = total_mask.nonzero(as_tuple=False).view(-1) + keep_out = torch.tensor(0).to(total_mask.device).long() + torch.nonzero(total_mask, out=keep_out) + keep = keep_out.view(-1) + # keep = total_mask.nonzero(as_tuple=False).view(-1) keep = keep[scores[keep].argsort(descending=True)] boxes = boxes[keep] scores = scores[keep] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py index 24e2de458b..c44788d32e 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py +++ 
b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import torch.jit import torch.nn as nn from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers @@ -128,6 +129,7 @@ def __init__( ] self.encoders = nn.Sequential(*encoders) + @torch.jit.unused def forward(self, x, pos_embed): b, _, h, w = x.shape x = self.conv(x) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py index d2939d22e1..7ec2a04864 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py @@ -25,7 +25,7 @@ from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.batch_process import stack_batch_img from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util\ - import convert_avg_params, gather_results, mkdir + import convert_avg_params, gather_results, mkdir, rank_filter from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.check_point import save_model_state from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.weight_averager import build_weight_averager @@ -69,12 +69,17 @@ def forward(self, x): def predict(self, batch, batch_idx=None, dataloader_idx=None): batch = self._preprocess_batch_input(batch) preds = self.forward(batch["img"]) - results = self.model.head.post_process(preds, batch) + results = self.model.head.post_process(preds, batch, "eval") return results - def save_current_model(self, path, logger): - save_model_state(path=path, model=self.model, weight_averager=self.weight_averager, logger=logger) + @rank_filter + def _save_current_model(self, path, verbose): + save_model_state(path=path, model=self.model, weight_averager=self.weight_averager, verbose=verbose) + def save_current_model(self, path, verbose): + save_model_state(path=path, model=self.model, weight_averager=self.weight_averager, verbose=verbose) + + @torch.jit.unused def training_step(self, batch, batch_idx): batch = self._preprocess_batch_input(batch) preds, loss, loss_states = self.model.forward_train(batch) @@ -109,7 +114,7 @@ def training_epoch_end(self, outputs: List[Any]) -> None: # save models in schedule epoches if self.current_epoch % self.cfg.schedule.val_intervals == 0: checkpoint_save_path = os.path.join(self.cfg.save_dir, "checkpoints") - mkdir(checkpoint_save_path) + mkdir(self.local_rank, checkpoint_save_path) print("===" * 10) print("checkpoint_save_path: {} \n epoch: {}".format(checkpoint_save_path, self.current_epoch)) print("===" * 10) @@ -142,7 +147,7 @@ def validation_step(self, batch, batch_idx): if self.logger: self.logger.info(log_msg) - dets = self.model.head.post_process(preds, batch) + dets = self.model.head.post_process(preds, batch, "eval") return dets def validation_epoch_end(self, validation_step_outputs): @@ -170,11 +175,15 @@ def validation_epoch_end(self, validation_step_outputs): if metric > self.save_flag: self.save_flag = metric best_save_path = os.path.join(self.cfg.save_dir, "model_best") - mkdir(best_save_path) + mkdir(self.local_rank, best_save_path) self.trainer.save_checkpoint( os.path.join(best_save_path, "model_best.ckpt") ) - 
self.save_current_model(os.path.join(best_save_path, "nanodet_model_best.pth"), logger=self.logger) + verbose = True if self.logger is not None else False + # TODO: save only if local_rank is < 0 + # self._save_current_model(self.local_rank, os.path.join(best_save_path, "nanodet_model_state_best.pth"), + # verbose=verbose) + self.save_current_model(os.path.join(best_save_path, "nanodet_model_state_best.pth"), verbose=verbose) txt_path = os.path.join(best_save_path, "eval_results.txt") with open(txt_path, "a") as f: f.write("Epoch:{}\n".format(self.current_epoch + 1)) @@ -187,9 +196,8 @@ def validation_epoch_end(self, validation_step_outputs): if self.logger: self.logger.log_metrics(eval_results, self.current_epoch + 1) else: - # self.logger.info("Skip val on rank {}".format(self.local_rank)) if self.logger: - self.logger.info("Skip val ") + self.logger.info("Skip val on rank {}".format(self.local_rank)) def test_step(self, batch, batch_idx): dets = self.predict(batch, batch_idx) @@ -207,7 +215,8 @@ def test_epoch_end(self, test_step_outputs): if all_results: if self.cfg.test_mode == "val": eval_results = self.evaluator.evaluate( - all_results, self.cfg.save_dir) + all_results, self.cfg.save_dir, rank=self.local_rank + ) txt_path = os.path.join(self.cfg.save_dir, "eval_results.txt") with open(txt_path, "a") as f: for k, v in eval_results.items(): diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py index 4b82a8c19f..b954a165db 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py @@ -1,7 +1,8 @@ import torch +from typing import Tuple -def distance2bbox(points, distance, max_shape=None): +def distance2bbox(points, distance, max_shape: Tuple[int, int]=(0, 0)): """Decode distance prediction to bounding box. Args: @@ -17,7 +18,7 @@ def distance2bbox(points, distance, max_shape=None): y1 = points[..., 1] - distance[..., 1] x2 = points[..., 0] + distance[..., 2] y2 = points[..., 1] + distance[..., 3] - if max_shape is not None: + if max_shape != (0, 0): x1 = x1.clamp(min=0, max=max_shape[1]) y1 = y1.clamp(min=0, max=max_shape[0]) x2 = x2.clamp(min=0, max=max_shape[1]) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py index 2ac516167a..fd3487f0f1 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py @@ -13,12 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.rank_filter import rank_filter from typing import Any, Dict import torch -def load_model_weight(model, checkpoint, logger=None): +def load_model_weight(model, checkpoint, verbose=None): state_dict = checkpoint["state_dict"].copy() for k in checkpoint["state_dict"]: # convert average model weights @@ -39,8 +39,8 @@ def load_model_weight(model, checkpoint, logger=None): for k in state_dict: if k in model_state_dict: if state_dict[k].shape != model_state_dict[k].shape: - if logger: - logger.log( + if verbose: + print( "Skip loading parameter {}, required shape{}, " "loaded shape{}.".format( k, model_state_dict[k].shape, state_dict[k].shape @@ -48,19 +48,18 @@ def load_model_weight(model, checkpoint, logger=None): ) state_dict[k] = model_state_dict[k] else: - if logger: - logger.log("Drop parameter {}.".format(k)) + if verbose: + print("Drop parameter {}.".format(k)) for k in model_state_dict: if not (k in state_dict): - if logger: - logger.log("No param {}.".format(k)) + if verbose: + print("No param {}.".format(k)) state_dict[k] = model_state_dict[k] model.load_state_dict(state_dict, strict=False) return model -# @rank_zero_only -# @rank_filter +@rank_filter def save_model(model, path, epoch, iter, optimizer=None): model_state_dict = ( model.module.state_dict() if hasattr(model, "module") else model.state_dict() @@ -72,11 +71,9 @@ def save_model(model, path, epoch, iter, optimizer=None): torch.save(data, path) -# @rank_zero_only -# @rank_filter -def save_model_state(path, model, weight_averager=None, logger=None): - if logger: - logger.info("Saving model to {}".format(path)) +def save_model_state(path, model, weight_averager=None, verbose=None): + if verbose: + print("Saving model to {}".format(path)) state_dict = ( weight_averager.state_dict() if weight_averager diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py index b883d8f336..bbe5f59c47 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py @@ -22,15 +22,15 @@ from pytorch_lightning.utilities import rank_zero_only from pytorch_lightning.utilities.cloud_io import get_filesystem + from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.path import mkdir class Logger: def __init__(self, local_rank, save_dir="./", use_tensorboard=True): - # mkdir(local_rank, save_dir) - mkdir(save_dir) + mkdir(local_rank, save_dir) self.rank = local_rank - fmt = ("[%(name)s] [%(asctime)s] %(levelname)s: %(message)s") + fmt = "[%(name)s] [%(asctime)s] %(levelname)s: %(message)s" logging.basicConfig( level=logging.INFO, filename=os.path.join(save_dir, "logs.txt"), @@ -61,6 +61,10 @@ def log(self, string): if self.rank < 1: logging.info(string) + def info(self, string): + if self.rank < 1: + logging.info(string) + def scalar_summary(self, tag, phase, value, step): if self.rank < 1: self.writer.add_scalars(tag, {phase: value}, step) @@ -173,8 +177,7 @@ def _init_logger(self): ch = logging.StreamHandler() ch.setLevel(logging.INFO) # set console formatter - - c_fmt = ("[%(name)s] [%(asctime)s] %(levelname)s: %(message)s") + c_fmt = "[%(name)s] [%(asctime)s] %(levelname)s: %(message)s" console_formatter = logging.Formatter(c_fmt, datefmt="%m-%d %H:%M:%S") ch.setFormatter(console_formatter) diff --git 
a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py index b0887d41a6..6f101ece69 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py @@ -14,10 +14,10 @@ import os -# from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.rank_filter import rank_filter +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.rank_filter import rank_filter -# @rank_filter +@rank_filter def mkdir(path): if not os.path.exists(path): os.makedirs(path) diff --git a/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini b/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini index c8a39d062d..aac7b3d043 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini +++ b/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini @@ -1,7 +1,7 @@ [runtime] # 'python' key expects a value using the Python requirements file format # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format -python=torch>=1.7 +python=torch>=1.9.0 pytorch-lightning==1.2.3 protobuf<=3.20.0 omegaconf>=2.0.1 @@ -10,13 +10,9 @@ python=torch>=1.7 opencv-python pycocotools Cython - matplotlib onnx - onnx-simplifier pyaml tabulate tensorboard - torchmetrics - tqdm opendr=opendr-toolkit-engine diff --git a/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py b/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py index 6eb79c3db6..f087264104 100644 --- a/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py +++ b/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py @@ -15,6 +15,7 @@ import os import datetime import json +import warnings from pathlib import Path import pytorch_lightning as pl @@ -30,7 +31,6 @@ from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.inferencer.utilities import Predictor from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util import ( NanoDetLightningLogger, - Logger, cfg, load_config, load_model_weight, @@ -44,13 +44,15 @@ from opendr.engine.learners import Learner from urllib.request import urlretrieve +import onnxruntime as ort + _MODEL_NAMES = {"EfficientNet_Lite0_320", "EfficientNet_Lite1_416", "EfficientNet_Lite2_512", "RepVGG_A0_416", "t", "g", "m", "m_416", "m_0.5x", "m_1.5x", "m_1.5x_416", "plus_m_320", "plus_m_1.5x_320", "plus_m_416", "plus_m_1.5x_416", "custom"} class NanodetLearner(Learner): - def __init__(self, model_to_use="plus_m_1.5x_416", iters=None, lr=None, batch_size=None, checkpoint_after_iter=None, + def __init__(self, model_to_use="m", iters=None, lr=None, batch_size=None, checkpoint_after_iter=None, checkpoint_load_iter=None, temp_path='', device='cuda', weight_decay=None, warmup_steps=None, warmup_ratio=None, lr_schedule_T_max=None, lr_schedule_eta_min=None, grad_clip=None): @@ -82,6 +84,11 @@ def __init__(self, model_to_use="plus_m_1.5x_416", iters=None, lr=None, batch_si checkpoint_load_iter=self.checkpoint_load_iter, temp_path=self.temp_path, device=self.device) + self.ort_session = None + self.jit_model = None + self.predictor = None + + self.pipeline = None self.model = build_model(self.cfg.model) self.logger = None self.task = None @@ -173,73 +180,87 @@ def save(self, path=None, verbose=True): Method for saving the current model and metadata in the 
path provided. :param path: path to folder where model will be saved :type path: str, optional - :param verbose: whether to print a success message or not, defaults to False + :param verbose: whether to print a success message or not :type verbose: bool, optional """ + path = path if path is not None else self.cfg.save_dir model = self.cfg.check_point_name + os.makedirs(path, exist_ok=True) - metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", - "has_data": False, "inference_params": {}, "optimized": False, - "optimizer_info": {}, "classes": self.classes} + if self.ort_session: + self._save_onnx(path, verbose=verbose) + return + if self.jit_model: + self._save_jit(path, verbose=verbose) + return - param_filepath = "nanodet_{}.pth".format(model) - metadata["model_paths"].append(param_filepath) + metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", "has_data": False, + "inference_params": {"input_size": self.cfg.data.val.input_size, "classes": self.classes}, + "optimized": False, "optimizer_info": {}} + + metadata["model_paths"].append("nanodet_{}.pth".format(model)) - logger = self.logger if verbose else None if self.task is None: - print("You do not have call a task yet, only the state of the loaded or initialized model will be saved") - save_model_state(os.path.join(path, metadata["model_paths"][0]), self.model, None, logger) + print("You haven't called a task yet, only the state of the loaded or initialized model will be saved.") + save_model_state(os.path.join(path, metadata["model_paths"][0]), self.model, None, verbose) else: - self.task.save_current_model(os.path.join(path, metadata["model_paths"][0]), logger) + self.task.save_current_model(os.path.join(path, metadata["model_paths"][0]), verbose) with open(os.path.join(path, "nanodet_{}.json".format(model)), 'w', encoding='utf-8') as f: json.dump(metadata, f, ensure_ascii=False, indent=4) if verbose: print("Model metadata saved.") - return True + return def load(self, path=None, verbose=True): """ Loads the model from the path provided. 
:param path: path of the directory where the model was saved :type path: str, optional - :param verbose: whether to print a success message or not, defaults to False + :param verbose: whether to print a success message or not, defaults to True :type verbose: bool, optional """ + path = path if path is not None else self.cfg.save_dir + model = self.cfg.check_point_name if verbose: - print("Model name:", model, "-->", os.path.join(path, model + ".json")) + print("Model name:", model, "-->", os.path.join(path, "nanodet_" + model + ".json")) with open(os.path.join(path, "nanodet_{}.json".format(model))) as f: metadata = json.load(f) - logger = Logger(-1, path, False) if verbose else None - ckpt = torch.load(os.path.join(path, metadata["model_paths"][0]), map_location=torch.device(self.device)) - self.model = load_model_weight(self.model, ckpt, logger) + if metadata['optimized']: + if metadata['format'] == "onnx": + self._load_onnx(os.path.join(path, metadata["model_paths"][0]), verbose=verbose) + print("Loaded ONNX model.") + else: + self._load_jit(os.path.join(path, metadata["model_paths"][0]), verbose=verbose) + print("Loaded JIT model.") + else: + ckpt = torch.load(os.path.join(path, metadata["model_paths"][0]), map_location=torch.device(self.device)) + self.model = load_model_weight(self.model, ckpt, verbose) if verbose: - logger.log("Loaded model weight from {}".format(path)) + print("Loaded model weights from {}".format(path)) pass - def download(self, path=None, mode="pretrained", verbose=False, + def download(self, path=None, mode="pretrained", verbose=True, url=OPENDR_SERVER_URL + "/perception/object_detection_2d/nanodet/"): """ Downloads all files necessary for inference, evaluation and training. Valid mode options are: ["pretrained", "images", "test_data"]. 
:param path: folder to which files will be downloaded, if None self.temp_path will be used - :type path: str, optional + :type path: str :param mode: one of: ["pretrained", "images", "test_data"], where "pretrained" downloads a pretrained - network depending on the network choosed in config file, "images" downloads example inference data, - and "test_data" downloads additional image,annotation file and pretrained network for training and testing - :type mode: str, optional - :param model: the specific name of the model to download, all pre-configured configs files have their pretrained - model and can be selected, if None self.cfg.check_point_name will be used - :param verbose: if True, additional information is printed on stdout - :type verbose: bool, optional + network depending on the network chosen in the config file, "images" downloads example inference data, + and "test_data" downloads additional images and corresponding annotations files + :type mode: str + :param verbose: if True, additional information is printed on STDOUT + :type verbose: bool :param url: URL to file location on FTP server - :type url: str, optional + :type url: str """ valid_modes = ["pretrained", "images", "test_data"] @@ -278,9 +299,9 @@ def download(self, path=None, mode="pretrained", verbose=False, if verbose: print("Making metadata...") - metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", - "has_data": False, "inference_params": {}, "optimized": False, - "optimizer_info": {}, "classes": self.classes} + metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", "has_data": False, + "inference_params": {"input_size": self.cfg.data.val.input_size, "classes": self.classes}, + "optimized": False, "optimizer_info": {}} param_filepath = "nanodet_{}.pth".format(model) metadata["model_paths"].append(param_filepath) @@ -289,13 +310,13 @@ def download(self, path=None, mode="pretrained", verbose=False, except: print("Pretrain weights for this model are not provided!!! 
\n" - "Only the hole ckeckpoint will be download") + "Only the hole checkpoint will be download") if verbose: print("Making metadata...") - metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", - "has_data": False, "inference_params": {}, "optimized": False, - "optimizer_info": {}, "classes": self.classes} + metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", "has_data": False, + "inference_params": {"input_size": self.cfg.data.val.input_size, "classes": self.classes}, + "optimized": False, "optimizer_info": {}} param_filepath = "nanodet_{}.ckpt".format(model) metadata["model_paths"].append(param_filepath) @@ -333,11 +354,138 @@ def reset(self): """This method is not used in this implementation.""" return NotImplementedError - def optimize(self): - """This method is not used in this implementation.""" - return NotImplementedError + def __dummy_input(self): + width, height = self.cfg.data.val.input_size + dummy_input = ( + torch.randn((3, width, height), device=self.device, dtype=torch.float32), + torch.tensor(width, device="cpu", dtype=torch.int64), + torch.tensor(height, device="cpu", dtype=torch.int64), + torch.eye(3, device="cpu", dtype=torch.float32), + ) + return dummy_input + + def _save_onnx(self, onnx_path, do_constant_folding=False, verbose=True, nms_max_num=100): + if not self.predictor: + self.predictor = Predictor(self.cfg, self.model, device=self.device, nms_max_num=nms_max_num) + + os.makedirs(onnx_path, exist_ok=True) + export_path = os.path.join(onnx_path, "nanodet_{}.onnx".format(self.cfg.check_point_name)) + + dummy_input = self.__dummy_input() + + torch.onnx.export( + self.predictor, + dummy_input[0], + export_path, + verbose=verbose, + keep_initializers_as_inputs=True, + do_constant_folding=do_constant_folding, + opset_version=11, + input_names=['data'], + output_names=['output'], + dynamic_axes={'data': {1: 'width', + 2: 'height'}} + ) + + metadata = {"model_paths": ["nanodet_{}.onnx".format(self.cfg.check_point_name)], "framework": "pytorch", + "format": "onnx", "has_data": False, "optimized": True, "optimizer_info": {}, + "inference_params": {"input_size": self.cfg.data.val.input_size, "classes": self.classes}} + + with open(os.path.join(onnx_path, "nanodet_{}.json".format(self.cfg.check_point_name)), + 'w', encoding='utf-8') as f: + json.dump(metadata, f, ensure_ascii=False, indent=4) + + if verbose: + print("Finished exporting ONNX model.") + + try: + import onnxsim + except: + print("For compression in optimized models, install onnxsim and rerun optimize.") + return + + import onnx + if verbose: + print("Simplifying ONNX model...") + input_data = {"data": dummy_input[0].detach().cpu().numpy()} + model_sim, flag = onnxsim.simplify(export_path, input_data=input_data) + if flag: + onnx.save(model_sim, export_path) + if verbose: + print("ONNX simplified successfully.") + else: + if verbose: + print("ONNX simplified failed.") + + def _load_onnx(self, onnx_path, verbose=True): + if verbose: + print("Loading ONNX runtime inference session from {}".format(onnx_path)) + + self.ort_session = ort.InferenceSession(onnx_path) + + def _save_jit(self, jit_path, verbose=True, nms_max_num=100): + if not self.predictor: + self.predictor = Predictor(self.cfg, self.model, device=self.device, nms_max_num=nms_max_num) + + os.makedirs(jit_path, exist_ok=True) - def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123): + dummy_input = self.__dummy_input() + + with torch.no_grad(): + export_path = os.path.join(jit_path, 
"nanodet_{}.pth".format(self.cfg.check_point_name)) + self.predictor.trace_model(dummy_input) + model_traced = torch.jit.script(self.predictor) + + metadata = {"model_paths": ["nanodet_{}.pth".format(self.cfg.check_point_name)], "framework": "pytorch", + "format": "pth", "has_data": False, "optimized": True, "optimizer_info": {}, + "inference_params": {"input_size": self.cfg.data.val.input_size, "classes": self.classes}} + model_traced.save(export_path) + + with open(os.path.join(jit_path, "nanodet_{}.json".format(self.cfg.check_point_name)), + 'w', encoding='utf-8') as f: + json.dump(metadata, f, ensure_ascii=False, indent=4) + + if verbose: + print("Finished export to TorchScript.") + + def _load_jit(self, jit_path, verbose=True): + if verbose: + print("Loading JIT model from {}.".format(jit_path)) + + self.jit_model = torch.jit.load(jit_path, map_location=self.device) + + def optimize(self, export_path, verbose=True, optimization="jit", nms_max_num=100): + """ + Method for optimizing the model with ONNX or JIT. + :param export_path: The file path to the folder where the optimized model will be saved. If a model already + exists at this path, it will be overwritten. + :type export_path: str + :param verbose: if set to True, additional information is printed to STDOUT + :type verbose: bool, optional + :param optimization: the kind of optimization you want to perform [jit, onnx] + :type optimization: str + :param nms_max_num: determines the maximum number of bounding boxes that will be retained following the nms. + :type nms_max_num: int + """ + + optimization = optimization.lower() + if not os.path.exists(export_path): + if optimization == "jit": + self._save_jit(export_path, verbose=verbose, nms_max_num=nms_max_num) + elif optimization == "onnx": + self._save_onnx(export_path, verbose=verbose, nms_max_num=nms_max_num) + else: + assert NotImplementedError + with open(os.path.join(export_path, "nanodet_{}.json".format(self.cfg.check_point_name))) as f: + metadata = json.load(f) + if optimization == "jit": + self._load_jit(os.path.join(export_path, metadata["model_paths"][0]), verbose) + elif optimization == "onnx": + self._load_onnx(os.path.join(export_path, metadata["model_paths"][0]), verbose) + else: + assert NotImplementedError + + def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, logging=False, seed=123, local_rank=1): """ This method is used to train the detector on the COCO dataset. Validation is performed in a val_dataset if provided, else validation is performed in training dataset. 
@@ -348,27 +496,32 @@ def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123 :param val_dataset: validation dataset object :type val_dataset: ExternalDataset, DetectionDataset not implemented yet :param logging_path: subdirectory in temp_path to save logger outputs - :type logging_path: str, optional - :param verbose: if set to True, additional information is printed to STDOUT and logger txt output, - defaults to True + :type logging_path: str + :param verbose: if set to True, additional information is printed to STDOUT :type verbose: bool + :param logging: if set to True, text and STDOUT logging will be used + :type logging: bool :param seed: seed for reproducibility :type seed: int + :param local_rank: for distribution learning + :type local_rank: int """ - mkdir(self.cfg.save_dir) + mkdir(local_rank, self.cfg.save_dir) - if verbose: + if logging: self.logger = NanoDetLightningLogger(self.temp_path + "/" + logging_path) self.logger.dump_cfg(self.cfg) if seed != '' or seed is not None: - if verbose: + if logging: self.logger.info("Set random seed to {}".format(seed)) pl.seed_everything(seed) - if verbose: + if logging: self.logger.info("Setting up data...") + elif verbose: + print("Setting up data...") train_dataset = build_dataset(self.cfg.data.val, dataset, self.cfg.class_names, "train") val_dataset = train_dataset if val_dataset is None else \ @@ -381,7 +534,7 @@ def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123 batch_size=self.batch_size, shuffle=True, num_workers=self.cfg.device.workers_per_gpu, - pin_memory=True, + pin_memory=False, collate_fn=naive_collate, drop_last=True, ) @@ -390,7 +543,7 @@ def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123 batch_size=self.batch_size, shuffle=False, num_workers=self.cfg.device.workers_per_gpu, - pin_memory=True, + pin_memory=False, collate_fn=naive_collate, drop_last=False, ) @@ -401,14 +554,15 @@ def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123 if self.checkpoint_load_iter > 0 else None ) - if verbose: + if logging: self.logger.info("Creating task...") + elif verbose: + print("Creating task...") self.task = TrainingTask(self.cfg, self.model, evaluator) - if self.device == "cpu": - gpu_ids = None - accelerator = None - elif self.device == "cuda": + gpu_ids = None + accelerator = None + if self.device == "cuda": gpu_ids = self.cfg.device.gpu_ids accelerator = None if len(gpu_ids) <= 1 else "ddp" @@ -421,7 +575,7 @@ def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123 log_every_n_steps=self.cfg.log.interval, num_sanity_val_steps=0, resume_from_checkpoint=model_resume_path, - callbacks=[ProgressBar(refresh_rate=0)], # disable tqdm bar + callbacks=[ProgressBar(refresh_rate=0)], logger=self.logger, benchmark=True, gradient_clip_val=self.cfg.get("grad_clip", 0.0), @@ -429,27 +583,32 @@ def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123 trainer.fit(self.task, train_dataloader, val_dataloader) - def eval(self, dataset, verbose=True): + def eval(self, dataset, verbose=True, logging=False, local_rank=1): """ This method performs evaluation on a given dataset and returns a dictionary with the evaluation results. 
-    def eval(self, dataset, verbose=True):
+    def eval(self, dataset, verbose=True, logging=False, local_rank=1):
         """
         This method performs evaluation on a given dataset and returns a dictionary with the evaluation results.
         :param dataset: dataset object, to perform evaluation on
-        :type dataset: ExternalDataset, DetectionDataset not implemented yet
-        :param verbose: if set to True, additional information is printed to STDOUT and logger txt output,
-        defaults to True
+        :type dataset: ExternalDataset, XMLBasedDataset
+        :param verbose: if set to True, additional information is printed to STDOUT
         :type verbose: bool
+        :param logging: if set to True, text and STDOUT logging will be used
+        :type logging: bool
+        :param local_rank: the process rank used for distributed learning
+        :type local_rank: int
         """
         timestr = datetime.datetime.now().__format__("%Y_%m_%d_%H:%M:%S")
         save_dir = os.path.join(self.cfg.save_dir, timestr)
-        mkdir(save_dir)
+        mkdir(local_rank, save_dir)

-        if verbose:
+        if logging:
             self.logger = NanoDetLightningLogger(save_dir)

         self.cfg.update({"test_mode": "val"})

-        if verbose:
+        if logging:
             self.logger.info("Setting up data...")
+        elif verbose:
+            print("Setting up data...")

         val_dataset = build_dataset(self.cfg.data.val, dataset, self.cfg.class_names, "val")
@@ -458,20 +617,22 @@ def eval(self, dataset, verbose=True):
             batch_size=self.batch_size,
             shuffle=False,
             num_workers=self.cfg.device.workers_per_gpu,
-            pin_memory=True,
+            pin_memory=False,
             collate_fn=naive_collate,
             drop_last=False,
         )
         evaluator = build_evaluator(self.cfg.evaluator, val_dataset)

-        if verbose:
+        if logging:
             self.logger.info("Creating task...")
+        elif verbose:
+            print("Creating task...")
+
         self.task = TrainingTask(self.cfg, self.model, evaluator)

-        if self.device == "cpu":
-            gpu_ids = None
-            accelerator = None
-        elif self.device == "cuda":
+        gpu_ids = None
+        accelerator = None
+        if self.device == "cuda":
             gpu_ids = self.cfg.device.gpu_ids
             accelerator = None if len(gpu_ids) <= 1 else "ddp"
@@ -483,35 +644,51 @@ def eval(self, dataset, verbose=True):
             num_sanity_val_steps=0,
             logger=self.logger,
         )
-        if verbose:
+        if self.logger:
             self.logger.info("Starting testing...")
-        return trainer.test(self.task, val_dataloader, verbose=verbose)
+        elif verbose:
+            print("Starting testing...")

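Correspondingly, a minimal eval() sketch under the same assumptions (the dataset path is hypothetical):

    from opendr.engine.datasets import ExternalDataset
    from opendr.perception.object_detection_2d import NanodetLearner

    val_dataset = ExternalDataset("./coco", "coco")  # hypothetical path
    learner = NanodetLearner(model_to_use="m", device="cpu", batch_size=1)
    learner.load("./nanodet_m", verbose=False)
    # With verbose=False and logging=False the underlying trainer.test() call runs silently too.
    results = learner.eval(val_dataset, verbose=True, logging=False, local_rank=1)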
-    def infer(self, input, threshold=0.35, verbose=True):
+        test_results = (verbose or logging)
+        return trainer.test(self.task, val_dataloader, verbose=test_results)
+
+    def infer(self, input, threshold=0.35, nms_max_num=100):
         """
         Performs inference
-        :param input: input can be an Image type image to perform inference
-        :type input: str, optional
+        :param input: input image to perform inference on
+        :type input: opendr.data.Image
         :param threshold: confidence threshold
         :type threshold: float, optional
-        :param verbose: if set to True, additional information is printed to STDOUT and logger txt output,
-        defaults to True
-        :type verbose: bool
+        :param nms_max_num: determines the maximum number of bounding boxes that will be retained after non-maximum suppression (NMS)
+        :type nms_max_num: int
         :return: list of bounding boxes of last image of input or last frame of the video
-        :rtype: BoundingBoxList
+        :rtype: opendr.engine.target.BoundingBoxList
         """
+        if not self.predictor:
+            self.predictor = Predictor(self.cfg, self.model, device=self.device, nms_max_num=nms_max_num)

-        if verbose:
-            self.logger = Logger(0, use_tensorboard=False)
-        predictor = Predictor(self.cfg, self.model, device=self.device)
         if not isinstance(input, Image):
             input = Image(input)
         _input = input.opencv()

-        meta, res = predictor.inference(_input, verbose)
-        bounding_boxes = BoundingBoxList([])
-        for label in res[0]:
-            for box in res[0][label]:
+        _input, *metadata = self.predictor.preprocessing(_input)
+
+        if self.ort_session:
+            if self.jit_model:
+                warnings.warn(
+                    "Both JIT and ONNX models are initialized; inference will run in ONNX mode by default.\n"
+                    "To run in JIT mode instead, clear the ONNX session first, e.g. detector.ort_session = None.")
+            preds = self.ort_session.run(['output'], {'data': _input.cpu().detach().numpy()})
+            res = self.predictor.postprocessing(torch.from_numpy(preds[0]), _input, *metadata)
+        elif self.jit_model:
+            res = self.jit_model(_input, *metadata).cpu()
+        else:
+            preds = self.predictor(_input, *metadata)
+            res = self.predictor.postprocessing(preds, _input, *metadata)
+
+        bounding_boxes = []
+        for label in range(len(res)):
+            for box in res[label]:
                 score = box[-1]
                 if score > threshold:
                     bbox = BoundingBox(left=box[0], top=box[1],
@@ -519,7 +696,8 @@ def infer(self, input, threshold=0.35, verbose=True):
                                        height=box[3] - box[1],
                                        name=label,
                                        score=score)
-                    bounding_boxes.data.append(bbox)
+                    bounding_boxes.append(bbox)
+        bounding_boxes = BoundingBoxList(bounding_boxes)

         bounding_boxes.data.sort(key=lambda v: v.confidence)
         return bounding_boxes
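For reference, a minimal sketch of the resulting inference precedence (model and image paths are illustrative): if both an ONNX session and a JIT model are loaded, ONNX wins; clearing ort_session falls back to JIT; with neither set, the plain Predictor runs.

    import cv2
    from opendr.perception.object_detection_2d import NanodetLearner

    learner = NanodetLearner(model_to_use="m", device="cpu")
    learner.load("./nanodet_m", verbose=False)
    img = cv2.imread("./000000000036.jpg")

    boxes = learner.infer(input=img, threshold=0.35, nms_max_num=100)  # Predictor path
    learner.optimize("./onnx", verbose=False, optimization="onnx")
    boxes = learner.infer(input=img)   # ONNX path
    learner.optimize("./jit", verbose=False, optimization="jit")
    boxes = learner.infer(input=img)   # both set: ONNX still wins (with a warning)
    learner.ort_session = None
    boxes = learner.infer(input=img)   # JIT path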
diff --git a/tests/Makefile b/tests/Makefile
index aea3019248..5fdff5383d 100644
--- a/tests/Makefile
+++ b/tests/Makefile
@@ -46,6 +46,11 @@ download:
 		$(MV) opendrdata.csd.auth.gr/perception/face_recognition/test_data/images $(DATA_DIR)/database; \
 		$(WGET) ftp://opendrdata.csd.auth.gr/perception/face_recognition/optimized_model/*; \
 		$(MV) opendrdata.csd.auth.gr/perception/face_recognition/optimized_model $(DATA_DIR)/optimized_model; \
+		$(MKDIR_P) $(DATA_DIR)/object_detection_2d/nanodet; \
+		$(WGET) ftp://opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/images/*; \
+		$(MV) opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/images $(DATA_DIR)/object_detection_2d/nanodet/database; \
+		$(WGET) ftp://opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/optimized_model/*; \
+		$(MV) opendrdata.csd.auth.gr/perception/object_detection_2d/nanodet/optimized_model $(DATA_DIR)/object_detection_2d/nanodet/optimized_model; \
 		$(RM) -r opendrdata.csd.auth.gr; \
 	fi;
@@ -57,22 +62,34 @@ $(BUILD_DIR)/test_face_recognition:
 	@+echo "Building face recognition test..."
 	$(CC) $(CFLAGS) -o $(BUILD_DIR)/test_face_recognition sources/c_api/test_face_recognition.c $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD)

+$(BUILD_DIR)/test_nanodet:
+	@+echo "Building Nanodet object detection test..."
+	$(CC) $(CFLAGS) -o $(BUILD_DIR)/test_nanodet sources/c_api/test_nanodet.c $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD)
+
 FMP_INC = -I$(OPENDR_HOME)/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include
 $(BUILD_DIR)/test_fmp_gmapping:
 	@+echo "Building Full-Map-Posterior GMapping test..."
 	$(CPP) $(CFLAGS) -o $(BUILD_DIR)/test_fmp_gmapping sources/c_api/test_fmp_gmapping.cpp -lboost_unit_test_framework $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD) $(FMP_INC)

-tests: $(BUILD_DIR)/test_opendr_utils $(BUILD_DIR)/test_face_recognition $(BUILD_DIR)/test_fmp_gmapping
+
+tests: utils face_recognition nanodet fmp_gmapping
+
+utils: $(BUILD_DIR)/test_opendr_utils
+face_recognition: $(BUILD_DIR)/test_face_recognition
+nanodet: $(BUILD_DIR)/test_nanodet
+fmp_gmapping: $(BUILD_DIR)/test_fmp_gmapping

 runtests: download tests
 	@+$(LD_RUN) $(BUILD_DIR)/test_opendr_utils 2>/dev/null
 	@+$(LD_RUN) $(BUILD_DIR)/test_face_recognition 2>/dev/null
+	@+$(LD_RUN) $(BUILD_DIR)/test_nanodet 2>/dev/null
 	@+$(LD_RUN) $(BUILD_DIR)/test_fmp_gmapping

 clean:
 	@+echo "Cleaning C tests binaries and temporary files..."
 	@+$(RM) $(BUILD_DIR)/test_opendr_utils
 	@+$(RM) $(BUILD_DIR)/test_face_recognition
+	@+$(RM) $(BUILD_DIR)/test_nanodet
 	@+$(RM) $(BUILD_DIR)/test_fmp_gmapping
 	@+$(RM) -rf $(DATA_DIR)
 	@+echo "Done!"
diff --git a/tests/sources/c_api/test_face_recognition.c b/tests/sources/c_api/test_face_recognition.c
index c2adc17e37..cd25b277b2 100644
--- a/tests/sources/c_api/test_face_recognition.c
+++ b/tests/sources/c_api/test_face_recognition.c
@@ -22,79 +22,79 @@
 START_TEST(model_creation_test) {
   // Create a face recognition model
-  face_recognition_model_t model;
+  FaceRecognitionModelT model;

   // Load a pretrained model
-  load_face_recognition_model("data/optimized_model", &model);
+  loadFaceRecognitionModel("data/optimized_model", &model);

-  ck_assert(model.onnx_session);
+  ck_assert(model.onnxSession);
   ck_assert(model.env);
-  ck_assert(model.session_options);
+  ck_assert(model.sessionOptions);

   // Release the resources
-  free_face_recognition_model(&model);
+  freeFaceRecognitionModel(&model);

   // Load a model that does not exist
-  load_face_recognition_model("data/optimized_model_not_existant", &model);
-  ck_assert(!model.onnx_session);
+  loadFaceRecognitionModel("data/optimized_model_not_existant", &model);
+  ck_assert(!model.onnxSession);
   ck_assert(!model.env);
-  ck_assert(!model.session_options);
+  ck_assert(!model.sessionOptions);

   // Release the resources
-  free_face_recognition_model(&model);
+  freeFaceRecognitionModel(&model);
 }
 END_TEST

 START_TEST(database_creation_test) {
-  face_recognition_model_t model;
-  load_face_recognition_model("data/optimized_model", &model);
+  FaceRecognitionModelT model;
+  loadFaceRecognitionModel("data/optimized_model", &model);

   // Check that we can create and load a database that exists
-  build_database_face_recognition("data/database", "data/database.dat", &model);
-  load_database_face_recognition("data/database.dat", &model);
+  buildDatabaseFaceRecognition("data/database", "data/database.dat", &model);
+  loadDatabaseFaceRecognition("data/database.dat", &model);
   ck_assert(model.database);
-  ck_assert(model.database_ids);
-  ck_assert(model.database_ids);
+  ck_assert(model.databaseIds);
+  ck_assert(model.databaseIds);

   // Check that we can handle errors in the process
-  build_database_face_recognition("data/database_not_existant", "data/database.dat", &model);
-  load_database_face_recognition("data/database_not_existant.dat", &model);
+  buildDatabaseFaceRecognition("data/database_not_existant", "data/database.dat", &model);
+  loadDatabaseFaceRecognition("data/database_not_existant.dat", &model);
   ck_assert(!model.database);
-  ck_assert(!model.database_ids);
+  ck_assert(!model.databaseIds);

   // Release the resources
-  free_face_recognition_model(&model);
+  freeFaceRecognitionModel(&model);
 }
 END_TEST

 START_TEST(inference_creation_test) {
   // Create a face recognition model
-  face_recognition_model_t model;
+  FaceRecognitionModelT model;

   // Load a pretrained model (see instructions for downloading the data)
-  load_face_recognition_model("data/optimized_model", &model);
+  loadFaceRecognitionModel("data/optimized_model", &model);

   // Build and load the database
-  build_database_face_recognition("data/database", "data/database.dat", &model);
-  load_database_face_recognition("data/database.dat", &model);
+  buildDatabaseFaceRecognition("data/database", "data/database.dat", &model);
+  loadDatabaseFaceRecognition("data/database.dat", &model);

   // Load an image and performance inference
-  opendr_image_t image;
-  load_image("data/database/1/1.jpg", &image);
-  opendr_category_target_t res = infer_face_recognition(&model, &image);
-  free_image(&image);
+  OpendrImageT image;
+  loadImage("data/database/1/1.jpg", &image);
+  OpendrCategoryTargetT res = inferFaceRecognition(&model, &image);
+  freeImage(&image);

   char buff[512];
-  decode_category_face_recognition(&model, res, buff);
+  decodeCategoryFaceRecognition(&model, res, buff);
   ck_assert(!strcmp(buff, "1"));

   // Load another image
-  load_image("data/database/5/1.jpg", &image);
-  res = infer_face_recognition(&model, &image);
-  free_image(&image);
-  decode_category_face_recognition(&model, res, buff);
+  loadImage("data/database/5/1.jpg", &image);
+  res = inferFaceRecognition(&model, &image);
+  freeImage(&image);
+  decodeCategoryFaceRecognition(&model, res, buff);
   ck_assert(!strcmp(buff, "5"));

   // Free the model resources
-  free_face_recognition_model(&model);
+  freeFaceRecognitionModel(&model);
 }
 END_TEST
diff --git a/tests/sources/c_api/test_nanodet.c b/tests/sources/c_api/test_nanodet.c
new file mode 100644
index 0000000000..22a8e8e695
--- /dev/null
+++ b/tests/sources/c_api/test_nanodet.c
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020-2022 OpenDR European Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <check.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "object_detection_2d_nanodet_jit.h"
+#include "opendr_utils.h"
+
+START_TEST(model_creation_test) {
+  // Create a nanodet libtorch model
+  NanodetModelT model;
+  // Load a pretrained model
+  loadNanodetModel("data/object_detection_2d/nanodet/optimized_model/nanodet_m.pth", "cpu", 320, 320, 0.35, &model);
+  ck_assert_msg(model.network != 0, "net is NULL");
+
+  // Release the resources
+  freeNanodetModel(&model);
+
+  // Check if memory still exists
+  ck_assert_msg(model.network, "net is NULL");
+}
+END_TEST
+
+START_TEST(inference_creation_test) {
+  // Create a nanodet model
+  NanodetModelT model;
+
+  // Load a pretrained model
+  loadNanodetModel("data/object_detection_2d/nanodet/optimized_model/nanodet_m.pth", "cpu", 320, 320, 0.35, &model);
+
+  // Load an image and perform inference
+  OpendrImageT image;
+  loadImage("data/object_detection_2d/nanodet/database/000000000036.jpg", &image);
+  OpendrDetectionVectorTargetT res = inferNanodet(&model, &image);
+  freeImage(&image);
+
+  ck_assert(res.size != 0);
+
+  // Free the model resources
+  freeDetectionsVector(&res);
+  freeNanodetModel(&model);
+}
+END_TEST
+
+Suite *nanodet_suite(void) {
+  Suite *s;
+  TCase *tc_core;
+
+  s = suite_create("Nanodet");
+  tc_core = tcase_create("Core");
+
+  tcase_add_test(tc_core, model_creation_test);
+  tcase_add_test(tc_core, inference_creation_test);
+  suite_add_tcase(s, tc_core);
+
+  return s;
+}
+
+int main() {
+  int no_failed = 0;
+  Suite *s;
+  SRunner *runner;
+
+  s = nanodet_suite();
+  runner = srunner_create(s);
+
+  srunner_run_all(runner, CK_NORMAL);
+  no_failed = srunner_ntests_failed(runner);
+  srunner_free(runner);
+  return (no_failed == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
+}
diff --git a/tests/sources/c_api/test_opendr_utils.c b/tests/sources/c_api/test_opendr_utils.c
index f872726199..fb68437f44 100644
--- a/tests/sources/c_api/test_opendr_utils.c
+++ b/tests/sources/c_api/test_opendr_utils.c
@@ -21,16 +21,16 @@
 START_TEST(image_load_test) {
   // Load an image and performance inference
-  opendr_image_t image;
+  OpendrImageT image;
   // An example of an image that exist
-  load_image("data/database/1/1.jpg", &image);
+  loadImage("data/database/1/1.jpg", &image);
   ck_assert(image.data);
   // An example of an image that does not exist
-  load_image("images/not_existant/1.jpg", &image);
+  loadImage("images/not_existant/1.jpg", &image);
   ck_assert(image.data == 0);
   // Free the resources
-  free_image(&image);
+  freeImage(&image);
 }
 END_TEST
diff --git a/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py b/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py
index 6b613df539..e4a212fe5d 100644
--- a/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py
+++ b/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py
@@ -17,13 +17,15 @@
 import gc
 import shutil
 import os
+import warnings
+from torch.jit import TracerWarning

 import numpy as np

 from opendr.perception.object_detection_2d import NanodetLearner
 from opendr.engine.datasets import ExternalDataset

 device = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu'

-_DEFAULT_MODEL = "plus_m_416"
+_DEFAULT_MODEL = "m"


 def rmfile(path):
@@ -47,14 +49,13 @@ def setUpClass(cls):
         print("\n\n**********************************\nTEST Nanodet Learner\n"
               "**********************************")

-        cls.temp_dir = os.path.join(".", "tests", "sources", "tools", "perception", "object_detection_2d",
-                                    "nanodet", "nanodet_temp")
+        cls.temp_dir = os.path.join(".", "nanodet_temp")
         cls.detector = NanodetLearner(model_to_use=_DEFAULT_MODEL, device=device, temp_path=cls.temp_dir,
                                       batch_size=1, iters=1, checkpoint_after_iter=2, lr=1e-4)
         # Download all required files for testing
-        cls.detector.download(path=cls.temp_dir, mode="pretrained")
-        cls.detector.download(path=cls.temp_dir, mode="images")
-        cls.detector.download(path=cls.temp_dir, mode="test_data")
+        cls.detector.download(path=cls.temp_dir, mode="pretrained", verbose=False)
+        cls.detector.download(path=cls.temp_dir, mode="images", verbose=False)
+        cls.detector.download(path=cls.temp_dir, mode="test_data", verbose=False)

     @classmethod
     def tearDownClass(cls):
@@ -104,13 +105,15 @@ def test_infer(self):
         print('Starting inference test for Nanodet...')
         self.detector.load(os.path.join(self.temp_dir, "nanodet_{}".format(_DEFAULT_MODEL)), verbose=False)
         img = cv2.imread(os.path.join(self.temp_dir, "000000000036.jpg"))
-        self.assertIsNotNone(self.detector.infer(input=img, verbose=False),
+        self.assertIsNotNone(self.detector.infer(input=img),
                              msg="Returned empty BoundingBoxList.")
         gc.collect()
         print('Finished inference test for Nanodet...')

     def test_save_load(self):
         print('Starting save/load test for Nanodet...')
+        self.detector.ort_session = None
+        self.detector.jit_model = None
         self.detector.save(path=os.path.join(self.temp_dir, "test_model"), verbose=False)
         starting_param_1 = list(self.detector._model.parameters())[0].detach().clone().to(device)
         self.detector.model = None
@@ -120,12 +123,40 @@ def test_save_load(self):
         new_param = list(detector2._model.parameters())[0].detach().clone().to(device)
         self.assertTrue(starting_param_1.allclose(new_param))

+        del starting_param_1, new_param
         # Cleanup
         rmfile(os.path.join(self.temp_dir, "test_model", "nanodet_{}.json".format(_DEFAULT_MODEL)))
         rmfile(os.path.join(self.temp_dir, "test_model", "nanodet_{}.pth".format(_DEFAULT_MODEL)))
         rmdir(os.path.join(self.temp_dir, "test_model"))
         print('Finished save/load test for Nanodet...')

+    def test_optimize(self):
+        # Tracing issues TracerWarnings, which are safe to ignore here because
+        # the traced inputs are constant tensors that are identical on every call.
+        warnings.simplefilter("ignore", TracerWarning)
+        warnings.simplefilter("ignore", RuntimeWarning)
+
+        self.detector.ort_session = None
+        self.detector.jit_model = None
+
+        self.detector.optimize(os.path.join(self.temp_dir, "onnx"), verbose=False, optimization="onnx")
+        self.assertIsNotNone(self.detector.ort_session)
+
+        self.detector.optimize(os.path.join(self.temp_dir, "jit"), verbose=False, optimization="jit")
+        self.assertIsNotNone(self.detector.jit_model)
+
+        # Cleanup
+        rmfile(os.path.join(self.temp_dir, "onnx", "nanodet_{}.onnx".format(_DEFAULT_MODEL)))
+        rmfile(os.path.join(self.temp_dir, "onnx", "nanodet_{}.json".format(_DEFAULT_MODEL)))
+        rmfile(os.path.join(self.temp_dir, "jit", "nanodet_{}.pth".format(_DEFAULT_MODEL)))
+        rmfile(os.path.join(self.temp_dir, "jit", "nanodet_{}.json".format(_DEFAULT_MODEL)))
+        rmdir(os.path.join(self.temp_dir, "onnx"))
+        rmdir(os.path.join(self.temp_dir, "jit"))
+
+        warnings.simplefilter("default", TracerWarning)
+        warnings.simplefilter("default", RuntimeWarning)
+

 if __name__ == "__main__":
     unittest.main()
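For reference, the warning-suppression pattern used by test_optimize above, in isolation (a sketch; a scoped warnings.catch_warnings() block is an alternative to the paired simplefilter calls):

    import warnings
    from torch.jit import TracerWarning

    with warnings.catch_warnings():
        # Tracing constant dummy inputs triggers TracerWarnings that are safe to
        # ignore here, since the traced tensors are identical on every call.
        warnings.simplefilter("ignore", TracerWarning)
        warnings.simplefilter("ignore", RuntimeWarning)
        detector.optimize("./jit", verbose=False, optimization="jit")  # detector: a NanodetLearner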