diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 37dfec9..520f6ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,4 +7,14 @@ repos: - repo: https://gitlab.com/pycqa/flake8 rev: 3.9.2 hooks: - - id: flake8 \ No newline at end of file + - id: flake8 +- repo: local + hooks: + - id: pdoc + name: pdoc + description: 'pdoc3: Auto-generate API documentation for Python projects' + entry: pdoc --html --skip-errors --force -o docs/api carvekit + language: python + language_version: python3 + require_serial: true + types: [python] diff --git a/Dockerfile.cpu b/Dockerfile.cpu index 975093f..7f537db 100644 --- a/Dockerfile.cpu +++ b/Dockerfile.cpu @@ -36,12 +36,15 @@ ENV CARVEKIT_PORT '5000' ENV CARVEKIT_HOST '0.0.0.0' ENV CARVEKIT_SEGMENTATION_NETWORK 'tracer_b7' ENV CARVEKIT_PREPROCESSING_METHOD 'none' -ENV CARVEKIT_POSTPROCESSING_METHOD 'fba' +ENV CARVEKIT_POSTPROCESSING_METHOD 'cascade_fba' ENV CARVEKIT_DEVICE 'cpu' +ENV CARVEKIT_BATCH_SIZE_PRE=5 ENV CARVEKIT_BATCH_SIZE_SEG '5' ENV CARVEKIT_BATCH_SIZE_MATTING '1' +ENV CARVEKIT_BATCH_SIZE_REFINE '1' ENV CARVEKIT_SEG_MASK_SIZE '640' ENV CARVEKIT_MATTING_MASK_SIZE '2048' +ENV CARVEKIT_REFINE_MASK_SIZE '900' ENV CARVEKIT_AUTH_ENABLE '1' ENV CARVEKIT_FP16 '0' ENV CARVEKIT_TRIMAP_PROB_THRESHOLD=231 diff --git a/Dockerfile.cuda b/Dockerfile.cuda index b5d31df..1155b0c 100644 --- a/Dockerfile.cuda +++ b/Dockerfile.cuda @@ -36,12 +36,15 @@ ENV CARVEKIT_PORT '5000' ENV CARVEKIT_HOST '0.0.0.0' ENV CARVEKIT_SEGMENTATION_NETWORK 'tracer_b7' ENV CARVEKIT_PREPROCESSING_METHOD 'none' -ENV CARVEKIT_POSTPROCESSING_METHOD 'fba' +ENV CARVEKIT_POSTPROCESSING_METHOD 'cascade_fba' ENV CARVEKIT_DEVICE 'cuda' +ENV CARVEKIT_BATCH_SIZE_PRE=5 ENV CARVEKIT_BATCH_SIZE_SEG '5' ENV CARVEKIT_BATCH_SIZE_MATTING '1' +ENV CARVEKIT_BATCH_SIZE_REFINE '1' ENV CARVEKIT_SEG_MASK_SIZE '640' ENV CARVEKIT_MATTING_MASK_SIZE '2048' +ENV CARVEKIT_REFINE_MASK_SIZE '900' ENV CARVEKIT_AUTH_ENABLE '1' ENV CARVEKIT_FP16 '0' ENV CARVEKIT_TRIMAP_PROB_THRESHOLD=231 diff --git a/README.md b/README.md index 8f44cc7..cb20f72 100644 --- a/README.md +++ b/README.md @@ -26,13 +26,16 @@ Automated high-quality background removal framework for an image using neural ne ## πŸŽ† Features: - High Quality +- Works offline - Batch Processing - NVIDIA CUDA and CPU processing - FP16 inference: Fast inference with low memory usage - Easy inference - 100% remove.bg compatible FastAPI HTTP API - Removes background from hairs +- Automatic best method selection for user's image - Easy integration with your code +- Models hosted on [HuggingFace](https://huggingface.co/Carve) ## β›± Try yourself on [Google Colab](https://colab.research.google.com/github/OPHoperHPO/image-background-remove-tool/blob/master/docs/other/carvekit_try.ipynb) ## ⛓️ How does it work? @@ -64,10 +67,17 @@ It can be briefly described as ## πŸ–ΌοΈ Image pre-processing and post-processing methods: ### πŸ” Preprocessing methods: * `none` - No preprocessing methods used. -> They will be added in the future. +* [`autoscene`](https://huggingface.co/Carve/scene_classifier/) - Automatically detects the scene type using classifier and applies the appropriate model. (default) +* `auto` - Performs in-depth image analysis and more accurately determines the best background removal method. Uses object classifier and scene classifier together. +> ### Notes: +> 1. `AutoScene` and `auto` may override the model and parameters specified by the user without logging. 
+> So, if you want to use a specific model, make all constant etc., you should disable auto preprocessing methods first! +> 2. At the moment for `auto` method universal models are selected for some specific domains, since the added models are currently not enough for so many types of scenes. +> In the future, when some variety of models is added, auto-selection will be rewritten for the better. ### βœ‚ Post-processing methods: * `none` - No post-processing methods used. -* `fba` (default) - This algorithm improves the borders of the image when removing the background from images with hair, etc. using FBA Matting neural network. This method gives the best result in combination with u2net without any preprocessing methods. +* `fba` - This algorithm improves the borders of the image when removing the background from images with hair, etc. using FBA Matting neural network. +* `cascade_fba` (default) - This algorithm refines the segmentation mask using CascadePSP neural network and then applies the FBA algorithm. ## 🏷 Setup for CPU processing: 1. `pip install carvekit --extra-index-url https://download.pytorch.org/whl/cpu` @@ -84,12 +94,15 @@ import torch from carvekit.api.high import HiInterface # Check doc strings for more information -interface = HiInterface(object_type="hairs-like", # Can be "object" or "hairs-like". +interface = HiInterface(object_type="auto", # Can be "object" or "hairs-like" or "auto" batch_size_seg=5, + batch_size_pre=5, batch_size_matting=1, + batch_size_refine=1, device='cuda' if torch.cuda.is_available() else 'cpu', seg_mask_size=640, # Use 640 for Tracer B7 and 320 for U2Net matting_mask_size=2048, + refine_mask_size=900, trimap_prob_threshold=231, trimap_dilation=30, trimap_erosion_iters=5, @@ -100,33 +113,65 @@ cat_wo_bg.save('2.png') ``` - +### Analogue of `auto` preprocessing method from cli +``` python +from carvekit.api.autointerface import AutoInterface +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4 + +scene_classifier = SceneClassifier(device="cpu", batch_size=1) +object_classifier = SimplifiedYoloV4(device="cpu", batch_size=1) + +interface = AutoInterface(scene_classifier=scene_classifier, + object_classifier=object_classifier, + segmentation_batch_size=1, + postprocessing_batch_size=1, + postprocessing_image_size=2048, + refining_batch_size=1, + refining_image_size=900, + segmentation_device="cpu", + fp16=False, + postprocessing_device="cpu") +images_without_background = interface(['./tests/data/cat.jpg']) +cat_wo_bg = images_without_background[0] +cat_wo_bg.save('2.png') +``` ### If you want control everything ``` python import PIL.Image from carvekit.api.interface import Interface from carvekit.ml.wrap.fba_matting import FBAMatting +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.ml.wrap.cascadepsp import CascadePSP from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 -from carvekit.pipelines.postprocessing import MattingMethod -from carvekit.pipelines.preprocessing import PreprocessingStub +from carvekit.pipelines.postprocessing import CasMattingMethod +from carvekit.pipelines.preprocessing import AutoScene from carvekit.trimap.generator import TrimapGenerator # Check doc strings for more information seg_net = TracerUniversalB7(device='cpu', - batch_size=1) - + batch_size=1, fp16=False) +cascade_psp = CascadePSP(device='cpu', + batch_size=1, + input_tensor_size=900, + fp16=False, + processing_accelerate_image_size=2048, + global_step_only=False) fba = 
FBAMatting(device='cpu', input_tensor_size=2048, - batch_size=1) + batch_size=1, fp16=False) -trimap = TrimapGenerator() +trimap = TrimapGenerator(prob_threshold=231, kernel_size=30, erosion_iters=5) -preprocessing = PreprocessingStub() +scene_classifier = SceneClassifier(device='cpu', batch_size=5) +preprocessing = AutoScene(scene_classifier=scene_classifier) -postprocessing = MattingMethod(matting_module=fba, - trimap_generator=trimap, - device='cpu') +postprocessing = CasMattingMethod( + refining_module=cascade_psp, + matting_module=fba, + trimap_generator=trimap, + device='cpu') interface = Interface(pre_pipe=preprocessing, post_pipe=postprocessing, @@ -134,8 +179,7 @@ interface = Interface(pre_pipe=preprocessing, image = PIL.Image.open('tests/data/cat.jpg') cat_wo_bg = interface([image])[0] -cat_wo_bg.save('2.png') - +cat_wo_bg.save('2.png') ``` @@ -151,24 +195,35 @@ Usage: carvekit [OPTIONS] Options: -i ./2.jpg Path to input file or dir [required] -o ./2.png Path to output file or dir - --pre none Preprocessing method - --post fba Postprocessing method. + --pre autoscene Preprocessing method + --post cascade_fba Postprocessing method. --net tracer_b7 Segmentation Network. Check README for more info. + --recursive Enables recursive search for images in a folder --batch_size 10 Batch Size for list of images to be loaded to RAM - + + --batch_size_pre 5 Batch size for list of images to be + processed by preprocessing method network + --batch_size_seg 5 Batch size for list of images to be processed by segmentation network --batch_size_mat 1 Batch size for list of images to be processed by matting network + --batch_size_refine 1 Batch size for list of images to be + processed by refining network + --seg_mask_size 640 The size of the input image for the segmentation neural network. Use 640 for Tracer B7 and 320 for U2Net --matting_mask_size 2048 The size of the input image for the matting neural network. + + --refine_mask_size 900 The size of the input image for the refining + neural network. 
+ --trimap_dilation 30 The size of the offset radius from the object mask in pixels when forming an unknown area diff --git a/carvekit/__init__.py b/carvekit/__init__.py index b58821b..03a3882 100644 --- a/carvekit/__init__.py +++ b/carvekit/__init__.py @@ -1 +1 @@ -version = "4.1.0" +version = "4.5.0" diff --git a/carvekit/__main__.py b/carvekit/__main__.py index acf901d..c40bc61 100644 --- a/carvekit/__main__.py +++ b/carvekit/__main__.py @@ -16,8 +16,8 @@ ) @click.option("-i", required=True, type=str, help="Path to input file or dir") @click.option("-o", default="none", type=str, help="Path to output file or dir") -@click.option("--pre", default="none", type=str, help="Preprocessing method") -@click.option("--post", default="fba", type=str, help="Postprocessing method.") +@click.option("--pre", default="autoscene", type=str, help="Preprocessing method") +@click.option("--post", default="cascade_fba", type=str, help="Postprocessing method.") @click.option("--net", default="tracer_b7", type=str, help="Segmentation Network") @click.option( "--recursive", @@ -31,6 +31,12 @@ type=int, help="Batch Size for list of images to be loaded to RAM", ) +@click.option( + "--batch_size_pre", + default=5, + type=int, + help="Batch size for list of images to be processed by preprocessing method network", +) @click.option( "--batch_size_seg", default=5, @@ -43,6 +49,12 @@ type=int, help="Batch size for list of images to be processed by matting " "network", ) +@click.option( + "--batch_size_refine", + default=1, + type=int, + help="Batch size for list of images to be processed by refining network", +) @click.option( "--seg_mask_size", default=640, @@ -55,6 +67,12 @@ type=int, help="The size of the input image for the matting neural network.", ) +@click.option( + "--refine_mask_size", + default=900, + type=int, + help="The size of the input image for the refining neural network.", +) @click.option( "--trimap_dilation", default=30, @@ -89,10 +107,13 @@ def removebg( net: str, recursive: bool, batch_size: int, + batch_size_pre: int, batch_size_seg: int, batch_size_mat: int, + batch_size_refine: int, seg_mask_size: int, matting_mask_size: int, + refine_mask_size: int, device: str, fp16: bool, trimap_dilation: int, @@ -121,12 +142,15 @@ def removebg( device=device, batch_size_seg=batch_size_seg, batch_size_matting=batch_size_mat, + batch_size_refine=batch_size_refine, seg_mask_size=seg_mask_size, matting_mask_size=matting_mask_size, + refine_mask_size=refine_mask_size, fp16=fp16, trimap_dilation=trimap_dilation, trimap_erosion=trimap_erosion, trimap_prob_threshold=trimap_prob_threshold, + batch_size_pre=batch_size_pre, ) interface = init_interface(interface_config) diff --git a/carvekit/api/autointerface.py b/carvekit/api/autointerface.py new file mode 100644 index 0000000..60ba0c7 --- /dev/null +++ b/carvekit/api/autointerface.py @@ -0,0 +1,252 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. 
+License: Apache License 2.0 +""" +from collections import Counter +from pathlib import Path + +from PIL import Image +from typing import Union, List, Dict + +from carvekit.api.interface import Interface +from carvekit.ml.wrap.basnet import BASNET +from carvekit.ml.wrap.cascadepsp import CascadePSP +from carvekit.ml.wrap.deeplab_v3 import DeepLabV3 +from carvekit.ml.wrap.fba_matting import FBAMatting +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 +from carvekit.ml.wrap.u2net import U2NET +from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4 +from carvekit.pipelines.postprocessing import CasMattingMethod +from carvekit.trimap.generator import TrimapGenerator + +__all__ = ["AutoInterface"] + +from carvekit.utils.image_utils import load_image + +from carvekit.utils.pool_utils import thread_pool_processing + + +class AutoInterface(Interface): + def __init__( + self, + scene_classifier: SceneClassifier, + object_classifier: SimplifiedYoloV4, + segmentation_batch_size: int = 3, + refining_batch_size: int = 1, + refining_image_size: int = 900, + postprocessing_batch_size: int = 1, + postprocessing_image_size: int = 2048, + segmentation_device: str = "cpu", + postprocessing_device: str = "cpu", + fp16=False, + ): + """ + Args: + scene_classifier: SceneClassifier instance + object_classifier: YoloV4_COCO instance + """ + self.scene_classifier = scene_classifier + self.object_classifier = object_classifier + self.segmentation_batch_size = segmentation_batch_size + self.refining_batch_size = refining_batch_size + self.refining_image_size = refining_image_size + self.postprocessing_batch_size = postprocessing_batch_size + self.postprocessing_image_size = postprocessing_image_size + self.segmentation_device = segmentation_device + self.postprocessing_device = postprocessing_device + self.fp16 = fp16 + super().__init__( + seg_pipe=None, post_pipe=None, pre_pipe=None + ) # just for compatibility with Interface class + + @staticmethod + def select_params_for_net(net: Union[TracerUniversalB7, U2NET, DeepLabV3]): + """ + Selects the parameters for the network depending on the scene + + Args: + net: network + """ + if net == TracerUniversalB7: + return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5} + elif net == U2NET: + return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5} + elif net == DeepLabV3: + return {"prob_threshold": 231, "kernel_size": 40, "erosion_iters": 20} + elif net == BASNET: + return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5} + else: + raise ValueError("Unknown network type") + + def select_net(self, scene: str, images_info: List[dict]): + # TODO: Update this function, when new networks will be added + if scene == "hard": + for image_info in images_info: + objects = image_info["objects"] + if len(objects) == 0: + image_info[ + "net" + ] = TracerUniversalB7 # It seems that the image is empty, but we will try to process it + continue + obj_counter: Dict = dict(Counter([obj for obj in objects])) + # fill empty classes + for _tag in self.object_classifier.db: + if _tag not in obj_counter: + obj_counter[_tag] = 0 + + non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0] + + if obj_counter["human"] > 0 and len(non_empty_classes) == 1: + # Human only case. Hard Scene? It may be a photo of a person in far/middle distance. 
+ image_info["net"] = TracerUniversalB7 + # TODO: will use DeepLabV3+ for this image, it is more suitable for this case, + # but needs checks for small bbox + elif obj_counter["human"] > 0 and len(non_empty_classes) > 1: + # Okay, we have a human without extra hairs and something else. Hard border + image_info["net"] = TracerUniversalB7 + elif obj_counter["cars"] > 0: + # Cars case + image_info["net"] = TracerUniversalB7 + elif obj_counter["animals"] > 0: + # Animals case + image_info["net"] = U2NET # animals should be always in soft scenes + else: + # We have no idea what is in the image, so we will try to process it with universal model + image_info["net"] = TracerUniversalB7 + + elif scene == "soft": + for image_info in images_info: + objects = image_info["objects"] + if len(objects) == 0: + image_info[ + "net" + ] = TracerUniversalB7 # It seems that the image is empty, but we will try to process it + continue + obj_counter: Dict = dict(Counter([obj for obj in objects])) + # fill empty classes + for _tag in self.object_classifier.db: + if _tag not in obj_counter: + obj_counter[_tag] = 0 + + non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0] + + if obj_counter["human"] > 0 and len(non_empty_classes) == 1: + # Human only case. It may be a portrait + image_info["net"] = U2NET + elif obj_counter["human"] > 0 and len(non_empty_classes) > 1: + # Okay, we have a human with hairs and something else + image_info["net"] = U2NET + elif obj_counter["cars"] > 0: + # Cars case. + image_info["net"] = TracerUniversalB7 + elif obj_counter["animals"] > 0: + # Animals case + image_info["net"] = U2NET # animals should be always in soft scenes + else: + # We have no idea what is in the image, so we will try to process it with universal model + image_info["net"] = TracerUniversalB7 + elif scene == "digital": + for image_info in images_info: # TODO: not implemented yet + image_info[ + "net" + ] = TracerUniversalB7 # It seems that the image is empty, but we will try to process it + + def __call__(self, images: List[Union[str, Path, Image.Image]]): + """ + Automatically detects the scene and selects the appropriate network for segmentation + + Args: + interface: Interface instance + images: list of images + + Returns: + list of masks + """ + loaded_images = thread_pool_processing(load_image, images) + + scene_analysis = self.scene_classifier(loaded_images) + images_objects = self.object_classifier(loaded_images) + + images_per_scene = {} + for i, image in enumerate(loaded_images): + scene_name = scene_analysis[i][0][0] + if scene_name not in images_per_scene: + images_per_scene[scene_name] = [] + images_per_scene[scene_name].append( + {"image": image, "objects": images_objects[i]} + ) + + for scene_name, images_info in list(images_per_scene.items()): + self.select_net(scene_name, images_info) + + # groups images by net + for scene_name, images_info in list(images_per_scene.items()): + groups = {} + for image_info in images_info: + net = image_info["net"] + if net not in groups: + groups[net] = [] + groups[net].append(image_info) + for net, gimages_info in list(groups.items()): + sc_images = [image_info["image"] for image_info in gimages_info] + masks = net( + device=self.segmentation_device, + batch_size=self.segmentation_batch_size, + fp16=self.fp16, + )(sc_images) + + for i, image_info in enumerate(gimages_info): + image_info["mask"] = masks[i] + + cascadepsp = CascadePSP( + device=self.postprocessing_device, + fp16=self.fp16, + input_tensor_size=self.refining_image_size, + 
batch_size=self.refining_batch_size, + ) + + fba = FBAMatting( + device=self.postprocessing_device, + batch_size=self.postprocessing_batch_size, + input_tensor_size=self.postprocessing_image_size, + fp16=self.fp16, + ) + # groups images by net + for scene_name, images_info in list(images_per_scene.items()): + groups = {} + for image_info in images_info: + net = image_info["net"] + if net not in groups: + groups[net] = [] + groups[net].append(image_info) + for net, gimages_info in list(groups.items()): + sc_images = [image_info["image"] for image_info in gimages_info] + # noinspection PyArgumentList + trimap_generator = TrimapGenerator(**self.select_params_for_net(net)) + matting_method = CasMattingMethod( + refining_module=cascadepsp, + matting_module=fba, + trimap_generator=trimap_generator, + device=self.postprocessing_device, + ) + masks = [image_info["mask"] for image_info in gimages_info] + result = matting_method(sc_images, masks) + + for i, image_info in enumerate(gimages_info): + image_info["result"] = result[i] + + # Reconstructing the original order of image + result = [] + for image in loaded_images: + for scene_name, images_info in list(images_per_scene.items()): + for image_info in images_info: + if image_info["image"] == image: + result.append(image_info["result"]) + break + if len(result) != len(images): + raise RuntimeError( + "Something went wrong with restoring original order. Please report this bug." + ) + return result diff --git a/carvekit/api/high.py b/carvekit/api/high.py index 46fb9d3..dda60f3 100644 --- a/carvekit/api/high.py +++ b/carvekit/api/high.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ import warnings @@ -8,20 +10,26 @@ from carvekit.api.interface import Interface from carvekit.ml.wrap.fba_matting import FBAMatting from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 +from carvekit.ml.wrap.cascadepsp import CascadePSP +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.pipelines.preprocessing import AutoScene from carvekit.ml.wrap.u2net import U2NET -from carvekit.pipelines.postprocessing import MattingMethod +from carvekit.pipelines.postprocessing import CasMattingMethod from carvekit.trimap.generator import TrimapGenerator class HiInterface(Interface): def __init__( self, - object_type: str = "object", + object_type: str = "auto", + batch_size_pre=5, batch_size_seg=2, batch_size_matting=1, + batch_size_refine=1, device="cpu", seg_mask_size=640, matting_mask_size=2048, + refine_mask_size=900, trimap_prob_threshold=231, trimap_dilation=30, trimap_erosion_iters=5, @@ -31,69 +39,96 @@ def __init__( Initializes High Level interface. Args: - object_type: Interest object type. Can be "object" or "hairs-like". - matting_mask_size: The size of the input image for the matting neural network. - seg_mask_size: The size of the input image for the segmentation neural network. - batch_size_seg: Number of images processed per one segmentation neural network call. - batch_size_matting: Number of images processed per one matting neural network call. - device: Processing device - fp16: Use half precision. Reduce memory usage and increase speed. 
Experimental support - trimap_prob_threshold: Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied - trimap_dilation: The size of the offset radius from the object mask in pixels when forming an unknown area - trimap_erosion_iters: The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area + object_type (str, default=object): Interest object type. Can be "object" or "hairs-like". + matting_mask_size (int, default=2048): The size of the input image for the matting neural network. + seg_mask_size (int, default=640): The size of the input image for the segmentation neural network. + batch_size_pre (int, default=5: Number of images processed per one preprocessing method call. + batch_size_seg (int, default=2): Number of images processed per one segmentation neural network call. + batch_size_matting (int, matting=1): Number of images processed per one matting neural network call. + device (Literal[cpu, cuda], default=cpu): Processing device + fp16 (bool, default=False): Use half precision. Reduce memory usage and increase speed. + .. CAUTION:: ⚠️ **Experimental support** + trimap_prob_threshold (int, default=231): Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied + trimap_dilation (int, default=30): The size of the offset radius from the object mask in pixels when forming an unknown area + trimap_erosion_iters (int, default=5): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area + refine_mask_size (int, default=900): The size of the input image for the refinement neural network. + batch_size_refine (int, default=1): Number of images processed per one refinement neural network call. - Notes: - 1. Changing seg_mask_size may cause an out-of-memory error if the value is too large, and it may also - result in reduced precision. I do not recommend changing this value. You can change matting_mask_size in - range from (1024 to 4096) to improve object edge refining quality, but it will cause extra large RAM and + + .. NOTE:: + 1. Changing seg_mask_size may cause an `out-of-memory` error if the value is too large, and it may also + result in reduced precision. I do not recommend changing this value. You can change `matting_mask_size` in + range from `(1024 to 4096)` to improve object edge refining quality, but it will cause extra large RAM and video memory consume. Also, you can change batch size to accelerate background removal, but it also causes extra large video memory consume, if value is too big. - - 2. Changing trimap_prob_threshold, trimap_kernel_size, trimap_erosion_iters may improve object edge - refining quality, + 2. Changing `trimap_prob_threshold`, `trimap_kernel_size`, `trimap_erosion_iters` may improve object edge + refining quality. 
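As the note above suggests, edge quality can sometimes be improved by tuning the trimap parameters. A minimal sketch, assuming the defaults added in this PR and the test image used by the README examples (the values below are illustrative, not recommended presets):

``` python
import torch
from carvekit.api.high import HiInterface

interface = HiInterface(object_type="auto",
                        device='cuda' if torch.cuda.is_available() else 'cpu',
                        trimap_prob_threshold=220,  # threshold for the prob_filter / prob_as_unknown_area steps
                        trimap_dilation=40,         # offset radius (px) of the unknown band around the object mask
                        trimap_erosion_iters=10)    # erosion iterations applied before the unknown area is formed
result = interface(['./tests/data/cat.jpg'])[0]
result.save('cat_tuned.png')
```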
""" + preprocess_pipeline = None + if object_type == "object": - self.u2net = TracerUniversalB7( + self._segnet = TracerUniversalB7( device=device, batch_size=batch_size_seg, input_image_size=seg_mask_size, fp16=fp16, ) elif object_type == "hairs-like": - self.u2net = U2NET( + self._segnet = U2NET( device=device, batch_size=batch_size_seg, input_image_size=seg_mask_size, fp16=fp16, ) + elif object_type == "auto": + # Using Tracer by default, + # but it will dynamically switch to other if needed + self._segnet = TracerUniversalB7( + device=device, + batch_size=batch_size_seg, + input_image_size=seg_mask_size, + fp16=fp16, + ) + self._scene_classifier = SceneClassifier( + device=device, fp16=fp16, batch_size=batch_size_pre + ) + preprocess_pipeline = AutoScene(scene_classifier=self._scene_classifier) + else: warnings.warn( f"Unknown object type: {object_type}. Using default object type: object" ) - self.u2net = TracerUniversalB7( + self._segnet = TracerUniversalB7( device=device, batch_size=batch_size_seg, input_image_size=seg_mask_size, fp16=fp16, ) - self.fba = FBAMatting( + self._cascade_psp = CascadePSP( + device=device, + batch_size=batch_size_refine, + input_tensor_size=refine_mask_size, + fp16=fp16, + ) + self._fba = FBAMatting( batch_size=batch_size_matting, device=device, input_tensor_size=matting_mask_size, fp16=fp16, ) - self.trimap_generator = TrimapGenerator( + self._trimap_generator = TrimapGenerator( prob_threshold=trimap_prob_threshold, kernel_size=trimap_dilation, erosion_iters=trimap_erosion_iters, ) super(HiInterface, self).__init__( - pre_pipe=None, - seg_pipe=self.u2net, - post_pipe=MattingMethod( - matting_module=self.fba, - trimap_generator=self.trimap_generator, + pre_pipe=preprocess_pipeline, + seg_pipe=self._segnet, + post_pipe=CasMattingMethod( + refining_module=self._cascade_psp, + matting_module=self._fba, + trimap_generator=self._trimap_generator, device=device, ), device=device, diff --git a/carvekit/api/interface.py b/carvekit/api/interface.py index 364d247..88dbe98 100644 --- a/carvekit/api/interface.py +++ b/carvekit/api/interface.py @@ -12,8 +12,8 @@ from carvekit.ml.wrap.deeplab_v3 import DeepLabV3 from carvekit.ml.wrap.u2net import U2NET from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 -from carvekit.pipelines.preprocessing import PreprocessingStub -from carvekit.pipelines.postprocessing import MattingMethod +from carvekit.pipelines.preprocessing import PreprocessingStub, AutoScene +from carvekit.pipelines.postprocessing import MattingMethod, CasMattingMethod from carvekit.utils.image_utils import load_image from carvekit.utils.mask_utils import apply_mask from carvekit.utils.pool_utils import thread_pool_processing @@ -22,19 +22,19 @@ class Interface: def __init__( self, - seg_pipe: Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7], - pre_pipe: Optional[Union[PreprocessingStub]] = None, - post_pipe: Optional[Union[MattingMethod]] = None, + seg_pipe: Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]], + pre_pipe: Optional[Union[PreprocessingStub, AutoScene]] = None, + post_pipe: Optional[Union[MattingMethod, CasMattingMethod]] = None, device="cpu", ): """ Initializes an object for interacting with pipelines and other components of the CarveKit framework. Args: - pre_pipe: Initialized pre-processing pipeline object - seg_pipe: Initialized segmentation network object - post_pipe: Initialized postprocessing pipeline object - device: The processing device that will be used to apply the masks to the images. 
+ pre_pipe (Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]): Initialized pre-processing pipeline object + seg_pipe (Optional[Union[PreprocessingStub]]): Initialized segmentation network object + post_pipe (Optional[Union[MattingMethod]]): Initialized postprocessing pipeline object + device (Literal[cpu, cuda], default=cpu): The processing device that will be used to apply the masks to the images. """ self.device = device self.preprocessing_pipeline = pre_pipe @@ -53,6 +53,11 @@ def __call__( Returns: List of images without background as PIL.Image.Image instances """ + if self.segmentation_pipeline is None: + raise ValueError( + "Segmentation pipeline is not initialized." + "Override the class or pass the pipeline to the constructor." + ) images = thread_pool_processing(load_image, images) if self.preprocessing_pipeline is not None: masks: List[Image.Image] = self.preprocessing_pipeline( diff --git a/carvekit/ml/arch/cascadepsp/__init__.py b/carvekit/ml/arch/cascadepsp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/carvekit/ml/arch/cascadepsp/extractors.py b/carvekit/ml/arch/cascadepsp/extractors.py new file mode 100644 index 0000000..7967796 --- /dev/null +++ b/carvekit/ml/arch/cascadepsp/extractors.py @@ -0,0 +1,127 @@ +""" +Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/hkchengrex/CascadePSP +License: MIT License +""" +import math + +import torch.nn as nn + + +def conv3x3(in_planes, out_planes, stride=1, dilation=1): + return nn.Conv2d( + in_planes, + out_planes, + kernel_size=3, + stride=stride, + padding=dilation, + dilation=dilation, + bias=False, + ) + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d( + planes, + planes, + kernel_size=3, + stride=stride, + dilation=dilation, + padding=dilation, + bias=False, + ) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * 4) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + def __init__(self, block, layers=(3, 4, 23, 3)): + self.inplanes = 64 + super(ResNet, self).__init__() + self.conv1 = nn.Conv2d(6, 64, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2.0 / n)) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + def 
_make_layer(self, block, planes, blocks, stride=1, dilation=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d( + self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [block(self.inplanes, planes, stride, downsample)] + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, dilation=dilation)) + + return nn.Sequential(*layers) + + def forward(self, x): + x_1 = self.conv1(x) # /2 + x = self.bn1(x_1) + x = self.relu(x) + x = self.maxpool(x) # /2 + + x_2 = self.layer1(x) + x = self.layer2(x_2) # /2 + x = self.layer3(x) + x = self.layer4(x) + + return x, x_1, x_2 + + +def resnet50(): + model = ResNet(Bottleneck, [3, 4, 6, 3]) + return model diff --git a/carvekit/ml/arch/cascadepsp/pspnet.py b/carvekit/ml/arch/cascadepsp/pspnet.py new file mode 100644 index 0000000..350719e --- /dev/null +++ b/carvekit/ml/arch/cascadepsp/pspnet.py @@ -0,0 +1,194 @@ +""" +Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/hkchengrex/CascadePSP +License: MIT License +""" + +import torch +from torch import nn +from torch.nn import functional as F +from carvekit.ml.arch.cascadepsp.extractors import resnet50 + + +class PSPModule(nn.Module): + def __init__(self, features, out_features=1024, sizes=(1, 2, 3, 6)): + super().__init__() + self.stages = [] + self.stages = nn.ModuleList( + [self._make_stage(features, size) for size in sizes] + ) + self.bottleneck = nn.Conv2d( + features * (len(sizes) + 1), out_features, kernel_size=1 + ) + self.relu = nn.ReLU(inplace=True) + + def _make_stage(self, features, size): + prior = nn.AdaptiveAvgPool2d(output_size=(size, size)) + conv = nn.Conv2d(features, features, kernel_size=1, bias=False) + return nn.Sequential(prior, conv) + + def forward(self, feats): + h, w = feats.size(2), feats.size(3) + set_priors = [ + F.interpolate( + input=stage(feats), size=(h, w), mode="bilinear", align_corners=False + ) + for stage in self.stages + ] + priors = set_priors + [feats] + bottle = self.bottleneck(torch.cat(priors, 1)) + return self.relu(bottle) + + +class PSPUpsample(nn.Module): + def __init__(self, x_channels, in_channels, out_channels): + super().__init__() + self.conv = nn.Sequential( + nn.BatchNorm2d(in_channels), + nn.ReLU(inplace=True), + nn.Conv2d(in_channels, out_channels, 3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, 3, padding=1), + ) + + self.conv2 = nn.Sequential( + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, 3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, 3, padding=1), + ) + + self.shortcut = nn.Conv2d(x_channels, out_channels, kernel_size=1) + + def forward(self, x, up): + x = F.interpolate(input=x, scale_factor=2, mode="bilinear", align_corners=False) + + p = self.conv(torch.cat([x, up], 1).type(x.type())) + sc = self.shortcut(x) + + p = p + sc + + p2 = self.conv2(p) + + return p + p2 + + +class RefinementModule(nn.Module): + def __init__(self): + super().__init__() + + self.feats = resnet50() + self.psp = PSPModule(2048, 1024, (1, 2, 3, 6)) + + self.up_1 = PSPUpsample(1024, 1024 + 256, 512) + self.up_2 = PSPUpsample(512, 512 + 64, 256) + self.up_3 = PSPUpsample(256, 256 + 3, 32) + + self.final_28 = 
nn.Sequential( + nn.Conv2d(1024, 32, kernel_size=1), + nn.ReLU(inplace=True), + nn.Conv2d(32, 1, kernel_size=1), + ) + + self.final_56 = nn.Sequential( + nn.Conv2d(512, 32, kernel_size=1), + nn.ReLU(inplace=True), + nn.Conv2d(32, 1, kernel_size=1), + ) + + self.final_11 = nn.Conv2d(32 + 3, 32, kernel_size=1) + self.final_21 = nn.Conv2d(32, 1, kernel_size=1) + + def forward(self, x, seg, inter_s8=None, inter_s4=None): + + images = {} + + """ + First iteration, s8 output + """ + if inter_s8 is None: + p = torch.cat((x, seg, seg, seg), 1) + + f, f_1, f_2 = self.feats(p) + p = self.psp(f) + + inter_s8 = self.final_28(p) + r_inter_s8 = F.interpolate( + inter_s8, scale_factor=8, mode="bilinear", align_corners=False + ) + r_inter_tanh_s8 = torch.tanh(r_inter_s8) + + images["pred_28"] = torch.sigmoid(r_inter_s8) + images["out_28"] = r_inter_s8 + else: + r_inter_tanh_s8 = inter_s8 + + """ + Second iteration, s8 output + """ + if inter_s4 is None: + p = torch.cat((x, seg, r_inter_tanh_s8, r_inter_tanh_s8), 1) + + f, f_1, f_2 = self.feats(p) + p = self.psp(f) + inter_s8_2 = self.final_28(p) + r_inter_s8_2 = F.interpolate( + inter_s8_2, scale_factor=8, mode="bilinear", align_corners=False + ) + r_inter_tanh_s8_2 = torch.tanh(r_inter_s8_2) + + p = self.up_1(p, f_2) + + inter_s4 = self.final_56(p) + r_inter_s4 = F.interpolate( + inter_s4, scale_factor=4, mode="bilinear", align_corners=False + ) + r_inter_tanh_s4 = torch.tanh(r_inter_s4) + + images["pred_28_2"] = torch.sigmoid(r_inter_s8_2) + images["out_28_2"] = r_inter_s8_2 + images["pred_56"] = torch.sigmoid(r_inter_s4) + images["out_56"] = r_inter_s4 + else: + r_inter_tanh_s8_2 = inter_s8 + r_inter_tanh_s4 = inter_s4 + + """ + Third iteration, s1 output + """ + p = torch.cat((x, seg, r_inter_tanh_s8_2, r_inter_tanh_s4), 1) + + f, f_1, f_2 = self.feats(p) + p = self.psp(f) + inter_s8_3 = self.final_28(p) + r_inter_s8_3 = F.interpolate( + inter_s8_3, scale_factor=8, mode="bilinear", align_corners=False + ) + + p = self.up_1(p, f_2) + inter_s4_2 = self.final_56(p) + r_inter_s4_2 = F.interpolate( + inter_s4_2, scale_factor=4, mode="bilinear", align_corners=False + ) + p = self.up_2(p, f_1) + p = self.up_3(p, x) + + """ + Final output + """ + p = F.relu(self.final_11(torch.cat([p, x], 1)), inplace=True) + p = self.final_21(p) + + pred_224 = torch.sigmoid(p) + + images["pred_224"] = pred_224 + images["out_224"] = p + images["pred_28_3"] = torch.sigmoid(r_inter_s8_3) + images["pred_56_2"] = torch.sigmoid(r_inter_s4_2) + images["out_28_3"] = r_inter_s8_3 + images["out_56_2"] = r_inter_s4_2 + + return images diff --git a/carvekit/ml/arch/cascadepsp/utils.py b/carvekit/ml/arch/cascadepsp/utils.py new file mode 100644 index 0000000..f63a524 --- /dev/null +++ b/carvekit/ml/arch/cascadepsp/utils.py @@ -0,0 +1,166 @@ +import torch +import torch.nn.functional as F + + +def resize_max_side(im, size, method): + h, w = im.shape[-2:] + max_side = max(h, w) + ratio = size / max_side + if method in ["bilinear", "bicubic"]: + return F.interpolate(im, scale_factor=ratio, mode=method, align_corners=False) + else: + return F.interpolate(im, scale_factor=ratio, mode=method) + + +def process_high_res_im(model, im, seg, L=900): + stride = L // 2 + + _, _, h, w = seg.shape + if max(h, w) > L: + im_small = resize_max_side(im, L, "area") + seg_small = resize_max_side(seg, L, "area") + elif max(h, w) < L: + im_small = resize_max_side(im, L, "bicubic") + seg_small = resize_max_side(seg, L, "bilinear") + else: + im_small = im + seg_small = seg + + images = 
model.safe_forward(im_small, seg_small) + + pred_224 = images["pred_224"] + pred_56 = images["pred_56_2"] + + for new_size in [max(h, w)]: + im_small = resize_max_side(im, new_size, "area") + seg_small = resize_max_side(seg, new_size, "area") + _, _, h, w = seg_small.shape + + combined_224 = torch.zeros_like(seg_small) + combined_weight = torch.zeros_like(seg_small) + + r_pred_224 = ( + F.interpolate(pred_224, size=(h, w), mode="bilinear", align_corners=False) + > 0.5 + ).float() * 2 - 1 + r_pred_56 = ( + F.interpolate(pred_56, size=(h, w), mode="bilinear", align_corners=False) + * 2 + - 1 + ) + + padding = 16 + step_size = stride - padding * 2 + step_len = L + + used_start_idx = {} + for x_idx in range((w) // step_size + 1): + for y_idx in range((h) // step_size + 1): + + start_x = x_idx * step_size + start_y = y_idx * step_size + end_x = start_x + step_len + end_y = start_y + step_len + + # Shift when required + if end_y > h: + end_y = h + start_y = h - step_len + if end_x > w: + end_x = w + start_x = w - step_len + + # Bound x/y range + start_x = max(0, start_x) + start_y = max(0, start_y) + end_x = min(w, end_x) + end_y = min(h, end_y) + + # The same crop might appear twice due to bounding/shifting + start_idx = start_y * w + start_x + if start_idx in used_start_idx: + continue + else: + used_start_idx[start_idx] = True + + # Take crop + im_part = im_small[:, :, start_y:end_y, start_x:end_x] + seg_224_part = r_pred_224[:, :, start_y:end_y, start_x:end_x] + seg_56_part = r_pred_56[:, :, start_y:end_y, start_x:end_x] + + # Skip when it is not an interesting crop anyway + seg_part_norm = (seg_224_part > 0).float() + high_thres = 0.9 + low_thres = 0.1 + if (seg_part_norm.mean() > high_thres) or ( + seg_part_norm.mean() < low_thres + ): + continue + grid_images = model.safe_forward(im_part, seg_224_part, seg_56_part) + grid_pred_224 = grid_images["pred_224"] + + # Padding + pred_sx = pred_sy = 0 + pred_ex = step_len + pred_ey = step_len + + if start_x != 0: + start_x += padding + pred_sx += padding + if start_y != 0: + start_y += padding + pred_sy += padding + if end_x != w: + end_x -= padding + pred_ex -= padding + if end_y != h: + end_y -= padding + pred_ey -= padding + + combined_224[:, :, start_y:end_y, start_x:end_x] += grid_pred_224[ + :, :, pred_sy:pred_ey, pred_sx:pred_ex + ] + + del grid_pred_224 + + # Used for averaging + combined_weight[:, :, start_y:end_y, start_x:end_x] += 1 + + # Final full resolution output + seg_norm = r_pred_224 / 2 + 0.5 + pred_224 = combined_224 / combined_weight + pred_224 = torch.where(combined_weight == 0, seg_norm, pred_224) + + _, _, h, w = seg.shape + images = {} + images["pred_224"] = F.interpolate( + pred_224, size=(h, w), mode="bilinear", align_corners=True + ) + + return images["pred_224"] + + +def process_im_single_pass(model, im, seg, L=900): + """ + A single pass version, aka global step only. 
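To make the sliding-window (local) pass above concrete, here is a small standalone sketch of its window arithmetic, assuming the default `L=900` and a 2048x2048 mask after the global pass. `process_im_single_pass` (whose definition starts just above) skips this tiling and runs only the global step, which presumably corresponds to `global_step_only=True` on the CascadePSP wrapper shown in the README:

``` python
L = 900                            # refinement window, matches the refine_mask_size default
stride = L // 2                    # 450
padding = 16
step_size = stride - padding * 2   # 418 px between window origins
h = w = 2048                       # assumed mask size after the global pass

seen, windows = set(), []
for x_idx in range(w // step_size + 1):
    for y_idx in range(h // step_size + 1):
        sx, sy = x_idx * step_size, y_idx * step_size
        ex, ey = sx + L, sy + L
        if ey > h: sy, ey = h - L, h   # shift the last row/column back inside the mask
        if ex > w: sx, ex = w - L, w
        sx, sy = max(0, sx), max(0, sy)
        if (sy, sx) in seen:           # duplicate border crops are skipped, as with used_start_idx
            continue
        seen.add((sy, sx))
        windows.append((sx, sy, ex, ey))

print(len(windows), "crops of", L, "px; neighbouring crops overlap by", L - step_size, "px")
```

Each crop is refined separately, and only its interior (minus the 16 px padding on interior edges) is accumulated and averaged into the full-resolution prediction.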
+ """ + + _, _, h, w = im.shape + if max(h, w) < L: + im = resize_max_side(im, L, "bicubic") + seg = resize_max_side(seg, L, "bilinear") + + if max(h, w) > L: + im = resize_max_side(im, L, "area") + seg = resize_max_side(seg, L, "area") + + images = model.safe_forward(im, seg) + + if max(h, w) < L: + images["pred_224"] = F.interpolate(images["pred_224"], size=(h, w), mode="area") + elif max(h, w) > L: + images["pred_224"] = F.interpolate( + images["pred_224"], size=(h, w), mode="bilinear", align_corners=True + ) + + return images["pred_224"] diff --git a/carvekit/ml/arch/yolov4/__init__.py b/carvekit/ml/arch/yolov4/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/carvekit/ml/arch/yolov4/models.py b/carvekit/ml/arch/yolov4/models.py new file mode 100644 index 0000000..af094f2 --- /dev/null +++ b/carvekit/ml/arch/yolov4/models.py @@ -0,0 +1,557 @@ +""" +Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4 +License: Apache License 2.0 +""" +import torch +from torch import nn +import torch.nn.functional as F +from carvekit.ml.arch.yolov4.yolo_layer import YoloLayer + + +def get_region_boxes(boxes_and_confs): + # print('Getting boxes from boxes and confs ...') + + boxes_list = [] + confs_list = [] + + for item in boxes_and_confs: + boxes_list.append(item[0]) + confs_list.append(item[1]) + + # boxes: [batch, num1 + num2 + num3, 1, 4] + # confs: [batch, num1 + num2 + num3, num_classes] + boxes = torch.cat(boxes_list, dim=1) + confs = torch.cat(confs_list, dim=1) + + return [boxes, confs] + + +class Mish(torch.nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + x = x * (torch.tanh(torch.nn.functional.softplus(x))) + return x + + +class Upsample(nn.Module): + def __init__(self): + super(Upsample, self).__init__() + + def forward(self, x, target_size, inference=False): + assert x.data.dim() == 4 + # _, _, tH, tW = target_size + + if inference: + + # B = x.data.size(0) + # C = x.data.size(1) + # H = x.data.size(2) + # W = x.data.size(3) + + return ( + x.view(x.size(0), x.size(1), x.size(2), 1, x.size(3), 1) + .expand( + x.size(0), + x.size(1), + x.size(2), + target_size[2] // x.size(2), + x.size(3), + target_size[3] // x.size(3), + ) + .contiguous() + .view(x.size(0), x.size(1), target_size[2], target_size[3]) + ) + else: + return F.interpolate( + x, size=(target_size[2], target_size[3]), mode="nearest" + ) + + +class Conv_Bn_Activation(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + activation, + bn=True, + bias=False, + ): + super().__init__() + pad = (kernel_size - 1) // 2 + + self.conv = nn.ModuleList() + if bias: + self.conv.append( + nn.Conv2d(in_channels, out_channels, kernel_size, stride, pad) + ) + else: + self.conv.append( + nn.Conv2d( + in_channels, out_channels, kernel_size, stride, pad, bias=False + ) + ) + if bn: + self.conv.append(nn.BatchNorm2d(out_channels)) + if activation == "mish": + self.conv.append(Mish()) + elif activation == "relu": + self.conv.append(nn.ReLU(inplace=True)) + elif activation == "leaky": + self.conv.append(nn.LeakyReLU(0.1, inplace=True)) + elif activation == "linear": + pass + else: + raise Exception("activation error") + + def forward(self, x): + for l in self.conv: + x = l(x) + return x + + +class ResBlock(nn.Module): + """ + Sequential residual blocks each of which consists of \ + two convolution layers. + Args: + ch (int): number of input and output channels. 
+ nblocks (int): number of residual blocks. + shortcut (bool): if True, residual tensor addition is enabled. + """ + + def __init__(self, ch, nblocks=1, shortcut=True): + super().__init__() + self.shortcut = shortcut + self.module_list = nn.ModuleList() + for i in range(nblocks): + resblock_one = nn.ModuleList() + resblock_one.append(Conv_Bn_Activation(ch, ch, 1, 1, "mish")) + resblock_one.append(Conv_Bn_Activation(ch, ch, 3, 1, "mish")) + self.module_list.append(resblock_one) + + def forward(self, x): + for module in self.module_list: + h = x + for res in module: + h = res(h) + x = x + h if self.shortcut else h + return x + + +class DownSample1(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = Conv_Bn_Activation(3, 32, 3, 1, "mish") + + self.conv2 = Conv_Bn_Activation(32, 64, 3, 2, "mish") + self.conv3 = Conv_Bn_Activation(64, 64, 1, 1, "mish") + # [route] + # layers = -2 + self.conv4 = Conv_Bn_Activation(64, 64, 1, 1, "mish") + + self.conv5 = Conv_Bn_Activation(64, 32, 1, 1, "mish") + self.conv6 = Conv_Bn_Activation(32, 64, 3, 1, "mish") + # [shortcut] + # from=-3 + # activation = linear + + self.conv7 = Conv_Bn_Activation(64, 64, 1, 1, "mish") + # [route] + # layers = -1, -7 + self.conv8 = Conv_Bn_Activation(128, 64, 1, 1, "mish") + + def forward(self, input): + x1 = self.conv1(input) + x2 = self.conv2(x1) + x3 = self.conv3(x2) + # route -2 + x4 = self.conv4(x2) + x5 = self.conv5(x4) + x6 = self.conv6(x5) + # shortcut -3 + x6 = x6 + x4 + + x7 = self.conv7(x6) + # [route] + # layers = -1, -7 + x7 = torch.cat([x7, x3], dim=1) + x8 = self.conv8(x7) + return x8 + + +class DownSample2(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = Conv_Bn_Activation(64, 128, 3, 2, "mish") + self.conv2 = Conv_Bn_Activation(128, 64, 1, 1, "mish") + # r -2 + self.conv3 = Conv_Bn_Activation(128, 64, 1, 1, "mish") + + self.resblock = ResBlock(ch=64, nblocks=2) + + # s -3 + self.conv4 = Conv_Bn_Activation(64, 64, 1, 1, "mish") + # r -1 -10 + self.conv5 = Conv_Bn_Activation(128, 128, 1, 1, "mish") + + def forward(self, input): + x1 = self.conv1(input) + x2 = self.conv2(x1) + x3 = self.conv3(x1) + + r = self.resblock(x3) + x4 = self.conv4(r) + + x4 = torch.cat([x4, x2], dim=1) + x5 = self.conv5(x4) + return x5 + + +class DownSample3(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = Conv_Bn_Activation(128, 256, 3, 2, "mish") + self.conv2 = Conv_Bn_Activation(256, 128, 1, 1, "mish") + self.conv3 = Conv_Bn_Activation(256, 128, 1, 1, "mish") + + self.resblock = ResBlock(ch=128, nblocks=8) + self.conv4 = Conv_Bn_Activation(128, 128, 1, 1, "mish") + self.conv5 = Conv_Bn_Activation(256, 256, 1, 1, "mish") + + def forward(self, input): + x1 = self.conv1(input) + x2 = self.conv2(x1) + x3 = self.conv3(x1) + + r = self.resblock(x3) + x4 = self.conv4(r) + + x4 = torch.cat([x4, x2], dim=1) + x5 = self.conv5(x4) + return x5 + + +class DownSample4(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = Conv_Bn_Activation(256, 512, 3, 2, "mish") + self.conv2 = Conv_Bn_Activation(512, 256, 1, 1, "mish") + self.conv3 = Conv_Bn_Activation(512, 256, 1, 1, "mish") + + self.resblock = ResBlock(ch=256, nblocks=8) + self.conv4 = Conv_Bn_Activation(256, 256, 1, 1, "mish") + self.conv5 = Conv_Bn_Activation(512, 512, 1, 1, "mish") + + def forward(self, input): + x1 = self.conv1(input) + x2 = self.conv2(x1) + x3 = self.conv3(x1) + + r = self.resblock(x3) + x4 = self.conv4(r) + + x4 = torch.cat([x4, x2], dim=1) + x5 = self.conv5(x4) + return x5 + + +class 
DownSample5(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = Conv_Bn_Activation(512, 1024, 3, 2, "mish") + self.conv2 = Conv_Bn_Activation(1024, 512, 1, 1, "mish") + self.conv3 = Conv_Bn_Activation(1024, 512, 1, 1, "mish") + + self.resblock = ResBlock(ch=512, nblocks=4) + self.conv4 = Conv_Bn_Activation(512, 512, 1, 1, "mish") + self.conv5 = Conv_Bn_Activation(1024, 1024, 1, 1, "mish") + + def forward(self, input): + x1 = self.conv1(input) + x2 = self.conv2(x1) + x3 = self.conv3(x1) + + r = self.resblock(x3) + x4 = self.conv4(r) + + x4 = torch.cat([x4, x2], dim=1) + x5 = self.conv5(x4) + return x5 + + +class Neck(nn.Module): + def __init__(self, inference=False): + super().__init__() + self.inference = inference + + self.conv1 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky") + self.conv2 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky") + self.conv3 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky") + # SPP + self.maxpool1 = nn.MaxPool2d(kernel_size=5, stride=1, padding=5 // 2) + self.maxpool2 = nn.MaxPool2d(kernel_size=9, stride=1, padding=9 // 2) + self.maxpool3 = nn.MaxPool2d(kernel_size=13, stride=1, padding=13 // 2) + + # R -1 -3 -5 -6 + # SPP + self.conv4 = Conv_Bn_Activation(2048, 512, 1, 1, "leaky") + self.conv5 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky") + self.conv6 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky") + self.conv7 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + # UP + self.upsample1 = Upsample() + # R 85 + self.conv8 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + # R -1 -3 + self.conv9 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + self.conv10 = Conv_Bn_Activation(256, 512, 3, 1, "leaky") + self.conv11 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + self.conv12 = Conv_Bn_Activation(256, 512, 3, 1, "leaky") + self.conv13 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + self.conv14 = Conv_Bn_Activation(256, 128, 1, 1, "leaky") + # UP + self.upsample2 = Upsample() + # R 54 + self.conv15 = Conv_Bn_Activation(256, 128, 1, 1, "leaky") + # R -1 -3 + self.conv16 = Conv_Bn_Activation(256, 128, 1, 1, "leaky") + self.conv17 = Conv_Bn_Activation(128, 256, 3, 1, "leaky") + self.conv18 = Conv_Bn_Activation(256, 128, 1, 1, "leaky") + self.conv19 = Conv_Bn_Activation(128, 256, 3, 1, "leaky") + self.conv20 = Conv_Bn_Activation(256, 128, 1, 1, "leaky") + + def forward(self, input, downsample4, downsample3, inference=False): + x1 = self.conv1(input) + x2 = self.conv2(x1) + x3 = self.conv3(x2) + # SPP + m1 = self.maxpool1(x3) + m2 = self.maxpool2(x3) + m3 = self.maxpool3(x3) + spp = torch.cat([m3, m2, m1, x3], dim=1) + # SPP end + x4 = self.conv4(spp) + x5 = self.conv5(x4) + x6 = self.conv6(x5) + x7 = self.conv7(x6) + # UP + up = self.upsample1(x7, downsample4.size(), self.inference) + # R 85 + x8 = self.conv8(downsample4) + # R -1 -3 + x8 = torch.cat([x8, up], dim=1) + + x9 = self.conv9(x8) + x10 = self.conv10(x9) + x11 = self.conv11(x10) + x12 = self.conv12(x11) + x13 = self.conv13(x12) + x14 = self.conv14(x13) + + # UP + up = self.upsample2(x14, downsample3.size(), self.inference) + # R 54 + x15 = self.conv15(downsample3) + # R -1 -3 + x15 = torch.cat([x15, up], dim=1) + + x16 = self.conv16(x15) + x17 = self.conv17(x16) + x18 = self.conv18(x17) + x19 = self.conv19(x18) + x20 = self.conv20(x19) + return x20, x13, x6 + + +class Yolov4Head(nn.Module): + def __init__(self, output_ch, n_classes, inference=False): + super().__init__() + self.inference = inference + + self.conv1 = Conv_Bn_Activation(128, 256, 3, 1, "leaky") + self.conv2 = Conv_Bn_Activation( + 256, output_ch, 
1, 1, "linear", bn=False, bias=True + ) + + self.yolo1 = YoloLayer( + anchor_mask=[0, 1, 2], + num_classes=n_classes, + anchors=[ + 12, + 16, + 19, + 36, + 40, + 28, + 36, + 75, + 76, + 55, + 72, + 146, + 142, + 110, + 192, + 243, + 459, + 401, + ], + num_anchors=9, + stride=8, + ) + + # R -4 + self.conv3 = Conv_Bn_Activation(128, 256, 3, 2, "leaky") + + # R -1 -16 + self.conv4 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + self.conv5 = Conv_Bn_Activation(256, 512, 3, 1, "leaky") + self.conv6 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + self.conv7 = Conv_Bn_Activation(256, 512, 3, 1, "leaky") + self.conv8 = Conv_Bn_Activation(512, 256, 1, 1, "leaky") + self.conv9 = Conv_Bn_Activation(256, 512, 3, 1, "leaky") + self.conv10 = Conv_Bn_Activation( + 512, output_ch, 1, 1, "linear", bn=False, bias=True + ) + + self.yolo2 = YoloLayer( + anchor_mask=[3, 4, 5], + num_classes=n_classes, + anchors=[ + 12, + 16, + 19, + 36, + 40, + 28, + 36, + 75, + 76, + 55, + 72, + 146, + 142, + 110, + 192, + 243, + 459, + 401, + ], + num_anchors=9, + stride=16, + ) + + # R -4 + self.conv11 = Conv_Bn_Activation(256, 512, 3, 2, "leaky") + + # R -1 -37 + self.conv12 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky") + self.conv13 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky") + self.conv14 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky") + self.conv15 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky") + self.conv16 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky") + self.conv17 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky") + self.conv18 = Conv_Bn_Activation( + 1024, output_ch, 1, 1, "linear", bn=False, bias=True + ) + + self.yolo3 = YoloLayer( + anchor_mask=[6, 7, 8], + num_classes=n_classes, + anchors=[ + 12, + 16, + 19, + 36, + 40, + 28, + 36, + 75, + 76, + 55, + 72, + 146, + 142, + 110, + 192, + 243, + 459, + 401, + ], + num_anchors=9, + stride=32, + ) + + def forward(self, input1, input2, input3): + x1 = self.conv1(input1) + x2 = self.conv2(x1) + + x3 = self.conv3(input1) + # R -1 -16 + x3 = torch.cat([x3, input2], dim=1) + x4 = self.conv4(x3) + x5 = self.conv5(x4) + x6 = self.conv6(x5) + x7 = self.conv7(x6) + x8 = self.conv8(x7) + x9 = self.conv9(x8) + x10 = self.conv10(x9) + + # R -4 + x11 = self.conv11(x8) + # R -1 -37 + x11 = torch.cat([x11, input3], dim=1) + + x12 = self.conv12(x11) + x13 = self.conv13(x12) + x14 = self.conv14(x13) + x15 = self.conv15(x14) + x16 = self.conv16(x15) + x17 = self.conv17(x16) + x18 = self.conv18(x17) + + if self.inference: + y1 = self.yolo1(x2) + y2 = self.yolo2(x10) + y3 = self.yolo3(x18) + + return get_region_boxes([y1, y2, y3]) + + else: + return [x2, x10, x18] + + +class Yolov4(nn.Module): + def __init__(self, n_classes=80, inference=False): + super().__init__() + + output_ch = (4 + 1 + n_classes) * 3 + + # backbone + self.down1 = DownSample1() + self.down2 = DownSample2() + self.down3 = DownSample3() + self.down4 = DownSample4() + self.down5 = DownSample5() + # neck + self.neek = Neck(inference) + + # head + self.head = Yolov4Head(output_ch, n_classes, inference) + + def forward(self, input): + d1 = self.down1(input) + d2 = self.down2(d1) + d3 = self.down3(d2) + d4 = self.down4(d3) + d5 = self.down5(d4) + + x20, x13, x6 = self.neek(d5, d4, d3) + + output = self.head(x20, x13, x6) + return output diff --git a/carvekit/ml/arch/yolov4/utils.py b/carvekit/ml/arch/yolov4/utils.py new file mode 100644 index 0000000..53cc9e9 --- /dev/null +++ b/carvekit/ml/arch/yolov4/utils.py @@ -0,0 +1,105 @@ +import numpy as np + + +def nms_cpu(boxes, confs, nms_thresh=0.5, min_mode=False): + # 
print(boxes.shape) + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + + areas = (x2 - x1) * (y2 - y1) + order = confs.argsort()[::-1] + + keep = [] + while order.size > 0: + idx_self = order[0] + idx_other = order[1:] + + keep.append(idx_self) + + xx1 = np.maximum(x1[idx_self], x1[idx_other]) + yy1 = np.maximum(y1[idx_self], y1[idx_other]) + xx2 = np.minimum(x2[idx_self], x2[idx_other]) + yy2 = np.minimum(y2[idx_self], y2[idx_other]) + + w = np.maximum(0.0, xx2 - xx1) + h = np.maximum(0.0, yy2 - yy1) + inter = w * h + + if min_mode: + over = inter / np.minimum(areas[order[0]], areas[order[1:]]) + else: + over = inter / (areas[order[0]] + areas[order[1:]] - inter) + + inds = np.where(over <= nms_thresh)[0] + order = order[inds + 1] + + return np.array(keep) + + +def post_processing(conf_thresh, nms_thresh, output): + # anchors = [12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401] + # num_anchors = 9 + # anchor_masks = [[0, 1, 2], [3, 4, 5], [6, 7, 8]] + # strides = [8, 16, 32] + # anchor_step = len(anchors) // num_anchors + + # [batch, num, 1, 4] + box_array = output[0] + # [batch, num, num_classes] + confs = output[1] + + if type(box_array).__name__ != "ndarray": + box_array = box_array.cpu().detach().numpy() + confs = confs.cpu().detach().numpy() + + num_classes = confs.shape[2] + + # [batch, num, 4] + box_array = box_array[:, :, 0] + + # [batch, num, num_classes] --> [batch, num] + max_conf = np.max(confs, axis=2) + max_id = np.argmax(confs, axis=2) + + bboxes_batch = [] + for i in range(box_array.shape[0]): + + argwhere = max_conf[i] > conf_thresh + l_box_array = box_array[i, argwhere, :] + l_max_conf = max_conf[i, argwhere] + l_max_id = max_id[i, argwhere] + + bboxes = [] + # nms for each class + for j in range(num_classes): + + cls_argwhere = l_max_id == j + ll_box_array = l_box_array[cls_argwhere, :] + ll_max_conf = l_max_conf[cls_argwhere] + ll_max_id = l_max_id[cls_argwhere] + + keep = nms_cpu(ll_box_array, ll_max_conf, nms_thresh) + + if keep.size > 0: + ll_box_array = ll_box_array[keep, :] + ll_max_conf = ll_max_conf[keep] + ll_max_id = ll_max_id[keep] + + for k in range(ll_box_array.shape[0]): + bboxes.append( + [ + ll_box_array[k, 0], + ll_box_array[k, 1], + ll_box_array[k, 2], + ll_box_array[k, 3], + ll_max_conf[k], + ll_max_conf[k], + ll_max_id[k], + ] + ) + + bboxes_batch.append(bboxes) + + return bboxes_batch diff --git a/carvekit/ml/arch/yolov4/yolo_layer.py b/carvekit/ml/arch/yolov4/yolo_layer.py new file mode 100644 index 0000000..637f659 --- /dev/null +++ b/carvekit/ml/arch/yolov4/yolo_layer.py @@ -0,0 +1,416 @@ +""" +Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. 
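A quick, self-contained check of the `nms_cpu` helper above, with three hand-made boxes in `(x1, y1, x2, y2)` form; the first two overlap heavily, so the lower-confidence one is suppressed at the default IoU threshold of 0.5:

``` python
import numpy as np
from carvekit.ml.arch.yolov4.utils import nms_cpu

boxes = np.array([[0.10, 0.10, 0.50, 0.50],
                  [0.12, 0.11, 0.52, 0.49],   # overlaps box 0 with IoU ~0.86
                  [0.70, 0.70, 0.95, 0.95]])  # far away from the others
confs = np.array([0.90, 0.60, 0.80])

keep = nms_cpu(boxes, confs, nms_thresh=0.5)
print(keep)  # [0 2]: box 1 is suppressed, survivors ordered by confidence
```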
+Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4 +License: Apache License 2.0 +""" +import numpy as np +import torch +import torch.nn as nn + + +def yolo_forward( + output, + conf_thresh, + num_classes, + anchors, + num_anchors, + scale_x_y, + only_objectness=1, + validation=False, +): + # Output would be invalid if it does not satisfy this assert + # assert (output.size(1) == (5 + num_classes) * num_anchors) + + # print(output.size()) + + # Slice the second dimension (channel) of output into: + # [ 2, 2, 1, num_classes, 2, 2, 1, num_classes, 2, 2, 1, num_classes ] + # And then into + # bxy = [ 6 ] bwh = [ 6 ] det_conf = [ 3 ] cls_conf = [ num_classes * 3 ] + batch = output.size(0) + H = output.size(2) + W = output.size(3) + + bxy_list = [] + bwh_list = [] + det_confs_list = [] + cls_confs_list = [] + + for i in range(num_anchors): + begin = i * (5 + num_classes) + end = (i + 1) * (5 + num_classes) + + bxy_list.append(output[:, begin : begin + 2]) + bwh_list.append(output[:, begin + 2 : begin + 4]) + det_confs_list.append(output[:, begin + 4 : begin + 5]) + cls_confs_list.append(output[:, begin + 5 : end]) + + # Shape: [batch, num_anchors * 2, H, W] + bxy = torch.cat(bxy_list, dim=1) + # Shape: [batch, num_anchors * 2, H, W] + bwh = torch.cat(bwh_list, dim=1) + + # Shape: [batch, num_anchors, H, W] + det_confs = torch.cat(det_confs_list, dim=1) + # Shape: [batch, num_anchors * H * W] + det_confs = det_confs.view(batch, num_anchors * H * W) + + # Shape: [batch, num_anchors * num_classes, H, W] + cls_confs = torch.cat(cls_confs_list, dim=1) + # Shape: [batch, num_anchors, num_classes, H * W] + cls_confs = cls_confs.view(batch, num_anchors, num_classes, H * W) + # Shape: [batch, num_anchors, num_classes, H * W] --> [batch, num_anchors * H * W, num_classes] + cls_confs = cls_confs.permute(0, 1, 3, 2).reshape( + batch, num_anchors * H * W, num_classes + ) + + # Apply sigmoid(), exp() and softmax() to slices + # + bxy = torch.sigmoid(bxy) * scale_x_y - 0.5 * (scale_x_y - 1) + bwh = torch.exp(bwh) + det_confs = torch.sigmoid(det_confs) + cls_confs = torch.sigmoid(cls_confs) + + # Prepare C-x, C-y, P-w, P-h (None of them are torch related) + grid_x = np.expand_dims( + np.expand_dims( + np.expand_dims(np.linspace(0, W - 1, W), axis=0).repeat(H, 0), axis=0 + ), + axis=0, + ) + grid_y = np.expand_dims( + np.expand_dims( + np.expand_dims(np.linspace(0, H - 1, H), axis=1).repeat(W, 1), axis=0 + ), + axis=0, + ) + # grid_x = torch.linspace(0, W - 1, W).reshape(1, 1, 1, W).repeat(1, 1, H, 1) + # grid_y = torch.linspace(0, H - 1, H).reshape(1, 1, H, 1).repeat(1, 1, 1, W) + + anchor_w = [] + anchor_h = [] + for i in range(num_anchors): + anchor_w.append(anchors[i * 2]) + anchor_h.append(anchors[i * 2 + 1]) + + device = None + cuda_check = output.is_cuda + if cuda_check: + device = output.get_device() + + bx_list = [] + by_list = [] + bw_list = [] + bh_list = [] + + # Apply C-x, C-y, P-w, P-h + for i in range(num_anchors): + ii = i * 2 + # Shape: [batch, 1, H, W] + bx = bxy[:, ii : ii + 1] + torch.tensor( + grid_x, device=device, dtype=torch.float32 + ) # grid_x.to(device=device, dtype=torch.float32) + # Shape: [batch, 1, H, W] + by = bxy[:, ii + 1 : ii + 2] + torch.tensor( + grid_y, device=device, dtype=torch.float32 + ) # grid_y.to(device=device, dtype=torch.float32) + # Shape: [batch, 1, H, W] + bw = bwh[:, ii : ii + 1] * anchor_w[i] + # Shape: [batch, 1, H, W] + bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i] + + bx_list.append(bx) + by_list.append(by) + bw_list.append(bw) + bh_list.append(bh) + + 
######################################## + # Figure out bboxes from slices # + ######################################## + + # Shape: [batch, num_anchors, H, W] + bx = torch.cat(bx_list, dim=1) + # Shape: [batch, num_anchors, H, W] + by = torch.cat(by_list, dim=1) + # Shape: [batch, num_anchors, H, W] + bw = torch.cat(bw_list, dim=1) + # Shape: [batch, num_anchors, H, W] + bh = torch.cat(bh_list, dim=1) + + # Shape: [batch, 2 * num_anchors, H, W] + bx_bw = torch.cat((bx, bw), dim=1) + # Shape: [batch, 2 * num_anchors, H, W] + by_bh = torch.cat((by, bh), dim=1) + + # normalize coordinates to [0, 1] + bx_bw /= W + by_bh /= H + + # Shape: [batch, num_anchors * H * W, 1] + bx = bx_bw[:, :num_anchors].view(batch, num_anchors * H * W, 1) + by = by_bh[:, :num_anchors].view(batch, num_anchors * H * W, 1) + bw = bx_bw[:, num_anchors:].view(batch, num_anchors * H * W, 1) + bh = by_bh[:, num_anchors:].view(batch, num_anchors * H * W, 1) + + bx1 = bx - bw * 0.5 + by1 = by - bh * 0.5 + bx2 = bx1 + bw + by2 = by1 + bh + + # Shape: [batch, num_anchors * h * w, 4] -> [batch, num_anchors * h * w, 1, 4] + boxes = torch.cat((bx1, by1, bx2, by2), dim=2).view( + batch, num_anchors * H * W, 1, 4 + ) + # boxes = boxes.repeat(1, 1, num_classes, 1) + + # boxes: [batch, num_anchors * H * W, 1, 4] + # cls_confs: [batch, num_anchors * H * W, num_classes] + # det_confs: [batch, num_anchors * H * W] + + det_confs = det_confs.view(batch, num_anchors * H * W, 1) + confs = cls_confs * det_confs + + # boxes: [batch, num_anchors * H * W, 1, 4] + # confs: [batch, num_anchors * H * W, num_classes] + + return boxes, confs + + +def yolo_forward_dynamic( + output, + conf_thresh, + num_classes, + anchors, + num_anchors, + scale_x_y, + only_objectness=1, + validation=False, +): + # Output would be invalid if it does not satisfy this assert + # assert (output.size(1) == (5 + num_classes) * num_anchors) + + # print(output.size()) + + # Slice the second dimension (channel) of output into: + # [ 2, 2, 1, num_classes, 2, 2, 1, num_classes, 2, 2, 1, num_classes ] + # And then into + # bxy = [ 6 ] bwh = [ 6 ] det_conf = [ 3 ] cls_conf = [ num_classes * 3 ] + # batch = output.size(0) + # H = output.size(2) + # W = output.size(3) + + bxy_list = [] + bwh_list = [] + det_confs_list = [] + cls_confs_list = [] + + for i in range(num_anchors): + begin = i * (5 + num_classes) + end = (i + 1) * (5 + num_classes) + + bxy_list.append(output[:, begin : begin + 2]) + bwh_list.append(output[:, begin + 2 : begin + 4]) + det_confs_list.append(output[:, begin + 4 : begin + 5]) + cls_confs_list.append(output[:, begin + 5 : end]) + + # Shape: [batch, num_anchors * 2, H, W] + bxy = torch.cat(bxy_list, dim=1) + # Shape: [batch, num_anchors * 2, H, W] + bwh = torch.cat(bwh_list, dim=1) + + # Shape: [batch, num_anchors, H, W] + det_confs = torch.cat(det_confs_list, dim=1) + # Shape: [batch, num_anchors * H * W] + det_confs = det_confs.view( + output.size(0), num_anchors * output.size(2) * output.size(3) + ) + + # Shape: [batch, num_anchors * num_classes, H, W] + cls_confs = torch.cat(cls_confs_list, dim=1) + # Shape: [batch, num_anchors, num_classes, H * W] + cls_confs = cls_confs.view( + output.size(0), num_anchors, num_classes, output.size(2) * output.size(3) + ) + # Shape: [batch, num_anchors, num_classes, H * W] --> [batch, num_anchors * H * W, num_classes] + cls_confs = cls_confs.permute(0, 1, 3, 2).reshape( + output.size(0), num_anchors * output.size(2) * output.size(3), num_classes + ) + + # Apply sigmoid(), exp() and softmax() to slices + # + bxy = 
torch.sigmoid(bxy) * scale_x_y - 0.5 * (scale_x_y - 1) + bwh = torch.exp(bwh) + det_confs = torch.sigmoid(det_confs) + cls_confs = torch.sigmoid(cls_confs) + + # Prepare C-x, C-y, P-w, P-h (None of them are torch related) + grid_x = np.expand_dims( + np.expand_dims( + np.expand_dims( + np.linspace(0, output.size(3) - 1, output.size(3)), axis=0 + ).repeat(output.size(2), 0), + axis=0, + ), + axis=0, + ) + grid_y = np.expand_dims( + np.expand_dims( + np.expand_dims( + np.linspace(0, output.size(2) - 1, output.size(2)), axis=1 + ).repeat(output.size(3), 1), + axis=0, + ), + axis=0, + ) + # grid_x = torch.linspace(0, W - 1, W).reshape(1, 1, 1, W).repeat(1, 1, H, 1) + # grid_y = torch.linspace(0, H - 1, H).reshape(1, 1, H, 1).repeat(1, 1, 1, W) + + anchor_w = [] + anchor_h = [] + for i in range(num_anchors): + anchor_w.append(anchors[i * 2]) + anchor_h.append(anchors[i * 2 + 1]) + + device = None + cuda_check = output.is_cuda + if cuda_check: + device = output.get_device() + + bx_list = [] + by_list = [] + bw_list = [] + bh_list = [] + + # Apply C-x, C-y, P-w, P-h + for i in range(num_anchors): + ii = i * 2 + # Shape: [batch, 1, H, W] + bx = bxy[:, ii : ii + 1] + torch.tensor( + grid_x, device=device, dtype=torch.float32 + ) # grid_x.to(device=device, dtype=torch.float32) + # Shape: [batch, 1, H, W] + by = bxy[:, ii + 1 : ii + 2] + torch.tensor( + grid_y, device=device, dtype=torch.float32 + ) # grid_y.to(device=device, dtype=torch.float32) + # Shape: [batch, 1, H, W] + bw = bwh[:, ii : ii + 1] * anchor_w[i] + # Shape: [batch, 1, H, W] + bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i] + + bx_list.append(bx) + by_list.append(by) + bw_list.append(bw) + bh_list.append(bh) + + ######################################## + # Figure out bboxes from slices # + ######################################## + + # Shape: [batch, num_anchors, H, W] + bx = torch.cat(bx_list, dim=1) + # Shape: [batch, num_anchors, H, W] + by = torch.cat(by_list, dim=1) + # Shape: [batch, num_anchors, H, W] + bw = torch.cat(bw_list, dim=1) + # Shape: [batch, num_anchors, H, W] + bh = torch.cat(bh_list, dim=1) + + # Shape: [batch, 2 * num_anchors, H, W] + bx_bw = torch.cat((bx, bw), dim=1) + # Shape: [batch, 2 * num_anchors, H, W] + by_bh = torch.cat((by, bh), dim=1) + + # normalize coordinates to [0, 1] + bx_bw /= output.size(3) + by_bh /= output.size(2) + + # Shape: [batch, num_anchors * H * W, 1] + bx = bx_bw[:, :num_anchors].view( + output.size(0), num_anchors * output.size(2) * output.size(3), 1 + ) + by = by_bh[:, :num_anchors].view( + output.size(0), num_anchors * output.size(2) * output.size(3), 1 + ) + bw = bx_bw[:, num_anchors:].view( + output.size(0), num_anchors * output.size(2) * output.size(3), 1 + ) + bh = by_bh[:, num_anchors:].view( + output.size(0), num_anchors * output.size(2) * output.size(3), 1 + ) + + bx1 = bx - bw * 0.5 + by1 = by - bh * 0.5 + bx2 = bx1 + bw + by2 = by1 + bh + + # Shape: [batch, num_anchors * h * w, 4] -> [batch, num_anchors * h * w, 1, 4] + boxes = torch.cat((bx1, by1, bx2, by2), dim=2).view( + output.size(0), num_anchors * output.size(2) * output.size(3), 1, 4 + ) + # boxes = boxes.repeat(1, 1, num_classes, 1) + + # boxes: [batch, num_anchors * H * W, 1, 4] + # cls_confs: [batch, num_anchors * H * W, num_classes] + # det_confs: [batch, num_anchors * H * W] + + det_confs = det_confs.view( + output.size(0), num_anchors * output.size(2) * output.size(3), 1 + ) + confs = cls_confs * det_confs + + # boxes: [batch, num_anchors * H * W, 1, 4] + # confs: [batch, num_anchors * H * W, num_classes] + + 
return boxes, confs + + +class YoloLayer(nn.Module): + """Yolo layer + model_out: while inference,is post-processing inside or outside the model + true:outside + """ + + def __init__( + self, + anchor_mask=[], + num_classes=0, + anchors=[], + num_anchors=1, + stride=32, + model_out=False, + ): + super(YoloLayer, self).__init__() + self.anchor_mask = anchor_mask + self.num_classes = num_classes + self.anchors = anchors + self.num_anchors = num_anchors + self.anchor_step = len(anchors) // num_anchors + self.coord_scale = 1 + self.noobject_scale = 1 + self.object_scale = 5 + self.class_scale = 1 + self.thresh = 0.6 + self.stride = stride + self.seen = 0 + self.scale_x_y = 1 + + self.model_out = model_out + + def forward(self, output, target=None): + if self.training: + return output + masked_anchors = [] + for m in self.anchor_mask: + masked_anchors += self.anchors[ + m * self.anchor_step : (m + 1) * self.anchor_step + ] + masked_anchors = [anchor / self.stride for anchor in masked_anchors] + + return yolo_forward_dynamic( + output, + self.thresh, + self.num_classes, + masked_anchors, + len(self.anchor_mask), + scale_x_y=self.scale_x_y, + ) diff --git a/carvekit/ml/files/models_loc.py b/carvekit/ml/files/models_loc.py index 45f9a56..cf43ab5 100644 --- a/carvekit/ml/files/models_loc.py +++ b/carvekit/ml/files/models_loc.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ import pathlib @@ -12,7 +14,7 @@ def u2net_full_pretrained() -> pathlib.Path: """Returns u2net pretrained model location Returns: - pathlib.Path to model location + pathlib.Path: model location """ return downloader("u2net.pth") @@ -21,7 +23,7 @@ def basnet_pretrained() -> pathlib.Path: """Returns basnet pretrained model location Returns: - pathlib.Path to model location + pathlib.Path: model location """ return downloader("basnet.pth") @@ -30,7 +32,7 @@ def deeplab_pretrained() -> pathlib.Path: """Returns basnet pretrained model location Returns: - pathlib.Path to model location + pathlib.Path: model location """ return downloader("deeplab.pth") @@ -39,7 +41,7 @@ def fba_pretrained() -> pathlib.Path: """Returns basnet pretrained model location Returns: - pathlib.Path to model location + pathlib.Path: model location """ return downloader("fba_matting.pth") @@ -48,18 +50,54 @@ def tracer_b7_pretrained() -> pathlib.Path: """Returns TRACER with EfficientNet v1 b7 encoder pretrained model location Returns: - pathlib.Path to model location + pathlib.Path: model location """ return downloader("tracer_b7.pth") -def tracer_hair_pretrained() -> pathlib.Path: - """Returns TRACER with EfficientNet v1 b7 encoder model for hair segmentation location +def scene_classifier_pretrained() -> pathlib.Path: + """Returns scene classifier pretrained model location + This model is used to classify scenes into 3 categories: hard, soft, digital + + hard - scenes with hard edges, such as objects, buildings, etc. + soft - scenes with soft edges, such as portraits, hairs, animal, etc. + digital - digital scenes, such as screenshots, graphics, etc. + + more info: https://huggingface.co/Carve/scene_classifier + + Returns: + pathlib.Path: model location + """ + return downloader("scene_classifier.pth") + + +def yolov4_coco_pretrained() -> pathlib.Path: + """Returns yolov4 classifier pretrained model location + This model is used to classify objects in images. 
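A minimal sketch of how these weight-location helpers are used (variable names are illustrative; each call downloads the checkpoint on first use and returns its cached location):

``` python
from carvekit.ml.files.models_loc import scene_classifier_pretrained, yolov4_coco_pretrained

scene_weights = scene_classifier_pretrained()   # pathlib.Path to the cached scene_classifier.pth
yolo_weights = yolov4_coco_pretrained()         # pathlib.Path to the cached yolov4_coco_with_classes.pth
print(scene_weights.exists(), yolo_weights.exists())
```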
+ + Training dataset: COCO 2017 + Training classes: 80 + + It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch) + We have only added coco classnames to the model. + + Returns: + pathlib.Path to model location + """ + return downloader("yolov4_coco_with_classes.pth") + + +def cascadepsp_pretrained() -> pathlib.Path: + """Returns cascade psp pretrained model location + This model is used to refine segmentation masks. + + Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000 + more info: https://huggingface.co/Carve/cascadepsp Returns: pathlib.Path to model location """ - return downloader("tracer_hair.pth") + return downloader("cascadepsp.pth") def download_all(): @@ -68,3 +106,6 @@ def download_all(): deeplab_pretrained() basnet_pretrained() tracer_b7_pretrained() + scene_classifier_pretrained() + yolov4_coco_pretrained() + cascadepsp_pretrained() diff --git a/carvekit/ml/wrap/basnet.py b/carvekit/ml/wrap/basnet.py index 9912e81..836de7f 100644 --- a/carvekit/ml/wrap/basnet.py +++ b/carvekit/ml/wrap/basnet.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ import pathlib @@ -34,12 +36,11 @@ def __init__( Initialize the BASNET model Args: - device: processing device - input_image_size: input image size - batch_size: the number of images that the neural network processes in one run - load_pretrained: loading pretrained model - fp16: use fp16 precision // not supported at this moment - + device (Literal[cpu, cuda], default=cpu): processing device + input_image_size (Union[List[int], int], default=320): input image size + batch_size (int, default=10): the number of images that the neural network processes in one run + load_pretrained (bool, default=True): loading pretrained model + fp16 (bool, default=True): use fp16 precision **not supported at this moment** """ super(BASNET, self).__init__(n_channels=3, n_classes=1) self.device = device @@ -60,10 +61,10 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor: Transform input image to suitable data format for neural network Args: - data: input image + data (PIL.Image.Image): input image Returns: - input for neural network + torch.Tensor: input for neural network """ resized = data.resize(self.input_image_size) @@ -81,18 +82,18 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor: @staticmethod def data_postprocessing( - data: torch.tensor, original_image: PIL.Image.Image + data: torch.Tensor, original_image: PIL.Image.Image ) -> PIL.Image.Image: """ Transforms output data from neural network to suitable data format for using with other components of this framework. 
Args: - data: output data from neural network - original_image: input image which was used for predicted data + data (torch.Tensor): output data from neural network + original_image (PIL.Image.Image): input image which was used for predicted data Returns: - Segmentation mask as PIL Image instance + PIL.Image.Image: Segmentation mask as `PIL Image` instance """ data = data.unsqueeze(0) @@ -109,22 +110,22 @@ def __call__( self, images: List[Union[str, pathlib.Path, PIL.Image.Image]] ) -> List[PIL.Image.Image]: """ - Passes input images through neural network and returns segmentation masks as PIL.Image.Image instances + Passes input images through neural network and returns segmentation masks as `PIL.Image.Image` instances Args: - images: input images + images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images Returns: - segmentation masks as for input images, as PIL.Image.Image instances + List[PIL.Image.Image]: segmentation masks as for input images, as `PIL.Image.Image` instances """ collect_masks = [] for image_batch in batch_generator(images, self.batch_size): - images = thread_pool_processing( + converted_images = thread_pool_processing( lambda x: convert_image(load_image(x)), image_batch ) batches = torch.vstack( - thread_pool_processing(self.data_preprocessing, images) + thread_pool_processing(self.data_preprocessing, converted_images) ) with torch.no_grad(): batches = batches.to(self.device) @@ -134,8 +135,8 @@ def __call__( masks_cpu = masks.cpu() del d2, d3, d4, d5, d6, d7, d8, batches, masks masks = thread_pool_processing( - lambda x: self.data_postprocessing(masks_cpu[x], images[x]), - range(len(images)), + lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]), + range(len(converted_images)), ) collect_masks += masks return collect_masks diff --git a/carvekit/ml/wrap/cascadepsp.py b/carvekit/ml/wrap/cascadepsp.py new file mode 100644 index 0000000..1d0fc9a --- /dev/null +++ b/carvekit/ml/wrap/cascadepsp.py @@ -0,0 +1,310 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0 +""" +import pathlib +import warnings + +import PIL +import numpy as np +import torch +from PIL import Image +from torchvision import transforms +from typing import Union, List + +from carvekit.ml.arch.cascadepsp.pspnet import RefinementModule +from carvekit.ml.arch.cascadepsp.utils import ( + process_im_single_pass, + process_high_res_im, +) +from carvekit.ml.files.models_loc import cascadepsp_pretrained +from carvekit.utils.image_utils import convert_image, load_image +from carvekit.utils.models_utils import get_precision_autocast, cast_network +from carvekit.utils.pool_utils import batch_generator, thread_pool_processing + +__all__ = ["CascadePSP"] + + +class CascadePSP(RefinementModule): + """ + CascadePSP to refine the mask from segmentation network + """ + + def __init__( + self, + device="cpu", + input_tensor_size: int = 900, + batch_size: int = 1, + load_pretrained: bool = True, + fp16: bool = False, + mask_binary_threshold=127, + global_step_only=False, + processing_accelerate_image_size=2048, + ): + """ + Initialize the CascadePSP model + + Args: + device: processing device + input_tensor_size: input image size + batch_size: the number of images that the neural network processes in one run + load_pretrained: loading pretrained model + fp16: use half precision + global_step_only: if True, only global step will be used for prediction. 
See paper for details. + mask_binary_threshold: threshold for binary mask, default 70, set to 0 for no threshold + processing_accelerate_image_size: thumbnail size for image processing acceleration. Set to 0 to disable + + """ + super().__init__() + self.fp16 = fp16 + self.device = device + self.batch_size = batch_size + self.mask_binary_threshold = mask_binary_threshold + self.global_step_only = global_step_only + self.processing_accelerate_image_size = processing_accelerate_image_size + self.input_tensor_size = input_tensor_size + + self.to(device) + if batch_size > 1: + warnings.warn( + "Batch size > 1 is experimental feature for CascadePSP." + " Please, don't use it if you have GPU with small memory!" + ) + if load_pretrained: + self.load_state_dict( + torch.load(cascadepsp_pretrained(), map_location=self.device) + ) + self.eval() + + self._image_transform = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Normalize( + mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225] + ), + ] + ) + + self._seg_transform = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Normalize(mean=[0.5], std=[0.5]), + ] + ) + + def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor: + """ + Transform input image to suitable data format for neural network + + Args: + data: input image + + Returns: + input for neural network + + """ + preprocessed_data = data.copy() + if self.batch_size == 1 and self.processing_accelerate_image_size > 0: + # Okay, we have only one image, so + # we can use image processing acceleration for accelerate high resolution image processing + preprocessed_data.thumbnail( + ( + self.processing_accelerate_image_size, + self.processing_accelerate_image_size, + ) + ) + elif self.batch_size == 1: + pass # No need to do anything + elif self.batch_size > 1 and self.global_step_only is True: + # If we have more than one image and we use only global step, + # there aren't any reason to use image processing acceleration, + # because we will use only global step for prediction and anyway it will be resized to input_tensor_size + preprocessed_data = preprocessed_data.resize( + (self.input_tensor_size, self.input_tensor_size) + ) + elif ( + self.batch_size > 1 + and self.global_step_only is False + and self.processing_accelerate_image_size > 0 + ): + # If we have more than one image and we use local step, + # we can use image processing acceleration for accelerate high resolution image processing + # but we need to resize image to processing_accelerate_image_size to stack it with other images + preprocessed_data = preprocessed_data.resize( + ( + self.processing_accelerate_image_size, + self.processing_accelerate_image_size, + ) + ) + elif ( + self.batch_size > 1 + and self.global_step_only is False + and not (self.processing_accelerate_image_size > 0) + ): + raise ValueError( + "If you use local step with batch_size > 2, " + "you need to set processing_accelerate_image_size > 0," + "since we cannot stack images with different sizes to one batch" + ) + else: # some extra cases + preprocessed_data = preprocessed_data.resize( + ( + self.processing_accelerate_image_size, + self.processing_accelerate_image_size, + ) + ) + + if data.mode == "RGB": + preprocessed_data = self._image_transform( + np.array(preprocessed_data) + ).unsqueeze(0) + elif data.mode == "L": + preprocessed_data = np.array(preprocessed_data) + if 0 < self.mask_binary_threshold <= 255: + preprocessed_data = ( + preprocessed_data > self.mask_binary_threshold + ).astype(np.uint8) * 255 + 
elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0: + warnings.warn( + "mask_binary_threshold should be in range [0, 255], " + "but got {}. Disabling mask_binary_threshold!".format( + self.mask_binary_threshold + ) + ) + + preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze( + 0 + ) # [H,W,1] + + return preprocessed_data + + @staticmethod + def data_postprocessing( + data: torch.Tensor, mask: PIL.Image.Image + ) -> PIL.Image.Image: + """ + Transforms output data from neural network to suitable data + format for using with other components of this framework. + + Args: + data: output data from neural network + mask: input mask + + Returns: + Segmentation mask as PIL Image instance + + """ + refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8") + return Image.fromarray(refined_mask).convert("L").resize(mask.size) + + def safe_forward(self, im, seg, inter_s8=None, inter_s4=None): + """ + Slightly pads the input image such that its length is a multiple of 8 + """ + b, _, ph, pw = seg.shape + if (ph % 8 != 0) or (pw % 8 != 0): + newH = (ph // 8 + 1) * 8 + newW = (pw // 8 + 1) * 8 + p_im = torch.zeros(b, 3, newH, newW, device=im.device) + p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1 + + p_im[:, :, 0:ph, 0:pw] = im + p_seg[:, :, 0:ph, 0:pw] = seg + im = p_im + seg = p_seg + + if inter_s8 is not None: + p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1 + p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8 + inter_s8 = p_inter_s8 + if inter_s4 is not None: + p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1 + p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4 + inter_s4 = p_inter_s4 + + images = super().__call__(im, seg, inter_s8, inter_s4) + return_im = {} + + for key in ["pred_224", "pred_28_3", "pred_56_2"]: + return_im[key] = images[key][:, :, 0:ph, 0:pw] + del images + + return return_im + + def __call__( + self, + images: List[Union[str, pathlib.Path, PIL.Image.Image]], + masks: List[Union[str, pathlib.Path, PIL.Image.Image]], + ) -> List[PIL.Image.Image]: + """ + Passes input images though neural network and returns segmentation masks as PIL.Image.Image instances + + Args: + images: input images + masks: Segmentation masks to refine + + Returns: + segmentation masks as for input images, as PIL.Image.Image instances + + """ + + if len(images) != len(masks): + raise ValueError( + "Len of specified arrays of images and trimaps should be equal!" 
+ ) + + collect_masks = [] + autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16) + with autocast: + cast_network(self, dtype) + for idx_batch in batch_generator(range(len(images)), self.batch_size): + inpt_images = thread_pool_processing( + lambda x: convert_image(load_image(images[x])), idx_batch + ) + + inpt_masks = thread_pool_processing( + lambda x: convert_image(load_image(masks[x]), mode="L"), idx_batch + ) + + inpt_img_batches = thread_pool_processing( + self.data_preprocessing, inpt_images + ) + inpt_masks_batches = thread_pool_processing( + self.data_preprocessing, inpt_masks + ) + if self.batch_size > 1: # We need to stack images, if batch_size > 1 + inpt_img_batches = torch.vstack(inpt_img_batches) + inpt_masks_batches = torch.vstack(inpt_masks_batches) + else: + inpt_img_batches = inpt_img_batches[ + 0 + ] # Get only one image from list + inpt_masks_batches = inpt_masks_batches[0] + + with torch.no_grad(): + inpt_img_batches = inpt_img_batches.to(self.device) + inpt_masks_batches = inpt_masks_batches.to(self.device) + if self.global_step_only: + refined_batches = process_im_single_pass( + self, + inpt_img_batches, + inpt_masks_batches, + self.input_tensor_size, + ) + + else: + refined_batches = process_high_res_im( + self, + inpt_img_batches, + inpt_masks_batches, + self.input_tensor_size, + ) + + refined_masks = refined_batches.cpu() + del (inpt_img_batches, inpt_masks_batches, refined_batches) + collect_masks += thread_pool_processing( + lambda x: self.data_postprocessing(refined_masks[x], inpt_masks[x]), + range(len(inpt_masks)), + ) + return collect_masks diff --git a/carvekit/ml/wrap/deeplab_v3.py b/carvekit/ml/wrap/deeplab_v3.py index 4b19542..d570795 100644 --- a/carvekit/ml/wrap/deeplab_v3.py +++ b/carvekit/ml/wrap/deeplab_v3.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ import pathlib @@ -29,14 +31,14 @@ def __init__( fp16: bool = False, ): """ - Initialize the DeepLabV3 model + Initialize the `DeepLabV3` model Args: - device: processing device - input_image_size: input image size - batch_size: the number of images that the neural network processes in one run - load_pretrained: loading pretrained model - fp16: use half precision + device (Literal[cpu, cuda], default=cpu): processing device + input_image_size (): input image size + batch_size (int, default=10): the number of images that the neural network processes in one run + load_pretrained (bool, default=True): loading pretrained model + fp16 (bool, default=False): use half precision """ self.device = device @@ -69,9 +71,7 @@ def to(self, device: str): Moves neural network to specified processing device Args: - device (:class:`torch.device`): the desired device. - Returns: - None + device (Literal[cpu, cuda]): the desired device. 
""" self.network.to(device) @@ -81,10 +81,10 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor: Transform input image to suitable data format for neural network Args: - data: input image + data (PIL.Image.Image): input image Returns: - input for neural network + torch.Tensor: input for neural network """ copy = data.copy() @@ -93,18 +93,18 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor: @staticmethod def data_postprocessing( - data: torch.tensor, original_image: PIL.Image.Image + data: torch.Tensor, original_image: PIL.Image.Image ) -> PIL.Image.Image: """ Transforms output data from neural network to suitable data format for using with other components of this framework. Args: - data: output data from neural network - original_image: input image which was used for predicted data + data (torch.Tensor): output data from neural network + original_image (PIL.Image.Image): input image which was used for predicted data Returns: - Segmentation mask as PIL Image instance + PIL.Image.Image: Segmentation mask as `PIL Image` instance """ return ( @@ -115,13 +115,13 @@ def __call__( self, images: List[Union[str, pathlib.Path, PIL.Image.Image]] ) -> List[PIL.Image.Image]: """ - Passes input images though neural network and returns segmentation masks as PIL.Image.Image instances + Passes input images though neural network and returns segmentation masks as `PIL.Image.Image` instances Args: - images: input images + images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images Returns: - segmentation masks as for input images, as PIL.Image.Image instances + List[PIL.Image.Image]: segmentation masks as for input images, as `PIL.Image.Image` instances """ collect_masks = [] @@ -129,10 +129,12 @@ def __call__( with autocast: cast_network(self.network, dtype) for image_batch in batch_generator(images, self.batch_size): - images = thread_pool_processing( + converted_images = thread_pool_processing( lambda x: convert_image(load_image(x)), image_batch ) - batches = thread_pool_processing(self.data_preprocessing, images) + batches = thread_pool_processing( + self.data_preprocessing, converted_images + ) with torch.no_grad(): masks = [ self.network(i.to(self.device).unsqueeze(0))["out"][0] @@ -143,8 +145,8 @@ def __call__( ] del batches masks = thread_pool_processing( - lambda x: self.data_postprocessing(masks[x], images[x]), - range(len(images)), + lambda x: self.data_postprocessing(masks[x], converted_images[x]), + range(len(converted_images)), ) collect_masks += masks return collect_masks diff --git a/carvekit/ml/wrap/fba_matting.py b/carvekit/ml/wrap/fba_matting.py index c285df0..19a2659 100644 --- a/carvekit/ml/wrap/fba_matting.py +++ b/carvekit/ml/wrap/fba_matting.py @@ -43,12 +43,14 @@ def __init__( Initialize the FBAMatting model Args: - device: processing device - input_tensor_size: input image size - batch_size: the number of images that the neural network processes in one run - encoder: neural network encoder head - load_pretrained: loading pretrained model - fp16: use half precision + device (Literal[cpu, cuda], default=cpu): processing device + input_tensor_size (Union[List[int], int], default=2048): input image size + batch_size (int, default=2): the number of images that the neural network processes in one run + encoder (str, default=resnet50_GN_WS): neural network encoder head + .. TODO:: + Add more encoders to documentation as Literal typehint. 
+ load_pretrained (bool, default=True): loading pretrained model + fp16 (bool, default=False): use half precision """ super(FBAMatting, self).__init__(encoder=encoder) @@ -71,10 +73,10 @@ def data_preprocessing( Transform input image to suitable data format for neural network Args: - data: input image + data (Union[PIL.Image.Image, np.ndarray]): input image Returns: - input for neural network + Tuple[torch.FloatTensor, torch.FloatTensor]: input for neural network """ resized = data.copy() @@ -114,18 +116,18 @@ def data_preprocessing( @staticmethod def data_postprocessing( - data: torch.tensor, trimap: PIL.Image.Image + data: torch.Tensor, trimap: PIL.Image.Image ) -> PIL.Image.Image: """ Transforms output data from neural network to suitable data format for using with other components of this framework. Args: - data: output data from neural network - trimap: Map with the area we need to refine + data (torch.Tensor): output data from neural network + trimap (PIL.Image.Image): Map with the area we need to refine Returns: - Segmentation mask as PIL Image instance + PIL.Image.Image: Segmentation mask """ if trimap.mode != "L": @@ -149,11 +151,11 @@ def __call__( Passes input images though neural network and returns segmentation masks as PIL.Image.Image instances Args: - images: input images - trimaps: Maps with the areas we need to refine + images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images + trimaps (List[Union[str, pathlib.Path, PIL.Image.Image]]): Maps with the areas we need to refine Returns: - segmentation masks as for input images, as PIL.Image.Image instances + List[PIL.Image.Image]: segmentation masks as for input images """ diff --git a/carvekit/ml/wrap/scene_classifier.py b/carvekit/ml/wrap/scene_classifier.py new file mode 100644 index 0000000..75c0b39 --- /dev/null +++ b/carvekit/ml/wrap/scene_classifier.py @@ -0,0 +1,150 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0 +""" +import pathlib + +import PIL.Image +import torch +import torch.nn.functional as F +import torchvision.transforms as transforms +from typing import List, Union, Tuple +from torch.autograd import Variable + +from carvekit.ml.files.models_loc import scene_classifier_pretrained +from carvekit.utils.image_utils import load_image, convert_image +from carvekit.utils.models_utils import get_precision_autocast, cast_network +from carvekit.utils.pool_utils import thread_pool_processing, batch_generator + +__all__ = ["SceneClassifier"] + + +class SceneClassifier: + """ + SceneClassifier model interface + + Description: + Performs a primary analysis of the image in order to select the necessary method for removing the background. + The choice is made by classifying the scene type. + + The output can be the following types: + - hard + - soft + - digital + + """ + + def __init__( + self, + topk: int = 1, + device="cpu", + batch_size: int = 4, + fp16: bool = False, + model_path: Union[str, pathlib.Path] = None, + ): + """ + Initialize the Scene Classifier. 
+ + Args: + topk: number of top classes to return + device: processing device + batch_size: the number of images that the neural network processes in one run + fp16: use fp16 precision + + """ + if model_path is None: + model_path = scene_classifier_pretrained() + self.topk = topk + self.fp16 = fp16 + self.device = device + self.batch_size = batch_size + + self.transform = transforms.Compose( + [ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize( + mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225] + ), + ] + ) + state_dict = torch.load(model_path, map_location=device) + self.model = state_dict["model"] + self.class_to_idx = state_dict["class_to_idx"] + self.idx_to_class = {v: k for k, v in self.class_to_idx.items()} + self.model.to(device) + self.model.eval() + + def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: + """ + Transform input image to suitable data format for neural network + + Args: + data: input image + + Returns: + input for neural network + + """ + + return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor) + + def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]: + """ + Transforms output data from neural network to suitable data + format for using with other components of this framework. + + Args: + data: output data from neural network + + Returns: + Top-k class of scene type, probability of these classes + + """ + ps = F.softmax(data.float(), dim=0) + topk = ps.cpu().topk(self.topk) + + probs, classes = (e.data.numpy().squeeze().tolist() for e in topk) + if isinstance(classes, int): + classes = [classes] + probs = [probs] + return list(map(lambda x: self.idx_to_class[x], classes)), probs + + def __call__( + self, images: List[Union[str, pathlib.Path, PIL.Image.Image]] + ) -> Tuple[List[str], List[float]]: + """ + Passes input images though neural network and returns class predictions. 
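A minimal usage sketch for this classifier (the image path is a placeholder):

``` python
from carvekit.ml.wrap.scene_classifier import SceneClassifier

classifier = SceneClassifier(device="cpu", batch_size=1)
results = classifier(["./image.jpg"])   # one (classes, probabilities) tuple per input image
classes, probs = results[0]
print(classes[0], probs[0])             # top-1 scene label: "hard", "soft" or "digital"
```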
+ + Args: + images: input images + + Returns: + Top-k class of scene type, probability of these classes for every passed image + + """ + collect_masks = [] + autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16) + with autocast: + cast_network(self.model, dtype) + for image_batch in batch_generator(images, self.batch_size): + converted_images = thread_pool_processing( + lambda x: convert_image(load_image(x)), image_batch + ) + batches = torch.vstack( + thread_pool_processing(self.data_preprocessing, converted_images) + ) + with torch.no_grad(): + batches = Variable(batches).to(self.device) + masks = self.model.forward(batches) + masks_cpu = masks.cpu() + del batches, masks + masks = thread_pool_processing( + lambda x: self.data_postprocessing(masks_cpu[x]), + range(len(converted_images)), + ) + collect_masks += masks + + return collect_masks diff --git a/carvekit/ml/wrap/tracer_b7.py b/carvekit/ml/wrap/tracer_b7.py index 20a8e45..214b095 100644 --- a/carvekit/ml/wrap/tracer_b7.py +++ b/carvekit/ml/wrap/tracer_b7.py @@ -4,19 +4,19 @@ License: Apache License 2.0 """ import pathlib -import warnings from typing import List, Union + import PIL.Image import numpy as np import torch import torchvision.transforms as transforms from PIL import Image -from carvekit.ml.arch.tracerb7.tracer import TracerDecoder from carvekit.ml.arch.tracerb7.efficientnet import EfficientEncoderB7 -from carvekit.ml.files.models_loc import tracer_b7_pretrained, tracer_hair_pretrained -from carvekit.utils.models_utils import get_precision_autocast, cast_network +from carvekit.ml.arch.tracerb7.tracer import TracerDecoder +from carvekit.ml.files.models_loc import tracer_b7_pretrained from carvekit.utils.image_utils import load_image, convert_image +from carvekit.utils.models_utils import get_precision_autocast, cast_network from carvekit.utils.pool_utils import thread_pool_processing, batch_generator __all__ = ["TracerUniversalB7"] @@ -35,16 +35,16 @@ def __init__( model_path: Union[str, pathlib.Path] = None, ): """ - Initialize the U2NET model + Initialize the TRACER model Args: - layers_cfg: neural network layers configuration - device: processing device - input_image_size: input image size - batch_size: the number of images that the neural network processes in one run - load_pretrained: loading pretrained model - fp16: use fp16 precision - + device (Literal[cpu, cuda], default=cpu): processing device + input_image_size (Union[List[int], int], default=640): input image size + batch_size(int, default=4): the number of images that the neural network processes in one run + load_pretrained(bool, default=True): loading pretrained model + fp16 (bool, default=False): use fp16 precision + model_path (Union[str, pathlib.Path], default=None): path to the model + .. 
note:: REDO """ if model_path is None: model_path = tracer_b7_pretrained() @@ -82,10 +82,10 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: Transform input image to suitable data format for neural network Args: - data: input image + data (PIL.Image.Image): input image Returns: - input for neural network + torch.FloatTensor: input for neural network """ @@ -93,18 +93,18 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: @staticmethod def data_postprocessing( - data: torch.tensor, original_image: PIL.Image.Image + data: torch.Tensor, original_image: PIL.Image.Image ) -> PIL.Image.Image: """ Transforms output data from neural network to suitable data format for using with other components of this framework. Args: - data: output data from neural network - original_image: input image which was used for predicted data + data (torch.Tensor): output data from neural network + original_image (PIL.Image.Image): input image which was used for predicted data Returns: - Segmentation mask as PIL Image instance + PIL.Image.Image: Segmentation mask """ output = (data.type(torch.FloatTensor).detach().cpu().numpy() * 255.0).astype( @@ -122,10 +122,10 @@ def __call__( Passes input images though neural network and returns segmentation masks as PIL.Image.Image instances Args: - images: input images + images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images Returns: - segmentation masks as for input images, as PIL.Image.Image instances + List[PIL.Image.Image]: segmentation masks as for input images """ collect_masks = [] @@ -133,11 +133,11 @@ def __call__( with autocast: cast_network(self, dtype) for image_batch in batch_generator(images, self.batch_size): - images = thread_pool_processing( + converted_images = thread_pool_processing( lambda x: convert_image(load_image(x)), image_batch ) batches = torch.vstack( - thread_pool_processing(self.data_preprocessing, images) + thread_pool_processing(self.data_preprocessing, converted_images) ) with torch.no_grad(): batches = batches.to(self.device) @@ -145,34 +145,11 @@ def __call__( masks_cpu = masks.cpu() del batches, masks masks = thread_pool_processing( - lambda x: self.data_postprocessing(masks_cpu[x], images[x]), - range(len(images)), + lambda x: self.data_postprocessing( + masks_cpu[x], converted_images[x] + ), + range(len(converted_images)), ) collect_masks += masks return collect_masks - - -class TracerHair(TracerUniversalB7): - """TRACER HAIR model interface""" - - def __init__( - self, - device="cpu", - input_image_size: Union[List[int], int] = 640, - batch_size: int = 4, - load_pretrained: bool = True, - fp16: bool = False, - model_path: Union[str, pathlib.Path] = None, - ): - if model_path is None: - model_path = tracer_hair_pretrained() - warnings.warn("TracerHair has not public model yet. 
Don't use it!", UserWarning) - super(TracerHair, self).__init__( - device=device, - input_image_size=input_image_size, - batch_size=batch_size, - load_pretrained=load_pretrained, - fp16=fp16, - model_path=model_path, - ) diff --git a/carvekit/ml/wrap/u2net.py b/carvekit/ml/wrap/u2net.py index 7d126df..4a0eb57 100644 --- a/carvekit/ml/wrap/u2net.py +++ b/carvekit/ml/wrap/u2net.py @@ -4,6 +4,8 @@ License: Apache License 2.0 """ import pathlib +import warnings + from typing import List, Union import PIL.Image import numpy as np @@ -43,6 +45,8 @@ def __init__( """ super(U2NET, self).__init__(cfg_type=layers_cfg, out_ch=1) + if fp16: + warnings.warn("FP16 is not supported at this moment for U2NET model") self.device = device self.batch_size = batch_size if isinstance(input_image_size, list): @@ -54,6 +58,7 @@ def __init__( self.load_state_dict( torch.load(u2net_full_pretrained(), map_location=self.device) ) + self.eval() def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: @@ -61,10 +66,10 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: Transform input image to suitable data format for neural network Args: - data: input image + data (PIL.Image.Image): input image Returns: - input for neural network + torch.FloatTensor: input for neural network """ resized = data.resize(self.input_image_size, resample=3) @@ -82,18 +87,18 @@ def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: @staticmethod def data_postprocessing( - data: torch.tensor, original_image: PIL.Image.Image + data: torch.Tensor, original_image: PIL.Image.Image ) -> PIL.Image.Image: """ Transforms output data from neural network to suitable data format for using with other components of this framework. Args: - data: output data from neural network - original_image: input image which was used for predicted data + data (torch.Tensor): output data from neural network + original_image (PIL.Image.Image): input image which was used for predicted data Returns: - Segmentation mask as PIL Image instance + PIL.Image.Image: Segmentation mask as `PIL Image` instance """ data = data.unsqueeze(0) @@ -121,11 +126,11 @@ def __call__( """ collect_masks = [] for image_batch in batch_generator(images, self.batch_size): - images = thread_pool_processing( + converted_images = thread_pool_processing( lambda x: convert_image(load_image(x)), image_batch ) batches = torch.vstack( - thread_pool_processing(self.data_preprocessing, images) + thread_pool_processing(self.data_preprocessing, converted_images) ) with torch.no_grad(): batches = batches.to(self.device) @@ -133,8 +138,8 @@ def __call__( masks_cpu = masks.cpu() del d2, d3, d4, d5, d6, d7, batches, masks masks = thread_pool_processing( - lambda x: self.data_postprocessing(masks_cpu[x], images[x]), - range(len(images)), + lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]), + range(len(converted_images)), ) collect_masks += masks return collect_masks diff --git a/carvekit/ml/wrap/yolov4.py b/carvekit/ml/wrap/yolov4.py new file mode 100644 index 0000000..cf59233 --- /dev/null +++ b/carvekit/ml/wrap/yolov4.py @@ -0,0 +1,296 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. 
+License: Apache License 2.0 +""" + +import pathlib + +import PIL.Image +import PIL.Image +import numpy as np +import pydantic +import torch +from torch.autograd import Variable +from typing import List, Union + +from carvekit.ml.arch.yolov4.models import Yolov4 +from carvekit.ml.arch.yolov4.utils import post_processing +from carvekit.ml.files.models_loc import yolov4_coco_pretrained +from carvekit.utils.image_utils import load_image, convert_image +from carvekit.utils.models_utils import get_precision_autocast, cast_network +from carvekit.utils.pool_utils import thread_pool_processing, batch_generator + +__all__ = ["YoloV4_COCO", "SimplifiedYoloV4"] + + +class Object(pydantic.BaseModel): + """Object class""" + + class_name: str + confidence: float + x1: int + y1: int + x2: int + y2: int + + +class YoloV4_COCO(Yolov4): + """YoloV4 COCO model wrapper""" + + def __init__( + self, + n_classes: int = 80, + device="cpu", + classes: List[str] = None, + input_image_size: Union[List[int], int] = 608, + batch_size: int = 4, + load_pretrained: bool = True, + fp16: bool = False, + model_path: Union[str, pathlib.Path] = None, + ): + """ + Initialize the YoloV4 COCO. + + Args: + n_classes: number of classes + device: processing device + input_image_size: input image size + batch_size: the number of images that the neural network processes in one run + fp16: use fp16 precision + model_path: path to model weights + load_pretrained: load pretrained weights + """ + if model_path is None: + model_path = yolov4_coco_pretrained() + self.fp16 = fp16 + self.device = device + self.batch_size = batch_size + if isinstance(input_image_size, list): + self.input_image_size = input_image_size[:2] + else: + self.input_image_size = (input_image_size, input_image_size) + + if load_pretrained: + state_dict = torch.load(model_path, map_location="cpu") + self.classes = state_dict["classes"] + super().__init__(n_classes=len(state_dict["classes"]), inference=True) + self.load_state_dict(state_dict["state"]) + else: + self.classes = classes + super().__init__(n_classes=n_classes, inference=True) + + self.to(device) + self.eval() + + def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor: + """ + Transform input image to suitable data format for neural network + + Args: + data: input image + + Returns: + input for neural network + + """ + image = data.resize(self.input_image_size) + # noinspection PyTypeChecker + image = np.array(image).astype(np.float32) + image = image.transpose((2, 0, 1)) + image = image / 255.0 + image = torch.from_numpy(image).float() + return torch.unsqueeze(image, 0).type(torch.FloatTensor) + + def data_postprocessing( + self, data: List[torch.FloatTensor], images: List[PIL.Image.Image] + ) -> List[Object]: + """ + Transforms output data from neural network to suitable data + format for using with other components of this framework. 
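For orientation, a minimal end-to-end detection sketch with this wrapper (the image path is a placeholder):

``` python
from carvekit.ml.wrap.yolov4 import YoloV4_COCO

detector = YoloV4_COCO(device="cpu", batch_size=1)
detections = detector(["./image.jpg"])  # List[List[Object]]: one inner list per input image
for obj in detections[0]:
    print(obj.class_name, obj.confidence, (obj.x1, obj.y1, obj.x2, obj.y2))
```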
+ + Args: + data: output data from neural network + images: input images + + + Returns: + list of objects for each image + + """ + output = post_processing(0.4, 0.6, data) + images_objects = [] + for image_idx, image_objects in enumerate(output): + image_size = images[image_idx].size + objects = [] + for obj in image_objects: + objects.append( + Object( + class_name=self.classes[obj[6]], + confidence=obj[5], + x1=int(obj[0] * image_size[0]), + y1=int(obj[1] * image_size[1]), + x2=int(obj[2] * image_size[0]), + y2=int(obj[3] * image_size[1]), + ) + ) + images_objects.append(objects) + + return images_objects + + def __call__( + self, images: List[Union[str, pathlib.Path, PIL.Image.Image]] + ) -> List[List[Object]]: + """ + Passes input images though neural network + + Args: + images: input images + + Returns: + list of objects for each image + + """ + collect_masks = [] + autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16) + with autocast: + cast_network(self, dtype) + for image_batch in batch_generator(images, self.batch_size): + converted_images = thread_pool_processing( + lambda x: convert_image(load_image(x)), image_batch + ) + batches = torch.vstack( + thread_pool_processing(self.data_preprocessing, converted_images) + ) + with torch.no_grad(): + batches = Variable(batches).to(self.device) + out = super().__call__(batches) + out_cpu = [out_i.cpu() for out_i in out] + del batches, out + out = self.data_postprocessing(out_cpu, converted_images) + collect_masks += out + + return collect_masks + + +class SimplifiedYoloV4(YoloV4_COCO): + """ + The YoloV4 COCO classifier, but classifies only 7 supercategories. + + human - Scenes of people, such as portrait photographs + animals - Scenes with animals + objects - Scenes with normal objects + cars - Scenes with cars + other - Other scenes + """ + + db = { + "human": ["person"], + "animals": [ + "bird", + "cat", + "dog", + "horse", + "sheep", + "cow", + "elephant", + "bear", + "zebra", + "giraffe", + ], + "cars": [ + "car", + "motorbike", + "bus", + "truck", + ], + "objects": [ + "bicycle", + "traffic light", + "fire hydrant", + "stop sign", + "parking meter", + "bench", + "backpack", + "umbrella", + "handbag", + "tie", + "suitcase", + "frisbee", + "skis", + "snowboard", + "sports ball", + "kite", + "baseball bat", + "baseball glove", + "skateboard", + "surfboard", + "tennis racket", + "bottle", + "wine glass", + "cup", + "fork", + "knife", + "spoon", + "bowl", + "banana", + "apple", + "sandwich", + "orange", + "broccoli", + "carrot", + "hot dog", + "pizza", + "donut", + "cake", + "chair", + "sofa", + "pottedplant", + "bed", + "diningtable", + "toilet", + "tvmonitor", + "laptop", + "mouse", + "remote", + "keyboard", + "cell phone", + "microwave", + "oven", + "toaster", + "sink", + "refrigerator", + "book", + "clock", + "vase", + "scissors", + "teddy bear", + "hair drier", + "toothbrush", + ], + "other": ["aeroplane", "train", "boat"], + } + + def data_postprocessing( + self, data: List[torch.FloatTensor], images: List[PIL.Image.Image] + ) -> List[List[str]]: + """ + Transforms output data from neural network to suitable data + format for using with other components of this framework. 
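A minimal sketch of the simplified classifier (placeholder path); it returns coarse scene tags instead of bounding boxes:

``` python
from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4

classifier = SimplifiedYoloV4(device="cpu", batch_size=1)
tags = classifier(["./image.jpg"])      # e.g. [["human", "objects"]] -- one tag list per image
```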
+ + Args: + data: output data from neural network + images: input images + """ + objects = super().data_postprocessing(data, images) + new_output = [] + + for image_objects in objects: + new_objects = [] + for obj in image_objects: + for key, values in list(self.db.items()): + if obj.class_name in values: + new_objects.append(key) # We don't need bbox at this moment + new_output.append(new_objects) + + return new_output diff --git a/carvekit/pipelines/postprocessing/__init__.py b/carvekit/pipelines/postprocessing/__init__.py new file mode 100644 index 0000000..1de606e --- /dev/null +++ b/carvekit/pipelines/postprocessing/__init__.py @@ -0,0 +1,2 @@ +from carvekit.pipelines.postprocessing.matting import MattingMethod +from carvekit.pipelines.postprocessing.casmatting import CasMattingMethod diff --git a/carvekit/pipelines/postprocessing/casmatting.py b/carvekit/pipelines/postprocessing/casmatting.py new file mode 100644 index 0000000..d8eec79 --- /dev/null +++ b/carvekit/pipelines/postprocessing/casmatting.py @@ -0,0 +1,83 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0 +""" +from carvekit.ml.wrap.fba_matting import FBAMatting +from carvekit.ml.wrap.cascadepsp import CascadePSP +from typing import Union, List +from PIL import Image +from pathlib import Path +from carvekit.trimap.cv_gen import CV2TrimapGenerator +from carvekit.trimap.generator import TrimapGenerator +from carvekit.utils.mask_utils import apply_mask +from carvekit.utils.pool_utils import thread_pool_processing +from carvekit.utils.image_utils import load_image, convert_image + +__all__ = ["CasMattingMethod"] + + +class CasMattingMethod: + """ + Improve segmentation quality by refining segmentation with the CascadePSP model + and post-processing the segmentation with the FBAMatting model + """ + + def __init__( + self, + refining_module: Union[CascadePSP], + matting_module: Union[FBAMatting], + trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator], + device="cpu", + ): + """ + Initializes CasMattingMethod class. 
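A minimal wiring sketch for this post-processing step (the paths and bare-default constructors are illustrative, not recommended settings):

``` python
from carvekit.ml.wrap.cascadepsp import CascadePSP
from carvekit.ml.wrap.fba_matting import FBAMatting
from carvekit.trimap.generator import TrimapGenerator
from carvekit.pipelines.postprocessing import CasMattingMethod

postprocess = CasMattingMethod(
    refining_module=CascadePSP(device="cpu", batch_size=1),
    matting_module=FBAMatting(device="cpu", batch_size=1),
    trimap_generator=TrimapGenerator(),  # assumed constructible with defaults
    device="cpu",
)
cutouts = postprocess(images=["./image.jpg"], masks=["./mask.png"])  # list of composited PIL images
```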
+ + Args: + refining_module: Initialized refining network + matting_module: Initialized matting neural network class + trimap_generator: Initialized trimap generator class + device: Processing device used for applying mask to image + """ + self.device = device + self.refining_module = refining_module + self.matting_module = matting_module + self.trimap_generator = trimap_generator + + def __call__( + self, + images: List[Union[str, Path, Image.Image]], + masks: List[Union[str, Path, Image.Image]], + ): + """ + Passes data through apply_mask function + + Args: + images: list of images + masks: list pf masks + + Returns: + list of images + """ + if len(images) != len(masks): + raise ValueError("Images and Masks lists should have same length!") + images = thread_pool_processing(lambda x: convert_image(load_image(x)), images) + masks = thread_pool_processing( + lambda x: convert_image(load_image(x), mode="L"), masks + ) + refined_masks = self.refining_module(images, masks) + trimaps = thread_pool_processing( + lambda x: self.trimap_generator( + original_image=images[x], mask=refined_masks[x] + ), + range(len(images)), + ) + alpha = self.matting_module(images=images, trimaps=trimaps) + return list( + map( + lambda x: apply_mask( + image=images[x], mask=alpha[x], device=self.device + ), + range(len(images)), + ) + ) diff --git a/carvekit/pipelines/postprocessing.py b/carvekit/pipelines/postprocessing/matting.py similarity index 89% rename from carvekit/pipelines/postprocessing.py rename to carvekit/pipelines/postprocessing/matting.py index fc22451..cd91142 100644 --- a/carvekit/pipelines/postprocessing.py +++ b/carvekit/pipelines/postprocessing/matting.py @@ -32,9 +32,9 @@ def __init__( Initializes Matting Method class. Args: - matting_module: Initialized matting neural network class - trimap_generator: Initialized trimap generator class - device: Processing device used for applying mask to image + - `matting_module`: Initialized matting neural network class + - `trimap_generator`: Initialized trimap generator class + - `device`: Processing device used for applying mask to image """ self.device = device self.matting_module = matting_module @@ -49,11 +49,11 @@ def __call__( Passes data through apply_mask function Args: - images: list of images - masks: list pf masks + - `images`: list of images + - `masks`: list pf masks Returns: - list of images + list of images """ if len(images) != len(masks): raise ValueError("Images and Masks lists should have same length!") diff --git a/carvekit/pipelines/preprocessing/__init__.py b/carvekit/pipelines/preprocessing/__init__.py new file mode 100644 index 0000000..5355429 --- /dev/null +++ b/carvekit/pipelines/preprocessing/__init__.py @@ -0,0 +1,2 @@ +from carvekit.pipelines.preprocessing.stub import PreprocessingStub +from carvekit.pipelines.preprocessing.autoscene import AutoScene diff --git a/carvekit/pipelines/preprocessing/autoscene.py b/carvekit/pipelines/preprocessing/autoscene.py new file mode 100644 index 0000000..04138fb --- /dev/null +++ b/carvekit/pipelines/preprocessing/autoscene.py @@ -0,0 +1,85 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. 
+License: Apache License 2.0 +""" +from pathlib import Path + +from PIL import Image +from typing import Union, List + +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 +from carvekit.ml.wrap.u2net import U2NET + +__all__ = ["AutoScene"] + + +class AutoScene: + """AutoScene preprocessing method""" + + def __init__(self, scene_classifier: SceneClassifier): + """ + Args: + scene_classifier: SceneClassifier instance + """ + self.scene_classifier = scene_classifier + + @staticmethod + def select_net(scene: str): + """ + Selects the network to be used for segmentation based on the detected scene + + Args: + scene: scene name + """ + if scene == "hard": + return TracerUniversalB7 + elif scene == "soft": + return U2NET + elif scene == "digital": + return TracerUniversalB7 # TODO: not implemented yet + + def __call__(self, interface, images: List[Union[str, Path, Image.Image]]): + """ + Automatically detects the scene and selects the appropriate network for segmentation + + Args: + interface: Interface instance + images: list of images + + Returns: + list of masks + """ + scene_analysis = self.scene_classifier(images) + images_per_scene = {} + for i, image in enumerate(images): + scene_name = scene_analysis[i][0][0] + if scene_name not in images_per_scene: + images_per_scene[scene_name] = [] + images_per_scene[scene_name].append(image) + + masks_per_scene = {} + for scene_name, igs in list(images_per_scene.items()): + net = self.select_net(scene_name) + if isinstance(interface.segmentation_pipeline, net): + masks_per_scene[scene_name] = interface.segmentation_pipeline(igs) + else: + old_device = interface.segmentation_pipeline.device + interface.segmentation_pipeline.to( + "cpu" + ) # unload model from gpu, to avoid OOM + net_instance = net(device=old_device) + masks_per_scene[scene_name] = net_instance(igs) + del net_instance + interface.segmentation_pipeline.to(old_device) # load model back to gpu + + # restore one list of masks with the same order as images + masks = [] + for i, image in enumerate(images): + scene_name = scene_analysis[i][0][0] + masks.append( + masks_per_scene[scene_name][images_per_scene[scene_name].index(image)] + ) + + return masks diff --git a/carvekit/pipelines/preprocessing.py b/carvekit/pipelines/preprocessing/stub.py similarity index 81% rename from carvekit/pipelines/preprocessing.py rename to carvekit/pipelines/preprocessing/stub.py index 3d1e848..ea1b8b9 100644 --- a/carvekit/pipelines/preprocessing.py +++ b/carvekit/pipelines/preprocessing/stub.py @@ -16,11 +16,11 @@ class PreprocessingStub: def __call__(self, interface, images: List[Union[str, Path, Image.Image]]): """ - Passes data though interface.segmentation_pipeline() method + Passes data though `interface.segmentation_pipeline()` method Args: - interface: Interface instance - images: list of images + - `interface`: Interface instance + - `images`: list of images Returns: the result of passing data through segmentation_pipeline method of interface diff --git a/carvekit/trimap/add_ops.py b/carvekit/trimap/add_ops.py index dfb37ca..c1f8313 100644 --- a/carvekit/trimap/add_ops.py +++ b/carvekit/trimap/add_ops.py @@ -13,14 +13,14 @@ def prob_filter(mask: Image.Image, prob_threshold=231) -> Image.Image: Applies a filter to the mask by the probability of locating an object in the object area. Args: - prob_threshold: Threshold of probability for mark area as background. 
- mask: Predicted object mask + prob_threshold (int, default=231): Threshold of probability for mark area as background. + mask (Image.Image): Predicted object mask Raises: - ValueError if mask or trimap has wrong color mode + ValueError: if mask or trimap has wrong color mode Returns: - Generated trimap for image. + Image.Image: generated trimap for image. """ if mask.mode != "L": raise ValueError("Input mask has wrong color mode.") @@ -38,15 +38,15 @@ def prob_as_unknown_area( Marks any uncertainty in the seg mask as an unknown region. Args: - prob_threshold: Threshold of probability for mark area as unknown. - trimap: Generated trimap. - mask: Predicted object mask + prob_threshold (int, default=255): Threshold of probability for mark area as unknown. + trimap (Image.Image): Generated trimap. + mask (Image.Image): Predicted object mask Raises: - ValueError if mask or trimap has wrong color mode + ValueError: if mask or trimap has wrong color mode Returns: - Generated trimap for image. + Image.Image: Generated trimap for image. """ if mask.mode != "L" or trimap.mode != "L": raise ValueError("Input mask has wrong color mode.") @@ -63,13 +63,12 @@ def post_erosion(trimap: Image.Image, erosion_iters=1) -> Image.Image: Performs erosion on the mask and marks the resulting area as an unknown region. Args: - erosion_iters: The number of iterations of erosion that + erosion_iters (int, default=1): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area - trimap: Generated trimap. - mask: Predicted object mask + trimap (Image.Image): Generated trimap. Returns: - Generated trimap for image. + Image.Image: Generated trimap for image. """ if trimap.mode != "L": raise ValueError("Input mask has wrong color mode.") diff --git a/carvekit/trimap/cv_gen.py b/carvekit/trimap/cv_gen.py index fc2c229..8323751 100644 --- a/carvekit/trimap/cv_gen.py +++ b/carvekit/trimap/cv_gen.py @@ -14,9 +14,9 @@ def __init__(self, kernel_size: int = 30, erosion_iters: int = 1): Initialize a new CV2TrimapGenerator instance Args: - kernel_size: The size of the offset from the object mask + kernel_size (int, default=30): The size of the offset from the object mask in pixels when an unknown area is detected in the trimap - erosion_iters: The number of iterations of erosion that + erosion_iters (int, default=1: The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area """ self.kernel_size = kernel_size @@ -30,11 +30,11 @@ def __call__( Based on cv2 erosion algorithm. Args: - original_image: Original image - mask: Predicted object mask + original_image (PIL.Image.Image): Original image + mask (PIL.Image.Image): Predicted object mask Returns: - Generated trimap for image. + PIL.Image.Image: Generated trimap for image. 
""" if mask.mode != "L": raise ValueError("Input mask has wrong color mode.") diff --git a/carvekit/trimap/generator.py b/carvekit/trimap/generator.py index 0656f45..cbabea6 100644 --- a/carvekit/trimap/generator.py +++ b/carvekit/trimap/generator.py @@ -16,11 +16,11 @@ def __init__( Initialize a TrimapGenerator instance Args: - prob_threshold: Probability threshold at which the + prob_threshold (int, default=231): Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied - kernel_size: The size of the offset from the object mask + kernel_size (int, default=30): The size of the offset from the object mask in pixels when an unknown area is detected in the trimap - erosion_iters: The number of iterations of erosion that + erosion_iters (int, default=5): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area """ super().__init__(kernel_size, erosion_iters=0) @@ -31,12 +31,13 @@ def __call__(self, original_image: Image.Image, mask: Image.Image) -> Image.Imag """ Generates trimap based on predicted object mask to refine object mask borders. Based on cv2 erosion algorithm and additional prob. filters. + Args: - original_image: Original image - mask: Predicted object mask + original_image (Image.Image): Original image + mask (Image.Image): Predicted object mask Returns: - Generated trimap for image. + Image.Image: Generated trimap for image. """ filter_mask = prob_filter(mask=mask, prob_threshold=self.prob_threshold) trimap = super(TrimapGenerator, self).__call__(original_image, filter_mask) diff --git a/carvekit/utils/download_models.py b/carvekit/utils/download_models.py index b1b52ad..de13778 100644 --- a/carvekit/utils/download_models.py +++ b/carvekit/utils/download_models.py @@ -45,12 +45,25 @@ "revision": "d8a8fd9e7b3fa0d2f1506fe7242966b34381e9c5", "filename": "tracer_b7.pth", }, - "tracer_hair.pth": { - "repository": "Carve/tracer_b7", - "revision": "d8a8fd9e7b3fa0d2f1506fe7242966b34381e9c5", - "filename": "tracer_b7.pth", # TODO don't forget change this link!! 
+ "scene_classifier.pth": { + "repository": "Carve/scene_classifier", + "revision": "71c8e4c771dd5a20ff0c5c9e3c8f1c9cf8082740", + "filename": "scene_classifier.pth", + }, + "yolov4_coco_with_classes.pth": { + "repository": "Carve/yolov4_coco", + "revision": "e3fc9cd22f86e456d2749d1ae148400f2f950fb3", + "filename": "yolov4_coco_with_classes.pth", + }, + "cascadepsp.pth": { + "repository": "Carve/cascadepsp", + "revision": "3ca1e5e432344b1277bc88d1c6d4265c46cff62f", + "filename": "cascadepsp.pth", }, } +""" +All data needed to build path relative to huggingface.co for model download +""" MODELS_CHECKSUMS = { "basnet.pth": "e409cb709f4abca87cb11bd44a9ad3f909044a917977ab65244b4c94dd33" @@ -63,9 +76,15 @@ "bea1533fda5ee70a909b934a9bd495b432cef89d629f00a07858a517742476fa8b346de24f7", "tracer_b7.pth": "c439c5c12d4d43d5f9be9ec61e68b2e54658a541bccac2577ef5a54fb252b6e8415d41f7e" "c2487033d0c02b4dd08367958e4e62091318111c519f93e2632be7b", - "tracer_hair.pth": "5c2fb9973fc42fa6208920ffa9ac233cc2ea9f770b24b4a96969d3449aed7ac89e6d37e" - "e486a13e63be5499f2df6ccef1109e9e8797d1326207ac89b2f39a7cf", + "scene_classifier.pth": "6d8692510abde453b406a1fea557afdea62fd2a2a2677283a3ecc2" + "341a4895ee99ed65cedcb79b80775db14c3ffcfc0aad2caec1d85140678852039d2d4e76b4", + "yolov4_coco_with_classes.pth": "44b6ec2dd35dc3802bf8c512002f76e00e97bfbc86bc7af6de2fafce229a41b4ca" + "12c6f3d7589278c71cd4ddd62df80389b148c19b84fa03216905407a107fff", + "cascadepsp.pth": "3f895f5126d80d6f73186f045557ea7c8eab4dfa3d69a995815bb2c03d564573f36c474f04d7bf0022a27829f583a1a793b036adf801cb423e41a4831b830122", } +""" +Model -> checksum dictionary +""" def sha512_checksum_calc(file: Path) -> str: @@ -73,7 +92,7 @@ def sha512_checksum_calc(file: Path) -> str: Calculates the SHA512 hash digest of a file on fs Args: - file: Path to the file + file (Path): Path to the file Returns: SHA512 hash digest of a file. @@ -86,6 +105,10 @@ def sha512_checksum_calc(file: Path) -> str: class CachedDownloader: + """ + Metaclass for models downloaders. + """ + __metaclass__ = ABCMeta @property @@ -96,9 +119,24 @@ def name(self) -> str: @property @abstractmethod def fallback_downloader(self) -> Optional["CachedDownloader"]: + """ + Property MAY be overriden in subclasses. + Used in case if subclass failed to download model. So preferred downloader SHOULD be placed higher in the hierarchy. + Less preferred downloader SHOULD be provided by this property. + """ pass def download_model(self, file_name: str) -> Path: + """ + Downloads model from the internet and saves it to the cache. + + Behavior: + If model is already downloaded it will be loaded from the cache. + + If model is already downloaded, but checksum is invalid, it will be downloaded again. + + If model download failed, fallback downloader will be used. + """ try: return self.download_model_base(file_name) except BaseException as e: @@ -116,14 +154,23 @@ def download_model(self, file_name: str) -> Path: raise e @abstractmethod - def download_model_base(self, file_name: str) -> Path: - """Download model from any source if not cached. Returns path if cached""" + def download_model_base(self, model_name: str) -> Path: + """ + Download model from any source if not cached. + Returns: + pathlib.Path: Path to the downloaded model. + """ - def __call__(self, file_name: str): - return self.download_model(file_name) + def __call__(self, model_name: str): + return self.download_model(model_name) class HuggingFaceCompatibleDownloader(CachedDownloader, ABC): + """ + Downloader for models from HuggingFace Hub. 
+ Private models are not supported. + """ + def __init__( self, name: str = "Huggingface.co", @@ -131,7 +178,10 @@ def __init__( fb_downloader: Optional["CachedDownloader"] = None, ): self.cache_dir = checkpoints_dir + """SHOULD be same for all instances to prevent downloading same model multiple times + Points to ~/.cache/carvekit/checkpoints""" self.base_url = base_url + """MUST be a base url with protocol and domain name to huggingface or another, compatible in terms of models downloading API source""" self._name = name self._fallback_downloader = fb_downloader @@ -143,13 +193,18 @@ def fallback_downloader(self) -> Optional["CachedDownloader"]: def name(self): return self._name - def check_for_existence(self, file_name: str) -> Optional[Path]: - if file_name not in MODELS_URLS.keys(): + def check_for_existence(self, model_name: str) -> Optional[Path]: + """ + Checks if model is already downloaded and cached. Verifies file integrity by checksum. + Returns: + Optional[pathlib.Path]: Path to the cached model if cached. + """ + if model_name not in MODELS_URLS.keys(): raise FileNotFoundError("Unknown model!") path = ( self.cache_dir - / MODELS_URLS[file_name]["repository"].split("/")[1] - / file_name + / MODELS_URLS[model_name]["repository"].split("/")[1] + / model_name ) if not path.exists(): @@ -163,18 +218,18 @@ def check_for_existence(self, file_name: str) -> Optional[Path]: return None return path - def download_model_base(self, file_name: str) -> Path: - cached_path = self.check_for_existence(file_name) + def download_model_base(self, model_name: str) -> Path: + cached_path = self.check_for_existence(model_name) if cached_path is not None: return cached_path else: cached_path = ( self.cache_dir - / MODELS_URLS[file_name]["repository"].split("/")[1] - / file_name + / MODELS_URLS[model_name]["repository"].split("/")[1] + / model_name ) cached_path.parent.mkdir(parents=True, exist_ok=True) - url = MODELS_URLS[file_name] + url = MODELS_URLS[model_name] hugging_face_url = f"{self.base_url}/{url['repository']}/resolve/{url['revision']}/{url['filename']}" try: @@ -190,10 +245,10 @@ def download_model_base(self, file_name: str) -> Path: f.write(chunk) else: if r.status_code == 404: - raise FileNotFoundError(f"Model {file_name} not found!") + raise FileNotFoundError(f"Model {model_name} not found!") else: raise ConnectionError( - f"Error {r.status_code} while downloading model {file_name}!" + f"Error {r.status_code} while downloading model {model_name}!" ) except BaseException as e: if cached_path.exists(): diff --git a/carvekit/utils/fs_utils.py b/carvekit/utils/fs_utils.py index bd6291e..5219af5 100644 --- a/carvekit/utils/fs_utils.py +++ b/carvekit/utils/fs_utils.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ from pathlib import Path @@ -14,9 +16,9 @@ def save_file(output: Optional[Path], input_path: Path, image: Image.Image): Saves an image to the file system Args: - output: Output path [dir or end file] - input_path: Input path of the image - image: Image to be saved. + output (Optional[pathlib.Path]): Output path [dir or end file] + input_path (pathlib.Path): Input path of the image + image (Image.Image): Image to be saved. 
""" if isinstance(output, Path) and str(output) != "none": if output.is_dir() and output.exists(): diff --git a/carvekit/utils/image_utils.py b/carvekit/utils/image_utils.py index 8b939f5..cb2a538 100644 --- a/carvekit/utils/image_utils.py +++ b/carvekit/utils/image_utils.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ @@ -19,22 +21,22 @@ def to_tensor(x: Any) -> torch.Tensor: Returns a PIL.Image.Image as torch tensor without swap tensor dims. Args: - x: PIL.Image.Image instance + x (PIL.Image.Image): image Returns: - torch.Tensor instance + torch.Tensor: image as torch tensor """ return torch.tensor(np.array(x, copy=True)) def load_image(file: Union[str, pathlib.Path, PIL.Image.Image]) -> PIL.Image.Image: - """Returns a PIL.Image.Image class by string path or pathlib path or PIL.Image.Image instance + """Returns a `PIL.Image.Image` class by string path or `pathlib.Path` or `PIL.Image.Image` instance Args: - file: File path or PIL.Image.Image instance + file (Union[str, pathlib.Path, PIL.Image.Image]): File path or `PIL.Image.Image` instance Returns: - PIL.Image.Image instance + PIL.Image.Image: image instance loaded from `file` location Raises: ValueError: If file not exists or file is directory or file isn't an image or file is not correct PIL Image @@ -54,11 +56,11 @@ def convert_image(image: PIL.Image.Image, mode="RGB") -> PIL.Image.Image: """Performs image conversion to correct color mode Args: - image: PIL.Image.Image instance - mode: Colort Mode to convert + image (PIL.Image.Image): `PIL.Image.Image` instance + mode (str, default=RGB): Color mode to convert Returns: - PIL.Image.Image instance + PIL.Image.Image: converted image Raises: ValueError: If image hasn't convertable color mode, or it is too small @@ -71,10 +73,10 @@ def is_image_valid(image: Union[pathlib.Path, PIL.Image.Image]) -> bool: """This function performs image validation. Args: - image: Path to the image or PIL.Image.Image instance being checked. + image (Union[pathlib.Path, PIL.Image.Image]): Path to the image or `PIL.Image.Image` instance being checked. Returns: - True if image is valid + bool: True if image is valid, False otherwise. Raises: ValueError: If file not a valid image path or image hasn't convertable color mode, or it is too small @@ -92,7 +94,11 @@ def is_image_valid(image: Union[pathlib.Path, PIL.Image.Image]) -> bool: elif isinstance(image, PIL.Image.Image): if not (image.size[0] > 32 and image.size[1] > 32): raise ValueError("Image should be bigger then (32x32) pixels.") - elif image.mode not in ["RGB", "RGBA", "L"]: + elif image.mode not in [ + "RGB", + "RGBA", + "L", + ]: raise ValueError("Wrong image color mode.") else: raise ValueError("Unknown input file type") @@ -106,12 +112,12 @@ def transparency_paste( Inserts an image into another image while maintaining transparency. Args: - bg_img: background image - fg_img: foreground image - box: place to paste + bg_img (PIL.Image.Image): background image + fg_img (PIL.Image.Image): foreground image + box (tuple[int, int]): place to paste Returns: - Background image with pasted foreground image at point or in the specified box + PIL.Image.Image: Background image with pasted foreground image at point or in the specified box """ fg_img_trans = PIL.Image.new("RGBA", bg_img.size) fg_img_trans.paste(fg_img, box, mask=fg_img) @@ -131,15 +137,15 @@ def add_margin( Adds margin to the image. 
Args: - pil_img: Image that needed to add margin. - top: pixels count at top side - right: pixels count at right side - bottom: pixels count at bottom side - left: pixels count at left side - color: color of margin + pil_img (PIL.Image.Image): Image that needed to add margin. + top (int): pixels count at top side + right (int): pixels count at right side + bottom (int): pixels count at bottom side + left (int): pixels count at left side + color (Tuple[int, int, int, int]): color of margin Returns: - Image with margin. + PIL.Image.Image: Image with margin. """ width, height = pil_img.size new_width = width + right + left diff --git a/carvekit/utils/mask_utils.py b/carvekit/utils/mask_utils.py index 4402036..cd712c8 100644 --- a/carvekit/utils/mask_utils.py +++ b/carvekit/utils/mask_utils.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ import PIL.Image @@ -19,13 +21,13 @@ def composite( https://pymatting.github.io/intro.html#alpha-matting math formula. Args: - device: Processing device - foreground: Image that will be pasted to background image with following alpha mask. - background: Background image - alpha: Alpha Image + foreground (PIL.Image.Image): Image that will be pasted to background image with following alpha mask. + background (PIL.Image.Image): Background image + alpha (PIL.Image.Image): Alpha Image + device (Literal[cpu, cuda]): Processing device Returns: - Composited image as PIL.Image instance. + PIL.Image.Image: Composited image. """ foreground = foreground.convert("RGBA") @@ -58,12 +60,12 @@ def apply_mask( Applies mask to foreground. Args: - device: Processing device. - image: Image with background. - mask: Alpha Channel mask for this image. + image (PIL.Image.Image): Image with background. + mask (PIL.Image.Image): Alpha Channel mask for this image. + device (Literal[cpu, cuda]): Processing device. Returns: - Image without background, where mask was black. + PIL.Image.Image: Image without background, where mask was black. """ background = PIL.Image.new("RGBA", image.size, color=(130, 130, 130, 0)) return composite(image, background, mask, device=device).convert("RGBA") @@ -77,7 +79,7 @@ def extract_alpha_channel(image: PIL.Image.Image) -> PIL.Image.Image: image: RGBA PIL image Returns: - RGBA alpha channel image + PIL.Image.Image: RGBA alpha channel image """ alpha = image.split()[-1] bg = PIL.Image.new("RGBA", image.size, (0, 0, 0, 255)) diff --git a/carvekit/utils/models_utils.py b/carvekit/utils/models_utils.py index da0141d..cdd5329 100644 --- a/carvekit/utils/models_utils.py +++ b/carvekit/utils/models_utils.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ @@ -14,7 +16,7 @@ class EmptyAutocast(object): """ - Empty class for disable any autocasting. + Empty class for any auto-casting disabling. """ def __enter__(self): @@ -34,20 +36,21 @@ def get_precision_autocast( Tuple[autocast, Union[torch.dtype, Any]], ]: """ - Returns precision and autocast settings for given device and fp16 settings. + Returns precision and auto-cast settings for given device and fp16 settings. + Args: - device: Device to get precision and autocast settings for. - fp16: Whether to use fp16 precision. - override_dtype: Override dtype for autocast. 
+ device (Literal[cpu, cuda]): Device to get precision and auto-cast settings for. + fp16 (bool): Whether to use fp16 precision. + override_dtype (bool): Override dtype for auto-cast. Returns: - Autocast object, dtype + Union[Tuple[EmptyAutocast, Union[torch.dtype, Any]],Tuple[autocast, Union[torch.dtype, Any]]]: Autocast object, dtype """ dtype = torch.float32 cache_enabled = None if device == "cpu" and fp16: - warnings.warn('FP16 is not supported on CPU. Using FP32 instead.') + warnings.warn("FP16 is not supported on CPU. Using FP32 instead.") dtype = torch.float32 # TODO: Implement BFP16 on CPU. There are unexpected slowdowns on cpu on a clean environment. @@ -59,7 +62,6 @@ def get_precision_autocast( # torch.bfloat16 # ) # Using bfloat16 for CPU, since autocast is not supported for float16 - if "cuda" in device and fp16: dtype = torch.float16 cache_enabled = True @@ -79,11 +81,12 @@ def get_precision_autocast( def cast_network(network: torch.nn.Module, dtype: torch.dtype): - """Cast network to given dtype + """ + Cast network to given dtype Args: - network: Network to be casted - dtype: Dtype to cast network to + network (torch.nn.Module): Network to be casted + dtype (torch.dtype): Dtype to cast network to """ if dtype == torch.float16: network.half() @@ -95,11 +98,12 @@ def cast_network(network: torch.nn.Module, dtype: torch.dtype): raise ValueError(f"Unknown dtype {dtype}") -def fix_seed(seed=42): - """Sets fixed random seed +def fix_seed(seed: int = 42): + """ + Sets fixed random seed Args: - seed: Random seed to be set + seed (int, default=42): Random seed to be set """ random.seed(seed) torch.manual_seed(seed) diff --git a/carvekit/utils/pool_utils.py b/carvekit/utils/pool_utils.py index ae3b741..8822ea9 100644 --- a/carvekit/utils/pool_utils.py +++ b/carvekit/utils/pool_utils.py @@ -1,39 +1,41 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ from concurrent.futures import ThreadPoolExecutor -from typing import Any, Iterable +from typing import Any, Iterable, Callable, Collection, List -def thread_pool_processing(func: Any, data: Iterable, workers=18): +def thread_pool_processing(func: Callable[[Any], Any], data: Iterable, workers=18): """ Passes all iterator data through the given function Args: - workers: Count of workers. - func: function to pass data through - data: input iterator + workers (int, default=18): Count of workers. 
+ func (Callable[[Any], Any]): function to pass data through + data (Iterable): input iterator Returns: - function return list + List[Any]: list of results """ with ThreadPoolExecutor(workers) as p: return list(p.map(func, data)) -def batch_generator(iterable, n=1): +def batch_generator(iterable: Collection, n: int = 1) -> Iterable[Collection]: """ Splits any iterable into n-size packets Args: - iterable: iterator - n: size of packets + iterable (Collection): iterator + n (int, default=1): size of packets Returns: - new n-size packet + Iterable[Collection]: new n-size packet """ it = len(iterable) for ndx in range(0, it, n): diff --git a/carvekit/web/schemas/config.py b/carvekit/web/schemas/config.py index 5d47ffc..8b12c02 100644 --- a/carvekit/web/schemas/config.py +++ b/carvekit/web/schemas/config.py @@ -24,20 +24,26 @@ class MLConfig(BaseModel): "u2net", "deeplabv3", "basnet", "tracer_b7" ] = "tracer_b7" """Segmentation Network""" - preprocessing_method: Literal["none", "stub"] = "none" + preprocessing_method: Literal["none", "stub", "autoscene", "auto"] = "autoscene" """Pre-processing Method""" - postprocessing_method: Literal["fba", "none"] = "fba" + postprocessing_method: Literal["fba", "cascade_fba", "none"] = "cascade_fba" """Post-Processing Network""" device: str = "cpu" """Processing device""" + batch_size_pre: int = 5 + """Batch size for preprocessing method""" batch_size_seg: int = 5 """Batch size for segmentation network""" batch_size_matting: int = 1 """Batch size for matting network""" + batch_size_refine: int = 1 + """Batch size for refine network""" seg_mask_size: int = 640 """The size of the input image for the segmentation neural network.""" matting_mask_size: int = 2048 """The size of the input image for the matting neural network.""" + refine_mask_size: int = 900 + """The size of the input image for the refine neural network.""" fp16: bool = False """Use half precision for inference""" trimap_dilation: int = 30 diff --git a/carvekit/web/utils/init_utils.py b/carvekit/web/utils/init_utils.py index f687182..d975e27 100644 --- a/carvekit/web/utils/init_utils.py +++ b/carvekit/web/utils/init_utils.py @@ -1,18 +1,26 @@ +import warnings from os import getenv from typing import Union from loguru import logger +from carvekit.ml.wrap.cascadepsp import CascadePSP +from carvekit.ml.wrap.scene_classifier import SceneClassifier from carvekit.web.schemas.config import WebAPIConfig, MLConfig, AuthConfig + from carvekit.api.interface import Interface +from carvekit.api.autointerface import AutoInterface + from carvekit.ml.wrap.fba_matting import FBAMatting from carvekit.ml.wrap.u2net import U2NET from carvekit.ml.wrap.deeplab_v3 import DeepLabV3 from carvekit.ml.wrap.basnet import BASNET from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 +from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4 -from carvekit.pipelines.postprocessing import MattingMethod -from carvekit.pipelines.preprocessing import PreprocessingStub + +from carvekit.pipelines.postprocessing import MattingMethod, CasMattingMethod +from carvekit.pipelines.preprocessing import PreprocessingStub, AutoScene from carvekit.trimap.generator import TrimapGenerator @@ -36,6 +44,9 @@ def init_config() -> WebAPIConfig: default_config.ml.postprocessing_method, ), device=getenv("CARVEKIT_DEVICE", default_config.ml.device), + batch_size_pre=int( + getenv("CARVEKIT_BATCH_SIZE_PRE", default_config.ml.batch_size_pre) + ), batch_size_seg=int( getenv("CARVEKIT_BATCH_SIZE_SEG", default_config.ml.batch_size_seg) ), @@ -45,6 +56,12 @@ 
def init_config() -> WebAPIConfig: default_config.ml.batch_size_matting, ) ), + batch_size_refine=int( + getenv( + "CARVEKIT_BATCH_SIZE_REFINE", + default_config.ml.batch_size_refine, + ) + ), seg_mask_size=int( getenv("CARVEKIT_SEG_MASK_SIZE", default_config.ml.seg_mask_size) ), @@ -54,6 +71,12 @@ def init_config() -> WebAPIConfig: default_config.ml.matting_mask_size, ) ), + refine_mask_size=int( + getenv( + "CARVEKIT_REFINE_MASK_SIZE", + default_config.ml.refine_mask_size, + ) + ), fp16=bool(int(getenv("CARVEKIT_FP16", default_config.ml.fp16))), trimap_prob_threshold=int( getenv( @@ -92,74 +115,131 @@ def init_config() -> WebAPIConfig: def init_interface(config: Union[WebAPIConfig, MLConfig]) -> Interface: if isinstance(config, WebAPIConfig): config = config.ml - if config.segmentation_network == "u2net": - seg_net = U2NET( - device=config.device, - batch_size=config.batch_size_seg, - input_image_size=config.seg_mask_size, - fp16=config.fp16, - ) - elif config.segmentation_network == "deeplabv3": - seg_net = DeepLabV3( - device=config.device, - batch_size=config.batch_size_seg, - input_image_size=config.seg_mask_size, - fp16=config.fp16, + if config.preprocessing_method == "auto": + warnings.warn( + "Preprocessing_method is set to `auto`." + "We will use automatic methods to determine the best methods for your images! " + "Please note that this is not always the best option and all other options will be ignored!" ) - elif config.segmentation_network == "basnet": - seg_net = BASNET( - device=config.device, - batch_size=config.batch_size_seg, - input_image_size=config.seg_mask_size, - fp16=config.fp16, + scene_classifier = SceneClassifier( + device=config.device, batch_size=config.batch_size_pre, fp16=config.fp16 ) - elif config.segmentation_network == "tracer_b7": - seg_net = TracerUniversalB7( - device=config.device, - batch_size=config.batch_size_seg, - input_image_size=config.seg_mask_size, - fp16=config.fp16, + object_classifier = SimplifiedYoloV4( + device=config.device, batch_size=config.batch_size_pre, fp16=config.fp16 ) - else: - seg_net = TracerUniversalB7( - device=config.device, - batch_size=config.batch_size_seg, - input_image_size=config.seg_mask_size, + return AutoInterface( + scene_classifier=scene_classifier, + object_classifier=object_classifier, + segmentation_batch_size=config.batch_size_seg, + postprocessing_batch_size=config.batch_size_matting, + postprocessing_image_size=config.matting_mask_size, + segmentation_device=config.device, + postprocessing_device=config.device, fp16=config.fp16, ) - if config.preprocessing_method == "stub": - preprocessing = PreprocessingStub() - elif config.preprocessing_method == "none": - preprocessing = None else: - preprocessing = None + if config.segmentation_network == "u2net": + seg_net = U2NET( + device=config.device, + batch_size=config.batch_size_seg, + input_image_size=config.seg_mask_size, + fp16=config.fp16, + ) + elif config.segmentation_network == "deeplabv3": + seg_net = DeepLabV3( + device=config.device, + batch_size=config.batch_size_seg, + input_image_size=config.seg_mask_size, + fp16=config.fp16, + ) + elif config.segmentation_network == "basnet": + seg_net = BASNET( + device=config.device, + batch_size=config.batch_size_seg, + input_image_size=config.seg_mask_size, + fp16=config.fp16, + ) + elif config.segmentation_network == "tracer_b7": + seg_net = TracerUniversalB7( + device=config.device, + batch_size=config.batch_size_seg, + input_image_size=config.seg_mask_size, + fp16=config.fp16, + ) + else: + seg_net = 
TracerUniversalB7( + device=config.device, + batch_size=config.batch_size_seg, + input_image_size=config.seg_mask_size, + fp16=config.fp16, + ) - if config.postprocessing_method == "fba": - fba = FBAMatting( - device=config.device, - batch_size=config.batch_size_matting, - input_tensor_size=config.matting_mask_size, - fp16=config.fp16, - ) - trimap_generator = TrimapGenerator( - prob_threshold=config.trimap_prob_threshold, - kernel_size=config.trimap_dilation, - erosion_iters=config.trimap_erosion, - ) - postprocessing = MattingMethod( - device=config.device, matting_module=fba, trimap_generator=trimap_generator - ) + if config.preprocessing_method == "stub": + preprocessing = PreprocessingStub() + elif config.preprocessing_method == "none": + preprocessing = None + elif config.preprocessing_method == "autoscene": + preprocessing = AutoScene( + scene_classifier=SceneClassifier( + device=config.device, + batch_size=config.batch_size_pre, + fp16=config.fp16, + ) + ) + else: + preprocessing = None - elif config.postprocessing_method == "none": - postprocessing = None - else: - postprocessing = None + if config.postprocessing_method == "fba": + fba = FBAMatting( + device=config.device, + batch_size=config.batch_size_matting, + input_tensor_size=config.matting_mask_size, + fp16=config.fp16, + ) + trimap_generator = TrimapGenerator( + prob_threshold=config.trimap_prob_threshold, + kernel_size=config.trimap_dilation, + erosion_iters=config.trimap_erosion, + ) + postprocessing = MattingMethod( + device=config.device, + matting_module=fba, + trimap_generator=trimap_generator, + ) + elif config.postprocessing_method == "cascade_fba": + cascadepsp = CascadePSP( + device=config.device, + batch_size=config.batch_size_refine, + input_tensor_size=config.refine_mask_size, + fp16=config.fp16, + ) + fba = FBAMatting( + device=config.device, + batch_size=config.batch_size_matting, + input_tensor_size=config.matting_mask_size, + fp16=config.fp16, + ) + trimap_generator = TrimapGenerator( + prob_threshold=config.trimap_prob_threshold, + kernel_size=config.trimap_dilation, + erosion_iters=config.trimap_erosion, + ) + postprocessing = CasMattingMethod( + device=config.device, + matting_module=fba, + trimap_generator=trimap_generator, + refining_module=cascadepsp, + ) + elif config.postprocessing_method == "none": + postprocessing = None + else: + postprocessing = None - interface = Interface( - pre_pipe=preprocessing, - post_pipe=postprocessing, - seg_pipe=seg_net, - device=config.device, - ) + interface = Interface( + pre_pipe=preprocessing, + post_pipe=postprocessing, + seg_pipe=seg_net, + device=config.device, + ) return interface diff --git a/conftest.py b/conftest.py index f328d35..3f75d22 100644 --- a/conftest.py +++ b/conftest.py @@ -23,6 +23,7 @@ from carvekit.ml.wrap.fba_matting import FBAMatting from carvekit.ml.wrap.deeplab_v3 import DeepLabV3 from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 +from carvekit.ml.wrap.scene_classifier import SceneClassifier @pytest.fixture() @@ -37,6 +38,15 @@ def u2net_model() -> Callable[[bool], U2NET]: ) +@pytest.fixture() +def scene_classifier_model() -> Callable[[bool], SceneClassifier]: + return lambda fb16: SceneClassifier( + device="cuda" if torch.cuda.is_available() else "cpu", + batch_size=5, + fp16=fb16, + ) + + @pytest.fixture() def tracer_model() -> Callable[[bool], TracerUniversalB7]: return lambda fb16: TracerUniversalB7( diff --git a/docker-compose.cpu.yml b/docker-compose.cpu.yml index 1fe3f5a..bcaa647 100644 --- a/docker-compose.cpu.yml +++ 
b/docker-compose.cpu.yml @@ -7,13 +7,16 @@ services: - CARVEKIT_PORT=5000 - CARVEKIT_HOST=0.0.0.0 - CARVEKIT_SEGMENTATION_NETWORK=tracer_b7 # can be u2net, tracer_b7, basnet, deeplabv3 - - CARVEKIT_PREPROCESSING_METHOD=none # can be none, stub - - CARVEKIT_POSTPROCESSING_METHOD=fba # can be none, fba + - CARVEKIT_PREPROCESSING_METHOD=none # can be none, stub, autoscene, auto + - CARVEKIT_POSTPROCESSING_METHOD=cascade_fba # can be none, fba, cascade_fba - CARVEKIT_DEVICE=cpu # can be cuda (req. cuda docker image), cpu + - CARVEKIT_BATCH_SIZE_PRE=5 # Number of images processed per one preprocessing method call. NOT USED IF WEB API IS USED - CARVEKIT_BATCH_SIZE_SEG=5 # Number of images processed per one segmentation nn call. NOT USED IF WEB API IS USED - CARVEKIT_BATCH_SIZE_MATTING=1 # Number of images processed per one matting nn call. NOT USED IF WEB API IS USED + - CARVEKIT_BATCH_SIZE_REFINE=1 # Number of images processed per one refine nn call. NOT USED IF WEB API IS USED - CARVEKIT_SEG_MASK_SIZE=640 # The size of the input image for the segmentation neural network. - CARVEKIT_MATTING_MASK_SIZE=2048 # The size of the input image for the matting neural network. + - CARVEKIT_REFINE_MASK_SIZE=900 # The size of the input image for the refine neural network. - CARVEKIT_FP16=0 # Enables FP16 mode (Only CUDA at the moment) - CARVEKIT_TRIMAP_PROB_THRESHOLD=231 # Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied - CARVEKIT_TRIMAP_DILATION=30 # The size of the offset radius from the object mask in pixels when forming an unknown area diff --git a/docker-compose.cuda.yml b/docker-compose.cuda.yml index 8308594..f90d9a2 100644 --- a/docker-compose.cuda.yml +++ b/docker-compose.cuda.yml @@ -7,13 +7,16 @@ services: - CARVEKIT_PORT=5000 - CARVEKIT_HOST=0.0.0.0 - CARVEKIT_SEGMENTATION_NETWORK=tracer_b7 # can be u2net, tracer_b7, basnet, deeplabv3 - - CARVEKIT_PREPROCESSING_METHOD=none # can be none, stub - - CARVEKIT_POSTPROCESSING_METHOD=fba # can be none, fba + - CARVEKIT_PREPROCESSING_METHOD=none # can be none, stub, autoscene, auto + - CARVEKIT_POSTPROCESSING_METHOD=cascade_fba # can be none, fba, cascade_fba - CARVEKIT_DEVICE=cuda # can be cuda (req. cuda docker image), cpu + - CARVEKIT_BATCH_SIZE_PRE=5 # Number of images processed per one preprocessing method call. NOT USED IF WEB API IS USED - CARVEKIT_BATCH_SIZE_SEG=5 # Number of images processed per one segmentation nn call. NOT USED IF WEB API IS USED - CARVEKIT_BATCH_SIZE_MATTING=1 # Number of images processed per one matting nn call. NOT USED IF WEB API IS USED + - CARVEKIT_BATCH_SIZE_REFINE=1 # Number of images processed per one refine nn call. NOT USED IF WEB API IS USED - CARVEKIT_SEG_MASK_SIZE=640 # The size of the input image for the segmentation neural network. - CARVEKIT_MATTING_MASK_SIZE=2048 # The size of the input image for the matting neural network. + - CARVEKIT_REFINE_MASK_SIZE=900 # The size of the input image for the refine neural network. - CARVEKIT_FP16=0 # Enables FP16 mode (Only CUDA at the moment) - CARVEKIT_TRIMAP_PROB_THRESHOLD=231 # Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied - CARVEKIT_TRIMAP_DILATION=30 # The size of the offset radius from the object mask in pixels when forming an unknown area diff --git a/docs/CREDITS.md b/docs/CREDITS.md index c544c65..337f9d0 100644 --- a/docs/CREDITS.md +++ b/docs/CREDITS.md @@ -24,3 +24,5 @@ All images are copyrighted by their authors. 10. https://arxiv.org/abs/1703.06870 11. 
https://github.com/Karel911/TRACER 12. https://arxiv.org/abs/2112.07380 +13. https://github.com/hkchengrex/CascadePSP + diff --git a/docs/api/__init__.html b/docs/api/__init__.html new file mode 100644 index 0000000..bbcbd85 --- /dev/null +++ b/docs/api/__init__.html @@ -0,0 +1,48 @@ + + + + + + +__init__ API documentation + + + + + + + + + + + +
[pdoc-generated page body omitted: the `__init__` module page contains only navigation scaffolding and no documented members]
+ + + \ No newline at end of file diff --git a/docs/api/basnet.html b/docs/api/basnet.html new file mode 100644 index 0000000..acb0232 --- /dev/null +++ b/docs/api/basnet.html @@ -0,0 +1,469 @@ + + + + + + +basnet API documentation + + + + + + + + + + + +
+
+
+

Module basnet

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import pathlib
+from typing import Union, List
+
+import PIL
+import numpy as np
+import torch
+from PIL import Image
+
+from carvekit.ml.arch.basnet.basnet import BASNet
+from carvekit.ml.files.models_loc import basnet_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["BASNET"]
+
+
+class BASNET(BASNet):
+    """BASNet model interface"""
+
+    def __init__(
+        self,
+        device="cpu",
+        input_image_size: Union[List[int], int] = 320,
+        batch_size: int = 10,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the BASNET model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=320): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use fp16 precision **not supported at this moment**
+        """
+        super(BASNET, self).__init__(n_channels=3, n_classes=1)
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(basnet_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        resized = data.resize(self.input_image_size)
+        # noinspection PyTypeChecker
+        resized_arr = np.array(resized, dtype=np.float64)
+        temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+        if np.max(resized_arr) != 0:
+            resized_arr /= np.max(resized_arr)
+        temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+        temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+        temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+        temp_image = temp_image.transpose((2, 0, 1))
+        temp_image = np.expand_dims(temp_image, 0)
+        return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        data = data.unsqueeze(0)
+        mask = data[:, 0, :, :]
+        ma = torch.max(mask)  # Normalizes prediction
+        mi = torch.min(mask)
+        predict = ((mask - mi) / (ma - mi)).squeeze()
+        predict_np = predict.cpu().data.numpy() * 255
+        mask = Image.fromarray(predict_np).convert("L")
+        mask = mask.resize(original_image.size, resample=3)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks as for input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        for image_batch in batch_generator(images, self.batch_size):
+            converted_images = thread_pool_processing(
+                lambda x: convert_image(load_image(x)), image_batch
+            )
+            batches = torch.vstack(
+                thread_pool_processing(self.data_preprocessing, converted_images)
+            )
+            with torch.no_grad():
+                batches = batches.to(self.device)
+                masks, d2, d3, d4, d5, d6, d7, d8 = super(BASNET, self).__call__(
+                    batches
+                )
+                masks_cpu = masks.cpu()
+                del d2, d3, d4, d5, d6, d7, d8, batches, masks
+            masks = thread_pool_processing(
+                lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]),
+                range(len(converted_images)),
+            )
+            collect_masks += masks
+        return collect_masks
+
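For quick orientation, a minimal usage sketch of the wrapper listed above; the sample image path and the output file name are illustrative only:

``` python
import torch
from carvekit.ml.wrap.basnet import BASNET

# Minimal sketch: segment one image with the BASNet wrapper and save the resulting mask.
net = BASNET(
    device="cuda" if torch.cuda.is_available() else "cpu",
    input_image_size=320,  # the wrapper's default input size
    batch_size=10,
)
masks = net(["./tests/data/cat.jpg"])  # returns a list of PIL.Image.Image masks in "L" mode
masks[0].save("cat_mask.png")
```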
+
+
+
+
+
+
+
+
+

Classes

+
+
+class BASNET(device='cpu', input_image_size: Union[List[int], int] = 320, batch_size: int = 10, load_pretrained: bool = True, fp16: bool = False)
+
+

BASNet model interface

+

Initialize the BASNET model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_image_size : Union[List[int], int], default=320
+
input image size
+
batch_size : int, default=10
+
the number of images that the neural network processes in one run
+
load_pretrained : bool, default=True
+
loading pretrained model
+
fp16 : bool, default=False
+
use fp16 precision not supported at this moment
+
+
+ +Expand source code + +
+
+

Ancestors

+
    +
  • BASNet
  • +
  • torch.nn.modules.module.Module
  • +
+

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) -> PIL.Image.Image
+
+

Transforms output data from neural network to suitable data format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask as PIL Image instance
+
+
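One detail of this method that is easy to miss in the source listing above: the raw network output is min-max normalized to [0, 255] and resized back to the source resolution with `resample=3`, which is the numeric value of PIL's bicubic filter. A purely illustrative restatement:

``` python
import torch
from PIL import Image

# Illustrative restatement of data_postprocessing: min-max normalize the prediction,
# scale to [0, 255], and resize back with bicubic interpolation (resample=3).
def to_mask(pred: torch.Tensor, size: tuple) -> Image.Image:
    pred = (pred - pred.min()) / (pred.max() - pred.min())
    arr = pred.squeeze().cpu().numpy() * 255
    return Image.fromarray(arr).convert("L").resize(size, resample=Image.BICUBIC)
```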
+ +Expand source code + +
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.Tensor
+
input for neural network
+
+
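The per-channel constants used by `data_preprocessing` are the standard ImageNet mean and standard deviation, applied after the image has been scaled by its own maximum. A vectorized, purely illustrative equivalent of that step:

``` python
import numpy as np

# Illustrative NumPy equivalent of the normalization in data_preprocessing:
# scale by the image maximum, normalize with the ImageNet mean/std per channel,
# then reorder to a (1, 3, H, W) tensor layout.
IMAGENET_MEAN = np.array([0.485, 0.456, 0.406])
IMAGENET_STD = np.array([0.229, 0.224, 0.225])

def normalize_chw(resized_rgb: np.ndarray) -> np.ndarray:
    img = resized_rgb.astype(np.float64)
    if img.max() != 0:
        img = img / img.max()
    img = (img - IMAGENET_MEAN) / IMAGENET_STD
    return img.transpose((2, 0, 1))[np.newaxis, ...]
```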
+ +Expand source code + +
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/api/autointerface.html b/docs/api/carvekit/api/autointerface.html new file mode 100644 index 0000000..702f700 --- /dev/null +++ b/docs/api/carvekit/api/autointerface.html @@ -0,0 +1,696 @@ + + + + + + +carvekit.api.autointerface API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.api.autointerface

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool
Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from collections import Counter
+from pathlib import Path
+
+from PIL import Image
+from typing import Union, List, Dict
+
+from carvekit.api.interface import Interface
+from carvekit.ml.wrap.basnet import BASNET
+from carvekit.ml.wrap.cascadepsp import CascadePSP
+from carvekit.ml.wrap.deeplab_v3 import DeepLabV3
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from carvekit.ml.wrap.scene_classifier import SceneClassifier
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.ml.wrap.u2net import U2NET
+from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4
+from carvekit.pipelines.postprocessing import CasMattingMethod
+from carvekit.trimap.generator import TrimapGenerator
+
+__all__ = ["AutoInterface"]
+
+from carvekit.utils.image_utils import load_image
+
+from carvekit.utils.pool_utils import thread_pool_processing
+
+
+class AutoInterface(Interface):
+    def __init__(
+        self,
+        scene_classifier: SceneClassifier,
+        object_classifier: SimplifiedYoloV4,
+        segmentation_batch_size: int = 3,
+        refining_batch_size: int = 1,
+        refining_image_size: int = 900,
+        postprocessing_batch_size: int = 1,
+        postprocessing_image_size: int = 2048,
+        segmentation_device: str = "cpu",
+        postprocessing_device: str = "cpu",
+        fp16=False,
+    ):
+        """
+        Args:
+            scene_classifier: SceneClassifier instance
+            object_classifier: SimplifiedYoloV4 instance
+        """
+        self.scene_classifier = scene_classifier
+        self.object_classifier = object_classifier
+        self.segmentation_batch_size = segmentation_batch_size
+        self.refining_batch_size = refining_batch_size
+        self.refining_image_size = refining_image_size
+        self.postprocessing_batch_size = postprocessing_batch_size
+        self.postprocessing_image_size = postprocessing_image_size
+        self.segmentation_device = segmentation_device
+        self.postprocessing_device = postprocessing_device
+        self.fp16 = fp16
+        super().__init__(
+            seg_pipe=None, post_pipe=None, pre_pipe=None
+        )  # just for compatibility with Interface class
+
+    @staticmethod
+    def select_params_for_net(net: Union[TracerUniversalB7, U2NET, DeepLabV3]):
+        """
+        Selects the parameters for the network depending on the scene
+
+        Args:
+            net: network
+        """
+        if net == TracerUniversalB7:
+            return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+        elif net == U2NET:
+            return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+        elif net == DeepLabV3:
+            return {"prob_threshold": 231, "kernel_size": 40, "erosion_iters": 20}
+        elif net == BASNET:
+            return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+        else:
+            raise ValueError("Unknown network type")
+
+    def select_net(self, scene: str, images_info: List[dict]):
+        # TODO: Update this function, when new networks will be added
+        if scene == "hard":
+            for image_info in images_info:
+                objects = image_info["objects"]
+                if len(objects) == 0:
+                    image_info[
+                        "net"
+                    ] = TracerUniversalB7  # It seems that the image is empty, but we will try to process it
+                    continue
+                obj_counter: Dict = dict(Counter([obj for obj in objects]))
+                # fill empty classes
+                for _tag in self.object_classifier.db:
+                    if _tag not in obj_counter:
+                        obj_counter[_tag] = 0
+
+                non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0]
+
+                if obj_counter["human"] > 0 and len(non_empty_classes) == 1:
+                    # Human only case. Hard Scene? It may be a photo of a person in far/middle distance.
+                    image_info["net"] = TracerUniversalB7
+                    # TODO: will use DeepLabV3+ for this image, it is more suitable for this case,
+                    #  but needs checks for small bbox
+                elif obj_counter["human"] > 0 and len(non_empty_classes) > 1:
+                    # Okay, we have a human without extra hairs and something else. Hard border
+                    image_info["net"] = TracerUniversalB7
+                elif obj_counter["cars"] > 0:
+                    # Cars case
+                    image_info["net"] = TracerUniversalB7
+                elif obj_counter["animals"] > 0:
+                    # Animals case
+                    image_info["net"] = U2NET  # animals should be always in soft scenes
+                else:
+                    # We have no idea what is in the image, so we will try to process it with universal model
+                    image_info["net"] = TracerUniversalB7
+
+        elif scene == "soft":
+            for image_info in images_info:
+                objects = image_info["objects"]
+                if len(objects) == 0:
+                    image_info[
+                        "net"
+                    ] = TracerUniversalB7  # It seems that the image is empty, but we will try to process it
+                    continue
+                obj_counter: Dict = dict(Counter([obj for obj in objects]))
+                # fill empty classes
+                for _tag in self.object_classifier.db:
+                    if _tag not in obj_counter:
+                        obj_counter[_tag] = 0
+
+                non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0]
+
+                if obj_counter["human"] > 0 and len(non_empty_classes) == 1:
+                    # Human only case. It may be a portrait
+                    image_info["net"] = U2NET
+                elif obj_counter["human"] > 0 and len(non_empty_classes) > 1:
+                    # Okay, we have a human with hairs and something else
+                    image_info["net"] = U2NET
+                elif obj_counter["cars"] > 0:
+                    # Cars case.
+                    image_info["net"] = TracerUniversalB7
+                elif obj_counter["animals"] > 0:
+                    # Animals case
+                    image_info["net"] = U2NET  # animals should be always in soft scenes
+                else:
+                    # We have no idea what is in the image, so we will try to process it with universal model
+                    image_info["net"] = TracerUniversalB7
+        elif scene == "digital":
+            for image_info in images_info:  # TODO: not implemented yet
+                image_info[
+                    "net"
+                ] = TracerUniversalB7  # Digital scenes are not handled separately yet; fall back to the universal model
+
+    def __call__(self, images: List[Union[str, Path, Image.Image]]):
+        """
+        Automatically detects the scene of each image, selects a suitable segmentation network and removes the background.
+
+        Args:
+            images: list of input images
+
+        Returns:
+            list of images without the background
+        """
+        loaded_images = thread_pool_processing(load_image, images)
+
+        scene_analysis = self.scene_classifier(loaded_images)
+        images_objects = self.object_classifier(loaded_images)
+
+        images_per_scene = {}
+        for i, image in enumerate(loaded_images):
+            scene_name = scene_analysis[i][0][0]
+            if scene_name not in images_per_scene:
+                images_per_scene[scene_name] = []
+            images_per_scene[scene_name].append(
+                {"image": image, "objects": images_objects[i]}
+            )
+
+        for scene_name, images_info in list(images_per_scene.items()):
+            self.select_net(scene_name, images_info)
+
+        # groups images by net
+        for scene_name, images_info in list(images_per_scene.items()):
+            groups = {}
+            for image_info in images_info:
+                net = image_info["net"]
+                if net not in groups:
+                    groups[net] = []
+                groups[net].append(image_info)
+            for net, gimages_info in list(groups.items()):
+                sc_images = [image_info["image"] for image_info in gimages_info]
+                masks = net(
+                    device=self.segmentation_device,
+                    batch_size=self.segmentation_batch_size,
+                    fp16=self.fp16,
+                )(sc_images)
+
+                for i, image_info in enumerate(gimages_info):
+                    image_info["mask"] = masks[i]
+
+        cascadepsp = CascadePSP(
+            device=self.postprocessing_device,
+            fp16=self.fp16,
+            input_tensor_size=self.refining_image_size,
+            batch_size=self.refining_batch_size,
+        )
+
+        fba = FBAMatting(
+            device=self.postprocessing_device,
+            batch_size=self.postprocessing_batch_size,
+            input_tensor_size=self.postprocessing_image_size,
+            fp16=self.fp16,
+        )
+        # groups images by net
+        for scene_name, images_info in list(images_per_scene.items()):
+            groups = {}
+            for image_info in images_info:
+                net = image_info["net"]
+                if net not in groups:
+                    groups[net] = []
+                groups[net].append(image_info)
+            for net, gimages_info in list(groups.items()):
+                sc_images = [image_info["image"] for image_info in gimages_info]
+                # noinspection PyArgumentList
+                trimap_generator = TrimapGenerator(**self.select_params_for_net(net))
+                matting_method = CasMattingMethod(
+                    refining_module=cascadepsp,
+                    matting_module=fba,
+                    trimap_generator=trimap_generator,
+                    device=self.postprocessing_device,
+                )
+                masks = [image_info["mask"] for image_info in gimages_info]
+                result = matting_method(sc_images, masks)
+
+                for i, image_info in enumerate(gimages_info):
+                    image_info["result"] = result[i]
+
+        # Reconstructing the original order of image
+        result = []
+        for image in loaded_images:
+            for scene_name, images_info in list(images_per_scene.items()):
+                for image_info in images_info:
+                    if image_info["image"] == image:
+                        result.append(image_info["result"])
+                        break
+        if len(result) != len(images):
+            raise RuntimeError(
+                "Something went wrong with restoring original order. Please report this bug."
+            )
+        return result
+
+
+
+
+
+
+
+
+
+
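For readers skimming the source above: routing starts with the scene classifier, and `AutoInterface.__call__` reads `scene_analysis[i][0][0]` as the scene label for each image. A minimal sketch of peeking at that label on its own (not part of the generated docs; the test image path and the assumption that `[0][0]` is the top-1 label follow from the indexing used in the source):

``` python
from carvekit.ml.wrap.scene_classifier import SceneClassifier
from carvekit.utils.image_utils import load_image

# Sketch: inspect the scene label AutoInterface would use for routing.
# Weights are downloaded on first use; the image path mirrors the repository's test data.
scene_classifier = SceneClassifier(device="cpu", batch_size=1)
image = load_image("./tests/data/cat.jpg")
analysis = scene_classifier([image])
print(analysis[0][0][0])  # expected: "hard", "soft" or "digital"
```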

Classes

+
+
+class AutoInterface +(scene_classifier: SceneClassifier, object_classifier: SimplifiedYoloV4, segmentation_batch_size: int = 3, refining_batch_size: int = 1, refining_image_size: int = 900, postprocessing_batch_size: int = 1, postprocessing_image_size: int = 2048, segmentation_device: str = 'cpu', postprocessing_device: str = 'cpu', fp16=False) +
+
+

Args

+
+
scene_classifier
+
SceneClassifier instance
+
object_classifier
+
SimplifiedYoloV4 instance
+
+
+ +Expand source code + +
class AutoInterface(Interface):
+    def __init__(
+        self,
+        scene_classifier: SceneClassifier,
+        object_classifier: SimplifiedYoloV4,
+        segmentation_batch_size: int = 3,
+        refining_batch_size: int = 1,
+        refining_image_size: int = 900,
+        postprocessing_batch_size: int = 1,
+        postprocessing_image_size: int = 2048,
+        segmentation_device: str = "cpu",
+        postprocessing_device: str = "cpu",
+        fp16=False,
+    ):
+        """
+        Args:
+            scene_classifier: SceneClassifier instance
+            object_classifier: SimplifiedYoloV4 instance
+        """
+        self.scene_classifier = scene_classifier
+        self.object_classifier = object_classifier
+        self.segmentation_batch_size = segmentation_batch_size
+        self.refining_batch_size = refining_batch_size
+        self.refining_image_size = refining_image_size
+        self.postprocessing_batch_size = postprocessing_batch_size
+        self.postprocessing_image_size = postprocessing_image_size
+        self.segmentation_device = segmentation_device
+        self.postprocessing_device = postprocessing_device
+        self.fp16 = fp16
+        super().__init__(
+            seg_pipe=None, post_pipe=None, pre_pipe=None
+        )  # just for compatibility with Interface class
+
+    @staticmethod
+    def select_params_for_net(net: Union[TracerUniversalB7, U2NET, DeepLabV3]):
+        """
+        Selects the trimap generation parameters for the given segmentation network
+
+        Args:
+            net: segmentation network class
+        """
+        if net == TracerUniversalB7:
+            return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+        elif net == U2NET:
+            return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+        elif net == DeepLabV3:
+            return {"prob_threshold": 231, "kernel_size": 40, "erosion_iters": 20}
+        elif net == BASNET:
+            return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+        else:
+            raise ValueError("Unknown network type")
+
+    def select_net(self, scene: str, images_info: List[dict]):
+        # TODO: Update this function, when new networks will be added
+        if scene == "hard":
+            for image_info in images_info:
+                objects = image_info["objects"]
+                if len(objects) == 0:
+                    image_info[
+                        "net"
+                    ] = TracerUniversalB7  # It seems that the image is empty, but we will try to process it
+                    continue
+                obj_counter: Dict = dict(Counter([obj for obj in objects]))
+                # fill empty classes
+                for _tag in self.object_classifier.db:
+                    if _tag not in obj_counter:
+                        obj_counter[_tag] = 0
+
+                non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0]
+
+                if obj_counter["human"] > 0 and len(non_empty_classes) == 1:
+                    # Human only case. Hard Scene? It may be a photo of a person in far/middle distance.
+                    image_info["net"] = TracerUniversalB7
+                    # TODO: will use DeepLabV3+ for this image, it is more suitable for this case,
+                    #  but needs checks for small bbox
+                elif obj_counter["human"] > 0 and len(non_empty_classes) > 1:
+                    # Okay, we have a human without extra hairs and something else. Hard border
+                    image_info["net"] = TracerUniversalB7
+                elif obj_counter["cars"] > 0:
+                    # Cars case
+                    image_info["net"] = TracerUniversalB7
+                elif obj_counter["animals"] > 0:
+                    # Animals case
+                    image_info["net"] = U2NET  # animals should be always in soft scenes
+                else:
+                    # We have no idea what is in the image, so we will try to process it with universal model
+                    image_info["net"] = TracerUniversalB7
+
+        elif scene == "soft":
+            for image_info in images_info:
+                objects = image_info["objects"]
+                if len(objects) == 0:
+                    image_info[
+                        "net"
+                    ] = TracerUniversalB7  # It seems that the image is empty, but we will try to process it
+                    continue
+                obj_counter: Dict = dict(Counter([obj for obj in objects]))
+                # fill empty classes
+                for _tag in self.object_classifier.db:
+                    if _tag not in obj_counter:
+                        obj_counter[_tag] = 0
+
+                non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0]
+
+                if obj_counter["human"] > 0 and len(non_empty_classes) == 1:
+                    # Human only case. It may be a portrait
+                    image_info["net"] = U2NET
+                elif obj_counter["human"] > 0 and len(non_empty_classes) > 1:
+                    # Okay, we have a human with hairs and something else
+                    image_info["net"] = U2NET
+                elif obj_counter["cars"] > 0:
+                    # Cars case.
+                    image_info["net"] = TracerUniversalB7
+                elif obj_counter["animals"] > 0:
+                    # Animals case
+                    image_info["net"] = U2NET  # animals should be always in soft scenes
+                else:
+                    # We have no idea what is in the image, so we will try to process it with universal model
+                    image_info["net"] = TracerUniversalB7
+        elif scene == "digital":
+            for image_info in images_info:  # TODO: not implemented yet
+                image_info[
+                    "net"
+                ] = TracerUniversalB7  # Digital scenes are not handled separately yet; fall back to the universal model
+
+    def __call__(self, images: List[Union[str, Path, Image.Image]]):
+        """
+        Automatically detects the scene of each image, selects a suitable segmentation network and removes the background.
+
+        Args:
+            images: list of input images
+
+        Returns:
+            list of images without the background
+        """
+        loaded_images = thread_pool_processing(load_image, images)
+
+        scene_analysis = self.scene_classifier(loaded_images)
+        images_objects = self.object_classifier(loaded_images)
+
+        images_per_scene = {}
+        for i, image in enumerate(loaded_images):
+            scene_name = scene_analysis[i][0][0]
+            if scene_name not in images_per_scene:
+                images_per_scene[scene_name] = []
+            images_per_scene[scene_name].append(
+                {"image": image, "objects": images_objects[i]}
+            )
+
+        for scene_name, images_info in list(images_per_scene.items()):
+            self.select_net(scene_name, images_info)
+
+        # groups images by net
+        for scene_name, images_info in list(images_per_scene.items()):
+            groups = {}
+            for image_info in images_info:
+                net = image_info["net"]
+                if net not in groups:
+                    groups[net] = []
+                groups[net].append(image_info)
+            for net, gimages_info in list(groups.items()):
+                sc_images = [image_info["image"] for image_info in gimages_info]
+                masks = net(
+                    device=self.segmentation_device,
+                    batch_size=self.segmentation_batch_size,
+                    fp16=self.fp16,
+                )(sc_images)
+
+                for i, image_info in enumerate(gimages_info):
+                    image_info["mask"] = masks[i]
+
+        cascadepsp = CascadePSP(
+            device=self.postprocessing_device,
+            fp16=self.fp16,
+            input_tensor_size=self.refining_image_size,
+            batch_size=self.refining_batch_size,
+        )
+
+        fba = FBAMatting(
+            device=self.postprocessing_device,
+            batch_size=self.postprocessing_batch_size,
+            input_tensor_size=self.postprocessing_image_size,
+            fp16=self.fp16,
+        )
+        # groups images by net
+        for scene_name, images_info in list(images_per_scene.items()):
+            groups = {}
+            for image_info in images_info:
+                net = image_info["net"]
+                if net not in groups:
+                    groups[net] = []
+                groups[net].append(image_info)
+            for net, gimages_info in list(groups.items()):
+                sc_images = [image_info["image"] for image_info in gimages_info]
+                # noinspection PyArgumentList
+                trimap_generator = TrimapGenerator(**self.select_params_for_net(net))
+                matting_method = CasMattingMethod(
+                    refining_module=cascadepsp,
+                    matting_module=fba,
+                    trimap_generator=trimap_generator,
+                    device=self.postprocessing_device,
+                )
+                masks = [image_info["mask"] for image_info in gimages_info]
+                result = matting_method(sc_images, masks)
+
+                for i, image_info in enumerate(gimages_info):
+                    image_info["result"] = result[i]
+
+        # Reconstructing the original order of image
+        result = []
+        for image in loaded_images:
+            for scene_name, images_info in list(images_per_scene.items()):
+                for image_info in images_info:
+                    if image_info["image"] == image:
+                        result.append(image_info["result"])
+                        break
+        if len(result) != len(images):
+            raise RuntimeError(
+                "Something went wrong with restoring original order. Please report this bug."
+            )
+        return result
+
+

Ancestors

+ +

Static methods

+
+
+def select_params_for_net(net: Union[TracerUniversalB7, U2NET, DeepLabV3]) +
+
+

Selects the trimap generation parameters for the given segmentation network

+

Args

+
+
net
+
segmentation network class
+
+
+ +Expand source code + +
@staticmethod
+def select_params_for_net(net: Union[TracerUniversalB7, U2NET, DeepLabV3]):
+    """
+    Selects the trimap generation parameters for the given segmentation network
+
+    Args:
+        net: segmentation network class
+    """
+    if net == TracerUniversalB7:
+        return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+    elif net == U2NET:
+        return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+    elif net == DeepLabV3:
+        return {"prob_threshold": 231, "kernel_size": 40, "erosion_iters": 20}
+    elif net == BASNET:
+        return {"prob_threshold": 231, "kernel_size": 30, "erosion_iters": 5}
+    else:
+        raise ValueError("Unknown network type")
+
+
+
+
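The dictionary returned by `select_params_for_net` holds trimap-generation settings and is unpacked straight into `TrimapGenerator`, which is what `AutoInterface.__call__` does internally. A small sketch (assuming only the `TrimapGenerator` keyword arguments already shown elsewhere in this documentation):

``` python
from carvekit.api.autointerface import AutoInterface
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
from carvekit.trimap.generator import TrimapGenerator

# Sketch: trimap parameters are keyed by segmentation network class.
params = AutoInterface.select_params_for_net(TracerUniversalB7)
print(params)  # {'prob_threshold': 231, 'kernel_size': 30, 'erosion_iters': 5}

# AutoInterface unpacks them into a TrimapGenerator for that group of images.
trimap_generator = TrimapGenerator(**params)
```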

Methods

+
+
+def select_net(self, scene: str, images_info: List[dict]) +
+
+
+
+ +Expand source code + +
def select_net(self, scene: str, images_info: List[dict]):
+    # TODO: Update this function, when new networks will be added
+    if scene == "hard":
+        for image_info in images_info:
+            objects = image_info["objects"]
+            if len(objects) == 0:
+                image_info[
+                    "net"
+                ] = TracerUniversalB7  # It seems that the image is empty, but we will try to process it
+                continue
+            obj_counter: Dict = dict(Counter([obj for obj in objects]))
+            # fill empty classes
+            for _tag in self.object_classifier.db:
+                if _tag not in obj_counter:
+                    obj_counter[_tag] = 0
+
+            non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0]
+
+            if obj_counter["human"] > 0 and len(non_empty_classes) == 1:
+                # Human only case. Hard Scene? It may be a photo of a person in far/middle distance.
+                image_info["net"] = TracerUniversalB7
+                # TODO: will use DeepLabV3+ for this image, it is more suitable for this case,
+                #  but needs checks for small bbox
+            elif obj_counter["human"] > 0 and len(non_empty_classes) > 1:
+                # Okay, we have a human without extra hairs and something else. Hard border
+                image_info["net"] = TracerUniversalB7
+            elif obj_counter["cars"] > 0:
+                # Cars case
+                image_info["net"] = TracerUniversalB7
+            elif obj_counter["animals"] > 0:
+                # Animals case
+                image_info["net"] = U2NET  # animals should be always in soft scenes
+            else:
+                # We have no idea what is in the image, so we will try to process it with universal model
+                image_info["net"] = TracerUniversalB7
+
+    elif scene == "soft":
+        for image_info in images_info:
+            objects = image_info["objects"]
+            if len(objects) == 0:
+                image_info[
+                    "net"
+                ] = TracerUniversalB7  # It seems that the image is empty, but we will try to process it
+                continue
+            obj_counter: Dict = dict(Counter([obj for obj in objects]))
+            # fill empty classes
+            for _tag in self.object_classifier.db:
+                if _tag not in obj_counter:
+                    obj_counter[_tag] = 0
+
+            non_empty_classes = [obj for obj in obj_counter if obj_counter[obj] > 0]
+
+            if obj_counter["human"] > 0 and len(non_empty_classes) == 1:
+                # Human only case. It may be a portrait
+                image_info["net"] = U2NET
+            elif obj_counter["human"] > 0 and len(non_empty_classes) > 1:
+                # Okay, we have a human with hairs and something else
+                image_info["net"] = U2NET
+            elif obj_counter["cars"] > 0:
+                # Cars case.
+                image_info["net"] = TracerUniversalB7
+            elif obj_counter["animals"] > 0:
+                # Animals case
+                image_info["net"] = U2NET  # animals should be always in soft scenes
+            else:
+                # We have no idea what is in the image, so we will try to process it with universal model
+                image_info["net"] = TracerUniversalB7
+    elif scene == "digital":
+        for image_info in images_info:  # TODO: not implemented yet
+            image_info[
+                "net"
+            ] = TracerUniversalB7  # Digital scenes are not handled separately yet; fall back to the universal model
+
+
+
+
+
+
+
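`select_net` does not return anything: it writes the chosen segmentation class into each image-info dict under the `"net"` key. A hedged sketch of that behaviour (the `"human"` tag comes from the branches in the source above; constructing the two classifiers downloads their weights):

``` python
from carvekit.api.autointerface import AutoInterface
from carvekit.ml.wrap.scene_classifier import SceneClassifier
from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4

# Sketch: select_net mutates the dicts in place.
interface = AutoInterface(
    scene_classifier=SceneClassifier(device="cpu", batch_size=1),
    object_classifier=SimplifiedYoloV4(device="cpu", batch_size=1),
)
images_info = [{"image": None, "objects": ["human"]}]  # a human-only image
interface.select_net("soft", images_info)
print(images_info[0]["net"])  # U2NET for a human-only soft scene
```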
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/api/high.html b/docs/api/carvekit/api/high.html new file mode 100644 index 0000000..a14e534 --- /dev/null +++ b/docs/api/carvekit/api/high.html @@ -0,0 +1,382 @@ + + + + + + +carvekit.api.high API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.api.high

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import warnings
+
+from carvekit.api.interface import Interface
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.ml.wrap.cascadepsp import CascadePSP
+from carvekit.ml.wrap.scene_classifier import SceneClassifier
+from carvekit.pipelines.preprocessing import AutoScene
+from carvekit.ml.wrap.u2net import U2NET
+from carvekit.pipelines.postprocessing import CasMattingMethod
+from carvekit.trimap.generator import TrimapGenerator
+
+
+class HiInterface(Interface):
+    def __init__(
+        self,
+        object_type: str = "auto",
+        batch_size_pre=5,
+        batch_size_seg=2,
+        batch_size_matting=1,
+        batch_size_refine=1,
+        device="cpu",
+        seg_mask_size=640,
+        matting_mask_size=2048,
+        refine_mask_size=900,
+        trimap_prob_threshold=231,
+        trimap_dilation=30,
+        trimap_erosion_iters=5,
+        fp16=False,
+    ):
+        """
+        Initializes High Level interface.
+
+        Args:
+            object_type (str, default=auto): Interest object type. Can be "object", "hairs-like" or "auto".
+            matting_mask_size (int, default=2048): The size of the input image for the matting neural network.
+            seg_mask_size (int, default=640): The size of the input image for the segmentation neural network.
+            batch_size_pre (int, default=5): Number of images processed per one preprocessing method call.
+            batch_size_seg (int, default=2): Number of images processed per one segmentation neural network call.
+            batch_size_matting (int, default=1): Number of images processed per one matting neural network call.
+            device (Literal[cpu, cuda], default=cpu): Processing device
+            fp16 (bool, default=False): Use half precision. Reduce memory usage and increase speed.
+            .. CAUTION:: ⚠️ **Experimental support**
+            trimap_prob_threshold (int, default=231): Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied
+            trimap_dilation (int, default=30): The size of the offset radius from the object mask in pixels when forming an unknown area
+            trimap_erosion_iters (int, default=5): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area
+            refine_mask_size (int, default=900): The size of the input image for the refinement neural network.
+            batch_size_refine (int, default=1): Number of images processed per one refinement neural network call.
+
+
+        .. NOTE::
+            1. Changing `seg_mask_size` may cause an `out-of-memory` error if the value is too large, and it may also
+            result in reduced precision. I do not recommend changing this value. You can change `matting_mask_size` in
+            the range from `1024` to `4096` to improve object edge refining quality, but it will increase RAM and
+            video memory consumption. You can also raise the batch sizes to accelerate background removal, but this
+            likewise increases video memory consumption if the values are too big.
+            2. Changing `trimap_prob_threshold`, `trimap_dilation`, `trimap_erosion_iters` may improve object edge
+            refining quality.
+        """
+        preprocess_pipeline = None
+
+        if object_type == "object":
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "hairs-like":
+            self._segnet = U2NET(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "auto":
+            # Using Tracer by default,
+            # but it will dynamically switch to other if needed
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+            self._scene_classifier = SceneClassifier(
+                device=device, fp16=fp16, batch_size=batch_size_pre
+            )
+            preprocess_pipeline = AutoScene(scene_classifier=self._scene_classifier)
+
+        else:
+            warnings.warn(
+                f"Unknown object type: {object_type}. Using default object type: object"
+            )
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+
+        self._cascade_psp = CascadePSP(
+            device=device,
+            batch_size=batch_size_refine,
+            input_tensor_size=refine_mask_size,
+            fp16=fp16,
+        )
+        self._fba = FBAMatting(
+            batch_size=batch_size_matting,
+            device=device,
+            input_tensor_size=matting_mask_size,
+            fp16=fp16,
+        )
+        self._trimap_generator = TrimapGenerator(
+            prob_threshold=trimap_prob_threshold,
+            kernel_size=trimap_dilation,
+            erosion_iters=trimap_erosion_iters,
+        )
+        super(HiInterface, self).__init__(
+            pre_pipe=preprocess_pipeline,
+            seg_pipe=self._segnet,
+            post_pipe=CasMattingMethod(
+                refining_module=self._cascade_psp,
+                matting_module=self._fba,
+                trimap_generator=self._trimap_generator,
+                device=device,
+            ),
+            device=device,
+        )
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class HiInterface +(object_type: str = 'auto', batch_size_pre=5, batch_size_seg=2, batch_size_matting=1, batch_size_refine=1, device='cpu', seg_mask_size=640, matting_mask_size=2048, refine_mask_size=900, trimap_prob_threshold=231, trimap_dilation=30, trimap_erosion_iters=5, fp16=False) +
+
+

Initializes High Level interface.

+

Args

+
+
object_type : str, default=auto
+
Interest object type. Can be "object", "hairs-like" or "auto".
+
matting_mask_size : int, default=2048
+
The size of the input image for the matting neural network.
+
seg_mask_size : int, default=640
+
The size of the input image for the segmentation neural network.
+
batch_size_pre : int, default=5
+
Number of images processed per one preprocessing method call.
+
batch_size_seg : int, default=2
+
Number of images processed per one segmentation neural network call.
+
batch_size_matting : int, default=1
+
Number of images processed per one matting neural network call.
+
device : Literal[cpu, cuda], default=cpu
+
Processing device
+
fp16 : bool, default=False
+
Use half precision. Reduce memory usage and increase speed.
+
+
+

Caution: ⚠️ Experimental support

+
+
+
trimap_prob_threshold : int, default=231
+
Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied
+
trimap_dilation : int, default=30
+
The size of the offset radius from the object mask in pixels when forming an unknown area
+
trimap_erosion_iters : int, default=5
+
The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area
+
refine_mask_size : int, default=900
+
The size of the input image for the refinement neural network.
+
batch_size_refine : int, default=1
+
Number of images processed per one refinement neural network call.
+
+
+

Note

+
    +
  1. Changing seg_mask_size may cause an out-of-memory error if the value is too large, and it may also result in reduced precision. I do not recommend changing this value. You can change matting_mask_size in the range from 1024 to 4096 to improve object edge refining quality, but it will increase RAM and video memory consumption. You can also raise the batch sizes to accelerate background removal, but this likewise increases video memory consumption if the values are too big.
  2. Changing trimap_prob_threshold, trimap_dilation, trimap_erosion_iters may improve object edge refining quality.
+
+
+ +Expand source code + +
class HiInterface(Interface):
+    def __init__(
+        self,
+        object_type: str = "auto",
+        batch_size_pre=5,
+        batch_size_seg=2,
+        batch_size_matting=1,
+        batch_size_refine=1,
+        device="cpu",
+        seg_mask_size=640,
+        matting_mask_size=2048,
+        refine_mask_size=900,
+        trimap_prob_threshold=231,
+        trimap_dilation=30,
+        trimap_erosion_iters=5,
+        fp16=False,
+    ):
+        """
+        Initializes High Level interface.
+
+        Args:
+            object_type (str, default=auto): Interest object type. Can be "object", "hairs-like" or "auto".
+            matting_mask_size (int, default=2048): The size of the input image for the matting neural network.
+            seg_mask_size (int, default=640): The size of the input image for the segmentation neural network.
+            batch_size_pre (int, default=5): Number of images processed per one preprocessing method call.
+            batch_size_seg (int, default=2): Number of images processed per one segmentation neural network call.
+            batch_size_matting (int, default=1): Number of images processed per one matting neural network call.
+            device (Literal[cpu, cuda], default=cpu): Processing device
+            fp16 (bool, default=False): Use half precision. Reduce memory usage and increase speed.
+            .. CAUTION:: ⚠️ **Experimental support**
+            trimap_prob_threshold (int, default=231): Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied
+            trimap_dilation (int, default=30): The size of the offset radius from the object mask in pixels when forming an unknown area
+            trimap_erosion_iters (int, default=5): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area
+            refine_mask_size (int, default=900): The size of the input image for the refinement neural network.
+            batch_size_refine (int, default=1): Number of images processed per one refinement neural network call.
+
+
+        .. NOTE::
+            1. Changing `seg_mask_size` may cause an `out-of-memory` error if the value is too large, and it may also
+            result in reduced precision. I do not recommend changing this value. You can change `matting_mask_size` in
+            the range from `1024` to `4096` to improve object edge refining quality, but it will increase RAM and
+            video memory consumption. You can also raise the batch sizes to accelerate background removal, but this
+            likewise increases video memory consumption if the values are too big.
+            2. Changing `trimap_prob_threshold`, `trimap_dilation`, `trimap_erosion_iters` may improve object edge
+            refining quality.
+        """
+        preprocess_pipeline = None
+
+        if object_type == "object":
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "hairs-like":
+            self._segnet = U2NET(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "auto":
+            # Using Tracer by default,
+            # but it will dynamically switch to other if needed
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+            self._scene_classifier = SceneClassifier(
+                device=device, fp16=fp16, batch_size=batch_size_pre
+            )
+            preprocess_pipeline = AutoScene(scene_classifier=self._scene_classifier)
+
+        else:
+            warnings.warn(
+                f"Unknown object type: {object_type}. Using default object type: object"
+            )
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+
+        self._cascade_psp = CascadePSP(
+            device=device,
+            batch_size=batch_size_refine,
+            input_tensor_size=refine_mask_size,
+            fp16=fp16,
+        )
+        self._fba = FBAMatting(
+            batch_size=batch_size_matting,
+            device=device,
+            input_tensor_size=matting_mask_size,
+            fp16=fp16,
+        )
+        self._trimap_generator = TrimapGenerator(
+            prob_threshold=trimap_prob_threshold,
+            kernel_size=trimap_dilation,
+            erosion_iters=trimap_erosion_iters,
+        )
+        super(HiInterface, self).__init__(
+            pre_pipe=preprocess_pipeline,
+            seg_pipe=self._segnet,
+            post_pipe=CasMattingMethod(
+                refining_module=self._cascade_psp,
+                matting_module=self._fba,
+                trimap_generator=self._trimap_generator,
+                device=device,
+            ),
+            device=device,
+        )
+
+
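As the final `else` branch above shows, an unrecognised `object_type` does not raise: `HiInterface` warns and falls back to the universal Tracer-B7 pipeline. A hedged sketch of observing that behaviour (model weights are downloaded on first construction):

``` python
import warnings

from carvekit.api.high import HiInterface

# Sketch: an unknown object_type emits a warning and uses the default pipeline.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    interface = HiInterface(object_type="not-a-real-type", device="cpu")
print([str(w.message) for w in caught])  # expect the fallback warning
```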

Ancestors

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/api/index.html b/docs/api/carvekit/api/index.html new file mode 100644 index 0000000..906216a --- /dev/null +++ b/docs/api/carvekit/api/index.html @@ -0,0 +1,79 @@ + + + + + + +carvekit.api API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.api

+
+
+
+
+

Sub-modules

+
+
carvekit.api.autointerface
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.api.high
+
+ +
+
carvekit.api.interface
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/api/interface.html b/docs/api/carvekit/api/interface.html new file mode 100644 index 0000000..3929695 --- /dev/null +++ b/docs/api/carvekit/api/interface.html @@ -0,0 +1,245 @@ + + + + + + +carvekit.api.interface API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.api.interface

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from pathlib import Path
+from typing import Union, List, Optional
+
+from PIL import Image
+
+from carvekit.ml.wrap.basnet import BASNET
+from carvekit.ml.wrap.deeplab_v3 import DeepLabV3
+from carvekit.ml.wrap.u2net import U2NET
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.pipelines.preprocessing import PreprocessingStub, AutoScene
+from carvekit.pipelines.postprocessing import MattingMethod, CasMattingMethod
+from carvekit.utils.image_utils import load_image
+from carvekit.utils.mask_utils import apply_mask
+from carvekit.utils.pool_utils import thread_pool_processing
+
+
+class Interface:
+    def __init__(
+        self,
+        seg_pipe: Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]],
+        pre_pipe: Optional[Union[PreprocessingStub, AutoScene]] = None,
+        post_pipe: Optional[Union[MattingMethod, CasMattingMethod]] = None,
+        device="cpu",
+    ):
+        """
+        Initializes an object for interacting with pipelines and other components of the CarveKit framework.
+
+        Args:
+            seg_pipe (Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]]): Initialized segmentation network object
+            pre_pipe (Optional[Union[PreprocessingStub, AutoScene]]): Initialized pre-processing pipeline object
+            post_pipe (Optional[Union[MattingMethod, CasMattingMethod]]): Initialized post-processing pipeline object
+            device (Literal[cpu, cuda], default=cpu): The processing device that will be used to apply the masks to the images.
+        """
+        self.device = device
+        self.preprocessing_pipeline = pre_pipe
+        self.segmentation_pipeline = seg_pipe
+        self.postprocessing_pipeline = post_pipe
+
+    def __call__(
+        self, images: List[Union[str, Path, Image.Image]]
+    ) -> List[Image.Image]:
+        """
+        Removes the background from the specified images.
+
+        Args:
+            images: list of input images
+
+        Returns:
+            List of images without background as PIL.Image.Image instances
+        """
+        if self.segmentation_pipeline is None:
+            raise ValueError(
+                "Segmentation pipeline is not initialized."
+                "Override the class or pass the pipeline to the constructor."
+            )
+        images = thread_pool_processing(load_image, images)
+        if self.preprocessing_pipeline is not None:
+            masks: List[Image.Image] = self.preprocessing_pipeline(
+                interface=self, images=images
+            )
+        else:
+            masks: List[Image.Image] = self.segmentation_pipeline(images=images)
+
+        if self.postprocessing_pipeline is not None:
+            images: List[Image.Image] = self.postprocessing_pipeline(
+                images=images, masks=masks
+            )
+        else:
+            images = list(
+                map(
+                    lambda x: apply_mask(
+                        image=images[x], mask=masks[x], device=self.device
+                    ),
+                    range(len(images)),
+                )
+            )
+        return images
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class Interface +(seg_pipe: Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7, ForwardRef(None)], pre_pipe: Union[PreprocessingStub, AutoScene, ForwardRef(None)] = None, post_pipe: Union[MattingMethod, CasMattingMethod, ForwardRef(None)] = None, device='cpu') +
+
+

Initializes an object for interacting with pipelines and other components of the CarveKit framework.

+

Args

+
+
seg_pipe : Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]]
+
Initialized segmentation network object
+
pre_pipe : Optional[Union[PreprocessingStub, AutoScene]]
+
Initialized pre-processing pipeline object
+
post_pipe : Optional[Union[MattingMethod, CasMattingMethod]]
+
Initialized post-processing pipeline object
+
device : Literal[cpu, cuda], default=cpu
+
The processing device that will be used to apply the masks to the images.
+
+
+ +Expand source code + +
class Interface:
+    def __init__(
+        self,
+        seg_pipe: Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]],
+        pre_pipe: Optional[Union[PreprocessingStub, AutoScene]] = None,
+        post_pipe: Optional[Union[MattingMethod, CasMattingMethod]] = None,
+        device="cpu",
+    ):
+        """
+        Initializes an object for interacting with pipelines and other components of the CarveKit framework.
+
+        Args:
+            seg_pipe (Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]]): Initialized segmentation network object
+            pre_pipe (Optional[Union[PreprocessingStub, AutoScene]]): Initialized pre-processing pipeline object
+            post_pipe (Optional[Union[MattingMethod, CasMattingMethod]]): Initialized post-processing pipeline object
+            device (Literal[cpu, cuda], default=cpu): The processing device that will be used to apply the masks to the images.
+        """
+        self.device = device
+        self.preprocessing_pipeline = pre_pipe
+        self.segmentation_pipeline = seg_pipe
+        self.postprocessing_pipeline = post_pipe
+
+    def __call__(
+        self, images: List[Union[str, Path, Image.Image]]
+    ) -> List[Image.Image]:
+        """
+        Removes the background from the specified images.
+
+        Args:
+            images: list of input images
+
+        Returns:
+            List of images without background as PIL.Image.Image instances
+        """
+        if self.segmentation_pipeline is None:
+            raise ValueError(
+                "Segmentation pipeline is not initialized."
+                "Override the class or pass the pipeline to the constructor."
+            )
+        images = thread_pool_processing(load_image, images)
+        if self.preprocessing_pipeline is not None:
+            masks: List[Image.Image] = self.preprocessing_pipeline(
+                interface=self, images=images
+            )
+        else:
+            masks: List[Image.Image] = self.segmentation_pipeline(images=images)
+
+        if self.postprocessing_pipeline is not None:
+            images: List[Image.Image] = self.postprocessing_pipeline(
+                images=images, masks=masks
+            )
+        else:
+            images = list(
+                map(
+                    lambda x: apply_mask(
+                        image=images[x], mask=masks[x], device=self.device
+                    ),
+                    range(len(images)),
+                )
+            )
+        return images
+
+
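When `post_pipe` is left as `None`, the `__call__` above skips matting entirely and applies the raw segmentation mask via `apply_mask`. A minimal sketch of that segmentation-only path (the test image path is an assumption mirroring the repository's test data):

``` python
from carvekit.api.interface import Interface
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7

# Sketch: no pre/post pipelines, so the mask is applied directly on the chosen device.
seg_net = TracerUniversalB7(device="cpu", batch_size=1, fp16=False)
interface = Interface(seg_pipe=seg_net, device="cpu")
images_wo_bg = interface(["./tests/data/cat.jpg"])
images_wo_bg[0].save("cat_segmentation_only.png")
```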

Subclasses

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/index.html b/docs/api/carvekit/index.html new file mode 100644 index 0000000..e80162d --- /dev/null +++ b/docs/api/carvekit/index.html @@ -0,0 +1,91 @@ + + + + + + +carvekit API documentation + + + + + + + + + + + +
+
+
+

Package carvekit

+
+
+
+ +Expand source code + +
version = "4.5.0"
+
+
+
+

Sub-modules

+
+
carvekit.api
+
+
+
+
carvekit.ml
+
+
+
+
carvekit.pipelines
+
+
+
+
carvekit.trimap
+
+
+
+
carvekit.utils
+
+
+
+
carvekit.web
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/basnet/basnet.html b/docs/api/carvekit/ml/arch/basnet/basnet.html new file mode 100644 index 0000000..50e45d1 --- /dev/null +++ b/docs/api/carvekit/ml/arch/basnet/basnet.html @@ -0,0 +1,1600 @@ + + + + + + +carvekit.ml.arch.basnet.basnet API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.basnet.basnet

+
+
+

Source url: https://github.com/NathanUA/BASNet +Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: MIT License

+
+ +Expand source code + +
"""
+Source url: https://github.com/NathanUA/BASNet
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: MIT License
+"""
+import torch
+import torch.nn as nn
+from torchvision import models
+
+
+def conv3x3(in_planes, out_planes, stride=1):
+    """3x3 convolution with padding"""
+    return nn.Conv2d(
+        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False
+    )
+
+
+class BasicBlock(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class BasicBlockDe(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlockDe, self).__init__()
+
+        self.convRes = conv3x3(inplanes, planes, stride)
+        self.bnRes = nn.BatchNorm2d(planes)
+        self.reluRes = nn.ReLU(inplace=True)
+
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = self.convRes(x)
+        residual = self.bnRes(residual)
+        residual = self.reluRes(residual)
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(
+            planes, planes, kernel_size=3, stride=stride, padding=1, bias=False
+        )
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+        self.bn3 = nn.BatchNorm2d(planes * 4)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class RefUnet(nn.Module):
+    def __init__(self, in_ch, inc_ch):
+        super(RefUnet, self).__init__()
+
+        self.conv0 = nn.Conv2d(in_ch, inc_ch, 3, padding=1)
+
+        self.conv1 = nn.Conv2d(inc_ch, 64, 3, padding=1)
+        self.bn1 = nn.BatchNorm2d(64)
+        self.relu1 = nn.ReLU(inplace=True)
+
+        self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv2 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn2 = nn.BatchNorm2d(64)
+        self.relu2 = nn.ReLU(inplace=True)
+
+        self.pool2 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv3 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn3 = nn.BatchNorm2d(64)
+        self.relu3 = nn.ReLU(inplace=True)
+
+        self.pool3 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv4 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn4 = nn.BatchNorm2d(64)
+        self.relu4 = nn.ReLU(inplace=True)
+
+        self.pool4 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv5 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn5 = nn.BatchNorm2d(64)
+        self.relu5 = nn.ReLU(inplace=True)
+
+        self.conv_d4 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d4 = nn.BatchNorm2d(64)
+        self.relu_d4 = nn.ReLU(inplace=True)
+
+        self.conv_d3 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d3 = nn.BatchNorm2d(64)
+        self.relu_d3 = nn.ReLU(inplace=True)
+
+        self.conv_d2 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d2 = nn.BatchNorm2d(64)
+        self.relu_d2 = nn.ReLU(inplace=True)
+
+        self.conv_d1 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d1 = nn.BatchNorm2d(64)
+        self.relu_d1 = nn.ReLU(inplace=True)
+
+        self.conv_d0 = nn.Conv2d(64, 1, 3, padding=1)
+
+        self.upscore2 = nn.Upsample(
+            scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+    def forward(self, x):
+        hx = x
+        hx = self.conv0(hx)
+
+        hx1 = self.relu1(self.bn1(self.conv1(hx)))
+        hx = self.pool1(hx1)
+
+        hx2 = self.relu2(self.bn2(self.conv2(hx)))
+        hx = self.pool2(hx2)
+
+        hx3 = self.relu3(self.bn3(self.conv3(hx)))
+        hx = self.pool3(hx3)
+
+        hx4 = self.relu4(self.bn4(self.conv4(hx)))
+        hx = self.pool4(hx4)
+
+        hx5 = self.relu5(self.bn5(self.conv5(hx)))
+
+        hx = self.upscore2(hx5)
+
+        d4 = self.relu_d4(self.bn_d4(self.conv_d4(torch.cat((hx, hx4), 1))))
+        hx = self.upscore2(d4)
+
+        d3 = self.relu_d3(self.bn_d3(self.conv_d3(torch.cat((hx, hx3), 1))))
+        hx = self.upscore2(d3)
+
+        d2 = self.relu_d2(self.bn_d2(self.conv_d2(torch.cat((hx, hx2), 1))))
+        hx = self.upscore2(d2)
+
+        d1 = self.relu_d1(self.bn_d1(self.conv_d1(torch.cat((hx, hx1), 1))))
+
+        residual = self.conv_d0(d1)
+
+        return x + residual
+
+
+class BASNet(nn.Module):
+    def __init__(self, n_channels, n_classes):
+        super(BASNet, self).__init__()
+
+        resnet = models.resnet34(pretrained=False)
+
+        # -------------Encoder--------------
+
+        self.inconv = nn.Conv2d(n_channels, 64, 3, padding=1)
+        self.inbn = nn.BatchNorm2d(64)
+        self.inrelu = nn.ReLU(inplace=True)
+
+        # stage 1
+        self.encoder1 = resnet.layer1  # 224
+        # stage 2
+        self.encoder2 = resnet.layer2  # 112
+        # stage 3
+        self.encoder3 = resnet.layer3  # 56
+        # stage 4
+        self.encoder4 = resnet.layer4  # 28
+
+        self.pool4 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        # stage 5
+        self.resb5_1 = BasicBlock(512, 512)
+        self.resb5_2 = BasicBlock(512, 512)
+        self.resb5_3 = BasicBlock(512, 512)  # 14
+
+        self.pool5 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        # stage 6
+        self.resb6_1 = BasicBlock(512, 512)
+        self.resb6_2 = BasicBlock(512, 512)
+        self.resb6_3 = BasicBlock(512, 512)  # 7
+
+        # -------------Bridge--------------
+
+        # stage Bridge
+        self.convbg_1 = nn.Conv2d(512, 512, 3, dilation=2, padding=2)  # 7
+        self.bnbg_1 = nn.BatchNorm2d(512)
+        self.relubg_1 = nn.ReLU(inplace=True)
+        self.convbg_m = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bnbg_m = nn.BatchNorm2d(512)
+        self.relubg_m = nn.ReLU(inplace=True)
+        self.convbg_2 = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bnbg_2 = nn.BatchNorm2d(512)
+        self.relubg_2 = nn.ReLU(inplace=True)
+
+        # -------------Decoder--------------
+
+        # stage 6d
+        self.conv6d_1 = nn.Conv2d(1024, 512, 3, padding=1)  # 16
+        self.bn6d_1 = nn.BatchNorm2d(512)
+        self.relu6d_1 = nn.ReLU(inplace=True)
+
+        self.conv6d_m = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bn6d_m = nn.BatchNorm2d(512)
+        self.relu6d_m = nn.ReLU(inplace=True)
+
+        self.conv6d_2 = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bn6d_2 = nn.BatchNorm2d(512)
+        self.relu6d_2 = nn.ReLU(inplace=True)
+
+        # stage 5d
+        self.conv5d_1 = nn.Conv2d(1024, 512, 3, padding=1)  # 16
+        self.bn5d_1 = nn.BatchNorm2d(512)
+        self.relu5d_1 = nn.ReLU(inplace=True)
+
+        self.conv5d_m = nn.Conv2d(512, 512, 3, padding=1)
+        self.bn5d_m = nn.BatchNorm2d(512)
+        self.relu5d_m = nn.ReLU(inplace=True)
+
+        self.conv5d_2 = nn.Conv2d(512, 512, 3, padding=1)
+        self.bn5d_2 = nn.BatchNorm2d(512)
+        self.relu5d_2 = nn.ReLU(inplace=True)
+
+        # stage 4d
+        self.conv4d_1 = nn.Conv2d(1024, 512, 3, padding=1)  # 32
+        self.bn4d_1 = nn.BatchNorm2d(512)
+        self.relu4d_1 = nn.ReLU(inplace=True)
+
+        self.conv4d_m = nn.Conv2d(512, 512, 3, padding=1)
+        self.bn4d_m = nn.BatchNorm2d(512)
+        self.relu4d_m = nn.ReLU(inplace=True)
+
+        self.conv4d_2 = nn.Conv2d(512, 256, 3, padding=1)
+        self.bn4d_2 = nn.BatchNorm2d(256)
+        self.relu4d_2 = nn.ReLU(inplace=True)
+
+        # stage 3d
+        self.conv3d_1 = nn.Conv2d(512, 256, 3, padding=1)  # 64
+        self.bn3d_1 = nn.BatchNorm2d(256)
+        self.relu3d_1 = nn.ReLU(inplace=True)
+
+        self.conv3d_m = nn.Conv2d(256, 256, 3, padding=1)
+        self.bn3d_m = nn.BatchNorm2d(256)
+        self.relu3d_m = nn.ReLU(inplace=True)
+
+        self.conv3d_2 = nn.Conv2d(256, 128, 3, padding=1)
+        self.bn3d_2 = nn.BatchNorm2d(128)
+        self.relu3d_2 = nn.ReLU(inplace=True)
+
+        # stage 2d
+
+        self.conv2d_1 = nn.Conv2d(256, 128, 3, padding=1)  # 128
+        self.bn2d_1 = nn.BatchNorm2d(128)
+        self.relu2d_1 = nn.ReLU(inplace=True)
+
+        self.conv2d_m = nn.Conv2d(128, 128, 3, padding=1)
+        self.bn2d_m = nn.BatchNorm2d(128)
+        self.relu2d_m = nn.ReLU(inplace=True)
+
+        self.conv2d_2 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn2d_2 = nn.BatchNorm2d(64)
+        self.relu2d_2 = nn.ReLU(inplace=True)
+
+        # stage 1d
+        self.conv1d_1 = nn.Conv2d(128, 64, 3, padding=1)  # 256
+        self.bn1d_1 = nn.BatchNorm2d(64)
+        self.relu1d_1 = nn.ReLU(inplace=True)
+
+        self.conv1d_m = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn1d_m = nn.BatchNorm2d(64)
+        self.relu1d_m = nn.ReLU(inplace=True)
+
+        self.conv1d_2 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn1d_2 = nn.BatchNorm2d(64)
+        self.relu1d_2 = nn.ReLU(inplace=True)
+
+        # -------------Bilinear Upsampling--------------
+        self.upscore6 = nn.Upsample(
+            scale_factor=32, mode="bilinear", align_corners=False
+        )
+        self.upscore5 = nn.Upsample(
+            scale_factor=16, mode="bilinear", align_corners=False
+        )
+        self.upscore4 = nn.Upsample(
+            scale_factor=8, mode="bilinear", align_corners=False
+        )
+        self.upscore3 = nn.Upsample(
+            scale_factor=4, mode="bilinear", align_corners=False
+        )
+        self.upscore2 = nn.Upsample(
+            scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+        # -------------Side Output--------------
+        self.outconvb = nn.Conv2d(512, 1, 3, padding=1)
+        self.outconv6 = nn.Conv2d(512, 1, 3, padding=1)
+        self.outconv5 = nn.Conv2d(512, 1, 3, padding=1)
+        self.outconv4 = nn.Conv2d(256, 1, 3, padding=1)
+        self.outconv3 = nn.Conv2d(128, 1, 3, padding=1)
+        self.outconv2 = nn.Conv2d(64, 1, 3, padding=1)
+        self.outconv1 = nn.Conv2d(64, 1, 3, padding=1)
+
+        # -------------Refine Module-------------
+        self.refunet = RefUnet(1, 64)
+
+    def forward(self, x):
+        hx = x
+
+        # -------------Encoder-------------
+        hx = self.inconv(hx)
+        hx = self.inbn(hx)
+        hx = self.inrelu(hx)
+
+        h1 = self.encoder1(hx)  # 256
+        h2 = self.encoder2(h1)  # 128
+        h3 = self.encoder3(h2)  # 64
+        h4 = self.encoder4(h3)  # 32
+
+        hx = self.pool4(h4)  # 16
+
+        hx = self.resb5_1(hx)
+        hx = self.resb5_2(hx)
+        h5 = self.resb5_3(hx)
+
+        hx = self.pool5(h5)  # 8
+
+        hx = self.resb6_1(hx)
+        hx = self.resb6_2(hx)
+        h6 = self.resb6_3(hx)
+
+        # -------------Bridge-------------
+        hx = self.relubg_1(self.bnbg_1(self.convbg_1(h6)))  # 8
+        hx = self.relubg_m(self.bnbg_m(self.convbg_m(hx)))
+        hbg = self.relubg_2(self.bnbg_2(self.convbg_2(hx)))
+
+        # -------------Decoder-------------
+
+        hx = self.relu6d_1(self.bn6d_1(self.conv6d_1(torch.cat((hbg, h6), 1))))
+        hx = self.relu6d_m(self.bn6d_m(self.conv6d_m(hx)))
+        hd6 = self.relu6d_2(self.bn6d_2(self.conv6d_2(hx)))
+
+        hx = self.upscore2(hd6)  # 8 -> 16
+
+        hx = self.relu5d_1(self.bn5d_1(self.conv5d_1(torch.cat((hx, h5), 1))))
+        hx = self.relu5d_m(self.bn5d_m(self.conv5d_m(hx)))
+        hd5 = self.relu5d_2(self.bn5d_2(self.conv5d_2(hx)))
+
+        hx = self.upscore2(hd5)  # 16 -> 32
+
+        hx = self.relu4d_1(self.bn4d_1(self.conv4d_1(torch.cat((hx, h4), 1))))
+        hx = self.relu4d_m(self.bn4d_m(self.conv4d_m(hx)))
+        hd4 = self.relu4d_2(self.bn4d_2(self.conv4d_2(hx)))
+
+        hx = self.upscore2(hd4)  # 32 -> 64
+
+        hx = self.relu3d_1(self.bn3d_1(self.conv3d_1(torch.cat((hx, h3), 1))))
+        hx = self.relu3d_m(self.bn3d_m(self.conv3d_m(hx)))
+        hd3 = self.relu3d_2(self.bn3d_2(self.conv3d_2(hx)))
+
+        hx = self.upscore2(hd3)  # 64 -> 128
+
+        hx = self.relu2d_1(self.bn2d_1(self.conv2d_1(torch.cat((hx, h2), 1))))
+        hx = self.relu2d_m(self.bn2d_m(self.conv2d_m(hx)))
+        hd2 = self.relu2d_2(self.bn2d_2(self.conv2d_2(hx)))
+
+        hx = self.upscore2(hd2)  # 128 -> 256
+
+        hx = self.relu1d_1(self.bn1d_1(self.conv1d_1(torch.cat((hx, h1), 1))))
+        hx = self.relu1d_m(self.bn1d_m(self.conv1d_m(hx)))
+        hd1 = self.relu1d_2(self.bn1d_2(self.conv1d_2(hx)))
+
+        # -------------Side Output-------------
+        db = self.outconvb(hbg)
+        db = self.upscore6(db)  # 8->256
+
+        d6 = self.outconv6(hd6)
+        d6 = self.upscore6(d6)  # 8->256
+
+        d5 = self.outconv5(hd5)
+        d5 = self.upscore5(d5)  # 16->256
+
+        d4 = self.outconv4(hd4)
+        d4 = self.upscore4(d4)  # 32->256
+
+        d3 = self.outconv3(hd3)
+        d3 = self.upscore3(d3)  # 64->256
+
+        d2 = self.outconv2(hd2)
+        d2 = self.upscore2(d2)  # 128->256
+
+        d1 = self.outconv1(hd1)  # 256
+
+        # -------------Refine Module-------------
+        dout = self.refunet(d1)  # 256
+
+        return (
+            torch.sigmoid(dout),
+            torch.sigmoid(d1),
+            torch.sigmoid(d2),
+            torch.sigmoid(d3),
+            torch.sigmoid(d4),
+            torch.sigmoid(d5),
+            torch.sigmoid(d6),
+            torch.sigmoid(db),
+        )
+
+
+
+
+
+
+
+

Functions

+
+
+def conv3x3(in_planes, out_planes, stride=1)
+
+

3x3 convolution with padding

+
def conv3x3(in_planes, out_planes, stride=1):
+    """3x3 convolution with padding"""
+    return nn.Conv2d(
+        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False
+    )
+
+
+
+
+
+

Classes

+
+
+class BASNet(n_channels, n_classes)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class BASNet(nn.Module):
+    def __init__(self, n_channels, n_classes):
+        super(BASNet, self).__init__()
+
+        resnet = models.resnet34(pretrained=False)
+
+        # -------------Encoder--------------
+
+        self.inconv = nn.Conv2d(n_channels, 64, 3, padding=1)
+        self.inbn = nn.BatchNorm2d(64)
+        self.inrelu = nn.ReLU(inplace=True)
+
+        # stage 1
+        self.encoder1 = resnet.layer1  # 224
+        # stage 2
+        self.encoder2 = resnet.layer2  # 112
+        # stage 3
+        self.encoder3 = resnet.layer3  # 56
+        # stage 4
+        self.encoder4 = resnet.layer4  # 28
+
+        self.pool4 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        # stage 5
+        self.resb5_1 = BasicBlock(512, 512)
+        self.resb5_2 = BasicBlock(512, 512)
+        self.resb5_3 = BasicBlock(512, 512)  # 14
+
+        self.pool5 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        # stage 6
+        self.resb6_1 = BasicBlock(512, 512)
+        self.resb6_2 = BasicBlock(512, 512)
+        self.resb6_3 = BasicBlock(512, 512)  # 7
+
+        # -------------Bridge--------------
+
+        # stage Bridge
+        self.convbg_1 = nn.Conv2d(512, 512, 3, dilation=2, padding=2)  # 7
+        self.bnbg_1 = nn.BatchNorm2d(512)
+        self.relubg_1 = nn.ReLU(inplace=True)
+        self.convbg_m = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bnbg_m = nn.BatchNorm2d(512)
+        self.relubg_m = nn.ReLU(inplace=True)
+        self.convbg_2 = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bnbg_2 = nn.BatchNorm2d(512)
+        self.relubg_2 = nn.ReLU(inplace=True)
+
+        # -------------Decoder--------------
+
+        # stage 6d
+        self.conv6d_1 = nn.Conv2d(1024, 512, 3, padding=1)  # 16
+        self.bn6d_1 = nn.BatchNorm2d(512)
+        self.relu6d_1 = nn.ReLU(inplace=True)
+
+        self.conv6d_m = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bn6d_m = nn.BatchNorm2d(512)
+        self.relu6d_m = nn.ReLU(inplace=True)
+
+        self.conv6d_2 = nn.Conv2d(512, 512, 3, dilation=2, padding=2)
+        self.bn6d_2 = nn.BatchNorm2d(512)
+        self.relu6d_2 = nn.ReLU(inplace=True)
+
+        # stage 5d
+        self.conv5d_1 = nn.Conv2d(1024, 512, 3, padding=1)  # 16
+        self.bn5d_1 = nn.BatchNorm2d(512)
+        self.relu5d_1 = nn.ReLU(inplace=True)
+
+        self.conv5d_m = nn.Conv2d(512, 512, 3, padding=1)
+        self.bn5d_m = nn.BatchNorm2d(512)
+        self.relu5d_m = nn.ReLU(inplace=True)
+
+        self.conv5d_2 = nn.Conv2d(512, 512, 3, padding=1)
+        self.bn5d_2 = nn.BatchNorm2d(512)
+        self.relu5d_2 = nn.ReLU(inplace=True)
+
+        # stage 4d
+        self.conv4d_1 = nn.Conv2d(1024, 512, 3, padding=1)  # 32
+        self.bn4d_1 = nn.BatchNorm2d(512)
+        self.relu4d_1 = nn.ReLU(inplace=True)
+
+        self.conv4d_m = nn.Conv2d(512, 512, 3, padding=1)
+        self.bn4d_m = nn.BatchNorm2d(512)
+        self.relu4d_m = nn.ReLU(inplace=True)
+
+        self.conv4d_2 = nn.Conv2d(512, 256, 3, padding=1)
+        self.bn4d_2 = nn.BatchNorm2d(256)
+        self.relu4d_2 = nn.ReLU(inplace=True)
+
+        # stage 3d
+        self.conv3d_1 = nn.Conv2d(512, 256, 3, padding=1)  # 64
+        self.bn3d_1 = nn.BatchNorm2d(256)
+        self.relu3d_1 = nn.ReLU(inplace=True)
+
+        self.conv3d_m = nn.Conv2d(256, 256, 3, padding=1)
+        self.bn3d_m = nn.BatchNorm2d(256)
+        self.relu3d_m = nn.ReLU(inplace=True)
+
+        self.conv3d_2 = nn.Conv2d(256, 128, 3, padding=1)
+        self.bn3d_2 = nn.BatchNorm2d(128)
+        self.relu3d_2 = nn.ReLU(inplace=True)
+
+        # stage 2d
+
+        self.conv2d_1 = nn.Conv2d(256, 128, 3, padding=1)  # 128
+        self.bn2d_1 = nn.BatchNorm2d(128)
+        self.relu2d_1 = nn.ReLU(inplace=True)
+
+        self.conv2d_m = nn.Conv2d(128, 128, 3, padding=1)
+        self.bn2d_m = nn.BatchNorm2d(128)
+        self.relu2d_m = nn.ReLU(inplace=True)
+
+        self.conv2d_2 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn2d_2 = nn.BatchNorm2d(64)
+        self.relu2d_2 = nn.ReLU(inplace=True)
+
+        # stage 1d
+        self.conv1d_1 = nn.Conv2d(128, 64, 3, padding=1)  # 256
+        self.bn1d_1 = nn.BatchNorm2d(64)
+        self.relu1d_1 = nn.ReLU(inplace=True)
+
+        self.conv1d_m = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn1d_m = nn.BatchNorm2d(64)
+        self.relu1d_m = nn.ReLU(inplace=True)
+
+        self.conv1d_2 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn1d_2 = nn.BatchNorm2d(64)
+        self.relu1d_2 = nn.ReLU(inplace=True)
+
+        # -------------Bilinear Upsampling--------------
+        self.upscore6 = nn.Upsample(
+            scale_factor=32, mode="bilinear", align_corners=False
+        )
+        self.upscore5 = nn.Upsample(
+            scale_factor=16, mode="bilinear", align_corners=False
+        )
+        self.upscore4 = nn.Upsample(
+            scale_factor=8, mode="bilinear", align_corners=False
+        )
+        self.upscore3 = nn.Upsample(
+            scale_factor=4, mode="bilinear", align_corners=False
+        )
+        self.upscore2 = nn.Upsample(
+            scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+        # -------------Side Output--------------
+        self.outconvb = nn.Conv2d(512, 1, 3, padding=1)
+        self.outconv6 = nn.Conv2d(512, 1, 3, padding=1)
+        self.outconv5 = nn.Conv2d(512, 1, 3, padding=1)
+        self.outconv4 = nn.Conv2d(256, 1, 3, padding=1)
+        self.outconv3 = nn.Conv2d(128, 1, 3, padding=1)
+        self.outconv2 = nn.Conv2d(64, 1, 3, padding=1)
+        self.outconv1 = nn.Conv2d(64, 1, 3, padding=1)
+
+        # -------------Refine Module-------------
+        self.refunet = RefUnet(1, 64)
+
+    def forward(self, x):
+        hx = x
+
+        # -------------Encoder-------------
+        hx = self.inconv(hx)
+        hx = self.inbn(hx)
+        hx = self.inrelu(hx)
+
+        h1 = self.encoder1(hx)  # 256
+        h2 = self.encoder2(h1)  # 128
+        h3 = self.encoder3(h2)  # 64
+        h4 = self.encoder4(h3)  # 32
+
+        hx = self.pool4(h4)  # 16
+
+        hx = self.resb5_1(hx)
+        hx = self.resb5_2(hx)
+        h5 = self.resb5_3(hx)
+
+        hx = self.pool5(h5)  # 8
+
+        hx = self.resb6_1(hx)
+        hx = self.resb6_2(hx)
+        h6 = self.resb6_3(hx)
+
+        # -------------Bridge-------------
+        hx = self.relubg_1(self.bnbg_1(self.convbg_1(h6)))  # 8
+        hx = self.relubg_m(self.bnbg_m(self.convbg_m(hx)))
+        hbg = self.relubg_2(self.bnbg_2(self.convbg_2(hx)))
+
+        # -------------Decoder-------------
+
+        hx = self.relu6d_1(self.bn6d_1(self.conv6d_1(torch.cat((hbg, h6), 1))))
+        hx = self.relu6d_m(self.bn6d_m(self.conv6d_m(hx)))
+        hd6 = self.relu6d_2(self.bn6d_2(self.conv6d_2(hx)))
+
+        hx = self.upscore2(hd6)  # 8 -> 16
+
+        hx = self.relu5d_1(self.bn5d_1(self.conv5d_1(torch.cat((hx, h5), 1))))
+        hx = self.relu5d_m(self.bn5d_m(self.conv5d_m(hx)))
+        hd5 = self.relu5d_2(self.bn5d_2(self.conv5d_2(hx)))
+
+        hx = self.upscore2(hd5)  # 16 -> 32
+
+        hx = self.relu4d_1(self.bn4d_1(self.conv4d_1(torch.cat((hx, h4), 1))))
+        hx = self.relu4d_m(self.bn4d_m(self.conv4d_m(hx)))
+        hd4 = self.relu4d_2(self.bn4d_2(self.conv4d_2(hx)))
+
+        hx = self.upscore2(hd4)  # 32 -> 64
+
+        hx = self.relu3d_1(self.bn3d_1(self.conv3d_1(torch.cat((hx, h3), 1))))
+        hx = self.relu3d_m(self.bn3d_m(self.conv3d_m(hx)))
+        hd3 = self.relu3d_2(self.bn3d_2(self.conv3d_2(hx)))
+
+        hx = self.upscore2(hd3)  # 64 -> 128
+
+        hx = self.relu2d_1(self.bn2d_1(self.conv2d_1(torch.cat((hx, h2), 1))))
+        hx = self.relu2d_m(self.bn2d_m(self.conv2d_m(hx)))
+        hd2 = self.relu2d_2(self.bn2d_2(self.conv2d_2(hx)))
+
+        hx = self.upscore2(hd2)  # 128 -> 256
+
+        hx = self.relu1d_1(self.bn1d_1(self.conv1d_1(torch.cat((hx, h1), 1))))
+        hx = self.relu1d_m(self.bn1d_m(self.conv1d_m(hx)))
+        hd1 = self.relu1d_2(self.bn1d_2(self.conv1d_2(hx)))
+
+        # -------------Side Output-------------
+        db = self.outconvb(hbg)
+        db = self.upscore6(db)  # 8->256
+
+        d6 = self.outconv6(hd6)
+        d6 = self.upscore6(d6)  # 8->256
+
+        d5 = self.outconv5(hd5)
+        d5 = self.upscore5(d5)  # 16->256
+
+        d4 = self.outconv4(hd4)
+        d4 = self.upscore4(d4)  # 32->256
+
+        d3 = self.outconv3(hd3)
+        d3 = self.upscore3(d3)  # 64->256
+
+        d2 = self.outconv2(hd2)
+        d2 = self.upscore2(d2)  # 128->256
+
+        d1 = self.outconv1(hd1)  # 256
+
+        # -------------Refine Module-------------
+        dout = self.refunet(d1)  # 256
+
+        return (
+            torch.sigmoid(dout),
+            torch.sigmoid(d1),
+            torch.sigmoid(d2),
+            torch.sigmoid(d3),
+            torch.sigmoid(d4),
+            torch.sigmoid(d5),
+            torch.sigmoid(d6),
+            torch.sigmoid(db),
+        )
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Subclasses


Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    hx = x
+
+    # -------------Encoder-------------
+    hx = self.inconv(hx)
+    hx = self.inbn(hx)
+    hx = self.inrelu(hx)
+
+    h1 = self.encoder1(hx)  # 256
+    h2 = self.encoder2(h1)  # 128
+    h3 = self.encoder3(h2)  # 64
+    h4 = self.encoder4(h3)  # 32
+
+    hx = self.pool4(h4)  # 16
+
+    hx = self.resb5_1(hx)
+    hx = self.resb5_2(hx)
+    h5 = self.resb5_3(hx)
+
+    hx = self.pool5(h5)  # 8
+
+    hx = self.resb6_1(hx)
+    hx = self.resb6_2(hx)
+    h6 = self.resb6_3(hx)
+
+    # -------------Bridge-------------
+    hx = self.relubg_1(self.bnbg_1(self.convbg_1(h6)))  # 8
+    hx = self.relubg_m(self.bnbg_m(self.convbg_m(hx)))
+    hbg = self.relubg_2(self.bnbg_2(self.convbg_2(hx)))
+
+    # -------------Decoder-------------
+
+    hx = self.relu6d_1(self.bn6d_1(self.conv6d_1(torch.cat((hbg, h6), 1))))
+    hx = self.relu6d_m(self.bn6d_m(self.conv6d_m(hx)))
+    hd6 = self.relu6d_2(self.bn6d_2(self.conv6d_2(hx)))
+
+    hx = self.upscore2(hd6)  # 8 -> 16
+
+    hx = self.relu5d_1(self.bn5d_1(self.conv5d_1(torch.cat((hx, h5), 1))))
+    hx = self.relu5d_m(self.bn5d_m(self.conv5d_m(hx)))
+    hd5 = self.relu5d_2(self.bn5d_2(self.conv5d_2(hx)))
+
+    hx = self.upscore2(hd5)  # 16 -> 32
+
+    hx = self.relu4d_1(self.bn4d_1(self.conv4d_1(torch.cat((hx, h4), 1))))
+    hx = self.relu4d_m(self.bn4d_m(self.conv4d_m(hx)))
+    hd4 = self.relu4d_2(self.bn4d_2(self.conv4d_2(hx)))
+
+    hx = self.upscore2(hd4)  # 32 -> 64
+
+    hx = self.relu3d_1(self.bn3d_1(self.conv3d_1(torch.cat((hx, h3), 1))))
+    hx = self.relu3d_m(self.bn3d_m(self.conv3d_m(hx)))
+    hd3 = self.relu3d_2(self.bn3d_2(self.conv3d_2(hx)))
+
+    hx = self.upscore2(hd3)  # 64 -> 128
+
+    hx = self.relu2d_1(self.bn2d_1(self.conv2d_1(torch.cat((hx, h2), 1))))
+    hx = self.relu2d_m(self.bn2d_m(self.conv2d_m(hx)))
+    hd2 = self.relu2d_2(self.bn2d_2(self.conv2d_2(hx)))
+
+    hx = self.upscore2(hd2)  # 128 -> 256
+
+    hx = self.relu1d_1(self.bn1d_1(self.conv1d_1(torch.cat((hx, h1), 1))))
+    hx = self.relu1d_m(self.bn1d_m(self.conv1d_m(hx)))
+    hd1 = self.relu1d_2(self.bn1d_2(self.conv1d_2(hx)))
+
+    # -------------Side Output-------------
+    db = self.outconvb(hbg)
+    db = self.upscore6(db)  # 8->256
+
+    d6 = self.outconv6(hd6)
+    d6 = self.upscore6(d6)  # 8->256
+
+    d5 = self.outconv5(hd5)
+    d5 = self.upscore5(d5)  # 16->256
+
+    d4 = self.outconv4(hd4)
+    d4 = self.upscore4(d4)  # 32->256
+
+    d3 = self.outconv3(hd3)
+    d3 = self.upscore3(d3)  # 64->256
+
+    d2 = self.outconv2(hd2)
+    d2 = self.upscore2(d2)  # 128->256
+
+    d1 = self.outconv1(hd1)  # 256
+
+    # -------------Refine Module-------------
+    dout = self.refunet(d1)  # 256
+
+    return (
+        torch.sigmoid(dout),
+        torch.sigmoid(d1),
+        torch.sigmoid(d2),
+        torch.sigmoid(d3),
+        torch.sigmoid(d4),
+        torch.sigmoid(d5),
+        torch.sigmoid(d6),
+        torch.sigmoid(db),
+    )
+
+
+
+
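For orientation, a minimal usage sketch of the BASNet class documented above (a hypothetical example with random weights; checkpoint loading is omitted, and a 3-channel input whose side is divisible by 32, such as the 256x256 used in the forward comments, keeps the skip connections aligned):

import torch
from carvekit.ml.arch.basnet.basnet import BASNet

net = BASNet(n_channels=3, n_classes=1)
net.eval()  # inference mode; real weights would normally be loaded first

with torch.no_grad():
    x = torch.rand(1, 3, 256, 256)  # NCHW RGB batch
    dout, d1, d2, d3, d4, d5, d6, db = net(x)

# dout is the RefUnet-refined map; d1..d6 and db are the sigmoid side outputs,
# all upsampled back to the input resolution
print(dout.shape)  # torch.Size([1, 1, 256, 256])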
+
+class BasicBlock(inplanes, planes, stride=1, downsample=None)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class BasicBlock(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Class variables

+
+
var expansion
+
+
+
+
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    residual = x
+
+    out = self.conv1(x)
+    out = self.bn1(out)
+    out = self.relu(out)
+
+    out = self.conv2(out)
+    out = self.bn2(out)
+
+    if self.downsample is not None:
+        residual = self.downsample(x)
+
+    out += residual
+    out = self.relu(out)
+
+    return out
+
+
+
+
+
+class BasicBlockDe(inplanes, planes, stride=1, downsample=None)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class BasicBlockDe(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlockDe, self).__init__()
+
+        self.convRes = conv3x3(inplanes, planes, stride)
+        self.bnRes = nn.BatchNorm2d(planes)
+        self.reluRes = nn.ReLU(inplace=True)
+
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = self.convRes(x)
+        residual = self.bnRes(residual)
+        residual = self.reluRes(residual)
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Class variables

+
+
var expansion
+
+
+
+
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    residual = self.convRes(x)
+    residual = self.bnRes(residual)
+    residual = self.reluRes(residual)
+
+    out = self.conv1(x)
+    out = self.bn1(out)
+    out = self.relu(out)
+
+    out = self.conv2(out)
+    out = self.bn2(out)
+
+    if self.downsample is not None:
+        residual = self.downsample(x)
+
+    out += residual
+    out = self.relu(out)
+
+    return out
+
+
+
+
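A note on the two residual blocks documented so far: BasicBlock adds the unmodified input back onto the output (unless a downsample module is passed), whereas BasicBlockDe first pushes the input through its own convRes/bnRes branch before the addition. A small hypothetical sketch, assuming both classes come from this module:

import torch
from carvekit.ml.arch.basnet.basnet import BasicBlock, BasicBlockDe

x = torch.rand(1, 64, 32, 32)

plain = BasicBlock(inplanes=64, planes=64)      # identity skip connection
decoder = BasicBlockDe(inplanes=64, planes=64)  # skip connection is itself convolved

plain.eval()
decoder.eval()
with torch.no_grad():
    print(plain(x).shape, decoder(x).shape)  # both torch.Size([1, 64, 32, 32])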
+
+class Bottleneck(inplanes, planes, stride=1, downsample=None)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(
+            planes, planes, kernel_size=3, stride=stride, padding=1, bias=False
+        )
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+        self.bn3 = nn.BatchNorm2d(planes * 4)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Class variables

+
+
var expansion
+
+
+
+
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    residual = x
+
+    out = self.conv1(x)
+    out = self.bn1(out)
+    out = self.relu(out)
+
+    out = self.conv2(out)
+    out = self.bn2(out)
+    out = self.relu(out)
+
+    out = self.conv3(out)
+    out = self.bn3(out)
+
+    if self.downsample is not None:
+        residual = self.downsample(x)
+
+    out += residual
+    out = self.relu(out)
+
+    return out
+
+
+
+
+
+class RefUnet(in_ch, inc_ch)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class RefUnet(nn.Module):
+    def __init__(self, in_ch, inc_ch):
+        super(RefUnet, self).__init__()
+
+        self.conv0 = nn.Conv2d(in_ch, inc_ch, 3, padding=1)
+
+        self.conv1 = nn.Conv2d(inc_ch, 64, 3, padding=1)
+        self.bn1 = nn.BatchNorm2d(64)
+        self.relu1 = nn.ReLU(inplace=True)
+
+        self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv2 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn2 = nn.BatchNorm2d(64)
+        self.relu2 = nn.ReLU(inplace=True)
+
+        self.pool2 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv3 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn3 = nn.BatchNorm2d(64)
+        self.relu3 = nn.ReLU(inplace=True)
+
+        self.pool3 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv4 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn4 = nn.BatchNorm2d(64)
+        self.relu4 = nn.ReLU(inplace=True)
+
+        self.pool4 = nn.MaxPool2d(2, 2, ceil_mode=True)
+
+        self.conv5 = nn.Conv2d(64, 64, 3, padding=1)
+        self.bn5 = nn.BatchNorm2d(64)
+        self.relu5 = nn.ReLU(inplace=True)
+
+        self.conv_d4 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d4 = nn.BatchNorm2d(64)
+        self.relu_d4 = nn.ReLU(inplace=True)
+
+        self.conv_d3 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d3 = nn.BatchNorm2d(64)
+        self.relu_d3 = nn.ReLU(inplace=True)
+
+        self.conv_d2 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d2 = nn.BatchNorm2d(64)
+        self.relu_d2 = nn.ReLU(inplace=True)
+
+        self.conv_d1 = nn.Conv2d(128, 64, 3, padding=1)
+        self.bn_d1 = nn.BatchNorm2d(64)
+        self.relu_d1 = nn.ReLU(inplace=True)
+
+        self.conv_d0 = nn.Conv2d(64, 1, 3, padding=1)
+
+        self.upscore2 = nn.Upsample(
+            scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+    def forward(self, x):
+        hx = x
+        hx = self.conv0(hx)
+
+        hx1 = self.relu1(self.bn1(self.conv1(hx)))
+        hx = self.pool1(hx1)
+
+        hx2 = self.relu2(self.bn2(self.conv2(hx)))
+        hx = self.pool2(hx2)
+
+        hx3 = self.relu3(self.bn3(self.conv3(hx)))
+        hx = self.pool3(hx3)
+
+        hx4 = self.relu4(self.bn4(self.conv4(hx)))
+        hx = self.pool4(hx4)
+
+        hx5 = self.relu5(self.bn5(self.conv5(hx)))
+
+        hx = self.upscore2(hx5)
+
+        d4 = self.relu_d4(self.bn_d4(self.conv_d4(torch.cat((hx, hx4), 1))))
+        hx = self.upscore2(d4)
+
+        d3 = self.relu_d3(self.bn_d3(self.conv_d3(torch.cat((hx, hx3), 1))))
+        hx = self.upscore2(d3)
+
+        d2 = self.relu_d2(self.bn_d2(self.conv_d2(torch.cat((hx, hx2), 1))))
+        hx = self.upscore2(d2)
+
+        d1 = self.relu_d1(self.bn_d1(self.conv_d1(torch.cat((hx, hx1), 1))))
+
+        residual = self.conv_d0(d1)
+
+        return x + residual
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    hx = x
+    hx = self.conv0(hx)
+
+    hx1 = self.relu1(self.bn1(self.conv1(hx)))
+    hx = self.pool1(hx1)
+
+    hx2 = self.relu2(self.bn2(self.conv2(hx)))
+    hx = self.pool2(hx2)
+
+    hx3 = self.relu3(self.bn3(self.conv3(hx)))
+    hx = self.pool3(hx3)
+
+    hx4 = self.relu4(self.bn4(self.conv4(hx)))
+    hx = self.pool4(hx4)
+
+    hx5 = self.relu5(self.bn5(self.conv5(hx)))
+
+    hx = self.upscore2(hx5)
+
+    d4 = self.relu_d4(self.bn_d4(self.conv_d4(torch.cat((hx, hx4), 1))))
+    hx = self.upscore2(d4)
+
+    d3 = self.relu_d3(self.bn_d3(self.conv_d3(torch.cat((hx, hx3), 1))))
+    hx = self.upscore2(d3)
+
+    d2 = self.relu_d2(self.bn_d2(self.conv_d2(torch.cat((hx, hx2), 1))))
+    hx = self.upscore2(d2)
+
+    d1 = self.relu_d1(self.bn_d1(self.conv_d1(torch.cat((hx, hx1), 1))))
+
+    residual = self.conv_d0(d1)
+
+    return x + residual
+
+
+
+
+
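As the source above shows, RefUnet is a small encoder-decoder that predicts a residual correction and adds it back to its input; BASNet instantiates it as RefUnet(1, 64) and feeds it the coarse d1 map. A minimal hypothetical sketch (the spatial side should be divisible by 16 because of the four pooling stages):

import torch
from carvekit.ml.arch.basnet.basnet import RefUnet

refiner = RefUnet(in_ch=1, inc_ch=64)
refiner.eval()

with torch.no_grad():
    coarse = torch.rand(1, 1, 256, 256)  # e.g. an unrefined saliency/mask prediction
    refined = refiner(coarse)            # input plus the predicted residual

print(refined.shape)  # torch.Size([1, 1, 256, 256])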
+
+
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/basnet/index.html b/docs/api/carvekit/ml/arch/basnet/index.html
new file mode 100644
index 0000000..4730a60
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/basnet/index.html
@@ -0,0 +1,67 @@
+carvekit.ml.arch.basnet API documentation
+
+
+

Module carvekit.ml.arch.basnet

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.basnet.basnet
+
+

Source url: https://github.com/NathanUA/BASNet +Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: MIT License

+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/cascadepsp/extractors.html b/docs/api/carvekit/ml/arch/cascadepsp/extractors.html
new file mode 100644
index 0000000..e9449ac
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/cascadepsp/extractors.html
@@ -0,0 +1,522 @@
+carvekit.ml.arch.cascadepsp.extractors API documentation
+
+
+

Module carvekit.ml.arch.cascadepsp.extractors

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/hkchengrex/CascadePSP +License: MIT License

+
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/hkchengrex/CascadePSP
+License: MIT License
+"""
+import math
+
+import torch.nn as nn
+
+
+def conv3x3(in_planes, out_planes, stride=1, dilation=1):
+    return nn.Conv2d(
+        in_planes,
+        out_planes,
+        kernel_size=3,
+        stride=stride,
+        padding=dilation,
+        dilation=dilation,
+        bias=False,
+    )
+
+
+class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(
+            planes,
+            planes,
+            kernel_size=3,
+            stride=stride,
+            dilation=dilation,
+            padding=dilation,
+            bias=False,
+        )
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+        self.bn3 = nn.BatchNorm2d(planes * 4)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class ResNet(nn.Module):
+    def __init__(self, block, layers=(3, 4, 23, 3)):
+        self.inplanes = 64
+        super(ResNet, self).__init__()
+        self.conv1 = nn.Conv2d(6, 64, kernel_size=7, stride=2, padding=3, bias=False)
+        self.bn1 = nn.BatchNorm2d(64)
+        self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4)
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+                m.weight.data.normal_(0, math.sqrt(2.0 / n))
+            elif isinstance(m, nn.BatchNorm2d):
+                m.weight.data.fill_(1)
+                m.bias.data.zero_()
+
+    def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                nn.Conv2d(
+                    self.inplanes,
+                    planes * block.expansion,
+                    kernel_size=1,
+                    stride=stride,
+                    bias=False,
+                ),
+                nn.BatchNorm2d(planes * block.expansion),
+            )
+
+        layers = [block(self.inplanes, planes, stride, downsample)]
+        self.inplanes = planes * block.expansion
+        for i in range(1, blocks):
+            layers.append(block(self.inplanes, planes, dilation=dilation))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x_1 = self.conv1(x)  # /2
+        x = self.bn1(x_1)
+        x = self.relu(x)
+        x = self.maxpool(x)  # /2
+
+        x_2 = self.layer1(x)
+        x = self.layer2(x_2)  # /2
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        return x, x_1, x_2
+
+
+def resnet50():
+    model = ResNet(Bottleneck, [3, 4, 6, 3])
+    return model
+
+
+
+
+
+
+
+

Functions

+
+
+def conv3x3(in_planes, out_planes, stride=1, dilation=1)
+
+
+
def conv3x3(in_planes, out_planes, stride=1, dilation=1):
+    return nn.Conv2d(
+        in_planes,
+        out_planes,
+        kernel_size=3,
+        stride=stride,
+        padding=dilation,
+        dilation=dilation,
+        bias=False,
+    )
+
+
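Because padding is set equal to dilation, this 3x3 convolution preserves the spatial size for any dilation rate, which is what allows layer3 and layer4 of the extractor below to stay at stride 1. A quick hypothetical check:

import torch
from carvekit.ml.arch.cascadepsp.extractors import conv3x3

conv = conv3x3(in_planes=64, out_planes=64, stride=1, dilation=4)
y = conv(torch.rand(1, 64, 28, 28))
print(y.shape)  # torch.Size([1, 64, 28, 28]) -- only the receptive field grows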
+
+def resnet50()
+
+
+
def resnet50():
+    model = ResNet(Bottleneck, [3, 4, 6, 3])
+    return model
+
+
+
+
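Note that this extractor's stem convolution expects 6 input channels (an RGB image concatenated with three guidance channels, as RefinementModule in the pspnet module does), and forward returns three feature maps from different depths. A minimal hypothetical sketch with random weights:

import torch
from carvekit.ml.arch.cascadepsp.extractors import resnet50

backbone = resnet50()  # ResNet(Bottleneck, [3, 4, 6, 3]) with a 6-channel stem
backbone.eval()

with torch.no_grad():
    p = torch.rand(1, 6, 224, 224)  # image (3 ch) + segmentation guidance (3 ch)
    deep, x_1, x_2 = backbone(p)

print(deep.shape, x_1.shape, x_2.shape)
# deep: stride-8 dilated features, x_1: stride-2 stem output, x_2: stride-4 layer1 output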
+
+

Classes

+
+
+class Bottleneck(inplanes, planes, stride=1, downsample=None, dilation=1)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(
+            planes,
+            planes,
+            kernel_size=3,
+            stride=stride,
+            dilation=dilation,
+            padding=dilation,
+            bias=False,
+        )
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+        self.bn3 = nn.BatchNorm2d(planes * 4)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Class variables

+
+
var expansion
+
+
+
+
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    residual = x
+
+    out = self.conv1(x)
+    out = self.bn1(out)
+    out = self.relu(out)
+
+    out = self.conv2(out)
+    out = self.bn2(out)
+    out = self.relu(out)
+
+    out = self.conv3(out)
+    out = self.bn3(out)
+
+    if self.downsample is not None:
+        residual = self.downsample(x)
+
+    out += residual
+    out = self.relu(out)
+
+    return out
+
+
+
+
+
+class ResNet(block, layers=(3, 4, 23, 3))
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class ResNet(nn.Module):
+    def __init__(self, block, layers=(3, 4, 23, 3)):
+        self.inplanes = 64
+        super(ResNet, self).__init__()
+        self.conv1 = nn.Conv2d(6, 64, kernel_size=7, stride=2, padding=3, bias=False)
+        self.bn1 = nn.BatchNorm2d(64)
+        self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4)
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+                m.weight.data.normal_(0, math.sqrt(2.0 / n))
+            elif isinstance(m, nn.BatchNorm2d):
+                m.weight.data.fill_(1)
+                m.bias.data.zero_()
+
+    def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                nn.Conv2d(
+                    self.inplanes,
+                    planes * block.expansion,
+                    kernel_size=1,
+                    stride=stride,
+                    bias=False,
+                ),
+                nn.BatchNorm2d(planes * block.expansion),
+            )
+
+        layers = [block(self.inplanes, planes, stride, downsample)]
+        self.inplanes = planes * block.expansion
+        for i in range(1, blocks):
+            layers.append(block(self.inplanes, planes, dilation=dilation))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x_1 = self.conv1(x)  # /2
+        x = self.bn1(x_1)
+        x = self.relu(x)
+        x = self.maxpool(x)  # /2
+
+        x_2 = self.layer1(x)
+        x = self.layer2(x_2)  # /2
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        return x, x_1, x_2
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    x_1 = self.conv1(x)  # /2
+    x = self.bn1(x_1)
+    x = self.relu(x)
+    x = self.maxpool(x)  # /2
+
+    x_2 = self.layer1(x)
+    x = self.layer2(x_2)  # /2
+    x = self.layer3(x)
+    x = self.layer4(x)
+
+    return x, x_1, x_2
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/cascadepsp/index.html b/docs/api/carvekit/ml/arch/cascadepsp/index.html
new file mode 100644
index 0000000..3b6ac43
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/cascadepsp/index.html
@@ -0,0 +1,79 @@
+carvekit.ml.arch.cascadepsp API documentation
+
+
+

Module carvekit.ml.arch.cascadepsp

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.cascadepsp.extractors
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/hkchengrex/CascadePSP +License: MIT License

+
+
carvekit.ml.arch.cascadepsp.pspnet
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/hkchengrex/CascadePSP +License: MIT License

+
+
carvekit.ml.arch.cascadepsp.utils
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/cascadepsp/pspnet.html b/docs/api/carvekit/ml/arch/cascadepsp/pspnet.html
new file mode 100644
index 0000000..9e64ebf
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/cascadepsp/pspnet.html
@@ -0,0 +1,771 @@
+carvekit.ml.arch.cascadepsp.pspnet API documentation
+
+
+

Module carvekit.ml.arch.cascadepsp.pspnet

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/hkchengrex/CascadePSP +License: MIT License

+
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/hkchengrex/CascadePSP
+License: MIT License
+"""
+
+import torch
+from torch import nn
+from torch.nn import functional as F
+from carvekit.ml.arch.cascadepsp.extractors import resnet50
+
+
+class PSPModule(nn.Module):
+    def __init__(self, features, out_features=1024, sizes=(1, 2, 3, 6)):
+        super().__init__()
+        self.stages = []
+        self.stages = nn.ModuleList(
+            [self._make_stage(features, size) for size in sizes]
+        )
+        self.bottleneck = nn.Conv2d(
+            features * (len(sizes) + 1), out_features, kernel_size=1
+        )
+        self.relu = nn.ReLU(inplace=True)
+
+    def _make_stage(self, features, size):
+        prior = nn.AdaptiveAvgPool2d(output_size=(size, size))
+        conv = nn.Conv2d(features, features, kernel_size=1, bias=False)
+        return nn.Sequential(prior, conv)
+
+    def forward(self, feats):
+        h, w = feats.size(2), feats.size(3)
+        set_priors = [
+            F.interpolate(
+                input=stage(feats), size=(h, w), mode="bilinear", align_corners=False
+            )
+            for stage in self.stages
+        ]
+        priors = set_priors + [feats]
+        bottle = self.bottleneck(torch.cat(priors, 1))
+        return self.relu(bottle)
+
+
+class PSPUpsample(nn.Module):
+    def __init__(self, x_channels, in_channels, out_channels):
+        super().__init__()
+        self.conv = nn.Sequential(
+            nn.BatchNorm2d(in_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(in_channels, out_channels, 3, padding=1),
+            nn.BatchNorm2d(out_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, 3, padding=1),
+        )
+
+        self.conv2 = nn.Sequential(
+            nn.BatchNorm2d(out_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, 3, padding=1),
+            nn.BatchNorm2d(out_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, 3, padding=1),
+        )
+
+        self.shortcut = nn.Conv2d(x_channels, out_channels, kernel_size=1)
+
+    def forward(self, x, up):
+        x = F.interpolate(input=x, scale_factor=2, mode="bilinear", align_corners=False)
+
+        p = self.conv(torch.cat([x, up], 1).type(x.type()))
+        sc = self.shortcut(x)
+
+        p = p + sc
+
+        p2 = self.conv2(p)
+
+        return p + p2
+
+
+class RefinementModule(nn.Module):
+    def __init__(self):
+        super().__init__()
+
+        self.feats = resnet50()
+        self.psp = PSPModule(2048, 1024, (1, 2, 3, 6))
+
+        self.up_1 = PSPUpsample(1024, 1024 + 256, 512)
+        self.up_2 = PSPUpsample(512, 512 + 64, 256)
+        self.up_3 = PSPUpsample(256, 256 + 3, 32)
+
+        self.final_28 = nn.Sequential(
+            nn.Conv2d(1024, 32, kernel_size=1),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(32, 1, kernel_size=1),
+        )
+
+        self.final_56 = nn.Sequential(
+            nn.Conv2d(512, 32, kernel_size=1),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(32, 1, kernel_size=1),
+        )
+
+        self.final_11 = nn.Conv2d(32 + 3, 32, kernel_size=1)
+        self.final_21 = nn.Conv2d(32, 1, kernel_size=1)
+
+    def forward(self, x, seg, inter_s8=None, inter_s4=None):
+
+        images = {}
+
+        """
+        First iteration, s8 output
+        """
+        if inter_s8 is None:
+            p = torch.cat((x, seg, seg, seg), 1)
+
+            f, f_1, f_2 = self.feats(p)
+            p = self.psp(f)
+
+            inter_s8 = self.final_28(p)
+            r_inter_s8 = F.interpolate(
+                inter_s8, scale_factor=8, mode="bilinear", align_corners=False
+            )
+            r_inter_tanh_s8 = torch.tanh(r_inter_s8)
+
+            images["pred_28"] = torch.sigmoid(r_inter_s8)
+            images["out_28"] = r_inter_s8
+        else:
+            r_inter_tanh_s8 = inter_s8
+
+        """
+        Second iteration, s8 output
+        """
+        if inter_s4 is None:
+            p = torch.cat((x, seg, r_inter_tanh_s8, r_inter_tanh_s8), 1)
+
+            f, f_1, f_2 = self.feats(p)
+            p = self.psp(f)
+            inter_s8_2 = self.final_28(p)
+            r_inter_s8_2 = F.interpolate(
+                inter_s8_2, scale_factor=8, mode="bilinear", align_corners=False
+            )
+            r_inter_tanh_s8_2 = torch.tanh(r_inter_s8_2)
+
+            p = self.up_1(p, f_2)
+
+            inter_s4 = self.final_56(p)
+            r_inter_s4 = F.interpolate(
+                inter_s4, scale_factor=4, mode="bilinear", align_corners=False
+            )
+            r_inter_tanh_s4 = torch.tanh(r_inter_s4)
+
+            images["pred_28_2"] = torch.sigmoid(r_inter_s8_2)
+            images["out_28_2"] = r_inter_s8_2
+            images["pred_56"] = torch.sigmoid(r_inter_s4)
+            images["out_56"] = r_inter_s4
+        else:
+            r_inter_tanh_s8_2 = inter_s8
+            r_inter_tanh_s4 = inter_s4
+
+        """
+        Third iteration, s1 output
+        """
+        p = torch.cat((x, seg, r_inter_tanh_s8_2, r_inter_tanh_s4), 1)
+
+        f, f_1, f_2 = self.feats(p)
+        p = self.psp(f)
+        inter_s8_3 = self.final_28(p)
+        r_inter_s8_3 = F.interpolate(
+            inter_s8_3, scale_factor=8, mode="bilinear", align_corners=False
+        )
+
+        p = self.up_1(p, f_2)
+        inter_s4_2 = self.final_56(p)
+        r_inter_s4_2 = F.interpolate(
+            inter_s4_2, scale_factor=4, mode="bilinear", align_corners=False
+        )
+        p = self.up_2(p, f_1)
+        p = self.up_3(p, x)
+
+        """
+        Final output
+        """
+        p = F.relu(self.final_11(torch.cat([p, x], 1)), inplace=True)
+        p = self.final_21(p)
+
+        pred_224 = torch.sigmoid(p)
+
+        images["pred_224"] = pred_224
+        images["out_224"] = p
+        images["pred_28_3"] = torch.sigmoid(r_inter_s8_3)
+        images["pred_56_2"] = torch.sigmoid(r_inter_s4_2)
+        images["out_28_3"] = r_inter_s8_3
+        images["out_56_2"] = r_inter_s4_2
+
+        return images
+
+
+
+
+
+
+
+
+
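To tie the pieces of this module together: when inter_s8 and inter_s4 are left as None, RefinementModule runs the 6-channel resnet50 extractor and PSP head three times (the s8, s4 and final s1 passes in the source above) and returns a dict of intermediate and final predictions. A minimal hypothetical sketch with random weights (side length divisible by 8; 224 matches key names such as pred_224):

import torch
from carvekit.ml.arch.cascadepsp.pspnet import RefinementModule

model = RefinementModule()
model.eval()

with torch.no_grad():
    image = torch.rand(1, 3, 224, 224)  # RGB input
    seg = torch.rand(1, 1, 224, 224)    # coarse segmentation to refine
    out = model(image, seg)             # full three-pass refinement

print(out["pred_224"].shape)  # torch.Size([1, 1, 224, 224]), sigmoid of the final output
print(sorted(out.keys()))     # pred_*/out_* entries for the 28, 56 and 224 resolutions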
+

Classes

+
+
+class PSPModule(features, out_features=1024, sizes=(1, 2, 3, 6))
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class PSPModule(nn.Module):
+    def __init__(self, features, out_features=1024, sizes=(1, 2, 3, 6)):
+        super().__init__()
+        self.stages = []
+        self.stages = nn.ModuleList(
+            [self._make_stage(features, size) for size in sizes]
+        )
+        self.bottleneck = nn.Conv2d(
+            features * (len(sizes) + 1), out_features, kernel_size=1
+        )
+        self.relu = nn.ReLU(inplace=True)
+
+    def _make_stage(self, features, size):
+        prior = nn.AdaptiveAvgPool2d(output_size=(size, size))
+        conv = nn.Conv2d(features, features, kernel_size=1, bias=False)
+        return nn.Sequential(prior, conv)
+
+    def forward(self, feats):
+        h, w = feats.size(2), feats.size(3)
+        set_priors = [
+            F.interpolate(
+                input=stage(feats), size=(h, w), mode="bilinear", align_corners=False
+            )
+            for stage in self.stages
+        ]
+        priors = set_priors + [feats]
+        bottle = self.bottleneck(torch.cat(priors, 1))
+        return self.relu(bottle)
+
+
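To make the pyramid pooling behaviour concrete, here is a minimal shape sketch (assuming the PSPModule class documented above is in scope; the channel sizes match how RefinementModule instantiates it):
>>> import torch
>>> psp = PSPModule(features=2048, out_features=1024, sizes=(1, 2, 3, 6))
>>> feats = torch.randn(1, 2048, 28, 28)   # stride-8 backbone features
>>> psp(feats).shape                       # four pooled branches + feats -> 1x1 bottleneck
torch.Size([1, 1024, 28, 28])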

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, feats) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, feats):
+    h, w = feats.size(2), feats.size(3)
+    set_priors = [
+        F.interpolate(
+            input=stage(feats), size=(h, w), mode="bilinear", align_corners=False
+        )
+        for stage in self.stages
+    ]
+    priors = set_priors + [feats]
+    bottle = self.bottleneck(torch.cat(priors, 1))
+    return self.relu(bottle)
+
+
+
+
+
+class PSPUpsample +(x_channels, in_channels, out_channels) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class PSPUpsample(nn.Module):
+    def __init__(self, x_channels, in_channels, out_channels):
+        super().__init__()
+        self.conv = nn.Sequential(
+            nn.BatchNorm2d(in_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(in_channels, out_channels, 3, padding=1),
+            nn.BatchNorm2d(out_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, 3, padding=1),
+        )
+
+        self.conv2 = nn.Sequential(
+            nn.BatchNorm2d(out_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, 3, padding=1),
+            nn.BatchNorm2d(out_channels),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, 3, padding=1),
+        )
+
+        self.shortcut = nn.Conv2d(x_channels, out_channels, kernel_size=1)
+
+    def forward(self, x, up):
+        x = F.interpolate(input=x, scale_factor=2, mode="bilinear", align_corners=False)
+
+        p = self.conv(torch.cat([x, up], 1).type(x.type()))
+        sc = self.shortcut(x)
+
+        p = p + sc
+
+        p2 = self.conv2(p)
+
+        return p + p2
+
+
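A minimal shape sketch of this upsampling block (assuming PSPUpsample from this page is in scope; the channel counts follow the up_1 stage of RefinementModule):
>>> import torch
>>> up = PSPUpsample(x_channels=1024, in_channels=1024 + 256, out_channels=512).eval()
>>> x = torch.randn(1, 1024, 28, 28)    # coarse features, upsampled 2x inside forward
>>> skip = torch.randn(1, 256, 56, 56)  # higher-resolution features to concatenate
>>> up(x, skip).shape
torch.Size([1, 512, 56, 56])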

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x, up) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x, up):
+    x = F.interpolate(input=x, scale_factor=2, mode="bilinear", align_corners=False)
+
+    p = self.conv(torch.cat([x, up], 1).type(x.type()))
+    sc = self.shortcut(x)
+
+    p = p + sc
+
+    p2 = self.conv2(p)
+
+    return p + p2
+
+
+
+
+
+class RefinementModule +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class RefinementModule(nn.Module):
+    def __init__(self):
+        super().__init__()
+
+        self.feats = resnet50()
+        self.psp = PSPModule(2048, 1024, (1, 2, 3, 6))
+
+        self.up_1 = PSPUpsample(1024, 1024 + 256, 512)
+        self.up_2 = PSPUpsample(512, 512 + 64, 256)
+        self.up_3 = PSPUpsample(256, 256 + 3, 32)
+
+        self.final_28 = nn.Sequential(
+            nn.Conv2d(1024, 32, kernel_size=1),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(32, 1, kernel_size=1),
+        )
+
+        self.final_56 = nn.Sequential(
+            nn.Conv2d(512, 32, kernel_size=1),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(32, 1, kernel_size=1),
+        )
+
+        self.final_11 = nn.Conv2d(32 + 3, 32, kernel_size=1)
+        self.final_21 = nn.Conv2d(32, 1, kernel_size=1)
+
+    def forward(self, x, seg, inter_s8=None, inter_s4=None):
+
+        images = {}
+
+        """
+        First iteration, s8 output
+        """
+        if inter_s8 is None:
+            p = torch.cat((x, seg, seg, seg), 1)
+
+            f, f_1, f_2 = self.feats(p)
+            p = self.psp(f)
+
+            inter_s8 = self.final_28(p)
+            r_inter_s8 = F.interpolate(
+                inter_s8, scale_factor=8, mode="bilinear", align_corners=False
+            )
+            r_inter_tanh_s8 = torch.tanh(r_inter_s8)
+
+            images["pred_28"] = torch.sigmoid(r_inter_s8)
+            images["out_28"] = r_inter_s8
+        else:
+            r_inter_tanh_s8 = inter_s8
+
+        """
+        Second iteration, s8 output
+        """
+        if inter_s4 is None:
+            p = torch.cat((x, seg, r_inter_tanh_s8, r_inter_tanh_s8), 1)
+
+            f, f_1, f_2 = self.feats(p)
+            p = self.psp(f)
+            inter_s8_2 = self.final_28(p)
+            r_inter_s8_2 = F.interpolate(
+                inter_s8_2, scale_factor=8, mode="bilinear", align_corners=False
+            )
+            r_inter_tanh_s8_2 = torch.tanh(r_inter_s8_2)
+
+            p = self.up_1(p, f_2)
+
+            inter_s4 = self.final_56(p)
+            r_inter_s4 = F.interpolate(
+                inter_s4, scale_factor=4, mode="bilinear", align_corners=False
+            )
+            r_inter_tanh_s4 = torch.tanh(r_inter_s4)
+
+            images["pred_28_2"] = torch.sigmoid(r_inter_s8_2)
+            images["out_28_2"] = r_inter_s8_2
+            images["pred_56"] = torch.sigmoid(r_inter_s4)
+            images["out_56"] = r_inter_s4
+        else:
+            r_inter_tanh_s8_2 = inter_s8
+            r_inter_tanh_s4 = inter_s4
+
+        """
+        Third iteration, s1 output
+        """
+        p = torch.cat((x, seg, r_inter_tanh_s8_2, r_inter_tanh_s4), 1)
+
+        f, f_1, f_2 = self.feats(p)
+        p = self.psp(f)
+        inter_s8_3 = self.final_28(p)
+        r_inter_s8_3 = F.interpolate(
+            inter_s8_3, scale_factor=8, mode="bilinear", align_corners=False
+        )
+
+        p = self.up_1(p, f_2)
+        inter_s4_2 = self.final_56(p)
+        r_inter_s4_2 = F.interpolate(
+            inter_s4_2, scale_factor=4, mode="bilinear", align_corners=False
+        )
+        p = self.up_2(p, f_1)
+        p = self.up_3(p, x)
+
+        """
+        Final output
+        """
+        p = F.relu(self.final_11(torch.cat([p, x], 1)), inplace=True)
+        p = self.final_21(p)
+
+        pred_224 = torch.sigmoid(p)
+
+        images["pred_224"] = pred_224
+        images["out_224"] = p
+        images["pred_28_3"] = torch.sigmoid(r_inter_s8_3)
+        images["pred_56_2"] = torch.sigmoid(r_inter_s4_2)
+        images["out_28_3"] = r_inter_s8_3
+        images["out_56_2"] = r_inter_s4_2
+
+        return images
+
+
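A rough usage sketch of the full refinement pass; this assumes RefinementModule from this module is already in scope (the import path is not shown on this page). With both inter_s8 and inter_s4 left as None, all three iterations run and the returned dictionary contains the keys assigned above:
>>> import torch
>>> model = RefinementModule().eval()   # assumes the class documented above is in scope
>>> x = torch.randn(1, 3, 224, 224)     # RGB crop; sides should be divisible by 8
>>> seg = torch.rand(1, 1, 224, 224)    # coarse segmentation mask in [0, 1]
>>> out = model(x, seg)                 # inter_s8=None, inter_s4=None -> all three iterations
>>> sorted(out)[:3]
['out_224', 'out_28', 'out_28_2']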

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Subclasses

+ +

Methods

+
+
+def forward(self, x, seg, inter_s8=None, inter_s4=None) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x, seg, inter_s8=None, inter_s4=None):
+
+    images = {}
+
+    """
+    First iteration, s8 output
+    """
+    if inter_s8 is None:
+        p = torch.cat((x, seg, seg, seg), 1)
+
+        f, f_1, f_2 = self.feats(p)
+        p = self.psp(f)
+
+        inter_s8 = self.final_28(p)
+        r_inter_s8 = F.interpolate(
+            inter_s8, scale_factor=8, mode="bilinear", align_corners=False
+        )
+        r_inter_tanh_s8 = torch.tanh(r_inter_s8)
+
+        images["pred_28"] = torch.sigmoid(r_inter_s8)
+        images["out_28"] = r_inter_s8
+    else:
+        r_inter_tanh_s8 = inter_s8
+
+    """
+    Second iteration, s8 output
+    """
+    if inter_s4 is None:
+        p = torch.cat((x, seg, r_inter_tanh_s8, r_inter_tanh_s8), 1)
+
+        f, f_1, f_2 = self.feats(p)
+        p = self.psp(f)
+        inter_s8_2 = self.final_28(p)
+        r_inter_s8_2 = F.interpolate(
+            inter_s8_2, scale_factor=8, mode="bilinear", align_corners=False
+        )
+        r_inter_tanh_s8_2 = torch.tanh(r_inter_s8_2)
+
+        p = self.up_1(p, f_2)
+
+        inter_s4 = self.final_56(p)
+        r_inter_s4 = F.interpolate(
+            inter_s4, scale_factor=4, mode="bilinear", align_corners=False
+        )
+        r_inter_tanh_s4 = torch.tanh(r_inter_s4)
+
+        images["pred_28_2"] = torch.sigmoid(r_inter_s8_2)
+        images["out_28_2"] = r_inter_s8_2
+        images["pred_56"] = torch.sigmoid(r_inter_s4)
+        images["out_56"] = r_inter_s4
+    else:
+        r_inter_tanh_s8_2 = inter_s8
+        r_inter_tanh_s4 = inter_s4
+
+    """
+    Third iteration, s1 output
+    """
+    p = torch.cat((x, seg, r_inter_tanh_s8_2, r_inter_tanh_s4), 1)
+
+    f, f_1, f_2 = self.feats(p)
+    p = self.psp(f)
+    inter_s8_3 = self.final_28(p)
+    r_inter_s8_3 = F.interpolate(
+        inter_s8_3, scale_factor=8, mode="bilinear", align_corners=False
+    )
+
+    p = self.up_1(p, f_2)
+    inter_s4_2 = self.final_56(p)
+    r_inter_s4_2 = F.interpolate(
+        inter_s4_2, scale_factor=4, mode="bilinear", align_corners=False
+    )
+    p = self.up_2(p, f_1)
+    p = self.up_3(p, x)
+
+    """
+    Final output
+    """
+    p = F.relu(self.final_11(torch.cat([p, x], 1)), inplace=True)
+    p = self.final_21(p)
+
+    pred_224 = torch.sigmoid(p)
+
+    images["pred_224"] = pred_224
+    images["out_224"] = p
+    images["pred_28_3"] = torch.sigmoid(r_inter_s8_3)
+    images["pred_56_2"] = torch.sigmoid(r_inter_s4_2)
+    images["out_28_3"] = r_inter_s8_3
+    images["out_56_2"] = r_inter_s4_2
+
+    return images
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/cascadepsp/utils.html b/docs/api/carvekit/ml/arch/cascadepsp/utils.html new file mode 100644 index 0000000..a59b80d --- /dev/null +++ b/docs/api/carvekit/ml/arch/cascadepsp/utils.html @@ -0,0 +1,425 @@ + + + + + + +carvekit.ml.arch.cascadepsp.utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.cascadepsp.utils

+
+
+
+ +Expand source code + +
import torch
+import torch.nn.functional as F
+
+
+def resize_max_side(im, size, method):
+    h, w = im.shape[-2:]
+    max_side = max(h, w)
+    ratio = size / max_side
+    if method in ["bilinear", "bicubic"]:
+        return F.interpolate(im, scale_factor=ratio, mode=method, align_corners=False)
+    else:
+        return F.interpolate(im, scale_factor=ratio, mode=method)
+
+
+def process_high_res_im(model, im, seg, L=900):
+    stride = L // 2
+
+    _, _, h, w = seg.shape
+    if max(h, w) > L:
+        im_small = resize_max_side(im, L, "area")
+        seg_small = resize_max_side(seg, L, "area")
+    elif max(h, w) < L:
+        im_small = resize_max_side(im, L, "bicubic")
+        seg_small = resize_max_side(seg, L, "bilinear")
+    else:
+        im_small = im
+        seg_small = seg
+
+    images = model.safe_forward(im_small, seg_small)
+
+    pred_224 = images["pred_224"]
+    pred_56 = images["pred_56_2"]
+
+    for new_size in [max(h, w)]:
+        im_small = resize_max_side(im, new_size, "area")
+        seg_small = resize_max_side(seg, new_size, "area")
+        _, _, h, w = seg_small.shape
+
+        combined_224 = torch.zeros_like(seg_small)
+        combined_weight = torch.zeros_like(seg_small)
+
+        r_pred_224 = (
+            F.interpolate(pred_224, size=(h, w), mode="bilinear", align_corners=False)
+            > 0.5
+        ).float() * 2 - 1
+        r_pred_56 = (
+            F.interpolate(pred_56, size=(h, w), mode="bilinear", align_corners=False)
+            * 2
+            - 1
+        )
+
+        padding = 16
+        step_size = stride - padding * 2
+        step_len = L
+
+        used_start_idx = {}
+        for x_idx in range((w) // step_size + 1):
+            for y_idx in range((h) // step_size + 1):
+
+                start_x = x_idx * step_size
+                start_y = y_idx * step_size
+                end_x = start_x + step_len
+                end_y = start_y + step_len
+
+                # Shift when required
+                if end_y > h:
+                    end_y = h
+                    start_y = h - step_len
+                if end_x > w:
+                    end_x = w
+                    start_x = w - step_len
+
+                # Bound x/y range
+                start_x = max(0, start_x)
+                start_y = max(0, start_y)
+                end_x = min(w, end_x)
+                end_y = min(h, end_y)
+
+                # The same crop might appear twice due to bounding/shifting
+                start_idx = start_y * w + start_x
+                if start_idx in used_start_idx:
+                    continue
+                else:
+                    used_start_idx[start_idx] = True
+
+                # Take crop
+                im_part = im_small[:, :, start_y:end_y, start_x:end_x]
+                seg_224_part = r_pred_224[:, :, start_y:end_y, start_x:end_x]
+                seg_56_part = r_pred_56[:, :, start_y:end_y, start_x:end_x]
+
+                # Skip when it is not an interesting crop anyway
+                seg_part_norm = (seg_224_part > 0).float()
+                high_thres = 0.9
+                low_thres = 0.1
+                if (seg_part_norm.mean() > high_thres) or (
+                    seg_part_norm.mean() < low_thres
+                ):
+                    continue
+                grid_images = model.safe_forward(im_part, seg_224_part, seg_56_part)
+                grid_pred_224 = grid_images["pred_224"]
+
+                # Padding
+                pred_sx = pred_sy = 0
+                pred_ex = step_len
+                pred_ey = step_len
+
+                if start_x != 0:
+                    start_x += padding
+                    pred_sx += padding
+                if start_y != 0:
+                    start_y += padding
+                    pred_sy += padding
+                if end_x != w:
+                    end_x -= padding
+                    pred_ex -= padding
+                if end_y != h:
+                    end_y -= padding
+                    pred_ey -= padding
+
+                combined_224[:, :, start_y:end_y, start_x:end_x] += grid_pred_224[
+                    :, :, pred_sy:pred_ey, pred_sx:pred_ex
+                ]
+
+                del grid_pred_224
+
+                # Used for averaging
+                combined_weight[:, :, start_y:end_y, start_x:end_x] += 1
+
+        # Final full resolution output
+        seg_norm = r_pred_224 / 2 + 0.5
+        pred_224 = combined_224 / combined_weight
+        pred_224 = torch.where(combined_weight == 0, seg_norm, pred_224)
+
+    _, _, h, w = seg.shape
+    images = {}
+    images["pred_224"] = F.interpolate(
+        pred_224, size=(h, w), mode="bilinear", align_corners=True
+    )
+
+    return images["pred_224"]
+
+
+def process_im_single_pass(model, im, seg, L=900):
+    """
+    A single pass version, aka global step only.
+    """
+
+    _, _, h, w = im.shape
+    if max(h, w) < L:
+        im = resize_max_side(im, L, "bicubic")
+        seg = resize_max_side(seg, L, "bilinear")
+
+    if max(h, w) > L:
+        im = resize_max_side(im, L, "area")
+        seg = resize_max_side(seg, L, "area")
+
+    images = model.safe_forward(im, seg)
+
+    if max(h, w) < L:
+        images["pred_224"] = F.interpolate(images["pred_224"], size=(h, w), mode="area")
+    elif max(h, w) > L:
+        images["pred_224"] = F.interpolate(
+            images["pred_224"], size=(h, w), mode="bilinear", align_corners=True
+        )
+
+    return images["pred_224"]
+
+
+
+
+
+
+
+

Functions

+
+
+def process_high_res_im(model, im, seg, L=900) +
+
+
+
+ +Expand source code + +
def process_high_res_im(model, im, seg, L=900):
+    stride = L // 2
+
+    _, _, h, w = seg.shape
+    if max(h, w) > L:
+        im_small = resize_max_side(im, L, "area")
+        seg_small = resize_max_side(seg, L, "area")
+    elif max(h, w) < L:
+        im_small = resize_max_side(im, L, "bicubic")
+        seg_small = resize_max_side(seg, L, "bilinear")
+    else:
+        im_small = im
+        seg_small = seg
+
+    images = model.safe_forward(im_small, seg_small)
+
+    pred_224 = images["pred_224"]
+    pred_56 = images["pred_56_2"]
+
+    for new_size in [max(h, w)]:
+        im_small = resize_max_side(im, new_size, "area")
+        seg_small = resize_max_side(seg, new_size, "area")
+        _, _, h, w = seg_small.shape
+
+        combined_224 = torch.zeros_like(seg_small)
+        combined_weight = torch.zeros_like(seg_small)
+
+        r_pred_224 = (
+            F.interpolate(pred_224, size=(h, w), mode="bilinear", align_corners=False)
+            > 0.5
+        ).float() * 2 - 1
+        r_pred_56 = (
+            F.interpolate(pred_56, size=(h, w), mode="bilinear", align_corners=False)
+            * 2
+            - 1
+        )
+
+        padding = 16
+        step_size = stride - padding * 2
+        step_len = L
+
+        used_start_idx = {}
+        for x_idx in range((w) // step_size + 1):
+            for y_idx in range((h) // step_size + 1):
+
+                start_x = x_idx * step_size
+                start_y = y_idx * step_size
+                end_x = start_x + step_len
+                end_y = start_y + step_len
+
+                # Shift when required
+                if end_y > h:
+                    end_y = h
+                    start_y = h - step_len
+                if end_x > w:
+                    end_x = w
+                    start_x = w - step_len
+
+                # Bound x/y range
+                start_x = max(0, start_x)
+                start_y = max(0, start_y)
+                end_x = min(w, end_x)
+                end_y = min(h, end_y)
+
+                # The same crop might appear twice due to bounding/shifting
+                start_idx = start_y * w + start_x
+                if start_idx in used_start_idx:
+                    continue
+                else:
+                    used_start_idx[start_idx] = True
+
+                # Take crop
+                im_part = im_small[:, :, start_y:end_y, start_x:end_x]
+                seg_224_part = r_pred_224[:, :, start_y:end_y, start_x:end_x]
+                seg_56_part = r_pred_56[:, :, start_y:end_y, start_x:end_x]
+
+                # Skip when it is not an interesting crop anyway
+                seg_part_norm = (seg_224_part > 0).float()
+                high_thres = 0.9
+                low_thres = 0.1
+                if (seg_part_norm.mean() > high_thres) or (
+                    seg_part_norm.mean() < low_thres
+                ):
+                    continue
+                grid_images = model.safe_forward(im_part, seg_224_part, seg_56_part)
+                grid_pred_224 = grid_images["pred_224"]
+
+                # Padding
+                pred_sx = pred_sy = 0
+                pred_ex = step_len
+                pred_ey = step_len
+
+                if start_x != 0:
+                    start_x += padding
+                    pred_sx += padding
+                if start_y != 0:
+                    start_y += padding
+                    pred_sy += padding
+                if end_x != w:
+                    end_x -= padding
+                    pred_ex -= padding
+                if end_y != h:
+                    end_y -= padding
+                    pred_ey -= padding
+
+                combined_224[:, :, start_y:end_y, start_x:end_x] += grid_pred_224[
+                    :, :, pred_sy:pred_ey, pred_sx:pred_ex
+                ]
+
+                del grid_pred_224
+
+                # Used for averaging
+                combined_weight[:, :, start_y:end_y, start_x:end_x] += 1
+
+        # Final full resolution output
+        seg_norm = r_pred_224 / 2 + 0.5
+        pred_224 = combined_224 / combined_weight
+        pred_224 = torch.where(combined_weight == 0, seg_norm, pred_224)
+
+    _, _, h, w = seg.shape
+    images = {}
+    images["pred_224"] = F.interpolate(
+        pred_224, size=(h, w), mode="bilinear", align_corners=True
+    )
+
+    return images["pred_224"]
+
+
+
+def process_im_single_pass(model, im, seg, L=900) +
+
+

A single pass version, aka global step only.

+
+ +Expand source code + +
def process_im_single_pass(model, im, seg, L=900):
+    """
+    A single pass version, aka global step only.
+    """
+
+    _, _, h, w = im.shape
+    if max(h, w) < L:
+        im = resize_max_side(im, L, "bicubic")
+        seg = resize_max_side(seg, L, "bilinear")
+
+    if max(h, w) > L:
+        im = resize_max_side(im, L, "area")
+        seg = resize_max_side(seg, L, "area")
+
+    images = model.safe_forward(im, seg)
+
+    if max(h, w) < L:
+        images["pred_224"] = F.interpolate(images["pred_224"], size=(h, w), mode="area")
+    elif max(h, w) > L:
+        images["pred_224"] = F.interpolate(
+            images["pred_224"], size=(h, w), mode="bilinear", align_corners=True
+        )
+
+    return images["pred_224"]
+
+
+
+def resize_max_side(im, size, method) +
+
+
+
+ +Expand source code + +
def resize_max_side(im, size, method):
+    h, w = im.shape[-2:]
+    max_side = max(h, w)
+    ratio = size / max_side
+    if method in ["bilinear", "bicubic"]:
+        return F.interpolate(im, scale_factor=ratio, mode=method, align_corners=False)
+    else:
+        return F.interpolate(im, scale_factor=ratio, mode=method)
+
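For reference, a small sketch of how resize_max_side scales the longer side to size while preserving the aspect ratio:
>>> import torch
>>> from carvekit.ml.arch.cascadepsp.utils import resize_max_side
>>> im = torch.randn(1, 3, 1200, 800)
>>> resize_max_side(im, 900, "area").shape   # ratio = 900 / 1200
torch.Size([1, 3, 900, 600])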
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/fba_matting/index.html b/docs/api/carvekit/ml/arch/fba_matting/index.html new file mode 100644 index 0000000..5a059ae --- /dev/null +++ b/docs/api/carvekit/ml/arch/fba_matting/index.html @@ -0,0 +1,95 @@ + + + + + + +carvekit.ml.arch.fba_matting API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.fba_matting

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.fba_matting.layers_WS
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+
carvekit.ml.arch.fba_matting.models
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+
carvekit.ml.arch.fba_matting.resnet_GN_WS
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+
carvekit.ml.arch.fba_matting.resnet_bn
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+
carvekit.ml.arch.fba_matting.transforms
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/fba_matting/layers_WS.html b/docs/api/carvekit/ml/arch/fba_matting/layers_WS.html new file mode 100644 index 0000000..1d0f5f9 --- /dev/null +++ b/docs/api/carvekit/ml/arch/fba_matting/layers_WS.html @@ -0,0 +1,383 @@ + + + + + + +carvekit.ml.arch.fba_matting.layers_WS API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.fba_matting.layers_WS

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+ +Expand source code + +
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/MarcoForte/FBA_Matting
+License: MIT License
+"""
+import torch
+import torch.nn as nn
+from torch.nn import functional as F
+
+
+class Conv2d(nn.Conv2d):
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        padding=0,
+        dilation=1,
+        groups=1,
+        bias=True,
+    ):
+        super(Conv2d, self).__init__(
+            in_channels,
+            out_channels,
+            kernel_size,
+            stride,
+            padding,
+            dilation,
+            groups,
+            bias,
+        )
+
+    def forward(self, x):
+        # return super(Conv2d, self).forward(x)
+        weight = self.weight
+        weight_mean = (
+            weight.mean(dim=1, keepdim=True)
+            .mean(dim=2, keepdim=True)
+            .mean(dim=3, keepdim=True)
+        )
+        weight = weight - weight_mean
+        # std = (weight).view(weight.size(0), -1).std(dim=1).view(-1, 1, 1, 1) + 1e-5
+        std = (
+            torch.sqrt(torch.var(weight.view(weight.size(0), -1), dim=1) + 1e-12).view(
+                -1, 1, 1, 1
+            )
+            + 1e-5
+        )
+        weight = weight / std.expand_as(weight)
+        return F.conv2d(
+            x, weight, self.bias, self.stride, self.padding, self.dilation, self.groups
+        )
+
+
+def BatchNorm2d(num_features):
+    return nn.GroupNorm(num_channels=num_features, num_groups=32)
+
+
+
+
+
+
+
+

Functions

+
+
+def BatchNorm2d(num_features) +
+
+
+
+ +Expand source code + +
def BatchNorm2d(num_features):
+    return nn.GroupNorm(num_channels=num_features, num_groups=32)
+
+
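Note that, despite its name, this helper returns a GroupNorm layer with 32 groups, which is the normalization the weight-standardized encoder expects. A tiny sketch:
>>> import torch
>>> from carvekit.ml.arch.fba_matting.layers_WS import BatchNorm2d
>>> layer = BatchNorm2d(64)
>>> type(layer).__name__
'GroupNorm'
>>> layer(torch.randn(1, 64, 8, 8)).shape
torch.Size([1, 64, 8, 8])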
+
+
+
+

Classes

+
+
+class Conv2d +(in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True) +
+
+

Applies a 2D convolution over an input signal composed of several input +planes.

+

In the simplest case, the output value of the layer with input size +:math:(N, C_{\text{in}}, H, W) and output :math:(N, C_{\text{out}}, H_{\text{out}}, W_{\text{out}}) +can be precisely described as:

+

[ \text{out}(N_i, C_{\text{out}_j}) = \text{bias}(C_{\text{out}_j}) + \sum_{k=0}^{C_{\text{in}} - 1} \text{weight}(C_{\text{out}_j}, k) \star \text{input}(N_i, k) ] where :math:\star is the valid 2D cross-correlation operator, :math:N is a batch size, :math:C denotes a number of channels, :math:H is a height of input planes in pixels, and :math:W is width in pixels.

+

This module supports :ref:TensorFloat32<tf32_on_ampere>.

+
    +
  • +

    :attr:stride controls the stride for the cross-correlation, a single +number or a tuple.

    +
  • +
  • +

    :attr:padding controls the amount of padding applied to the input. It +can be either a string {'valid', 'same'} or a tuple of ints giving the +amount of implicit padding applied on both sides.

    +
  • +
  • +

:attr:dilation controls the spacing between the kernel points; also +known as the à trous algorithm. It is harder to describe, but this link_ +has a nice visualization of what :attr:dilation does.

    +
  • +
  • +

    :attr:groups controls the connections between inputs and outputs. +:attr:in_channels and :attr:out_channels must both be divisible by +:attr:groups. For example,

    +
      +
    • At groups=1, all inputs are convolved to all outputs.
    • +
    • At groups=2, the operation becomes equivalent to having two conv +layers side by side, each seeing half the input channels +and producing half the output channels, and both subsequently +concatenated.
    • +
    • At groups= :attr:in_channels, each input channel is convolved with +its own set of filters (of size +:math:\frac{\text{out\_channels}}{\text{in\_channels}}).
    • +
    +
  • +
+

The parameters :attr:kernel_size, :attr:stride, :attr:padding, :attr:dilation can either be:

+
- a single <code>int</code> -- in which case the same value is used for the height and width dimension
+- a <code>tuple</code> of two ints -- in which case, the first <code>int</code> is used for the height dimension,
+  and the second <code>int</code> for the width dimension
+
+

Note

+

When groups == in_channels and out_channels == K * in_channels, +where K is a positive integer, this operation is also known as a "depthwise convolution".

+

In other words, for an input of size :math:(N, C_{in}, L_{in}), +a depthwise convolution with a depthwise multiplier K can be performed with the arguments +:math:(C_\text{in}=C_\text{in}, C_\text{out}=C_\text{in} \times \text{K}, ..., \text{groups}=C_\text{in}).

+

Note

+

In some circumstances when given tensors on a CUDA device and using CuDNN, this operator may select a nondeterministic algorithm to increase performance. If this is undesirable, you can try to make the operation deterministic (potentially at a performance cost) by setting torch.backends.cudnn.deterministic = True. See :doc:/notes/randomness for more information.

+

Note

+

padding='valid' is the same as no padding. padding='same' pads +the input so the output has the shape as the input. However, this mode +doesn't support any stride values other than 1.

+

Args

+
+
in_channels : int
+
Number of channels in the input image
+
out_channels : int
+
Number of channels produced by the convolution
+
kernel_size : int or tuple
+
Size of the convolving kernel
+
stride : int or tuple, optional
+
Stride of the convolution. Default: 1
+
padding : int, tuple or str, optional
+
Padding added to all four sides of +the input. Default: 0
+
padding_mode : string, optional
+
'zeros', 'reflect', +'replicate' or 'circular'. Default: 'zeros'
+
dilation : int or tuple, optional
+
Spacing between kernel elements. Default: 1
+
groups : int, optional
+
Number of blocked connections from input +channels to output channels. Default: 1
+
bias : bool, optional
+
If True, adds a learnable bias to the +output. Default: True
+
+

Shape

+
    +
  • Input: :math:(N, C_{in}, H_{in}, W_{in}) or :math:(C_{in}, H_{in}, W_{in})
  • +
  • Output: :math:(N, C_{out}, H_{out}, W_{out}) or :math:(C_{out}, H_{out}, W_{out}), where
  • +
+

[ H_{out} = \left\lfloor\frac{H_{in} + 2 \times \text{padding}[0] - \text{dilation}[0] \times (\text{kernel\_size}[0] - 1) - 1}{\text{stride}[0]} + 1\right\rfloor ]
[ W_{out} = \left\lfloor\frac{W_{in} + 2 \times \text{padding}[1] - \text{dilation}[1] \times (\text{kernel\_size}[1] - 1) - 1}{\text{stride}[1]} + 1\right\rfloor ]

+

Attributes

+
+
weight : Tensor
+
the learnable weights of the module of shape :math:(\text{out\_channels}, \frac{\text{in\_channels}}{\text{groups}}, \text{kernel\_size}[0], \text{kernel\_size}[1]). The values of these weights are sampled from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}) where :math:k = \frac{groups}{C_\text{in} * \prod_{i=0}^{1}\text{kernel\_size}[i]}
+
bias : Tensor
+
the learnable bias of the module of shape +(out_channels). If :attr:bias is True, +then the values of these weights are +sampled from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}) where +:math:k = \frac{groups}{C_\text{in} * \prod_{i=0}^{1}\text{kernel\_size}[i]}
+
+

Examples

+
>>> # With square kernels and equal stride
+>>> m = nn.Conv2d(16, 33, 3, stride=2)
+>>> # non-square kernels and unequal stride and with padding
+>>> m = nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2))
+>>> # non-square kernels and unequal stride and with padding and dilation
+>>> m = nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2), dilation=(3, 1))
+>>> input = torch.randn(20, 16, 50, 100)
+>>> output = m(input)
+
+

.. _cross-correlation: +https://en.wikipedia.org/wiki/Cross-correlation

+

.. _link: +https://github.com/vdumoulin/conv_arithmetic/blob/master/README.md

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Conv2d(nn.Conv2d):
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        padding=0,
+        dilation=1,
+        groups=1,
+        bias=True,
+    ):
+        super(Conv2d, self).__init__(
+            in_channels,
+            out_channels,
+            kernel_size,
+            stride,
+            padding,
+            dilation,
+            groups,
+            bias,
+        )
+
+    def forward(self, x):
+        # return super(Conv2d, self).forward(x)
+        weight = self.weight
+        weight_mean = (
+            weight.mean(dim=1, keepdim=True)
+            .mean(dim=2, keepdim=True)
+            .mean(dim=3, keepdim=True)
+        )
+        weight = weight - weight_mean
+        # std = (weight).view(weight.size(0), -1).std(dim=1).view(-1, 1, 1, 1) + 1e-5
+        std = (
+            torch.sqrt(torch.var(weight.view(weight.size(0), -1), dim=1) + 1e-12).view(
+                -1, 1, 1, 1
+            )
+            + 1e-5
+        )
+        weight = weight / std.expand_as(weight)
+        return F.conv2d(
+            x, weight, self.bias, self.stride, self.padding, self.dilation, self.groups
+        )
+
+

Ancestors

+
    +
  • torch.nn.modules.conv.Conv2d
  • +
  • torch.nn.modules.conv._ConvNd
  • +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    # return super(Conv2d, self).forward(x)
+    weight = self.weight
+    weight_mean = (
+        weight.mean(dim=1, keepdim=True)
+        .mean(dim=2, keepdim=True)
+        .mean(dim=3, keepdim=True)
+    )
+    weight = weight - weight_mean
+    # std = (weight).view(weight.size(0), -1).std(dim=1).view(-1, 1, 1, 1) + 1e-5
+    std = (
+        torch.sqrt(torch.var(weight.view(weight.size(0), -1), dim=1) + 1e-12).view(
+            -1, 1, 1, 1
+        )
+        + 1e-5
+    )
+    weight = weight / std.expand_as(weight)
+    return F.conv2d(
+        x, weight, self.bias, self.stride, self.padding, self.dilation, self.groups
+    )
+
+
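A minimal sketch showing this layer as a drop-in replacement for torch.nn.Conv2d; the stored weights are left untouched, and the weight standardization above is recomputed at every call:
>>> import torch
>>> from carvekit.ml.arch.fba_matting.layers_WS import Conv2d
>>> conv = Conv2d(16, 32, kernel_size=3, padding=1)
>>> conv(torch.randn(2, 16, 24, 24)).shape
torch.Size([2, 32, 24, 24])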
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/fba_matting/models.html b/docs/api/carvekit/ml/arch/fba_matting/models.html new file mode 100644 index 0000000..c473494 --- /dev/null +++ b/docs/api/carvekit/ml/arch/fba_matting/models.html @@ -0,0 +1,1236 @@ + + + + + + +carvekit.ml.arch.fba_matting.models API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.fba_matting.models

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+ +Expand source code + +
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/MarcoForte/FBA_Matting
+License: MIT License
+"""
+import torch
+import torch.nn as nn
+import carvekit.ml.arch.fba_matting.resnet_GN_WS as resnet_GN_WS
+import carvekit.ml.arch.fba_matting.layers_WS as L
+import carvekit.ml.arch.fba_matting.resnet_bn as resnet_bn
+from functools import partial
+
+
+class FBA(nn.Module):
+    def __init__(self, encoder: str):
+        super(FBA, self).__init__()
+        self.encoder = build_encoder(arch=encoder)
+        self.decoder = fba_decoder(batch_norm=True if "BN" in encoder else False)
+
+    def forward(self, image, two_chan_trimap, image_n, trimap_transformed):
+        resnet_input = torch.cat((image_n, trimap_transformed, two_chan_trimap), 1)
+        conv_out, indices = self.encoder(resnet_input, return_feature_maps=True)
+        return self.decoder(conv_out, image, indices, two_chan_trimap)
+
+
+class ResnetDilatedBN(nn.Module):
+    def __init__(self, orig_resnet, dilate_scale=8):
+        super(ResnetDilatedBN, self).__init__()
+
+        if dilate_scale == 8:
+            orig_resnet.layer3.apply(partial(self._nostride_dilate, dilate=2))
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=4))
+        elif dilate_scale == 16:
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=2))
+
+        # take pretrained resnet, except AvgPool and FC
+        self.conv1 = orig_resnet.conv1
+        self.bn1 = orig_resnet.bn1
+        self.relu1 = orig_resnet.relu1
+        self.conv2 = orig_resnet.conv2
+        self.bn2 = orig_resnet.bn2
+        self.relu2 = orig_resnet.relu2
+        self.conv3 = orig_resnet.conv3
+        self.bn3 = orig_resnet.bn3
+        self.relu3 = orig_resnet.relu3
+        self.maxpool = orig_resnet.maxpool
+        self.layer1 = orig_resnet.layer1
+        self.layer2 = orig_resnet.layer2
+        self.layer3 = orig_resnet.layer3
+        self.layer4 = orig_resnet.layer4
+
+    def _nostride_dilate(self, m, dilate):
+        classname = m.__class__.__name__
+        if classname.find("Conv") != -1:
+            # the convolution with stride
+            if m.stride == (2, 2):
+                m.stride = (1, 1)
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate // 2, dilate // 2)
+                    m.padding = (dilate // 2, dilate // 2)
+            # other convolutions
+            else:
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate, dilate)
+                    m.padding = (dilate, dilate)
+
+    def forward(self, x, return_feature_maps=False):
+        conv_out = [x]
+        x = self.relu1(self.bn1(self.conv1(x)))
+        x = self.relu2(self.bn2(self.conv2(x)))
+        x = self.relu3(self.bn3(self.conv3(x)))
+        conv_out.append(x)
+        x, indices = self.maxpool(x)
+        x = self.layer1(x)
+        conv_out.append(x)
+        x = self.layer2(x)
+        conv_out.append(x)
+        x = self.layer3(x)
+        conv_out.append(x)
+        x = self.layer4(x)
+        conv_out.append(x)
+
+        if return_feature_maps:
+            return conv_out, indices
+        return [x]
+
+
+class Resnet(nn.Module):
+    def __init__(self, orig_resnet):
+        super(Resnet, self).__init__()
+
+        # take pretrained resnet, except AvgPool and FC
+        self.conv1 = orig_resnet.conv1
+        self.bn1 = orig_resnet.bn1
+        self.relu1 = orig_resnet.relu1
+        self.conv2 = orig_resnet.conv2
+        self.bn2 = orig_resnet.bn2
+        self.relu2 = orig_resnet.relu2
+        self.conv3 = orig_resnet.conv3
+        self.bn3 = orig_resnet.bn3
+        self.relu3 = orig_resnet.relu3
+        self.maxpool = orig_resnet.maxpool
+        self.layer1 = orig_resnet.layer1
+        self.layer2 = orig_resnet.layer2
+        self.layer3 = orig_resnet.layer3
+        self.layer4 = orig_resnet.layer4
+
+    def forward(self, x, return_feature_maps=False):
+        conv_out = []
+
+        x = self.relu1(self.bn1(self.conv1(x)))
+        x = self.relu2(self.bn2(self.conv2(x)))
+        x = self.relu3(self.bn3(self.conv3(x)))
+        conv_out.append(x)
+        x, indices = self.maxpool(x)
+
+        x = self.layer1(x)
+        conv_out.append(x)
+        x = self.layer2(x)
+        conv_out.append(x)
+        x = self.layer3(x)
+        conv_out.append(x)
+        x = self.layer4(x)
+        conv_out.append(x)
+
+        if return_feature_maps:
+            return conv_out
+        return [x]
+
+
+class ResnetDilated(nn.Module):
+    def __init__(self, orig_resnet, dilate_scale=8):
+        super(ResnetDilated, self).__init__()
+
+        if dilate_scale == 8:
+            orig_resnet.layer3.apply(partial(self._nostride_dilate, dilate=2))
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=4))
+        elif dilate_scale == 16:
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=2))
+
+        # take pretrained resnet, except AvgPool and FC
+        self.conv1 = orig_resnet.conv1
+        self.bn1 = orig_resnet.bn1
+        self.relu = orig_resnet.relu
+        self.maxpool = orig_resnet.maxpool
+        self.layer1 = orig_resnet.layer1
+        self.layer2 = orig_resnet.layer2
+        self.layer3 = orig_resnet.layer3
+        self.layer4 = orig_resnet.layer4
+
+    def _nostride_dilate(self, m, dilate):
+        classname = m.__class__.__name__
+        if classname.find("Conv") != -1:
+            # the convolution with stride
+            if m.stride == (2, 2):
+                m.stride = (1, 1)
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate // 2, dilate // 2)
+                    m.padding = (dilate // 2, dilate // 2)
+            # other convolutions
+            else:
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate, dilate)
+                    m.padding = (dilate, dilate)
+
+    def forward(self, x, return_feature_maps=False):
+        conv_out = [x]
+        x = self.relu(self.bn1(self.conv1(x)))
+        conv_out.append(x)
+        x, indices = self.maxpool(x)
+        x = self.layer1(x)
+        conv_out.append(x)
+        x = self.layer2(x)
+        conv_out.append(x)
+        x = self.layer3(x)
+        conv_out.append(x)
+        x = self.layer4(x)
+        conv_out.append(x)
+
+        if return_feature_maps:
+            return conv_out, indices
+        return [x]
+
+
+def norm(dim, bn=False):
+    if bn is False:
+        return nn.GroupNorm(32, dim)
+    else:
+        return nn.BatchNorm2d(dim)
+
+
+def fba_fusion(alpha, img, F, B):
+    F = alpha * img + (1 - alpha**2) * F - alpha * (1 - alpha) * B
+    B = (1 - alpha) * img + (2 * alpha - alpha**2) * B - alpha * (1 - alpha) * F
+
+    F = torch.clamp(F, 0, 1)
+    B = torch.clamp(B, 0, 1)
+    la = 0.1
+    alpha = (alpha * la + torch.sum((img - B) * (F - B), 1, keepdim=True)) / (
+        torch.sum((F - B) * (F - B), 1, keepdim=True) + la
+    )
+    alpha = torch.clamp(alpha, 0, 1)
+    return alpha, F, B
+
+
+class fba_decoder(nn.Module):
+    def __init__(self, batch_norm=False):
+        super(fba_decoder, self).__init__()
+        pool_scales = (1, 2, 3, 6)
+        self.batch_norm = batch_norm
+
+        self.ppm = []
+
+        for scale in pool_scales:
+            self.ppm.append(
+                nn.Sequential(
+                    nn.AdaptiveAvgPool2d(scale),
+                    L.Conv2d(2048, 256, kernel_size=1, bias=True),
+                    norm(256, self.batch_norm),
+                    nn.LeakyReLU(),
+                )
+            )
+        self.ppm = nn.ModuleList(self.ppm)
+
+        self.conv_up1 = nn.Sequential(
+            L.Conv2d(
+                2048 + len(pool_scales) * 256, 256, kernel_size=3, padding=1, bias=True
+            ),
+            norm(256, self.batch_norm),
+            nn.LeakyReLU(),
+            L.Conv2d(256, 256, kernel_size=3, padding=1),
+            norm(256, self.batch_norm),
+            nn.LeakyReLU(),
+        )
+
+        self.conv_up2 = nn.Sequential(
+            L.Conv2d(256 + 256, 256, kernel_size=3, padding=1, bias=True),
+            norm(256, self.batch_norm),
+            nn.LeakyReLU(),
+        )
+        if self.batch_norm:
+            d_up3 = 128
+        else:
+            d_up3 = 64
+        self.conv_up3 = nn.Sequential(
+            L.Conv2d(256 + d_up3, 64, kernel_size=3, padding=1, bias=True),
+            norm(64, self.batch_norm),
+            nn.LeakyReLU(),
+        )
+
+        self.unpool = nn.MaxUnpool2d(2, stride=2)
+
+        self.conv_up4 = nn.Sequential(
+            nn.Conv2d(64 + 3 + 3 + 2, 32, kernel_size=3, padding=1, bias=True),
+            nn.LeakyReLU(),
+            nn.Conv2d(32, 16, kernel_size=3, padding=1, bias=True),
+            nn.LeakyReLU(),
+            nn.Conv2d(16, 7, kernel_size=1, padding=0, bias=True),
+        )
+
+    def forward(self, conv_out, img, indices, two_chan_trimap):
+        conv5 = conv_out[-1]
+
+        input_size = conv5.size()
+        ppm_out = [conv5]
+        for pool_scale in self.ppm:
+            ppm_out.append(
+                nn.functional.interpolate(
+                    pool_scale(conv5),
+                    (input_size[2], input_size[3]),
+                    mode="bilinear",
+                    align_corners=False,
+                )
+            )
+        ppm_out = torch.cat(ppm_out, 1)
+        x = self.conv_up1(ppm_out)
+
+        x = torch.nn.functional.interpolate(
+            x, scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+        x = torch.cat((x, conv_out[-4]), 1)
+
+        x = self.conv_up2(x)
+        x = torch.nn.functional.interpolate(
+            x, scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+        x = torch.cat((x, conv_out[-5]), 1)
+        x = self.conv_up3(x)
+
+        x = torch.nn.functional.interpolate(
+            x, scale_factor=2, mode="bilinear", align_corners=False
+        )
+        x = torch.cat((x, conv_out[-6][:, :3], img, two_chan_trimap), 1)
+
+        output = self.conv_up4(x)
+
+        alpha = torch.clamp(output[:, 0][:, None], 0, 1)
+        F = torch.sigmoid(output[:, 1:4])
+        B = torch.sigmoid(output[:, 4:7])
+
+        # FBA Fusion
+        alpha, F, B = fba_fusion(alpha, img, F, B)
+
+        output = torch.cat((alpha, F, B), 1)
+
+        return output
+
+
+def build_encoder(arch="resnet50_GN"):
+    if arch == "resnet50_GN_WS":
+        orig_resnet = resnet_GN_WS.__dict__["l_resnet50"]()
+        net_encoder = ResnetDilated(orig_resnet, dilate_scale=8)
+    elif arch == "resnet50_BN":
+        orig_resnet = resnet_bn.__dict__["l_resnet50"]()
+        net_encoder = ResnetDilatedBN(orig_resnet, dilate_scale=8)
+
+    else:
+        raise ValueError("Architecture undefined!")
+
+    num_channels = 3 + 6 + 2
+
+    if num_channels > 3:
+        net_encoder_sd = net_encoder.state_dict()
+        conv1_weights = net_encoder_sd["conv1.weight"]
+
+        c_out, c_in, h, w = conv1_weights.size()
+        conv1_mod = torch.zeros(c_out, num_channels, h, w)
+        conv1_mod[:, :3, :, :] = conv1_weights
+
+        conv1 = net_encoder.conv1
+        conv1.in_channels = num_channels
+        conv1.weight = torch.nn.Parameter(conv1_mod)
+
+        net_encoder.conv1 = conv1
+
+        net_encoder_sd["conv1.weight"] = conv1_mod
+
+        net_encoder.load_state_dict(net_encoder_sd)
+    return net_encoder
+
+
+
+
+
+
+
+

Functions

+
+
+def build_encoder(arch='resnet50_GN') +
+
+
+
+ +Expand source code + +
def build_encoder(arch="resnet50_GN"):
+    if arch == "resnet50_GN_WS":
+        orig_resnet = resnet_GN_WS.__dict__["l_resnet50"]()
+        net_encoder = ResnetDilated(orig_resnet, dilate_scale=8)
+    elif arch == "resnet50_BN":
+        orig_resnet = resnet_bn.__dict__["l_resnet50"]()
+        net_encoder = ResnetDilatedBN(orig_resnet, dilate_scale=8)
+
+    else:
+        raise ValueError("Architecture undefined!")
+
+    num_channels = 3 + 6 + 2
+
+    if num_channels > 3:
+        net_encoder_sd = net_encoder.state_dict()
+        conv1_weights = net_encoder_sd["conv1.weight"]
+
+        c_out, c_in, h, w = conv1_weights.size()
+        conv1_mod = torch.zeros(c_out, num_channels, h, w)
+        conv1_mod[:, :3, :, :] = conv1_weights
+
+        conv1 = net_encoder.conv1
+        conv1.in_channels = num_channels
+        conv1.weight = torch.nn.Parameter(conv1_mod)
+
+        net_encoder.conv1 = conv1
+
+        net_encoder_sd["conv1.weight"] = conv1_mod
+
+        net_encoder.load_state_dict(net_encoder_sd)
+    return net_encoder
+
+
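A small sketch of the encoder construction; the first convolution is patched so that it accepts the 11-channel FBA input (3 image + 6 transformed trimap + 2 two-channel trimap). Note that only the 'resnet50_GN_WS' and 'resnet50_BN' architectures are handled, despite the 'resnet50_GN' default:
>>> from carvekit.ml.arch.fba_matting.models import build_encoder
>>> encoder = build_encoder(arch="resnet50_GN_WS")
>>> encoder.conv1.in_channels
11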
+
+def fba_fusion(alpha, img, F, B) +
+
+
+
+ +Expand source code + +
def fba_fusion(alpha, img, F, B):
+    F = alpha * img + (1 - alpha**2) * F - alpha * (1 - alpha) * B
+    B = (1 - alpha) * img + (2 * alpha - alpha**2) * B - alpha * (1 - alpha) * F
+
+    F = torch.clamp(F, 0, 1)
+    B = torch.clamp(B, 0, 1)
+    la = 0.1
+    alpha = (alpha * la + torch.sum((img - B) * (F - B), 1, keepdim=True)) / (
+        torch.sum((F - B) * (F - B), 1, keepdim=True) + la
+    )
+    alpha = torch.clamp(alpha, 0, 1)
+    return alpha, F, B
+
+
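A quick sketch of the fusion step on dummy tensors; alpha broadcasts over the three colour channels of F and B, and all three outputs are clamped back to [0, 1]:
>>> import torch
>>> from carvekit.ml.arch.fba_matting.models import fba_fusion
>>> img = torch.rand(1, 3, 64, 64)
>>> alpha, F, B = fba_fusion(torch.rand(1, 1, 64, 64), img, torch.rand(1, 3, 64, 64), torch.rand(1, 3, 64, 64))
>>> bool(alpha.min() >= 0) and bool(alpha.max() <= 1)
True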
+
+def norm(dim, bn=False) +
+
+
+
+ +Expand source code + +
def norm(dim, bn=False):
+    if bn is False:
+        return nn.GroupNorm(32, dim)
+    else:
+        return nn.BatchNorm2d(dim)
+
+
+
+
+
+

Classes

+
+
+class FBA +(encoder: str) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class FBA(nn.Module):
+    def __init__(self, encoder: str):
+        super(FBA, self).__init__()
+        self.encoder = build_encoder(arch=encoder)
+        self.decoder = fba_decoder(batch_norm=True if "BN" in encoder else False)
+
+    def forward(self, image, two_chan_trimap, image_n, trimap_transformed):
+        resnet_input = torch.cat((image_n, trimap_transformed, two_chan_trimap), 1)
+        conv_out, indices = self.encoder(resnet_input, return_feature_maps=True)
+        return self.decoder(conv_out, image, indices, two_chan_trimap)
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Subclasses

+ +

Methods

+
+
+def forward(self, image, two_chan_trimap, image_n, trimap_transformed) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, image, two_chan_trimap, image_n, trimap_transformed):
+    resnet_input = torch.cat((image_n, trimap_transformed, two_chan_trimap), 1)
+    conv_out, indices = self.encoder(resnet_input, return_feature_maps=True)
+    return self.decoder(conv_out, image, indices, two_chan_trimap)
+
+
+
+
+
+class Resnet +(orig_resnet) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Resnet(nn.Module):
+    def __init__(self, orig_resnet):
+        super(Resnet, self).__init__()
+
+        # take pretrained resnet, except AvgPool and FC
+        self.conv1 = orig_resnet.conv1
+        self.bn1 = orig_resnet.bn1
+        self.relu1 = orig_resnet.relu1
+        self.conv2 = orig_resnet.conv2
+        self.bn2 = orig_resnet.bn2
+        self.relu2 = orig_resnet.relu2
+        self.conv3 = orig_resnet.conv3
+        self.bn3 = orig_resnet.bn3
+        self.relu3 = orig_resnet.relu3
+        self.maxpool = orig_resnet.maxpool
+        self.layer1 = orig_resnet.layer1
+        self.layer2 = orig_resnet.layer2
+        self.layer3 = orig_resnet.layer3
+        self.layer4 = orig_resnet.layer4
+
+    def forward(self, x, return_feature_maps=False):
+        conv_out = []
+
+        x = self.relu1(self.bn1(self.conv1(x)))
+        x = self.relu2(self.bn2(self.conv2(x)))
+        x = self.relu3(self.bn3(self.conv3(x)))
+        conv_out.append(x)
+        x, indices = self.maxpool(x)
+
+        x = self.layer1(x)
+        conv_out.append(x)
+        x = self.layer2(x)
+        conv_out.append(x)
+        x = self.layer3(x)
+        conv_out.append(x)
+        x = self.layer4(x)
+        conv_out.append(x)
+
+        if return_feature_maps:
+            return conv_out
+        return [x]
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x, return_feature_maps=False) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x, return_feature_maps=False):
+    conv_out = []
+
+    x = self.relu1(self.bn1(self.conv1(x)))
+    x = self.relu2(self.bn2(self.conv2(x)))
+    x = self.relu3(self.bn3(self.conv3(x)))
+    conv_out.append(x)
+    x, indices = self.maxpool(x)
+
+    x = self.layer1(x)
+    conv_out.append(x)
+    x = self.layer2(x)
+    conv_out.append(x)
+    x = self.layer3(x)
+    conv_out.append(x)
+    x = self.layer4(x)
+    conv_out.append(x)
+
+    if return_feature_maps:
+        return conv_out
+    return [x]
+
+
+
+
+
+class ResnetDilated +(orig_resnet, dilate_scale=8) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class ResnetDilated(nn.Module):
+    def __init__(self, orig_resnet, dilate_scale=8):
+        super(ResnetDilated, self).__init__()
+
+        if dilate_scale == 8:
+            orig_resnet.layer3.apply(partial(self._nostride_dilate, dilate=2))
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=4))
+        elif dilate_scale == 16:
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=2))
+
+        # take pretrained resnet, except AvgPool and FC
+        self.conv1 = orig_resnet.conv1
+        self.bn1 = orig_resnet.bn1
+        self.relu = orig_resnet.relu
+        self.maxpool = orig_resnet.maxpool
+        self.layer1 = orig_resnet.layer1
+        self.layer2 = orig_resnet.layer2
+        self.layer3 = orig_resnet.layer3
+        self.layer4 = orig_resnet.layer4
+
+    def _nostride_dilate(self, m, dilate):
+        classname = m.__class__.__name__
+        if classname.find("Conv") != -1:
+            # the convolution with stride
+            if m.stride == (2, 2):
+                m.stride = (1, 1)
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate // 2, dilate // 2)
+                    m.padding = (dilate // 2, dilate // 2)
+            # other convolutions
+            else:
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate, dilate)
+                    m.padding = (dilate, dilate)
+
+    def forward(self, x, return_feature_maps=False):
+        conv_out = [x]
+        x = self.relu(self.bn1(self.conv1(x)))
+        conv_out.append(x)
+        x, indices = self.maxpool(x)
+        x = self.layer1(x)
+        conv_out.append(x)
+        x = self.layer2(x)
+        conv_out.append(x)
+        x = self.layer3(x)
+        conv_out.append(x)
+        x = self.layer4(x)
+        conv_out.append(x)
+
+        if return_feature_maps:
+            return conv_out, indices
+        return [x]
+
+
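The `_nostride_dilate` hook above is the key trick of this encoder: it rewrites the stride-2 convolutions in `layer3`/`layer4` as stride-1 dilated convolutions, so the backbone keeps a higher output resolution. A minimal, self-contained sketch of the same substitution (illustrative only; it uses a throwaway `nn.Sequential` rather than a real ResNet layer):

``` python
import torch.nn as nn

def nostride_dilate(m, dilate):
    # Free-function mirror of ResnetDilated._nostride_dilate, for illustration.
    if m.__class__.__name__.find("Conv") != -1:
        if m.stride == (2, 2):          # downsampling conv: drop the stride
            m.stride = (1, 1)
            if m.kernel_size == (3, 3):
                m.dilation = (dilate // 2, dilate // 2)
                m.padding = (dilate // 2, dilate // 2)
        elif m.kernel_size == (3, 3):   # regular conv: dilate to keep the receptive field
            m.dilation = (dilate, dilate)
            m.padding = (dilate, dilate)

layer = nn.Sequential(
    nn.Conv2d(64, 64, 3, stride=2, padding=1),
    nn.Conv2d(64, 64, 3, stride=1, padding=1),
)
layer.apply(lambda m: nostride_dilate(m, dilate=2))
# The first conv now has stride (1, 1); the second now has dilation and padding (2, 2).
```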

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x, return_feature_maps=False) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x, return_feature_maps=False):
+    conv_out = [x]
+    x = self.relu(self.bn1(self.conv1(x)))
+    conv_out.append(x)
+    x, indices = self.maxpool(x)
+    x = self.layer1(x)
+    conv_out.append(x)
+    x = self.layer2(x)
+    conv_out.append(x)
+    x = self.layer3(x)
+    conv_out.append(x)
+    x = self.layer4(x)
+    conv_out.append(x)
+
+    if return_feature_maps:
+        return conv_out, indices
+    return [x]
+
+
+
+
+
+class ResnetDilatedBN +(orig_resnet, dilate_scale=8) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class ResnetDilatedBN(nn.Module):
+    def __init__(self, orig_resnet, dilate_scale=8):
+        super(ResnetDilatedBN, self).__init__()
+
+        if dilate_scale == 8:
+            orig_resnet.layer3.apply(partial(self._nostride_dilate, dilate=2))
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=4))
+        elif dilate_scale == 16:
+            orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=2))
+
+        # take pretrained resnet, except AvgPool and FC
+        self.conv1 = orig_resnet.conv1
+        self.bn1 = orig_resnet.bn1
+        self.relu1 = orig_resnet.relu1
+        self.conv2 = orig_resnet.conv2
+        self.bn2 = orig_resnet.bn2
+        self.relu2 = orig_resnet.relu2
+        self.conv3 = orig_resnet.conv3
+        self.bn3 = orig_resnet.bn3
+        self.relu3 = orig_resnet.relu3
+        self.maxpool = orig_resnet.maxpool
+        self.layer1 = orig_resnet.layer1
+        self.layer2 = orig_resnet.layer2
+        self.layer3 = orig_resnet.layer3
+        self.layer4 = orig_resnet.layer4
+
+    def _nostride_dilate(self, m, dilate):
+        classname = m.__class__.__name__
+        if classname.find("Conv") != -1:
+            # the convolution with stride
+            if m.stride == (2, 2):
+                m.stride = (1, 1)
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate // 2, dilate // 2)
+                    m.padding = (dilate // 2, dilate // 2)
+            # other convolutions
+            else:
+                if m.kernel_size == (3, 3):
+                    m.dilation = (dilate, dilate)
+                    m.padding = (dilate, dilate)
+
+    def forward(self, x, return_feature_maps=False):
+        conv_out = [x]
+        x = self.relu1(self.bn1(self.conv1(x)))
+        x = self.relu2(self.bn2(self.conv2(x)))
+        x = self.relu3(self.bn3(self.conv3(x)))
+        conv_out.append(x)
+        x, indices = self.maxpool(x)
+        x = self.layer1(x)
+        conv_out.append(x)
+        x = self.layer2(x)
+        conv_out.append(x)
+        x = self.layer3(x)
+        conv_out.append(x)
+        x = self.layer4(x)
+        conv_out.append(x)
+
+        if return_feature_maps:
+            return conv_out, indices
+        return [x]
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x, return_feature_maps=False) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x, return_feature_maps=False):
+    conv_out = [x]
+    x = self.relu1(self.bn1(self.conv1(x)))
+    x = self.relu2(self.bn2(self.conv2(x)))
+    x = self.relu3(self.bn3(self.conv3(x)))
+    conv_out.append(x)
+    x, indices = self.maxpool(x)
+    x = self.layer1(x)
+    conv_out.append(x)
+    x = self.layer2(x)
+    conv_out.append(x)
+    x = self.layer3(x)
+    conv_out.append(x)
+    x = self.layer4(x)
+    conv_out.append(x)
+
+    if return_feature_maps:
+        return conv_out, indices
+    return [x]
+
+
+
+
+
+class fba_decoder +(batch_norm=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class fba_decoder(nn.Module):
+    def __init__(self, batch_norm=False):
+        super(fba_decoder, self).__init__()
+        pool_scales = (1, 2, 3, 6)
+        self.batch_norm = batch_norm
+
+        self.ppm = []
+
+        for scale in pool_scales:
+            self.ppm.append(
+                nn.Sequential(
+                    nn.AdaptiveAvgPool2d(scale),
+                    L.Conv2d(2048, 256, kernel_size=1, bias=True),
+                    norm(256, self.batch_norm),
+                    nn.LeakyReLU(),
+                )
+            )
+        self.ppm = nn.ModuleList(self.ppm)
+
+        self.conv_up1 = nn.Sequential(
+            L.Conv2d(
+                2048 + len(pool_scales) * 256, 256, kernel_size=3, padding=1, bias=True
+            ),
+            norm(256, self.batch_norm),
+            nn.LeakyReLU(),
+            L.Conv2d(256, 256, kernel_size=3, padding=1),
+            norm(256, self.batch_norm),
+            nn.LeakyReLU(),
+        )
+
+        self.conv_up2 = nn.Sequential(
+            L.Conv2d(256 + 256, 256, kernel_size=3, padding=1, bias=True),
+            norm(256, self.batch_norm),
+            nn.LeakyReLU(),
+        )
+        if self.batch_norm:
+            d_up3 = 128
+        else:
+            d_up3 = 64
+        self.conv_up3 = nn.Sequential(
+            L.Conv2d(256 + d_up3, 64, kernel_size=3, padding=1, bias=True),
+            norm(64, self.batch_norm),
+            nn.LeakyReLU(),
+        )
+
+        self.unpool = nn.MaxUnpool2d(2, stride=2)
+
+        self.conv_up4 = nn.Sequential(
+            nn.Conv2d(64 + 3 + 3 + 2, 32, kernel_size=3, padding=1, bias=True),
+            nn.LeakyReLU(),
+            nn.Conv2d(32, 16, kernel_size=3, padding=1, bias=True),
+            nn.LeakyReLU(),
+            nn.Conv2d(16, 7, kernel_size=1, padding=0, bias=True),
+        )
+
+    def forward(self, conv_out, img, indices, two_chan_trimap):
+        conv5 = conv_out[-1]
+
+        input_size = conv5.size()
+        ppm_out = [conv5]
+        for pool_scale in self.ppm:
+            ppm_out.append(
+                nn.functional.interpolate(
+                    pool_scale(conv5),
+                    (input_size[2], input_size[3]),
+                    mode="bilinear",
+                    align_corners=False,
+                )
+            )
+        ppm_out = torch.cat(ppm_out, 1)
+        x = self.conv_up1(ppm_out)
+
+        x = torch.nn.functional.interpolate(
+            x, scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+        x = torch.cat((x, conv_out[-4]), 1)
+
+        x = self.conv_up2(x)
+        x = torch.nn.functional.interpolate(
+            x, scale_factor=2, mode="bilinear", align_corners=False
+        )
+
+        x = torch.cat((x, conv_out[-5]), 1)
+        x = self.conv_up3(x)
+
+        x = torch.nn.functional.interpolate(
+            x, scale_factor=2, mode="bilinear", align_corners=False
+        )
+        x = torch.cat((x, conv_out[-6][:, :3], img, two_chan_trimap), 1)
+
+        output = self.conv_up4(x)
+
+        alpha = torch.clamp(output[:, 0][:, None], 0, 1)
+        F = torch.sigmoid(output[:, 1:4])
+        B = torch.sigmoid(output[:, 4:7])
+
+        # FBA Fusion
+        alpha, F, B = fba_fusion(alpha, img, F, B)
+
+        output = torch.cat((alpha, F, B), 1)
+
+        return output
+
+
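The final `torch.cat((alpha, F, B), 1)` above fixes the layout of the decoder output: seven channels per pixel. A small sketch of how a caller would split such a tensor (the random tensor below is only a stand-in for a real `fba_decoder.forward(...)` result):

``` python
import torch

output = torch.rand(1, 7, 256, 256)  # stand-in for a decoder forward() result
alpha = output[:, 0:1]  # (B, 1, H, W) alpha matte, clamped to [0, 1] inside the decoder
fg = output[:, 1:4]     # (B, 3, H, W) sigmoid-activated foreground estimate
bg = output[:, 4:7]     # (B, 3, H, W) sigmoid-activated background estimate
```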

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, conv_out, img, indices, two_chan_trimap) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, conv_out, img, indices, two_chan_trimap):
+    conv5 = conv_out[-1]
+
+    input_size = conv5.size()
+    ppm_out = [conv5]
+    for pool_scale in self.ppm:
+        ppm_out.append(
+            nn.functional.interpolate(
+                pool_scale(conv5),
+                (input_size[2], input_size[3]),
+                mode="bilinear",
+                align_corners=False,
+            )
+        )
+    ppm_out = torch.cat(ppm_out, 1)
+    x = self.conv_up1(ppm_out)
+
+    x = torch.nn.functional.interpolate(
+        x, scale_factor=2, mode="bilinear", align_corners=False
+    )
+
+    x = torch.cat((x, conv_out[-4]), 1)
+
+    x = self.conv_up2(x)
+    x = torch.nn.functional.interpolate(
+        x, scale_factor=2, mode="bilinear", align_corners=False
+    )
+
+    x = torch.cat((x, conv_out[-5]), 1)
+    x = self.conv_up3(x)
+
+    x = torch.nn.functional.interpolate(
+        x, scale_factor=2, mode="bilinear", align_corners=False
+    )
+    x = torch.cat((x, conv_out[-6][:, :3], img, two_chan_trimap), 1)
+
+    output = self.conv_up4(x)
+
+    alpha = torch.clamp(output[:, 0][:, None], 0, 1)
+    F = torch.sigmoid(output[:, 1:4])
+    B = torch.sigmoid(output[:, 4:7])
+
+    # FBA Fusion
+    alpha, F, B = fba_fusion(alpha, img, F, B)
+
+    output = torch.cat((alpha, F, B), 1)
+
+    return output
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/fba_matting/resnet_GN_WS.html b/docs/api/carvekit/ml/arch/fba_matting/resnet_GN_WS.html new file mode 100644 index 0000000..3c25b9b --- /dev/null +++ b/docs/api/carvekit/ml/arch/fba_matting/resnet_GN_WS.html @@ -0,0 +1,388 @@ + + + + + + +carvekit.ml.arch.fba_matting.resnet_GN_WS API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.fba_matting.resnet_GN_WS

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+ +Expand source code + +
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/MarcoForte/FBA_Matting
+License: MIT License
+"""
+import torch.nn as nn
+import carvekit.ml.arch.fba_matting.layers_WS as L
+
+__all__ = ["ResNet", "l_resnet50"]
+
+
+def conv3x3(in_planes, out_planes, stride=1):
+    """3x3 convolution with padding"""
+    return L.Conv2d(
+        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False
+    )
+
+
+def conv1x1(in_planes, out_planes, stride=1):
+    """1x1 convolution"""
+    return L.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
+
+
+class BasicBlock(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = L.BatchNorm2d(planes)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = L.BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        identity = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            identity = self.downsample(x)
+
+        out += identity
+        out = self.relu(out)
+
+        return out
+
+
+class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(Bottleneck, self).__init__()
+        self.conv1 = conv1x1(inplanes, planes)
+        self.bn1 = L.BatchNorm2d(planes)
+        self.conv2 = conv3x3(planes, planes, stride)
+        self.bn2 = L.BatchNorm2d(planes)
+        self.conv3 = conv1x1(planes, planes * self.expansion)
+        self.bn3 = L.BatchNorm2d(planes * self.expansion)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        identity = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            identity = self.downsample(x)
+
+        out += identity
+        out = self.relu(out)
+
+        return out
+
+
+class ResNet(nn.Module):
+    def __init__(self, block, layers, num_classes=1000):
+        super(ResNet, self).__init__()
+        self.inplanes = 64
+        self.conv1 = L.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
+        self.bn1 = L.BatchNorm2d(64)
+        self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(
+            kernel_size=3, stride=2, padding=1, return_indices=True
+        )
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
+        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
+        self.fc = nn.Linear(512 * block.expansion, num_classes)
+
+    def _make_layer(self, block, planes, blocks, stride=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                conv1x1(self.inplanes, planes * block.expansion, stride),
+                L.BatchNorm2d(planes * block.expansion),
+            )
+
+        layers = []
+        layers.append(block(self.inplanes, planes, stride, downsample))
+        self.inplanes = planes * block.expansion
+        for _ in range(1, blocks):
+            layers.append(block(self.inplanes, planes))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.bn1(x)
+        x = self.relu(x)
+        x = self.maxpool(x)
+
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        x = self.fc(x)
+
+        return x
+
+
+def l_resnet50(pretrained=False, **kwargs):
+    """Constructs a ResNet-50 model.
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
+    return model
+
+
+
+
+
+
+
+

Functions

+
+
+def l_resnet50(pretrained=False, **kwargs) +
+
+

Constructs a ResNet-50 model.

+

Args

+
+
pretrained : bool
+
If True, returns a model pre-trained on ImageNet
+
+
+ +Expand source code + +
def l_resnet50(pretrained=False, **kwargs):
+    """Constructs a ResNet-50 model.
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
+    return model
+
+
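A minimal usage sketch (not part of the generated documentation; it assumes carvekit is importable in the current environment). Note that `pretrained` is accepted but the function as written does not load any weights:

``` python
from carvekit.ml.arch.fba_matting.resnet_GN_WS import l_resnet50

backbone = l_resnet50()  # randomly initialized; the `pretrained` flag is currently ignored
n_params = sum(p.numel() for p in backbone.parameters())
print(f"ResNet-50 (weight-standardized convolutions): {n_params} parameters")
```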
+
+
+
+

Classes

+
+
+class ResNet +(block, layers, num_classes=1000) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class ResNet(nn.Module):
+    def __init__(self, block, layers, num_classes=1000):
+        super(ResNet, self).__init__()
+        self.inplanes = 64
+        self.conv1 = L.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
+        self.bn1 = L.BatchNorm2d(64)
+        self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(
+            kernel_size=3, stride=2, padding=1, return_indices=True
+        )
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
+        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
+        self.fc = nn.Linear(512 * block.expansion, num_classes)
+
+    def _make_layer(self, block, planes, blocks, stride=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                conv1x1(self.inplanes, planes * block.expansion, stride),
+                L.BatchNorm2d(planes * block.expansion),
+            )
+
+        layers = []
+        layers.append(block(self.inplanes, planes, stride, downsample))
+        self.inplanes = planes * block.expansion
+        for _ in range(1, blocks):
+            layers.append(block(self.inplanes, planes))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.bn1(x)
+        x = self.relu(x)
+        x = self.maxpool(x)
+
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        x = self.fc(x)
+
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.conv1(x)
+    x = self.bn1(x)
+    x = self.relu(x)
+    x = self.maxpool(x)
+
+    x = self.layer1(x)
+    x = self.layer2(x)
+    x = self.layer3(x)
+    x = self.layer4(x)
+
+    x = self.avgpool(x)
+    x = x.view(x.size(0), -1)
+    x = self.fc(x)
+
+    return x
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/fba_matting/resnet_bn.html b/docs/api/carvekit/ml/arch/fba_matting/resnet_bn.html new file mode 100644 index 0000000..cb43b4c --- /dev/null +++ b/docs/api/carvekit/ml/arch/fba_matting/resnet_bn.html @@ -0,0 +1,394 @@ + + + + + + +carvekit.ml.arch.fba_matting.resnet_bn API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.fba_matting.resnet_bn

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+ +Expand source code + +
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/MarcoForte/FBA_Matting
+License: MIT License
+"""
+import torch.nn as nn
+import math
+from torch.nn import BatchNorm2d
+
+__all__ = ["ResNet"]
+
+
+def conv3x3(in_planes, out_planes, stride=1):
+    "3x3 convolution with padding"
+    return nn.Conv2d(
+        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False
+    )
+
+
+class BasicBlock(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = BatchNorm2d(planes)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.bn1 = BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(
+            planes, planes, kernel_size=3, stride=stride, padding=1, bias=False
+        )
+        self.bn2 = BatchNorm2d(planes, momentum=0.01)
+        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+        self.bn3 = BatchNorm2d(planes * 4)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class ResNet(nn.Module):
+    def __init__(self, block, layers, num_classes=1000):
+        self.inplanes = 128
+        super(ResNet, self).__init__()
+        self.conv1 = conv3x3(3, 64, stride=2)
+        self.bn1 = BatchNorm2d(64)
+        self.relu1 = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(64, 64)
+        self.bn2 = BatchNorm2d(64)
+        self.relu2 = nn.ReLU(inplace=True)
+        self.conv3 = conv3x3(64, 128)
+        self.bn3 = BatchNorm2d(128)
+        self.relu3 = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(
+            kernel_size=3, stride=2, padding=1, return_indices=True
+        )
+
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
+        self.avgpool = nn.AvgPool2d(7, stride=1)
+        self.fc = nn.Linear(512 * block.expansion, num_classes)
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+                m.weight.data.normal_(0, math.sqrt(2.0 / n))
+            elif isinstance(m, BatchNorm2d):
+                m.weight.data.fill_(1)
+                m.bias.data.zero_()
+
+    def _make_layer(self, block, planes, blocks, stride=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                nn.Conv2d(
+                    self.inplanes,
+                    planes * block.expansion,
+                    kernel_size=1,
+                    stride=stride,
+                    bias=False,
+                ),
+                BatchNorm2d(planes * block.expansion),
+            )
+
+        layers = []
+        layers.append(block(self.inplanes, planes, stride, downsample))
+        self.inplanes = planes * block.expansion
+        for i in range(1, blocks):
+            layers.append(block(self.inplanes, planes))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x = self.relu1(self.bn1(self.conv1(x)))
+        x = self.relu2(self.bn2(self.conv2(x)))
+        x = self.relu3(self.bn3(self.conv3(x)))
+        x, indices = self.maxpool(x)
+
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        x = self.fc(x)
+        return x
+
+
+def l_resnet50():
+    """Constructs a ResNet-50 model.
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(Bottleneck, [3, 4, 6, 3])
+    return model
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class ResNet +(block, layers, num_classes=1000) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class ResNet(nn.Module):
+    def __init__(self, block, layers, num_classes=1000):
+        self.inplanes = 128
+        super(ResNet, self).__init__()
+        self.conv1 = conv3x3(3, 64, stride=2)
+        self.bn1 = BatchNorm2d(64)
+        self.relu1 = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(64, 64)
+        self.bn2 = BatchNorm2d(64)
+        self.relu2 = nn.ReLU(inplace=True)
+        self.conv3 = conv3x3(64, 128)
+        self.bn3 = BatchNorm2d(128)
+        self.relu3 = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(
+            kernel_size=3, stride=2, padding=1, return_indices=True
+        )
+
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
+        self.avgpool = nn.AvgPool2d(7, stride=1)
+        self.fc = nn.Linear(512 * block.expansion, num_classes)
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+                m.weight.data.normal_(0, math.sqrt(2.0 / n))
+            elif isinstance(m, BatchNorm2d):
+                m.weight.data.fill_(1)
+                m.bias.data.zero_()
+
+    def _make_layer(self, block, planes, blocks, stride=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                nn.Conv2d(
+                    self.inplanes,
+                    planes * block.expansion,
+                    kernel_size=1,
+                    stride=stride,
+                    bias=False,
+                ),
+                BatchNorm2d(planes * block.expansion),
+            )
+
+        layers = []
+        layers.append(block(self.inplanes, planes, stride, downsample))
+        self.inplanes = planes * block.expansion
+        for i in range(1, blocks):
+            layers.append(block(self.inplanes, planes))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x = self.relu1(self.bn1(self.conv1(x)))
+        x = self.relu2(self.bn2(self.conv2(x)))
+        x = self.relu3(self.bn3(self.conv3(x)))
+        x, indices = self.maxpool(x)
+
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        x = self.fc(x)
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.relu1(self.bn1(self.conv1(x)))
+    x = self.relu2(self.bn2(self.conv2(x)))
+    x = self.relu3(self.bn3(self.conv3(x)))
+    x, indices = self.maxpool(x)
+
+    x = self.layer1(x)
+    x = self.layer2(x)
+    x = self.layer3(x)
+    x = self.layer4(x)
+
+    x = self.avgpool(x)
+    x = x.view(x.size(0), -1)
+    x = self.fc(x)
+    return x
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/fba_matting/transforms.html b/docs/api/carvekit/ml/arch/fba_matting/transforms.html new file mode 100644 index 0000000..55fa4d2 --- /dev/null +++ b/docs/api/carvekit/ml/arch/fba_matting/transforms.html @@ -0,0 +1,180 @@ + + + + + + +carvekit.ml.arch.fba_matting.transforms API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.fba_matting.transforms

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/MarcoForte/FBA_Matting +License: MIT License

+
+ +Expand source code + +
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/MarcoForte/FBA_Matting
+License: MIT License
+"""
+import cv2
+import numpy as np
+
+group_norm_std = [0.229, 0.224, 0.225]
+group_norm_mean = [0.485, 0.456, 0.406]
+
+
+def dt(a):
+    return cv2.distanceTransform((a * 255).astype(np.uint8), cv2.DIST_L2, 0)
+
+
+def trimap_transform(trimap):
+    h, w = trimap.shape[0], trimap.shape[1]
+
+    clicks = np.zeros((h, w, 6))
+    for k in range(2):
+        if np.count_nonzero(trimap[:, :, k]) > 0:
+            dt_mask = -dt(1 - trimap[:, :, k]) ** 2
+            L = 320
+            clicks[:, :, 3 * k] = np.exp(dt_mask / (2 * ((0.02 * L) ** 2)))
+            clicks[:, :, 3 * k + 1] = np.exp(dt_mask / (2 * ((0.08 * L) ** 2)))
+            clicks[:, :, 3 * k + 2] = np.exp(dt_mask / (2 * ((0.16 * L) ** 2)))
+
+    return clicks
+
+
+def groupnorm_normalise_image(img, format="nhwc"):
+    """
+    Accepts an RGB image with values in the range [0, 1].
+    """
+    if format == "nhwc":
+        for i in range(3):
+            img[..., i] = (img[..., i] - group_norm_mean[i]) / group_norm_std[i]
+    else:
+        for i in range(3):
+            img[..., i, :, :] = (
+                img[..., i, :, :] - group_norm_mean[i]
+            ) / group_norm_std[i]
+
+    return img
+
+
+
+
+
+
+
+

Functions

+
+
+def dt(a) +
+
+
+
+ +Expand source code + +
def dt(a):
+    return cv2.distanceTransform((a * 255).astype(np.uint8), cv2.DIST_L2, 0)
+
+
+
+def groupnorm_normalise_image(img, format='nhwc') +
+
+

Accepts an RGB image with values in the range [0, 1].

+
+ +Expand source code + +
def groupnorm_normalise_image(img, format="nhwc"):
+    """
+    Accepts an RGB image with values in the range [0, 1].
+    """
+    if format == "nhwc":
+        for i in range(3):
+            img[..., i] = (img[..., i] - group_norm_mean[i]) / group_norm_std[i]
+    else:
+        for i in range(3):
+            img[..., i, :, :] = (
+                img[..., i, :, :] - group_norm_mean[i]
+            ) / group_norm_std[i]
+
+    return img
+
+
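A short usage sketch (assumes carvekit and numpy are importable). The function normalises in place with the ImageNet mean/std defined at module level, so copy the array first if the original is still needed:

``` python
import numpy as np
from carvekit.ml.arch.fba_matting.transforms import groupnorm_normalise_image

img = np.random.rand(1, 256, 256, 3).astype(np.float32)          # NHWC, RGB in [0, 1]
img_norm = groupnorm_normalise_image(img.copy(), format="nhwc")  # per-channel (x - mean) / std
```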
+
+def trimap_transform(trimap) +
+
+
+
+ +Expand source code + +
def trimap_transform(trimap):
+    h, w = trimap.shape[0], trimap.shape[1]
+
+    clicks = np.zeros((h, w, 6))
+    for k in range(2):
+        if np.count_nonzero(trimap[:, :, k]) > 0:
+            dt_mask = -dt(1 - trimap[:, :, k]) ** 2
+            L = 320
+            clicks[:, :, 3 * k] = np.exp(dt_mask / (2 * ((0.02 * L) ** 2)))
+            clicks[:, :, 3 * k + 1] = np.exp(dt_mask / (2 * ((0.08 * L) ** 2)))
+            clicks[:, :, 3 * k + 2] = np.exp(dt_mask / (2 * ((0.16 * L) ** 2)))
+
+    return clicks
+
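A short sketch of the expected input and output shapes (assumes carvekit, numpy and OpenCV are installed). `trimap_transform` expands a two-channel certainty mask into six Gaussian-weighted distance maps, three per channel, which FBA Matting consumes as extra input features:

``` python
import numpy as np
from carvekit.ml.arch.fba_matting.transforms import trimap_transform

trimap = np.zeros((256, 256, 2), dtype=np.float32)  # one channel per known region, values in {0, 1}
trimap[:64, :64, 1] = 1.0
trimap[-64:, -64:, 0] = 1.0

clicks = trimap_transform(trimap)
print(clicks.shape)  # (256, 256, 6)
```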
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/index.html b/docs/api/carvekit/ml/arch/index.html new file mode 100644 index 0000000..a336cab --- /dev/null +++ b/docs/api/carvekit/ml/arch/index.html @@ -0,0 +1,90 @@ + + + + + + +carvekit.ml.arch API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.basnet
+
+
+
+
carvekit.ml.arch.cascadepsp
+
+
+
+
carvekit.ml.arch.fba_matting
+
+
+
+
carvekit.ml.arch.tracerb7
+
+
+
+
carvekit.ml.arch.u2net
+
+
+
+
carvekit.ml.arch.yolov4
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/tracerb7/att_modules.html b/docs/api/carvekit/ml/arch/tracerb7/att_modules.html new file mode 100644 index 0000000..38c59fd --- /dev/null +++ b/docs/api/carvekit/ml/arch/tracerb7/att_modules.html @@ -0,0 +1,1126 @@ + + + + + + +carvekit.ml.arch.tracerb7.att_modules API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.tracerb7.att_modules

+
+
+

Source url: https://github.com/Karel911/TRACER +Author: Min Seok Lee and Wooseok Shin +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/Karel911/TRACER
+Author: Min Seok Lee and Wooseok Shin
+License: Apache License 2.0
+"""
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from carvekit.ml.arch.tracerb7.conv_modules import BasicConv2d, DWConv, DWSConv
+
+
+class RFB_Block(nn.Module):
+    def __init__(self, in_channel, out_channel):
+        super(RFB_Block, self).__init__()
+        self.relu = nn.ReLU(True)
+        self.branch0 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+        )
+        self.branch1 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+            BasicConv2d(out_channel, out_channel, kernel_size=(1, 3), padding=(0, 1)),
+            BasicConv2d(out_channel, out_channel, kernel_size=(3, 1), padding=(1, 0)),
+            BasicConv2d(out_channel, out_channel, 3, padding=3, dilation=3),
+        )
+        self.branch2 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+            BasicConv2d(out_channel, out_channel, kernel_size=(1, 5), padding=(0, 2)),
+            BasicConv2d(out_channel, out_channel, kernel_size=(5, 1), padding=(2, 0)),
+            BasicConv2d(out_channel, out_channel, 3, padding=5, dilation=5),
+        )
+        self.branch3 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+            BasicConv2d(out_channel, out_channel, kernel_size=(1, 7), padding=(0, 3)),
+            BasicConv2d(out_channel, out_channel, kernel_size=(7, 1), padding=(3, 0)),
+            BasicConv2d(out_channel, out_channel, 3, padding=7, dilation=7),
+        )
+        self.conv_cat = BasicConv2d(4 * out_channel, out_channel, 3, padding=1)
+        self.conv_res = BasicConv2d(in_channel, out_channel, 1)
+
+    def forward(self, x):
+        x0 = self.branch0(x)
+        x1 = self.branch1(x)
+        x2 = self.branch2(x)
+        x3 = self.branch3(x)
+        x_cat = torch.cat((x0, x1, x2, x3), 1)
+        x_cat = self.conv_cat(x_cat)
+
+        x = self.relu(x_cat + self.conv_res(x))
+        return x
+
+
+class GlobalAvgPool(nn.Module):
+    def __init__(self, flatten=False):
+        super(GlobalAvgPool, self).__init__()
+        self.flatten = flatten
+
+    def forward(self, x):
+        if self.flatten:
+            in_size = x.size()
+            return x.view((in_size[0], in_size[1], -1)).mean(dim=2)
+        else:
+            return (
+                x.view(x.size(0), x.size(1), -1)
+                .mean(-1)
+                .view(x.size(0), x.size(1), 1, 1)
+            )
+
+
+class UnionAttentionModule(nn.Module):
+    def __init__(self, n_channels, only_channel_tracing=False):
+        super(UnionAttentionModule, self).__init__()
+        self.GAP = GlobalAvgPool()
+        self.confidence_ratio = 0.1
+        self.bn = nn.BatchNorm2d(n_channels)
+        self.norm = nn.Sequential(
+            nn.BatchNorm2d(n_channels), nn.Dropout3d(self.confidence_ratio)
+        )
+        self.channel_q = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+        self.channel_k = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+        self.channel_v = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+
+        self.fc = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+
+        if only_channel_tracing is False:
+            self.spatial_q = nn.Conv2d(
+                in_channels=n_channels,
+                out_channels=1,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                bias=False,
+            )
+            self.spatial_k = nn.Conv2d(
+                in_channels=n_channels,
+                out_channels=1,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                bias=False,
+            )
+            self.spatial_v = nn.Conv2d(
+                in_channels=n_channels,
+                out_channels=1,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                bias=False,
+            )
+        self.sigmoid = nn.Sigmoid()
+
+    def masking(self, x, mask):
+        mask = mask.squeeze(3).squeeze(2)
+        threshold = torch.quantile(
+            mask.float(), self.confidence_ratio, dim=-1, keepdim=True
+        )
+        mask[mask <= threshold] = 0.0
+        mask = mask.unsqueeze(2).unsqueeze(3)
+        mask = mask.expand(-1, x.shape[1], x.shape[2], x.shape[3]).contiguous()
+        masked_x = x * mask
+
+        return masked_x
+
+    def Channel_Tracer(self, x):
+        avg_pool = self.GAP(x)
+        x_norm = self.norm(avg_pool)
+
+        q = self.channel_q(x_norm).squeeze(-1)
+        k = self.channel_k(x_norm).squeeze(-1)
+        v = self.channel_v(x_norm).squeeze(-1)
+
+        # softmax(Q*K^T)
+        QK_T = torch.matmul(q, k.transpose(1, 2))
+        alpha = F.softmax(QK_T, dim=-1)
+
+        # a*v
+        att = torch.matmul(alpha, v).unsqueeze(-1)
+        att = self.fc(att)
+        att = self.sigmoid(att)
+
+        output = (x * att) + x
+        alpha_mask = att.clone()
+
+        return output, alpha_mask
+
+    def forward(self, x):
+        X_c, alpha_mask = self.Channel_Tracer(x)
+        X_c = self.bn(X_c)
+        x_drop = self.masking(X_c, alpha_mask)
+
+        q = self.spatial_q(x_drop).squeeze(1)
+        k = self.spatial_k(x_drop).squeeze(1)
+        v = self.spatial_v(x_drop).squeeze(1)
+
+        # softmax(Q*K^T)
+        QK_T = torch.matmul(q, k.transpose(1, 2))
+        alpha = F.softmax(QK_T, dim=-1)
+
+        output = torch.matmul(alpha, v).unsqueeze(1) + v.unsqueeze(1)
+
+        return output
+
+
+class aggregation(nn.Module):
+    def __init__(self, channel):
+        super(aggregation, self).__init__()
+        self.relu = nn.ReLU(True)
+
+        self.upsample = nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True)
+        self.conv_upsample1 = BasicConv2d(channel[2], channel[1], 3, padding=1)
+        self.conv_upsample2 = BasicConv2d(channel[2], channel[0], 3, padding=1)
+        self.conv_upsample3 = BasicConv2d(channel[1], channel[0], 3, padding=1)
+        self.conv_upsample4 = BasicConv2d(channel[2], channel[2], 3, padding=1)
+        self.conv_upsample5 = BasicConv2d(
+            channel[2] + channel[1], channel[2] + channel[1], 3, padding=1
+        )
+
+        self.conv_concat2 = BasicConv2d(
+            (channel[2] + channel[1]), (channel[2] + channel[1]), 3, padding=1
+        )
+        self.conv_concat3 = BasicConv2d(
+            (channel[0] + channel[1] + channel[2]),
+            (channel[0] + channel[1] + channel[2]),
+            3,
+            padding=1,
+        )
+
+        self.UAM = UnionAttentionModule(channel[0] + channel[1] + channel[2])
+
+    def forward(self, e4, e3, e2):
+        e4_1 = e4
+        e3_1 = self.conv_upsample1(self.upsample(e4)) * e3
+        e2_1 = (
+            self.conv_upsample2(self.upsample(self.upsample(e4)))
+            * self.conv_upsample3(self.upsample(e3))
+            * e2
+        )
+
+        e3_2 = torch.cat((e3_1, self.conv_upsample4(self.upsample(e4_1))), 1)
+        e3_2 = self.conv_concat2(e3_2)
+
+        e2_2 = torch.cat((e2_1, self.conv_upsample5(self.upsample(e3_2))), 1)
+        x = self.conv_concat3(e2_2)
+
+        output = self.UAM(x)
+
+        return output
+
+
+class ObjectAttention(nn.Module):
+    def __init__(self, channel, kernel_size):
+        super(ObjectAttention, self).__init__()
+        self.channel = channel
+        self.DWSConv = DWSConv(
+            channel, channel // 2, kernel=kernel_size, padding=1, kernels_per_layer=1
+        )
+        self.DWConv1 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=1, padding=0, dilation=1),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.DWConv2 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=3, padding=1, dilation=1),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.DWConv3 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=3, padding=3, dilation=3),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.DWConv4 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=3, padding=5, dilation=5),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.conv1 = BasicConv2d(channel // 2, 1, 1)
+
+    def forward(self, decoder_map, encoder_map):
+        """
+        Args:
+            decoder_map: decoder representation (B, 1, H, W).
+            encoder_map: encoder block output (B, C, H, W).
+        Returns:
+            decoder representation: (B, 1, H, W)
+        """
+        mask_bg = -1 * torch.sigmoid(decoder_map) + 1  # Sigmoid & Reverse
+        mask_ob = torch.sigmoid(decoder_map)  # object attention
+        x = mask_ob.expand(-1, self.channel, -1, -1).mul(encoder_map)
+
+        edge = mask_bg.clone()
+        edge[edge > 0.93] = 0
+        x = x + (edge * encoder_map)
+
+        x = self.DWSConv(x)
+        skip = x.clone()
+        x = (
+            torch.cat(
+                [self.DWConv1(x), self.DWConv2(x), self.DWConv3(x), self.DWConv4(x)],
+                dim=1,
+            )
+            + skip
+        )
+        x = torch.relu(self.conv1(x))
+
+        return x + decoder_map
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class GlobalAvgPool +(flatten=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class GlobalAvgPool(nn.Module):
+    def __init__(self, flatten=False):
+        super(GlobalAvgPool, self).__init__()
+        self.flatten = flatten
+
+    def forward(self, x):
+        if self.flatten:
+            in_size = x.size()
+            return x.view((in_size[0], in_size[1], -1)).mean(dim=2)
+        else:
+            return (
+                x.view(x.size(0), x.size(1), -1)
+                .mean(-1)
+                .view(x.size(0), x.size(1), 1, 1)
+            )
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    if self.flatten:
+        in_size = x.size()
+        return x.view((in_size[0], in_size[1], -1)).mean(dim=2)
+    else:
+        return (
+            x.view(x.size(0), x.size(1), -1)
+            .mean(-1)
+            .view(x.size(0), x.size(1), 1, 1)
+        )
+
+
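A quick shape check for the two modes of `GlobalAvgPool` (assumes carvekit and torch are importable):

``` python
import torch
from carvekit.ml.arch.tracerb7.att_modules import GlobalAvgPool

x = torch.rand(2, 16, 8, 8)
print(GlobalAvgPool(flatten=True)(x).shape)   # torch.Size([2, 16])
print(GlobalAvgPool(flatten=False)(x).shape)  # torch.Size([2, 16, 1, 1])
```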
+
+
+
+class ObjectAttention +(channel, kernel_size) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class ObjectAttention(nn.Module):
+    def __init__(self, channel, kernel_size):
+        super(ObjectAttention, self).__init__()
+        self.channel = channel
+        self.DWSConv = DWSConv(
+            channel, channel // 2, kernel=kernel_size, padding=1, kernels_per_layer=1
+        )
+        self.DWConv1 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=1, padding=0, dilation=1),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.DWConv2 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=3, padding=1, dilation=1),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.DWConv3 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=3, padding=3, dilation=3),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.DWConv4 = nn.Sequential(
+            DWConv(channel // 2, channel // 2, kernel=3, padding=5, dilation=5),
+            BasicConv2d(channel // 2, channel // 8, 1),
+        )
+        self.conv1 = BasicConv2d(channel // 2, 1, 1)
+
+    def forward(self, decoder_map, encoder_map):
+        """
+        Args:
+            decoder_map: decoder representation (B, 1, H, W).
+            encoder_map: encoder block output (B, C, H, W).
+        Returns:
+            decoder representation: (B, 1, H, W)
+        """
+        mask_bg = -1 * torch.sigmoid(decoder_map) + 1  # Sigmoid & Reverse
+        mask_ob = torch.sigmoid(decoder_map)  # object attention
+        x = mask_ob.expand(-1, self.channel, -1, -1).mul(encoder_map)
+
+        edge = mask_bg.clone()
+        edge[edge > 0.93] = 0
+        x = x + (edge * encoder_map)
+
+        x = self.DWSConv(x)
+        skip = x.clone()
+        x = (
+            torch.cat(
+                [self.DWConv1(x), self.DWConv2(x), self.DWConv3(x), self.DWConv4(x)],
+                dim=1,
+            )
+            + skip
+        )
+        x = torch.relu(self.conv1(x))
+
+        return x + decoder_map
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, decoder_map, encoder_map) -> Callable[..., Any] +
+
+

Args

+
+
decoder_map
+
decoder representation (B, 1, H, W).
+
encoder_map
+
encoder block output (B, C, H, W).
+
+

Returns

+
+
decoder representation
+
(B, 1, H, W)
+
+
+ +Expand source code + +
def forward(self, decoder_map, encoder_map):
+    """
+    Args:
+        decoder_map: decoder representation (B, 1, H, W).
+        encoder_map: encoder block output (B, C, H, W).
+    Returns:
+        decoder representation: (B, 1, H, W)
+    """
+    mask_bg = -1 * torch.sigmoid(decoder_map) + 1  # Sigmoid & Reverse
+    mask_ob = torch.sigmoid(decoder_map)  # object attention
+    x = mask_ob.expand(-1, self.channel, -1, -1).mul(encoder_map)
+
+    edge = mask_bg.clone()
+    edge[edge > 0.93] = 0
+    x = x + (edge * encoder_map)
+
+    x = self.DWSConv(x)
+    skip = x.clone()
+    x = (
+        torch.cat(
+            [self.DWConv1(x), self.DWConv2(x), self.DWConv3(x), self.DWConv4(x)],
+            dim=1,
+        )
+        + skip
+    )
+    x = torch.relu(self.conv1(x))
+
+    return x + decoder_map
+
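A minimal shape-check sketch for the signature documented above (assumes carvekit is importable; the channel count of 64 is an arbitrary example). The refined map keeps the decoder map's `(B, 1, H, W)` shape:

``` python
import torch
from carvekit.ml.arch.tracerb7.att_modules import ObjectAttention

oa = ObjectAttention(channel=64, kernel_size=3)
decoder_map = torch.rand(2, 1, 32, 32)   # coarse decoder representation
encoder_map = torch.rand(2, 64, 32, 32)  # matching encoder block output
out = oa(decoder_map, encoder_map)
print(out.shape)  # torch.Size([2, 1, 32, 32])
```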
+
+
+
+
+class RFB_Block +(in_channel, out_channel) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class RFB_Block(nn.Module):
+    def __init__(self, in_channel, out_channel):
+        super(RFB_Block, self).__init__()
+        self.relu = nn.ReLU(True)
+        self.branch0 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+        )
+        self.branch1 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+            BasicConv2d(out_channel, out_channel, kernel_size=(1, 3), padding=(0, 1)),
+            BasicConv2d(out_channel, out_channel, kernel_size=(3, 1), padding=(1, 0)),
+            BasicConv2d(out_channel, out_channel, 3, padding=3, dilation=3),
+        )
+        self.branch2 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+            BasicConv2d(out_channel, out_channel, kernel_size=(1, 5), padding=(0, 2)),
+            BasicConv2d(out_channel, out_channel, kernel_size=(5, 1), padding=(2, 0)),
+            BasicConv2d(out_channel, out_channel, 3, padding=5, dilation=5),
+        )
+        self.branch3 = nn.Sequential(
+            BasicConv2d(in_channel, out_channel, 1),
+            BasicConv2d(out_channel, out_channel, kernel_size=(1, 7), padding=(0, 3)),
+            BasicConv2d(out_channel, out_channel, kernel_size=(7, 1), padding=(3, 0)),
+            BasicConv2d(out_channel, out_channel, 3, padding=7, dilation=7),
+        )
+        self.conv_cat = BasicConv2d(4 * out_channel, out_channel, 3, padding=1)
+        self.conv_res = BasicConv2d(in_channel, out_channel, 1)
+
+    def forward(self, x):
+        x0 = self.branch0(x)
+        x1 = self.branch1(x)
+        x2 = self.branch2(x)
+        x3 = self.branch3(x)
+        x_cat = torch.cat((x0, x1, x2, x3), 1)
+        x_cat = self.conv_cat(x_cat)
+
+        x = self.relu(x_cat + self.conv_res(x))
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x0 = self.branch0(x)
+    x1 = self.branch1(x)
+    x2 = self.branch2(x)
+    x3 = self.branch3(x)
+    x_cat = torch.cat((x0, x1, x2, x3), 1)
+    x_cat = self.conv_cat(x_cat)
+
+    x = self.relu(x_cat + self.conv_res(x))
+    return x
+
+
+
+
+
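+
+A short sketch of how RFB_Block is used, assuming the import path carvekit.ml.arch.tracerb7.att_modules: four parallel branches with growing dilation are concatenated, fused by conv_cat and added to a 1x1 residual, so the spatial size is preserved while the channel count changes from in_channel to out_channel.
+
+import torch
+from carvekit.ml.arch.tracerb7.att_modules import RFB_Block
+
+rfb = RFB_Block(in_channel=64, out_channel=32).eval()
+features = torch.randn(1, 64, 40, 40)
+with torch.no_grad():
+    out = rfb(features)
+print(out.shape)  # torch.Size([1, 32, 40, 40])
+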
+class UnionAttentionModule +(n_channels, only_channel_tracing=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class UnionAttentionModule(nn.Module):
+    def __init__(self, n_channels, only_channel_tracing=False):
+        super(UnionAttentionModule, self).__init__()
+        self.GAP = GlobalAvgPool()
+        self.confidence_ratio = 0.1
+        self.bn = nn.BatchNorm2d(n_channels)
+        self.norm = nn.Sequential(
+            nn.BatchNorm2d(n_channels), nn.Dropout3d(self.confidence_ratio)
+        )
+        self.channel_q = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+        self.channel_k = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+        self.channel_v = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+
+        self.fc = nn.Conv2d(
+            in_channels=n_channels,
+            out_channels=n_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            bias=False,
+        )
+
+        if only_channel_tracing is False:
+            self.spatial_q = nn.Conv2d(
+                in_channels=n_channels,
+                out_channels=1,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                bias=False,
+            )
+            self.spatial_k = nn.Conv2d(
+                in_channels=n_channels,
+                out_channels=1,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                bias=False,
+            )
+            self.spatial_v = nn.Conv2d(
+                in_channels=n_channels,
+                out_channels=1,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                bias=False,
+            )
+        self.sigmoid = nn.Sigmoid()
+
+    def masking(self, x, mask):
+        mask = mask.squeeze(3).squeeze(2)
+        threshold = torch.quantile(
+            mask.float(), self.confidence_ratio, dim=-1, keepdim=True
+        )
+        mask[mask <= threshold] = 0.0
+        mask = mask.unsqueeze(2).unsqueeze(3)
+        mask = mask.expand(-1, x.shape[1], x.shape[2], x.shape[3]).contiguous()
+        masked_x = x * mask
+
+        return masked_x
+
+    def Channel_Tracer(self, x):
+        avg_pool = self.GAP(x)
+        x_norm = self.norm(avg_pool)
+
+        q = self.channel_q(x_norm).squeeze(-1)
+        k = self.channel_k(x_norm).squeeze(-1)
+        v = self.channel_v(x_norm).squeeze(-1)
+
+        # softmax(Q*K^T)
+        QK_T = torch.matmul(q, k.transpose(1, 2))
+        alpha = F.softmax(QK_T, dim=-1)
+
+        # a*v
+        att = torch.matmul(alpha, v).unsqueeze(-1)
+        att = self.fc(att)
+        att = self.sigmoid(att)
+
+        output = (x * att) + x
+        alpha_mask = att.clone()
+
+        return output, alpha_mask
+
+    def forward(self, x):
+        X_c, alpha_mask = self.Channel_Tracer(x)
+        X_c = self.bn(X_c)
+        x_drop = self.masking(X_c, alpha_mask)
+
+        q = self.spatial_q(x_drop).squeeze(1)
+        k = self.spatial_k(x_drop).squeeze(1)
+        v = self.spatial_v(x_drop).squeeze(1)
+
+        # softmax(Q*K^T)
+        QK_T = torch.matmul(q, k.transpose(1, 2))
+        alpha = F.softmax(QK_T, dim=-1)
+
+        output = torch.matmul(alpha, v).unsqueeze(1) + v.unsqueeze(1)
+
+        return output
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def Channel_Tracer(self, x) +
+
+
+
+ +Expand source code + +
def Channel_Tracer(self, x):
+    avg_pool = self.GAP(x)
+    x_norm = self.norm(avg_pool)
+
+    q = self.channel_q(x_norm).squeeze(-1)
+    k = self.channel_k(x_norm).squeeze(-1)
+    v = self.channel_v(x_norm).squeeze(-1)
+
+    # softmax(Q*K^T)
+    QK_T = torch.matmul(q, k.transpose(1, 2))
+    alpha = F.softmax(QK_T, dim=-1)
+
+    # a*v
+    att = torch.matmul(alpha, v).unsqueeze(-1)
+    att = self.fc(att)
+    att = self.sigmoid(att)
+
+    output = (x * att) + x
+    alpha_mask = att.clone()
+
+    return output, alpha_mask
+
+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    X_c, alpha_mask = self.Channel_Tracer(x)
+    X_c = self.bn(X_c)
+    x_drop = self.masking(X_c, alpha_mask)
+
+    q = self.spatial_q(x_drop).squeeze(1)
+    k = self.spatial_k(x_drop).squeeze(1)
+    v = self.spatial_v(x_drop).squeeze(1)
+
+    # softmax(Q*K^T)
+    QK_T = torch.matmul(q, k.transpose(1, 2))
+    alpha = F.softmax(QK_T, dim=-1)
+
+    output = torch.matmul(alpha, v).unsqueeze(1) + v.unsqueeze(1)
+
+    return output
+
+
+
+def masking(self, x, mask) +
+
+
+
+ +Expand source code + +
def masking(self, x, mask):
+    mask = mask.squeeze(3).squeeze(2)
+    threshold = torch.quantile(
+        mask.float(), self.confidence_ratio, dim=-1, keepdim=True
+    )
+    mask[mask <= threshold] = 0.0
+    mask = mask.unsqueeze(2).unsqueeze(3)
+    mask = mask.expand(-1, x.shape[1], x.shape[2], x.shape[3]).contiguous()
+    masked_x = x * mask
+
+    return masked_x
+
+
+
+
+
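+
+A minimal sketch, assuming the import path carvekit.ml.arch.tracerb7.att_modules: Channel_Tracer applies channel self-attention, masking drops the least confident channels, and the spatial attention collapses the result to a single-channel map. eval() is used so the BatchNorm layers work with a batch of one.
+
+import torch
+from carvekit.ml.arch.tracerb7.att_modules import UnionAttentionModule
+
+uam = UnionAttentionModule(n_channels=96).eval()
+features = torch.randn(1, 96, 44, 44)
+with torch.no_grad():
+    enhanced, alpha = uam.Channel_Tracer(features)  # channel-attended features + attention mask
+    saliency = uam(features)                        # full pipeline -> (B, 1, H, W)
+print(enhanced.shape, saliency.shape)  # torch.Size([1, 96, 44, 44]) torch.Size([1, 1, 44, 44])
+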
+class aggregation +(channel) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class aggregation(nn.Module):
+    def __init__(self, channel):
+        super(aggregation, self).__init__()
+        self.relu = nn.ReLU(True)
+
+        self.upsample = nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True)
+        self.conv_upsample1 = BasicConv2d(channel[2], channel[1], 3, padding=1)
+        self.conv_upsample2 = BasicConv2d(channel[2], channel[0], 3, padding=1)
+        self.conv_upsample3 = BasicConv2d(channel[1], channel[0], 3, padding=1)
+        self.conv_upsample4 = BasicConv2d(channel[2], channel[2], 3, padding=1)
+        self.conv_upsample5 = BasicConv2d(
+            channel[2] + channel[1], channel[2] + channel[1], 3, padding=1
+        )
+
+        self.conv_concat2 = BasicConv2d(
+            (channel[2] + channel[1]), (channel[2] + channel[1]), 3, padding=1
+        )
+        self.conv_concat3 = BasicConv2d(
+            (channel[0] + channel[1] + channel[2]),
+            (channel[0] + channel[1] + channel[2]),
+            3,
+            padding=1,
+        )
+
+        self.UAM = UnionAttentionModule(channel[0] + channel[1] + channel[2])
+
+    def forward(self, e4, e3, e2):
+        e4_1 = e4
+        e3_1 = self.conv_upsample1(self.upsample(e4)) * e3
+        e2_1 = (
+            self.conv_upsample2(self.upsample(self.upsample(e4)))
+            * self.conv_upsample3(self.upsample(e3))
+            * e2
+        )
+
+        e3_2 = torch.cat((e3_1, self.conv_upsample4(self.upsample(e4_1))), 1)
+        e3_2 = self.conv_concat2(e3_2)
+
+        e2_2 = torch.cat((e2_1, self.conv_upsample5(self.upsample(e3_2))), 1)
+        x = self.conv_concat3(e2_2)
+
+        output = self.UAM(x)
+
+        return output
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, e4, e3, e2) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, e4, e3, e2):
+    e4_1 = e4
+    e3_1 = self.conv_upsample1(self.upsample(e4)) * e3
+    e2_1 = (
+        self.conv_upsample2(self.upsample(self.upsample(e4)))
+        * self.conv_upsample3(self.upsample(e3))
+        * e2
+    )
+
+    e3_2 = torch.cat((e3_1, self.conv_upsample4(self.upsample(e4_1))), 1)
+    e3_2 = self.conv_concat2(e3_2)
+
+    e2_2 = torch.cat((e2_1, self.conv_upsample5(self.upsample(e3_2))), 1)
+    x = self.conv_concat3(e2_2)
+
+    output = self.UAM(x)
+
+    return output
+
+
+
+
+
+
+
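+
+A hedged usage sketch, assuming the import path carvekit.ml.arch.tracerb7.att_modules: channel is a list with the channel counts of the three encoder stages (e2, e3, e4), e4 being the deepest map and each shallower map having twice its spatial resolution; the output is a single-channel map at the e2 resolution.
+
+import torch
+from carvekit.ml.arch.tracerb7.att_modules import aggregation
+
+agg = aggregation(channel=[32, 64, 128]).eval()
+e4 = torch.randn(1, 128, 16, 16)  # deepest encoder features
+e3 = torch.randn(1, 64, 32, 32)
+e2 = torch.randn(1, 32, 64, 64)
+with torch.no_grad():
+    out = agg(e4, e3, e2)
+print(out.shape)  # torch.Size([1, 1, 64, 64])
+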
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/tracerb7/conv_modules.html b/docs/api/carvekit/ml/arch/tracerb7/conv_modules.html new file mode 100644 index 0000000..08302f0 --- /dev/null +++ b/docs/api/carvekit/ml/arch/tracerb7/conv_modules.html @@ -0,0 +1,465 @@ + + + + + + +carvekit.ml.arch.tracerb7.conv_modules API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.tracerb7.conv_modules

+
+
+

Source url: https://github.com/Karel911/TRACER +Author: Min Seok Lee and Wooseok Shin +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/Karel911/TRACER
+Author: Min Seok Lee and Wooseok Shin
+License: Apache License 2.0
+"""
+import torch.nn as nn
+
+
+class BasicConv2d(nn.Module):
+    def __init__(
+        self,
+        in_channel,
+        out_channel,
+        kernel_size,
+        stride=(1, 1),
+        padding=(0, 0),
+        dilation=(1, 1),
+    ):
+        super(BasicConv2d, self).__init__()
+        self.conv = nn.Conv2d(
+            in_channel,
+            out_channel,
+            kernel_size=kernel_size,
+            stride=stride,
+            padding=padding,
+            dilation=dilation,
+            bias=False,
+        )
+        self.bn = nn.BatchNorm2d(out_channel)
+        self.selu = nn.SELU()
+
+    def forward(self, x):
+        x = self.conv(x)
+        x = self.bn(x)
+        x = self.selu(x)
+
+        return x
+
+
+class DWConv(nn.Module):
+    def __init__(self, in_channel, out_channel, kernel, dilation, padding):
+        super(DWConv, self).__init__()
+        self.out_channel = out_channel
+        self.DWConv = nn.Conv2d(
+            in_channel,
+            out_channel,
+            kernel_size=kernel,
+            padding=padding,
+            groups=in_channel,
+            dilation=dilation,
+            bias=False,
+        )
+        self.bn = nn.BatchNorm2d(out_channel)
+        self.selu = nn.SELU()
+
+    def forward(self, x):
+        x = self.DWConv(x)
+        out = self.selu(self.bn(x))
+
+        return out
+
+
+class DWSConv(nn.Module):
+    def __init__(self, in_channel, out_channel, kernel, padding, kernels_per_layer):
+        super(DWSConv, self).__init__()
+        self.out_channel = out_channel
+        self.DWConv = nn.Conv2d(
+            in_channel,
+            in_channel * kernels_per_layer,
+            kernel_size=kernel,
+            padding=padding,
+            groups=in_channel,
+            bias=False,
+        )
+        self.bn = nn.BatchNorm2d(in_channel * kernels_per_layer)
+        self.selu = nn.SELU()
+        self.PWConv = nn.Conv2d(
+            in_channel * kernels_per_layer, out_channel, kernel_size=1, bias=False
+        )
+        self.bn2 = nn.BatchNorm2d(out_channel)
+
+    def forward(self, x):
+        x = self.DWConv(x)
+        x = self.selu(self.bn(x))
+        out = self.PWConv(x)
+        out = self.selu(self.bn2(out))
+
+        return out
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class BasicConv2d +(in_channel, out_channel, kernel_size, stride=(1, 1), padding=(0, 0), dilation=(1, 1)) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class BasicConv2d(nn.Module):
+    def __init__(
+        self,
+        in_channel,
+        out_channel,
+        kernel_size,
+        stride=(1, 1),
+        padding=(0, 0),
+        dilation=(1, 1),
+    ):
+        super(BasicConv2d, self).__init__()
+        self.conv = nn.Conv2d(
+            in_channel,
+            out_channel,
+            kernel_size=kernel_size,
+            stride=stride,
+            padding=padding,
+            dilation=dilation,
+            bias=False,
+        )
+        self.bn = nn.BatchNorm2d(out_channel)
+        self.selu = nn.SELU()
+
+    def forward(self, x):
+        x = self.conv(x)
+        x = self.bn(x)
+        x = self.selu(x)
+
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.conv(x)
+    x = self.bn(x)
+    x = self.selu(x)
+
+    return x
+
+
+
+
+
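+
+BasicConv2d is simply Conv2d -> BatchNorm2d -> SELU with the convolution bias disabled; a quick sketch using the module documented above:
+
+import torch
+from carvekit.ml.arch.tracerb7.conv_modules import BasicConv2d
+
+conv = BasicConv2d(in_channel=3, out_channel=16, kernel_size=3, padding=1).eval()
+x = torch.randn(1, 3, 64, 64)
+print(conv(x).shape)  # torch.Size([1, 16, 64, 64])
+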
+class DWConv +(in_channel, out_channel, kernel, dilation, padding) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class DWConv(nn.Module):
+    def __init__(self, in_channel, out_channel, kernel, dilation, padding):
+        super(DWConv, self).__init__()
+        self.out_channel = out_channel
+        self.DWConv = nn.Conv2d(
+            in_channel,
+            out_channel,
+            kernel_size=kernel,
+            padding=padding,
+            groups=in_channel,
+            dilation=dilation,
+            bias=False,
+        )
+        self.bn = nn.BatchNorm2d(out_channel)
+        self.selu = nn.SELU()
+
+    def forward(self, x):
+        x = self.DWConv(x)
+        out = self.selu(self.bn(x))
+
+        return out
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.DWConv(x)
+    out = self.selu(self.bn(x))
+
+    return out
+
+
+
+
+
+class DWSConv +(in_channel, out_channel, kernel, padding, kernels_per_layer) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class DWSConv(nn.Module):
+    def __init__(self, in_channel, out_channel, kernel, padding, kernels_per_layer):
+        super(DWSConv, self).__init__()
+        self.out_channel = out_channel
+        self.DWConv = nn.Conv2d(
+            in_channel,
+            in_channel * kernels_per_layer,
+            kernel_size=kernel,
+            padding=padding,
+            groups=in_channel,
+            bias=False,
+        )
+        self.bn = nn.BatchNorm2d(in_channel * kernels_per_layer)
+        self.selu = nn.SELU()
+        self.PWConv = nn.Conv2d(
+            in_channel * kernels_per_layer, out_channel, kernel_size=1, bias=False
+        )
+        self.bn2 = nn.BatchNorm2d(out_channel)
+
+    def forward(self, x):
+        x = self.DWConv(x)
+        x = self.selu(self.bn(x))
+        out = self.PWConv(x)
+        out = self.selu(self.bn2(out))
+
+        return out
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.DWConv(x)
+    x = self.selu(self.bn(x))
+    out = self.PWConv(x)
+    out = self.selu(self.bn2(out))
+
+    return out
+
+
+
+
+
+
+
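+
+DWConv is a depthwise (grouped) convolution and DWSConv a depthwise-separable one (depthwise followed by a 1x1 pointwise projection), each wrapped with BatchNorm2d and SELU. A short sketch:
+
+import torch
+from carvekit.ml.arch.tracerb7.conv_modules import DWConv, DWSConv
+
+dw = DWConv(in_channel=32, out_channel=32, kernel=3, dilation=2, padding=2).eval()
+dws = DWSConv(in_channel=32, out_channel=16, kernel=3, padding=1, kernels_per_layer=1).eval()
+x = torch.randn(1, 32, 40, 40)
+print(dw(x).shape)   # torch.Size([1, 32, 40, 40]) - padding chosen to match the dilation
+print(dws(x).shape)  # torch.Size([1, 16, 40, 40])
+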
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/tracerb7/effi_utils.html b/docs/api/carvekit/ml/arch/tracerb7/effi_utils.html new file mode 100644 index 0000000..a94c44a --- /dev/null +++ b/docs/api/carvekit/ml/arch/tracerb7/effi_utils.html @@ -0,0 +1,2066 @@ + + + + + + +carvekit.ml.arch.tracerb7.effi_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.tracerb7.effi_utils

+
+
+

Original author: lukemelas (github username) +Github repo: https://github.com/lukemelas/EfficientNet-PyTorch +With adjustments and added comments by workingcoder (github username). +License: Apache License 2.0 +Reimplemented: Min Seok Lee and Wooseok Shin

+
+ +Expand source code + +
"""
+Original author: lukemelas (github username)
+Github repo: https://github.com/lukemelas/EfficientNet-PyTorch
+With adjustments and added comments by workingcoder (github username).
+License: Apache License 2.0
+Reimplemented: Min Seok Lee and Wooseok Shin
+"""
+
+import collections
+import re
+from functools import partial
+
+import math
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+# Parameters for the entire model (stem, all blocks, and head)
+GlobalParams = collections.namedtuple(
+    "GlobalParams",
+    [
+        "width_coefficient",
+        "depth_coefficient",
+        "image_size",
+        "dropout_rate",
+        "num_classes",
+        "batch_norm_momentum",
+        "batch_norm_epsilon",
+        "drop_connect_rate",
+        "depth_divisor",
+        "min_depth",
+        "include_top",
+    ],
+)
+
+# Parameters for an individual model block
+BlockArgs = collections.namedtuple(
+    "BlockArgs",
+    [
+        "num_repeat",
+        "kernel_size",
+        "stride",
+        "expand_ratio",
+        "input_filters",
+        "output_filters",
+        "se_ratio",
+        "id_skip",
+    ],
+)
+
+# Set GlobalParams and BlockArgs's defaults
+GlobalParams.__new__.__defaults__ = (None,) * len(GlobalParams._fields)
+BlockArgs.__new__.__defaults__ = (None,) * len(BlockArgs._fields)
+
+
+# An ordinary implementation of Swish function
+class Swish(nn.Module):
+    def forward(self, x):
+        return x * torch.sigmoid(x)
+
+
+# A memory-efficient implementation of Swish function
+class SwishImplementation(torch.autograd.Function):
+    @staticmethod
+    def forward(ctx, i):
+        result = i * torch.sigmoid(i)
+        ctx.save_for_backward(i)
+        return result
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        i = ctx.saved_tensors[0]
+        sigmoid_i = torch.sigmoid(i)
+        return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i)))
+
+
+class MemoryEfficientSwish(nn.Module):
+    def forward(self, x):
+        return SwishImplementation.apply(x)
+
+
+def round_filters(filters, global_params):
+    """Calculate and round number of filters based on width multiplier.
+       Use width_coefficient, depth_divisor and min_depth of global_params.
+
+    Args:
+        filters (int): Filters number to be calculated.
+        global_params (namedtuple): Global params of the model.
+
+    Returns:
+        new_filters: New filters number after calculating.
+    """
+    multiplier = global_params.width_coefficient
+    if not multiplier:
+        return filters
+    divisor = global_params.depth_divisor
+    min_depth = global_params.min_depth
+    filters *= multiplier
+    min_depth = min_depth or divisor  # pay attention to this line when using min_depth
+    # follow the formula transferred from official TensorFlow implementation
+    new_filters = max(min_depth, int(filters + divisor / 2) // divisor * divisor)
+    if new_filters < 0.9 * filters:  # prevent rounding by more than 10%
+        new_filters += divisor
+    return int(new_filters)
+
+
+def round_repeats(repeats, global_params):
+    """Calculate module's repeat number of a block based on depth multiplier.
+       Use depth_coefficient of global_params.
+
+    Args:
+        repeats (int): num_repeat to be calculated.
+        global_params (namedtuple): Global params of the model.
+
+    Returns:
+        new repeat: New repeat number after calculating.
+    """
+    multiplier = global_params.depth_coefficient
+    if not multiplier:
+        return repeats
+    # follow the formula transferred from official TensorFlow implementation
+    return int(math.ceil(multiplier * repeats))
+
+
+def drop_connect(inputs, p, training):
+    """Drop connect.
+
+    Args:
+        inputs (tensor: BCWH): Input of this structure.
+        p (float: 0.0~1.0): Probability of drop connection.
+        training (bool): The running mode.
+
+    Returns:
+        output: Output after drop connection.
+    """
+    assert 0 <= p <= 1, "p must be in range of [0,1]"
+
+    if not training:
+        return inputs
+
+    batch_size = inputs.shape[0]
+    keep_prob = 1 - p
+
+    # generate binary_tensor mask according to probability (p for 0, 1-p for 1)
+    random_tensor = keep_prob
+    random_tensor += torch.rand(
+        [batch_size, 1, 1, 1], dtype=inputs.dtype, device=inputs.device
+    )
+    binary_tensor = torch.floor(random_tensor)
+
+    output = inputs / keep_prob * binary_tensor
+    return output
+
+
+def get_width_and_height_from_size(x):
+    """Obtain height and width from x.
+
+    Args:
+        x (int, tuple or list): Data size.
+
+    Returns:
+        size: A tuple or list (H,W).
+    """
+    if isinstance(x, int):
+        return x, x
+    if isinstance(x, list) or isinstance(x, tuple):
+        return x
+    else:
+        raise TypeError()
+
+
+def calculate_output_image_size(input_image_size, stride):
+    """Calculates the output image size when using Conv2dSamePadding with a stride.
+       Necessary for static padding. Thanks to mannatsingh for pointing this out.
+
+    Args:
+        input_image_size (int, tuple or list): Size of input image.
+        stride (int, tuple or list): Conv2d operation's stride.
+
+    Returns:
+        output_image_size: A list [H,W].
+    """
+    if input_image_size is None:
+        return None
+    image_height, image_width = get_width_and_height_from_size(input_image_size)
+    stride = stride if isinstance(stride, int) else stride[0]
+    image_height = int(math.ceil(image_height / stride))
+    image_width = int(math.ceil(image_width / stride))
+    return [image_height, image_width]
+
+
+# Note:
+# The following 'SamePadding' functions make output size equal ceil(input size/stride).
+# Only when stride equals 1, can the output size be the same as input size.
+# Don't be confused by their function names ! ! !
+
+
+def get_same_padding_conv2d(image_size=None):
+    """Chooses static padding if you have specified an image size, and dynamic padding otherwise.
+       Static padding is necessary for ONNX exporting of models.
+
+    Args:
+        image_size (int or tuple): Size of the image.
+
+    Returns:
+        Conv2dDynamicSamePadding or Conv2dStaticSamePadding.
+    """
+    if image_size is None:
+        return Conv2dDynamicSamePadding
+    else:
+        return partial(Conv2dStaticSamePadding, image_size=image_size)
+
+
+class Conv2dDynamicSamePadding(nn.Conv2d):
+    """2D Convolutions like TensorFlow, for a dynamic image size.
+    The padding is operated in forward function by calculating dynamically.
+    """
+
+    # Tips for 'SAME' mode padding.
+    #     Given the following:
+    #         i: width or height
+    #         s: stride
+    #         k: kernel size
+    #         d: dilation
+    #         p: padding
+    #     Output after Conv2d:
+    #         o = floor((i+p-((k-1)*d+1))/s+1)
+    # If o equals i, i = floor((i+p-((k-1)*d+1))/s+1),
+    # => p = (i-1)*s+((k-1)*d+1)-i
+
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        dilation=1,
+        groups=1,
+        bias=True,
+    ):
+        super().__init__(
+            in_channels, out_channels, kernel_size, stride, 0, dilation, groups, bias
+        )
+        self.stride = self.stride if len(self.stride) == 2 else [self.stride[0]] * 2
+
+    def forward(self, x):
+        ih, iw = x.size()[-2:]
+        kh, kw = self.weight.size()[-2:]
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(
+            iw / sw
+        )  # change the output size according to stride ! ! !
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            x = F.pad(
+                x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
+            )
+        return F.conv2d(
+            x,
+            self.weight,
+            self.bias,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.groups,
+        )
+
+
+class Conv2dStaticSamePadding(nn.Conv2d):
+    """2D Convolutions like TensorFlow's 'SAME' mode, with the given input image size.
+    The padding module is calculated in the constructor, then used in forward.
+    """
+
+    # With the same calculation as Conv2dDynamicSamePadding
+
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        image_size=None,
+        **kwargs
+    ):
+        super().__init__(in_channels, out_channels, kernel_size, stride, **kwargs)
+        self.stride = self.stride if len(self.stride) == 2 else [self.stride[0]] * 2
+
+        # Calculate padding based on image size and save it
+        assert image_size is not None
+        ih, iw = (image_size, image_size) if isinstance(image_size, int) else image_size
+        kh, kw = self.weight.size()[-2:]
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            self.static_padding = nn.ZeroPad2d(
+                (pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2)
+            )
+        else:
+            self.static_padding = nn.Identity()
+
+    def forward(self, x):
+        x = self.static_padding(x)
+        x = F.conv2d(
+            x,
+            self.weight,
+            self.bias,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.groups,
+        )
+        return x
+
+
+def get_same_padding_maxPool2d(image_size=None):
+    """Chooses static padding if you have specified an image size, and dynamic padding otherwise.
+       Static padding is necessary for ONNX exporting of models.
+
+    Args:
+        image_size (int or tuple): Size of the image.
+
+    Returns:
+        MaxPool2dDynamicSamePadding or MaxPool2dStaticSamePadding.
+    """
+    if image_size is None:
+        return MaxPool2dDynamicSamePadding
+    else:
+        return partial(MaxPool2dStaticSamePadding, image_size=image_size)
+
+
+class MaxPool2dDynamicSamePadding(nn.MaxPool2d):
+    """2D MaxPooling like TensorFlow's 'SAME' mode, with a dynamic image size.
+    The padding is operated in forward function by calculating dynamically.
+    """
+
+    def __init__(
+        self,
+        kernel_size,
+        stride,
+        padding=0,
+        dilation=1,
+        return_indices=False,
+        ceil_mode=False,
+    ):
+        super().__init__(
+            kernel_size, stride, padding, dilation, return_indices, ceil_mode
+        )
+        self.stride = [self.stride] * 2 if isinstance(self.stride, int) else self.stride
+        self.kernel_size = (
+            [self.kernel_size] * 2
+            if isinstance(self.kernel_size, int)
+            else self.kernel_size
+        )
+        self.dilation = (
+            [self.dilation] * 2 if isinstance(self.dilation, int) else self.dilation
+        )
+
+    def forward(self, x):
+        ih, iw = x.size()[-2:]
+        kh, kw = self.kernel_size
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            x = F.pad(
+                x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
+            )
+        return F.max_pool2d(
+            x,
+            self.kernel_size,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.ceil_mode,
+            self.return_indices,
+        )
+
+
+class MaxPool2dStaticSamePadding(nn.MaxPool2d):
+    """2D MaxPooling like TensorFlow's 'SAME' mode, with the given input image size.
+    The padding module is calculated in the constructor, then used in forward.
+    """
+
+    def __init__(self, kernel_size, stride, image_size=None, **kwargs):
+        super().__init__(kernel_size, stride, **kwargs)
+        self.stride = [self.stride] * 2 if isinstance(self.stride, int) else self.stride
+        self.kernel_size = (
+            [self.kernel_size] * 2
+            if isinstance(self.kernel_size, int)
+            else self.kernel_size
+        )
+        self.dilation = (
+            [self.dilation] * 2 if isinstance(self.dilation, int) else self.dilation
+        )
+
+        # Calculate padding based on image size and save it
+        assert image_size is not None
+        ih, iw = (image_size, image_size) if isinstance(image_size, int) else image_size
+        kh, kw = self.kernel_size
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            self.static_padding = nn.ZeroPad2d(
+                (pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2)
+            )
+        else:
+            self.static_padding = nn.Identity()
+
+    def forward(self, x):
+        x = self.static_padding(x)
+        x = F.max_pool2d(
+            x,
+            self.kernel_size,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.ceil_mode,
+            self.return_indices,
+        )
+        return x
+
+
+class BlockDecoder(object):
+    """Block Decoder for readability,
+    straight from the official TensorFlow repository.
+    """
+
+    @staticmethod
+    def _decode_block_string(block_string):
+        """Get a block through a string notation of arguments.
+
+        Args:
+            block_string (str): A string notation of arguments.
+                                Examples: 'r1_k3_s11_e1_i32_o16_se0.25_noskip'.
+
+        Returns:
+            BlockArgs: The namedtuple defined at the top of this file.
+        """
+        assert isinstance(block_string, str)
+
+        ops = block_string.split("_")
+        options = {}
+        for op in ops:
+            splits = re.split(r"(\d.*)", op)
+            if len(splits) >= 2:
+                key, value = splits[:2]
+                options[key] = value
+
+        # Check stride
+        assert ("s" in options and len(options["s"]) == 1) or (
+            len(options["s"]) == 2 and options["s"][0] == options["s"][1]
+        )
+
+        return BlockArgs(
+            num_repeat=int(options["r"]),
+            kernel_size=int(options["k"]),
+            stride=[int(options["s"][0])],
+            expand_ratio=int(options["e"]),
+            input_filters=int(options["i"]),
+            output_filters=int(options["o"]),
+            se_ratio=float(options["se"]) if "se" in options else None,
+            id_skip=("noskip" not in block_string),
+        )
+
+    @staticmethod
+    def _encode_block_string(block):
+        """Encode a block to a string.
+
+        Args:
+            block (namedtuple): A BlockArgs type argument.
+
+        Returns:
+            block_string: A String form of BlockArgs.
+        """
+        args = [
+            "r%d" % block.num_repeat,
+            "k%d" % block.kernel_size,
+            "s%d%d" % (block.strides[0], block.strides[1]),
+            "e%s" % block.expand_ratio,
+            "i%d" % block.input_filters,
+            "o%d" % block.output_filters,
+        ]
+        if 0 < block.se_ratio <= 1:
+            args.append("se%s" % block.se_ratio)
+        if block.id_skip is False:
+            args.append("noskip")
+        return "_".join(args)
+
+    @staticmethod
+    def decode(string_list):
+        """Decode a list of string notations to specify blocks inside the network.
+
+        Args:
+            string_list (list[str]): A list of strings, each string is a notation of block.
+
+        Returns:
+            blocks_args: A list of BlockArgs namedtuples of block args.
+        """
+        assert isinstance(string_list, list)
+        blocks_args = []
+        for block_string in string_list:
+            blocks_args.append(BlockDecoder._decode_block_string(block_string))
+        return blocks_args
+
+    @staticmethod
+    def encode(blocks_args):
+        """Encode a list of BlockArgs to a list of strings.
+
+        Args:
+            blocks_args (list[namedtuples]): A list of BlockArgs namedtuples of block args.
+
+        Returns:
+            block_strings: A list of strings, each string is a notation of block.
+        """
+        block_strings = []
+        for block in blocks_args:
+            block_strings.append(BlockDecoder._encode_block_string(block))
+        return block_strings
+
+
+def create_block_args(
+    width_coefficient=None,
+    depth_coefficient=None,
+    image_size=None,
+    dropout_rate=0.2,
+    drop_connect_rate=0.2,
+    num_classes=1000,
+    include_top=True,
+):
+    """Create BlockArgs and GlobalParams for efficientnet model.
+
+    Args:
+        width_coefficient (float)
+        depth_coefficient (float)
+        image_size (int)
+        dropout_rate (float)
+        drop_connect_rate (float)
+        num_classes (int)
+
+        Meaning as the name suggests.
+
+    Returns:
+        blocks_args, global_params.
+    """
+
+    # Blocks args for the whole model(efficientnet-b0 by default)
+    # It will be modified in the construction of EfficientNet Class according to model
+    blocks_args = [
+        "r1_k3_s11_e1_i32_o16_se0.25",
+        "r2_k3_s22_e6_i16_o24_se0.25",
+        "r2_k5_s22_e6_i24_o40_se0.25",
+        "r3_k3_s22_e6_i40_o80_se0.25",
+        "r3_k5_s11_e6_i80_o112_se0.25",
+        "r4_k5_s22_e6_i112_o192_se0.25",
+        "r1_k3_s11_e6_i192_o320_se0.25",
+    ]
+    blocks_args = BlockDecoder.decode(blocks_args)
+
+    global_params = GlobalParams(
+        width_coefficient=width_coefficient,
+        depth_coefficient=depth_coefficient,
+        image_size=image_size,
+        dropout_rate=dropout_rate,
+        num_classes=num_classes,
+        batch_norm_momentum=0.99,
+        batch_norm_epsilon=1e-3,
+        drop_connect_rate=drop_connect_rate,
+        depth_divisor=8,
+        min_depth=None,
+        include_top=include_top,
+    )
+
+    return blocks_args, global_params
+
+
+
+
+
+
+
+

Functions

+
+
+def calculate_output_image_size(input_image_size, stride) +
+
+

Calculates the output image size when using Conv2dSamePadding with a stride. +Necessary for static padding. Thanks to mannatsingh for pointing this out.

+

Args

+
+
input_image_size : int, tuple or list
+
Size of input image.
+
stride : int, tuple or list
+
Conv2d operation's stride.
+
+

Returns

+
+
output_image_size
+
A list [H,W].
+
+
+ +Expand source code + +
def calculate_output_image_size(input_image_size, stride):
+    """Calculates the output image size when using Conv2dSamePadding with a stride.
+       Necessary for static padding. Thanks to mannatsingh for pointing this out.
+
+    Args:
+        input_image_size (int, tuple or list): Size of input image.
+        stride (int, tuple or list): Conv2d operation's stride.
+
+    Returns:
+        output_image_size: A list [H,W].
+    """
+    if input_image_size is None:
+        return None
+    image_height, image_width = get_width_and_height_from_size(input_image_size)
+    stride = stride if isinstance(stride, int) else stride[0]
+    image_height = int(math.ceil(image_height / stride))
+    image_width = int(math.ceil(image_width / stride))
+    return [image_height, image_width]
+
+
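+
+A quick worked example: with 'SAME' padding the spatial size only shrinks by the stride (ceil division), so a 224x224 input through a stride-2 layer becomes 112x112.
+
+from carvekit.ml.arch.tracerb7.effi_utils import calculate_output_image_size
+
+print(calculate_output_image_size(224, 2))         # [112, 112]
+print(calculate_output_image_size((224, 112), 2))  # [112, 56]
+print(calculate_output_image_size(None, 2))        # None -> size unknown, dynamic padding is used
+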
+
+def create_block_args(width_coefficient=None, depth_coefficient=None, image_size=None, dropout_rate=0.2, drop_connect_rate=0.2, num_classes=1000, include_top=True) +
+
+

Create BlockArgs and GlobalParams for efficientnet model.

+

Args

+

width_coefficient (float) +depth_coefficient (float) +image_size (int) +dropout_rate (float) +drop_connect_rate (float) +num_classes (int)

+

Meaning as the name suggests.

+

Returns

+

blocks_args, global_params.

+
+ +Expand source code + +
def create_block_args(
+    width_coefficient=None,
+    depth_coefficient=None,
+    image_size=None,
+    dropout_rate=0.2,
+    drop_connect_rate=0.2,
+    num_classes=1000,
+    include_top=True,
+):
+    """Create BlockArgs and GlobalParams for efficientnet model.
+
+    Args:
+        width_coefficient (float)
+        depth_coefficient (float)
+        image_size (int)
+        dropout_rate (float)
+        drop_connect_rate (float)
+        num_classes (int)
+
+        Meaning as the name suggests.
+
+    Returns:
+        blocks_args, global_params.
+    """
+
+    # Blocks args for the whole model(efficientnet-b0 by default)
+    # It will be modified in the construction of EfficientNet Class according to model
+    blocks_args = [
+        "r1_k3_s11_e1_i32_o16_se0.25",
+        "r2_k3_s22_e6_i16_o24_se0.25",
+        "r2_k5_s22_e6_i24_o40_se0.25",
+        "r3_k3_s22_e6_i40_o80_se0.25",
+        "r3_k5_s11_e6_i80_o112_se0.25",
+        "r4_k5_s22_e6_i112_o192_se0.25",
+        "r1_k3_s11_e6_i192_o320_se0.25",
+    ]
+    blocks_args = BlockDecoder.decode(blocks_args)
+
+    global_params = GlobalParams(
+        width_coefficient=width_coefficient,
+        depth_coefficient=depth_coefficient,
+        image_size=image_size,
+        dropout_rate=dropout_rate,
+        num_classes=num_classes,
+        batch_norm_momentum=0.99,
+        batch_norm_epsilon=1e-3,
+        drop_connect_rate=drop_connect_rate,
+        depth_divisor=8,
+        min_depth=None,
+        include_top=include_top,
+    )
+
+    return blocks_args, global_params
+
+
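+
+A minimal sketch: build the default (B0-style) block definitions and the GlobalParams namedtuple; the coefficients below are the EfficientNet-B7 scaling values.
+
+from carvekit.ml.arch.tracerb7.effi_utils import create_block_args
+
+blocks_args, global_params = create_block_args(
+    width_coefficient=2.0, depth_coefficient=3.1, image_size=600
+)
+print(len(blocks_args))                                       # 7 block groups
+print(blocks_args[0].kernel_size, blocks_args[0].se_ratio)    # 3 0.25
+print(global_params.depth_divisor, global_params.image_size)  # 8 600
+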
+
+def drop_connect(inputs, p, training) +
+
+

Drop connect.

+

Args

+
+
inputs (tensor: BCWH): Input of this structure.
+
p (float: 0.0~1.0): Probability of drop connection.
+
training : bool
+
The running mode.
+
+

Returns

+
+
output
+
Output after drop connection.
+
+
+ +Expand source code + +
def drop_connect(inputs, p, training):
+    """Drop connect.
+
+    Args:
+        inputs (tensor: BCWH): Input of this structure.
+        p (float: 0.0~1.0): Probability of drop connection.
+        training (bool): The running mode.
+
+    Returns:
+        output: Output after drop connection.
+    """
+    assert 0 <= p <= 1, "p must be in range of [0,1]"
+
+    if not training:
+        return inputs
+
+    batch_size = inputs.shape[0]
+    keep_prob = 1 - p
+
+    # generate binary_tensor mask according to probability (p for 0, 1-p for 1)
+    random_tensor = keep_prob
+    random_tensor += torch.rand(
+        [batch_size, 1, 1, 1], dtype=inputs.dtype, device=inputs.device
+    )
+    binary_tensor = torch.floor(random_tensor)
+
+    output = inputs / keep_prob * binary_tensor
+    return output
+
+
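+
+Stochastic depth in one call: during training each sample in the batch is zeroed with probability p and the survivors are rescaled by 1/(1-p); in evaluation mode the input is returned unchanged.
+
+import torch
+from carvekit.ml.arch.tracerb7.effi_utils import drop_connect
+
+x = torch.ones(4, 8, 16, 16)
+out_train = drop_connect(x, p=0.5, training=True)   # some samples zeroed, the rest scaled by 2.0
+out_eval = drop_connect(x, p=0.5, training=False)   # no-op
+print(out_train.unique().tolist(), torch.equal(out_eval, x))  # e.g. [0.0, 2.0] True
+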
+
+def get_same_padding_conv2d(image_size=None) +
+
+

Chooses static padding if you have specified an image size, and dynamic padding otherwise. +Static padding is necessary for ONNX exporting of models.

+

Args

+
+
image_size : int or tuple
+
Size of the image.
+
+

Returns

+

Conv2dDynamicSamePadding or Conv2dStaticSamePadding.

+
+ +Expand source code + +
def get_same_padding_conv2d(image_size=None):
+    """Chooses static padding if you have specified an image size, and dynamic padding otherwise.
+       Static padding is necessary for ONNX exporting of models.
+
+    Args:
+        image_size (int or tuple): Size of the image.
+
+    Returns:
+        Conv2dDynamicSamePadding or Conv2dStaticSamePadding.
+    """
+    if image_size is None:
+        return Conv2dDynamicSamePadding
+    else:
+        return partial(Conv2dStaticSamePadding, image_size=image_size)
+
+
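+
+A short sketch: when the image size is known, the returned class is a statically padded Conv2d, so a stride-2 layer halves the spatial size exactly, which is what makes ONNX export possible.
+
+import torch
+from carvekit.ml.arch.tracerb7.effi_utils import get_same_padding_conv2d
+
+Conv2d = get_same_padding_conv2d(image_size=224)  # -> Conv2dStaticSamePadding
+conv = Conv2d(3, 32, kernel_size=3, stride=2, bias=False)
+x = torch.randn(1, 3, 224, 224)
+print(conv(x).shape)  # torch.Size([1, 32, 112, 112])
+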
+
+def get_same_padding_maxPool2d(image_size=None) +
+
+

Chooses static padding if you have specified an image size, and dynamic padding otherwise. +Static padding is necessary for ONNX exporting of models.

+

Args

+
+
image_size : int or tuple
+
Size of the image.
+
+

Returns

+

MaxPool2dDynamicSamePadding or MaxPool2dStaticSamePadding.

+
+ +Expand source code + +
def get_same_padding_maxPool2d(image_size=None):
+    """Chooses static padding if you have specified an image size, and dynamic padding otherwise.
+       Static padding is necessary for ONNX exporting of models.
+
+    Args:
+        image_size (int or tuple): Size of the image.
+
+    Returns:
+        MaxPool2dDynamicSamePadding or MaxPool2dStaticSamePadding.
+    """
+    if image_size is None:
+        return MaxPool2dDynamicSamePadding
+    else:
+        return partial(MaxPool2dStaticSamePadding, image_size=image_size)
+
+
+
+def get_width_and_height_from_size(x) +
+
+

Obtain height and width from x.

+

Args

+
+
x : int, tuple or list
+
Data size.
+
+

Returns

+
+
size
+
A tuple or list (H,W).
+
+
+ +Expand source code + +
def get_width_and_height_from_size(x):
+    """Obtain height and width from x.
+
+    Args:
+        x (int, tuple or list): Data size.
+
+    Returns:
+        size: A tuple or list (H,W).
+    """
+    if isinstance(x, int):
+        return x, x
+    if isinstance(x, list) or isinstance(x, tuple):
+        return x
+    else:
+        raise TypeError()
+
+
+
+def round_filters(filters, global_params) +
+
+

Calculate and round number of filters based on width multiplier. +Use width_coefficient, depth_divisor and min_depth of global_params.

+

Args

+
+
filters : int
+
Filters number to be calculated.
+
global_params : namedtuple
+
Global params of the model.
+
+

Returns

+
+
new_filters
+
New filters number after calculating.
+
+
+ +Expand source code + +
def round_filters(filters, global_params):
+    """Calculate and round number of filters based on width multiplier.
+       Use width_coefficient, depth_divisor and min_depth of global_params.
+
+    Args:
+        filters (int): Filters number to be calculated.
+        global_params (namedtuple): Global params of the model.
+
+    Returns:
+        new_filters: New filters number after calculating.
+    """
+    multiplier = global_params.width_coefficient
+    if not multiplier:
+        return filters
+    divisor = global_params.depth_divisor
+    min_depth = global_params.min_depth
+    filters *= multiplier
+    min_depth = min_depth or divisor  # pay attention to this line when using min_depth
+    # follow the formula transferred from official TensorFlow implementation
+    new_filters = max(min_depth, int(filters + divisor / 2) // divisor * divisor)
+    if new_filters < 0.9 * filters:  # prevent rounding by more than 10%
+        new_filters += divisor
+    return int(new_filters)
+
+
+
+def round_repeats(repeats, global_params) +
+
+

Calculate module's repeat number of a block based on depth multiplier. +Use depth_coefficient of global_params.

+

Args

+
+
repeats : int
+
num_repeat to be calculated.
+
global_params : namedtuple
+
Global params of the model.
+
+

Returns

+
+
new repeat
+
New repeat number after calculating.
+
+
+ +Expand source code + +
def round_repeats(repeats, global_params):
+    """Calculate module's repeat number of a block based on depth multiplier.
+       Use depth_coefficient of global_params.
+
+    Args:
+        repeats (int): num_repeat to be calculated.
+        global_params (namedtuple): Global params of the model.
+
+    Returns:
+        new repeat: New repeat number after calculating.
+    """
+    multiplier = global_params.depth_coefficient
+    if not multiplier:
+        return repeats
+    # follow the formula transferred from official TensorFlow implementation
+    return int(math.ceil(multiplier * repeats))
+
+
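+
+Worked arithmetic for the two scaling helpers above, using the EfficientNet-B7 coefficients (width 2.0, depth 3.1) and the default depth_divisor of 8:
+
+from carvekit.ml.arch.tracerb7.effi_utils import create_block_args, round_filters, round_repeats
+
+_, global_params = create_block_args(width_coefficient=2.0, depth_coefficient=3.1)
+print(round_filters(32, global_params))  # 64  (32 * 2.0, already a multiple of 8)
+print(round_filters(40, global_params))  # 80  (40 * 2.0 -> 80)
+print(round_repeats(4, global_params))   # 13  (ceil(4 * 3.1))
+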
+
+
+
+

Classes

+
+
+class BlockArgs +(num_repeat=None, kernel_size=None, stride=None, expand_ratio=None, input_filters=None, output_filters=None, se_ratio=None, id_skip=None) +
+
+

BlockArgs(num_repeat, kernel_size, stride, expand_ratio, input_filters, output_filters, se_ratio, id_skip)

+

Ancestors

+
    +
  • builtins.tuple
  • +
+

Instance variables

+
+
var expand_ratio
+
+

Alias for field number 3

+
+
var id_skip
+
+

Alias for field number 7

+
+
var input_filters
+
+

Alias for field number 4

+
+
var kernel_size
+
+

Alias for field number 1

+
+
var num_repeat
+
+

Alias for field number 0

+
+
var output_filters
+
+

Alias for field number 5

+
+
var se_ratio
+
+

Alias for field number 6

+
+
var stride
+
+

Alias for field number 2

+
+
+
+
+class BlockDecoder +
+
+

Block Decoder for readability, +straight from the official TensorFlow repository.

+
+ +Expand source code + +
class BlockDecoder(object):
+    """Block Decoder for readability,
+    straight from the official TensorFlow repository.
+    """
+
+    @staticmethod
+    def _decode_block_string(block_string):
+        """Get a block through a string notation of arguments.
+
+        Args:
+            block_string (str): A string notation of arguments.
+                                Examples: 'r1_k3_s11_e1_i32_o16_se0.25_noskip'.
+
+        Returns:
+            BlockArgs: The namedtuple defined at the top of this file.
+        """
+        assert isinstance(block_string, str)
+
+        ops = block_string.split("_")
+        options = {}
+        for op in ops:
+            splits = re.split(r"(\d.*)", op)
+            if len(splits) >= 2:
+                key, value = splits[:2]
+                options[key] = value
+
+        # Check stride
+        assert ("s" in options and len(options["s"]) == 1) or (
+            len(options["s"]) == 2 and options["s"][0] == options["s"][1]
+        )
+
+        return BlockArgs(
+            num_repeat=int(options["r"]),
+            kernel_size=int(options["k"]),
+            stride=[int(options["s"][0])],
+            expand_ratio=int(options["e"]),
+            input_filters=int(options["i"]),
+            output_filters=int(options["o"]),
+            se_ratio=float(options["se"]) if "se" in options else None,
+            id_skip=("noskip" not in block_string),
+        )
+
+    @staticmethod
+    def _encode_block_string(block):
+        """Encode a block to a string.
+
+        Args:
+            block (namedtuple): A BlockArgs type argument.
+
+        Returns:
+            block_string: A String form of BlockArgs.
+        """
+        args = [
+            "r%d" % block.num_repeat,
+            "k%d" % block.kernel_size,
+            "s%d%d" % (block.strides[0], block.strides[1]),
+            "e%s" % block.expand_ratio,
+            "i%d" % block.input_filters,
+            "o%d" % block.output_filters,
+        ]
+        if 0 < block.se_ratio <= 1:
+            args.append("se%s" % block.se_ratio)
+        if block.id_skip is False:
+            args.append("noskip")
+        return "_".join(args)
+
+    @staticmethod
+    def decode(string_list):
+        """Decode a list of string notations to specify blocks inside the network.
+
+        Args:
+            string_list (list[str]): A list of strings, each string is a notation of block.
+
+        Returns:
+            blocks_args: A list of BlockArgs namedtuples of block args.
+        """
+        assert isinstance(string_list, list)
+        blocks_args = []
+        for block_string in string_list:
+            blocks_args.append(BlockDecoder._decode_block_string(block_string))
+        return blocks_args
+
+    @staticmethod
+    def encode(blocks_args):
+        """Encode a list of BlockArgs to a list of strings.
+
+        Args:
+            blocks_args (list[namedtuples]): A list of BlockArgs namedtuples of block args.
+
+        Returns:
+            block_strings: A list of strings, each string is a notation of block.
+        """
+        block_strings = []
+        for block in blocks_args:
+            block_strings.append(BlockDecoder._encode_block_string(block))
+        return block_strings
+
+

Static methods

+
+
+def decode(string_list) +
+
+

Decode a list of string notations to specify blocks inside the network.

+

Args

+
+
string_list : list[str]
+
A list of strings, each string is a notation of block.
+
+

Returns

+
+
blocks_args
+
A list of BlockArgs namedtuples of block args.
+
+
+ +Expand source code + +
@staticmethod
+def decode(string_list):
+    """Decode a list of string notations to specify blocks inside the network.
+
+    Args:
+        string_list (list[str]): A list of strings, each string is a notation of block.
+
+    Returns:
+        blocks_args: A list of BlockArgs namedtuples of block args.
+    """
+    assert isinstance(string_list, list)
+    blocks_args = []
+    for block_string in string_list:
+        blocks_args.append(BlockDecoder._decode_block_string(block_string))
+    return blocks_args
+
+
+
+def encode(blocks_args) +
+
+

Encode a list of BlockArgs to a list of strings.

+

Args

+
+
blocks_args : list[namedtuples]
+
A list of BlockArgs namedtuples of block args.
+
+

Returns

+
+
block_strings
+
A list of strings, each string is a notation of block.
+
+
+ +Expand source code + +
@staticmethod
+def encode(blocks_args):
+    """Encode a list of BlockArgs to a list of strings.
+
+    Args:
+        blocks_args (list[namedtuples]): A list of BlockArgs namedtuples of block args.
+
+    Returns:
+        block_strings: A list of strings, each string is a notation of block.
+    """
+    block_strings = []
+    for block in blocks_args:
+        block_strings.append(BlockDecoder._encode_block_string(block))
+    return block_strings
+
+
+
+
+
+class Conv2dDynamicSamePadding +(in_channels, out_channels, kernel_size, stride=1, dilation=1, groups=1, bias=True) +
+
+

2D Convolutions like TensorFlow, for a dynamic image size. The padding is computed dynamically in the forward pass.

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Conv2dDynamicSamePadding(nn.Conv2d):
+    """2D Convolutions like TensorFlow, for a dynamic image size.
+    The padding is operated in forward function by calculating dynamically.
+    """
+
+    # Tips for 'SAME' mode padding.
+    #     Given the following:
+    #         i: width or height
+    #         s: stride
+    #         k: kernel size
+    #         d: dilation
+    #         p: padding
+    #     Output after Conv2d:
+    #         o = floor((i+p-((k-1)*d+1))/s+1)
+    # If o equals i, i = floor((i+p-((k-1)*d+1))/s+1),
+    # => p = (i-1)*s+((k-1)*d+1)-i
+
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        dilation=1,
+        groups=1,
+        bias=True,
+    ):
+        super().__init__(
+            in_channels, out_channels, kernel_size, stride, 0, dilation, groups, bias
+        )
+        self.stride = self.stride if len(self.stride) == 2 else [self.stride[0]] * 2
+
+    def forward(self, x):
+        ih, iw = x.size()[-2:]
+        kh, kw = self.weight.size()[-2:]
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(
+            iw / sw
+        )  # change the output size according to stride ! ! !
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            x = F.pad(
+                x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
+            )
+        return F.conv2d(
+            x,
+            self.weight,
+            self.bias,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.groups,
+        )
+
+

Ancestors

+
    +
  • torch.nn.modules.conv.Conv2d
  • +
  • torch.nn.modules.conv._ConvNd
  • +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    ih, iw = x.size()[-2:]
+    kh, kw = self.weight.size()[-2:]
+    sh, sw = self.stride
+    oh, ow = math.ceil(ih / sh), math.ceil(
+        iw / sw
+    )  # change the output size according to stride ! ! !
+    pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+    pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+    if pad_h > 0 or pad_w > 0:
+        x = F.pad(
+            x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
+        )
+    return F.conv2d(
+        x,
+        self.weight,
+        self.bias,
+        self.stride,
+        self.padding,
+        self.dilation,
+        self.groups,
+    )
+
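For example, with an input of height 5, stride 2, kernel 3 and dilation 1, the formula above gives o = ceil(5 / 2) = 3 and p = (3 - 1) * 2 + (3 - 1) * 1 + 1 - 5 = 2, i.e. one pixel of padding on each side. A minimal usage sketch (assuming the class is importable from carvekit.ml.arch.tracerb7.effi_utils):

import torch
from carvekit.ml.arch.tracerb7.effi_utils import Conv2dDynamicSamePadding

conv = Conv2dDynamicSamePadding(in_channels=3, out_channels=8, kernel_size=3, stride=2)
x = torch.randn(1, 3, 5, 5)
# 'SAME' behaviour: the output spatial size depends only on the stride, not the kernel size
assert conv(x).shape[-2:] == (3, 3)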
+
+
+
+
+class Conv2dStaticSamePadding +(in_channels, out_channels, kernel_size, stride=1, image_size=None, **kwargs) +
+
+

2D Convolutions like TensorFlow's 'SAME' mode, with the given input image size. The padding module is computed in the constructor, then reused in forward.

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Conv2dStaticSamePadding(nn.Conv2d):
+    """2D Convolutions like TensorFlow's 'SAME' mode, with the given input image size.
+    The padding module is computed in the constructor, then reused in forward.
+    """
+
+    # With the same calculation as Conv2dDynamicSamePadding
+
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        image_size=None,
+        **kwargs
+    ):
+        super().__init__(in_channels, out_channels, kernel_size, stride, **kwargs)
+        self.stride = self.stride if len(self.stride) == 2 else [self.stride[0]] * 2
+
+        # Calculate padding based on image size and save it
+        assert image_size is not None
+        ih, iw = (image_size, image_size) if isinstance(image_size, int) else image_size
+        kh, kw = self.weight.size()[-2:]
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            self.static_padding = nn.ZeroPad2d(
+                (pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2)
+            )
+        else:
+            self.static_padding = nn.Identity()
+
+    def forward(self, x):
+        x = self.static_padding(x)
+        x = F.conv2d(
+            x,
+            self.weight,
+            self.bias,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.groups,
+        )
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.conv.Conv2d
  • +
  • torch.nn.modules.conv._ConvNd
  • +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.static_padding(x)
+    x = F.conv2d(
+        x,
+        self.weight,
+        self.bias,
+        self.stride,
+        self.padding,
+        self.dilation,
+        self.groups,
+    )
+    return x
+
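The static variant computes the same padding once from a known input size instead of on every call; a short sketch under the same import assumption:

import torch
from carvekit.ml.arch.tracerb7.effi_utils import Conv2dStaticSamePadding

conv = Conv2dStaticSamePadding(3, 8, kernel_size=3, stride=1, image_size=224, bias=False)
x = torch.randn(1, 3, 224, 224)
# static_padding is an nn.ZeroPad2d picked so that stride 1 preserves the spatial size
assert conv(x).shape[-2:] == (224, 224)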
+
+
+
+
+class GlobalParams +(width_coefficient=None, depth_coefficient=None, image_size=None, dropout_rate=None, num_classes=None, batch_norm_momentum=None, batch_norm_epsilon=None, drop_connect_rate=None, depth_divisor=None, min_depth=None, include_top=None) +
+
+

GlobalParams(width_coefficient, depth_coefficient, image_size, dropout_rate, num_classes, batch_norm_momentum, batch_norm_epsilon, drop_connect_rate, depth_divisor, min_depth, include_top)

+

Ancestors

+
    +
  • builtins.tuple
  • +
+

Instance variables

+
+
var batch_norm_epsilon
+
+

Alias for field number 6

+
+
var batch_norm_momentum
+
+

Alias for field number 5

+
+
var depth_coefficient
+
+

Alias for field number 1

+
+
var depth_divisor
+
+

Alias for field number 8

+
+
var drop_connect_rate
+
+

Alias for field number 7

+
+
var dropout_rate
+
+

Alias for field number 3

+
+
var image_size
+
+

Alias for field number 2

+
+
var include_top
+
+

Alias for field number 10

+
+
var min_depth
+
+

Alias for field number 9

+
+
var num_classes
+
+

Alias for field number 4

+
+
var width_coefficient
+
+

Alias for field number 0

+
+
+
+
+class MaxPool2dDynamicSamePadding +(kernel_size, stride, padding=0, dilation=1, return_indices=False, ceil_mode=False) +
+
+

2D MaxPooling like TensorFlow's 'SAME' mode, with a dynamic image size. The padding is computed dynamically in the forward pass.

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class MaxPool2dDynamicSamePadding(nn.MaxPool2d):
+    """2D MaxPooling like TensorFlow's 'SAME' mode, with a dynamic image size.
+    The padding is computed dynamically in the forward pass.
+    """
+
+    def __init__(
+        self,
+        kernel_size,
+        stride,
+        padding=0,
+        dilation=1,
+        return_indices=False,
+        ceil_mode=False,
+    ):
+        super().__init__(
+            kernel_size, stride, padding, dilation, return_indices, ceil_mode
+        )
+        self.stride = [self.stride] * 2 if isinstance(self.stride, int) else self.stride
+        self.kernel_size = (
+            [self.kernel_size] * 2
+            if isinstance(self.kernel_size, int)
+            else self.kernel_size
+        )
+        self.dilation = (
+            [self.dilation] * 2 if isinstance(self.dilation, int) else self.dilation
+        )
+
+    def forward(self, x):
+        ih, iw = x.size()[-2:]
+        kh, kw = self.kernel_size
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            x = F.pad(
+                x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
+            )
+        return F.max_pool2d(
+            x,
+            self.kernel_size,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.ceil_mode,
+            self.return_indices,
+        )
+
+

Ancestors

+
    +
  • torch.nn.modules.pooling.MaxPool2d
  • +
  • torch.nn.modules.pooling._MaxPoolNd
  • +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    ih, iw = x.size()[-2:]
+    kh, kw = self.kernel_size
+    sh, sw = self.stride
+    oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+    pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+    pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+    if pad_h > 0 or pad_w > 0:
+        x = F.pad(
+            x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
+        )
+    return F.max_pool2d(
+        x,
+        self.kernel_size,
+        self.stride,
+        self.padding,
+        self.dilation,
+        self.ceil_mode,
+        self.return_indices,
+    )
+
+
+
+
+
+class MaxPool2dStaticSamePadding +(kernel_size, stride, image_size=None, **kwargs) +
+
+

2D MaxPooling like TensorFlow's 'SAME' mode, with the given input image size. The padding module is computed in the constructor, then reused in forward.

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class MaxPool2dStaticSamePadding(nn.MaxPool2d):
+    """2D MaxPooling like TensorFlow's 'SAME' mode, with the given input image size.
+    The padding module is computed in the constructor, then reused in forward.
+    """
+
+    def __init__(self, kernel_size, stride, image_size=None, **kwargs):
+        super().__init__(kernel_size, stride, **kwargs)
+        self.stride = [self.stride] * 2 if isinstance(self.stride, int) else self.stride
+        self.kernel_size = (
+            [self.kernel_size] * 2
+            if isinstance(self.kernel_size, int)
+            else self.kernel_size
+        )
+        self.dilation = (
+            [self.dilation] * 2 if isinstance(self.dilation, int) else self.dilation
+        )
+
+        # Calculate padding based on image size and save it
+        assert image_size is not None
+        ih, iw = (image_size, image_size) if isinstance(image_size, int) else image_size
+        kh, kw = self.kernel_size
+        sh, sw = self.stride
+        oh, ow = math.ceil(ih / sh), math.ceil(iw / sw)
+        pad_h = max((oh - 1) * self.stride[0] + (kh - 1) * self.dilation[0] + 1 - ih, 0)
+        pad_w = max((ow - 1) * self.stride[1] + (kw - 1) * self.dilation[1] + 1 - iw, 0)
+        if pad_h > 0 or pad_w > 0:
+            self.static_padding = nn.ZeroPad2d(
+                (pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2)
+            )
+        else:
+            self.static_padding = nn.Identity()
+
+    def forward(self, x):
+        x = self.static_padding(x)
+        x = F.max_pool2d(
+            x,
+            self.kernel_size,
+            self.stride,
+            self.padding,
+            self.dilation,
+            self.ceil_mode,
+            self.return_indices,
+        )
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.pooling.MaxPool2d
  • +
  • torch.nn.modules.pooling._MaxPoolNd
  • +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = self.static_padding(x)
+    x = F.max_pool2d(
+        x,
+        self.kernel_size,
+        self.stride,
+        self.padding,
+        self.dilation,
+        self.ceil_mode,
+        self.return_indices,
+    )
+    return x
+
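The pooling variants mirror the convolution classes; for instance (a sketch, same import assumption):

import torch
from carvekit.ml.arch.tracerb7.effi_utils import MaxPool2dStaticSamePadding

pool = MaxPool2dStaticSamePadding(kernel_size=3, stride=2, image_size=64)
x = torch.randn(1, 8, 64, 64)
# 'SAME' pooling: output size is ceil(64 / 2) = 32, padding applied by the stored nn.ZeroPad2d
assert pool(x).shape[-2:] == (32, 32)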
+
+
+
+
+class MemoryEfficientSwish +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class MemoryEfficientSwish(nn.Module):
+    def forward(self, x):
+        return SwishImplementation.apply(x)
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    return SwishImplementation.apply(x)
+
+
+
+
+
+class Swish +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Swish(nn.Module):
+    def forward(self, x):
+        return x * torch.sigmoid(x)
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    return x * torch.sigmoid(x)
+
+
+
+
+
+class SwishImplementation +(*args, **kwargs) +
+
+

Base class to create custom autograd.Function

+

To create a custom autograd.Function, subclass this class and implement +the :meth:forward and :meth:backward static methods. Then, to use your custom +op in the forward pass, call the class method apply. Do not call +:meth:forward directly.

+

To ensure correctness and best performance, make sure you are calling the +correct methods on ctx and validating your backward function using +:func:torch.autograd.gradcheck.

+

See :ref:extending-autograd for more details on how to use this class.

+

Examples::

+
>>> class Exp(Function):
+>>>     @staticmethod
+>>>     def forward(ctx, i):
+>>>         result = i.exp()
+>>>         ctx.save_for_backward(result)
+>>>         return result
+>>>
+>>>     @staticmethod
+>>>     def backward(ctx, grad_output):
+>>>         result, = ctx.saved_tensors
+>>>         return grad_output * result
+>>>
+>>> # Use it by calling the apply method:
+>>> output = Exp.apply(input)
+
+
+ +Expand source code + +
class SwishImplementation(torch.autograd.Function):
+    @staticmethod
+    def forward(ctx, i):
+        result = i * torch.sigmoid(i)
+        ctx.save_for_backward(i)
+        return result
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        i = ctx.saved_tensors[0]
+        sigmoid_i = torch.sigmoid(i)
+        return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i)))
+
+

Ancestors

+
    +
  • torch.autograd.function.Function
  • +
  • torch._C._FunctionBase
  • +
  • torch.autograd.function.FunctionCtx
  • +
  • torch.autograd.function._HookMixin
  • +
+

Static methods

+
+
+def backward(ctx, grad_output) +
+
+

Defines a formula for differentiating the operation with backward mode +automatic differentiation (alias to the vjp function).

+

This function is to be overridden by all subclasses.

+

It must accept a context :attr:ctx as the first argument, followed by +as many outputs as the :func:forward returned (None will be passed in +for non tensor outputs of the forward function), +and it should return as many tensors, as there were inputs to +:func:forward. Each argument is the gradient w.r.t the given output, +and each returned value should be the gradient w.r.t. the +corresponding input. If an input is not a Tensor or is a Tensor not +requiring grads, you can just pass None as a gradient for that input.

+

The context can be used to retrieve tensors saved during the forward +pass. It also has an attribute :attr:ctx.needs_input_grad as a tuple +of booleans representing whether each input needs gradient. E.g., +:func:backward will have ctx.needs_input_grad[0] = True if the +first input to :func:forward needs gradient computated w.r.t. the +output.

+
+ +Expand source code + +
@staticmethod
+def backward(ctx, grad_output):
+    i = ctx.saved_tensors[0]
+    sigmoid_i = torch.sigmoid(i)
+    return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i)))
+
+
+
+def forward(ctx, i) +
+
+

Performs the operation.

+

This function is to be overridden by all subclasses.

+

It must accept a context ctx as the first argument, followed by any +number of arguments (tensors or other types).

+

The context can be used to store arbitrary data that can be then +retrieved during the backward pass. Tensors should not be stored +directly on ctx (though this is not currently enforced for +backward compatibility). Instead, tensors should be saved either with +:func:ctx.save_for_backward if they are intended to be used in +backward (equivalently, vjp) or :func:ctx.save_for_forward +if they are intended to be used for in jvp.

+
+ +Expand source code + +
@staticmethod
+def forward(ctx, i):
+    result = i * torch.sigmoid(i)
+    ctx.save_for_backward(i)
+    return result
+
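The backward above implements d/dx [x * sigmoid(x)] = sigmoid(x) * (1 + x * (1 - sigmoid(x))), which is why only the input tensor needs to be saved. A small sketch to verify that the memory-efficient variant matches the plain Swish module and passes a gradient check (assuming both are importable from carvekit.ml.arch.tracerb7.effi_utils):

import torch
from carvekit.ml.arch.tracerb7.effi_utils import Swish, MemoryEfficientSwish

x = torch.randn(8, dtype=torch.double, requires_grad=True)
# Same forward value; the custom autograd.Function saves only x and recomputes
# sigmoid(x) in backward instead of keeping intermediate activations.
assert torch.allclose(Swish()(x), MemoryEfficientSwish()(x))
torch.autograd.gradcheck(MemoryEfficientSwish(), (x,))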
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/tracerb7/efficientnet.html b/docs/api/carvekit/ml/arch/tracerb7/efficientnet.html new file mode 100644 index 0000000..192582a --- /dev/null +++ b/docs/api/carvekit/ml/arch/tracerb7/efficientnet.html @@ -0,0 +1,1077 @@ + + + + + + +carvekit.ml.arch.tracerb7.efficientnet API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.tracerb7.efficientnet

+
+
+

Source url: https://github.com/lukemelas/EfficientNet-PyTorch +Modified by Min Seok Lee, Wooseok Shin, Nikita Selin +License: Apache License 2.0

+

Changes

+
    +
  • Added support for extracting edge features
  • +
  • Added support for extracting object features at different levels
  • +
  • Refactored the code
  • +
+
+ +Expand source code + +
"""
+Source url: https://github.com/lukemelas/EfficientNet-PyTorch
+Modified by Min Seok Lee, Wooseok Shin, Nikita Selin
+License: Apache License 2.0
+Changes:
+    - Added support for extracting edge features
+    - Added support for extracting object features at different levels
+    - Refactored the code
+"""
+from typing import Any, List
+
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+from carvekit.ml.arch.tracerb7.effi_utils import (
+    get_same_padding_conv2d,
+    calculate_output_image_size,
+    MemoryEfficientSwish,
+    drop_connect,
+    round_filters,
+    round_repeats,
+    Swish,
+    create_block_args,
+)
+
+
+class MBConvBlock(nn.Module):
+    """Mobile Inverted Residual Bottleneck Block.
+
+    Args:
+        block_args (namedtuple): BlockArgs, defined in utils.py.
+        global_params (namedtuple): GlobalParam, defined in utils.py.
+        image_size (tuple or list): [image_height, image_width].
+
+    References:
+        [1] https://arxiv.org/abs/1704.04861 (MobileNet v1)
+        [2] https://arxiv.org/abs/1801.04381 (MobileNet v2)
+        [3] https://arxiv.org/abs/1905.02244 (MobileNet v3)
+    """
+
+    def __init__(self, block_args, global_params, image_size=None):
+        super().__init__()
+        self._block_args = block_args
+        self._bn_mom = (
+            1 - global_params.batch_norm_momentum
+        )  # pytorch's difference from tensorflow
+        self._bn_eps = global_params.batch_norm_epsilon
+        self.has_se = (self._block_args.se_ratio is not None) and (
+            0 < self._block_args.se_ratio <= 1
+        )
+        self.id_skip = (
+            block_args.id_skip
+        )  # whether to use skip connection and drop connect
+
+        # Expansion phase (Inverted Bottleneck)
+        inp = self._block_args.input_filters  # number of input channels
+        oup = (
+            self._block_args.input_filters * self._block_args.expand_ratio
+        )  # number of output channels
+        if self._block_args.expand_ratio != 1:
+            Conv2d = get_same_padding_conv2d(image_size=image_size)
+            self._expand_conv = Conv2d(
+                in_channels=inp, out_channels=oup, kernel_size=1, bias=False
+            )
+            self._bn0 = nn.BatchNorm2d(
+                num_features=oup, momentum=self._bn_mom, eps=self._bn_eps
+            )
+            # image_size = calculate_output_image_size(image_size, 1) <-- this wouldn't modify image_size
+
+        # Depthwise convolution phase
+        k = self._block_args.kernel_size
+        s = self._block_args.stride
+        Conv2d = get_same_padding_conv2d(image_size=image_size)
+        self._depthwise_conv = Conv2d(
+            in_channels=oup,
+            out_channels=oup,
+            groups=oup,  # groups makes it depthwise
+            kernel_size=k,
+            stride=s,
+            bias=False,
+        )
+        self._bn1 = nn.BatchNorm2d(
+            num_features=oup, momentum=self._bn_mom, eps=self._bn_eps
+        )
+        image_size = calculate_output_image_size(image_size, s)
+
+        # Squeeze and Excitation layer, if desired
+        if self.has_se:
+            Conv2d = get_same_padding_conv2d(image_size=(1, 1))
+            num_squeezed_channels = max(
+                1, int(self._block_args.input_filters * self._block_args.se_ratio)
+            )
+            self._se_reduce = Conv2d(
+                in_channels=oup, out_channels=num_squeezed_channels, kernel_size=1
+            )
+            self._se_expand = Conv2d(
+                in_channels=num_squeezed_channels, out_channels=oup, kernel_size=1
+            )
+
+        # Pointwise convolution phase
+        final_oup = self._block_args.output_filters
+        Conv2d = get_same_padding_conv2d(image_size=image_size)
+        self._project_conv = Conv2d(
+            in_channels=oup, out_channels=final_oup, kernel_size=1, bias=False
+        )
+        self._bn2 = nn.BatchNorm2d(
+            num_features=final_oup, momentum=self._bn_mom, eps=self._bn_eps
+        )
+        self._swish = MemoryEfficientSwish()
+
+    def forward(self, inputs, drop_connect_rate=None):
+        """MBConvBlock's forward function.
+
+        Args:
+            inputs (tensor): Input tensor.
+            drop_connect_rate (bool): Drop connect rate (float, between 0 and 1).
+
+        Returns:
+            Output of this block after processing.
+        """
+
+        # Expansion and Depthwise Convolution
+        x = inputs
+        if self._block_args.expand_ratio != 1:
+            x = self._expand_conv(inputs)
+            x = self._bn0(x)
+            x = self._swish(x)
+
+        x = self._depthwise_conv(x)
+        x = self._bn1(x)
+        x = self._swish(x)
+
+        # Squeeze and Excitation
+        if self.has_se:
+            x_squeezed = F.adaptive_avg_pool2d(x, 1)
+            x_squeezed = self._se_reduce(x_squeezed)
+            x_squeezed = self._swish(x_squeezed)
+            x_squeezed = self._se_expand(x_squeezed)
+            x = torch.sigmoid(x_squeezed) * x
+
+        # Pointwise Convolution
+        x = self._project_conv(x)
+        x = self._bn2(x)
+
+        # Skip connection and drop connect
+        input_filters, output_filters = (
+            self._block_args.input_filters,
+            self._block_args.output_filters,
+        )
+        if (
+            self.id_skip
+            and self._block_args.stride == 1
+            and input_filters == output_filters
+        ):
+            # The combination of skip connection and drop connect brings about stochastic depth.
+            if drop_connect_rate:
+                x = drop_connect(x, p=drop_connect_rate, training=self.training)
+            x = x + inputs  # skip connection
+        return x
+
+    def set_swish(self, memory_efficient=True):
+        """Sets swish function as memory efficient (for training) or standard (for export).
+
+        Args:
+            memory_efficient (bool): Whether to use memory-efficient version of swish.
+        """
+        self._swish = MemoryEfficientSwish() if memory_efficient else Swish()
+
+
+class EfficientNet(nn.Module):
+    def __init__(self, blocks_args=None, global_params=None):
+        super().__init__()
+        assert isinstance(blocks_args, list), "blocks_args should be a list"
+        assert len(blocks_args) > 0, "block args must be greater than 0"
+        self._global_params = global_params
+        self._blocks_args = blocks_args
+
+        # Batch norm parameters
+        bn_mom = 1 - self._global_params.batch_norm_momentum
+        bn_eps = self._global_params.batch_norm_epsilon
+
+        # Get stem static or dynamic convolution depending on image size
+        image_size = global_params.image_size
+        Conv2d = get_same_padding_conv2d(image_size=image_size)
+
+        # Stem
+        in_channels = 3  # rgb
+        out_channels = round_filters(
+            32, self._global_params
+        )  # number of output channels
+        self._conv_stem = Conv2d(
+            in_channels, out_channels, kernel_size=3, stride=2, bias=False
+        )
+        self._bn0 = nn.BatchNorm2d(
+            num_features=out_channels, momentum=bn_mom, eps=bn_eps
+        )
+        image_size = calculate_output_image_size(image_size, 2)
+
+        # Build blocks
+        self._blocks = nn.ModuleList([])
+        for block_args in self._blocks_args:
+
+            # Update block input and output filters based on depth multiplier.
+            block_args = block_args._replace(
+                input_filters=round_filters(
+                    block_args.input_filters, self._global_params
+                ),
+                output_filters=round_filters(
+                    block_args.output_filters, self._global_params
+                ),
+                num_repeat=round_repeats(block_args.num_repeat, self._global_params),
+            )
+
+            # The first block needs to take care of stride and filter size increase.
+            self._blocks.append(
+                MBConvBlock(block_args, self._global_params, image_size=image_size)
+            )
+            image_size = calculate_output_image_size(image_size, block_args.stride)
+            if block_args.num_repeat > 1:  # modify block_args to keep same output size
+                block_args = block_args._replace(
+                    input_filters=block_args.output_filters, stride=1
+                )
+            for _ in range(block_args.num_repeat - 1):
+                self._blocks.append(
+                    MBConvBlock(block_args, self._global_params, image_size=image_size)
+                )
+                # image_size = calculate_output_image_size(image_size, block_args.stride)  # stride = 1
+
+        self._swish = MemoryEfficientSwish()
+
+    def set_swish(self, memory_efficient=True):
+        """Sets swish function as memory efficient (for training) or standard (for export).
+
+        Args:
+            memory_efficient (bool): Whether to use memory-efficient version of swish.
+
+        """
+        self._swish = MemoryEfficientSwish() if memory_efficient else Swish()
+        for block in self._blocks:
+            block.set_swish(memory_efficient)
+
+    def extract_endpoints(self, inputs):
+        endpoints = dict()
+
+        # Stem
+        x = self._swish(self._bn0(self._conv_stem(inputs)))
+        prev_x = x
+
+        # Blocks
+        for idx, block in enumerate(self._blocks):
+            drop_connect_rate = self._global_params.drop_connect_rate
+            if drop_connect_rate:
+                drop_connect_rate *= float(idx) / len(
+                    self._blocks
+                )  # scale drop connect_rate
+            x = block(x, drop_connect_rate=drop_connect_rate)
+            if prev_x.size(2) > x.size(2):
+                endpoints["reduction_{}".format(len(endpoints) + 1)] = prev_x
+            prev_x = x
+
+        # Head
+        x = self._swish(self._bn1(self._conv_head(x)))
+        endpoints["reduction_{}".format(len(endpoints) + 1)] = x
+
+        return endpoints
+
+    def _change_in_channels(self, in_channels):
+        """Adjust model's first convolution layer to in_channels, if in_channels not equals 3.
+
+        Args:
+            in_channels (int): Input data's channel number.
+        """
+        if in_channels != 3:
+            Conv2d = get_same_padding_conv2d(image_size=self._global_params.image_size)
+            out_channels = round_filters(32, self._global_params)
+            self._conv_stem = Conv2d(
+                in_channels, out_channels, kernel_size=3, stride=2, bias=False
+            )
+
+
+class EfficientEncoderB7(EfficientNet):
+    def __init__(self):
+        super().__init__(
+            *create_block_args(
+                width_coefficient=2.0,
+                depth_coefficient=3.1,
+                dropout_rate=0.5,
+                image_size=600,
+            )
+        )
+        self._change_in_channels(3)
+        self.block_idx = [10, 17, 37, 54]
+        self.channels = [48, 80, 224, 640]
+
+    def initial_conv(self, inputs):
+        x = self._swish(self._bn0(self._conv_stem(inputs)))
+        return x
+
+    def get_blocks(self, x, H, W, block_idx):
+        features = []
+        for idx, block in enumerate(self._blocks):
+            drop_connect_rate = self._global_params.drop_connect_rate
+            if drop_connect_rate:
+                drop_connect_rate *= float(idx) / len(
+                    self._blocks
+                )  # scale drop connect_rate
+            x = block(x, drop_connect_rate=drop_connect_rate)
+            if idx == block_idx[0]:
+                features.append(x.clone())
+            if idx == block_idx[1]:
+                features.append(x.clone())
+            if idx == block_idx[2]:
+                features.append(x.clone())
+            if idx == block_idx[3]:
+                features.append(x.clone())
+
+        return features
+
+    def forward(self, inputs: torch.Tensor) -> List[Any]:
+        B, C, H, W = inputs.size()
+        x = self.initial_conv(inputs)  # Prepare input for the backbone
+        return self.get_blocks(
+            x, H, W, block_idx=self.block_idx
+        )  # Get backbone features and edge maps
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class EfficientEncoderB7 +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class EfficientEncoderB7(EfficientNet):
+    def __init__(self):
+        super().__init__(
+            *create_block_args(
+                width_coefficient=2.0,
+                depth_coefficient=3.1,
+                dropout_rate=0.5,
+                image_size=600,
+            )
+        )
+        self._change_in_channels(3)
+        self.block_idx = [10, 17, 37, 54]
+        self.channels = [48, 80, 224, 640]
+
+    def initial_conv(self, inputs):
+        x = self._swish(self._bn0(self._conv_stem(inputs)))
+        return x
+
+    def get_blocks(self, x, H, W, block_idx):
+        features = []
+        for idx, block in enumerate(self._blocks):
+            drop_connect_rate = self._global_params.drop_connect_rate
+            if drop_connect_rate:
+                drop_connect_rate *= float(idx) / len(
+                    self._blocks
+                )  # scale drop connect_rate
+            x = block(x, drop_connect_rate=drop_connect_rate)
+            if idx == block_idx[0]:
+                features.append(x.clone())
+            if idx == block_idx[1]:
+                features.append(x.clone())
+            if idx == block_idx[2]:
+                features.append(x.clone())
+            if idx == block_idx[3]:
+                features.append(x.clone())
+
+        return features
+
+    def forward(self, inputs: torch.Tensor) -> List[Any]:
+        B, C, H, W = inputs.size()
+        x = self.initial_conv(inputs)  # Prepare input for the backbone
+        return self.get_blocks(
+            x, H, W, block_idx=self.block_idx
+        )  # Get backbone features and edge maps
+
+

Ancestors

+ +

Methods

+
+
+def forward(self, inputs:Β torch.Tensor) ‑>Β List[Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, inputs: torch.Tensor) -> List[Any]:
+    B, C, H, W = inputs.size()
+    x = self.initial_conv(inputs)  # Prepare input for the backbone
+    return self.get_blocks(
+        x, H, W, block_idx=self.block_idx
+    )  # Get backbone features and edge maps
+
+
+
+def get_blocks(self, x, H, W, block_idx) +
+
+
+
+ +Expand source code + +
def get_blocks(self, x, H, W, block_idx):
+    features = []
+    for idx, block in enumerate(self._blocks):
+        drop_connect_rate = self._global_params.drop_connect_rate
+        if drop_connect_rate:
+            drop_connect_rate *= float(idx) / len(
+                self._blocks
+            )  # scale drop connect_rate
+        x = block(x, drop_connect_rate=drop_connect_rate)
+        if idx == block_idx[0]:
+            features.append(x.clone())
+        if idx == block_idx[1]:
+            features.append(x.clone())
+        if idx == block_idx[2]:
+            features.append(x.clone())
+        if idx == block_idx[3]:
+            features.append(x.clone())
+
+    return features
+
+
+
+def initial_conv(self, inputs) +
+
+
+
+ +Expand source code + +
def initial_conv(self, inputs):
+    x = self._swish(self._bn0(self._conv_stem(inputs)))
+    return x
+
+
+
+

Inherited members

+ +
+
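A minimal usage sketch of the encoder (randomly initialised weights; in CarveKit the pretrained weights are normally loaded by the higher-level TRACER wrapper, so this only illustrates the tensor flow):

import torch
from carvekit.ml.arch.tracerb7.efficientnet import EfficientEncoderB7

encoder = EfficientEncoderB7().eval()
with torch.no_grad():
    feats = encoder(torch.randn(1, 3, 640, 640))
# Four feature maps, captured after blocks 10, 17, 37 and 54 (encoder.block_idx);
# encoder.channels == [48, 80, 224, 640] lists their expected channel counts.
for f in feats:
    print(tuple(f.shape))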
+class EfficientNet +(blocks_args=None, global_params=None) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class EfficientNet(nn.Module):
+    def __init__(self, blocks_args=None, global_params=None):
+        super().__init__()
+        assert isinstance(blocks_args, list), "blocks_args should be a list"
+        assert len(blocks_args) > 0, "block args must be greater than 0"
+        self._global_params = global_params
+        self._blocks_args = blocks_args
+
+        # Batch norm parameters
+        bn_mom = 1 - self._global_params.batch_norm_momentum
+        bn_eps = self._global_params.batch_norm_epsilon
+
+        # Get stem static or dynamic convolution depending on image size
+        image_size = global_params.image_size
+        Conv2d = get_same_padding_conv2d(image_size=image_size)
+
+        # Stem
+        in_channels = 3  # rgb
+        out_channels = round_filters(
+            32, self._global_params
+        )  # number of output channels
+        self._conv_stem = Conv2d(
+            in_channels, out_channels, kernel_size=3, stride=2, bias=False
+        )
+        self._bn0 = nn.BatchNorm2d(
+            num_features=out_channels, momentum=bn_mom, eps=bn_eps
+        )
+        image_size = calculate_output_image_size(image_size, 2)
+
+        # Build blocks
+        self._blocks = nn.ModuleList([])
+        for block_args in self._blocks_args:
+
+            # Update block input and output filters based on depth multiplier.
+            block_args = block_args._replace(
+                input_filters=round_filters(
+                    block_args.input_filters, self._global_params
+                ),
+                output_filters=round_filters(
+                    block_args.output_filters, self._global_params
+                ),
+                num_repeat=round_repeats(block_args.num_repeat, self._global_params),
+            )
+
+            # The first block needs to take care of stride and filter size increase.
+            self._blocks.append(
+                MBConvBlock(block_args, self._global_params, image_size=image_size)
+            )
+            image_size = calculate_output_image_size(image_size, block_args.stride)
+            if block_args.num_repeat > 1:  # modify block_args to keep same output size
+                block_args = block_args._replace(
+                    input_filters=block_args.output_filters, stride=1
+                )
+            for _ in range(block_args.num_repeat - 1):
+                self._blocks.append(
+                    MBConvBlock(block_args, self._global_params, image_size=image_size)
+                )
+                # image_size = calculate_output_image_size(image_size, block_args.stride)  # stride = 1
+
+        self._swish = MemoryEfficientSwish()
+
+    def set_swish(self, memory_efficient=True):
+        """Sets swish function as memory efficient (for training) or standard (for export).
+
+        Args:
+            memory_efficient (bool): Whether to use memory-efficient version of swish.
+
+        """
+        self._swish = MemoryEfficientSwish() if memory_efficient else Swish()
+        for block in self._blocks:
+            block.set_swish(memory_efficient)
+
+    def extract_endpoints(self, inputs):
+        endpoints = dict()
+
+        # Stem
+        x = self._swish(self._bn0(self._conv_stem(inputs)))
+        prev_x = x
+
+        # Blocks
+        for idx, block in enumerate(self._blocks):
+            drop_connect_rate = self._global_params.drop_connect_rate
+            if drop_connect_rate:
+                drop_connect_rate *= float(idx) / len(
+                    self._blocks
+                )  # scale drop connect_rate
+            x = block(x, drop_connect_rate=drop_connect_rate)
+            if prev_x.size(2) > x.size(2):
+                endpoints["reduction_{}".format(len(endpoints) + 1)] = prev_x
+            prev_x = x
+
+        # Head
+        x = self._swish(self._bn1(self._conv_head(x)))
+        endpoints["reduction_{}".format(len(endpoints) + 1)] = x
+
+        return endpoints
+
+    def _change_in_channels(self, in_channels):
+        """Adjust model's first convolution layer to in_channels, if in_channels not equals 3.
+
+        Args:
+            in_channels (int): Input data's channel number.
+        """
+        if in_channels != 3:
+            Conv2d = get_same_padding_conv2d(image_size=self._global_params.image_size)
+            out_channels = round_filters(32, self._global_params)
+            self._conv_stem = Conv2d(
+                in_channels, out_channels, kernel_size=3, stride=2, bias=False
+            )
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Subclasses

+ +

Methods

+
+
+def extract_endpoints(self, inputs) +
+
+
+
+ +Expand source code + +
def extract_endpoints(self, inputs):
+    endpoints = dict()
+
+    # Stem
+    x = self._swish(self._bn0(self._conv_stem(inputs)))
+    prev_x = x
+
+    # Blocks
+    for idx, block in enumerate(self._blocks):
+        drop_connect_rate = self._global_params.drop_connect_rate
+        if drop_connect_rate:
+            drop_connect_rate *= float(idx) / len(
+                self._blocks
+            )  # scale drop connect_rate
+        x = block(x, drop_connect_rate=drop_connect_rate)
+        if prev_x.size(2) > x.size(2):
+            endpoints["reduction_{}".format(len(endpoints) + 1)] = prev_x
+        prev_x = x
+
+    # Head
+    x = self._swish(self._bn1(self._conv_head(x)))
+    endpoints["reduction_{}".format(len(endpoints) + 1)] = x
+
+    return endpoints
+
+
+
+def set_swish(self, memory_efficient=True) +
+
+

Sets swish function as memory efficient (for training) or standard (for export).

+

Args

+
+
memory_efficient : bool
+
Whether to use memory-efficient version of swish.
+
+
+ +Expand source code + +
def set_swish(self, memory_efficient=True):
+    """Sets swish function as memory efficient (for training) or standard (for export).
+
+    Args:
+        memory_efficient (bool): Whether to use memory-efficient version of swish.
+
+    """
+    self._swish = MemoryEfficientSwish() if memory_efficient else Swish()
+    for block in self._blocks:
+        block.set_swish(memory_efficient)
+
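EfficientNet itself is assembled from a (blocks_args, global_params) pair, exactly as EfficientEncoderB7 does via create_block_args. A construction sketch (the exact defaults produced by create_block_args are an assumption here, not something shown in this file):

from carvekit.ml.arch.tracerb7.effi_utils import create_block_args
from carvekit.ml.arch.tracerb7.efficientnet import EfficientNet

blocks_args, global_params = create_block_args(
    width_coefficient=1.0, depth_coefficient=1.0, dropout_rate=0.2, image_size=224
)
net = EfficientNet(blocks_args, global_params)
# One MBConvBlock per repeated entry; depth_coefficient scales num_repeat per stage.
print(len(net._blocks))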
+
+
+
+
+class MBConvBlock +(block_args, global_params, image_size=None) +
+
+

Mobile Inverted Residual Bottleneck Block.

+

Args

+
+
block_args : namedtuple
+
BlockArgs, defined in utils.py.
+
global_params : namedtuple
+
GlobalParam, defined in utils.py.
+
image_size : tuple or list
+
[image_height, image_width].
+
+

References

+

[1] https://arxiv.org/abs/1704.04861 (MobileNet v1) +[2] https://arxiv.org/abs/1801.04381 (MobileNet v2) +[3] https://arxiv.org/abs/1905.02244 (MobileNet v3)

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class MBConvBlock(nn.Module):
+    """Mobile Inverted Residual Bottleneck Block.
+
+    Args:
+        block_args (namedtuple): BlockArgs, defined in utils.py.
+        global_params (namedtuple): GlobalParam, defined in utils.py.
+        image_size (tuple or list): [image_height, image_width].
+
+    References:
+        [1] https://arxiv.org/abs/1704.04861 (MobileNet v1)
+        [2] https://arxiv.org/abs/1801.04381 (MobileNet v2)
+        [3] https://arxiv.org/abs/1905.02244 (MobileNet v3)
+    """
+
+    def __init__(self, block_args, global_params, image_size=None):
+        super().__init__()
+        self._block_args = block_args
+        self._bn_mom = (
+            1 - global_params.batch_norm_momentum
+        )  # pytorch's difference from tensorflow
+        self._bn_eps = global_params.batch_norm_epsilon
+        self.has_se = (self._block_args.se_ratio is not None) and (
+            0 < self._block_args.se_ratio <= 1
+        )
+        self.id_skip = (
+            block_args.id_skip
+        )  # whether to use skip connection and drop connect
+
+        # Expansion phase (Inverted Bottleneck)
+        inp = self._block_args.input_filters  # number of input channels
+        oup = (
+            self._block_args.input_filters * self._block_args.expand_ratio
+        )  # number of output channels
+        if self._block_args.expand_ratio != 1:
+            Conv2d = get_same_padding_conv2d(image_size=image_size)
+            self._expand_conv = Conv2d(
+                in_channels=inp, out_channels=oup, kernel_size=1, bias=False
+            )
+            self._bn0 = nn.BatchNorm2d(
+                num_features=oup, momentum=self._bn_mom, eps=self._bn_eps
+            )
+            # image_size = calculate_output_image_size(image_size, 1) <-- this wouldn't modify image_size
+
+        # Depthwise convolution phase
+        k = self._block_args.kernel_size
+        s = self._block_args.stride
+        Conv2d = get_same_padding_conv2d(image_size=image_size)
+        self._depthwise_conv = Conv2d(
+            in_channels=oup,
+            out_channels=oup,
+            groups=oup,  # groups makes it depthwise
+            kernel_size=k,
+            stride=s,
+            bias=False,
+        )
+        self._bn1 = nn.BatchNorm2d(
+            num_features=oup, momentum=self._bn_mom, eps=self._bn_eps
+        )
+        image_size = calculate_output_image_size(image_size, s)
+
+        # Squeeze and Excitation layer, if desired
+        if self.has_se:
+            Conv2d = get_same_padding_conv2d(image_size=(1, 1))
+            num_squeezed_channels = max(
+                1, int(self._block_args.input_filters * self._block_args.se_ratio)
+            )
+            self._se_reduce = Conv2d(
+                in_channels=oup, out_channels=num_squeezed_channels, kernel_size=1
+            )
+            self._se_expand = Conv2d(
+                in_channels=num_squeezed_channels, out_channels=oup, kernel_size=1
+            )
+
+        # Pointwise convolution phase
+        final_oup = self._block_args.output_filters
+        Conv2d = get_same_padding_conv2d(image_size=image_size)
+        self._project_conv = Conv2d(
+            in_channels=oup, out_channels=final_oup, kernel_size=1, bias=False
+        )
+        self._bn2 = nn.BatchNorm2d(
+            num_features=final_oup, momentum=self._bn_mom, eps=self._bn_eps
+        )
+        self._swish = MemoryEfficientSwish()
+
+    def forward(self, inputs, drop_connect_rate=None):
+        """MBConvBlock's forward function.
+
+        Args:
+            inputs (tensor): Input tensor.
+            drop_connect_rate (bool): Drop connect rate (float, between 0 and 1).
+
+        Returns:
+            Output of this block after processing.
+        """
+
+        # Expansion and Depthwise Convolution
+        x = inputs
+        if self._block_args.expand_ratio != 1:
+            x = self._expand_conv(inputs)
+            x = self._bn0(x)
+            x = self._swish(x)
+
+        x = self._depthwise_conv(x)
+        x = self._bn1(x)
+        x = self._swish(x)
+
+        # Squeeze and Excitation
+        if self.has_se:
+            x_squeezed = F.adaptive_avg_pool2d(x, 1)
+            x_squeezed = self._se_reduce(x_squeezed)
+            x_squeezed = self._swish(x_squeezed)
+            x_squeezed = self._se_expand(x_squeezed)
+            x = torch.sigmoid(x_squeezed) * x
+
+        # Pointwise Convolution
+        x = self._project_conv(x)
+        x = self._bn2(x)
+
+        # Skip connection and drop connect
+        input_filters, output_filters = (
+            self._block_args.input_filters,
+            self._block_args.output_filters,
+        )
+        if (
+            self.id_skip
+            and self._block_args.stride == 1
+            and input_filters == output_filters
+        ):
+            # The combination of skip connection and drop connect brings about stochastic depth.
+            if drop_connect_rate:
+                x = drop_connect(x, p=drop_connect_rate, training=self.training)
+            x = x + inputs  # skip connection
+        return x
+
+    def set_swish(self, memory_efficient=True):
+        """Sets swish function as memory efficient (for training) or standard (for export).
+
+        Args:
+            memory_efficient (bool): Whether to use memory-efficient version of swish.
+        """
+        self._swish = MemoryEfficientSwish() if memory_efficient else Swish()
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, inputs, drop_connect_rate=None) ‑>Β Callable[...,Β Any] +
+
+

MBConvBlock's forward function.

+

Args

+
+
inputs : tensor
+
Input tensor.
+
drop_connect_rate : float
+
Drop connect rate (float, between 0 and 1).
+
+

Returns

+

Output of this block after processing.

+
+ +Expand source code + +
def forward(self, inputs, drop_connect_rate=None):
+    """MBConvBlock's forward function.
+
+    Args:
+        inputs (tensor): Input tensor.
+        drop_connect_rate (bool): Drop connect rate (float, between 0 and 1).
+
+    Returns:
+        Output of this block after processing.
+    """
+
+    # Expansion and Depthwise Convolution
+    x = inputs
+    if self._block_args.expand_ratio != 1:
+        x = self._expand_conv(inputs)
+        x = self._bn0(x)
+        x = self._swish(x)
+
+    x = self._depthwise_conv(x)
+    x = self._bn1(x)
+    x = self._swish(x)
+
+    # Squeeze and Excitation
+    if self.has_se:
+        x_squeezed = F.adaptive_avg_pool2d(x, 1)
+        x_squeezed = self._se_reduce(x_squeezed)
+        x_squeezed = self._swish(x_squeezed)
+        x_squeezed = self._se_expand(x_squeezed)
+        x = torch.sigmoid(x_squeezed) * x
+
+    # Pointwise Convolution
+    x = self._project_conv(x)
+    x = self._bn2(x)
+
+    # Skip connection and drop connect
+    input_filters, output_filters = (
+        self._block_args.input_filters,
+        self._block_args.output_filters,
+    )
+    if (
+        self.id_skip
+        and self._block_args.stride == 1
+        and input_filters == output_filters
+    ):
+        # The combination of skip connection and drop connect brings about stochastic depth.
+        if drop_connect_rate:
+            x = drop_connect(x, p=drop_connect_rate, training=self.training)
+        x = x + inputs  # skip connection
+    return x
+
+
+
+def set_swish(self, memory_efficient=True) +
+
+

Sets swish function as memory efficient (for training) or standard (for export).

+

Args

+
+
memory_efficient : bool
+
Whether to use memory-efficient version of swish.
+
+
+ +Expand source code + +
def set_swish(self, memory_efficient=True):
+    """Sets swish function as memory efficient (for training) or standard (for export).
+
+    Args:
+        memory_efficient (bool): Whether to use memory-efficient version of swish.
+    """
+    self._swish = MemoryEfficientSwish() if memory_efficient else Swish()
+
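A standalone sketch of a single block; the BlockArgs/GlobalParams are taken from create_block_args in effi_utils, whose defaults are assumed rather than shown in this file:

import torch
from carvekit.ml.arch.tracerb7.effi_utils import create_block_args
from carvekit.ml.arch.tracerb7.efficientnet import MBConvBlock

blocks_args, global_params = create_block_args(
    width_coefficient=1.0, depth_coefficient=1.0, dropout_rate=0.2, image_size=64
)
block = MBConvBlock(blocks_args[0], global_params, image_size=64)
x = torch.randn(1, blocks_args[0].input_filters, 64, 64)
# Expansion (skipped when expand_ratio == 1) -> depthwise conv -> SE -> pointwise projection
print(block(x).shape)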
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/tracerb7/index.html b/docs/api/carvekit/ml/arch/tracerb7/index.html new file mode 100644 index 0000000..69a9c40 --- /dev/null +++ b/docs/api/carvekit/ml/arch/tracerb7/index.html @@ -0,0 +1,96 @@ + + + + + + +carvekit.ml.arch.tracerb7 API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.tracerb7

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.tracerb7.att_modules
+
+

Source url: https://github.com/Karel911/TRACER +Author: Min Seok Lee and Wooseok Shin +License: Apache License 2.0

+
+
carvekit.ml.arch.tracerb7.conv_modules
+
+

Source url: https://github.com/Karel911/TRACER +Author: Min Seok Lee and Wooseok Shin +License: Apache License 2.0

+
+
carvekit.ml.arch.tracerb7.effi_utils
+
+

Original author: lukemelas (github username)
Github repo: https://github.com/lukemelas/EfficientNet-PyTorch
With adjustments and added comments by …

+
+
carvekit.ml.arch.tracerb7.efficientnet
+
+

Source url: https://github.com/lukemelas/EfficientNet-PyTorch
Modified by Min Seok Lee, Wooseok Shin, Nikita Selin
License: Apache License 2.0
…

+
+
carvekit.ml.arch.tracerb7.tracer
+
+

Source url: https://github.com/Karel911/TRACER
Author: Min Seok Lee and Wooseok Shin
Modified by Nikita Selin …

\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/tracerb7/tracer.html b/docs/api/carvekit/ml/arch/tracerb7/tracer.html
new file mode 100644
index 0000000..8885c88
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/tracerb7/tracer.html
@@ -0,0 +1,332 @@
+carvekit.ml.arch.tracerb7.tracer API documentation
+
+
+

Module carvekit.ml.arch.tracerb7.tracer

+
+
+

Source url: https://github.com/Karel911/TRACER
Author: Min Seok Lee and Wooseok Shin
Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
License: Apache License 2.0

+

Changes

+
  • Refactored code
  • Removed unused code
  • Added comments
"""
+Source url: https://github.com/Karel911/TRACER
+Author: Min Seok Lee and Wooseok Shin
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+Changes:
+    - Refactored code
+    - Removed unused code
+    - Added comments
+"""
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from typing import List, Optional, Tuple
+
+from torch import Tensor
+
+from carvekit.ml.arch.tracerb7.efficientnet import EfficientEncoderB7
+from carvekit.ml.arch.tracerb7.att_modules import (
+    RFB_Block,
+    aggregation,
+    ObjectAttention,
+)
+
+
+class TracerDecoder(nn.Module):
+    """Tracer Decoder"""
+
+    def __init__(
+        self,
+        encoder: EfficientEncoderB7,
+        features_channels: Optional[List[int]] = None,
+        rfb_channel: Optional[List[int]] = None,
+    ):
+        """
+        Initialize the tracer decoder.
+
+        Args:
+            encoder: The encoder to use.
+            features_channels: The channels of the backbone features at different stages. default: [48, 80, 224, 640]
+            rfb_channel: The channels of the RFB features. default: [32, 64, 128]
+        """
+        super().__init__()
+        if rfb_channel is None:
+            rfb_channel = [32, 64, 128]
+        if features_channels is None:
+            features_channels = [48, 80, 224, 640]
+        self.encoder = encoder
+        self.features_channels = features_channels
+
+        # Receptive Field Blocks
+        features_channels = rfb_channel
+        self.rfb2 = RFB_Block(self.features_channels[1], features_channels[0])
+        self.rfb3 = RFB_Block(self.features_channels[2], features_channels[1])
+        self.rfb4 = RFB_Block(self.features_channels[3], features_channels[2])
+
+        # Multi-level aggregation
+        self.agg = aggregation(features_channels)
+
+        # Object Attention
+        self.ObjectAttention2 = ObjectAttention(
+            channel=self.features_channels[1], kernel_size=3
+        )
+        self.ObjectAttention1 = ObjectAttention(
+            channel=self.features_channels[0], kernel_size=3
+        )
+
+    def forward(self, inputs: torch.Tensor) -> Tensor:
+        """
+        Forward pass of the tracer decoder.
+
+        Args:
+            inputs: Preprocessed images.
+
+        Returns:
+            Tensors of segmentation masks and mask of object edges.
+        """
+        features = self.encoder(inputs)
+        x3_rfb = self.rfb2(features[1])
+        x4_rfb = self.rfb3(features[2])
+        x5_rfb = self.rfb4(features[3])
+
+        D_0 = self.agg(x5_rfb, x4_rfb, x3_rfb)
+
+        ds_map0 = F.interpolate(D_0, scale_factor=8, mode="bilinear")
+
+        D_1 = self.ObjectAttention2(D_0, features[1])
+        ds_map1 = F.interpolate(D_1, scale_factor=8, mode="bilinear")
+
+        ds_map = F.interpolate(D_1, scale_factor=2, mode="bilinear")
+        D_2 = self.ObjectAttention1(ds_map, features[0])
+        ds_map2 = F.interpolate(D_2, scale_factor=4, mode="bilinear")
+
+        final_map = (ds_map2 + ds_map1 + ds_map0) / 3
+
+        return torch.sigmoid(final_map)
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class TracerDecoder(encoder: EfficientEncoderB7, features_channels: Optional[List[int]] = None, rfb_channel: Optional[List[int]] = None)
+
+

Tracer Decoder

+

Initialize the tracer decoder.

+

Args

+
+
encoder
+
The encoder to use.
+
features_channels
+
The channels of the backbone features at different stages. default: [48, 80, 224, 640]
+
rfb_channel
+
The channels of the RFB features. default: [32, 64, 128]
+
+
class TracerDecoder(nn.Module):
+    """Tracer Decoder"""
+
+    def __init__(
+        self,
+        encoder: EfficientEncoderB7,
+        features_channels: Optional[List[int]] = None,
+        rfb_channel: Optional[List[int]] = None,
+    ):
+        """
+        Initialize the tracer decoder.
+
+        Args:
+            encoder: The encoder to use.
+            features_channels: The channels of the backbone features at different stages. default: [48, 80, 224, 640]
+            rfb_channel: The channels of the RFB features. default: [32, 64, 128]
+        """
+        super().__init__()
+        if rfb_channel is None:
+            rfb_channel = [32, 64, 128]
+        if features_channels is None:
+            features_channels = [48, 80, 224, 640]
+        self.encoder = encoder
+        self.features_channels = features_channels
+
+        # Receptive Field Blocks
+        features_channels = rfb_channel
+        self.rfb2 = RFB_Block(self.features_channels[1], features_channels[0])
+        self.rfb3 = RFB_Block(self.features_channels[2], features_channels[1])
+        self.rfb4 = RFB_Block(self.features_channels[3], features_channels[2])
+
+        # Multi-level aggregation
+        self.agg = aggregation(features_channels)
+
+        # Object Attention
+        self.ObjectAttention2 = ObjectAttention(
+            channel=self.features_channels[1], kernel_size=3
+        )
+        self.ObjectAttention1 = ObjectAttention(
+            channel=self.features_channels[0], kernel_size=3
+        )
+
+    def forward(self, inputs: torch.Tensor) -> Tensor:
+        """
+        Forward pass of the tracer decoder.
+
+        Args:
+            inputs: Preprocessed images.
+
+        Returns:
+            Tensors of segmentation masks and mask of object edges.
+        """
+        features = self.encoder(inputs)
+        x3_rfb = self.rfb2(features[1])
+        x4_rfb = self.rfb3(features[2])
+        x5_rfb = self.rfb4(features[3])
+
+        D_0 = self.agg(x5_rfb, x4_rfb, x3_rfb)
+
+        ds_map0 = F.interpolate(D_0, scale_factor=8, mode="bilinear")
+
+        D_1 = self.ObjectAttention2(D_0, features[1])
+        ds_map1 = F.interpolate(D_1, scale_factor=8, mode="bilinear")
+
+        ds_map = F.interpolate(D_1, scale_factor=2, mode="bilinear")
+        D_2 = self.ObjectAttention1(ds_map, features[0])
+        ds_map2 = F.interpolate(D_2, scale_factor=4, mode="bilinear")
+
+        final_map = (ds_map2 + ds_map1 + ds_map0) / 3
+
+        return torch.sigmoid(final_map)
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Subclasses

+ +

Methods

+
+
+def forward(self, inputs: torch.Tensor) -> torch.Tensor
+
+

Forward pass of the tracer decoder.

+

Args

+
+
inputs
+
Preprocessed images.
+
+

Returns

+

Tensors of segmentation masks and mask of object edges.

+
def forward(self, inputs: torch.Tensor) -> Tensor:
+    """
+    Forward pass of the tracer decoder.
+
+    Args:
+        inputs: Preprocessed images.
+
+    Returns:
+        Tensors of segmentation masks and mask of object edges.
+    """
+    features = self.encoder(inputs)
+    x3_rfb = self.rfb2(features[1])
+    x4_rfb = self.rfb3(features[2])
+    x5_rfb = self.rfb4(features[3])
+
+    D_0 = self.agg(x5_rfb, x4_rfb, x3_rfb)
+
+    ds_map0 = F.interpolate(D_0, scale_factor=8, mode="bilinear")
+
+    D_1 = self.ObjectAttention2(D_0, features[1])
+    ds_map1 = F.interpolate(D_1, scale_factor=8, mode="bilinear")
+
+    ds_map = F.interpolate(D_1, scale_factor=2, mode="bilinear")
+    D_2 = self.ObjectAttention1(ds_map, features[0])
+    ds_map2 = F.interpolate(D_2, scale_factor=4, mode="bilinear")
+
+    final_map = (ds_map2 + ds_map1 + ds_map0) / 3
+
+    return torch.sigmoid(final_map)
+
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/u2net/index.html b/docs/api/carvekit/ml/arch/u2net/index.html
new file mode 100644
index 0000000..468a43f
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/u2net/index.html
@@ -0,0 +1,67 @@
+carvekit.ml.arch.u2net API documentation
+
+
+

Module carvekit.ml.arch.u2net

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.u2net.u2net
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
Source url: https://github.com/xuebinqin/U-2-Net
License: Apache License 2.0

\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/u2net/u2net.html b/docs/api/carvekit/ml/arch/u2net/u2net.html
new file mode 100644
index 0000000..c1dfb98
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/u2net/u2net.html
@@ -0,0 +1,431 @@
+carvekit.ml.arch.u2net.u2net API documentation
+
+
+

Module carvekit.ml.arch.u2net.u2net

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
Source url: https://github.com/xuebinqin/U-2-Net
License: Apache License 2.0

+
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/xuebinqin/U-2-Net
+License: Apache License 2.0
+"""
+from typing import Union
+
+import torch
+import torch.nn as nn
+
+import math
+
+__all__ = ["U2NETArchitecture"]
+
+
+def _upsample_like(x, size):
+    return nn.Upsample(size=size, mode="bilinear", align_corners=False)(x)
+
+
+def _size_map(x, height):
+    # {height: size} for Upsample
+    size = list(x.shape[-2:])
+    sizes = {}
+    for h in range(1, height):
+        sizes[h] = size
+        size = [math.ceil(w / 2) for w in size]
+    return sizes
+
+
+class REBNCONV(nn.Module):
+    def __init__(self, in_ch=3, out_ch=3, dilate=1):
+        super(REBNCONV, self).__init__()
+
+        self.conv_s1 = nn.Conv2d(
+            in_ch, out_ch, 3, padding=1 * dilate, dilation=1 * dilate
+        )
+        self.bn_s1 = nn.BatchNorm2d(out_ch)
+        self.relu_s1 = nn.ReLU(inplace=True)
+
+    def forward(self, x):
+        return self.relu_s1(self.bn_s1(self.conv_s1(x)))
+
+
+class RSU(nn.Module):
+    def __init__(self, name, height, in_ch, mid_ch, out_ch, dilated=False):
+        super(RSU, self).__init__()
+        self.name = name
+        self.height = height
+        self.dilated = dilated
+        self._make_layers(height, in_ch, mid_ch, out_ch, dilated)
+
+    def forward(self, x):
+        sizes = _size_map(x, self.height)
+        x = self.rebnconvin(x)
+
+        # U-Net like symmetric encoder-decoder structure
+        def unet(x, height=1):
+            if height < self.height:
+                x1 = getattr(self, f"rebnconv{height}")(x)
+                if not self.dilated and height < self.height - 1:
+                    x2 = unet(getattr(self, "downsample")(x1), height + 1)
+                else:
+                    x2 = unet(x1, height + 1)
+
+                x = getattr(self, f"rebnconv{height}d")(torch.cat((x2, x1), 1))
+                return (
+                    _upsample_like(x, sizes[height - 1])
+                    if not self.dilated and height > 1
+                    else x
+                )
+            else:
+                return getattr(self, f"rebnconv{height}")(x)
+
+        return x + unet(x)
+
+    def _make_layers(self, height, in_ch, mid_ch, out_ch, dilated=False):
+        self.add_module("rebnconvin", REBNCONV(in_ch, out_ch))
+        self.add_module("downsample", nn.MaxPool2d(2, stride=2, ceil_mode=True))
+
+        self.add_module("rebnconv1", REBNCONV(out_ch, mid_ch))
+        self.add_module("rebnconv1d", REBNCONV(mid_ch * 2, out_ch))
+
+        for i in range(2, height):
+            dilate = 1 if not dilated else 2 ** (i - 1)
+            self.add_module(f"rebnconv{i}", REBNCONV(mid_ch, mid_ch, dilate=dilate))
+            self.add_module(
+                f"rebnconv{i}d", REBNCONV(mid_ch * 2, mid_ch, dilate=dilate)
+            )
+
+        dilate = 2 if not dilated else 2 ** (height - 1)
+        self.add_module(f"rebnconv{height}", REBNCONV(mid_ch, mid_ch, dilate=dilate))
+
+
+class U2NETArchitecture(nn.Module):
+    def __init__(self, cfg_type: Union[dict, str] = "full", out_ch: int = 1):
+        super(U2NETArchitecture, self).__init__()
+        if isinstance(cfg_type, str):
+            if cfg_type == "full":
+                layers_cfgs = {
+                    # cfgs for building RSUs and sides
+                    # {stage : [name, (height(L), in_ch, mid_ch, out_ch, dilated), side]}
+                    "stage1": ["En_1", (7, 3, 32, 64), -1],
+                    "stage2": ["En_2", (6, 64, 32, 128), -1],
+                    "stage3": ["En_3", (5, 128, 64, 256), -1],
+                    "stage4": ["En_4", (4, 256, 128, 512), -1],
+                    "stage5": ["En_5", (4, 512, 256, 512, True), -1],
+                    "stage6": ["En_6", (4, 512, 256, 512, True), 512],
+                    "stage5d": ["De_5", (4, 1024, 256, 512, True), 512],
+                    "stage4d": ["De_4", (4, 1024, 128, 256), 256],
+                    "stage3d": ["De_3", (5, 512, 64, 128), 128],
+                    "stage2d": ["De_2", (6, 256, 32, 64), 64],
+                    "stage1d": ["De_1", (7, 128, 16, 64), 64],
+                }
+            else:
+                raise ValueError("Unknown U^2-Net architecture conf. name")
+        elif isinstance(cfg_type, dict):
+            layers_cfgs = cfg_type
+        else:
+            raise ValueError("Unknown U^2-Net architecture conf. type")
+        self.out_ch = out_ch
+        self._make_layers(layers_cfgs)
+
+    def forward(self, x):
+        sizes = _size_map(x, self.height)
+        maps = []  # storage for maps
+
+        # side saliency map
+        def unet(x, height=1):
+            if height < 6:
+                x1 = getattr(self, f"stage{height}")(x)
+                x2 = unet(getattr(self, "downsample")(x1), height + 1)
+                x = getattr(self, f"stage{height}d")(torch.cat((x2, x1), 1))
+                side(x, height)
+                return _upsample_like(x, sizes[height - 1]) if height > 1 else x
+            else:
+                x = getattr(self, f"stage{height}")(x)
+                side(x, height)
+                return _upsample_like(x, sizes[height - 1])
+
+        def side(x, h):
+            # side output saliency map (before sigmoid)
+            x = getattr(self, f"side{h}")(x)
+            x = _upsample_like(x, sizes[1])
+            maps.append(x)
+
+        def fuse():
+            # fuse saliency probability maps
+            maps.reverse()
+            x = torch.cat(maps, 1)
+            x = getattr(self, "outconv")(x)
+            maps.insert(0, x)
+            return [torch.sigmoid(x) for x in maps]
+
+        unet(x)
+        maps = fuse()
+        return maps
+
+    def _make_layers(self, cfgs):
+        self.height = int((len(cfgs) + 1) / 2)
+        self.add_module("downsample", nn.MaxPool2d(2, stride=2, ceil_mode=True))
+        for k, v in cfgs.items():
+            # build rsu block
+            self.add_module(k, RSU(v[0], *v[1]))
+            if v[2] > 0:
+                # build side layer
+                self.add_module(
+                    f"side{v[0][-1]}", nn.Conv2d(v[2], self.out_ch, 3, padding=1)
+                )
+        # build fuse layer
+        self.add_module(
+            "outconv", nn.Conv2d(int(self.height * self.out_ch), self.out_ch, 1)
+        )
+
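For orientation, the two private helpers at the top of the listing drive all resizing inside the RSU blocks: `_size_map` precomputes the spatial size expected at each recursion depth and `_upsample_like` restores a tensor to one of those sizes. An illustrative sketch for a 320x320 input:

``` python
import torch

from carvekit.ml.arch.u2net.u2net import _size_map, _upsample_like

x = torch.randn(1, 3, 320, 320)
sizes = _size_map(x, 6)
# sizes == {1: [320, 320], 2: [160, 160], 3: [80, 80], 4: [40, 40], 5: [20, 20]}

coarse = torch.randn(1, 64, 20, 20)
restored = _upsample_like(coarse, sizes[1])  # bilinear resize back to 320x320
```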
+
+
+
+
+
+
+
+
+

Classes

+
+
+class U2NETArchitecture(cfg_type: Union[dict, str] = 'full', out_ch: int = 1)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class U2NETArchitecture(nn.Module):
+    def __init__(self, cfg_type: Union[dict, str] = "full", out_ch: int = 1):
+        super(U2NETArchitecture, self).__init__()
+        if isinstance(cfg_type, str):
+            if cfg_type == "full":
+                layers_cfgs = {
+                    # cfgs for building RSUs and sides
+                    # {stage : [name, (height(L), in_ch, mid_ch, out_ch, dilated), side]}
+                    "stage1": ["En_1", (7, 3, 32, 64), -1],
+                    "stage2": ["En_2", (6, 64, 32, 128), -1],
+                    "stage3": ["En_3", (5, 128, 64, 256), -1],
+                    "stage4": ["En_4", (4, 256, 128, 512), -1],
+                    "stage5": ["En_5", (4, 512, 256, 512, True), -1],
+                    "stage6": ["En_6", (4, 512, 256, 512, True), 512],
+                    "stage5d": ["De_5", (4, 1024, 256, 512, True), 512],
+                    "stage4d": ["De_4", (4, 1024, 128, 256), 256],
+                    "stage3d": ["De_3", (5, 512, 64, 128), 128],
+                    "stage2d": ["De_2", (6, 256, 32, 64), 64],
+                    "stage1d": ["De_1", (7, 128, 16, 64), 64],
+                }
+            else:
+                raise ValueError("Unknown U^2-Net architecture conf. name")
+        elif isinstance(cfg_type, dict):
+            layers_cfgs = cfg_type
+        else:
+            raise ValueError("Unknown U^2-Net architecture conf. type")
+        self.out_ch = out_ch
+        self._make_layers(layers_cfgs)
+
+    def forward(self, x):
+        sizes = _size_map(x, self.height)
+        maps = []  # storage for maps
+
+        # side saliency map
+        def unet(x, height=1):
+            if height < 6:
+                x1 = getattr(self, f"stage{height}")(x)
+                x2 = unet(getattr(self, "downsample")(x1), height + 1)
+                x = getattr(self, f"stage{height}d")(torch.cat((x2, x1), 1))
+                side(x, height)
+                return _upsample_like(x, sizes[height - 1]) if height > 1 else x
+            else:
+                x = getattr(self, f"stage{height}")(x)
+                side(x, height)
+                return _upsample_like(x, sizes[height - 1])
+
+        def side(x, h):
+            # side output saliency map (before sigmoid)
+            x = getattr(self, f"side{h}")(x)
+            x = _upsample_like(x, sizes[1])
+            maps.append(x)
+
+        def fuse():
+            # fuse saliency probability maps
+            maps.reverse()
+            x = torch.cat(maps, 1)
+            x = getattr(self, "outconv")(x)
+            maps.insert(0, x)
+            return [torch.sigmoid(x) for x in maps]
+
+        unet(x)
+        maps = fuse()
+        return maps
+
+    def _make_layers(self, cfgs):
+        self.height = int((len(cfgs) + 1) / 2)
+        self.add_module("downsample", nn.MaxPool2d(2, stride=2, ceil_mode=True))
+        for k, v in cfgs.items():
+            # build rsu block
+            self.add_module(k, RSU(v[0], *v[1]))
+            if v[2] > 0:
+                # build side layer
+                self.add_module(
+                    f"side{v[0][-1]}", nn.Conv2d(v[2], self.out_ch, 3, padding=1)
+                )
+        # build fuse layer
+        self.add_module(
+            "outconv", nn.Conv2d(int(self.height * self.out_ch), self.out_ch, 1)
+        )
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Subclasses

+ +

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    sizes = _size_map(x, self.height)
+    maps = []  # storage for maps
+
+    # side saliency map
+    def unet(x, height=1):
+        if height < 6:
+            x1 = getattr(self, f"stage{height}")(x)
+            x2 = unet(getattr(self, "downsample")(x1), height + 1)
+            x = getattr(self, f"stage{height}d")(torch.cat((x2, x1), 1))
+            side(x, height)
+            return _upsample_like(x, sizes[height - 1]) if height > 1 else x
+        else:
+            x = getattr(self, f"stage{height}")(x)
+            side(x, height)
+            return _upsample_like(x, sizes[height - 1])
+
+    def side(x, h):
+        # side output saliency map (before sigmoid)
+        x = getattr(self, f"side{h}")(x)
+        x = _upsample_like(x, sizes[1])
+        maps.append(x)
+
+    def fuse():
+        # fuse saliency probability maps
+        maps.reverse()
+        x = torch.cat(maps, 1)
+        x = getattr(self, "outconv")(x)
+        maps.insert(0, x)
+        return [torch.sigmoid(x) for x in maps]
+
+    unet(x)
+    maps = fuse()
+    return maps
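Putting the pieces above together, a minimal sketch of running the `full` configuration on a dummy RGB batch; the returned list holds the fused saliency map first, followed by the per-stage side outputs, all already passed through a sigmoid:

``` python
import torch

from carvekit.ml.arch.u2net.u2net import U2NETArchitecture

net = U2NETArchitecture(cfg_type="full", out_ch=1)
net.eval()

x = torch.randn(1, 3, 320, 320)   # dummy RGB batch
with torch.no_grad():
    maps = net(x)                 # [fused, side1, ..., side6]
d0 = maps[0]                      # fused map, shape [1, 1, 320, 320], values in [0, 1]
```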
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/yolov4/index.html b/docs/api/carvekit/ml/arch/yolov4/index.html
new file mode 100644
index 0000000..10cd438
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/yolov4/index.html
@@ -0,0 +1,79 @@
+carvekit.ml.arch.yolov4 API documentation
+
+
+

Module carvekit.ml.arch.yolov4

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.arch.yolov4.models
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4
License: Apache License …

+
+
carvekit.ml.arch.yolov4.utils
+
+
+
+
carvekit.ml.arch.yolov4.yolo_layer
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4
License: Apache License …

+
\ No newline at end of file
diff --git a/docs/api/carvekit/ml/arch/yolov4/models.html b/docs/api/carvekit/ml/arch/yolov4/models.html
new file mode 100644
index 0000000..5974ec3
--- /dev/null
+++ b/docs/api/carvekit/ml/arch/yolov4/models.html
@@ -0,0 +1,2193 @@
+carvekit.ml.arch.yolov4.models API documentation
+
+
+

Module carvekit.ml.arch.yolov4.models

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4
License: Apache License 2.0

+
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4
+License: Apache License 2.0
+"""
+import torch
+from torch import nn
+import torch.nn.functional as F
+from carvekit.ml.arch.yolov4.yolo_layer import YoloLayer
+
+
+def get_region_boxes(boxes_and_confs):
+    # print('Getting boxes from boxes and confs ...')
+
+    boxes_list = []
+    confs_list = []
+
+    for item in boxes_and_confs:
+        boxes_list.append(item[0])
+        confs_list.append(item[1])
+
+    # boxes: [batch, num1 + num2 + num3, 1, 4]
+    # confs: [batch, num1 + num2 + num3, num_classes]
+    boxes = torch.cat(boxes_list, dim=1)
+    confs = torch.cat(confs_list, dim=1)
+
+    return [boxes, confs]
+
+
+class Mish(torch.nn.Module):
+    def __init__(self):
+        super().__init__()
+
+    def forward(self, x):
+        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
+        return x
+
+
+class Upsample(nn.Module):
+    def __init__(self):
+        super(Upsample, self).__init__()
+
+    def forward(self, x, target_size, inference=False):
+        assert x.data.dim() == 4
+        # _, _, tH, tW = target_size
+
+        if inference:
+
+            # B = x.data.size(0)
+            # C = x.data.size(1)
+            # H = x.data.size(2)
+            # W = x.data.size(3)
+
+            return (
+                x.view(x.size(0), x.size(1), x.size(2), 1, x.size(3), 1)
+                .expand(
+                    x.size(0),
+                    x.size(1),
+                    x.size(2),
+                    target_size[2] // x.size(2),
+                    x.size(3),
+                    target_size[3] // x.size(3),
+                )
+                .contiguous()
+                .view(x.size(0), x.size(1), target_size[2], target_size[3])
+            )
+        else:
+            return F.interpolate(
+                x, size=(target_size[2], target_size[3]), mode="nearest"
+            )
+
+
+class Conv_Bn_Activation(nn.Module):
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride,
+        activation,
+        bn=True,
+        bias=False,
+    ):
+        super().__init__()
+        pad = (kernel_size - 1) // 2
+
+        self.conv = nn.ModuleList()
+        if bias:
+            self.conv.append(
+                nn.Conv2d(in_channels, out_channels, kernel_size, stride, pad)
+            )
+        else:
+            self.conv.append(
+                nn.Conv2d(
+                    in_channels, out_channels, kernel_size, stride, pad, bias=False
+                )
+            )
+        if bn:
+            self.conv.append(nn.BatchNorm2d(out_channels))
+        if activation == "mish":
+            self.conv.append(Mish())
+        elif activation == "relu":
+            self.conv.append(nn.ReLU(inplace=True))
+        elif activation == "leaky":
+            self.conv.append(nn.LeakyReLU(0.1, inplace=True))
+        elif activation == "linear":
+            pass
+        else:
+            raise Exception("activation error")
+
+    def forward(self, x):
+        for l in self.conv:
+            x = l(x)
+        return x
+
+
+class ResBlock(nn.Module):
+    """
+    Sequential residual blocks each of which consists of \
+    two convolution layers.
+    Args:
+        ch (int): number of input and output channels.
+        nblocks (int): number of residual blocks.
+        shortcut (bool): if True, residual tensor addition is enabled.
+    """
+
+    def __init__(self, ch, nblocks=1, shortcut=True):
+        super().__init__()
+        self.shortcut = shortcut
+        self.module_list = nn.ModuleList()
+        for i in range(nblocks):
+            resblock_one = nn.ModuleList()
+            resblock_one.append(Conv_Bn_Activation(ch, ch, 1, 1, "mish"))
+            resblock_one.append(Conv_Bn_Activation(ch, ch, 3, 1, "mish"))
+            self.module_list.append(resblock_one)
+
+    def forward(self, x):
+        for module in self.module_list:
+            h = x
+            for res in module:
+                h = res(h)
+            x = x + h if self.shortcut else h
+        return x
+
+
+class DownSample1(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(3, 32, 3, 1, "mish")
+
+        self.conv2 = Conv_Bn_Activation(32, 64, 3, 2, "mish")
+        self.conv3 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+        # [route]
+        # layers = -2
+        self.conv4 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+
+        self.conv5 = Conv_Bn_Activation(64, 32, 1, 1, "mish")
+        self.conv6 = Conv_Bn_Activation(32, 64, 3, 1, "mish")
+        # [shortcut]
+        # from=-3
+        # activation = linear
+
+        self.conv7 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+        # [route]
+        # layers = -1, -7
+        self.conv8 = Conv_Bn_Activation(128, 64, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x2)
+        # route -2
+        x4 = self.conv4(x2)
+        x5 = self.conv5(x4)
+        x6 = self.conv6(x5)
+        # shortcut -3
+        x6 = x6 + x4
+
+        x7 = self.conv7(x6)
+        # [route]
+        # layers = -1, -7
+        x7 = torch.cat([x7, x3], dim=1)
+        x8 = self.conv8(x7)
+        return x8
+
+
+class DownSample2(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(64, 128, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(128, 64, 1, 1, "mish")
+        # r -2
+        self.conv3 = Conv_Bn_Activation(128, 64, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=64, nblocks=2)
+
+        # s -3
+        self.conv4 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+        # r -1 -10
+        self.conv5 = Conv_Bn_Activation(128, 128, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+
+class DownSample3(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(128, 256, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(256, 128, 1, 1, "mish")
+        self.conv3 = Conv_Bn_Activation(256, 128, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=128, nblocks=8)
+        self.conv4 = Conv_Bn_Activation(128, 128, 1, 1, "mish")
+        self.conv5 = Conv_Bn_Activation(256, 256, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+
+class DownSample4(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(256, 512, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(512, 256, 1, 1, "mish")
+        self.conv3 = Conv_Bn_Activation(512, 256, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=256, nblocks=8)
+        self.conv4 = Conv_Bn_Activation(256, 256, 1, 1, "mish")
+        self.conv5 = Conv_Bn_Activation(512, 512, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+
+class DownSample5(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(512, 1024, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(1024, 512, 1, 1, "mish")
+        self.conv3 = Conv_Bn_Activation(1024, 512, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=512, nblocks=4)
+        self.conv4 = Conv_Bn_Activation(512, 512, 1, 1, "mish")
+        self.conv5 = Conv_Bn_Activation(1024, 1024, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+
+class Neck(nn.Module):
+    def __init__(self, inference=False):
+        super().__init__()
+        self.inference = inference
+
+        self.conv1 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv2 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv3 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        # SPP
+        self.maxpool1 = nn.MaxPool2d(kernel_size=5, stride=1, padding=5 // 2)
+        self.maxpool2 = nn.MaxPool2d(kernel_size=9, stride=1, padding=9 // 2)
+        self.maxpool3 = nn.MaxPool2d(kernel_size=13, stride=1, padding=13 // 2)
+
+        # R -1 -3 -5 -6
+        # SPP
+        self.conv4 = Conv_Bn_Activation(2048, 512, 1, 1, "leaky")
+        self.conv5 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv6 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv7 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        # UP
+        self.upsample1 = Upsample()
+        # R 85
+        self.conv8 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        # R -1 -3
+        self.conv9 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv10 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv11 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv12 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv13 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv14 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        # UP
+        self.upsample2 = Upsample()
+        # R 54
+        self.conv15 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        # R -1 -3
+        self.conv16 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        self.conv17 = Conv_Bn_Activation(128, 256, 3, 1, "leaky")
+        self.conv18 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        self.conv19 = Conv_Bn_Activation(128, 256, 3, 1, "leaky")
+        self.conv20 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+
+    def forward(self, input, downsample4, downsample3, inference=False):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x2)
+        # SPP
+        m1 = self.maxpool1(x3)
+        m2 = self.maxpool2(x3)
+        m3 = self.maxpool3(x3)
+        spp = torch.cat([m3, m2, m1, x3], dim=1)
+        # SPP end
+        x4 = self.conv4(spp)
+        x5 = self.conv5(x4)
+        x6 = self.conv6(x5)
+        x7 = self.conv7(x6)
+        # UP
+        up = self.upsample1(x7, downsample4.size(), self.inference)
+        # R 85
+        x8 = self.conv8(downsample4)
+        # R -1 -3
+        x8 = torch.cat([x8, up], dim=1)
+
+        x9 = self.conv9(x8)
+        x10 = self.conv10(x9)
+        x11 = self.conv11(x10)
+        x12 = self.conv12(x11)
+        x13 = self.conv13(x12)
+        x14 = self.conv14(x13)
+
+        # UP
+        up = self.upsample2(x14, downsample3.size(), self.inference)
+        # R 54
+        x15 = self.conv15(downsample3)
+        # R -1 -3
+        x15 = torch.cat([x15, up], dim=1)
+
+        x16 = self.conv16(x15)
+        x17 = self.conv17(x16)
+        x18 = self.conv18(x17)
+        x19 = self.conv19(x18)
+        x20 = self.conv20(x19)
+        return x20, x13, x6
+
+
+class Yolov4Head(nn.Module):
+    def __init__(self, output_ch, n_classes, inference=False):
+        super().__init__()
+        self.inference = inference
+
+        self.conv1 = Conv_Bn_Activation(128, 256, 3, 1, "leaky")
+        self.conv2 = Conv_Bn_Activation(
+            256, output_ch, 1, 1, "linear", bn=False, bias=True
+        )
+
+        self.yolo1 = YoloLayer(
+            anchor_mask=[0, 1, 2],
+            num_classes=n_classes,
+            anchors=[
+                12,
+                16,
+                19,
+                36,
+                40,
+                28,
+                36,
+                75,
+                76,
+                55,
+                72,
+                146,
+                142,
+                110,
+                192,
+                243,
+                459,
+                401,
+            ],
+            num_anchors=9,
+            stride=8,
+        )
+
+        # R -4
+        self.conv3 = Conv_Bn_Activation(128, 256, 3, 2, "leaky")
+
+        # R -1 -16
+        self.conv4 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv5 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv6 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv7 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv8 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv9 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv10 = Conv_Bn_Activation(
+            512, output_ch, 1, 1, "linear", bn=False, bias=True
+        )
+
+        self.yolo2 = YoloLayer(
+            anchor_mask=[3, 4, 5],
+            num_classes=n_classes,
+            anchors=[
+                12,
+                16,
+                19,
+                36,
+                40,
+                28,
+                36,
+                75,
+                76,
+                55,
+                72,
+                146,
+                142,
+                110,
+                192,
+                243,
+                459,
+                401,
+            ],
+            num_anchors=9,
+            stride=16,
+        )
+
+        # R -4
+        self.conv11 = Conv_Bn_Activation(256, 512, 3, 2, "leaky")
+
+        # R -1 -37
+        self.conv12 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv13 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv14 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv15 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv16 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv17 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv18 = Conv_Bn_Activation(
+            1024, output_ch, 1, 1, "linear", bn=False, bias=True
+        )
+
+        self.yolo3 = YoloLayer(
+            anchor_mask=[6, 7, 8],
+            num_classes=n_classes,
+            anchors=[
+                12,
+                16,
+                19,
+                36,
+                40,
+                28,
+                36,
+                75,
+                76,
+                55,
+                72,
+                146,
+                142,
+                110,
+                192,
+                243,
+                459,
+                401,
+            ],
+            num_anchors=9,
+            stride=32,
+        )
+
+    def forward(self, input1, input2, input3):
+        x1 = self.conv1(input1)
+        x2 = self.conv2(x1)
+
+        x3 = self.conv3(input1)
+        # R -1 -16
+        x3 = torch.cat([x3, input2], dim=1)
+        x4 = self.conv4(x3)
+        x5 = self.conv5(x4)
+        x6 = self.conv6(x5)
+        x7 = self.conv7(x6)
+        x8 = self.conv8(x7)
+        x9 = self.conv9(x8)
+        x10 = self.conv10(x9)
+
+        # R -4
+        x11 = self.conv11(x8)
+        # R -1 -37
+        x11 = torch.cat([x11, input3], dim=1)
+
+        x12 = self.conv12(x11)
+        x13 = self.conv13(x12)
+        x14 = self.conv14(x13)
+        x15 = self.conv15(x14)
+        x16 = self.conv16(x15)
+        x17 = self.conv17(x16)
+        x18 = self.conv18(x17)
+
+        if self.inference:
+            y1 = self.yolo1(x2)
+            y2 = self.yolo2(x10)
+            y3 = self.yolo3(x18)
+
+            return get_region_boxes([y1, y2, y3])
+
+        else:
+            return [x2, x10, x18]
+
+
+class Yolov4(nn.Module):
+    def __init__(self, n_classes=80, inference=False):
+        super().__init__()
+
+        output_ch = (4 + 1 + n_classes) * 3
+
+        # backbone
+        self.down1 = DownSample1()
+        self.down2 = DownSample2()
+        self.down3 = DownSample3()
+        self.down4 = DownSample4()
+        self.down5 = DownSample5()
+        # neck
+        self.neek = Neck(inference)
+
+        # head
+        self.head = Yolov4Head(output_ch, n_classes, inference)
+
+    def forward(self, input):
+        d1 = self.down1(input)
+        d2 = self.down2(d1)
+        d3 = self.down3(d2)
+        d4 = self.down4(d3)
+        d5 = self.down5(d4)
+
+        x20, x13, x6 = self.neek(d5, d4, d3)
+
+        output = self.head(x20, x13, x6)
+        return output
+
+
+
+
+
+
+
+

Functions

+
+
+def get_region_boxes(boxes_and_confs) +
+
+
+
def get_region_boxes(boxes_and_confs):
+    # print('Getting boxes from boxes and confs ...')
+
+    boxes_list = []
+    confs_list = []
+
+    for item in boxes_and_confs:
+        boxes_list.append(item[0])
+        confs_list.append(item[1])
+
+    # boxes: [batch, num1 + num2 + num3, 1, 4]
+    # confs: [batch, num1 + num2 + num3, num_classes]
+    boxes = torch.cat(boxes_list, dim=1)
+    confs = torch.cat(confs_list, dim=1)
+
+    return [boxes, confs]
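The function only concatenates the per-scale box and confidence tensors along the detection dimension; a small sketch with dummy shapes:

``` python
import torch

from carvekit.ml.arch.yolov4.models import get_region_boxes

# Dummy (boxes, confs) pairs, as produced by the three YOLO output scales
scale_outputs = [
    (torch.zeros(2, 100, 1, 4), torch.zeros(2, 100, 80)),
    (torch.zeros(2, 400, 1, 4), torch.zeros(2, 400, 80)),
    (torch.zeros(2, 1600, 1, 4), torch.zeros(2, 1600, 80)),
]
boxes, confs = get_region_boxes(scale_outputs)
# boxes.shape == (2, 2100, 1, 4); confs.shape == (2, 2100, 80)
```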
+
+
+
+
+
+

Classes

+
+
+class Conv_Bn_Activation +(in_channels, out_channels, kernel_size, stride, activation, bn=True, bias=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class Conv_Bn_Activation(nn.Module):
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride,
+        activation,
+        bn=True,
+        bias=False,
+    ):
+        super().__init__()
+        pad = (kernel_size - 1) // 2
+
+        self.conv = nn.ModuleList()
+        if bias:
+            self.conv.append(
+                nn.Conv2d(in_channels, out_channels, kernel_size, stride, pad)
+            )
+        else:
+            self.conv.append(
+                nn.Conv2d(
+                    in_channels, out_channels, kernel_size, stride, pad, bias=False
+                )
+            )
+        if bn:
+            self.conv.append(nn.BatchNorm2d(out_channels))
+        if activation == "mish":
+            self.conv.append(Mish())
+        elif activation == "relu":
+            self.conv.append(nn.ReLU(inplace=True))
+        elif activation == "leaky":
+            self.conv.append(nn.LeakyReLU(0.1, inplace=True))
+        elif activation == "linear":
+            pass
+        else:
+            raise Exception("activation error")
+
+    def forward(self, x):
+        for l in self.conv:
+            x = l(x)
+        return x
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, x) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, x):
+    for l in self.conv:
+        x = l(x)
+    return x
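In short, `Conv_Bn_Activation` is a convenience wrapper bundling a convolution, optional batch norm, and one of the supported activations ("mish", "relu", "leaky", "linear"). A quick sketch:

``` python
import torch

from carvekit.ml.arch.yolov4.models import Conv_Bn_Activation

layer = Conv_Bn_Activation(3, 32, kernel_size=3, stride=1, activation="mish")
y = layer(torch.randn(1, 3, 64, 64))  # -> [1, 32, 64, 64]; padding keeps the spatial size
```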
+
+
+
+
+
+class DownSample1 +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class DownSample1(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(3, 32, 3, 1, "mish")
+
+        self.conv2 = Conv_Bn_Activation(32, 64, 3, 2, "mish")
+        self.conv3 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+        # [route]
+        # layers = -2
+        self.conv4 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+
+        self.conv5 = Conv_Bn_Activation(64, 32, 1, 1, "mish")
+        self.conv6 = Conv_Bn_Activation(32, 64, 3, 1, "mish")
+        # [shortcut]
+        # from=-3
+        # activation = linear
+
+        self.conv7 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+        # [route]
+        # layers = -1, -7
+        self.conv8 = Conv_Bn_Activation(128, 64, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x2)
+        # route -2
+        x4 = self.conv4(x2)
+        x5 = self.conv5(x4)
+        x6 = self.conv6(x5)
+        # shortcut -3
+        x6 = x6 + x4
+
+        x7 = self.conv7(x6)
+        # [route]
+        # layers = -1, -7
+        x7 = torch.cat([x7, x3], dim=1)
+        x8 = self.conv8(x7)
+        return x8
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, input) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, input):
+    x1 = self.conv1(input)
+    x2 = self.conv2(x1)
+    x3 = self.conv3(x2)
+    # route -2
+    x4 = self.conv4(x2)
+    x5 = self.conv5(x4)
+    x6 = self.conv6(x5)
+    # shortcut -3
+    x6 = x6 + x4
+
+    x7 = self.conv7(x6)
+    # [route]
+    # layers = -1, -7
+    x7 = torch.cat([x7, x3], dim=1)
+    x8 = self.conv8(x7)
+    return x8
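`DownSample1` is the stem of the backbone: a stride-2 stage with a CSP-style split into two branches that are merged by the final 1x1 convolution; `DownSample2` through `DownSample5` repeat the same pattern at larger channel widths. A quick sketch:

``` python
import torch

from carvekit.ml.arch.yolov4.models import DownSample1

stem = DownSample1()
y = stem(torch.randn(1, 3, 608, 608))  # -> [1, 64, 304, 304]
```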
+
+
+
+
+
+class DownSample2 +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class DownSample2(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(64, 128, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(128, 64, 1, 1, "mish")
+        # r -2
+        self.conv3 = Conv_Bn_Activation(128, 64, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=64, nblocks=2)
+
+        # s -3
+        self.conv4 = Conv_Bn_Activation(64, 64, 1, 1, "mish")
+        # r -1 -10
+        self.conv5 = Conv_Bn_Activation(128, 128, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, input) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, input):
+    x1 = self.conv1(input)
+    x2 = self.conv2(x1)
+    x3 = self.conv3(x1)
+
+    r = self.resblock(x3)
+    x4 = self.conv4(r)
+
+    x4 = torch.cat([x4, x2], dim=1)
+    x5 = self.conv5(x4)
+    return x5
+
+
+
+
+
+class DownSample3 +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class DownSample3(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(128, 256, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(256, 128, 1, 1, "mish")
+        self.conv3 = Conv_Bn_Activation(256, 128, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=128, nblocks=8)
+        self.conv4 = Conv_Bn_Activation(128, 128, 1, 1, "mish")
+        self.conv5 = Conv_Bn_Activation(256, 256, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, input) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, input):
+    x1 = self.conv1(input)
+    x2 = self.conv2(x1)
+    x3 = self.conv3(x1)
+
+    r = self.resblock(x3)
+    x4 = self.conv4(r)
+
+    x4 = torch.cat([x4, x2], dim=1)
+    x5 = self.conv5(x4)
+    return x5
+
+
+
+
+
+class DownSample4 +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class DownSample4(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(256, 512, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(512, 256, 1, 1, "mish")
+        self.conv3 = Conv_Bn_Activation(512, 256, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=256, nblocks=8)
+        self.conv4 = Conv_Bn_Activation(256, 256, 1, 1, "mish")
+        self.conv5 = Conv_Bn_Activation(512, 512, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, input) -> Callable[..., Any]
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
def forward(self, input):
+    x1 = self.conv1(input)
+    x2 = self.conv2(x1)
+    x3 = self.conv3(x1)
+
+    r = self.resblock(x3)
+    x4 = self.conv4(r)
+
+    x4 = torch.cat([x4, x2], dim=1)
+    x5 = self.conv5(x4)
+    return x5
+
+
+
+
+
+class DownSample5 +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
class DownSample5(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = Conv_Bn_Activation(512, 1024, 3, 2, "mish")
+        self.conv2 = Conv_Bn_Activation(1024, 512, 1, 1, "mish")
+        self.conv3 = Conv_Bn_Activation(1024, 512, 1, 1, "mish")
+
+        self.resblock = ResBlock(ch=512, nblocks=4)
+        self.conv4 = Conv_Bn_Activation(512, 512, 1, 1, "mish")
+        self.conv5 = Conv_Bn_Activation(1024, 1024, 1, 1, "mish")
+
+    def forward(self, input):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x1)
+
+        r = self.resblock(x3)
+        x4 = self.conv4(r)
+
+        x4 = torch.cat([x4, x2], dim=1)
+        x5 = self.conv5(x4)
+        return x5
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, input) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, input):
+    x1 = self.conv1(input)
+    x2 = self.conv2(x1)
+    x3 = self.conv3(x1)
+
+    r = self.resblock(x3)
+    x4 = self.conv4(r)
+
+    x4 = torch.cat([x4, x2], dim=1)
+    x5 = self.conv5(x4)
+    return x5
+
+
+
+
+
+class Mish +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Mish(torch.nn.Module):
+    def __init__(self):
+        super().__init__()
+
+    def forward(self, x):
+        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
+        return x
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    x = x * (torch.tanh(torch.nn.functional.softplus(x)))
+    return x
+
+
+
+
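The forward pass above computes x · tanh(softplus(x)), i.e. the Mish activation. A quick sketch comparing it against PyTorch's built-in version (torch.nn.functional.mish, available on PyTorch 1.9 and newer):

+import torch
+import torch.nn.functional as F
+
+x = torch.linspace(-3.0, 3.0, 7)
+manual = x * torch.tanh(F.softplus(x))                # the formula used by this class
+print(torch.allclose(manual, F.mish(x), atol=1e-6))   # expected: True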
+
+class Neck +(inference=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Neck(nn.Module):
+    def __init__(self, inference=False):
+        super().__init__()
+        self.inference = inference
+
+        self.conv1 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv2 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv3 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        # SPP
+        self.maxpool1 = nn.MaxPool2d(kernel_size=5, stride=1, padding=5 // 2)
+        self.maxpool2 = nn.MaxPool2d(kernel_size=9, stride=1, padding=9 // 2)
+        self.maxpool3 = nn.MaxPool2d(kernel_size=13, stride=1, padding=13 // 2)
+
+        # R -1 -3 -5 -6
+        # SPP
+        self.conv4 = Conv_Bn_Activation(2048, 512, 1, 1, "leaky")
+        self.conv5 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv6 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv7 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        # UP
+        self.upsample1 = Upsample()
+        # R 85
+        self.conv8 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        # R -1 -3
+        self.conv9 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv10 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv11 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv12 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv13 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv14 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        # UP
+        self.upsample2 = Upsample()
+        # R 54
+        self.conv15 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        # R -1 -3
+        self.conv16 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        self.conv17 = Conv_Bn_Activation(128, 256, 3, 1, "leaky")
+        self.conv18 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+        self.conv19 = Conv_Bn_Activation(128, 256, 3, 1, "leaky")
+        self.conv20 = Conv_Bn_Activation(256, 128, 1, 1, "leaky")
+
+    def forward(self, input, downsample4, downsample3, inference=False):
+        x1 = self.conv1(input)
+        x2 = self.conv2(x1)
+        x3 = self.conv3(x2)
+        # SPP
+        m1 = self.maxpool1(x3)
+        m2 = self.maxpool2(x3)
+        m3 = self.maxpool3(x3)
+        spp = torch.cat([m3, m2, m1, x3], dim=1)
+        # SPP end
+        x4 = self.conv4(spp)
+        x5 = self.conv5(x4)
+        x6 = self.conv6(x5)
+        x7 = self.conv7(x6)
+        # UP
+        up = self.upsample1(x7, downsample4.size(), self.inference)
+        # R 85
+        x8 = self.conv8(downsample4)
+        # R -1 -3
+        x8 = torch.cat([x8, up], dim=1)
+
+        x9 = self.conv9(x8)
+        x10 = self.conv10(x9)
+        x11 = self.conv11(x10)
+        x12 = self.conv12(x11)
+        x13 = self.conv13(x12)
+        x14 = self.conv14(x13)
+
+        # UP
+        up = self.upsample2(x14, downsample3.size(), self.inference)
+        # R 54
+        x15 = self.conv15(downsample3)
+        # R -1 -3
+        x15 = torch.cat([x15, up], dim=1)
+
+        x16 = self.conv16(x15)
+        x17 = self.conv17(x16)
+        x18 = self.conv18(x17)
+        x19 = self.conv19(x18)
+        x20 = self.conv20(x19)
+        return x20, x13, x6
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, input, downsample4, downsample3, inference=False) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, input, downsample4, downsample3, inference=False):
+    x1 = self.conv1(input)
+    x2 = self.conv2(x1)
+    x3 = self.conv3(x2)
+    # SPP
+    m1 = self.maxpool1(x3)
+    m2 = self.maxpool2(x3)
+    m3 = self.maxpool3(x3)
+    spp = torch.cat([m3, m2, m1, x3], dim=1)
+    # SPP end
+    x4 = self.conv4(spp)
+    x5 = self.conv5(x4)
+    x6 = self.conv6(x5)
+    x7 = self.conv7(x6)
+    # UP
+    up = self.upsample1(x7, downsample4.size(), self.inference)
+    # R 85
+    x8 = self.conv8(downsample4)
+    # R -1 -3
+    x8 = torch.cat([x8, up], dim=1)
+
+    x9 = self.conv9(x8)
+    x10 = self.conv10(x9)
+    x11 = self.conv11(x10)
+    x12 = self.conv12(x11)
+    x13 = self.conv13(x12)
+    x14 = self.conv14(x13)
+
+    # UP
+    up = self.upsample2(x14, downsample3.size(), self.inference)
+    # R 54
+    x15 = self.conv15(downsample3)
+    # R -1 -3
+    x15 = torch.cat([x15, up], dim=1)
+
+    x16 = self.conv16(x15)
+    x17 = self.conv17(x16)
+    x18 = self.conv18(x17)
+    x19 = self.conv19(x18)
+    x20 = self.conv20(x19)
+    return x20, x13, x6
+
+
+
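The three max-pools above implement an SPP block: kernels 5/9/13 with stride 1 and padding k // 2 preserve the spatial size, so concatenating the pooled maps with the input multiplies the channel count by four (512 β†’ 2048, matching conv4's input). A small illustrative sketch, with the 19Γ—19 size chosen arbitrarily:

+import torch
+import torch.nn as nn
+
+x3 = torch.randn(1, 512, 19, 19)
+pools = [nn.MaxPool2d(k, stride=1, padding=k // 2) for k in (5, 9, 13)]
+spp = torch.cat([p(x3) for p in pools] + [x3], dim=1)
+print(spp.shape)   # torch.Size([1, 2048, 19, 19])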
+
+
+class ResBlock +(ch, nblocks=1, shortcut=True) +
+
+

Sequential residual blocks each of which consists of +two convolution layers.

+

Args

+
+
ch : int
+
number of input and output channels.
+
nblocks : int
+
number of residual blocks.
+
shortcut : bool
+
if True, residual tensor addition is enabled.
+
+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class ResBlock(nn.Module):
+    """
+    Sequential residual blocks each of which consists of \
+    two convolution layers.
+    Args:
+        ch (int): number of input and output channels.
+        nblocks (int): number of residual blocks.
+        shortcut (bool): if True, residual tensor addition is enabled.
+    """
+
+    def __init__(self, ch, nblocks=1, shortcut=True):
+        super().__init__()
+        self.shortcut = shortcut
+        self.module_list = nn.ModuleList()
+        for i in range(nblocks):
+            resblock_one = nn.ModuleList()
+            resblock_one.append(Conv_Bn_Activation(ch, ch, 1, 1, "mish"))
+            resblock_one.append(Conv_Bn_Activation(ch, ch, 3, 1, "mish"))
+            self.module_list.append(resblock_one)
+
+    def forward(self, x):
+        for module in self.module_list:
+            h = x
+            for res in module:
+                h = res(h)
+            x = x + h if self.shortcut else h
+        return x
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, x) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x):
+    for module in self.module_list:
+        h = x
+        for res in module:
+            h = res(h)
+        x = x + h if self.shortcut else h
+    return x
+
+
+
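A minimal usage sketch for ResBlock; the import path is an assumption and the sizes are arbitrary:

+import torch
+from carvekit.ml.arch.yolov4.models import ResBlock   # assumed import path
+
+block = ResBlock(ch=64, nblocks=2, shortcut=True).eval()
+x = torch.randn(1, 64, 32, 32)
+with torch.no_grad():
+    y = block(x)
+print(y.shape)   # torch.Size([1, 64, 32, 32]): channel count and resolution are preserved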
+
+
+class Upsample +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Upsample(nn.Module):
+    def __init__(self):
+        super(Upsample, self).__init__()
+
+    def forward(self, x, target_size, inference=False):
+        assert x.data.dim() == 4
+        # _, _, tH, tW = target_size
+
+        if inference:
+
+            # B = x.data.size(0)
+            # C = x.data.size(1)
+            # H = x.data.size(2)
+            # W = x.data.size(3)
+
+            return (
+                x.view(x.size(0), x.size(1), x.size(2), 1, x.size(3), 1)
+                .expand(
+                    x.size(0),
+                    x.size(1),
+                    x.size(2),
+                    target_size[2] // x.size(2),
+                    x.size(3),
+                    target_size[3] // x.size(3),
+                )
+                .contiguous()
+                .view(x.size(0), x.size(1), target_size[2], target_size[3])
+            )
+        else:
+            return F.interpolate(
+                x, size=(target_size[2], target_size[3]), mode="nearest"
+            )
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, x, target_size, inference=False) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, x, target_size, inference=False):
+    assert x.data.dim() == 4
+    # _, _, tH, tW = target_size
+
+    if inference:
+
+        # B = x.data.size(0)
+        # C = x.data.size(1)
+        # H = x.data.size(2)
+        # W = x.data.size(3)
+
+        return (
+            x.view(x.size(0), x.size(1), x.size(2), 1, x.size(3), 1)
+            .expand(
+                x.size(0),
+                x.size(1),
+                x.size(2),
+                target_size[2] // x.size(2),
+                x.size(3),
+                target_size[3] // x.size(3),
+            )
+            .contiguous()
+            .view(x.size(0), x.size(1), target_size[2], target_size[3])
+        )
+    else:
+        return F.interpolate(
+            x, size=(target_size[2], target_size[3]), mode="nearest"
+        )
+
+
+
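Both branches above perform nearest-neighbour upsampling: the inference branch uses a view/expand trick (which requires the target size to be an integer multiple of the input size), the other branch calls F.interpolate. A sketch, with an assumed import path, checking that they agree on a 2Γ— upscale:

+import torch
+from carvekit.ml.arch.yolov4.models import Upsample   # assumed import path
+
+up = Upsample()
+x = torch.randn(1, 8, 10, 10)
+target = (1, 8, 20, 20)                # forward() only reads indices 2 and 3
+a = up(x, target, inference=True)      # view/expand path
+b = up(x, target, inference=False)     # F.interpolate(..., mode="nearest") path
+print(a.shape, torch.allclose(a, b))   # torch.Size([1, 8, 20, 20]) True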
+
+
+class Yolov4 +(n_classes=80, inference=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Yolov4(nn.Module):
+    def __init__(self, n_classes=80, inference=False):
+        super().__init__()
+
+        output_ch = (4 + 1 + n_classes) * 3
+
+        # backbone
+        self.down1 = DownSample1()
+        self.down2 = DownSample2()
+        self.down3 = DownSample3()
+        self.down4 = DownSample4()
+        self.down5 = DownSample5()
+        # neck
+        self.neek = Neck(inference)
+
+        # head
+        self.head = Yolov4Head(output_ch, n_classes, inference)
+
+    def forward(self, input):
+        d1 = self.down1(input)
+        d2 = self.down2(d1)
+        d3 = self.down3(d2)
+        d4 = self.down4(d3)
+        d5 = self.down5(d4)
+
+        x20, x13, x6 = self.neek(d5, d4, d3)
+
+        output = self.head(x20, x13, x6)
+        return output
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Subclasses

+ +

Methods

+
+
+def forward(self, input) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, input):
+    d1 = self.down1(input)
+    d2 = self.down2(d1)
+    d3 = self.down3(d2)
+    d4 = self.down4(d3)
+    d5 = self.down5(d4)
+
+    x20, x13, x6 = self.neek(d5, d4, d3)
+
+    output = self.head(x20, x13, x6)
+    return output
+
+
+
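A minimal instantiation sketch (import path assumed; weights are random, a real pipeline would load a checkpoint). With inference=False the head returns the three raw prediction maps whose channel count is output_ch = (4 + 1 + n_classes) * 3:

+import torch
+from carvekit.ml.arch.yolov4.models import Yolov4   # assumed import path
+
+model = Yolov4(n_classes=80, inference=False).eval()
+with torch.no_grad():
+    p8, p16, p32 = model(torch.randn(1, 3, 608, 608))
+print(p8.shape, p16.shape, p32.shape)
+# Assuming the usual CSPDarknet stride pattern of 8/16/32, the expected shapes are
+# [1, 255, 76, 76], [1, 255, 38, 38] and [1, 255, 19, 19], since (4 + 1 + 80) * 3 = 255.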
+
+
+class Yolov4Head +(output_ch, n_classes, inference=False) +
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in +a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+
+

Submodules assigned in this way will be registered, and will have their +parameters converted too when you call :meth:to, etc.

+
+

Note

+

As per the example above, an __init__() call to the parent class +must be made before assignment on the child.

+
+

:ivar training: Boolean represents whether this module is in training or +evaluation mode. +:vartype training: bool

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class Yolov4Head(nn.Module):
+    def __init__(self, output_ch, n_classes, inference=False):
+        super().__init__()
+        self.inference = inference
+
+        self.conv1 = Conv_Bn_Activation(128, 256, 3, 1, "leaky")
+        self.conv2 = Conv_Bn_Activation(
+            256, output_ch, 1, 1, "linear", bn=False, bias=True
+        )
+
+        self.yolo1 = YoloLayer(
+            anchor_mask=[0, 1, 2],
+            num_classes=n_classes,
+            anchors=[
+                12,
+                16,
+                19,
+                36,
+                40,
+                28,
+                36,
+                75,
+                76,
+                55,
+                72,
+                146,
+                142,
+                110,
+                192,
+                243,
+                459,
+                401,
+            ],
+            num_anchors=9,
+            stride=8,
+        )
+
+        # R -4
+        self.conv3 = Conv_Bn_Activation(128, 256, 3, 2, "leaky")
+
+        # R -1 -16
+        self.conv4 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv5 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv6 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv7 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv8 = Conv_Bn_Activation(512, 256, 1, 1, "leaky")
+        self.conv9 = Conv_Bn_Activation(256, 512, 3, 1, "leaky")
+        self.conv10 = Conv_Bn_Activation(
+            512, output_ch, 1, 1, "linear", bn=False, bias=True
+        )
+
+        self.yolo2 = YoloLayer(
+            anchor_mask=[3, 4, 5],
+            num_classes=n_classes,
+            anchors=[
+                12,
+                16,
+                19,
+                36,
+                40,
+                28,
+                36,
+                75,
+                76,
+                55,
+                72,
+                146,
+                142,
+                110,
+                192,
+                243,
+                459,
+                401,
+            ],
+            num_anchors=9,
+            stride=16,
+        )
+
+        # R -4
+        self.conv11 = Conv_Bn_Activation(256, 512, 3, 2, "leaky")
+
+        # R -1 -37
+        self.conv12 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv13 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv14 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv15 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv16 = Conv_Bn_Activation(1024, 512, 1, 1, "leaky")
+        self.conv17 = Conv_Bn_Activation(512, 1024, 3, 1, "leaky")
+        self.conv18 = Conv_Bn_Activation(
+            1024, output_ch, 1, 1, "linear", bn=False, bias=True
+        )
+
+        self.yolo3 = YoloLayer(
+            anchor_mask=[6, 7, 8],
+            num_classes=n_classes,
+            anchors=[
+                12,
+                16,
+                19,
+                36,
+                40,
+                28,
+                36,
+                75,
+                76,
+                55,
+                72,
+                146,
+                142,
+                110,
+                192,
+                243,
+                459,
+                401,
+            ],
+            num_anchors=9,
+            stride=32,
+        )
+
+    def forward(self, input1, input2, input3):
+        x1 = self.conv1(input1)
+        x2 = self.conv2(x1)
+
+        x3 = self.conv3(input1)
+        # R -1 -16
+        x3 = torch.cat([x3, input2], dim=1)
+        x4 = self.conv4(x3)
+        x5 = self.conv5(x4)
+        x6 = self.conv6(x5)
+        x7 = self.conv7(x6)
+        x8 = self.conv8(x7)
+        x9 = self.conv9(x8)
+        x10 = self.conv10(x9)
+
+        # R -4
+        x11 = self.conv11(x8)
+        # R -1 -37
+        x11 = torch.cat([x11, input3], dim=1)
+
+        x12 = self.conv12(x11)
+        x13 = self.conv13(x12)
+        x14 = self.conv14(x13)
+        x15 = self.conv15(x14)
+        x16 = self.conv16(x15)
+        x17 = self.conv17(x16)
+        x18 = self.conv18(x17)
+
+        if self.inference:
+            y1 = self.yolo1(x2)
+            y2 = self.yolo2(x10)
+            y3 = self.yolo3(x18)
+
+            return get_region_boxes([y1, y2, y3])
+
+        else:
+            return [x2, x10, x18]
+
+

Ancestors

+
  • torch.nn.modules.module.Module
+

Methods

+
+
+def forward(self, input1, input2, input3) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, input1, input2, input3):
+    x1 = self.conv1(input1)
+    x2 = self.conv2(x1)
+
+    x3 = self.conv3(input1)
+    # R -1 -16
+    x3 = torch.cat([x3, input2], dim=1)
+    x4 = self.conv4(x3)
+    x5 = self.conv5(x4)
+    x6 = self.conv6(x5)
+    x7 = self.conv7(x6)
+    x8 = self.conv8(x7)
+    x9 = self.conv9(x8)
+    x10 = self.conv10(x9)
+
+    # R -4
+    x11 = self.conv11(x8)
+    # R -1 -37
+    x11 = torch.cat([x11, input3], dim=1)
+
+    x12 = self.conv12(x11)
+    x13 = self.conv13(x12)
+    x14 = self.conv14(x13)
+    x15 = self.conv15(x14)
+    x16 = self.conv16(x15)
+    x17 = self.conv17(x16)
+    x18 = self.conv18(x17)
+
+    if self.inference:
+        y1 = self.yolo1(x2)
+        y2 = self.yolo2(x10)
+        y3 = self.yolo3(x18)
+
+        return get_region_boxes([y1, y2, y3])
+
+    else:
+        return [x2, x10, x18]
+
+
+
+
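The 18 numbers passed as anchors above encode nine (width, height) anchor pairs in input-image pixels; each YoloLayer selects three of them through anchor_mask and divides them by its stride before decoding:

+anchors = [12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401]
+pairs = list(zip(anchors[0::2], anchors[1::2]))
+print(pairs[0:3])   # stride 8 scale:  [(12, 16), (19, 36), (40, 28)]
+print(pairs[3:6])   # stride 16 scale: [(36, 75), (76, 55), (72, 146)]
+print(pairs[6:9])   # stride 32 scale: [(142, 110), (192, 243), (459, 401)]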
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/yolov4/utils.html b/docs/api/carvekit/ml/arch/yolov4/utils.html new file mode 100644 index 0000000..0f226f6 --- /dev/null +++ b/docs/api/carvekit/ml/arch/yolov4/utils.html @@ -0,0 +1,294 @@ + + + + + + +carvekit.ml.arch.yolov4.utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.yolov4.utils

+
+
+
+ +Expand source code + +
import numpy as np
+
+
+def nms_cpu(boxes, confs, nms_thresh=0.5, min_mode=False):
+    # print(boxes.shape)
+    x1 = boxes[:, 0]
+    y1 = boxes[:, 1]
+    x2 = boxes[:, 2]
+    y2 = boxes[:, 3]
+
+    areas = (x2 - x1) * (y2 - y1)
+    order = confs.argsort()[::-1]
+
+    keep = []
+    while order.size > 0:
+        idx_self = order[0]
+        idx_other = order[1:]
+
+        keep.append(idx_self)
+
+        xx1 = np.maximum(x1[idx_self], x1[idx_other])
+        yy1 = np.maximum(y1[idx_self], y1[idx_other])
+        xx2 = np.minimum(x2[idx_self], x2[idx_other])
+        yy2 = np.minimum(y2[idx_self], y2[idx_other])
+
+        w = np.maximum(0.0, xx2 - xx1)
+        h = np.maximum(0.0, yy2 - yy1)
+        inter = w * h
+
+        if min_mode:
+            over = inter / np.minimum(areas[order[0]], areas[order[1:]])
+        else:
+            over = inter / (areas[order[0]] + areas[order[1:]] - inter)
+
+        inds = np.where(over <= nms_thresh)[0]
+        order = order[inds + 1]
+
+    return np.array(keep)
+
+
+def post_processing(conf_thresh, nms_thresh, output):
+    # anchors = [12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401]
+    # num_anchors = 9
+    # anchor_masks = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+    # strides = [8, 16, 32]
+    # anchor_step = len(anchors) // num_anchors
+
+    # [batch, num, 1, 4]
+    box_array = output[0]
+    # [batch, num, num_classes]
+    confs = output[1]
+
+    if type(box_array).__name__ != "ndarray":
+        box_array = box_array.cpu().detach().numpy()
+        confs = confs.cpu().detach().numpy()
+
+    num_classes = confs.shape[2]
+
+    # [batch, num, 4]
+    box_array = box_array[:, :, 0]
+
+    # [batch, num, num_classes] --> [batch, num]
+    max_conf = np.max(confs, axis=2)
+    max_id = np.argmax(confs, axis=2)
+
+    bboxes_batch = []
+    for i in range(box_array.shape[0]):
+
+        argwhere = max_conf[i] > conf_thresh
+        l_box_array = box_array[i, argwhere, :]
+        l_max_conf = max_conf[i, argwhere]
+        l_max_id = max_id[i, argwhere]
+
+        bboxes = []
+        # nms for each class
+        for j in range(num_classes):
+
+            cls_argwhere = l_max_id == j
+            ll_box_array = l_box_array[cls_argwhere, :]
+            ll_max_conf = l_max_conf[cls_argwhere]
+            ll_max_id = l_max_id[cls_argwhere]
+
+            keep = nms_cpu(ll_box_array, ll_max_conf, nms_thresh)
+
+            if keep.size > 0:
+                ll_box_array = ll_box_array[keep, :]
+                ll_max_conf = ll_max_conf[keep]
+                ll_max_id = ll_max_id[keep]
+
+                for k in range(ll_box_array.shape[0]):
+                    bboxes.append(
+                        [
+                            ll_box_array[k, 0],
+                            ll_box_array[k, 1],
+                            ll_box_array[k, 2],
+                            ll_box_array[k, 3],
+                            ll_max_conf[k],
+                            ll_max_conf[k],
+                            ll_max_id[k],
+                        ]
+                    )
+
+        bboxes_batch.append(bboxes)
+
+    return bboxes_batch
+
+
+
+
+
+
+
+

Functions

+
+
+def nms_cpu(boxes, confs, nms_thresh=0.5, min_mode=False) +
+
+
+
+ +Expand source code + +
def nms_cpu(boxes, confs, nms_thresh=0.5, min_mode=False):
+    # print(boxes.shape)
+    x1 = boxes[:, 0]
+    y1 = boxes[:, 1]
+    x2 = boxes[:, 2]
+    y2 = boxes[:, 3]
+
+    areas = (x2 - x1) * (y2 - y1)
+    order = confs.argsort()[::-1]
+
+    keep = []
+    while order.size > 0:
+        idx_self = order[0]
+        idx_other = order[1:]
+
+        keep.append(idx_self)
+
+        xx1 = np.maximum(x1[idx_self], x1[idx_other])
+        yy1 = np.maximum(y1[idx_self], y1[idx_other])
+        xx2 = np.minimum(x2[idx_self], x2[idx_other])
+        yy2 = np.minimum(y2[idx_self], y2[idx_other])
+
+        w = np.maximum(0.0, xx2 - xx1)
+        h = np.maximum(0.0, yy2 - yy1)
+        inter = w * h
+
+        if min_mode:
+            over = inter / np.minimum(areas[order[0]], areas[order[1:]])
+        else:
+            over = inter / (areas[order[0]] + areas[order[1:]] - inter)
+
+        inds = np.where(over <= nms_thresh)[0]
+        order = order[inds + 1]
+
+    return np.array(keep)
+
+
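A tiny worked example of the function above; boxes are [x1, y1, x2, y2], the second box overlaps the first heavily (IoU β‰ˆ 0.81) and is suppressed, the third is disjoint and kept:

+import numpy as np
+from carvekit.ml.arch.yolov4.utils import nms_cpu
+
+boxes = np.array([[0.0, 0.0, 10.0, 10.0],
+                  [1.0, 1.0, 10.0, 10.0],
+                  [20.0, 20.0, 30.0, 30.0]])
+confs = np.array([0.9, 0.8, 0.7])
+print(nms_cpu(boxes, confs, nms_thresh=0.5))   # -> [0 2]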
+
+def post_processing(conf_thresh, nms_thresh, output) +
+
+
+
+ +Expand source code + +
def post_processing(conf_thresh, nms_thresh, output):
+    # anchors = [12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401]
+    # num_anchors = 9
+    # anchor_masks = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+    # strides = [8, 16, 32]
+    # anchor_step = len(anchors) // num_anchors
+
+    # [batch, num, 1, 4]
+    box_array = output[0]
+    # [batch, num, num_classes]
+    confs = output[1]
+
+    if type(box_array).__name__ != "ndarray":
+        box_array = box_array.cpu().detach().numpy()
+        confs = confs.cpu().detach().numpy()
+
+    num_classes = confs.shape[2]
+
+    # [batch, num, 4]
+    box_array = box_array[:, :, 0]
+
+    # [batch, num, num_classes] --> [batch, num]
+    max_conf = np.max(confs, axis=2)
+    max_id = np.argmax(confs, axis=2)
+
+    bboxes_batch = []
+    for i in range(box_array.shape[0]):
+
+        argwhere = max_conf[i] > conf_thresh
+        l_box_array = box_array[i, argwhere, :]
+        l_max_conf = max_conf[i, argwhere]
+        l_max_id = max_id[i, argwhere]
+
+        bboxes = []
+        # nms for each class
+        for j in range(num_classes):
+
+            cls_argwhere = l_max_id == j
+            ll_box_array = l_box_array[cls_argwhere, :]
+            ll_max_conf = l_max_conf[cls_argwhere]
+            ll_max_id = l_max_id[cls_argwhere]
+
+            keep = nms_cpu(ll_box_array, ll_max_conf, nms_thresh)
+
+            if keep.size > 0:
+                ll_box_array = ll_box_array[keep, :]
+                ll_max_conf = ll_max_conf[keep]
+                ll_max_id = ll_max_id[keep]
+
+                for k in range(ll_box_array.shape[0]):
+                    bboxes.append(
+                        [
+                            ll_box_array[k, 0],
+                            ll_box_array[k, 1],
+                            ll_box_array[k, 2],
+                            ll_box_array[k, 3],
+                            ll_max_conf[k],
+                            ll_max_conf[k],
+                            ll_max_id[k],
+                        ]
+                    )
+
+        bboxes_batch.append(bboxes)
+
+    return bboxes_batch
+
+
+
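A runnable usage sketch with synthetic inputs standing in for the head's inference output: boxes shaped [batch, num, 1, 4] with normalized corner coordinates, and class confidences shaped [batch, num, num_classes]. The threshold values are only illustrative; note that each returned detection is a 7-tuple in which the confidence is stored twice:

+import torch
+from carvekit.ml.arch.yolov4.utils import post_processing
+
+num, n_cls = 100, 80
+xy = torch.rand(1, num, 1, 2) * 0.5
+wh = torch.rand(1, num, 1, 2) * 0.5
+dummy_boxes = torch.cat([xy, xy + wh], dim=3)   # [x1, y1, x2, y2], normalized, x2 >= x1
+dummy_confs = torch.rand(1, num, n_cls)         # stand-in class confidences
+detections = post_processing(conf_thresh=0.4, nms_thresh=0.6, output=(dummy_boxes, dummy_confs))
+for x1, y1, x2, y2, conf, conf_again, cls_id in detections[0]:   # first image in the batch
+    print(int(cls_id), float(conf), (float(x1), float(y1), float(x2), float(y2)))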
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/arch/yolov4/yolo_layer.html b/docs/api/carvekit/ml/arch/yolov4/yolo_layer.html new file mode 100644 index 0000000..1d4eb26 --- /dev/null +++ b/docs/api/carvekit/ml/arch/yolov4/yolo_layer.html @@ -0,0 +1,984 @@ + + + + + + +carvekit.ml.arch.yolov4.yolo_layer API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.arch.yolov4.yolo_layer

+
+
+

Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4 +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Modified by Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+Source url: https://github.com/Tianxiaomo/pytorch-YOLOv4
+License: Apache License 2.0
+"""
+import numpy as np
+import torch
+import torch.nn as nn
+
+
+def yolo_forward(
+    output,
+    conf_thresh,
+    num_classes,
+    anchors,
+    num_anchors,
+    scale_x_y,
+    only_objectness=1,
+    validation=False,
+):
+    # Output would be invalid if it does not satisfy this assert
+    # assert (output.size(1) == (5 + num_classes) * num_anchors)
+
+    # print(output.size())
+
+    # Slice the second dimension (channel) of output into:
+    # [ 2, 2, 1, num_classes, 2, 2, 1, num_classes, 2, 2, 1, num_classes ]
+    # And then into
+    # bxy = [ 6 ] bwh = [ 6 ] det_conf = [ 3 ] cls_conf = [ num_classes * 3 ]
+    batch = output.size(0)
+    H = output.size(2)
+    W = output.size(3)
+
+    bxy_list = []
+    bwh_list = []
+    det_confs_list = []
+    cls_confs_list = []
+
+    for i in range(num_anchors):
+        begin = i * (5 + num_classes)
+        end = (i + 1) * (5 + num_classes)
+
+        bxy_list.append(output[:, begin : begin + 2])
+        bwh_list.append(output[:, begin + 2 : begin + 4])
+        det_confs_list.append(output[:, begin + 4 : begin + 5])
+        cls_confs_list.append(output[:, begin + 5 : end])
+
+    # Shape: [batch, num_anchors * 2, H, W]
+    bxy = torch.cat(bxy_list, dim=1)
+    # Shape: [batch, num_anchors * 2, H, W]
+    bwh = torch.cat(bwh_list, dim=1)
+
+    # Shape: [batch, num_anchors, H, W]
+    det_confs = torch.cat(det_confs_list, dim=1)
+    # Shape: [batch, num_anchors * H * W]
+    det_confs = det_confs.view(batch, num_anchors * H * W)
+
+    # Shape: [batch, num_anchors * num_classes, H, W]
+    cls_confs = torch.cat(cls_confs_list, dim=1)
+    # Shape: [batch, num_anchors, num_classes, H * W]
+    cls_confs = cls_confs.view(batch, num_anchors, num_classes, H * W)
+    # Shape: [batch, num_anchors, num_classes, H * W] --> [batch, num_anchors * H * W, num_classes]
+    cls_confs = cls_confs.permute(0, 1, 3, 2).reshape(
+        batch, num_anchors * H * W, num_classes
+    )
+
+    # Apply sigmoid(), exp() and softmax() to slices
+    #
+    bxy = torch.sigmoid(bxy) * scale_x_y - 0.5 * (scale_x_y - 1)
+    bwh = torch.exp(bwh)
+    det_confs = torch.sigmoid(det_confs)
+    cls_confs = torch.sigmoid(cls_confs)
+
+    # Prepare C-x, C-y, P-w, P-h (None of them are torch related)
+    grid_x = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(np.linspace(0, W - 1, W), axis=0).repeat(H, 0), axis=0
+        ),
+        axis=0,
+    )
+    grid_y = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(np.linspace(0, H - 1, H), axis=1).repeat(W, 1), axis=0
+        ),
+        axis=0,
+    )
+    # grid_x = torch.linspace(0, W - 1, W).reshape(1, 1, 1, W).repeat(1, 1, H, 1)
+    # grid_y = torch.linspace(0, H - 1, H).reshape(1, 1, H, 1).repeat(1, 1, 1, W)
+
+    anchor_w = []
+    anchor_h = []
+    for i in range(num_anchors):
+        anchor_w.append(anchors[i * 2])
+        anchor_h.append(anchors[i * 2 + 1])
+
+    device = None
+    cuda_check = output.is_cuda
+    if cuda_check:
+        device = output.get_device()
+
+    bx_list = []
+    by_list = []
+    bw_list = []
+    bh_list = []
+
+    # Apply C-x, C-y, P-w, P-h
+    for i in range(num_anchors):
+        ii = i * 2
+        # Shape: [batch, 1, H, W]
+        bx = bxy[:, ii : ii + 1] + torch.tensor(
+            grid_x, device=device, dtype=torch.float32
+        )  # grid_x.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        by = bxy[:, ii + 1 : ii + 2] + torch.tensor(
+            grid_y, device=device, dtype=torch.float32
+        )  # grid_y.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        bw = bwh[:, ii : ii + 1] * anchor_w[i]
+        # Shape: [batch, 1, H, W]
+        bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i]
+
+        bx_list.append(bx)
+        by_list.append(by)
+        bw_list.append(bw)
+        bh_list.append(bh)
+
+    ########################################
+    #   Figure out bboxes from slices     #
+    ########################################
+
+    # Shape: [batch, num_anchors, H, W]
+    bx = torch.cat(bx_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    by = torch.cat(by_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bw = torch.cat(bw_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bh = torch.cat(bh_list, dim=1)
+
+    # Shape: [batch, 2 * num_anchors, H, W]
+    bx_bw = torch.cat((bx, bw), dim=1)
+    # Shape: [batch, 2 * num_anchors, H, W]
+    by_bh = torch.cat((by, bh), dim=1)
+
+    # normalize coordinates to [0, 1]
+    bx_bw /= W
+    by_bh /= H
+
+    # Shape: [batch, num_anchors * H * W, 1]
+    bx = bx_bw[:, :num_anchors].view(batch, num_anchors * H * W, 1)
+    by = by_bh[:, :num_anchors].view(batch, num_anchors * H * W, 1)
+    bw = bx_bw[:, num_anchors:].view(batch, num_anchors * H * W, 1)
+    bh = by_bh[:, num_anchors:].view(batch, num_anchors * H * W, 1)
+
+    bx1 = bx - bw * 0.5
+    by1 = by - bh * 0.5
+    bx2 = bx1 + bw
+    by2 = by1 + bh
+
+    # Shape: [batch, num_anchors * h * w, 4] -> [batch, num_anchors * h * w, 1, 4]
+    boxes = torch.cat((bx1, by1, bx2, by2), dim=2).view(
+        batch, num_anchors * H * W, 1, 4
+    )
+    # boxes = boxes.repeat(1, 1, num_classes, 1)
+
+    # boxes:     [batch, num_anchors * H * W, 1, 4]
+    # cls_confs: [batch, num_anchors * H * W, num_classes]
+    # det_confs: [batch, num_anchors * H * W]
+
+    det_confs = det_confs.view(batch, num_anchors * H * W, 1)
+    confs = cls_confs * det_confs
+
+    # boxes: [batch, num_anchors * H * W, 1, 4]
+    # confs: [batch, num_anchors * H * W, num_classes]
+
+    return boxes, confs
+
+
+def yolo_forward_dynamic(
+    output,
+    conf_thresh,
+    num_classes,
+    anchors,
+    num_anchors,
+    scale_x_y,
+    only_objectness=1,
+    validation=False,
+):
+    # Output would be invalid if it does not satisfy this assert
+    # assert (output.size(1) == (5 + num_classes) * num_anchors)
+
+    # print(output.size())
+
+    # Slice the second dimension (channel) of output into:
+    # [ 2, 2, 1, num_classes, 2, 2, 1, num_classes, 2, 2, 1, num_classes ]
+    # And then into
+    # bxy = [ 6 ] bwh = [ 6 ] det_conf = [ 3 ] cls_conf = [ num_classes * 3 ]
+    # batch = output.size(0)
+    # H = output.size(2)
+    # W = output.size(3)
+
+    bxy_list = []
+    bwh_list = []
+    det_confs_list = []
+    cls_confs_list = []
+
+    for i in range(num_anchors):
+        begin = i * (5 + num_classes)
+        end = (i + 1) * (5 + num_classes)
+
+        bxy_list.append(output[:, begin : begin + 2])
+        bwh_list.append(output[:, begin + 2 : begin + 4])
+        det_confs_list.append(output[:, begin + 4 : begin + 5])
+        cls_confs_list.append(output[:, begin + 5 : end])
+
+    # Shape: [batch, num_anchors * 2, H, W]
+    bxy = torch.cat(bxy_list, dim=1)
+    # Shape: [batch, num_anchors * 2, H, W]
+    bwh = torch.cat(bwh_list, dim=1)
+
+    # Shape: [batch, num_anchors, H, W]
+    det_confs = torch.cat(det_confs_list, dim=1)
+    # Shape: [batch, num_anchors * H * W]
+    det_confs = det_confs.view(
+        output.size(0), num_anchors * output.size(2) * output.size(3)
+    )
+
+    # Shape: [batch, num_anchors * num_classes, H, W]
+    cls_confs = torch.cat(cls_confs_list, dim=1)
+    # Shape: [batch, num_anchors, num_classes, H * W]
+    cls_confs = cls_confs.view(
+        output.size(0), num_anchors, num_classes, output.size(2) * output.size(3)
+    )
+    # Shape: [batch, num_anchors, num_classes, H * W] --> [batch, num_anchors * H * W, num_classes]
+    cls_confs = cls_confs.permute(0, 1, 3, 2).reshape(
+        output.size(0), num_anchors * output.size(2) * output.size(3), num_classes
+    )
+
+    # Apply sigmoid(), exp() and softmax() to slices
+    #
+    bxy = torch.sigmoid(bxy) * scale_x_y - 0.5 * (scale_x_y - 1)
+    bwh = torch.exp(bwh)
+    det_confs = torch.sigmoid(det_confs)
+    cls_confs = torch.sigmoid(cls_confs)
+
+    # Prepare C-x, C-y, P-w, P-h (None of them are torch related)
+    grid_x = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(
+                np.linspace(0, output.size(3) - 1, output.size(3)), axis=0
+            ).repeat(output.size(2), 0),
+            axis=0,
+        ),
+        axis=0,
+    )
+    grid_y = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(
+                np.linspace(0, output.size(2) - 1, output.size(2)), axis=1
+            ).repeat(output.size(3), 1),
+            axis=0,
+        ),
+        axis=0,
+    )
+    # grid_x = torch.linspace(0, W - 1, W).reshape(1, 1, 1, W).repeat(1, 1, H, 1)
+    # grid_y = torch.linspace(0, H - 1, H).reshape(1, 1, H, 1).repeat(1, 1, 1, W)
+
+    anchor_w = []
+    anchor_h = []
+    for i in range(num_anchors):
+        anchor_w.append(anchors[i * 2])
+        anchor_h.append(anchors[i * 2 + 1])
+
+    device = None
+    cuda_check = output.is_cuda
+    if cuda_check:
+        device = output.get_device()
+
+    bx_list = []
+    by_list = []
+    bw_list = []
+    bh_list = []
+
+    # Apply C-x, C-y, P-w, P-h
+    for i in range(num_anchors):
+        ii = i * 2
+        # Shape: [batch, 1, H, W]
+        bx = bxy[:, ii : ii + 1] + torch.tensor(
+            grid_x, device=device, dtype=torch.float32
+        )  # grid_x.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        by = bxy[:, ii + 1 : ii + 2] + torch.tensor(
+            grid_y, device=device, dtype=torch.float32
+        )  # grid_y.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        bw = bwh[:, ii : ii + 1] * anchor_w[i]
+        # Shape: [batch, 1, H, W]
+        bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i]
+
+        bx_list.append(bx)
+        by_list.append(by)
+        bw_list.append(bw)
+        bh_list.append(bh)
+
+    ########################################
+    #   Figure out bboxes from slices     #
+    ########################################
+
+    # Shape: [batch, num_anchors, H, W]
+    bx = torch.cat(bx_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    by = torch.cat(by_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bw = torch.cat(bw_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bh = torch.cat(bh_list, dim=1)
+
+    # Shape: [batch, 2 * num_anchors, H, W]
+    bx_bw = torch.cat((bx, bw), dim=1)
+    # Shape: [batch, 2 * num_anchors, H, W]
+    by_bh = torch.cat((by, bh), dim=1)
+
+    # normalize coordinates to [0, 1]
+    bx_bw /= output.size(3)
+    by_bh /= output.size(2)
+
+    # Shape: [batch, num_anchors * H * W, 1]
+    bx = bx_bw[:, :num_anchors].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    by = by_bh[:, :num_anchors].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    bw = bx_bw[:, num_anchors:].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    bh = by_bh[:, num_anchors:].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+
+    bx1 = bx - bw * 0.5
+    by1 = by - bh * 0.5
+    bx2 = bx1 + bw
+    by2 = by1 + bh
+
+    # Shape: [batch, num_anchors * h * w, 4] -> [batch, num_anchors * h * w, 1, 4]
+    boxes = torch.cat((bx1, by1, bx2, by2), dim=2).view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1, 4
+    )
+    # boxes = boxes.repeat(1, 1, num_classes, 1)
+
+    # boxes:     [batch, num_anchors * H * W, 1, 4]
+    # cls_confs: [batch, num_anchors * H * W, num_classes]
+    # det_confs: [batch, num_anchors * H * W]
+
+    det_confs = det_confs.view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    confs = cls_confs * det_confs
+
+    # boxes: [batch, num_anchors * H * W, 1, 4]
+    # confs: [batch, num_anchors * H * W, num_classes]
+
+    return boxes, confs
+
+
+class YoloLayer(nn.Module):
+    """Yolo layer
+    model_out: while inference,is post-processing inside or outside the model
+        true:outside
+    """
+
+    def __init__(
+        self,
+        anchor_mask=[],
+        num_classes=0,
+        anchors=[],
+        num_anchors=1,
+        stride=32,
+        model_out=False,
+    ):
+        super(YoloLayer, self).__init__()
+        self.anchor_mask = anchor_mask
+        self.num_classes = num_classes
+        self.anchors = anchors
+        self.num_anchors = num_anchors
+        self.anchor_step = len(anchors) // num_anchors
+        self.coord_scale = 1
+        self.noobject_scale = 1
+        self.object_scale = 5
+        self.class_scale = 1
+        self.thresh = 0.6
+        self.stride = stride
+        self.seen = 0
+        self.scale_x_y = 1
+
+        self.model_out = model_out
+
+    def forward(self, output, target=None):
+        if self.training:
+            return output
+        masked_anchors = []
+        for m in self.anchor_mask:
+            masked_anchors += self.anchors[
+                m * self.anchor_step : (m + 1) * self.anchor_step
+            ]
+        masked_anchors = [anchor / self.stride for anchor in masked_anchors]
+
+        return yolo_forward_dynamic(
+            output,
+            self.thresh,
+            self.num_classes,
+            masked_anchors,
+            len(self.anchor_mask),
+            scale_x_y=self.scale_x_y,
+        )
+
+
+
+
+
+
+
+

Functions

+
+
+def yolo_forward(output, conf_thresh, num_classes, anchors, num_anchors, scale_x_y, only_objectness=1, validation=False) +
+
+
+
+ +Expand source code + +
def yolo_forward(
+    output,
+    conf_thresh,
+    num_classes,
+    anchors,
+    num_anchors,
+    scale_x_y,
+    only_objectness=1,
+    validation=False,
+):
+    # Output would be invalid if it does not satisfy this assert
+    # assert (output.size(1) == (5 + num_classes) * num_anchors)
+
+    # print(output.size())
+
+    # Slice the second dimension (channel) of output into:
+    # [ 2, 2, 1, num_classes, 2, 2, 1, num_classes, 2, 2, 1, num_classes ]
+    # And then into
+    # bxy = [ 6 ] bwh = [ 6 ] det_conf = [ 3 ] cls_conf = [ num_classes * 3 ]
+    batch = output.size(0)
+    H = output.size(2)
+    W = output.size(3)
+
+    bxy_list = []
+    bwh_list = []
+    det_confs_list = []
+    cls_confs_list = []
+
+    for i in range(num_anchors):
+        begin = i * (5 + num_classes)
+        end = (i + 1) * (5 + num_classes)
+
+        bxy_list.append(output[:, begin : begin + 2])
+        bwh_list.append(output[:, begin + 2 : begin + 4])
+        det_confs_list.append(output[:, begin + 4 : begin + 5])
+        cls_confs_list.append(output[:, begin + 5 : end])
+
+    # Shape: [batch, num_anchors * 2, H, W]
+    bxy = torch.cat(bxy_list, dim=1)
+    # Shape: [batch, num_anchors * 2, H, W]
+    bwh = torch.cat(bwh_list, dim=1)
+
+    # Shape: [batch, num_anchors, H, W]
+    det_confs = torch.cat(det_confs_list, dim=1)
+    # Shape: [batch, num_anchors * H * W]
+    det_confs = det_confs.view(batch, num_anchors * H * W)
+
+    # Shape: [batch, num_anchors * num_classes, H, W]
+    cls_confs = torch.cat(cls_confs_list, dim=1)
+    # Shape: [batch, num_anchors, num_classes, H * W]
+    cls_confs = cls_confs.view(batch, num_anchors, num_classes, H * W)
+    # Shape: [batch, num_anchors, num_classes, H * W] --> [batch, num_anchors * H * W, num_classes]
+    cls_confs = cls_confs.permute(0, 1, 3, 2).reshape(
+        batch, num_anchors * H * W, num_classes
+    )
+
+    # Apply sigmoid(), exp() and softmax() to slices
+    #
+    bxy = torch.sigmoid(bxy) * scale_x_y - 0.5 * (scale_x_y - 1)
+    bwh = torch.exp(bwh)
+    det_confs = torch.sigmoid(det_confs)
+    cls_confs = torch.sigmoid(cls_confs)
+
+    # Prepare C-x, C-y, P-w, P-h (None of them are torch related)
+    grid_x = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(np.linspace(0, W - 1, W), axis=0).repeat(H, 0), axis=0
+        ),
+        axis=0,
+    )
+    grid_y = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(np.linspace(0, H - 1, H), axis=1).repeat(W, 1), axis=0
+        ),
+        axis=0,
+    )
+    # grid_x = torch.linspace(0, W - 1, W).reshape(1, 1, 1, W).repeat(1, 1, H, 1)
+    # grid_y = torch.linspace(0, H - 1, H).reshape(1, 1, H, 1).repeat(1, 1, 1, W)
+
+    anchor_w = []
+    anchor_h = []
+    for i in range(num_anchors):
+        anchor_w.append(anchors[i * 2])
+        anchor_h.append(anchors[i * 2 + 1])
+
+    device = None
+    cuda_check = output.is_cuda
+    if cuda_check:
+        device = output.get_device()
+
+    bx_list = []
+    by_list = []
+    bw_list = []
+    bh_list = []
+
+    # Apply C-x, C-y, P-w, P-h
+    for i in range(num_anchors):
+        ii = i * 2
+        # Shape: [batch, 1, H, W]
+        bx = bxy[:, ii : ii + 1] + torch.tensor(
+            grid_x, device=device, dtype=torch.float32
+        )  # grid_x.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        by = bxy[:, ii + 1 : ii + 2] + torch.tensor(
+            grid_y, device=device, dtype=torch.float32
+        )  # grid_y.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        bw = bwh[:, ii : ii + 1] * anchor_w[i]
+        # Shape: [batch, 1, H, W]
+        bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i]
+
+        bx_list.append(bx)
+        by_list.append(by)
+        bw_list.append(bw)
+        bh_list.append(bh)
+
+    ########################################
+    #   Figure out bboxes from slices     #
+    ########################################
+
+    # Shape: [batch, num_anchors, H, W]
+    bx = torch.cat(bx_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    by = torch.cat(by_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bw = torch.cat(bw_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bh = torch.cat(bh_list, dim=1)
+
+    # Shape: [batch, 2 * num_anchors, H, W]
+    bx_bw = torch.cat((bx, bw), dim=1)
+    # Shape: [batch, 2 * num_anchors, H, W]
+    by_bh = torch.cat((by, bh), dim=1)
+
+    # normalize coordinates to [0, 1]
+    bx_bw /= W
+    by_bh /= H
+
+    # Shape: [batch, num_anchors * H * W, 1]
+    bx = bx_bw[:, :num_anchors].view(batch, num_anchors * H * W, 1)
+    by = by_bh[:, :num_anchors].view(batch, num_anchors * H * W, 1)
+    bw = bx_bw[:, num_anchors:].view(batch, num_anchors * H * W, 1)
+    bh = by_bh[:, num_anchors:].view(batch, num_anchors * H * W, 1)
+
+    bx1 = bx - bw * 0.5
+    by1 = by - bh * 0.5
+    bx2 = bx1 + bw
+    by2 = by1 + bh
+
+    # Shape: [batch, num_anchors * h * w, 4] -> [batch, num_anchors * h * w, 1, 4]
+    boxes = torch.cat((bx1, by1, bx2, by2), dim=2).view(
+        batch, num_anchors * H * W, 1, 4
+    )
+    # boxes = boxes.repeat(1, 1, num_classes, 1)
+
+    # boxes:     [batch, num_anchors * H * W, 1, 4]
+    # cls_confs: [batch, num_anchors * H * W, num_classes]
+    # det_confs: [batch, num_anchors * H * W]
+
+    det_confs = det_confs.view(batch, num_anchors * H * W, 1)
+    confs = cls_confs * det_confs
+
+    # boxes: [batch, num_anchors * H * W, 1, 4]
+    # confs: [batch, num_anchors * H * W, num_classes]
+
+    return boxes, confs
+
+
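A worked numeric illustration of the decoding performed above (arbitrary values, scale_x_y = 1): a raw prediction in grid column cx = 3 of a W = 19 grid is turned into a normalized centre/width and then into corner coordinates:

+import math
+
+W = 19                      # grid width at this scale
+tx, tw = 0.0, 0.2           # raw network outputs for one cell
+cx = 3                      # offset taken from grid_x
+anchor_w = 1.5              # anchor width, already divided by the stride
+sigmoid = lambda v: 1.0 / (1.0 + math.exp(-v))
+bx = (sigmoid(tx) + cx) / W          # (0.5 + 3) / 19 ~= 0.184
+bw = math.exp(tw) * anchor_w / W     # ~= 0.096
+bx1 = bx - bw * 0.5                  # ~= 0.136
+bx2 = bx1 + bw                       # ~= 0.232
+print(round(bx, 3), round(bw, 3), round(bx1, 3), round(bx2, 3))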
+
+def yolo_forward_dynamic(output, conf_thresh, num_classes, anchors, num_anchors, scale_x_y, only_objectness=1, validation=False) +
+
+
+
+ +Expand source code + +
def yolo_forward_dynamic(
+    output,
+    conf_thresh,
+    num_classes,
+    anchors,
+    num_anchors,
+    scale_x_y,
+    only_objectness=1,
+    validation=False,
+):
+    # Output would be invalid if it does not satisfy this assert
+    # assert (output.size(1) == (5 + num_classes) * num_anchors)
+
+    # print(output.size())
+
+    # Slice the second dimension (channel) of output into:
+    # [ 2, 2, 1, num_classes, 2, 2, 1, num_classes, 2, 2, 1, num_classes ]
+    # And then into
+    # bxy = [ 6 ] bwh = [ 6 ] det_conf = [ 3 ] cls_conf = [ num_classes * 3 ]
+    # batch = output.size(0)
+    # H = output.size(2)
+    # W = output.size(3)
+
+    bxy_list = []
+    bwh_list = []
+    det_confs_list = []
+    cls_confs_list = []
+
+    for i in range(num_anchors):
+        begin = i * (5 + num_classes)
+        end = (i + 1) * (5 + num_classes)
+
+        bxy_list.append(output[:, begin : begin + 2])
+        bwh_list.append(output[:, begin + 2 : begin + 4])
+        det_confs_list.append(output[:, begin + 4 : begin + 5])
+        cls_confs_list.append(output[:, begin + 5 : end])
+
+    # Shape: [batch, num_anchors * 2, H, W]
+    bxy = torch.cat(bxy_list, dim=1)
+    # Shape: [batch, num_anchors * 2, H, W]
+    bwh = torch.cat(bwh_list, dim=1)
+
+    # Shape: [batch, num_anchors, H, W]
+    det_confs = torch.cat(det_confs_list, dim=1)
+    # Shape: [batch, num_anchors * H * W]
+    det_confs = det_confs.view(
+        output.size(0), num_anchors * output.size(2) * output.size(3)
+    )
+
+    # Shape: [batch, num_anchors * num_classes, H, W]
+    cls_confs = torch.cat(cls_confs_list, dim=1)
+    # Shape: [batch, num_anchors, num_classes, H * W]
+    cls_confs = cls_confs.view(
+        output.size(0), num_anchors, num_classes, output.size(2) * output.size(3)
+    )
+    # Shape: [batch, num_anchors, num_classes, H * W] --> [batch, num_anchors * H * W, num_classes]
+    cls_confs = cls_confs.permute(0, 1, 3, 2).reshape(
+        output.size(0), num_anchors * output.size(2) * output.size(3), num_classes
+    )
+
+    # Apply sigmoid(), exp() and softmax() to slices
+    #
+    bxy = torch.sigmoid(bxy) * scale_x_y - 0.5 * (scale_x_y - 1)
+    bwh = torch.exp(bwh)
+    det_confs = torch.sigmoid(det_confs)
+    cls_confs = torch.sigmoid(cls_confs)
+
+    # Prepare C-x, C-y, P-w, P-h (None of them are torch related)
+    grid_x = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(
+                np.linspace(0, output.size(3) - 1, output.size(3)), axis=0
+            ).repeat(output.size(2), 0),
+            axis=0,
+        ),
+        axis=0,
+    )
+    grid_y = np.expand_dims(
+        np.expand_dims(
+            np.expand_dims(
+                np.linspace(0, output.size(2) - 1, output.size(2)), axis=1
+            ).repeat(output.size(3), 1),
+            axis=0,
+        ),
+        axis=0,
+    )
+    # grid_x = torch.linspace(0, W - 1, W).reshape(1, 1, 1, W).repeat(1, 1, H, 1)
+    # grid_y = torch.linspace(0, H - 1, H).reshape(1, 1, H, 1).repeat(1, 1, 1, W)
+
+    anchor_w = []
+    anchor_h = []
+    for i in range(num_anchors):
+        anchor_w.append(anchors[i * 2])
+        anchor_h.append(anchors[i * 2 + 1])
+
+    device = None
+    cuda_check = output.is_cuda
+    if cuda_check:
+        device = output.get_device()
+
+    bx_list = []
+    by_list = []
+    bw_list = []
+    bh_list = []
+
+    # Apply C-x, C-y, P-w, P-h
+    for i in range(num_anchors):
+        ii = i * 2
+        # Shape: [batch, 1, H, W]
+        bx = bxy[:, ii : ii + 1] + torch.tensor(
+            grid_x, device=device, dtype=torch.float32
+        )  # grid_x.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        by = bxy[:, ii + 1 : ii + 2] + torch.tensor(
+            grid_y, device=device, dtype=torch.float32
+        )  # grid_y.to(device=device, dtype=torch.float32)
+        # Shape: [batch, 1, H, W]
+        bw = bwh[:, ii : ii + 1] * anchor_w[i]
+        # Shape: [batch, 1, H, W]
+        bh = bwh[:, ii + 1 : ii + 2] * anchor_h[i]
+
+        bx_list.append(bx)
+        by_list.append(by)
+        bw_list.append(bw)
+        bh_list.append(bh)
+
+    ########################################
+    #   Figure out bboxes from slices     #
+    ########################################
+
+    # Shape: [batch, num_anchors, H, W]
+    bx = torch.cat(bx_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    by = torch.cat(by_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bw = torch.cat(bw_list, dim=1)
+    # Shape: [batch, num_anchors, H, W]
+    bh = torch.cat(bh_list, dim=1)
+
+    # Shape: [batch, 2 * num_anchors, H, W]
+    bx_bw = torch.cat((bx, bw), dim=1)
+    # Shape: [batch, 2 * num_anchors, H, W]
+    by_bh = torch.cat((by, bh), dim=1)
+
+    # normalize coordinates to [0, 1]
+    bx_bw /= output.size(3)
+    by_bh /= output.size(2)
+
+    # Shape: [batch, num_anchors * H * W, 1]
+    bx = bx_bw[:, :num_anchors].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    by = by_bh[:, :num_anchors].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    bw = bx_bw[:, num_anchors:].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    bh = by_bh[:, num_anchors:].view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+
+    bx1 = bx - bw * 0.5
+    by1 = by - bh * 0.5
+    bx2 = bx1 + bw
+    by2 = by1 + bh
+
+    # Shape: [batch, num_anchors * h * w, 4] -> [batch, num_anchors * h * w, 1, 4]
+    boxes = torch.cat((bx1, by1, bx2, by2), dim=2).view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1, 4
+    )
+    # boxes = boxes.repeat(1, 1, num_classes, 1)
+
+    # boxes:     [batch, num_anchors * H * W, 1, 4]
+    # cls_confs: [batch, num_anchors * H * W, num_classes]
+    # det_confs: [batch, num_anchors * H * W]
+
+    det_confs = det_confs.view(
+        output.size(0), num_anchors * output.size(2) * output.size(3), 1
+    )
+    confs = cls_confs * det_confs
+
+    # boxes: [batch, num_anchors * H * W, 1, 4]
+    # confs: [batch, num_anchors * H * W, num_classes]
+
+    return boxes, confs
+
+
+
+
+
+
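To make the decoded output concrete, here is a small, hedged sketch of filtering the returned tensors by confidence. It relies only on the shapes documented above (`boxes`: [batch, N, 1, 4], `confs`: [batch, N, num_classes]); the threshold and the random dummy tensors are illustrative placeholders, not part of CarveKit's API.

``` python
import torch

def filter_by_confidence(boxes: torch.Tensor, confs: torch.Tensor, conf_thresh: float = 0.4):
    """Keep detections whose best class confidence exceeds conf_thresh.

    boxes: [batch, N, 1, 4], confs: [batch, N, num_classes] (shapes as documented above).
    """
    best_conf, best_cls = confs.max(dim=2)  # [batch, N]
    results = []
    for b in range(boxes.size(0)):
        keep = (best_conf[b] > conf_thresh).nonzero(as_tuple=True)[0]
        results.append((boxes[b, keep, 0, :], best_conf[b, keep], best_cls[b, keep]))
    return results

# Dummy tensors with the documented shapes (illustration only)
boxes = torch.rand(1, 8, 1, 4)
confs = torch.rand(1, 8, 80)
kept_boxes, kept_scores, kept_classes = filter_by_confidence(boxes, confs)[0]
```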

Classes

+
+
+class YoloLayer +(anchor_mask=[], num_classes=0, anchors=[], num_anchors=1, stride=32, model_out=False) +
+
+

Yolo layer. model_out: whether post-processing at inference time is performed outside the model (True = outside).

+

Initializes internal Module state, shared by both nn.Module and ScriptModule.

+
+ +Expand source code + +
class YoloLayer(nn.Module):
+    """Yolo layer
+    model_out: while inference,is post-processing inside or outside the model
+        true:outside
+    """
+
+    def __init__(
+        self,
+        anchor_mask=[],
+        num_classes=0,
+        anchors=[],
+        num_anchors=1,
+        stride=32,
+        model_out=False,
+    ):
+        super(YoloLayer, self).__init__()
+        self.anchor_mask = anchor_mask
+        self.num_classes = num_classes
+        self.anchors = anchors
+        self.num_anchors = num_anchors
+        self.anchor_step = len(anchors) // num_anchors
+        self.coord_scale = 1
+        self.noobject_scale = 1
+        self.object_scale = 5
+        self.class_scale = 1
+        self.thresh = 0.6
+        self.stride = stride
+        self.seen = 0
+        self.scale_x_y = 1
+
+        self.model_out = model_out
+
+    def forward(self, output, target=None):
+        if self.training:
+            return output
+        masked_anchors = []
+        for m in self.anchor_mask:
+            masked_anchors += self.anchors[
+                m * self.anchor_step : (m + 1) * self.anchor_step
+            ]
+        masked_anchors = [anchor / self.stride for anchor in masked_anchors]
+
+        return yolo_forward_dynamic(
+            output,
+            self.thresh,
+            self.num_classes,
+            masked_anchors,
+            len(self.anchor_mask),
+            scale_x_y=self.scale_x_y,
+        )
+
+
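A minimal usage sketch of this layer in inference mode. The anchor values, stride, and feature-map size below are illustrative YOLO-style placeholders (not values taken from CarveKit), and the sketch assumes the usual [batch, num_anchors * (5 + num_classes), H, W] channel layout.

``` python
import torch

# Three (w, h) anchor pairs for this scale; placeholder values
layer = YoloLayer(
    anchor_mask=[0, 1, 2],
    num_classes=80,
    anchors=[12, 16, 19, 36, 40, 28],
    num_anchors=3,
    stride=8,
)
layer.eval()  # in training mode forward() just returns its input

# Dummy feature map: [batch, num_anchors * (5 + num_classes), H, W]
feature_map = torch.rand(1, 3 * (5 + 80), 76, 76)
boxes, confs = layer(feature_map)
# boxes: [1, 3 * 76 * 76, 1, 4], confs: [1, 3 * 76 * 76, 80]
```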

Ancestors

+
    +
  • torch.nn.modules.module.Module
  • +
+

Methods

+
+
+def forward(self, output, target=None) ‑>Β Callable[...,Β Any] +
+
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within +this function, one should call the :class:Module instance afterwards +instead of this since the former takes care of running the +registered hooks while the latter silently ignores them.

+
+
+ +Expand source code + +
def forward(self, output, target=None):
+    if self.training:
+        return output
+    masked_anchors = []
+    for m in self.anchor_mask:
+        masked_anchors += self.anchors[
+            m * self.anchor_step : (m + 1) * self.anchor_step
+        ]
+    masked_anchors = [anchor / self.stride for anchor in masked_anchors]
+
+    return yolo_forward_dynamic(
+        output,
+        self.thresh,
+        self.num_classes,
+        masked_anchors,
+        len(self.anchor_mask),
+        scale_x_y=self.scale_x_y,
+    )
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/files/index.html b/docs/api/carvekit/ml/files/index.html new file mode 100644 index 0000000..6bf5e0b --- /dev/null +++ b/docs/api/carvekit/ml/files/index.html @@ -0,0 +1,77 @@ + + + + + + +carvekit.ml.files API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.files

+
+
+
+ +Expand source code + +
from pathlib import Path
+
+carvekit_dir = Path.home().joinpath(".cache/carvekit")
+
+carvekit_dir.mkdir(parents=True, exist_ok=True)
+
+checkpoints_dir = carvekit_dir.joinpath("checkpoints")
+
+
+
+
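A small sketch of inspecting the cache locations defined above; the printed paths depend on the user's home directory, and checkpoints may not exist until a model has been downloaded.

``` python
from carvekit.ml.files import carvekit_dir, checkpoints_dir

print(carvekit_dir)      # typically ~/.cache/carvekit (created on import)
print(checkpoints_dir)   # ~/.cache/carvekit/checkpoints (may not exist yet)

if checkpoints_dir.exists():
    # List any already-downloaded model weights
    print([p.name for p in checkpoints_dir.glob("*.pth")])
```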

Sub-modules

+
+
carvekit.ml.files.models_loc
+
+ +
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/files/models_loc.html b/docs/api/carvekit/ml/files/models_loc.html new file mode 100644 index 0000000..24e43a5 --- /dev/null +++ b/docs/api/carvekit/ml/files/models_loc.html @@ -0,0 +1,417 @@ + + + + + + +carvekit.ml.files.models_loc API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.files.models_loc

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import pathlib
+from carvekit.ml.files import checkpoints_dir
+from carvekit.utils.download_models import downloader
+
+
+def u2net_full_pretrained() -> pathlib.Path:
+    """Returns u2net pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("u2net.pth")
+
+
+def basnet_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("basnet.pth")
+
+
+def deeplab_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("deeplab.pth")
+
+
+def fba_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("fba_matting.pth")
+
+
+def tracer_b7_pretrained() -> pathlib.Path:
+    """Returns TRACER with EfficientNet v1 b7 encoder pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("tracer_b7.pth")
+
+
+def scene_classifier_pretrained() -> pathlib.Path:
+    """Returns scene classifier pretrained model location
+    This model is used to classify scenes into 3 categories: hard, soft, digital
+
+    hard - scenes with hard edges, such as objects, buildings, etc.
+    soft - scenes with soft edges, such as portraits, hair, animals, etc.
+    digital - digital scenes, such as screenshots, graphics, etc.
+
+    more info: https://huggingface.co/Carve/scene_classifier
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("scene_classifier.pth")
+
+
+def yolov4_coco_pretrained() -> pathlib.Path:
+    """Returns yolov4 classifier pretrained model location
+    This model is used to classify objects in images.
+
+    Training dataset: COCO 2017
+    Training classes: 80
+
+    It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch)
+    We have only added coco classnames to the model.
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("yolov4_coco_with_classes.pth")
+
+
+def cascadepsp_pretrained() -> pathlib.Path:
+    """Returns cascade psp pretrained model location
+    This model is used to refine segmentation masks.
+
+    Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000
+    more info: https://huggingface.co/Carve/cascadepsp
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("cascadepsp.pth")
+
+
+def download_all():
+    u2net_full_pretrained()
+    fba_pretrained()
+    deeplab_pretrained()
+    basnet_pretrained()
+    tracer_b7_pretrained()
+    scene_classifier_pretrained()
+    yolov4_coco_pretrained()
+    cascadepsp_pretrained()
+
+
+
+
+
+
+
+
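A short usage sketch of the helpers above; the first call downloads the checkpoint if it is not cached yet, so it needs network access.

``` python
from carvekit.ml.files.models_loc import tracer_b7_pretrained, download_all

# Resolve a single checkpoint (downloaded on first use, cached afterwards)
weights_path = tracer_b7_pretrained()
print(weights_path, weights_path.exists())

# Or warm the cache for every model up front (a large one-time download)
# download_all()
```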

Functions

+
+
+def basnet_pretrained() ‑>Β pathlib.Path +
+
+

Returns basnet pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def basnet_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("basnet.pth")
+
+
+
+def cascadepsp_pretrained() ‑>Β pathlib.Path +
+
+

Returns cascade psp pretrained model location +This model is used to refine segmentation masks.

+

Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000 +more info: https://huggingface.co/Carve/cascadepsp

+

Returns

+

pathlib.Path to model location

+
+ +Expand source code + +
def cascadepsp_pretrained() -> pathlib.Path:
+    """Returns cascade psp pretrained model location
+    This model is used to refine segmentation masks.
+
+    Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000
+    more info: https://huggingface.co/Carve/cascadepsp
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("cascadepsp.pth")
+
+
+
+def deeplab_pretrained() ‑>Β pathlib.Path +
+
+

Returns DeepLabV3 pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def deeplab_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("deeplab.pth")
+
+
+
+def download_all() +
+
+
+
+ +Expand source code + +
def download_all():
+    u2net_full_pretrained()
+    fba_pretrained()
+    deeplab_pretrained()
+    basnet_pretrained()
+    tracer_b7_pretrained()
+    scene_classifier_pretrained()
+    yolov4_coco_pretrained()
+    cascadepsp_pretrained()
+
+
+
+def fba_pretrained() ‑>Β pathlib.Path +
+
+

Returns FBA Matting pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def fba_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("fba_matting.pth")
+
+
+
+def scene_classifier_pretrained() ‑>Β pathlib.Path +
+
+

Returns scene classifier pretrained model location +This model is used to classify scenes into 3 categories: hard, soft, digital

+

hard - scenes with hard edges, such as objects, buildings, etc. +soft - scenes with soft edges, such as portraits, hair, animals, etc. +digital - digital scenes, such as screenshots, graphics, etc.

+

more info: https://huggingface.co/Carve/scene_classifier

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def scene_classifier_pretrained() -> pathlib.Path:
+    """Returns scene classifier pretrained model location
+    This model is used to classify scenes into 3 categories: hard, soft, digital
+
+    hard - scenes with hard edges, such as objects, buildings, etc.
+    soft - scenes with soft edges, such as portraits, hair, animals, etc.
+    digital - digital scenes, such as screenshots, graphics, etc.
+
+    more info: https://huggingface.co/Carve/scene_classifier
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("scene_classifier.pth")
+
+
+
+def tracer_b7_pretrained() ‑>Β pathlib.Path +
+
+

Returns TRACER with EfficientNet v1 b7 encoder pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def tracer_b7_pretrained() -> pathlib.Path:
+    """Returns TRACER with EfficientNet v1 b7 encoder pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("tracer_b7.pth")
+
+
+
+def u2net_full_pretrained() ‑>Β pathlib.Path +
+
+

Returns u2net pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def u2net_full_pretrained() -> pathlib.Path:
+    """Returns u2net pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("u2net.pth")
+
+
+
+def yolov4_coco_pretrained() ‑>Β pathlib.Path +
+
+

Returns yolov4 classifier pretrained model location +This model is used to classify objects in images.

+

Training dataset: COCO 2017 +Training classes: 80

+

It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch) +We have only added coco classnames to the model.

+

Returns

+

pathlib.Path to model location

+
+ +Expand source code + +
def yolov4_coco_pretrained() -> pathlib.Path:
+    """Returns yolov4 classifier pretrained model location
+    This model is used to classify objects in images.
+
+    Training dataset: COCO 2017
+    Training classes: 80
+
+    It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch)
+    We have only added coco classnames to the model.
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("yolov4_coco_with_classes.pth")
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/index.html b/docs/api/carvekit/ml/index.html new file mode 100644 index 0000000..b09ab9d --- /dev/null +++ b/docs/api/carvekit/ml/index.html @@ -0,0 +1,84 @@ + + + + + + +carvekit.ml API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml

+
+
+
+ +Expand source code + +
from carvekit.utils.models_utils import fix_seed, suppress_warnings
+
+fix_seed()
+suppress_warnings()
+
+
+
+
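Importing the package triggers the two calls above. A tiny sketch of that side effect; the exact seed value and warning filters are internal to carvekit, so this only illustrates that the import alone is enough.

``` python
import carvekit.ml  # runs fix_seed() and suppress_warnings() on import
import torch

# fix_seed() presumably seeds the global RNGs, so downstream model code
# starts from a reproducible state after this import.
print(torch.rand(1))
```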

Sub-modules

+
+
carvekit.ml.arch
+
+
+
+
carvekit.ml.files
+
+
+
+
carvekit.ml.wrap
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/basnet.html b/docs/api/carvekit/ml/wrap/basnet.html new file mode 100644 index 0000000..fa3f82e --- /dev/null +++ b/docs/api/carvekit/ml/wrap/basnet.html @@ -0,0 +1,474 @@ + + + + + + +carvekit.ml.wrap.basnet API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.basnet

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import pathlib
+from typing import Union, List
+
+import PIL
+import numpy as np
+import torch
+from PIL import Image
+
+from carvekit.ml.arch.basnet.basnet import BASNet
+from carvekit.ml.files.models_loc import basnet_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["BASNET"]
+
+
+class BASNET(BASNet):
+    """BASNet model interface"""
+
+    def __init__(
+        self,
+        device="cpu",
+        input_image_size: Union[List[int], int] = 320,
+        batch_size: int = 10,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the BASNET model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=320): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use fp16 precision **not supported at this moment**
+        """
+        super(BASNET, self).__init__(n_channels=3, n_classes=1)
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(basnet_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        resized = data.resize(self.input_image_size)
+        # noinspection PyTypeChecker
+        resized_arr = np.array(resized, dtype=np.float64)
+        temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+        if np.max(resized_arr) != 0:
+            resized_arr /= np.max(resized_arr)
+        temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+        temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+        temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+        temp_image = temp_image.transpose((2, 0, 1))
+        temp_image = np.expand_dims(temp_image, 0)
+        return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        data = data.unsqueeze(0)
+        mask = data[:, 0, :, :]
+        ma = torch.max(mask)  # Normalizes prediction
+        mi = torch.min(mask)
+        predict = ((mask - mi) / (ma - mi)).squeeze()
+        predict_np = predict.cpu().data.numpy() * 255
+        mask = Image.fromarray(predict_np).convert("L")
+        mask = mask.resize(original_image.size, resample=3)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        for image_batch in batch_generator(images, self.batch_size):
+            converted_images = thread_pool_processing(
+                lambda x: convert_image(load_image(x)), image_batch
+            )
+            batches = torch.vstack(
+                thread_pool_processing(self.data_preprocessing, converted_images)
+            )
+            with torch.no_grad():
+                batches = batches.to(self.device)
+                masks, d2, d3, d4, d5, d6, d7, d8 = super(BASNET, self).__call__(
+                    batches
+                )
+                masks_cpu = masks.cpu()
+                del d2, d3, d4, d5, d6, d7, d8, batches, masks
+            masks = thread_pool_processing(
+                lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]),
+                range(len(converted_images)),
+            )
+            collect_masks += masks
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class BASNET +(device='cpu', input_image_size:Β Union[List[int],Β int]Β =Β 320, batch_size:Β intΒ =Β 10, load_pretrained:Β boolΒ =Β True, fp16:Β boolΒ =Β False) +
+
+

BASNet model interface

+

Initialize the BASNET model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_image_size : Union[List[int], int], default=320
+
input image size
+
batch_size : int, default=10
+
the number of images that the neural network processes in one run
+
load_pretrained : bool, default=True
+
loading pretrained model
+
fp16 : bool, default=False
+
use fp16 precision not supported at this moment
+
+
+ +Expand source code + +
class BASNET(BASNet):
+    """BASNet model interface"""
+
+    def __init__(
+        self,
+        device="cpu",
+        input_image_size: Union[List[int], int] = 320,
+        batch_size: int = 10,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the BASNET model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=320): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use fp16 precision **not supported at this moment**
+        """
+        super(BASNET, self).__init__(n_channels=3, n_classes=1)
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(basnet_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        resized = data.resize(self.input_image_size)
+        # noinspection PyTypeChecker
+        resized_arr = np.array(resized, dtype=np.float64)
+        temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+        if np.max(resized_arr) != 0:
+            resized_arr /= np.max(resized_arr)
+        temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+        temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+        temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+        temp_image = temp_image.transpose((2, 0, 1))
+        temp_image = np.expand_dims(temp_image, 0)
+        return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        data = data.unsqueeze(0)
+        mask = data[:, 0, :, :]
+        ma = torch.max(mask)  # Normalizes prediction
+        mi = torch.min(mask)
+        predict = ((mask - mi) / (ma - mi)).squeeze()
+        predict_np = predict.cpu().data.numpy() * 255
+        mask = Image.fromarray(predict_np).convert("L")
+        mask = mask.resize(original_image.size, resample=3)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        for image_batch in batch_generator(images, self.batch_size):
+            converted_images = thread_pool_processing(
+                lambda x: convert_image(load_image(x)), image_batch
+            )
+            batches = torch.vstack(
+                thread_pool_processing(self.data_preprocessing, converted_images)
+            )
+            with torch.no_grad():
+                batches = batches.to(self.device)
+                masks, d2, d3, d4, d5, d6, d7, d8 = super(BASNET, self).__call__(
+                    batches
+                )
+                masks_cpu = masks.cpu()
+                del d2, d3, d4, d5, d6, d7, d8, batches, masks
+            masks = thread_pool_processing(
+                lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]),
+                range(len(converted_images)),
+            )
+            collect_masks += masks
+        return collect_masks
+
+
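A minimal usage sketch, assuming the pretrained checkpoint can be downloaded; `./example.jpg` is a placeholder path.

``` python
from carvekit.ml.wrap.basnet import BASNET

seg_net = BASNET(device="cpu", input_image_size=320, batch_size=1)

# Any RGB image path (or PIL.Image.Image) works; this one is a placeholder
masks = seg_net(["./example.jpg"])
masks[0].save("mask.png")
```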

Ancestors

+
    +
  • BASNet
  • +
  • torch.nn.modules.module.Module
  • +
+

Static methods

+
+
+def data_postprocessing(data:Β torch.Tensor, original_image:Β PIL.Image.Image) ‑>Β PIL.Image.Image +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask as PIL Image instance
+
+
+ +Expand source code + +
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, original_image: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data (torch.Tensor): output data from neural network
+        original_image (PIL.Image.Image): input image which was used for predicted data
+
+    Returns:
+        PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+    """
+    data = data.unsqueeze(0)
+    mask = data[:, 0, :, :]
+    ma = torch.max(mask)  # Normalizes prediction
+    mi = torch.min(mask)
+    predict = ((mask - mi) / (ma - mi)).squeeze()
+    predict_np = predict.cpu().data.numpy() * 255
+    mask = Image.fromarray(predict_np).convert("L")
+    mask = mask.resize(original_image.size, resample=3)
+    return mask
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data:Β PIL.Image.Image) ‑>Β torch.Tensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.Tensor
+
input for neural network
+
+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data (PIL.Image.Image): input image
+
+    Returns:
+        torch.Tensor: input for neural network
+
+    """
+    resized = data.resize(self.input_image_size)
+    # noinspection PyTypeChecker
+    resized_arr = np.array(resized, dtype=np.float64)
+    temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+    if np.max(resized_arr) != 0:
+        resized_arr /= np.max(resized_arr)
+    temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+    temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+    temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+    temp_image = temp_image.transpose((2, 0, 1))
+    temp_image = np.expand_dims(temp_image, 0)
+    return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/cascadepsp.html b/docs/api/carvekit/ml/wrap/cascadepsp.html new file mode 100644 index 0000000..231db25 --- /dev/null +++ b/docs/api/carvekit/ml/wrap/cascadepsp.html @@ -0,0 +1,907 @@ + + + + + + +carvekit.ml.wrap.cascadepsp API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.cascadepsp

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+import warnings
+
+import PIL
+import numpy as np
+import torch
+from PIL import Image
+from torchvision import transforms
+from typing import Union, List
+
+from carvekit.ml.arch.cascadepsp.pspnet import RefinementModule
+from carvekit.ml.arch.cascadepsp.utils import (
+    process_im_single_pass,
+    process_high_res_im,
+)
+from carvekit.ml.files.models_loc import cascadepsp_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["CascadePSP"]
+
+
+class CascadePSP(RefinementModule):
+    """
+    CascadePSP to refine the mask from segmentation network
+    """
+
+    def __init__(
+        self,
+        device="cpu",
+        input_tensor_size: int = 900,
+        batch_size: int = 1,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        mask_binary_threshold=127,
+        global_step_only=False,
+        processing_accelerate_image_size=2048,
+    ):
+        """
+        Initialize the CascadePSP model
+
+        Args:
+            device: processing device
+            input_tensor_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use half precision
+            global_step_only: if True, only global step will be used for prediction. See paper for details.
+            mask_binary_threshold: threshold for binary mask, default 127, set to 0 for no threshold
+            processing_accelerate_image_size: thumbnail size for image processing acceleration. Set to 0 to disable
+
+        """
+        super().__init__()
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        self.mask_binary_threshold = mask_binary_threshold
+        self.global_step_only = global_step_only
+        self.processing_accelerate_image_size = processing_accelerate_image_size
+        self.input_tensor_size = input_tensor_size
+
+        self.to(device)
+        if batch_size > 1:
+            warnings.warn(
+                "Batch size > 1 is experimental feature for CascadePSP."
+                " Please, don't use it if you have GPU with small memory!"
+            )
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(cascadepsp_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+        self._image_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+        self._seg_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(mean=[0.5], std=[0.5]),
+            ]
+        )
+
+    def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+        preprocessed_data = data.copy()
+        if self.batch_size == 1 and self.processing_accelerate_image_size > 0:
+            # Okay, we have only one image, so
+            # we can use image processing acceleration to speed up high-resolution image processing
+            preprocessed_data.thumbnail(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif self.batch_size == 1:
+            pass  # No need to do anything
+        elif self.batch_size > 1 and self.global_step_only is True:
+            # If we have more than one image and we use only global step,
+            # there isn't any reason to use image processing acceleration,
+            # because we will use only global step for prediction and anyway it will be resized to input_tensor_size
+            preprocessed_data = preprocessed_data.resize(
+                (self.input_tensor_size, self.input_tensor_size)
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and self.processing_accelerate_image_size > 0
+        ):
+            # If we have more than one image and we use local step,
+            # we can use image processing acceleration to speed up high-resolution image processing
+            # but we need to resize image to processing_accelerate_image_size to stack it with other images
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and not (self.processing_accelerate_image_size > 0)
+        ):
+            raise ValueError(
+                "If you use local step with batch_size > 2, "
+                "you need to set processing_accelerate_image_size > 0,"
+                "since we cannot stack images with different sizes to one batch"
+            )
+        else:  # some extra cases
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+
+        if data.mode == "RGB":
+            preprocessed_data = self._image_transform(
+                np.array(preprocessed_data)
+            ).unsqueeze(0)
+        elif data.mode == "L":
+            preprocessed_data = np.array(preprocessed_data)
+            if 0 < self.mask_binary_threshold <= 255:
+                preprocessed_data = (
+                    preprocessed_data > self.mask_binary_threshold
+                ).astype(np.uint8) * 255
+            elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0:
+                warnings.warn(
+                    "mask_binary_threshold should be in range [0, 255], "
+                    "but got {}. Disabling mask_binary_threshold!".format(
+                        self.mask_binary_threshold
+                    )
+                )
+
+            preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze(
+                0
+            )  # [H,W,1]
+
+        return preprocessed_data
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, mask: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            mask: input mask
+
+        Returns:
+            Segmentation mask as PIL Image instance
+
+        """
+        refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8")
+        return Image.fromarray(refined_mask).convert("L").resize(mask.size)
+
+    def safe_forward(self, im, seg, inter_s8=None, inter_s4=None):
+        """
+        Slightly pads the input image such that its length is a multiple of 8
+        """
+        b, _, ph, pw = seg.shape
+        if (ph % 8 != 0) or (pw % 8 != 0):
+            newH = (ph // 8 + 1) * 8
+            newW = (pw // 8 + 1) * 8
+            p_im = torch.zeros(b, 3, newH, newW, device=im.device)
+            p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+
+            p_im[:, :, 0:ph, 0:pw] = im
+            p_seg[:, :, 0:ph, 0:pw] = seg
+            im = p_im
+            seg = p_seg
+
+            if inter_s8 is not None:
+                p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8
+                inter_s8 = p_inter_s8
+            if inter_s4 is not None:
+                p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4
+                inter_s4 = p_inter_s4
+
+        images = super().__call__(im, seg, inter_s8, inter_s4)
+        return_im = {}
+
+        for key in ["pred_224", "pred_28_3", "pred_56_2"]:
+            return_im[key] = images[key][:, :, 0:ph, 0:pw]
+        del images
+
+        return return_im
+
+    def __call__(
+        self,
+        images: List[Union[str, pathlib.Path, PIL.Image.Image]],
+        masks: List[Union[str, pathlib.Path, PIL.Image.Image]],
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+            masks: Segmentation masks to refine
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+
+        if len(images) != len(masks):
+            raise ValueError(
+                "Len of specified arrays of images and trimaps should be equal!"
+            )
+
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for idx_batch in batch_generator(range(len(images)), self.batch_size):
+                inpt_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(images[x])), idx_batch
+                )
+
+                inpt_masks = thread_pool_processing(
+                    lambda x: convert_image(load_image(masks[x]), mode="L"), idx_batch
+                )
+
+                inpt_img_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_images
+                )
+                inpt_masks_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_masks
+                )
+                if self.batch_size > 1:  # We need to stack images, if batch_size > 1
+                    inpt_img_batches = torch.vstack(inpt_img_batches)
+                    inpt_masks_batches = torch.vstack(inpt_masks_batches)
+                else:
+                    inpt_img_batches = inpt_img_batches[
+                        0
+                    ]  # Get only one image from list
+                    inpt_masks_batches = inpt_masks_batches[0]
+
+                with torch.no_grad():
+                    inpt_img_batches = inpt_img_batches.to(self.device)
+                    inpt_masks_batches = inpt_masks_batches.to(self.device)
+                    if self.global_step_only:
+                        refined_batches = process_im_single_pass(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    else:
+                        refined_batches = process_high_res_im(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    refined_masks = refined_batches.cpu()
+                    del (inpt_img_batches, inpt_masks_batches, refined_batches)
+                collect_masks += thread_pool_processing(
+                    lambda x: self.data_postprocessing(refined_masks[x], inpt_masks[x]),
+                    range(len(inpt_masks)),
+                )
+            return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class CascadePSP +(device='cpu', input_tensor_size:Β intΒ =Β 900, batch_size:Β intΒ =Β 1, load_pretrained:Β boolΒ =Β True, fp16:Β boolΒ =Β False, mask_binary_threshold=127, global_step_only=False, processing_accelerate_image_size=2048) +
+
+

CascadePSP to refine the mask from segmentation network

+

Initialize the CascadePSP model

+

Args

+
+
device
+
processing device
+
input_tensor_size
+
input image size
+
batch_size
+
the number of images that the neural network processes in one run
+
load_pretrained
+
loading pretrained model
+
fp16
+
use half precision
+
global_step_only
+
if True, only global step will be used for prediction. See paper for details.
+
mask_binary_threshold
+
threshold for binary mask, default 127, set to 0 for no threshold
+
processing_accelerate_image_size
+
thumbnail size for image processing acceleration. Set to 0 to disable
+
+
+ +Expand source code + +
class CascadePSP(RefinementModule):
+    """
+    CascadePSP to refine the mask from segmentation network
+    """
+
+    def __init__(
+        self,
+        device="cpu",
+        input_tensor_size: int = 900,
+        batch_size: int = 1,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        mask_binary_threshold=127,
+        global_step_only=False,
+        processing_accelerate_image_size=2048,
+    ):
+        """
+        Initialize the CascadePSP model
+
+        Args:
+            device: processing device
+            input_tensor_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use half precision
+            global_step_only: if True, only global step will be used for prediction. See paper for details.
+            mask_binary_threshold: threshold for binary mask, default 127, set to 0 for no threshold
+            processing_accelerate_image_size: thumbnail size for image processing acceleration. Set to 0 to disable
+
+        """
+        super().__init__()
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        self.mask_binary_threshold = mask_binary_threshold
+        self.global_step_only = global_step_only
+        self.processing_accelerate_image_size = processing_accelerate_image_size
+        self.input_tensor_size = input_tensor_size
+
+        self.to(device)
+        if batch_size > 1:
+            warnings.warn(
+                "Batch size > 1 is experimental feature for CascadePSP."
+                " Please, don't use it if you have GPU with small memory!"
+            )
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(cascadepsp_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+        self._image_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+        self._seg_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(mean=[0.5], std=[0.5]),
+            ]
+        )
+
+    def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+        preprocessed_data = data.copy()
+        if self.batch_size == 1 and self.processing_accelerate_image_size > 0:
+            # Okay, we have only one image, so
+            # we can use image processing acceleration to speed up high-resolution image processing
+            preprocessed_data.thumbnail(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif self.batch_size == 1:
+            pass  # No need to do anything
+        elif self.batch_size > 1 and self.global_step_only is True:
+            # If we have more than one image and we use only global step,
+            # there isn't any reason to use image processing acceleration,
+            # because we will use only global step for prediction and anyway it will be resized to input_tensor_size
+            preprocessed_data = preprocessed_data.resize(
+                (self.input_tensor_size, self.input_tensor_size)
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and self.processing_accelerate_image_size > 0
+        ):
+            # If we have more than one image and we use local step,
+            # we can use image processing acceleration to speed up high-resolution image processing
+            # but we need to resize image to processing_accelerate_image_size to stack it with other images
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and not (self.processing_accelerate_image_size > 0)
+        ):
+            raise ValueError(
+                "If you use local step with batch_size > 2, "
+                "you need to set processing_accelerate_image_size > 0,"
+                "since we cannot stack images with different sizes to one batch"
+            )
+        else:  # some extra cases
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+
+        if data.mode == "RGB":
+            preprocessed_data = self._image_transform(
+                np.array(preprocessed_data)
+            ).unsqueeze(0)
+        elif data.mode == "L":
+            preprocessed_data = np.array(preprocessed_data)
+            if 0 < self.mask_binary_threshold <= 255:
+                preprocessed_data = (
+                    preprocessed_data > self.mask_binary_threshold
+                ).astype(np.uint8) * 255
+            elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0:
+                warnings.warn(
+                    "mask_binary_threshold should be in range [0, 255], "
+                    "but got {}. Disabling mask_binary_threshold!".format(
+                        self.mask_binary_threshold
+                    )
+                )
+
+            preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze(
+                0
+            )  # [H,W,1]
+
+        return preprocessed_data
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, mask: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            mask: input mask
+
+        Returns:
+            Segmentation mask as PIL Image instance
+
+        """
+        refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8")
+        return Image.fromarray(refined_mask).convert("L").resize(mask.size)
+
+    def safe_forward(self, im, seg, inter_s8=None, inter_s4=None):
+        """
+        Slightly pads the input image such that its length is a multiple of 8
+        """
+        b, _, ph, pw = seg.shape
+        if (ph % 8 != 0) or (pw % 8 != 0):
+            newH = (ph // 8 + 1) * 8
+            newW = (pw // 8 + 1) * 8
+            p_im = torch.zeros(b, 3, newH, newW, device=im.device)
+            p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+
+            p_im[:, :, 0:ph, 0:pw] = im
+            p_seg[:, :, 0:ph, 0:pw] = seg
+            im = p_im
+            seg = p_seg
+
+            if inter_s8 is not None:
+                p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8
+                inter_s8 = p_inter_s8
+            if inter_s4 is not None:
+                p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4
+                inter_s4 = p_inter_s4
+
+        images = super().__call__(im, seg, inter_s8, inter_s4)
+        return_im = {}
+
+        for key in ["pred_224", "pred_28_3", "pred_56_2"]:
+            return_im[key] = images[key][:, :, 0:ph, 0:pw]
+        del images
+
+        return return_im
+
+    def __call__(
+        self,
+        images: List[Union[str, pathlib.Path, PIL.Image.Image]],
+        masks: List[Union[str, pathlib.Path, PIL.Image.Image]],
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+            masks: Segmentation masks to refine
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+
+        if len(images) != len(masks):
+            raise ValueError(
+                "Len of specified arrays of images and trimaps should be equal!"
+            )
+
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for idx_batch in batch_generator(range(len(images)), self.batch_size):
+                inpt_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(images[x])), idx_batch
+                )
+
+                inpt_masks = thread_pool_processing(
+                    lambda x: convert_image(load_image(masks[x]), mode="L"), idx_batch
+                )
+
+                inpt_img_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_images
+                )
+                inpt_masks_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_masks
+                )
+                if self.batch_size > 1:  # We need to stack images, if batch_size > 1
+                    inpt_img_batches = torch.vstack(inpt_img_batches)
+                    inpt_masks_batches = torch.vstack(inpt_masks_batches)
+                else:
+                    inpt_img_batches = inpt_img_batches[
+                        0
+                    ]  # Get only one image from list
+                    inpt_masks_batches = inpt_masks_batches[0]
+
+                with torch.no_grad():
+                    inpt_img_batches = inpt_img_batches.to(self.device)
+                    inpt_masks_batches = inpt_masks_batches.to(self.device)
+                    if self.global_step_only:
+                        refined_batches = process_im_single_pass(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    else:
+                        refined_batches = process_high_res_im(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    refined_masks = refined_batches.cpu()
+                    del (inpt_img_batches, inpt_masks_batches, refined_batches)
+                collect_masks += thread_pool_processing(
+                    lambda x: self.data_postprocessing(refined_masks[x], inpt_masks[x]),
+                    range(len(inpt_masks)),
+                )
+            return collect_masks
+
+
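A minimal usage sketch, assuming the pretrained checkpoint can be downloaded; the image and coarse-mask paths are placeholders, and the coarse mask can come from any segmentation network (e.g. TRACER).

``` python
from carvekit.ml.wrap.cascadepsp import CascadePSP

refiner = CascadePSP(
    device="cpu",
    input_tensor_size=900,
    batch_size=1,
    global_step_only=False,  # also run the local refinement steps
    fp16=False,
)

# Placeholder paths: an RGB image and its coarse grayscale mask
refined = refiner(["./example.jpg"], ["./example_mask.png"])
refined[0].save("refined_mask.png")
```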

Ancestors

+ +

Static methods

+
+
+def data_postprocessing(data:Β torch.Tensor, mask:Β PIL.Image.Image) ‑>Β PIL.Image.Image +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data
+
output data from neural network
+
mask
+
input mask
+
+

Returns

+

Segmentation mask as PIL Image instance

+
+ +Expand source code + +
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, mask: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data: output data from neural network
+        mask: input mask
+
+    Returns:
+        Segmentation mask as PIL Image instance
+
+    """
+    refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8")
+    return Image.fromarray(refined_mask).convert("L").resize(mask.size)
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data:Β PIL.Image.Image) ‑>Β torch.FloatTensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data
+
input image
+
+

Returns

+

input for neural network

+
+ +Expand source code + +
def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data: input image
+
+    Returns:
+        input for neural network
+
+    """
+    preprocessed_data = data.copy()
+    if self.batch_size == 1 and self.processing_accelerate_image_size > 0:
+        # Okay, we have only one image, so
+        # we can use image processing acceleration to speed up high-resolution image processing
+        preprocessed_data.thumbnail(
+            (
+                self.processing_accelerate_image_size,
+                self.processing_accelerate_image_size,
+            )
+        )
+    elif self.batch_size == 1:
+        pass  # No need to do anything
+    elif self.batch_size > 1 and self.global_step_only is True:
+        # If we have more than one image and we use only global step,
+        # there isn't any reason to use image processing acceleration,
+        # because we will use only global step for prediction and anyway it will be resized to input_tensor_size
+        preprocessed_data = preprocessed_data.resize(
+            (self.input_tensor_size, self.input_tensor_size)
+        )
+    elif (
+        self.batch_size > 1
+        and self.global_step_only is False
+        and self.processing_accelerate_image_size > 0
+    ):
+        # If we have more than one image and we use local step,
+        # we can use image processing acceleration to speed up high-resolution image processing
+        # but we need to resize image to processing_accelerate_image_size to stack it with other images
+        preprocessed_data = preprocessed_data.resize(
+            (
+                self.processing_accelerate_image_size,
+                self.processing_accelerate_image_size,
+            )
+        )
+    elif (
+        self.batch_size > 1
+        and self.global_step_only is False
+        and not (self.processing_accelerate_image_size > 0)
+    ):
+        raise ValueError(
+            "If you use local step with batch_size > 2, "
+            "you need to set processing_accelerate_image_size > 0,"
+            "since we cannot stack images with different sizes to one batch"
+        )
+    else:  # some extra cases
+        preprocessed_data = preprocessed_data.resize(
+            (
+                self.processing_accelerate_image_size,
+                self.processing_accelerate_image_size,
+            )
+        )
+
+    if data.mode == "RGB":
+        preprocessed_data = self._image_transform(
+            np.array(preprocessed_data)
+        ).unsqueeze(0)
+    elif data.mode == "L":
+        preprocessed_data = np.array(preprocessed_data)
+        if 0 < self.mask_binary_threshold <= 255:
+            preprocessed_data = (
+                preprocessed_data > self.mask_binary_threshold
+            ).astype(np.uint8) * 255
+        elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0:
+            warnings.warn(
+                "mask_binary_threshold should be in range [0, 255], "
+                "but got {}. Disabling mask_binary_threshold!".format(
+                    self.mask_binary_threshold
+                )
+            )
+
+        preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze(
+            0
+        )  # [H,W,1]
+
+    return preprocessed_data
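A small sketch of how the two resize paths above differ; the 4000Γ—3000 input and the 2048 acceleration size are illustrative values only:

``` python
from PIL import Image

# Hypothetical input and settings, to contrast the two resize strategies above.
img = Image.new("RGB", (4000, 3000))

accelerated = img.copy()
accelerated.thumbnail((2048, 2048))   # keeps aspect ratio -> 2048x1536 (single-image path)
stacked = img.resize((2048, 2048))    # forces a square -> 2048x2048, so a batch can be stacked
print(accelerated.size, stacked.size)  # (2048, 1536) (2048, 2048)
```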
+
+
+
+def safe_forward(self, im, seg, inter_s8=None, inter_s4=None) +
+
+

Slightly pads the input image such that its length is a multiple of 8

+
+ +Expand source code + +
def safe_forward(self, im, seg, inter_s8=None, inter_s4=None):
+    """
+    Slightly pads the input image such that its length is a multiple of 8
+    """
+    b, _, ph, pw = seg.shape
+    if (ph % 8 != 0) or (pw % 8 != 0):
+        newH = (ph // 8 + 1) * 8
+        newW = (pw // 8 + 1) * 8
+        p_im = torch.zeros(b, 3, newH, newW, device=im.device)
+        p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+
+        p_im[:, :, 0:ph, 0:pw] = im
+        p_seg[:, :, 0:ph, 0:pw] = seg
+        im = p_im
+        seg = p_seg
+
+        if inter_s8 is not None:
+            p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+            p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8
+            inter_s8 = p_inter_s8
+        if inter_s4 is not None:
+            p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+            p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4
+            inter_s4 = p_inter_s4
+
+    images = super().__call__(im, seg, inter_s8, inter_s4)
+    return_im = {}
+
+    for key in ["pred_224", "pred_28_3", "pred_56_2"]:
+        return_im[key] = images[key][:, :, 0:ph, 0:pw]
+    del images
+
+    return return_im
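A quick sketch of the size rounding used above; the 450Γ—601 input is illustrative:

``` python
# Illustrative sizes only: safe_forward pads spatial dims up to the next multiple of 8.
ph, pw = 450, 601
newH = (ph // 8 + 1) * 8  # 456
newW = (pw // 8 + 1) * 8  # 608
print(newH, newW)
```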
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/deeplab_v3.html b/docs/api/carvekit/ml/wrap/deeplab_v3.html new file mode 100644 index 0000000..57e1eb1 --- /dev/null +++ b/docs/api/carvekit/ml/wrap/deeplab_v3.html @@ -0,0 +1,490 @@ + + + + + + +carvekit.ml.wrap.deeplab_v3 API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.deeplab_v3

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import pathlib
+from typing import List, Union
+
+import PIL.Image
+import torch
+from PIL import Image
+from torchvision import transforms
+from torchvision.models.segmentation import deeplabv3_resnet101
+from carvekit.ml.files.models_loc import deeplab_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["DeepLabV3"]
+
+
+class DeepLabV3:
+    def __init__(
+        self,
+        device="cpu",
+        batch_size: int = 10,
+        input_image_size: Union[List[int], int] = 1024,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the `DeepLabV3` model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=1024): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use half precision
+
+        """
+        self.device = device
+        self.batch_size = batch_size
+        self.network = deeplabv3_resnet101(
+            pretrained=False, pretrained_backbone=False, aux_loss=True
+        )
+        self.network.to(self.device)
+        if load_pretrained:
+            self.network.load_state_dict(
+                torch.load(deeplab_pretrained(), map_location=self.device)
+            )
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.network.eval()
+        self.fp16 = fp16
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+    def to(self, device: str):
+        """
+        Moves neural network to specified processing device
+
+        Args:
+            device (Literal[cpu, cuda]): the desired device.
+
+        """
+        self.network.to(device)
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        copy = data.copy()
+        copy.thumbnail(self.input_image_size, resample=3)
+        return self.transform(copy)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        return (
+            Image.fromarray(data.numpy() * 255).convert("L").resize(original_image.size)
+        )
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.network, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = thread_pool_processing(
+                    self.data_preprocessing, converted_images
+                )
+                with torch.no_grad():
+                    masks = [
+                        self.network(i.to(self.device).unsqueeze(0))["out"][0]
+                        .argmax(0)
+                        .byte()
+                        .cpu()
+                        for i in batches
+                    ]
+                    del batches
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks[x], converted_images[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class DeepLabV3 +(device='cpu', batch_size: int = 10, input_image_size: Union[List[int], int] = 1024, load_pretrained: bool = True, fp16: bool = False) +
+
+

Initialize the DeepLabV3 model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_image_size (Union[List[int], int], default=1024): input image size
+
batch_size : int, default=10
+
the number of images that the neural network processes in one run
+
load_pretrained : bool, default=True
+
loading pretrained model
+
fp16 : bool, default=False
+
use half precision
+
+
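A minimal usage sketch, assuming the pretrained weights can be resolved by `deeplab_pretrained()` and that the image path (illustrative here) exists:

``` python
from carvekit.ml.wrap.deeplab_v3 import DeepLabV3

seg_net = DeepLabV3(device="cpu", batch_size=1, input_image_size=1024, fp16=False)

# Accepts file paths, pathlib.Path objects or PIL images; returns one "L"-mode mask per input.
masks = seg_net(["./tests/data/cat.jpg"])  # illustrative path
masks[0].save("cat_mask.png")
```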
+ +Expand source code + +
class DeepLabV3:
+    def __init__(
+        self,
+        device="cpu",
+        batch_size: int = 10,
+        input_image_size: Union[List[int], int] = 1024,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the `DeepLabV3` model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=1024): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use half precision
+
+        """
+        self.device = device
+        self.batch_size = batch_size
+        self.network = deeplabv3_resnet101(
+            pretrained=False, pretrained_backbone=False, aux_loss=True
+        )
+        self.network.to(self.device)
+        if load_pretrained:
+            self.network.load_state_dict(
+                torch.load(deeplab_pretrained(), map_location=self.device)
+            )
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.network.eval()
+        self.fp16 = fp16
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+    def to(self, device: str):
+        """
+        Moves neural network to specified processing device
+
+        Args:
+            device (Literal[cpu, cuda]): the desired device.
+
+        """
+        self.network.to(device)
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        copy = data.copy()
+        copy.thumbnail(self.input_image_size, resample=3)
+        return self.transform(copy)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        return (
+            Image.fromarray(data.numpy() * 255).convert("L").resize(original_image.size)
+        )
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.network, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = thread_pool_processing(
+                    self.data_preprocessing, converted_images
+                )
+                with torch.no_grad():
+                    masks = [
+                        self.network(i.to(self.device).unsqueeze(0))["out"][0]
+                        .argmax(0)
+                        .byte()
+                        .cpu()
+                        for i in batches
+                    ]
+                    del batches
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks[x], converted_images[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+        return collect_masks
+
+

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) -> PIL.Image.Image +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask as PIL Image instance
+
+
+ +Expand source code + +
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, original_image: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data (torch.Tensor): output data from neural network
+        original_image (PIL.Image.Image): input image which was used for predicted data
+
+    Returns:
+        PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+    """
+    return (
+        Image.fromarray(data.numpy() * 255).convert("L").resize(original_image.size)
+    )
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.Tensor
+
input for neural network
+
+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data (PIL.Image.Image): input image
+
+    Returns:
+        torch.Tensor: input for neural network
+
+    """
+    copy = data.copy()
+    copy.thumbnail(self.input_image_size, resample=3)
+    return self.transform(copy)
+
+
+
+def to(self, device: str) +
+
+

Moves neural network to specified processing device

+

Args

+
+
device : Literal[cpu, cuda]
+
the desired device.
+
+
+ +Expand source code + +
def to(self, device: str):
+    """
+    Moves neural network to specified processing device
+
+    Args:
+        device (Literal[cpu, cuda]): the desired device.
+
+    """
+    self.network.to(device)
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/fba_matting.html b/docs/api/carvekit/ml/wrap/fba_matting.html new file mode 100644 index 0000000..9aee452 --- /dev/null +++ b/docs/api/carvekit/ml/wrap/fba_matting.html @@ -0,0 +1,674 @@ + + + + + + +carvekit.ml.wrap.fba_matting API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.fba_matting

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+from typing import Union, List, Tuple
+
+import PIL
+import cv2
+import numpy as np
+import torch
+from PIL import Image
+
+from carvekit.ml.arch.fba_matting.models import FBA
+from carvekit.ml.arch.fba_matting.transforms import (
+    trimap_transform,
+    groupnorm_normalise_image,
+)
+from carvekit.ml.files.models_loc import fba_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["FBAMatting"]
+
+
+class FBAMatting(FBA):
+    """
+    FBA Matting Neural Network to improve edges on image.
+    """
+
+    def __init__(
+        self,
+        device="cpu",
+        input_tensor_size: Union[List[int], int] = 2048,
+        batch_size: int = 2,
+        encoder="resnet50_GN_WS",
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the FBAMatting model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_tensor_size (Union[List[int], int], default=2048): input image size
+            batch_size (int, default=2): the number of images that the neural network processes in one run
+            encoder (str, default=resnet50_GN_WS): neural network encoder head
+            .. TODO::
+                Add more encoders to documentation as Literal typehint.
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use half precision
+
+        """
+        super(FBAMatting, self).__init__(encoder=encoder)
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_tensor_size, list):
+            self.input_image_size = input_tensor_size[:2]
+        else:
+            self.input_image_size = (input_tensor_size, input_tensor_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(torch.load(fba_pretrained(), map_location=self.device))
+        self.eval()
+
+    def data_preprocessing(
+        self, data: Union[PIL.Image.Image, np.ndarray]
+    ) -> Tuple[torch.FloatTensor, torch.FloatTensor]:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (Union[PIL.Image.Image, np.ndarray]): input image
+
+        Returns:
+            Tuple[torch.FloatTensor, torch.FloatTensor]: input for neural network
+
+        """
+        resized = data.copy()
+        if self.batch_size == 1:
+            resized.thumbnail(self.input_image_size, resample=3)
+        else:
+            resized = resized.resize(self.input_image_size, resample=3)
+        # noinspection PyTypeChecker
+        image = np.array(resized, dtype=np.float64)
+        image = image / 255.0  # Normalize image to [0, 1] values range
+        if resized.mode == "RGB":
+            image = image[:, :, ::-1]
+        elif resized.mode == "L":
+            image2 = np.copy(image)
+            h, w = image2.shape
+            image = np.zeros((h, w, 2))  # Transform trimap to binary data format
+            image[image2 == 1, 1] = 1
+            image[image2 == 0, 0] = 1
+        else:
+            raise ValueError("Incorrect color mode for image")
+        h, w = image.shape[:2]  # Scale input to a multiple of 8
+        h1 = int(np.ceil(1.0 * h / 8) * 8)
+        w1 = int(np.ceil(1.0 * w / 8) * 8)
+        x_scale = cv2.resize(image, (w1, h1), interpolation=cv2.INTER_LANCZOS4)
+        image_tensor = torch.from_numpy(x_scale).permute(2, 0, 1)[None, :, :, :].float()
+        if resized.mode == "RGB":
+            return image_tensor, groupnorm_normalise_image(
+                image_tensor.clone(), format="nchw"
+            )
+        else:
+            return (
+                image_tensor,
+                torch.from_numpy(trimap_transform(x_scale))
+                .permute(2, 0, 1)[None, :, :, :]
+                .float(),
+            )
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, trimap: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            trimap (PIL.Image.Image): Map with the area we need to refine
+
+        Returns:
+            PIL.Image.Image: Segmentation mask
+
+        """
+        if trimap.mode != "L":
+            raise ValueError("Incorrect color mode for trimap")
+        pred = data.numpy().transpose((1, 2, 0))
+        pred = cv2.resize(pred, trimap.size, cv2.INTER_LANCZOS4)[:, :, 0]
+        # noinspection PyTypeChecker
+        # Clean mask by removing all false predictions outside trimap and already known area
+        trimap_arr = np.array(trimap.copy())
+        pred[trimap_arr[:, :] == 0] = 0
+        # pred[trimap_arr[:, :] == 255] = 1
+        pred[pred < 0.3] = 0
+        return Image.fromarray(pred * 255).convert("L")
+
+    def __call__(
+        self,
+        images: List[Union[str, pathlib.Path, PIL.Image.Image]],
+        trimaps: List[Union[str, pathlib.Path, PIL.Image.Image]],
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+            trimaps (List[Union[str, pathlib.Path, PIL.Image.Image]]): Maps with the areas we need to refine
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images
+
+        """
+
+        if len(images) != len(trimaps):
+            raise ValueError(
+                "Len of specified arrays of images and trimaps should be equal!"
+            )
+
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for idx_batch in batch_generator(range(len(images)), self.batch_size):
+                inpt_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(images[x])), idx_batch
+                )
+
+                inpt_trimaps = thread_pool_processing(
+                    lambda x: convert_image(load_image(trimaps[x]), mode="L"), idx_batch
+                )
+
+                inpt_img_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_images
+                )
+                inpt_trimaps_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_trimaps
+                )
+
+                inpt_img_batches_transformed = torch.vstack(
+                    [i[1] for i in inpt_img_batches]
+                )
+                inpt_img_batches = torch.vstack([i[0] for i in inpt_img_batches])
+
+                inpt_trimaps_transformed = torch.vstack(
+                    [i[1] for i in inpt_trimaps_batches]
+                )
+                inpt_trimaps_batches = torch.vstack(
+                    [i[0] for i in inpt_trimaps_batches]
+                )
+
+                with torch.no_grad():
+                    inpt_img_batches = inpt_img_batches.to(self.device)
+                    inpt_trimaps_batches = inpt_trimaps_batches.to(self.device)
+                    inpt_img_batches_transformed = inpt_img_batches_transformed.to(
+                        self.device
+                    )
+                    inpt_trimaps_transformed = inpt_trimaps_transformed.to(self.device)
+
+                    output = super(FBAMatting, self).__call__(
+                        inpt_img_batches,
+                        inpt_trimaps_batches,
+                        inpt_img_batches_transformed,
+                        inpt_trimaps_transformed,
+                    )
+                    output_cpu = output.cpu()
+                    del (
+                        inpt_img_batches,
+                        inpt_trimaps_batches,
+                        inpt_img_batches_transformed,
+                        inpt_trimaps_transformed,
+                        output,
+                    )
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(output_cpu[x], inpt_trimaps[x]),
+                    range(len(inpt_images)),
+                )
+                collect_masks += masks
+            return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class FBAMatting +(device='cpu', input_tensor_size: Union[List[int], int] = 2048, batch_size: int = 2, encoder='resnet50_GN_WS', load_pretrained: bool = True, fp16: bool = False) +
+
+

FBA Matting Neural Network to improve edges on image.

+

Initialize the FBAMatting model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_tensor_size : Union[List[int], int], default=2048
+
input image size
+
batch_size : int, default=2
+
the number of images that the neural network processes in one run
+
encoder : str, default=resnet50_GN_WS
+
neural network encoder head
+
+
+

TODO

+

Add more encoders to documentation as Literal typehint.

+
+
+
load_pretrained : bool, default=True
+
loading pretrained model
+
fp16 : bool, default=False
+
use half precision
+
+
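A minimal usage sketch; the trimap file below is hypothetical (in the pipeline the trimap is normally produced by a trimap generator from a segmentation mask):

``` python
from PIL import Image
from carvekit.ml.wrap.fba_matting import FBAMatting

fba = FBAMatting(device="cpu", input_tensor_size=2048, batch_size=1, fp16=False)

image = Image.open("./tests/data/cat.jpg")                        # illustrative path
trimap = Image.open("./tests/data/cat_trimap.png").convert("L")   # hypothetical trimap file

# Parallel lists of images and trimaps; returns one refined "L"-mode mask per pair.
masks = fba([image], [trimap])
masks[0].save("cat_alpha.png")
```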
+ +Expand source code + +
class FBAMatting(FBA):
+    """
+    FBA Matting Neural Network to improve edges on image.
+    """
+
+    def __init__(
+        self,
+        device="cpu",
+        input_tensor_size: Union[List[int], int] = 2048,
+        batch_size: int = 2,
+        encoder="resnet50_GN_WS",
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the FBAMatting model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_tensor_size (Union[List[int], int], default=2048): input image size
+            batch_size (int, default=2): the number of images that the neural network processes in one run
+            encoder (str, default=resnet50_GN_WS): neural network encoder head
+            .. TODO::
+                Add more encoders to documentation as Literal typehint.
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use half precision
+
+        """
+        super(FBAMatting, self).__init__(encoder=encoder)
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_tensor_size, list):
+            self.input_image_size = input_tensor_size[:2]
+        else:
+            self.input_image_size = (input_tensor_size, input_tensor_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(torch.load(fba_pretrained(), map_location=self.device))
+        self.eval()
+
+    def data_preprocessing(
+        self, data: Union[PIL.Image.Image, np.ndarray]
+    ) -> Tuple[torch.FloatTensor, torch.FloatTensor]:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (Union[PIL.Image.Image, np.ndarray]): input image
+
+        Returns:
+            Tuple[torch.FloatTensor, torch.FloatTensor]: input for neural network
+
+        """
+        resized = data.copy()
+        if self.batch_size == 1:
+            resized.thumbnail(self.input_image_size, resample=3)
+        else:
+            resized = resized.resize(self.input_image_size, resample=3)
+        # noinspection PyTypeChecker
+        image = np.array(resized, dtype=np.float64)
+        image = image / 255.0  # Normalize image to [0, 1] values range
+        if resized.mode == "RGB":
+            image = image[:, :, ::-1]
+        elif resized.mode == "L":
+            image2 = np.copy(image)
+            h, w = image2.shape
+            image = np.zeros((h, w, 2))  # Transform trimap to binary data format
+            image[image2 == 1, 1] = 1
+            image[image2 == 0, 0] = 1
+        else:
+            raise ValueError("Incorrect color mode for image")
+        h, w = image.shape[:2]  # Scale input to a multiple of 8
+        h1 = int(np.ceil(1.0 * h / 8) * 8)
+        w1 = int(np.ceil(1.0 * w / 8) * 8)
+        x_scale = cv2.resize(image, (w1, h1), interpolation=cv2.INTER_LANCZOS4)
+        image_tensor = torch.from_numpy(x_scale).permute(2, 0, 1)[None, :, :, :].float()
+        if resized.mode == "RGB":
+            return image_tensor, groupnorm_normalise_image(
+                image_tensor.clone(), format="nchw"
+            )
+        else:
+            return (
+                image_tensor,
+                torch.from_numpy(trimap_transform(x_scale))
+                .permute(2, 0, 1)[None, :, :, :]
+                .float(),
+            )
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, trimap: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            trimap (PIL.Image.Image): Map with the area we need to refine
+
+        Returns:
+            PIL.Image.Image: Segmentation mask
+
+        """
+        if trimap.mode != "L":
+            raise ValueError("Incorrect color mode for trimap")
+        pred = data.numpy().transpose((1, 2, 0))
+        pred = cv2.resize(pred, trimap.size, cv2.INTER_LANCZOS4)[:, :, 0]
+        # noinspection PyTypeChecker
+        # Clean mask by removing all false predictions outside trimap and already known area
+        trimap_arr = np.array(trimap.copy())
+        pred[trimap_arr[:, :] == 0] = 0
+        # pred[trimap_arr[:, :] == 255] = 1
+        pred[pred < 0.3] = 0
+        return Image.fromarray(pred * 255).convert("L")
+
+    def __call__(
+        self,
+        images: List[Union[str, pathlib.Path, PIL.Image.Image]],
+        trimaps: List[Union[str, pathlib.Path, PIL.Image.Image]],
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+            trimaps (List[Union[str, pathlib.Path, PIL.Image.Image]]): Maps with the areas we need to refine
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images
+
+        """
+
+        if len(images) != len(trimaps):
+            raise ValueError(
+                "Len of specified arrays of images and trimaps should be equal!"
+            )
+
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for idx_batch in batch_generator(range(len(images)), self.batch_size):
+                inpt_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(images[x])), idx_batch
+                )
+
+                inpt_trimaps = thread_pool_processing(
+                    lambda x: convert_image(load_image(trimaps[x]), mode="L"), idx_batch
+                )
+
+                inpt_img_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_images
+                )
+                inpt_trimaps_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_trimaps
+                )
+
+                inpt_img_batches_transformed = torch.vstack(
+                    [i[1] for i in inpt_img_batches]
+                )
+                inpt_img_batches = torch.vstack([i[0] for i in inpt_img_batches])
+
+                inpt_trimaps_transformed = torch.vstack(
+                    [i[1] for i in inpt_trimaps_batches]
+                )
+                inpt_trimaps_batches = torch.vstack(
+                    [i[0] for i in inpt_trimaps_batches]
+                )
+
+                with torch.no_grad():
+                    inpt_img_batches = inpt_img_batches.to(self.device)
+                    inpt_trimaps_batches = inpt_trimaps_batches.to(self.device)
+                    inpt_img_batches_transformed = inpt_img_batches_transformed.to(
+                        self.device
+                    )
+                    inpt_trimaps_transformed = inpt_trimaps_transformed.to(self.device)
+
+                    output = super(FBAMatting, self).__call__(
+                        inpt_img_batches,
+                        inpt_trimaps_batches,
+                        inpt_img_batches_transformed,
+                        inpt_trimaps_transformed,
+                    )
+                    output_cpu = output.cpu()
+                    del (
+                        inpt_img_batches,
+                        inpt_trimaps_batches,
+                        inpt_img_batches_transformed,
+                        inpt_trimaps_transformed,
+                        output,
+                    )
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(output_cpu[x], inpt_trimaps[x]),
+                    range(len(inpt_images)),
+                )
+                collect_masks += masks
+            return collect_masks
+
+

Ancestors

+
    +
  • FBA
  • +
  • torch.nn.modules.module.Module
  • +
+

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, trimap: PIL.Image.Image) -> PIL.Image.Image +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
trimap : PIL.Image.Image
+
Map with the area we need to refine
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask
+
+
+ +Expand source code + +
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, trimap: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data (torch.Tensor): output data from neural network
+        trimap (PIL.Image.Image): Map with the area we need to refine
+
+    Returns:
+        PIL.Image.Image: Segmentation mask
+
+    """
+    if trimap.mode != "L":
+        raise ValueError("Incorrect color mode for trimap")
+    pred = data.numpy().transpose((1, 2, 0))
+    pred = cv2.resize(pred, trimap.size, cv2.INTER_LANCZOS4)[:, :, 0]
+    # noinspection PyTypeChecker
+    # Clean mask by removing all false predictions outside trimap and already known area
+    trimap_arr = np.array(trimap.copy())
+    pred[trimap_arr[:, :] == 0] = 0
+    # pred[trimap_arr[:, :] == 255] = 1
+    pred[pred < 0.3] = 0
+    return Image.fromarray(pred * 255).convert("L")
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: Union[PIL.Image.Image, numpy.ndarray]) -> Tuple[torch.FloatTensor, torch.FloatTensor] +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : Union[PIL.Image.Image, np.ndarray]
+
input image
+
+

Returns

+
+
Tuple[torch.FloatTensor, torch.FloatTensor]
+
input for neural network
+
+
+ +Expand source code + +
def data_preprocessing(
+    self, data: Union[PIL.Image.Image, np.ndarray]
+) -> Tuple[torch.FloatTensor, torch.FloatTensor]:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data (Union[PIL.Image.Image, np.ndarray]): input image
+
+    Returns:
+        Tuple[torch.FloatTensor, torch.FloatTensor]: input for neural network
+
+    """
+    resized = data.copy()
+    if self.batch_size == 1:
+        resized.thumbnail(self.input_image_size, resample=3)
+    else:
+        resized = resized.resize(self.input_image_size, resample=3)
+    # noinspection PyTypeChecker
+    image = np.array(resized, dtype=np.float64)
+    image = image / 255.0  # Normalize image to [0, 1] values range
+    if resized.mode == "RGB":
+        image = image[:, :, ::-1]
+    elif resized.mode == "L":
+        image2 = np.copy(image)
+        h, w = image2.shape
+        image = np.zeros((h, w, 2))  # Transform trimap to binary data format
+        image[image2 == 1, 1] = 1
+        image[image2 == 0, 0] = 1
+    else:
+        raise ValueError("Incorrect color mode for image")
+    h, w = image.shape[:2]  # Scale input to a multiple of 8
+    h1 = int(np.ceil(1.0 * h / 8) * 8)
+    w1 = int(np.ceil(1.0 * w / 8) * 8)
+    x_scale = cv2.resize(image, (w1, h1), interpolation=cv2.INTER_LANCZOS4)
+    image_tensor = torch.from_numpy(x_scale).permute(2, 0, 1)[None, :, :, :].float()
+    if resized.mode == "RGB":
+        return image_tensor, groupnorm_normalise_image(
+            image_tensor.clone(), format="nchw"
+        )
+    else:
+        return (
+            image_tensor,
+            torch.from_numpy(trimap_transform(x_scale))
+            .permute(2, 0, 1)[None, :, :, :]
+            .float(),
+        )
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/index.html b/docs/api/carvekit/ml/wrap/index.html new file mode 100644 index 0000000..c2ad10b --- /dev/null +++ b/docs/api/carvekit/ml/wrap/index.html @@ -0,0 +1,112 @@ + + + + + + +carvekit.ml.wrap API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap

+
+
+
+
+

Sub-modules

+
+
carvekit.ml.wrap.basnet
+
+ +
+
carvekit.ml.wrap.cascadepsp
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.ml.wrap.deeplab_v3
+
+ +
+
carvekit.ml.wrap.fba_matting
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.ml.wrap.scene_classifier
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.ml.wrap.tracer_b7
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.ml.wrap.u2net
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.ml.wrap.yolov4
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/scene_classifier.html b/docs/api/carvekit/ml/wrap/scene_classifier.html new file mode 100644 index 0000000..14c52e6 --- /dev/null +++ b/docs/api/carvekit/ml/wrap/scene_classifier.html @@ -0,0 +1,459 @@ + + + + + + +carvekit.ml.wrap.scene_classifier API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.scene_classifier

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+
+import PIL.Image
+import torch
+import torch.nn.functional as F
+import torchvision.transforms as transforms
+from typing import List, Union, Tuple
+from torch.autograd import Variable
+
+from carvekit.ml.files.models_loc import scene_classifier_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["SceneClassifier"]
+
+
+class SceneClassifier:
+    """
+    SceneClassifier model interface
+
+    Description:
+        Performs a primary analysis of the image in order to select the necessary method for removing the background.
+        The choice is made by classifying the scene type.
+
+        The output can be the following types:
+        - hard
+        - soft
+        - digital
+
+    """
+
+    def __init__(
+        self,
+        topk: int = 1,
+        device="cpu",
+        batch_size: int = 4,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the Scene Classifier.
+
+        Args:
+            topk: number of top classes to return
+            device: processing device
+            batch_size: the number of images that the neural network processes in one run
+            fp16: use fp16 precision
+
+        """
+        if model_path is None:
+            model_path = scene_classifier_pretrained()
+        self.topk = topk
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+
+        self.transform = transforms.Compose(
+            [
+                transforms.Resize(256),
+                transforms.CenterCrop(224),
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+        state_dict = torch.load(model_path, map_location=device)
+        self.model = state_dict["model"]
+        self.class_to_idx = state_dict["class_to_idx"]
+        self.idx_to_class = {v: k for k, v in self.class_to_idx.items()}
+        self.model.to(device)
+        self.model.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+
+        Returns:
+            Top-k class of scene type, probability of these classes
+
+        """
+        ps = F.softmax(data.float(), dim=0)
+        topk = ps.cpu().topk(self.topk)
+
+        probs, classes = (e.data.numpy().squeeze().tolist() for e in topk)
+        if isinstance(classes, int):
+            classes = [classes]
+            probs = [probs]
+        return list(map(lambda x: self.idx_to_class[x], classes)), probs
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> Tuple[List[str], List[float]]:
+        """
+        Passes input images through the neural network and returns class predictions.
+
+        Args:
+            images: input images
+
+        Returns:
+            Top-k class of scene type, probability of these classes for every passed image
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.model, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = Variable(batches).to(self.device)
+                    masks = self.model.forward(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks_cpu[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class SceneClassifier +(topk: int = 1, device='cpu', batch_size: int = 4, fp16: bool = False, model_path: Union[str, pathlib.Path] = None) +
+
+

SceneClassifier model interface

+

Description

+

Performs a primary analysis of the image in order to select the necessary method for removing the background. +The choice is made by classifying the scene type.

+

The output can be the following types: +- hard +- soft +- digital

+

Initialize the Scene Classifier.

+

Args

+
+
topk
+
number of top classes to return
+
device
+
processing device
+
batch_size
+
the number of images that the neural network processes in one run
+
fp16
+
use fp16 precision
+
+
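A minimal usage sketch, assuming the bundled pretrained checkpoint is available; the image path is illustrative:

``` python
from carvekit.ml.wrap.scene_classifier import SceneClassifier

scene_classifier = SceneClassifier(device="cpu", batch_size=4, topk=1)

# One (classes, probabilities) tuple per input image.
predictions = scene_classifier(["./tests/data/cat.jpg"])  # illustrative path
classes, probs = predictions[0]
print(classes, probs)  # e.g. ['soft'], [0.93] (actual values depend on the image)
```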
+ +Expand source code + +
class SceneClassifier:
+    """
+    SceneClassifier model interface
+
+    Description:
+        Performs a primary analysis of the image in order to select the necessary method for removing the background.
+        The choice is made by classifying the scene type.
+
+        The output can be the following types:
+        - hard
+        - soft
+        - digital
+
+    """
+
+    def __init__(
+        self,
+        topk: int = 1,
+        device="cpu",
+        batch_size: int = 4,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the Scene Classifier.
+
+        Args:
+            topk: number of top classes to return
+            device: processing device
+            batch_size: the number of images that the neural network processes in one run
+            fp16: use fp16 precision
+
+        """
+        if model_path is None:
+            model_path = scene_classifier_pretrained()
+        self.topk = topk
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+
+        self.transform = transforms.Compose(
+            [
+                transforms.Resize(256),
+                transforms.CenterCrop(224),
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+        state_dict = torch.load(model_path, map_location=device)
+        self.model = state_dict["model"]
+        self.class_to_idx = state_dict["class_to_idx"]
+        self.idx_to_class = {v: k for k, v in self.class_to_idx.items()}
+        self.model.to(device)
+        self.model.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+
+        Returns:
+            Top-k class of scene type, probability of these classes
+
+        """
+        ps = F.softmax(data.float(), dim=0)
+        topk = ps.cpu().topk(self.topk)
+
+        probs, classes = (e.data.numpy().squeeze().tolist() for e in topk)
+        if isinstance(classes, int):
+            classes = [classes]
+            probs = [probs]
+        return list(map(lambda x: self.idx_to_class[x], classes)), probs
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> Tuple[List[str], List[float]]:
+        """
+        Passes input images through the neural network and returns class predictions.
+
+        Args:
+            images: input images
+
+        Returns:
+            Top-k class of scene type, probability of these classes for every passed image
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.model, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = Variable(batches).to(self.device)
+                    masks = self.model.forward(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks_cpu[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+

Methods

+
+
+def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]] +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data
+
output data from neural network
+
+

Returns

+

Top-k class of scene type, probability of these classes

+
+ +Expand source code + +
def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data: output data from neural network
+
+    Returns:
+        Top-k class of scene type, probability of these classes
+
+    """
+    ps = F.softmax(data.float(), dim=0)
+    topk = ps.cpu().topk(self.topk)
+
+    probs, classes = (e.data.numpy().squeeze().tolist() for e in topk)
+    if isinstance(classes, int):
+        classes = [classes]
+        probs = [probs]
+    return list(map(lambda x: self.idx_to_class[x], classes)), probs
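A small sketch of why the `isinstance` check above is needed when `topk == 1`; the logits are illustrative:

``` python
import torch
import torch.nn.functional as F

logits = torch.tensor([2.0, 0.5, 0.1])   # hypothetical raw scores for three classes
probs, classes = F.softmax(logits, dim=0).topk(1)
# squeeze().tolist() on a 1-element tensor yields a plain scalar, not a list:
print(classes.squeeze().tolist())        # 0 (an int), hence the wrapping into [classes]
```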
+
+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data
+
input image
+
+

Returns

+

input for neural network

+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data: input image
+
+    Returns:
+        input for neural network
+
+    """
+
+    return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/tracer_b7.html b/docs/api/carvekit/ml/wrap/tracer_b7.html new file mode 100644 index 0000000..97365c5 --- /dev/null +++ b/docs/api/carvekit/ml/wrap/tracer_b7.html @@ -0,0 +1,492 @@ + + + + + + +carvekit.ml.wrap.tracer_b7 API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.tracer_b7

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+from typing import List, Union
+
+import PIL.Image
+import numpy as np
+import torch
+import torchvision.transforms as transforms
+from PIL import Image
+
+from carvekit.ml.arch.tracerb7.efficientnet import EfficientEncoderB7
+from carvekit.ml.arch.tracerb7.tracer import TracerDecoder
+from carvekit.ml.files.models_loc import tracer_b7_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["TracerUniversalB7"]
+
+
+class TracerUniversalB7(TracerDecoder):
+    """TRACER B7 model interface"""
+
+    def __init__(
+        self,
+        device="cpu",
+        input_image_size: Union[List[int], int] = 640,
+        batch_size: int = 4,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the TRACER model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=640): input image size
+            batch_size(int, default=4): the number of images that the neural network processes in one run
+            load_pretrained(bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use fp16 precision
+            model_path (Union[str, pathlib.Path], default=None): path to the model
+            .. note:: REDO
+        """
+        if model_path is None:
+            model_path = tracer_b7_pretrained()
+        super(TracerUniversalB7, self).__init__(
+            encoder=EfficientEncoderB7(),
+            rfb_channel=[32, 64, 128],
+            features_channels=[48, 80, 224, 640],
+        )
+
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Resize(self.input_image_size),
+                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+            ]
+        )
+        self.to(device)
+        if load_pretrained:
+            # TODO remove edge detector from weights. It doesn't work well with this model!
+            self.load_state_dict(
+                torch.load(model_path, map_location=self.device), strict=False
+            )
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.FloatTensor: input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask
+
+        """
+        output = (data.type(torch.FloatTensor).detach().cpu().numpy() * 255.0).astype(
+            np.uint8
+        )
+        output = output.squeeze(0)
+        mask = Image.fromarray(output).convert("L")
+        mask = mask.resize(original_image.size, resample=Image.BILINEAR)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = batches.to(self.device)
+                    masks = super(TracerDecoder, self).__call__(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(
+                        masks_cpu[x], converted_images[x]
+                    ),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class TracerUniversalB7 +(device='cpu', input_image_size: Union[List[int], int] = 640, batch_size: int = 4, load_pretrained: bool = True, fp16: bool = False, model_path: Union[str, pathlib.Path] = None) +
+
+

TRACER B7 model interface

+

Initialize the TRACER model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_image_size : Union[List[int], int], default=640
+
input image size
+
batch_size : int, default=4
+
the number of images that the neural network processes in one run
+
load_pretrained : bool, default=True
+
loading pretrained model
+
fp16 : bool, default=False
+
use fp16 precision
+
model_path : Union[str, pathlib.Path], default=None
+
path to the model
+
+
+

Note: REDO

+
+
+ +Expand source code + +
class TracerUniversalB7(TracerDecoder):
+    """TRACER B7 model interface"""
+
+    def __init__(
+        self,
+        device="cpu",
+        input_image_size: Union[List[int], int] = 640,
+        batch_size: int = 4,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the TRACER model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=640): input image size
+            batch_size (int, default=4): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use fp16 precision
+            model_path (Union[str, pathlib.Path], default=None): path to the model
+            .. note:: REDO
+        """
+        if model_path is None:
+            model_path = tracer_b7_pretrained()
+        super(TracerUniversalB7, self).__init__(
+            encoder=EfficientEncoderB7(),
+            rfb_channel=[32, 64, 128],
+            features_channels=[48, 80, 224, 640],
+        )
+
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Resize(self.input_image_size),
+                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+            ]
+        )
+        self.to(device)
+        if load_pretrained:
+            # TODO remove edge detector from weights. It doesn't work well with this model!
+            self.load_state_dict(
+                torch.load(model_path, map_location=self.device), strict=False
+            )
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.FloatTensor: input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask
+
+        """
+        output = (data.type(torch.FloatTensor).detach().cpu().numpy() * 255.0).astype(
+            np.uint8
+        )
+        output = output.squeeze(0)
+        mask = Image.fromarray(output).convert("L")
+        mask = mask.resize(original_image.size, resample=Image.BILINEAR)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = batches.to(self.device)
+                    masks = super(TracerDecoder, self).__call__(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(
+                        masks_cpu[x], converted_images[x]
+                    ),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+
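For quick orientation, here is a minimal usage sketch of this wrapper. It mirrors the README examples; the test image path and output filename are illustrative only.

``` python
import torch
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7

# Load TRACER B7 with pretrained weights (fetched automatically when
# model_path is None and load_pretrained is True).
seg_net = TracerUniversalB7(
    device='cuda' if torch.cuda.is_available() else 'cpu',
    input_image_size=640,  # TRACER B7 is intended for 640px inputs
    batch_size=1,
    fp16=False,
)

# __call__ accepts file paths or PIL images and returns one "L"-mode mask per input.
masks = seg_net(['./tests/data/cat.jpg'])
masks[0].save('cat_mask.png')
```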

Ancestors

+ +

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) ‑> PIL.Image.Image +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask
+
+
+ +Expand source code + +
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, original_image: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data (torch.Tensor): output data from neural network
+        original_image (PIL.Image.Image): input image which was used for predicted data
+
+    Returns:
+        PIL.Image.Image: Segmentation mask
+
+    """
+    output = (data.type(torch.FloatTensor).detach().cpu().numpy() * 255.0).astype(
+        np.uint8
+    )
+    output = output.squeeze(0)
+    mask = Image.fromarray(output).convert("L")
+    mask = mask.resize(original_image.size, resample=Image.BILINEAR)
+    return mask
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) ‑> torch.FloatTensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.FloatTensor
+
input for neural network
+
+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data (PIL.Image.Image): input image
+
+    Returns:
+        torch.FloatTensor: input for neural network
+
+    """
+
+    return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/u2net.html b/docs/api/carvekit/ml/wrap/u2net.html new file mode 100644 index 0000000..705f732 --- /dev/null +++ b/docs/api/carvekit/ml/wrap/u2net.html @@ -0,0 +1,485 @@ + + + + + + +carvekit.ml.wrap.u2net API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.u2net

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+import warnings
+
+from typing import List, Union
+import PIL.Image
+import numpy as np
+import torch
+from PIL import Image
+
+from carvekit.ml.arch.u2net.u2net import U2NETArchitecture
+from carvekit.ml.files.models_loc import u2net_full_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["U2NET"]
+
+
+class U2NET(U2NETArchitecture):
+    """U^2-Net model interface"""
+
+    def __init__(
+        self,
+        layers_cfg="full",
+        device="cpu",
+        input_image_size: Union[List[int], int] = 320,
+        batch_size: int = 10,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the U2NET model
+
+        Args:
+            layers_cfg: neural network layers configuration
+            device: processing device
+            input_image_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use fp16 precision // not supported at this moment.
+
+        """
+        super(U2NET, self).__init__(cfg_type=layers_cfg, out_ch=1)
+        if fp16:
+            warnings.warn("FP16 is not supported at this moment for U2NET model")
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(u2net_full_pretrained(), map_location=self.device)
+            )
+
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.FloatTensor: input for neural network
+
+        """
+        resized = data.resize(self.input_image_size, resample=3)
+        # noinspection PyTypeChecker
+        resized_arr = np.array(resized, dtype=float)
+        temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+        if np.max(resized_arr) != 0:
+            resized_arr /= np.max(resized_arr)
+        temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+        temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+        temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+        temp_image = temp_image.transpose((2, 0, 1))
+        temp_image = np.expand_dims(temp_image, 0)
+        return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        data = data.unsqueeze(0)
+        mask = data[:, 0, :, :]
+        ma = torch.max(mask)  # Normalizes prediction
+        mi = torch.min(mask)
+        predict = ((mask - mi) / (ma - mi)).squeeze()
+        predict_np = predict.cpu().data.numpy() * 255
+        mask = Image.fromarray(predict_np).convert("L")
+        mask = mask.resize(original_image.size, resample=3)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+        collect_masks = []
+        for image_batch in batch_generator(images, self.batch_size):
+            converted_images = thread_pool_processing(
+                lambda x: convert_image(load_image(x)), image_batch
+            )
+            batches = torch.vstack(
+                thread_pool_processing(self.data_preprocessing, converted_images)
+            )
+            with torch.no_grad():
+                batches = batches.to(self.device)
+                masks, d2, d3, d4, d5, d6, d7 = super(U2NET, self).__call__(batches)
+                masks_cpu = masks.cpu()
+                del d2, d3, d4, d5, d6, d7, batches, masks
+            masks = thread_pool_processing(
+                lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]),
+                range(len(converted_images)),
+            )
+            collect_masks += masks
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class U2NET +(layers_cfg='full', device='cpu', input_image_size: Union[List[int], int] = 320, batch_size: int = 10, load_pretrained: bool = True, fp16: bool = False) +
+
+

U^2-Net model interface

+

Initialize the U2NET model

+

Args

+
+
layers_cfg
+
neural network layers configuration
+
device
+
processing device
+
input_image_size
+
input image size
+
batch_size
+
the number of images that the neural network processes in one run
+
load_pretrained
+
loading pretrained model
+
fp16
+
use fp16 precision // not supported at this moment.
+
+
+ +Expand source code + +
class U2NET(U2NETArchitecture):
+    """U^2-Net model interface"""
+
+    def __init__(
+        self,
+        layers_cfg="full",
+        device="cpu",
+        input_image_size: Union[List[int], int] = 320,
+        batch_size: int = 10,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the U2NET model
+
+        Args:
+            layers_cfg: neural network layers configuration
+            device: processing device
+            input_image_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use fp16 precision // not supported at this moment.
+
+        """
+        super(U2NET, self).__init__(cfg_type=layers_cfg, out_ch=1)
+        if fp16:
+            warnings.warn("FP16 is not supported at this moment for U2NET model")
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(u2net_full_pretrained(), map_location=self.device)
+            )
+
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.FloatTensor: input for neural network
+
+        """
+        resized = data.resize(self.input_image_size, resample=3)
+        # noinspection PyTypeChecker
+        resized_arr = np.array(resized, dtype=float)
+        temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+        if np.max(resized_arr) != 0:
+            resized_arr /= np.max(resized_arr)
+        temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+        temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+        temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+        temp_image = temp_image.transpose((2, 0, 1))
+        temp_image = np.expand_dims(temp_image, 0)
+        return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        data = data.unsqueeze(0)
+        mask = data[:, 0, :, :]
+        ma = torch.max(mask)  # Normalizes prediction
+        mi = torch.min(mask)
+        predict = ((mask - mi) / (ma - mi)).squeeze()
+        predict_np = predict.cpu().data.numpy() * 255
+        mask = Image.fromarray(predict_np).convert("L")
+        mask = mask.resize(original_image.size, resample=3)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+        collect_masks = []
+        for image_batch in batch_generator(images, self.batch_size):
+            converted_images = thread_pool_processing(
+                lambda x: convert_image(load_image(x)), image_batch
+            )
+            batches = torch.vstack(
+                thread_pool_processing(self.data_preprocessing, converted_images)
+            )
+            with torch.no_grad():
+                batches = batches.to(self.device)
+                masks, d2, d3, d4, d5, d6, d7 = super(U2NET, self).__call__(batches)
+                masks_cpu = masks.cpu()
+                del d2, d3, d4, d5, d6, d7, batches, masks
+            masks = thread_pool_processing(
+                lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]),
+                range(len(converted_images)),
+            )
+            collect_masks += masks
+        return collect_masks
+
+
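A minimal usage sketch of the U^2-Net wrapper, analogous to the TRACER example above (the image path is illustrative only):

``` python
from carvekit.ml.wrap.u2net import U2NET

# U^2-Net works on 320px inputs by default; fp16 is currently ignored for this model.
u2net = U2NET(device='cpu', input_image_size=320, batch_size=10)

masks = u2net(['./tests/data/cat.jpg'])  # list of PIL "L" masks, one per input image
masks[0].save('cat_mask_u2net.png')
```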

Ancestors

+ +

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) ‑> PIL.Image.Image +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask as PIL Image instance
+
+
+ +Expand source code + +
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, original_image: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data (torch.Tensor): output data from neural network
+        original_image (PIL.Image.Image): input image which was used for predicted data
+
+    Returns:
+        PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+    """
+    data = data.unsqueeze(0)
+    mask = data[:, 0, :, :]
+    ma = torch.max(mask)  # Normalizes prediction
+    mi = torch.min(mask)
+    predict = ((mask - mi) / (ma - mi)).squeeze()
+    predict_np = predict.cpu().data.numpy() * 255
+    mask = Image.fromarray(predict_np).convert("L")
+    mask = mask.resize(original_image.size, resample=3)
+    return mask
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) ‑> torch.FloatTensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.FloatTensor
+
input for neural network
+
+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data (PIL.Image.Image): input image
+
+    Returns:
+        torch.FloatTensor: input for neural network
+
+    """
+    resized = data.resize(self.input_image_size, resample=3)
+    # noinspection PyTypeChecker
+    resized_arr = np.array(resized, dtype=float)
+    temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+    if np.max(resized_arr) != 0:
+        resized_arr /= np.max(resized_arr)
+    temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+    temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+    temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+    temp_image = temp_image.transpose((2, 0, 1))
+    temp_image = np.expand_dims(temp_image, 0)
+    return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/ml/wrap/yolov4.html b/docs/api/carvekit/ml/wrap/yolov4.html new file mode 100644 index 0000000..25d9a6a --- /dev/null +++ b/docs/api/carvekit/ml/wrap/yolov4.html @@ -0,0 +1,881 @@ + + + + + + +carvekit.ml.wrap.yolov4 API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.ml.wrap.yolov4

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+
+import pathlib
+
+import PIL.Image
+import PIL.Image
+import numpy as np
+import pydantic
+import torch
+from torch.autograd import Variable
+from typing import List, Union
+
+from carvekit.ml.arch.yolov4.models import Yolov4
+from carvekit.ml.arch.yolov4.utils import post_processing
+from carvekit.ml.files.models_loc import yolov4_coco_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["YoloV4_COCO", "SimplifiedYoloV4"]
+
+
+class Object(pydantic.BaseModel):
+    """Object class"""
+
+    class_name: str
+    confidence: float
+    x1: int
+    y1: int
+    x2: int
+    y2: int
+
+
+class YoloV4_COCO(Yolov4):
+    """YoloV4 COCO model wrapper"""
+
+    def __init__(
+        self,
+        n_classes: int = 80,
+        device="cpu",
+        classes: List[str] = None,
+        input_image_size: Union[List[int], int] = 608,
+        batch_size: int = 4,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the YoloV4 COCO.
+
+        Args:
+            n_classes: number of classes
+            device: processing device
+            input_image_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            fp16: use fp16 precision
+            model_path: path to model weights
+            load_pretrained: load pretrained weights
+        """
+        if model_path is None:
+            model_path = yolov4_coco_pretrained()
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+
+        if load_pretrained:
+            state_dict = torch.load(model_path, map_location="cpu")
+            self.classes = state_dict["classes"]
+            super().__init__(n_classes=len(state_dict["classes"]), inference=True)
+            self.load_state_dict(state_dict["state"])
+        else:
+            self.classes = classes
+            super().__init__(n_classes=n_classes, inference=True)
+
+        self.to(device)
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+        image = data.resize(self.input_image_size)
+        # noinspection PyTypeChecker
+        image = np.array(image).astype(np.float32)
+        image = image.transpose((2, 0, 1))
+        image = image / 255.0
+        image = torch.from_numpy(image).float()
+        return torch.unsqueeze(image, 0).type(torch.FloatTensor)
+
+    def data_postprocessing(
+        self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]
+    ) -> List[Object]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            images: input images
+
+
+        Returns:
+            list of objects for each image
+
+        """
+        output = post_processing(0.4, 0.6, data)
+        images_objects = []
+        for image_idx, image_objects in enumerate(output):
+            image_size = images[image_idx].size
+            objects = []
+            for obj in image_objects:
+                objects.append(
+                    Object(
+                        class_name=self.classes[obj[6]],
+                        confidence=obj[5],
+                        x1=int(obj[0] * image_size[0]),
+                        y1=int(obj[1] * image_size[1]),
+                        x2=int(obj[2] * image_size[0]),
+                        y2=int(obj[3] * image_size[1]),
+                    )
+                )
+            images_objects.append(objects)
+
+        return images_objects
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[List[Object]]:
+        """
+        Passes input images through the neural network
+
+        Args:
+            images: input images
+
+        Returns:
+            list of objects for each image
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = Variable(batches).to(self.device)
+                    out = super().__call__(batches)
+                    out_cpu = [out_i.cpu() for out_i in out]
+                    del batches, out
+                out = self.data_postprocessing(out_cpu, converted_images)
+                collect_masks += out
+
+        return collect_masks
+
+
+class SimplifiedYoloV4(YoloV4_COCO):
+    """
+    Same as the YoloV4 COCO wrapper, but it classifies images into only 5 supercategories.
+
+    human - Scenes of people, such as portrait photographs
+    animals - Scenes with animals
+    objects - Scenes with normal objects
+    cars - Scenes with cars
+    other - Other scenes
+    """
+
+    db = {
+        "human": ["person"],
+        "animals": [
+            "bird",
+            "cat",
+            "dog",
+            "horse",
+            "sheep",
+            "cow",
+            "elephant",
+            "bear",
+            "zebra",
+            "giraffe",
+        ],
+        "cars": [
+            "car",
+            "motorbike",
+            "bus",
+            "truck",
+        ],
+        "objects": [
+            "bicycle",
+            "traffic light",
+            "fire hydrant",
+            "stop sign",
+            "parking meter",
+            "bench",
+            "backpack",
+            "umbrella",
+            "handbag",
+            "tie",
+            "suitcase",
+            "frisbee",
+            "skis",
+            "snowboard",
+            "sports ball",
+            "kite",
+            "baseball bat",
+            "baseball glove",
+            "skateboard",
+            "surfboard",
+            "tennis racket",
+            "bottle",
+            "wine glass",
+            "cup",
+            "fork",
+            "knife",
+            "spoon",
+            "bowl",
+            "banana",
+            "apple",
+            "sandwich",
+            "orange",
+            "broccoli",
+            "carrot",
+            "hot dog",
+            "pizza",
+            "donut",
+            "cake",
+            "chair",
+            "sofa",
+            "pottedplant",
+            "bed",
+            "diningtable",
+            "toilet",
+            "tvmonitor",
+            "laptop",
+            "mouse",
+            "remote",
+            "keyboard",
+            "cell phone",
+            "microwave",
+            "oven",
+            "toaster",
+            "sink",
+            "refrigerator",
+            "book",
+            "clock",
+            "vase",
+            "scissors",
+            "teddy bear",
+            "hair drier",
+            "toothbrush",
+        ],
+        "other": ["aeroplane", "train", "boat"],
+    }
+
+    def data_postprocessing(
+        self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]
+    ) -> List[List[str]]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            images: input images
+        """
+        objects = super().data_postprocessing(data, images)
+        new_output = []
+
+        for image_objects in objects:
+            new_objects = []
+            for obj in image_objects:
+                for key, values in list(self.db.items()):
+                    if obj.class_name in values:
+                        new_objects.append(key)  # We don't need bbox at this moment
+            new_output.append(new_objects)
+
+        return new_output
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class SimplifiedYoloV4 +(n_classes: int = 80, device='cpu', classes: List[str] = None, input_image_size: Union[List[int], int] = 608, batch_size: int = 4, load_pretrained: bool = True, fp16: bool = False, model_path: Union[str, pathlib.Path] = None) +
+
+

Same as the YoloV4 COCO wrapper, but it classifies images into only 5 supercategories.

+

human - Scenes of people, such as portrait photographs +animals - Scenes with animals +objects - Scenes with normal objects +cars - Scenes with cars +other - Other scenes

+

Initialize the YoloV4 COCO.

+

Args

+
+
n_classes
+
number of classes
+
device
+
processing device
+
input_image_size
+
input image size
+
batch_size
+
the number of images that the neural network processes in one run
+
fp16
+
use fp16 precision
+
model_path
+
path to model weights
+
load_pretrained
+
load pretrained weights
+
+
+ +Expand source code + +
class SimplifiedYoloV4(YoloV4_COCO):
+    """
+    Same as the YoloV4 COCO wrapper, but it classifies images into only 5 supercategories.
+
+    human - Scenes of people, such as portrait photographs
+    animals - Scenes with animals
+    objects - Scenes with normal objects
+    cars - Scenes with cars
+    other - Other scenes
+    """
+
+    db = {
+        "human": ["person"],
+        "animals": [
+            "bird",
+            "cat",
+            "dog",
+            "horse",
+            "sheep",
+            "cow",
+            "elephant",
+            "bear",
+            "zebra",
+            "giraffe",
+        ],
+        "cars": [
+            "car",
+            "motorbike",
+            "bus",
+            "truck",
+        ],
+        "objects": [
+            "bicycle",
+            "traffic light",
+            "fire hydrant",
+            "stop sign",
+            "parking meter",
+            "bench",
+            "backpack",
+            "umbrella",
+            "handbag",
+            "tie",
+            "suitcase",
+            "frisbee",
+            "skis",
+            "snowboard",
+            "sports ball",
+            "kite",
+            "baseball bat",
+            "baseball glove",
+            "skateboard",
+            "surfboard",
+            "tennis racket",
+            "bottle",
+            "wine glass",
+            "cup",
+            "fork",
+            "knife",
+            "spoon",
+            "bowl",
+            "banana",
+            "apple",
+            "sandwich",
+            "orange",
+            "broccoli",
+            "carrot",
+            "hot dog",
+            "pizza",
+            "donut",
+            "cake",
+            "chair",
+            "sofa",
+            "pottedplant",
+            "bed",
+            "diningtable",
+            "toilet",
+            "tvmonitor",
+            "laptop",
+            "mouse",
+            "remote",
+            "keyboard",
+            "cell phone",
+            "microwave",
+            "oven",
+            "toaster",
+            "sink",
+            "refrigerator",
+            "book",
+            "clock",
+            "vase",
+            "scissors",
+            "teddy bear",
+            "hair drier",
+            "toothbrush",
+        ],
+        "other": ["aeroplane", "train", "boat"],
+    }
+
+    def data_postprocessing(
+        self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]
+    ) -> List[List[str]]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            images: input images
+        """
+        objects = super().data_postprocessing(data, images)
+        new_output = []
+
+        for image_objects in objects:
+            new_objects = []
+            for obj in image_objects:
+                for key, values in list(self.db.items()):
+                    if obj.class_name in values:
+                        new_objects.append(key)  # We don't need bbox at this moment
+            new_output.append(new_objects)
+
+        return new_output
+
+
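A rough sketch of using this wrapper on its own to get per-image supercategory labels, matching how AutoInterface uses it as an object classifier (the image path is illustrative only):

``` python
from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4

object_classifier = SimplifiedYoloV4(device="cpu", batch_size=1)

# Returns a list of supercategory names for each input image, e.g. ['animals'].
labels_per_image = object_classifier(['./tests/data/cat.jpg'])
print(labels_per_image[0])
```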

Ancestors

+ +

Class variables

+
+
var db
+
+
+
+
+

Methods

+
+
+def data_postprocessing(self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]) ‑> List[List[str]] +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data
+
output data from neural network
+
images
+
input images
+
+
+ +Expand source code + +
def data_postprocessing(
+    self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]
+) -> List[List[str]]:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data: output data from neural network
+        images: input images
+    """
+    objects = super().data_postprocessing(data, images)
+    new_output = []
+
+    for image_objects in objects:
+        new_objects = []
+        for obj in image_objects:
+            for key, values in list(self.db.items()):
+                if obj.class_name in values:
+                    new_objects.append(key)  # We don't need bbox at this moment
+        new_output.append(new_objects)
+
+    return new_output
+
+
+
+

Inherited members

+ +
+
+class YoloV4_COCO +(n_classes: int = 80, device='cpu', classes: List[str] = None, input_image_size: Union[List[int], int] = 608, batch_size: int = 4, load_pretrained: bool = True, fp16: bool = False, model_path: Union[str, pathlib.Path] = None) +
+
+

YoloV4 COCO model wrapper

+

Initialize the YoloV4 COCO.

+

Args

+
+
n_classes
+
number of classes
+
device
+
processing device
+
input_image_size
+
input image size
+
batch_size
+
the number of images that the neural network processes in one run
+
fp16
+
use fp16 precision
+
model_path
+
path to model weights
+
load_pretrained
+
load pretrained weights
+
+
+ +Expand source code + +
class YoloV4_COCO(Yolov4):
+    """YoloV4 COCO model wrapper"""
+
+    def __init__(
+        self,
+        n_classes: int = 80,
+        device="cpu",
+        classes: List[str] = None,
+        input_image_size: Union[List[int], int] = 608,
+        batch_size: int = 4,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the YoloV4 COCO.
+
+        Args:
+            n_classes: number of classes
+            device: processing device
+            input_image_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            fp16: use fp16 precision
+            model_path: path to model weights
+            load_pretrained: load pretrained weights
+        """
+        if model_path is None:
+            model_path = yolov4_coco_pretrained()
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+
+        if load_pretrained:
+            state_dict = torch.load(model_path, map_location="cpu")
+            self.classes = state_dict["classes"]
+            super().__init__(n_classes=len(state_dict["classes"]), inference=True)
+            self.load_state_dict(state_dict["state"])
+        else:
+            self.classes = classes
+            super().__init__(n_classes=n_classes, inference=True)
+
+        self.to(device)
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+        image = data.resize(self.input_image_size)
+        # noinspection PyTypeChecker
+        image = np.array(image).astype(np.float32)
+        image = image.transpose((2, 0, 1))
+        image = image / 255.0
+        image = torch.from_numpy(image).float()
+        return torch.unsqueeze(image, 0).type(torch.FloatTensor)
+
+    def data_postprocessing(
+        self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]
+    ) -> List[Object]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            images: input images
+
+
+        Returns:
+            list of objects for each image
+
+        """
+        output = post_processing(0.4, 0.6, data)
+        images_objects = []
+        for image_idx, image_objects in enumerate(output):
+            image_size = images[image_idx].size
+            objects = []
+            for obj in image_objects:
+                objects.append(
+                    Object(
+                        class_name=self.classes[obj[6]],
+                        confidence=obj[5],
+                        x1=int(obj[0] * image_size[0]),
+                        y1=int(obj[1] * image_size[1]),
+                        x2=int(obj[2] * image_size[0]),
+                        y2=int(obj[3] * image_size[1]),
+                    )
+                )
+            images_objects.append(objects)
+
+        return images_objects
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[List[Object]]:
+        """
+        Passes input images through the neural network
+
+        Args:
+            images: input images
+
+        Returns:
+            list of objects for each image
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = Variable(batches).to(self.device)
+                    out = super().__call__(batches)
+                    out_cpu = [out_i.cpu() for out_i in out]
+                    del batches, out
+                out = self.data_postprocessing(out_cpu, converted_images)
+                collect_masks += out
+
+        return collect_masks
+
+
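A minimal detection sketch for the full COCO wrapper, assuming the pretrained weights are available (the image path is illustrative only):

``` python
from carvekit.ml.wrap.yolov4 import YoloV4_COCO

detector = YoloV4_COCO(device="cpu", batch_size=1)

# Each detection is an Object with class_name, confidence and pixel-space box corners.
for obj in detector(['./tests/data/cat.jpg'])[0]:
    print(obj.class_name, obj.confidence, (obj.x1, obj.y1), (obj.x2, obj.y2))
```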

Ancestors

+
    +
  • Yolov4
  • +
  • torch.nn.modules.module.Module
  • +
+

Subclasses

+ +

Methods

+
+
+def data_postprocessing(self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]) ‑> List[carvekit.ml.wrap.yolov4.Object] +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data
+
output data from neural network
+
images
+
input images
+
+

Returns

+

list of objects for each image

+
+ +Expand source code + +
def data_postprocessing(
+    self, data: List[torch.FloatTensor], images: List[PIL.Image.Image]
+) -> List[Object]:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data: output data from neural network
+        images: input images
+
+
+    Returns:
+        list of objects for each image
+
+    """
+    output = post_processing(0.4, 0.6, data)
+    images_objects = []
+    for image_idx, image_objects in enumerate(output):
+        image_size = images[image_idx].size
+        objects = []
+        for obj in image_objects:
+            objects.append(
+                Object(
+                    class_name=self.classes[obj[6]],
+                    confidence=obj[5],
+                    x1=int(obj[0] * image_size[0]),
+                    y1=int(obj[1] * image_size[1]),
+                    x2=int(obj[2] * image_size[0]),
+                    y2=int(obj[3] * image_size[1]),
+                )
+            )
+        images_objects.append(objects)
+
+    return images_objects
+
+
+
+def data_preprocessing(self, data: PIL.Image.Image) ‑> torch.FloatTensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data
+
input image
+
+

Returns

+

input for neural network

+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data: input image
+
+    Returns:
+        input for neural network
+
+    """
+    image = data.resize(self.input_image_size)
+    # noinspection PyTypeChecker
+    image = np.array(image).astype(np.float32)
+    image = image.transpose((2, 0, 1))
+    image = image / 255.0
+    image = torch.from_numpy(image).float()
+    return torch.unsqueeze(image, 0).type(torch.FloatTensor)
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/index.html b/docs/api/carvekit/pipelines/index.html new file mode 100644 index 0000000..52e92a9 --- /dev/null +++ b/docs/api/carvekit/pipelines/index.html @@ -0,0 +1,70 @@ + + + + + + +carvekit.pipelines API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines

+
+
+
+
+

Sub-modules

+
+
carvekit.pipelines.postprocessing
+
+
+
+
carvekit.pipelines.preprocessing
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/postprocessing.html b/docs/api/carvekit/pipelines/postprocessing.html new file mode 100644 index 0000000..354fb9a --- /dev/null +++ b/docs/api/carvekit/pipelines/postprocessing.html @@ -0,0 +1,226 @@ + + + + + + +carvekit.pipelines.postprocessing API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.postprocessing

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from typing import Union, List
+from PIL import Image
+from pathlib import Path
+from carvekit.trimap.cv_gen import CV2TrimapGenerator
+from carvekit.trimap.generator import TrimapGenerator
+from carvekit.utils.mask_utils import apply_mask
+from carvekit.utils.pool_utils import thread_pool_processing
+from carvekit.utils.image_utils import load_image, convert_image
+
+__all__ = ["MattingMethod"]
+
+
+class MattingMethod:
+    """
+    Improves the edges of the object mask using a matting neural network and trimap-generation algorithms.
+    The matting network performs accurate object edge detection using a special map called a trimap,
+    which marks the unknown boundary region to be resolved, the known object area and the background."""
+
+    def __init__(
+        self,
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes Matting Method class.
+
+        Args:
+        - `matting_module`: Initialized matting neural network class
+        - `trimap_generator`: Initialized trimap generator class
+        - `device`: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+        - `images`: list of images
+        - `masks`: list of masks
+
+        Returns:
+        list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(original_image=images[x], mask=masks[x]),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class MattingMethod +(matting_module: FBAMatting, trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator], device='cpu') +
+
+

Improves the edges of the object mask using a matting neural network and trimap-generation algorithms. +The matting network performs accurate object edge detection using a special map called a trimap, +which marks the unknown boundary region to be resolved, the known object area and the background.

+

Initializes Matting Method class.

+

Args: +- matting_module: Initialized matting neural network class +- trimap_generator: Initialized trimap generator class +- device: Processing device used for applying mask to image

+
+ +Expand source code + +
class MattingMethod:
+    """
+    Improves the edges of the object mask using a matting neural network and trimap-generation algorithms.
+    The matting network performs accurate object edge detection using a special map called a trimap,
+    which marks the unknown boundary region to be resolved, the known object area and the background."""
+
+    def __init__(
+        self,
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes Matting Method class.
+
+        Args:
+        - `matting_module`: Initialized matting neural network class
+        - `trimap_generator`: Initialized trimap generator class
+        - `device`: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+        - `images`: list of images
+    - `masks`: list of masks
+
+        Returns:
+        list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(original_image=images[x], mask=masks[x]),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
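A minimal sketch of assembling this legacy matting pipeline by hand. The FBAMatting and TrimapGenerator constructor arguments below are assumptions modelled on the other wrappers (check their own documentation for exact signatures), and the mask file is assumed to come from a segmentation wrapper such as TracerUniversalB7.

``` python
from PIL import Image
from carvekit.ml.wrap.fba_matting import FBAMatting
from carvekit.trimap.generator import TrimapGenerator
from carvekit.pipelines.postprocessing import MattingMethod

# NOTE: the keyword arguments of FBAMatting/TrimapGenerator are assumptions here.
fba = FBAMatting(device='cpu', batch_size=1)
trimap_generator = TrimapGenerator()

postprocess = MattingMethod(matting_module=fba,
                            trimap_generator=trimap_generator,
                            device='cpu')

image = Image.open('./tests/data/cat.jpg')
mask = Image.open('./cat_mask.png')  # coarse mask produced by a segmentation wrapper
result = postprocess(images=[image], masks=[mask])[0]
result.save('cat_no_bg.png')
```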
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/postprocessing/casmatting.html b/docs/api/carvekit/pipelines/postprocessing/casmatting.html new file mode 100644 index 0000000..16732cc --- /dev/null +++ b/docs/api/carvekit/pipelines/postprocessing/casmatting.html @@ -0,0 +1,245 @@ + + + + + + +carvekit.pipelines.postprocessing.casmatting API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.postprocessing.casmatting

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from carvekit.ml.wrap.cascadepsp import CascadePSP
+from typing import Union, List
+from PIL import Image
+from pathlib import Path
+from carvekit.trimap.cv_gen import CV2TrimapGenerator
+from carvekit.trimap.generator import TrimapGenerator
+from carvekit.utils.mask_utils import apply_mask
+from carvekit.utils.pool_utils import thread_pool_processing
+from carvekit.utils.image_utils import load_image, convert_image
+
+__all__ = ["CasMattingMethod"]
+
+
+class CasMattingMethod:
+    """
+    Improve segmentation quality by refining segmentation with the CascadePSP model
+    and post-processing the segmentation with the FBAMatting model
+    """
+
+    def __init__(
+        self,
+        refining_module: Union[CascadePSP],
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes CasMattingMethod class.
+
+        Args:
+            refining_module: Initialized refining network
+            matting_module: Initialized matting neural network class
+            trimap_generator: Initialized trimap generator class
+            device: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.refining_module = refining_module
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+            images: list of images
+            masks: list of masks
+
+        Returns:
+            list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        refined_masks = self.refining_module(images, masks)
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(
+                original_image=images[x], mask=refined_masks[x]
+            ),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class CasMattingMethod +(refining_module: CascadePSP, matting_module: FBAMatting, trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator], device='cpu') +
+
+

Improve segmentation quality by refining segmentation with the CascadePSP model +and post-processing the segmentation with the FBAMatting model

+

Initializes CasMattingMethod class.

+

Args

+
+
refining_module
+
Initialized refining network
+
matting_module
+
Initialized matting neural network class
+
trimap_generator
+
Initialized trimap generator class
+
device
+
Processing device used for applying mask to image
+
+
+ +Expand source code + +
class CasMattingMethod:
+    """
+    Improve segmentation quality by refining segmentation with the CascadePSP model
+    and post-processing the segmentation with the FBAMatting model
+    """
+
+    def __init__(
+        self,
+        refining_module: Union[CascadePSP],
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes CasMattingMethod class.
+
+        Args:
+            refining_module: Initialized refining network
+            matting_module: Initialized matting neural network class
+            trimap_generator: Initialized trimap generator class
+            device: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.refining_module = refining_module
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+            images: list of images
+            masks: list of masks
+
+        Returns:
+            list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        refined_masks = self.refining_module(images, masks)
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(
+                original_image=images[x], mask=refined_masks[x]
+            ),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
+
+
+
+ +
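A minimal sketch of calling `CasMattingMethod` directly with a pre-computed segmentation mask, outside of a full `Interface`. The mask path and the `CascadePSP`/`FBAMatting` constructor arguments are illustrative assumptions; check each wrapper's docstring for the exact parameters.

```python
import torch

from carvekit.ml.wrap.cascadepsp import CascadePSP
from carvekit.ml.wrap.fba_matting import FBAMatting
from carvekit.pipelines.postprocessing import CasMattingMethod
from carvekit.trimap.generator import TrimapGenerator

device = "cuda" if torch.cuda.is_available() else "cpu"

postprocessing = CasMattingMethod(
    refining_module=CascadePSP(device=device, batch_size=1),   # assumed constructor args
    matting_module=FBAMatting(device=device, batch_size=1),    # assumed constructor args
    trimap_generator=TrimapGenerator(prob_threshold=231, kernel_size=30, erosion_iters=5),
    device=device,
)

# Both lists accept file paths or PIL images and must have the same length.
results = postprocessing(
    images=["./tests/data/cat.jpg"],
    masks=["./tests/data/cat_mask.png"],  # hypothetical mask produced by a segmentation network
)
results[0].save("cat_refined.png")
```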
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/postprocessing/index.html b/docs/api/carvekit/pipelines/postprocessing/index.html new file mode 100644 index 0000000..18d0b62 --- /dev/null +++ b/docs/api/carvekit/pipelines/postprocessing/index.html @@ -0,0 +1,81 @@ + + + + + + +carvekit.pipelines.postprocessing API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.postprocessing

+
+
+
+ +Expand source code + +
from carvekit.pipelines.postprocessing.matting import MattingMethod
+from carvekit.pipelines.postprocessing.casmatting import CasMattingMethod
+
+
+
+

Sub-modules

+
+
carvekit.pipelines.postprocessing.casmatting
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.pipelines.postprocessing.matting
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/postprocessing/matting.html b/docs/api/carvekit/pipelines/postprocessing/matting.html new file mode 100644 index 0000000..677908e --- /dev/null +++ b/docs/api/carvekit/pipelines/postprocessing/matting.html @@ -0,0 +1,226 @@ + + + + + + +carvekit.pipelines.postprocessing.matting API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.postprocessing.matting

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from typing import Union, List
+from PIL import Image
+from pathlib import Path
+from carvekit.trimap.cv_gen import CV2TrimapGenerator
+from carvekit.trimap.generator import TrimapGenerator
+from carvekit.utils.mask_utils import apply_mask
+from carvekit.utils.pool_utils import thread_pool_processing
+from carvekit.utils.image_utils import load_image, convert_image
+
+__all__ = ["MattingMethod"]
+
+
+class MattingMethod:
+    """
+    Improves the edges of the object mask using a matting neural network and a trimap-generation algorithm.
+    The matting neural network performs accurate object edge detection by using a special map called a trimap,
+    which marks the unknown boundary area, the already known object area and the background."""
+
+    def __init__(
+        self,
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes Matting Method class.
+
+        Args:
+        - `matting_module`: Initialized matting neural network class
+        - `trimap_generator`: Initialized trimap generator class
+        - `device`: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+        - `images`: list of images
+        - `masks`: list of masks
+
+        Returns:
+        list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(original_image=images[x], mask=masks[x]),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class MattingMethod +(matting_module:Β FBAMatting, trimap_generator:Β Union[TrimapGenerator,Β CV2TrimapGenerator], device='cpu') +
+
+

Improves the edges of the object mask using a matting neural network and a trimap-generation algorithm. +The matting neural network performs accurate object edge detection by using a special map called a trimap, +which marks the unknown boundary area, the already known object area and the background.

+

Initializes Matting Method class.

+

Args: +- matting_module: Initialized matting neural network class +- trimap_generator: Initialized trimap generator class +- device: Processing device used for applying mask to image

+
+ +Expand source code + +
class MattingMethod:
+    """
+    Improves the edges of the object mask using a matting neural network and a trimap-generation algorithm.
+    The matting neural network performs accurate object edge detection by using a special map called a trimap,
+    which marks the unknown boundary area, the already known object area and the background."""
+
+    def __init__(
+        self,
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes Matting Method class.
+
+        Args:
+        - `matting_module`: Initialized matting neural network class
+        - `trimap_generator`: Initialized trimap generator class
+        - `device`: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+        - `images`: list of images
+        - `masks`: list of masks
+
+        Returns:
+        list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(original_image=images[x], mask=masks[x]),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
+
+
+
+ +
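For comparison, `MattingMethod` is the lighter pipeline behind the `fba` post-processing method: it has the same call signature as `CasMattingMethod`, but skips the CascadePSP refinement step. A minimal sketch, with the `FBAMatting` arguments and the mask path as assumptions:

```python
from carvekit.ml.wrap.fba_matting import FBAMatting
from carvekit.pipelines.postprocessing import MattingMethod
from carvekit.trimap.generator import TrimapGenerator

postprocessing = MattingMethod(
    matting_module=FBAMatting(device="cpu", batch_size=1),  # assumed constructor args
    trimap_generator=TrimapGenerator(),
    device="cpu",
)
results = postprocessing(images=["./tests/data/cat.jpg"],
                         masks=["./tests/data/cat_mask.png"])  # hypothetical pre-computed mask
results[0].save("cat_matted.png")
```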
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/preprocessing.html b/docs/api/carvekit/pipelines/preprocessing.html new file mode 100644 index 0000000..5e6f830 --- /dev/null +++ b/docs/api/carvekit/pipelines/preprocessing.html @@ -0,0 +1,127 @@ + + + + + + +carvekit.pipelines.preprocessing API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.preprocessing

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from pathlib import Path
+from typing import Union, List
+
+from PIL import Image
+
+__all__ = ["PreprocessingStub"]
+
+
+class PreprocessingStub:
+    """Stub for future preprocessing methods"""
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Passes data through `interface.segmentation_pipeline()` method
+
+        Args:
+        - `interface`: Interface instance
+        - `images`: list of images
+
+        Returns:
+            the result of passing data through segmentation_pipeline method of interface
+        """
+        return interface.segmentation_pipeline(images=images)
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class PreprocessingStub +
+
+

Stub for future preprocessing methods

+
+ +Expand source code + +
class PreprocessingStub:
+    """Stub for future preprocessing methods"""
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Passes data through `interface.segmentation_pipeline()` method
+
+        Args:
+        - `interface`: Interface instance
+        - `images`: list of images
+
+        Returns:
+            the result of passing data through segmentation_pipeline method of interface
+        """
+        return interface.segmentation_pipeline(images=images)
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/preprocessing/autoscene.html b/docs/api/carvekit/pipelines/preprocessing/autoscene.html new file mode 100644 index 0000000..cddfecd --- /dev/null +++ b/docs/api/carvekit/pipelines/preprocessing/autoscene.html @@ -0,0 +1,279 @@ + + + + + + +carvekit.pipelines.preprocessing.autoscene API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.preprocessing.autoscene

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from pathlib import Path
+
+from PIL import Image
+from typing import Union, List
+
+from carvekit.ml.wrap.scene_classifier import SceneClassifier
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.ml.wrap.u2net import U2NET
+
+__all__ = ["AutoScene"]
+
+
+class AutoScene:
+    """AutoScene preprocessing method"""
+
+    def __init__(self, scene_classifier: SceneClassifier):
+        """
+        Args:
+            scene_classifier: SceneClassifier instance
+        """
+        self.scene_classifier = scene_classifier
+
+    @staticmethod
+    def select_net(scene: str):
+        """
+        Selects the network to be used for segmentation based on the detected scene
+
+        Args:
+            scene: scene name
+        """
+        if scene == "hard":
+            return TracerUniversalB7
+        elif scene == "soft":
+            return U2NET
+        elif scene == "digital":
+            return TracerUniversalB7  # TODO: not implemented yet
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Automatically detects the scene and selects the appropriate network for segmentation
+
+        Args:
+            interface: Interface instance
+            images: list of images
+
+        Returns:
+            list of masks
+        """
+        scene_analysis = self.scene_classifier(images)
+        images_per_scene = {}
+        for i, image in enumerate(images):
+            scene_name = scene_analysis[i][0][0]
+            if scene_name not in images_per_scene:
+                images_per_scene[scene_name] = []
+            images_per_scene[scene_name].append(image)
+
+        masks_per_scene = {}
+        for scene_name, igs in list(images_per_scene.items()):
+            net = self.select_net(scene_name)
+            if isinstance(interface.segmentation_pipeline, net):
+                masks_per_scene[scene_name] = interface.segmentation_pipeline(igs)
+            else:
+                old_device = interface.segmentation_pipeline.device
+                interface.segmentation_pipeline.to(
+                    "cpu"
+                )  # unload model from gpu, to avoid OOM
+                net_instance = net(device=old_device)
+                masks_per_scene[scene_name] = net_instance(igs)
+                del net_instance
+                interface.segmentation_pipeline.to(old_device)  # load model back to gpu
+
+        # restore one list of masks with the same order as images
+        masks = []
+        for i, image in enumerate(images):
+            scene_name = scene_analysis[i][0][0]
+            masks.append(
+                masks_per_scene[scene_name][images_per_scene[scene_name].index(image)]
+            )
+
+        return masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class AutoScene +(scene_classifier:Β SceneClassifier) +
+
+

AutoScene preprocessing method

+

Args

+
+
scene_classifier
+
SceneClassifier instance
+
+
+ +Expand source code + +
class AutoScene:
+    """AutoScene preprocessing method"""
+
+    def __init__(self, scene_classifier: SceneClassifier):
+        """
+        Args:
+            scene_classifier: SceneClassifier instance
+        """
+        self.scene_classifier = scene_classifier
+
+    @staticmethod
+    def select_net(scene: str):
+        """
+        Selects the network to be used for segmentation based on the detected scene
+
+        Args:
+            scene: scene name
+        """
+        if scene == "hard":
+            return TracerUniversalB7
+        elif scene == "soft":
+            return U2NET
+        elif scene == "digital":
+            return TracerUniversalB7  # TODO: not implemented yet
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Automatically detects the scene and selects the appropriate network for segmentation
+
+        Args:
+            interface: Interface instance
+            images: list of images
+
+        Returns:
+            list of masks
+        """
+        scene_analysis = self.scene_classifier(images)
+        images_per_scene = {}
+        for i, image in enumerate(images):
+            scene_name = scene_analysis[i][0][0]
+            if scene_name not in images_per_scene:
+                images_per_scene[scene_name] = []
+            images_per_scene[scene_name].append(image)
+
+        masks_per_scene = {}
+        for scene_name, igs in list(images_per_scene.items()):
+            net = self.select_net(scene_name)
+            if isinstance(interface.segmentation_pipeline, net):
+                masks_per_scene[scene_name] = interface.segmentation_pipeline(igs)
+            else:
+                old_device = interface.segmentation_pipeline.device
+                interface.segmentation_pipeline.to(
+                    "cpu"
+                )  # unload model from gpu, to avoid OOM
+                net_instance = net(device=old_device)
+                masks_per_scene[scene_name] = net_instance(igs)
+                del net_instance
+                interface.segmentation_pipeline.to(old_device)  # load model back to gpu
+
+        # restore one list of masks with the same order as images
+        masks = []
+        for i, image in enumerate(images):
+            scene_name = scene_analysis[i][0][0]
+            masks.append(
+                masks_per_scene[scene_name][images_per_scene[scene_name].index(image)]
+            )
+
+        return masks
+
+

Static methods

+
+
+def select_net(scene:Β str) +
+
+

Selects the network to be used for segmentation based on the detected scene

+

Args

+
+
scene
+
scene name
+
+
+ +Expand source code + +
@staticmethod
+def select_net(scene: str):
+    """
+    Selects the network to be used for segmentation based on the detected scene
+
+    Args:
+        scene: scene name
+    """
+    if scene == "hard":
+        return TracerUniversalB7
+    elif scene == "soft":
+        return U2NET
+    elif scene == "digital":
+        return TracerUniversalB7  # TODO: not implemented yet
+
+
+
+
+
+
+
+ +
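A minimal sketch of the scene-to-network mapping documented above. `AutoScene` classifies each input image, groups the images by detected scene and runs the matching segmentation network; `select_net` alone can be used to inspect which network a scene label maps to.

```python
from carvekit.ml.wrap.scene_classifier import SceneClassifier
from carvekit.pipelines.preprocessing import AutoScene

# Scene labels map to segmentation networks as in AutoScene.select_net:
print(AutoScene.select_net("hard"))     # TracerUniversalB7
print(AutoScene.select_net("soft"))     # U2NET
print(AutoScene.select_net("digital"))  # TracerUniversalB7 for now (dedicated model not implemented yet)

# As a preprocessing step, AutoScene is called by the interface as
# preprocessing(interface, images) and returns one mask per input image.
preprocessing = AutoScene(scene_classifier=SceneClassifier(device="cpu", batch_size=4))
```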
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/preprocessing/index.html b/docs/api/carvekit/pipelines/preprocessing/index.html new file mode 100644 index 0000000..2c01a4a --- /dev/null +++ b/docs/api/carvekit/pipelines/preprocessing/index.html @@ -0,0 +1,81 @@ + + + + + + +carvekit.pipelines.preprocessing API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.preprocessing

+
+
+
+ +Expand source code + +
from carvekit.pipelines.preprocessing.stub import PreprocessingStub
+from carvekit.pipelines.preprocessing.autoscene import AutoScene
+
+
+
+

Sub-modules

+
+
carvekit.pipelines.preprocessing.autoscene
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.pipelines.preprocessing.stub
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/pipelines/preprocessing/stub.html b/docs/api/carvekit/pipelines/preprocessing/stub.html new file mode 100644 index 0000000..db50267 --- /dev/null +++ b/docs/api/carvekit/pipelines/preprocessing/stub.html @@ -0,0 +1,127 @@ + + + + + + +carvekit.pipelines.preprocessing.stub API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.pipelines.preprocessing.stub

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from pathlib import Path
+from typing import Union, List
+
+from PIL import Image
+
+__all__ = ["PreprocessingStub"]
+
+
+class PreprocessingStub:
+    """Stub for future preprocessing methods"""
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Passes data through `interface.segmentation_pipeline()` method
+
+        Args:
+        - `interface`: Interface instance
+        - `images`: list of images
+
+        Returns:
+            the result of passing data through segmentation_pipeline method of interface
+        """
+        return interface.segmentation_pipeline(images=images)
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class PreprocessingStub +
+
+

Stub for future preprocessing methods

+
+ +Expand source code + +
class PreprocessingStub:
+    """Stub for future preprocessing methods"""
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Passes data through `interface.segmentation_pipeline()` method
+
+        Args:
+        - `interface`: Interface instance
+        - `images`: list of images
+
+        Returns:
+            the result of passing data through segmentation_pipeline method of interface
+        """
+        return interface.segmentation_pipeline(images=images)
+
+
+
+
+
+ +
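`PreprocessingStub` also documents the minimal protocol for a preprocessing step: any callable of the form `(interface, images) -> masks` can be plugged into the pipeline. The class below is a hypothetical sketch of a custom step that prepares the images before delegating segmentation, exactly as the stub does.

```python
from pathlib import Path
from typing import List, Union

from PIL import Image, ImageOps


class AutoContrastPreprocessing:
    """Hypothetical preprocessing step: segment auto-contrasted copies of the inputs."""

    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
        prepared = []
        for img in images:
            pil_img = img if isinstance(img, Image.Image) else Image.open(img)
            prepared.append(ImageOps.autocontrast(pil_img.convert("RGB")))
        # Delegate to the segmentation pipeline, like PreprocessingStub does.
        return interface.segmentation_pipeline(images=prepared)
```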
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/trimap/add_ops.html b/docs/api/carvekit/trimap/add_ops.html new file mode 100644 index 0000000..24e3625 --- /dev/null +++ b/docs/api/carvekit/trimap/add_ops.html @@ -0,0 +1,321 @@ + + + + + + +carvekit.trimap.add_ops API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.trimap.add_ops

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import cv2
+import numpy as np
+from PIL import Image
+
+
+def prob_filter(mask: Image.Image, prob_threshold=231) -> Image.Image:
+    """
+    Filters the mask by the probability that each pixel belongs to the object.
+
+    Args:
+        prob_threshold (int, default=231): Probability threshold at or below which an area is marked as background.
+        mask (Image.Image): Predicted object mask
+
+    Raises:
+        ValueError: if mask or trimap has wrong color mode
+
+    Returns:
+        Image.Image: generated trimap for image.
+    """
+    if mask.mode != "L":
+        raise ValueError("Input mask has wrong color mode.")
+    # noinspection PyTypeChecker
+    mask_array = np.array(mask)
+    mask_array[mask_array > prob_threshold] = 255  # Probability filter for mask
+    mask_array[mask_array <= prob_threshold] = 0
+    return Image.fromarray(mask_array).convert("L")
+
+
+def prob_as_unknown_area(
+    trimap: Image.Image, mask: Image.Image, prob_threshold=255
+) -> Image.Image:
+    """
+    Marks any uncertainty in the seg mask as an unknown region.
+
+    Args:
+        prob_threshold (int, default=255): Probability threshold at or below which an area is marked as unknown.
+        trimap (Image.Image): Generated trimap.
+        mask (Image.Image): Predicted object mask
+
+    Raises:
+        ValueError: if mask or trimap has wrong color mode
+
+    Returns:
+        Image.Image: Generated trimap for image.
+    """
+    if mask.mode != "L" or trimap.mode != "L":
+        raise ValueError("Input mask has wrong color mode.")
+    # noinspection PyTypeChecker
+    mask_array = np.array(mask)
+    # noinspection PyTypeChecker
+    trimap_array = np.array(trimap)
+    trimap_array[np.logical_and(mask_array <= prob_threshold, mask_array > 0)] = 127
+    return Image.fromarray(trimap_array).convert("L")
+
+
+def post_erosion(trimap: Image.Image, erosion_iters=1) -> Image.Image:
+    """
+    Performs erosion on the mask and marks the resulting area as an unknown region.
+
+    Args:
+        erosion_iters (int, default=1): The number of iterations of erosion that
+        the object's mask will be subjected to before forming an unknown area
+        trimap (Image.Image): Generated trimap.
+
+    Returns:
+        Image.Image: Generated trimap for image.
+    """
+    if trimap.mode != "L":
+        raise ValueError("Input mask has wrong color mode.")
+    # noinspection PyTypeChecker
+    trimap_array = np.array(trimap)
+    if erosion_iters > 0:
+        without_unknown_area = trimap_array.copy()
+        without_unknown_area[without_unknown_area == 127] = 0
+
+        erosion_kernel = np.ones((3, 3), np.uint8)
+        erode = cv2.erode(
+            without_unknown_area, erosion_kernel, iterations=erosion_iters
+        )
+        erode = np.where(erode == 0, 0, without_unknown_area)
+        trimap_array[np.logical_and(erode == 0, without_unknown_area > 0)] = 127
+        erode = trimap_array.copy()
+    else:
+        erode = trimap_array.copy()
+    return Image.fromarray(erode).convert("L")
+
+
+
+
+
+
+
+

Functions

+
+
+def post_erosion(trimap:Β PIL.Image.Image, erosion_iters=1) ‑>Β PIL.Image.Image +
+
+

Performs erosion on the mask and marks the resulting area as an unknown region.

+

Args

+
+
erosion_iters : int, default=1
+
The number of iterations of erosion that
+
the object's mask will be subjected to before forming an unknown area
+
trimap : Image.Image
+
Generated trimap.
+
+

Returns

+
+
Image.Image
+
Generated trimap for image.
+
+
+ +Expand source code + +
def post_erosion(trimap: Image.Image, erosion_iters=1) -> Image.Image:
+    """
+    Performs erosion on the mask and marks the resulting area as an unknown region.
+
+    Args:
+        erosion_iters (int, default=1): The number of iterations of erosion that
+        the object's mask will be subjected to before forming an unknown area
+        trimap (Image.Image): Generated trimap.
+
+    Returns:
+        Image.Image: Generated trimap for image.
+    """
+    if trimap.mode != "L":
+        raise ValueError("Input mask has wrong color mode.")
+    # noinspection PyTypeChecker
+    trimap_array = np.array(trimap)
+    if erosion_iters > 0:
+        without_unknown_area = trimap_array.copy()
+        without_unknown_area[without_unknown_area == 127] = 0
+
+        erosion_kernel = np.ones((3, 3), np.uint8)
+        erode = cv2.erode(
+            without_unknown_area, erosion_kernel, iterations=erosion_iters
+        )
+        erode = np.where(erode == 0, 0, without_unknown_area)
+        trimap_array[np.logical_and(erode == 0, without_unknown_area > 0)] = 127
+        erode = trimap_array.copy()
+    else:
+        erode = trimap_array.copy()
+    return Image.fromarray(erode).convert("L")
+
+
+
+def prob_as_unknown_area(trimap:Β PIL.Image.Image, mask:Β PIL.Image.Image, prob_threshold=255) ‑>Β PIL.Image.Image +
+
+

Marks any uncertainty in the seg mask as an unknown region.

+

Args

+
+
prob_threshold : int, default=255
+
Probability threshold at or below which an area is marked as unknown.
+
trimap : Image.Image
+
Generated trimap.
+
mask : Image.Image
+
Predicted object mask
+
+

Raises

+
+
ValueError
+
if mask or trimap has wrong color mode
+
+

Returns

+
+
Image.Image
+
Generated trimap for image.
+
+
+ +Expand source code + +
def prob_as_unknown_area(
+    trimap: Image.Image, mask: Image.Image, prob_threshold=255
+) -> Image.Image:
+    """
+    Marks any uncertainty in the seg mask as an unknown region.
+
+    Args:
+        prob_threshold (int, default=255): Probability threshold at or below which an area is marked as unknown.
+        trimap (Image.Image): Generated trimap.
+        mask (Image.Image): Predicted object mask
+
+    Raises:
+        ValueError: if mask or trimap has wrong color mode
+
+    Returns:
+        Image.Image: Generated trimap for image.
+    """
+    if mask.mode != "L" or trimap.mode != "L":
+        raise ValueError("Input mask has wrong color mode.")
+    # noinspection PyTypeChecker
+    mask_array = np.array(mask)
+    # noinspection PyTypeChecker
+    trimap_array = np.array(trimap)
+    trimap_array[np.logical_and(mask_array <= prob_threshold, mask_array > 0)] = 127
+    return Image.fromarray(trimap_array).convert("L")
+
+
+
+def prob_filter(mask:Β PIL.Image.Image, prob_threshold=231) ‑>Β PIL.Image.Image +
+
+

Filters the mask by the probability that each pixel belongs to the object.

+

Args

+
+
prob_threshold : int, default=231
+
Probability threshold at or below which an area is marked as background.
+
mask : Image.Image
+
Predicted object mask
+
+

Raises

+
+
ValueError
+
if mask or trimap has wrong color mode
+
+

Returns

+
+
Image.Image
+
generated trimap for image.
+
+
+ +Expand source code + +
def prob_filter(mask: Image.Image, prob_threshold=231) -> Image.Image:
+    """
+    Filters the mask by the probability that each pixel belongs to the object.
+
+    Args:
+        prob_threshold (int, default=231): Probability threshold at or below which an area is marked as background.
+        mask (Image.Image): Predicted object mask
+
+    Raises:
+        ValueError: if mask or trimap has wrong color mode
+
+    Returns:
+        Image.Image: generated trimap for image.
+    """
+    if mask.mode != "L":
+        raise ValueError("Input mask has wrong color mode.")
+    # noinspection PyTypeChecker
+    mask_array = np.array(mask)
+    mask_array[mask_array > prob_threshold] = 255  # Probability filter for mask
+    mask_array[mask_array <= prob_threshold] = 0
+    return Image.fromarray(mask_array).convert("L")
+
+
+
+
+
+
+
+ +
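A minimal sketch of chaining these helpers by hand, mirroring what `TrimapGenerator.__call__` does internally (see `carvekit.trimap.generator`). The image and mask paths are illustrative assumptions; the mask must be a single-channel (`L`) image with the same size as the original.

```python
from PIL import Image

from carvekit.trimap.add_ops import post_erosion, prob_as_unknown_area, prob_filter
from carvekit.trimap.cv_gen import CV2TrimapGenerator

image = Image.open("./tests/data/cat.jpg")
mask = Image.open("./tests/data/cat_mask.png").convert("L")  # hypothetical predicted mask

hard_mask = prob_filter(mask=mask, prob_threshold=231)        # binarize the probability mask
trimap = CV2TrimapGenerator(kernel_size=30, erosion_iters=0)(image, hard_mask)
trimap = prob_as_unknown_area(trimap=trimap, mask=mask, prob_threshold=231)
trimap = post_erosion(trimap, erosion_iters=5)                # widen the unknown band by erosion
trimap.save("cat_trimap.png")
```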
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/trimap/cv_gen.html b/docs/api/carvekit/trimap/cv_gen.html new file mode 100644 index 0000000..dbcaf39 --- /dev/null +++ b/docs/api/carvekit/trimap/cv_gen.html @@ -0,0 +1,215 @@ + + + + + + +carvekit.trimap.cv_gen API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.trimap.cv_gen

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import PIL.Image
+import cv2
+import numpy as np
+
+
+class CV2TrimapGenerator:
+    def __init__(self, kernel_size: int = 30, erosion_iters: int = 1):
+        """
+        Initialize a new CV2TrimapGenerator instance
+
+        Args:
+            kernel_size (int, default=30): The size of the offset from the object mask
+            in pixels when an unknown area is detected in the trimap
+            erosion_iters (int, default=1): The number of iterations of erosion that
+            the object's mask will be subjected to before forming an unknown area
+        """
+        self.kernel_size = kernel_size
+        self.erosion_iters = erosion_iters
+
+    def __call__(
+        self, original_image: PIL.Image.Image, mask: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Generates trimap based on predicted object mask to refine object mask borders.
+        Based on cv2 erosion algorithm.
+
+        Args:
+            original_image (PIL.Image.Image): Original image
+            mask (PIL.Image.Image): Predicted object mask
+
+        Returns:
+            PIL.Image.Image: Generated trimap for image.
+        """
+        if mask.mode != "L":
+            raise ValueError("Input mask has wrong color mode.")
+        if mask.size != original_image.size:
+            raise ValueError("Sizes of input image and predicted mask doesn't equal")
+        # noinspection PyTypeChecker
+        mask_array = np.array(mask)
+        pixels = 2 * self.kernel_size + 1
+        kernel = np.ones((pixels, pixels), np.uint8)
+
+        if self.erosion_iters > 0:
+            erosion_kernel = np.ones((3, 3), np.uint8)
+            erode = cv2.erode(mask_array, erosion_kernel, iterations=self.erosion_iters)
+            erode = np.where(erode == 0, 0, mask_array)
+        else:
+            erode = mask_array.copy()
+
+        dilation = cv2.dilate(erode, kernel, iterations=1)
+
+        dilation = np.where(dilation == 255, 127, dilation)  # dilated area: WHITE to GRAY (unknown)
+        trimap = np.where(erode > 127, 200, dilation)  # temporarily mark the known foreground as 200
+
+        trimap = np.where(trimap < 127, 0, trimap)  # clamp stray low values to background
+        trimap = np.where(trimap > 200, 0, trimap)  # drop values above the foreground marker
+        trimap = np.where(trimap == 200, 255, trimap)  # known foreground: GRAY marker to WHITE
+
+        return PIL.Image.fromarray(trimap).convert("L")
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class CV2TrimapGenerator +(kernel_size:Β intΒ =Β 30, erosion_iters:Β intΒ =Β 1) +
+
+

Initialize a new CV2TrimapGenerator instance

+

Args

+
+
kernel_size : int, default=30
+
The size of the offset from the object mask
+
+

in pixels when an unknown area is detected in the trimap +erosion_iters (int, default=1): The number of iterations of erosion that +the object's mask will be subjected to before forming an unknown area

+
+ +Expand source code + +
class CV2TrimapGenerator:
+    def __init__(self, kernel_size: int = 30, erosion_iters: int = 1):
+        """
+        Initialize a new CV2TrimapGenerator instance
+
+        Args:
+            kernel_size (int, default=30): The size of the offset from the object mask
+            in pixels when an unknown area is detected in the trimap
+            erosion_iters (int, default=1): The number of iterations of erosion that
+            the object's mask will be subjected to before forming an unknown area
+        """
+        self.kernel_size = kernel_size
+        self.erosion_iters = erosion_iters
+
+    def __call__(
+        self, original_image: PIL.Image.Image, mask: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Generates trimap based on predicted object mask to refine object mask borders.
+        Based on cv2 erosion algorithm.
+
+        Args:
+            original_image (PIL.Image.Image): Original image
+            mask (PIL.Image.Image): Predicted object mask
+
+        Returns:
+            PIL.Image.Image: Generated trimap for image.
+        """
+        if mask.mode != "L":
+            raise ValueError("Input mask has wrong color mode.")
+        if mask.size != original_image.size:
+            raise ValueError("Sizes of input image and predicted mask doesn't equal")
+        # noinspection PyTypeChecker
+        mask_array = np.array(mask)
+        pixels = 2 * self.kernel_size + 1
+        kernel = np.ones((pixels, pixels), np.uint8)
+
+        if self.erosion_iters > 0:
+            erosion_kernel = np.ones((3, 3), np.uint8)
+            erode = cv2.erode(mask_array, erosion_kernel, iterations=self.erosion_iters)
+            erode = np.where(erode == 0, 0, mask_array)
+        else:
+            erode = mask_array.copy()
+
+        dilation = cv2.dilate(erode, kernel, iterations=1)
+
+        dilation = np.where(dilation == 255, 127, dilation)  # dilated area: WHITE to GRAY (unknown)
+        trimap = np.where(erode > 127, 200, dilation)  # temporarily mark the known foreground as 200
+
+        trimap = np.where(trimap < 127, 0, trimap)  # clamp stray low values to background
+        trimap = np.where(trimap > 200, 0, trimap)  # drop values above the foreground marker
+        trimap = np.where(trimap == 200, 255, trimap)  # known foreground: GRAY marker to WHITE
+
+        return PIL.Image.fromarray(trimap).convert("L")
+
+

Subclasses

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/trimap/generator.html b/docs/api/carvekit/trimap/generator.html new file mode 100644 index 0000000..f241a2f --- /dev/null +++ b/docs/api/carvekit/trimap/generator.html @@ -0,0 +1,187 @@ + + + + + + +carvekit.trimap.generator API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.trimap.generator

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from PIL import Image
+from carvekit.trimap.cv_gen import CV2TrimapGenerator
+from carvekit.trimap.add_ops import prob_filter, prob_as_unknown_area, post_erosion
+
+
+class TrimapGenerator(CV2TrimapGenerator):
+    def __init__(
+        self, prob_threshold: int = 231, kernel_size: int = 30, erosion_iters: int = 5
+    ):
+        """
+        Initialize a TrimapGenerator instance
+
+        Args:
+            prob_threshold (int, default=231): Probability threshold at which the
+            prob_filter and prob_as_unknown_area operations will be applied
+            kernel_size (int, default=30): The size of the offset from the object mask
+            in pixels when an unknown area is detected in the trimap
+            erosion_iters (int, default=5): The number of iterations of erosion that
+            the object's mask will be subjected to before forming an unknown area
+        """
+        super().__init__(kernel_size, erosion_iters=0)
+        self.prob_threshold = prob_threshold
+        self.__erosion_iters = erosion_iters
+
+    def __call__(self, original_image: Image.Image, mask: Image.Image) -> Image.Image:
+        """
+        Generates trimap based on predicted object mask to refine object mask borders.
+        Based on cv2 erosion algorithm and additional prob. filters.
+
+        Args:
+            original_image (Image.Image): Original image
+            mask (Image.Image): Predicted object mask
+
+        Returns:
+            Image.Image: Generated trimap for image.
+        """
+        filter_mask = prob_filter(mask=mask, prob_threshold=self.prob_threshold)
+        trimap = super(TrimapGenerator, self).__call__(original_image, filter_mask)
+        new_trimap = prob_as_unknown_area(
+            trimap=trimap, mask=mask, prob_threshold=self.prob_threshold
+        )
+        new_trimap = post_erosion(new_trimap, self.__erosion_iters)
+        return new_trimap
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class TrimapGenerator +(prob_threshold:Β intΒ =Β 231, kernel_size:Β intΒ =Β 30, erosion_iters:Β intΒ =Β 5) +
+
+

Initialize a TrimapGenerator instance

+

Args

+
+
prob_threshold : int, default=231
+
Probability threshold at which the
+
prob_filter and prob_as_unknown_area operations will be applied
+
kernel_size : int, default=30
+
The size of the offset from the object mask
+
in pixels when an unknown area is detected in the trimap
+
erosion_iters : int, default=5
+
The number of iterations of erosion that
+
+

the object's mask will be subjected to before forming an unknown area

+
+ +Expand source code + +
class TrimapGenerator(CV2TrimapGenerator):
+    def __init__(
+        self, prob_threshold: int = 231, kernel_size: int = 30, erosion_iters: int = 5
+    ):
+        """
+        Initialize a TrimapGenerator instance
+
+        Args:
+            prob_threshold (int, default=231): Probability threshold at which the
+            prob_filter and prob_as_unknown_area operations will be applied
+            kernel_size (int, default=30): The size of the offset from the object mask
+            in pixels when an unknown area is detected in the trimap
+            erosion_iters (int, default=5): The number of iterations of erosion that
+            the object's mask will be subjected to before forming an unknown area
+        """
+        super().__init__(kernel_size, erosion_iters=0)
+        self.prob_threshold = prob_threshold
+        self.__erosion_iters = erosion_iters
+
+    def __call__(self, original_image: Image.Image, mask: Image.Image) -> Image.Image:
+        """
+        Generates trimap based on predicted object mask to refine object mask borders.
+        Based on cv2 erosion algorithm and additional prob. filters.
+
+        Args:
+            original_image (Image.Image): Original image
+            mask (Image.Image): Predicted object mask
+
+        Returns:
+            Image.Image: Generated trimap for image.
+        """
+        filter_mask = prob_filter(mask=mask, prob_threshold=self.prob_threshold)
+        trimap = super(TrimapGenerator, self).__call__(original_image, filter_mask)
+        new_trimap = prob_as_unknown_area(
+            trimap=trimap, mask=mask, prob_threshold=self.prob_threshold
+        )
+        new_trimap = post_erosion(new_trimap, self.__erosion_iters)
+        return new_trimap
+
+

Ancestors

+ +
+
+
+
+ +
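A minimal usage sketch of `TrimapGenerator` with its default parameters, assuming an image and a matching single-channel mask on disk (both paths are illustrative):

```python
from PIL import Image

from carvekit.trimap.generator import TrimapGenerator

trimap_generator = TrimapGenerator(prob_threshold=231, kernel_size=30, erosion_iters=5)
image = Image.open("./tests/data/cat.jpg")
mask = Image.open("./tests/data/cat_mask.png").convert("L")  # hypothetical predicted mask
trimap = trimap_generator(original_image=image, mask=mask)
trimap.save("cat_trimap.png")
```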
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/trimap/index.html b/docs/api/carvekit/trimap/index.html new file mode 100644 index 0000000..315ef68 --- /dev/null +++ b/docs/api/carvekit/trimap/index.html @@ -0,0 +1,81 @@ + + + + + + +carvekit.trimap API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.trimap

+
+
+
+
+

Sub-modules

+
+
carvekit.trimap.add_ops
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.trimap.cv_gen
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.trimap.generator
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/download_models.html b/docs/api/carvekit/utils/download_models.html new file mode 100644 index 0000000..67b6c2d --- /dev/null +++ b/docs/api/carvekit/utils/download_models.html @@ -0,0 +1,780 @@ + + + + + + +carvekit.utils.download_models API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils.download_models

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import hashlib
+import os
+import warnings
+from abc import ABCMeta, abstractmethod, ABC
+from pathlib import Path
+from typing import Optional
+
+import carvekit
+from carvekit.ml.files import checkpoints_dir
+
+import requests
+import tqdm
+
+requests = requests.Session()
+requests.headers.update({"User-Agent": f"Carvekit/{carvekit.version}"})
+
+MODELS_URLS = {
+    "basnet.pth": {
+        "repository": "Carve/basnet-universal",
+        "revision": "870becbdb364fda6d8fdb2c10b072542f8d08701",
+        "filename": "basnet.pth",
+    },
+    "deeplab.pth": {
+        "repository": "Carve/deeplabv3-resnet101",
+        "revision": "d504005392fc877565afdf58aad0cd524682d2b0",
+        "filename": "deeplab.pth",
+    },
+    "fba_matting.pth": {
+        "repository": "Carve/fba",
+        "revision": "a5d3457df0fb9c88ea19ed700d409756ca2069d1",
+        "filename": "fba_matting.pth",
+    },
+    "u2net.pth": {
+        "repository": "Carve/u2net-universal",
+        "revision": "10305d785481cf4b2eee1d447c39cd6e5f43d74b",
+        "filename": "full_weights.pth",
+    },
+    "tracer_b7.pth": {
+        "repository": "Carve/tracer_b7",
+        "revision": "d8a8fd9e7b3fa0d2f1506fe7242966b34381e9c5",
+        "filename": "tracer_b7.pth",
+    },
+    "scene_classifier.pth": {
+        "repository": "Carve/scene_classifier",
+        "revision": "71c8e4c771dd5a20ff0c5c9e3c8f1c9cf8082740",
+        "filename": "scene_classifier.pth",
+    },
+    "yolov4_coco_with_classes.pth": {
+        "repository": "Carve/yolov4_coco",
+        "revision": "e3fc9cd22f86e456d2749d1ae148400f2f950fb3",
+        "filename": "yolov4_coco_with_classes.pth",
+    },
+    "cascadepsp.pth": {
+        "repository": "Carve/cascadepsp",
+        "revision": "3ca1e5e432344b1277bc88d1c6d4265c46cff62f",
+        "filename": "cascadepsp.pth",
+    },
+}
+"""
+All data needed to build path relative to huggingface.co for model download
+"""
+
+MODELS_CHECKSUMS = {
+    "basnet.pth": "e409cb709f4abca87cb11bd44a9ad3f909044a917977ab65244b4c94dd33"
+    "8b1a37755c4253d7cb54526b7763622a094d7b676d34b5e6886689256754e5a5e6ad",
+    "deeplab.pth": "9c5a1795bc8baa267200a44b49ac544a1ba2687d210f63777e4bd715387324469a59b072f8a28"
+    "9cc471c637b367932177e5b312e8ea6351c1763d9ff44b4857c",
+    "fba_matting.pth": "890906ec94c1bfd2ad08707a63e4ccb0955d7f5d25e32853950c24c78"
+    "4cbad2e59be277999defc3754905d0f15aa75702cdead3cfe669ff72f08811c52971613",
+    "u2net.pth": "16f8125e2fedd8c85db0e001ee15338b4aa2fda77bab8ba70c25e"
+    "bea1533fda5ee70a909b934a9bd495b432cef89d629f00a07858a517742476fa8b346de24f7",
+    "tracer_b7.pth": "c439c5c12d4d43d5f9be9ec61e68b2e54658a541bccac2577ef5a54fb252b6e8415d41f7e"
+    "c2487033d0c02b4dd08367958e4e62091318111c519f93e2632be7b",
+    "scene_classifier.pth": "6d8692510abde453b406a1fea557afdea62fd2a2a2677283a3ecc2"
+    "341a4895ee99ed65cedcb79b80775db14c3ffcfc0aad2caec1d85140678852039d2d4e76b4",
+    "yolov4_coco_with_classes.pth": "44b6ec2dd35dc3802bf8c512002f76e00e97bfbc86bc7af6de2fafce229a41b4ca"
+    "12c6f3d7589278c71cd4ddd62df80389b148c19b84fa03216905407a107fff",
+    "cascadepsp.pth": "3f895f5126d80d6f73186f045557ea7c8eab4dfa3d69a995815bb2c03d564573f36c474f04d7bf0022a27829f583a1a793b036adf801cb423e41a4831b830122",
+}
+"""
+Model -> checksum dictionary
+"""
+
+
+def sha512_checksum_calc(file: Path) -> str:
+    """
+    Calculates the SHA512 hash digest of a file on fs
+
+    Args:
+        file (Path): Path to the file
+
+    Returns:
+        SHA512 hash digest of a file.
+    """
+    dd = hashlib.sha512()
+    with file.open("rb") as f:
+        for chunk in iter(lambda: f.read(4096), b""):
+            dd.update(chunk)
+    return dd.hexdigest()
+
+
+class CachedDownloader:
+    """
+    Abstract base class for model downloaders.
+    """
+
+    __metaclass__ = ABCMeta
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        return self.__class__.__name__
+
+    @property
+    @abstractmethod
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        """
+        Property MAY be overridden in subclasses.
+        Used when this downloader fails to download a model, so the preferred downloader SHOULD be placed higher in the hierarchy.
+        A less preferred downloader SHOULD be provided by this property.
+        """
+        pass
+
+    def download_model(self, file_name: str) -> Path:
+        """
+        Downloads model from the internet and saves it to the cache.
+
+        Behavior:
+            If model is already downloaded it will be loaded from the cache.
+
+            If model is already downloaded, but checksum is invalid, it will be downloaded again.
+
+            If model download failed, fallback downloader will be used.
+        """
+        try:
+            return self.download_model_base(file_name)
+        except BaseException as e:
+            if self.fallback_downloader is not None:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" Trying to download from {self.fallback_downloader.name} downloader."
+                )
+                return self.fallback_downloader.download_model(file_name)
+            else:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" No fallback downloader available."
+                )
+                raise e
+
+    @abstractmethod
+    def download_model_base(self, model_name: str) -> Path:
+        """
+        Download model from any source if not cached.
+        Returns:
+            pathlib.Path: Path to the downloaded model.
+        """
+
+    def __call__(self, model_name: str):
+        return self.download_model(model_name)
+
+
+class HuggingFaceCompatibleDownloader(CachedDownloader, ABC):
+    """
+    Downloader for models from HuggingFace Hub.
+    Private models are not supported.
+    """
+
+    def __init__(
+        self,
+        name: str = "Huggingface.co",
+        base_url: str = "https://huggingface.co",
+        fb_downloader: Optional["CachedDownloader"] = None,
+    ):
+        self.cache_dir = checkpoints_dir
+        """SHOULD be same for all instances to prevent downloading same model multiple times
+        Points to ~/.cache/carvekit/checkpoints"""
+        self.base_url = base_url
+        """MUST be a base url with protocol and domain name to huggingface or another, compatible in terms of models downloading API source"""
+        self._name = name
+        self._fallback_downloader = fb_downloader
+
+    @property
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        return self._fallback_downloader
+
+    @property
+    def name(self):
+        return self._name
+
+    def check_for_existence(self, model_name: str) -> Optional[Path]:
+        """
+        Checks if model is already downloaded and cached. Verifies file integrity by checksum.
+        Returns:
+            Optional[pathlib.Path]: Path to the cached model if cached.
+        """
+        if model_name not in MODELS_URLS.keys():
+            raise FileNotFoundError("Unknown model!")
+        path = (
+            self.cache_dir
+            / MODELS_URLS[model_name]["repository"].split("/")[1]
+            / model_name
+        )
+
+        if not path.exists():
+            return None
+
+        if MODELS_CHECKSUMS[path.name] != sha512_checksum_calc(path):
+            warnings.warn(
+                f"Invalid checksum for model {path.name}. Downloading correct model!"
+            )
+            os.remove(path)
+            return None
+        return path
+
+    def download_model_base(self, model_name: str) -> Path:
+        cached_path = self.check_for_existence(model_name)
+        if cached_path is not None:
+            return cached_path
+        else:
+            cached_path = (
+                self.cache_dir
+                / MODELS_URLS[model_name]["repository"].split("/")[1]
+                / model_name
+            )
+            cached_path.parent.mkdir(parents=True, exist_ok=True)
+            url = MODELS_URLS[model_name]
+            hugging_face_url = f"{self.base_url}/{url['repository']}/resolve/{url['revision']}/{url['filename']}"
+
+            try:
+                r = requests.get(hugging_face_url, stream=True, timeout=10)
+                if r.status_code < 400:
+                    with open(cached_path, "wb") as f:
+                        r.raw.decode_content = True
+                        for chunk in tqdm.tqdm(
+                            r,
+                            desc="Downloading " + cached_path.name + " model",
+                            colour="blue",
+                        ):
+                            f.write(chunk)
+                else:
+                    if r.status_code == 404:
+                        raise FileNotFoundError(f"Model {model_name} not found!")
+                    else:
+                        raise ConnectionError(
+                            f"Error {r.status_code} while downloading model {model_name}!"
+                        )
+            except BaseException as e:
+                if cached_path.exists():
+                    os.remove(cached_path)
+                raise ConnectionError(
+                    f"Exception caught when downloading model! "
+                    f"Model name: {cached_path.name}. Exception: {str(e)}."
+                )
+            return cached_path
+
+
+fallback_downloader: CachedDownloader = HuggingFaceCompatibleDownloader()
+downloader: CachedDownloader = HuggingFaceCompatibleDownloader(
+    base_url="https://cdn.carve.photos",
+    fb_downloader=fallback_downloader,
+    name="Carve CDN",
+)
+downloader._fallback_downloader = fallback_downloader
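A minimal sketch of fetching a checkpoint through the module-level `downloader` defined above: the call first tries the Carve CDN mirror and falls back to huggingface.co, returning a `pathlib.Path` inside the checkpoints cache.

```python
from carvekit.utils.download_models import downloader

# Downloads (or reuses a cached copy of) the scene classifier checkpoint.
checkpoint_path = downloader("scene_classifier.pth")
print(checkpoint_path)  # e.g. ~/.cache/carvekit/checkpoints/scene_classifier/scene_classifier.pth
```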
+
+
+
+
+
+

Global variables

+
+
var MODELS_CHECKSUMS
+
+

Model -> checksum dictionary

+
+
var MODELS_URLS
+
+

All data needed to build path relative to huggingface.co for model download

+
+
+
+
+

Functions

+
+
+def sha512_checksum_calc(file:Β pathlib.Path) ‑>Β str +
+
+

Calculates the SHA512 hash digest of a file on fs

+

Args

+
+
file : Path
+
Path to the file
+
+

Returns

+

SHA512 hash digest of a file.

+
+ +Expand source code + +
def sha512_checksum_calc(file: Path) -> str:
+    """
+    Calculates the SHA512 hash digest of a file on fs
+
+    Args:
+        file (Path): Path to the file
+
+    Returns:
+        SHA512 hash digest of a file.
+    """
+    dd = hashlib.sha512()
+    with file.open("rb") as f:
+        for chunk in iter(lambda: f.read(4096), b""):
+            dd.update(chunk)
+    return dd.hexdigest()
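A short sketch of verifying an already-downloaded checkpoint against the published checksum table. The `tracer_b7` cache sub-directory follows the layout used by `check_for_existence` (repository name without the `Carve/` prefix); adjust the file name for other models.

```python
from carvekit.ml.files import checkpoints_dir
from carvekit.utils.download_models import MODELS_CHECKSUMS, sha512_checksum_calc

model_path = checkpoints_dir / "tracer_b7" / "tracer_b7.pth"
if model_path.exists():
    ok = sha512_checksum_calc(model_path) == MODELS_CHECKSUMS[model_path.name]
    print(f"{model_path.name}: {'checksum OK' if ok else 'checksum mismatch'}")
```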
+
+
+
+
+
+

Classes

+
+
+class CachedDownloader +
+
+

Abstract base class for model downloaders.

+
+ +Expand source code + +
class CachedDownloader:
+    """
+    Abstract base class for model downloaders.
+    """
+
+    __metaclass__ = ABCMeta
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        return self.__class__.__name__
+
+    @property
+    @abstractmethod
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        """
+        Property MAY be overridden in subclasses.
+        Used when this downloader fails to download a model, so the preferred downloader SHOULD be placed higher in the hierarchy.
+        A less preferred downloader SHOULD be provided by this property.
+        """
+        pass
+
+    def download_model(self, file_name: str) -> Path:
+        """
+        Downloads model from the internet and saves it to the cache.
+
+        Behavior:
+            If model is already downloaded it will be loaded from the cache.
+
+            If model is already downloaded, but checksum is invalid, it will be downloaded again.
+
+            If model download failed, fallback downloader will be used.
+        """
+        try:
+            return self.download_model_base(file_name)
+        except BaseException as e:
+            if self.fallback_downloader is not None:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" Trying to download from {self.fallback_downloader.name} downloader."
+                )
+                return self.fallback_downloader.download_model(file_name)
+            else:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" No fallback downloader available."
+                )
+                raise e
+
+    @abstractmethod
+    def download_model_base(self, model_name: str) -> Path:
+        """
+        Download model from any source if not cached.
+        Returns:
+            pathlib.Path: Path to the downloaded model.
+        """
+
+    def __call__(self, model_name: str):
+        return self.download_model(model_name)
+
+

Subclasses

+ +

Instance variables

+
+
var fallback_downloader :Β Optional[CachedDownloader]
+
+

Property MAY be overridden in subclasses. +Used when this downloader fails to download a model, so the preferred downloader SHOULD be placed higher in the hierarchy. +A less preferred downloader SHOULD be provided by this property.

+
+ +Expand source code + +
@property
+@abstractmethod
+def fallback_downloader(self) -> Optional["CachedDownloader"]:
+    """
+    Property MAY be overridden in subclasses.
+    Used when this downloader fails to download a model, so the preferred downloader SHOULD be placed higher in the hierarchy.
+    A less preferred downloader SHOULD be provided by this property.
+    """
+    pass
+
+
+
var name :Β str
+
+
+
+ +Expand source code + +
@property
+@abstractmethod
+def name(self) -> str:
+    return self.__class__.__name__
+
+
+
+

Methods

+
+
+def download_model(self, file_name:Β str) ‑>Β pathlib.Path +
+
+

Downloads model from the internet and saves it to the cache.

+

Behavior

+

If model is already downloaded it will be loaded from the cache.

+

If model is already downloaded, but checksum is invalid, it will be downloaded again.

+

If model download failed, fallback downloader will be used.

+
+ +Expand source code + +
def download_model(self, file_name: str) -> Path:
+    """
+    Downloads model from the internet and saves it to the cache.
+
+    Behavior:
+        If model is already downloaded it will be loaded from the cache.
+
+        If model is already downloaded, but checksum is invalid, it will be downloaded again.
+
+        If model download failed, fallback downloader will be used.
+    """
+    try:
+        return self.download_model_base(file_name)
+    except BaseException as e:
+        if self.fallback_downloader is not None:
+            warnings.warn(
+                f"Failed to download model from {self.name} downloader."
+                f" Trying to download from {self.fallback_downloader.name} downloader."
+            )
+            return self.fallback_downloader.download_model(file_name)
+        else:
+            warnings.warn(
+                f"Failed to download model from {self.name} downloader."
+                f" No fallback downloader available."
+            )
+            raise e
+
+
+
+def download_model_base(self, model_name:Β str) ‑>Β pathlib.Path +
+
+

Download model from any source if not cached.

+

Returns

+
+
pathlib.Path
+
Path to the downloaded model.
+
+
+ +Expand source code + +
@abstractmethod
+def download_model_base(self, model_name: str) -> Path:
+    """
+    Download model from any source if not cached.
+    Returns:
+        pathlib.Path: Path to the downloaded model.
+    """
+
+
+
+
+
+class HuggingFaceCompatibleDownloader +(name:Β strΒ =Β 'Huggingface.co', base_url:Β strΒ =Β 'https://huggingface.co', fb_downloader:Β Optional[ForwardRef('CachedDownloader')]Β =Β None) +
+
+

Downloader for models from HuggingFace Hub. +Private models are not supported.

+
+ +Expand source code + +
class HuggingFaceCompatibleDownloader(CachedDownloader, ABC):
+    """
+    Downloader for models from HuggingFace Hub.
+    Private models are not supported.
+    """
+
+    def __init__(
+        self,
+        name: str = "Huggingface.co",
+        base_url: str = "https://huggingface.co",
+        fb_downloader: Optional["CachedDownloader"] = None,
+    ):
+        self.cache_dir = checkpoints_dir
+        """SHOULD be same for all instances to prevent downloading same model multiple times
+        Points to ~/.cache/carvekit/checkpoints"""
+        self.base_url = base_url
+        """MUST be a base url with protocol and domain name to huggingface or another, compatible in terms of models downloading API source"""
+        self._name = name
+        self._fallback_downloader = fb_downloader
+
+    @property
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        return self._fallback_downloader
+
+    @property
+    def name(self):
+        return self._name
+
+    def check_for_existence(self, model_name: str) -> Optional[Path]:
+        """
+        Checks if model is already downloaded and cached. Verifies file integrity by checksum.
+        Returns:
+            Optional[pathlib.Path]: Path to the cached model if cached.
+        """
+        if model_name not in MODELS_URLS.keys():
+            raise FileNotFoundError("Unknown model!")
+        path = (
+            self.cache_dir
+            / MODELS_URLS[model_name]["repository"].split("/")[1]
+            / model_name
+        )
+
+        if not path.exists():
+            return None
+
+        if MODELS_CHECKSUMS[path.name] != sha512_checksum_calc(path):
+            warnings.warn(
+                f"Invalid checksum for model {path.name}. Downloading correct model!"
+            )
+            os.remove(path)
+            return None
+        return path
+
+    def download_model_base(self, model_name: str) -> Path:
+        cached_path = self.check_for_existence(model_name)
+        if cached_path is not None:
+            return cached_path
+        else:
+            cached_path = (
+                self.cache_dir
+                / MODELS_URLS[model_name]["repository"].split("/")[1]
+                / model_name
+            )
+            cached_path.parent.mkdir(parents=True, exist_ok=True)
+            url = MODELS_URLS[model_name]
+            hugging_face_url = f"{self.base_url}/{url['repository']}/resolve/{url['revision']}/{url['filename']}"
+
+            try:
+                r = requests.get(hugging_face_url, stream=True, timeout=10)
+                if r.status_code < 400:
+                    with open(cached_path, "wb") as f:
+                        r.raw.decode_content = True
+                        for chunk in tqdm.tqdm(
+                            r,
+                            desc="Downloading " + cached_path.name + " model",
+                            colour="blue",
+                        ):
+                            f.write(chunk)
+                else:
+                    if r.status_code == 404:
+                        raise FileNotFoundError(f"Model {model_name} not found!")
+                    else:
+                        raise ConnectionError(
+                            f"Error {r.status_code} while downloading model {model_name}!"
+                        )
+            except BaseException as e:
+                if cached_path.exists():
+                    os.remove(cached_path)
+                raise ConnectionError(
+                    f"Exception caught when downloading model! "
+                    f"Model name: {cached_path.name}. Exception: {str(e)}."
+                )
+            return cached_path
+
+

Ancestors

+ +

Instance variables

+
+
var base_url
+
+

MUST be a base URL (protocol and domain name) pointing to huggingface.co or another source compatible with its model-download API

+
+
var cache_dir
+
+

SHOULD be the same for all instances to prevent downloading the same model multiple times. +Points to ~/.cache/carvekit/checkpoints

+
+
var name
+
+
+
+ +Expand source code + +
@property
+def name(self):
+    return self._name
+
+
+
+

Methods

+
+
+def check_for_existence(self, model_name:Β str) ‑>Β Optional[pathlib.Path] +
+
+

Checks if model is already downloaded and cached. Verifies file integrity by checksum.

+

Returns

+
+
Optional[pathlib.Path]
+
Path to the cached model if cached.
+
+
+ +Expand source code + +
def check_for_existence(self, model_name: str) -> Optional[Path]:
+    """
+    Checks if model is already downloaded and cached. Verifies file integrity by checksum.
+    Returns:
+        Optional[pathlib.Path]: Path to the cached model if cached.
+    """
+    if model_name not in MODELS_URLS.keys():
+        raise FileNotFoundError("Unknown model!")
+    path = (
+        self.cache_dir
+        / MODELS_URLS[model_name]["repository"].split("/")[1]
+        / model_name
+    )
+
+    if not path.exists():
+        return None
+
+    if MODELS_CHECKSUMS[path.name] != sha512_checksum_calc(path):
+        warnings.warn(
+            f"Invalid checksum for model {path.name}. Downloading correct model!"
+        )
+        os.remove(path)
+        return None
+    return path
+
+
+
+

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/fs_utils.html b/docs/api/carvekit/utils/fs_utils.html new file mode 100644 index 0000000..e31a554 --- /dev/null +++ b/docs/api/carvekit/utils/fs_utils.html @@ -0,0 +1,156 @@ + + + + + + +carvekit.utils.fs_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils.fs_utils

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+from pathlib import Path
+from PIL import Image
+import warnings
+from typing import Optional
+
+
+def save_file(output: Optional[Path], input_path: Path, image: Image.Image):
+    """
+    Saves an image to the file system
+
+    Args:
+        output (Optional[pathlib.Path]): Output path (existing directory or target file)
+        input_path (pathlib.Path): Input path of the image
+        image (Image.Image): Image to be saved.
+    """
+    if isinstance(output, Path) and str(output) != "none":
+        if output.is_dir() and output.exists():
+            image.save(output.joinpath(input_path.with_suffix(".png").name))
+        elif output.suffix != "":
+            if output.suffix != ".png":
+                warnings.warn(
+                    f"Only export with .png extension is supported! Your {output.suffix}"
+                    f" extension will be ignored and replaced with .png!"
+                )
+            image.save(output.with_suffix(".png"))
+        else:
+            raise ValueError("Wrong output path!")
+    elif output is None or str(output) == "none":
+        image.save(
+            input_path.with_name(
+                input_path.stem.split(".")[0] + "_bg_removed"
+            ).with_suffix(".png")
+        )
+
+
+
+
+
+
+
+

Functions

+
+
+def save_file(output:Β Optional[pathlib.Path], input_path:Β pathlib.Path, image:Β PIL.Image.Image) +
+
+

Saves an image to the file system

+

Args

+
+
output : Optional[pathlib.Path]
+
Output path (existing directory or target file)
+
input_path : pathlib.Path
+
Input path of the image
+
image : Image.Image
+
Image to be saved.
+
+
+ +Expand source code + +
def save_file(output: Optional[Path], input_path: Path, image: Image.Image):
+    """
+    Saves an image to the file system
+
+    Args:
+        output (Optional[pathlib.Path]): Output path (existing directory or target file)
+        input_path (pathlib.Path): Input path of the image
+        image (Image.Image): Image to be saved.
+    """
+    if isinstance(output, Path) and str(output) != "none":
+        if output.is_dir() and output.exists():
+            image.save(output.joinpath(input_path.with_suffix(".png").name))
+        elif output.suffix != "":
+            if output.suffix != ".png":
+                warnings.warn(
+                    f"Only export with .png extension is supported! Your {output.suffix}"
+                    f" extension will be ignored and replaced with .png!"
+                )
+            image.save(output.with_suffix(".png"))
+        else:
+            raise ValueError("Wrong output path!")
+    elif output is None or str(output) == "none":
+        image.save(
+            input_path.with_name(
+                input_path.stem.split(".")[0] + "_bg_removed"
+            ).with_suffix(".png")
+        )
+
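A short usage sketch for save_file; the file names are assumptions and the results directory must already exist.

``` python
from pathlib import Path
from PIL import Image

from carvekit.utils.fs_utils import save_file

image = Image.open("cat.jpg")                      # illustrative input image
# Existing directory as output -> saved as results/cat.png
save_file(Path("results"), Path("cat.jpg"), image)
# No output path -> saved next to the input as cat_bg_removed.png
save_file(None, Path("cat.jpg"), image)
```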
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/image_utils.html b/docs/api/carvekit/utils/image_utils.html new file mode 100644 index 0000000..fde17ac --- /dev/null +++ b/docs/api/carvekit/utils/image_utils.html @@ -0,0 +1,515 @@ + + + + + + +carvekit.utils.image_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils.image_utils

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+    Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+    Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+    License: Apache License 2.0
+"""
+
+import pathlib
+from typing import Union, Any, Tuple
+
+import PIL.Image
+import numpy as np
+import torch
+
+ALLOWED_SUFFIXES = [".jpg", ".jpeg", ".bmp", ".png", ".webp"]
+
+
+def to_tensor(x: Any) -> torch.Tensor:
+    """
+    Converts a PIL.Image.Image to a torch tensor without swapping tensor dims.
+
+    Args:
+        x (PIL.Image.Image): image
+
+    Returns:
+        torch.Tensor: image as torch tensor
+    """
+    return torch.tensor(np.array(x, copy=True))
+
+
+def load_image(file: Union[str, pathlib.Path, PIL.Image.Image]) -> PIL.Image.Image:
+    """Returns a `PIL.Image.Image` class by string path or `pathlib.Path` or `PIL.Image.Image` instance
+
+    Args:
+        file (Union[str, pathlib.Path, PIL.Image.Image]): File path or `PIL.Image.Image` instance
+
+    Returns:
+        PIL.Image.Image: image instance loaded from `file` location
+
+    Raises:
+        ValueError: If the file does not exist, is a directory, is not an image, or is not a valid PIL Image
+
+    """
+    if isinstance(file, str) and is_image_valid(pathlib.Path(file)):
+        return PIL.Image.open(file)
+    elif isinstance(file, PIL.Image.Image):
+        return file
+    elif isinstance(file, pathlib.Path) and is_image_valid(file):
+        return PIL.Image.open(str(file))
+    else:
+        raise ValueError("Unknown input file type")
+
+
+def convert_image(image: PIL.Image.Image, mode="RGB") -> PIL.Image.Image:
+    """Performs image conversion to correct color mode
+
+    Args:
+        image (PIL.Image.Image): `PIL.Image.Image` instance
+        mode (str, default=RGB): Color mode to convert
+
+    Returns:
+        PIL.Image.Image: converted image
+
+    Raises:
+        ValueError: If the image doesn't have a convertible color mode, or it is too small
+    """
+    if is_image_valid(image):
+        return image.convert(mode)
+
+
+def is_image_valid(image: Union[pathlib.Path, PIL.Image.Image]) -> bool:
+    """This function performs image validation.
+
+    Args:
+        image (Union[pathlib.Path, PIL.Image.Image]): Path to the image or `PIL.Image.Image` instance being checked.
+
+    Returns:
+        bool: True if the image is valid; otherwise a ValueError is raised.
+
+    Raises:
+        ValueError: If the file is not a valid image path, the image doesn't have a convertible color mode, or it is too small
+
+    """
+    if isinstance(image, pathlib.Path):
+        if not image.exists():
+            raise ValueError("File is not exists")
+        elif image.is_dir():
+            raise ValueError("File is a directory")
+        elif image.suffix.lower() not in ALLOWED_SUFFIXES:
+            raise ValueError(
+                f"Unsupported image format. Supported file formats: {', '.join(ALLOWED_SUFFIXES)}"
+            )
+    elif isinstance(image, PIL.Image.Image):
+        if not (image.size[0] > 32 and image.size[1] > 32):
+            raise ValueError("Image should be bigger then (32x32) pixels.")
+        elif image.mode not in [
+            "RGB",
+            "RGBA",
+            "L",
+        ]:
+            raise ValueError("Wrong image color mode.")
+    else:
+        raise ValueError("Unknown input file type")
+    return True
+
+
+def transparency_paste(
+    bg_img: PIL.Image.Image, fg_img: PIL.Image.Image, box=(0, 0)
+) -> PIL.Image.Image:
+    """
+    Inserts an image into another image while maintaining transparency.
+
+    Args:
+        bg_img (PIL.Image.Image): background image
+        fg_img (PIL.Image.Image): foreground image
+        box (tuple[int, int]): place to paste
+
+    Returns:
+        PIL.Image.Image: Background image with pasted foreground image at point or in the specified box
+    """
+    fg_img_trans = PIL.Image.new("RGBA", bg_img.size)
+    fg_img_trans.paste(fg_img, box, mask=fg_img)
+    new_img = PIL.Image.alpha_composite(bg_img, fg_img_trans)
+    return new_img
+
+
+def add_margin(
+    pil_img: PIL.Image.Image,
+    top: int,
+    right: int,
+    bottom: int,
+    left: int,
+    color: Tuple[int, int, int, int],
+) -> PIL.Image.Image:
+    """
+    Adds margin to the image.
+
+    Args:
+        pil_img (PIL.Image.Image): Image to which the margin will be added.
+        top (int): pixels count at top side
+        right (int): pixels count at right side
+        bottom (int): pixels count at bottom side
+        left (int): pixels count at left side
+        color (Tuple[int, int, int, int]): color of margin
+
+    Returns:
+        PIL.Image.Image: Image with margin.
+    """
+    width, height = pil_img.size
+    new_width = width + right + left
+    new_height = height + top + bottom
+    # noinspection PyTypeChecker
+    result = PIL.Image.new(pil_img.mode, (new_width, new_height), color)
+    result.paste(pil_img, (left, top))
+    return result
+
+
+
+
+
+
+
+

Functions

+
+
+def add_margin(pil_img:Β PIL.Image.Image, top:Β int, right:Β int, bottom:Β int, left:Β int, color:Β Tuple[int,Β int,Β int,Β int]) ‑>Β PIL.Image.Image +
+
+

Adds margin to the image.

+

Args

+
+
pil_img : PIL.Image.Image
+
Image to which the margin will be added.
+
top : int
+
pixels count at top side
+
right : int
+
pixels count at right side
+
bottom : int
+
pixels count at bottom side
+
left : int
+
pixels count at left side
+
color : Tuple[int, int, int, int]
+
color of margin
+
+

Returns

+
+
PIL.Image.Image
+
Image with margin.
+
+
+ +Expand source code + +
def add_margin(
+    pil_img: PIL.Image.Image,
+    top: int,
+    right: int,
+    bottom: int,
+    left: int,
+    color: Tuple[int, int, int, int],
+) -> PIL.Image.Image:
+    """
+    Adds margin to the image.
+
+    Args:
+        pil_img (PIL.Image.Image): Image to which the margin will be added.
+        top (int): pixels count at top side
+        right (int): pixels count at right side
+        bottom (int): pixels count at bottom side
+        left (int): pixels count at left side
+        color (Tuple[int, int, int, int]): color of margin
+
+    Returns:
+        PIL.Image.Image: Image with margin.
+    """
+    width, height = pil_img.size
+    new_width = width + right + left
+    new_height = height + top + bottom
+    # noinspection PyTypeChecker
+    result = PIL.Image.new(pil_img.mode, (new_width, new_height), color)
+    result.paste(pil_img, (left, top))
+    return result
+
+
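A quick illustration of add_margin (file names are assumptions): pad an RGBA cutout with a transparent border.

``` python
from PIL import Image
from carvekit.utils.image_utils import add_margin

cutout = Image.open("cat_no_bg.png").convert("RGBA")        # illustrative RGBA cutout
padded = add_margin(cutout, 30, 30, 30, 30, (0, 0, 0, 0))   # 30 px transparent margin on each side
padded.save("cat_padded.png")
```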
+
+def convert_image(image:Β PIL.Image.Image, mode='RGB') ‑>Β PIL.Image.Image +
+
+

Performs image conversion to correct color mode

+

Args

+
+
image : PIL.Image.Image
+
PIL.Image.Image instance
+
mode : str, default=RGB
+
Color mode to convert
+
+

Returns

+
+
PIL.Image.Image
+
converted image
+
+

Raises

+
+
ValueError
+
If the image doesn't have a convertible color mode, or it is too small
+
+
+ +Expand source code + +
def convert_image(image: PIL.Image.Image, mode="RGB") -> PIL.Image.Image:
+    """Performs image conversion to correct color mode
+
+    Args:
+        image (PIL.Image.Image): `PIL.Image.Image` instance
+        mode (str, default=RGB): Color mode to convert
+
+    Returns:
+        PIL.Image.Image: converted image
+
+    Raises:
+        ValueError: If the image doesn't have a convertible color mode, or it is too small
+    """
+    if is_image_valid(image):
+        return image.convert(mode)
+
+
+
+def is_image_valid(image:Β Union[pathlib.Path,Β PIL.Image.Image]) ‑>Β bool +
+
+

This function performs image validation.

+

Args

+
+
image : Union[pathlib.Path, PIL.Image.Image]
+
Path to the image or PIL.Image.Image instance being checked.
+
+

Returns

+
+
bool
+
True if the image is valid; otherwise a ValueError is raised.
+
+

Raises

+
+
ValueError
+
If the file is not a valid image path, the image doesn't have a convertible color mode, or it is too small
+
+
+ +Expand source code + +
def is_image_valid(image: Union[pathlib.Path, PIL.Image.Image]) -> bool:
+    """This function performs image validation.
+
+    Args:
+        image (Union[pathlib.Path, PIL.Image.Image]): Path to the image or `PIL.Image.Image` instance being checked.
+
+    Returns:
+        bool: True if the image is valid; otherwise a ValueError is raised.
+
+    Raises:
+        ValueError: If the file is not a valid image path, the image doesn't have a convertible color mode, or it is too small
+
+    """
+    if isinstance(image, pathlib.Path):
+        if not image.exists():
+            raise ValueError("File is not exists")
+        elif image.is_dir():
+            raise ValueError("File is a directory")
+        elif image.suffix.lower() not in ALLOWED_SUFFIXES:
+            raise ValueError(
+                f"Unsupported image format. Supported file formats: {', '.join(ALLOWED_SUFFIXES)}"
+            )
+    elif isinstance(image, PIL.Image.Image):
+        if not (image.size[0] > 32 and image.size[1] > 32):
+            raise ValueError("Image should be bigger then (32x32) pixels.")
+        elif image.mode not in [
+            "RGB",
+            "RGBA",
+            "L",
+        ]:
+            raise ValueError("Wrong image color mode.")
+    else:
+        raise ValueError("Unknown input file type")
+    return True
+
+
+
+def load_image(file:Β Union[str,Β pathlib.Path,Β PIL.Image.Image]) ‑>Β PIL.Image.Image +
+
+

Returns a PIL.Image.Image class by string path or pathlib.Path or PIL.Image.Image instance

+

Args

+
+
file : Union[str, pathlib.Path, PIL.Image.Image]
+
File path or PIL.Image.Image instance
+
+

Returns

+
+
PIL.Image.Image
+
image instance loaded from file location
+
+

Raises

+
+
ValueError
+
If the file does not exist, is a directory, is not an image, or is not a valid PIL Image
+
+
+ +Expand source code + +
def load_image(file: Union[str, pathlib.Path, PIL.Image.Image]) -> PIL.Image.Image:
+    """Returns a `PIL.Image.Image` class by string path or `pathlib.Path` or `PIL.Image.Image` instance
+
+    Args:
+        file (Union[str, pathlib.Path, PIL.Image.Image]): File path or `PIL.Image.Image` instance
+
+    Returns:
+        PIL.Image.Image: image instance loaded from `file` location
+
+    Raises:
+        ValueError: If the file does not exist, is a directory, is not an image, or is not a valid PIL Image
+
+    """
+    if isinstance(file, str) and is_image_valid(pathlib.Path(file)):
+        return PIL.Image.open(file)
+    elif isinstance(file, PIL.Image.Image):
+        return file
+    elif isinstance(file, pathlib.Path) and is_image_valid(file):
+        return PIL.Image.open(str(file))
+    else:
+        raise ValueError("Unknown input file type")
+
+
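A small end-to-end sketch of the loading helpers (the path is an assumption). Note that validation raises ValueError on bad input rather than returning False.

``` python
import pathlib

from carvekit.utils.image_utils import convert_image, is_image_valid, load_image

path = pathlib.Path("cat.jpg")        # must use one of the ALLOWED_SUFFIXES
img = load_image(path)                # validates the path, then opens it with PIL
rgb = convert_image(img, mode="RGB")  # re-validates the PIL image, then converts it
assert is_image_valid(rgb)            # raises ValueError instead of returning False
```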
+
+def to_tensor(x:Β Any) ‑>Β torch.Tensor +
+
+

Converts a PIL.Image.Image to a torch tensor without swapping tensor dims.

+

Args

+
+
x : PIL.Image.Image
+
image
+
+

Returns

+
+
torch.Tensor
+
image as torch tensor
+
+
+ +Expand source code + +
def to_tensor(x: Any) -> torch.Tensor:
+    """
+    Converts a PIL.Image.Image to a torch tensor without swapping tensor dims.
+
+    Args:
+        x (PIL.Image.Image): image
+
+    Returns:
+        torch.Tensor: image as torch tensor
+    """
+    return torch.tensor(np.array(x, copy=True))
+
+
+
+def transparency_paste(bg_img:Β PIL.Image.Image, fg_img:Β PIL.Image.Image, box=(0, 0)) ‑>Β PIL.Image.Image +
+
+

Inserts an image into another image while maintaining transparency.

+

Args

+
+
bg_img : PIL.Image.Image
+
background image
+
fg_img : PIL.Image.Image
+
foreground image
+
box : tuple[int, int]
+
place to paste
+
+

Returns

+
+
PIL.Image.Image
+
Background image with pasted foreground image at point or in the specified box
+
+
+ +Expand source code + +
def transparency_paste(
+    bg_img: PIL.Image.Image, fg_img: PIL.Image.Image, box=(0, 0)
+) -> PIL.Image.Image:
+    """
+    Inserts an image into another image while maintaining transparency.
+
+    Args:
+        bg_img (PIL.Image.Image): background image
+        fg_img (PIL.Image.Image): foreground image
+        box (tuple[int, int]): place to paste
+
+    Returns:
+        PIL.Image.Image: Background image with pasted foreground image at point or in the specified box
+    """
+    fg_img_trans = PIL.Image.new("RGBA", bg_img.size)
+    fg_img_trans.paste(fg_img, box, mask=fg_img)
+    new_img = PIL.Image.alpha_composite(bg_img, fg_img_trans)
+    return new_img
+
+
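A minimal compositing sketch for transparency_paste (paths are assumptions). Both images are converted to RGBA, since the alpha_composite call used internally requires an RGBA background.

``` python
from PIL import Image
from carvekit.utils.image_utils import transparency_paste

bg = Image.open("room.jpg").convert("RGBA")        # illustrative background
fg = Image.open("cat_no_bg.png").convert("RGBA")   # cutout with an alpha channel
combined = transparency_paste(bg, fg, box=(50, 50))
combined.save("cat_in_room.png")
```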
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/index.html b/docs/api/carvekit/utils/index.html new file mode 100644 index 0000000..851f59a --- /dev/null +++ b/docs/api/carvekit/utils/index.html @@ -0,0 +1,92 @@ + + + + + + +carvekit.utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils

+
+
+
+
+

Sub-modules

+
+
carvekit.utils.download_models
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: …

+
+
carvekit.utils.fs_utils
+
+ +
+
carvekit.utils.image_utils
+
+ +
+
carvekit.utils.mask_utils
+
+ +
+
carvekit.utils.models_utils
+
+ +
+
carvekit.utils.pool_utils
+
+ +
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/mask_utils.html b/docs/api/carvekit/utils/mask_utils.html new file mode 100644 index 0000000..4eb1ca2 --- /dev/null +++ b/docs/api/carvekit/utils/mask_utils.html @@ -0,0 +1,303 @@ + + + + + + +carvekit.utils.mask_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils.mask_utils

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import PIL.Image
+import torch
+from carvekit.utils.image_utils import to_tensor
+
+
+def composite(
+    foreground: PIL.Image.Image,
+    background: PIL.Image.Image,
+    alpha: PIL.Image.Image,
+    device="cpu",
+):
+    """
+    Composites the foreground with the background following the alpha-matting formula from
+    https://pymatting.github.io/intro.html#alpha-matting.
+
+    Args:
+        foreground (PIL.Image.Image): Image that will be pasted onto the background image using the given alpha mask.
+        background (PIL.Image.Image): Background image
+        alpha (PIL.Image.Image): Alpha Image
+        device (Literal[cpu, cuda]): Processing device
+
+    Returns:
+        PIL.Image.Image: Composited image.
+    """
+
+    foreground = foreground.convert("RGBA")
+    background = background.convert("RGBA")
+    alpha_rgba = alpha.convert("RGBA")
+    alpha_l = alpha.convert("L")
+
+    fg = to_tensor(foreground).to(device)
+    alpha_rgba = to_tensor(alpha_rgba).to(device)
+    alpha_l = to_tensor(alpha_l).to(device)
+    bg = to_tensor(background).to(device)
+
+    alpha_l = alpha_l / 255
+    alpha_rgba = alpha_rgba / 255
+
+    bg = torch.where(torch.logical_not(alpha_rgba >= 1), bg, fg)
+    bg[:, :, 0] = alpha_l[:, :] * fg[:, :, 0] + (1 - alpha_l[:, :]) * bg[:, :, 0]
+    bg[:, :, 1] = alpha_l[:, :] * fg[:, :, 1] + (1 - alpha_l[:, :]) * bg[:, :, 1]
+    bg[:, :, 2] = alpha_l[:, :] * fg[:, :, 2] + (1 - alpha_l[:, :]) * bg[:, :, 2]
+    bg[:, :, 3] = alpha_l[:, :] * 255
+
+    del alpha_l, alpha_rgba, fg
+    return PIL.Image.fromarray(bg.cpu().numpy()).convert("RGBA")
+
+
+def apply_mask(
+    image: PIL.Image.Image, mask: PIL.Image.Image, device="cpu"
+) -> PIL.Image.Image:
+    """
+    Applies mask to foreground.
+
+    Args:
+        image (PIL.Image.Image): Image with background.
+        mask (PIL.Image.Image): Alpha Channel mask for this image.
+        device (Literal[cpu, cuda]): Processing device.
+
+    Returns:
+        PIL.Image.Image: Image without background, where mask was black.
+    """
+    background = PIL.Image.new("RGBA", image.size, color=(130, 130, 130, 0))
+    return composite(image, background, mask, device=device).convert("RGBA")
+
+
+def extract_alpha_channel(image: PIL.Image.Image) -> PIL.Image.Image:
+    """
+    Extracts alpha channel from the RGBA image.
+
+    Args:
+        image: RGBA PIL image
+
+    Returns:
+        PIL.Image.Image: RGBA alpha channel image
+    """
+    alpha = image.split()[-1]
+    bg = PIL.Image.new("RGBA", image.size, (0, 0, 0, 255))
+    bg.paste(alpha, mask=alpha)
+    return bg.convert("RGBA")
+
+
+
+
+
+
+
+

Functions

+
+
+def apply_mask(image:Β PIL.Image.Image, mask:Β PIL.Image.Image, device='cpu') ‑>Β PIL.Image.Image +
+
+

Applies mask to foreground.

+

Args

+
+
image : PIL.Image.Image
+
Image with background.
+
mask : PIL.Image.Image
+
Alpha Channel mask for this image.
+
device : Literal[cpu, cuda]
+
Processing device.
+
+

Returns

+
+
PIL.Image.Image
+
Image without background, where mask was black.
+
+
+ +Expand source code + +
def apply_mask(
+    image: PIL.Image.Image, mask: PIL.Image.Image, device="cpu"
+) -> PIL.Image.Image:
+    """
+    Applies mask to foreground.
+
+    Args:
+        image (PIL.Image.Image): Image with background.
+        mask (PIL.Image.Image): Alpha Channel mask for this image.
+        device (Literal[cpu, cuda]): Processing device.
+
+    Returns:
+        PIL.Image.Image: Image without background, where mask was black.
+    """
+    background = PIL.Image.new("RGBA", image.size, color=(130, 130, 130, 0))
+    return composite(image, background, mask, device=device).convert("RGBA")
+
+
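A short usage sketch for apply_mask (file names are assumptions; the mask must have the same size as the image).

``` python
from PIL import Image
from carvekit.utils.mask_utils import apply_mask

image = Image.open("cat.jpg")        # original photo
mask = Image.open("cat_mask.png")    # grayscale mask: white keeps, black removes
cutout = apply_mask(image, mask, device="cpu")
cutout.save("cat_no_bg.png")
```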
+
+def composite(foreground:Β PIL.Image.Image, background:Β PIL.Image.Image, alpha:Β PIL.Image.Image, device='cpu') +
+
+

Composites the foreground with the background following the alpha-matting formula from +https://pymatting.github.io/intro.html#alpha-matting.

+

Args

+
+
foreground : PIL.Image.Image
+
Image that will be pasted onto the background image using the given alpha mask.
+
background : PIL.Image.Image
+
Background image
+
alpha : PIL.Image.Image
+
Alpha Image
+
device : Literal[cpu, cuda]
+
Processing device
+
+

Returns

+
+
PIL.Image.Image
+
Composited image.
+
+
+ +Expand source code + +
def composite(
+    foreground: PIL.Image.Image,
+    background: PIL.Image.Image,
+    alpha: PIL.Image.Image,
+    device="cpu",
+):
+    """
+    Composites the foreground with the background following the alpha-matting formula from
+    https://pymatting.github.io/intro.html#alpha-matting.
+
+    Args:
+        foreground (PIL.Image.Image): Image that will be pasted onto the background image using the given alpha mask.
+        background (PIL.Image.Image): Background image
+        alpha (PIL.Image.Image): Alpha Image
+        device (Literal[cpu, cuda]): Processing device
+
+    Returns:
+        PIL.Image.Image: Composited image.
+    """
+
+    foreground = foreground.convert("RGBA")
+    background = background.convert("RGBA")
+    alpha_rgba = alpha.convert("RGBA")
+    alpha_l = alpha.convert("L")
+
+    fg = to_tensor(foreground).to(device)
+    alpha_rgba = to_tensor(alpha_rgba).to(device)
+    alpha_l = to_tensor(alpha_l).to(device)
+    bg = to_tensor(background).to(device)
+
+    alpha_l = alpha_l / 255
+    alpha_rgba = alpha_rgba / 255
+
+    bg = torch.where(torch.logical_not(alpha_rgba >= 1), bg, fg)
+    bg[:, :, 0] = alpha_l[:, :] * fg[:, :, 0] + (1 - alpha_l[:, :]) * bg[:, :, 0]
+    bg[:, :, 1] = alpha_l[:, :] * fg[:, :, 1] + (1 - alpha_l[:, :]) * bg[:, :, 1]
+    bg[:, :, 2] = alpha_l[:, :] * fg[:, :, 2] + (1 - alpha_l[:, :]) * bg[:, :, 2]
+    bg[:, :, 3] = alpha_l[:, :] * 255
+
+    del alpha_l, alpha_rgba, fg
+    return PIL.Image.fromarray(bg.cpu().numpy()).convert("RGBA")
+
+
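For reference, the code above applies the standard per-channel alpha-compositing equation C = α·F + (1 − α)·B and writes α into the output alpha channel. A tiny NumPy check with illustrative values:

``` python
import numpy as np

alpha = 0.25                          # mask value rescaled to [0, 1]
F = np.array([200.0, 10.0, 10.0])     # foreground pixel (R, G, B)
B = np.array([0.0, 0.0, 255.0])       # background pixel (R, G, B)
C = alpha * F + (1.0 - alpha) * B     # blended pixel
print(C)                              # [50.0, 2.5, 193.75]
```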
+
+def extract_alpha_channel(image:Β PIL.Image.Image) ‑>Β PIL.Image.Image +
+
+

Extracts alpha channel from the RGBA image.

+

Args

+
+
image
+
RGBA PIL image
+
+

Returns

+
+
PIL.Image.Image
+
RGBA alpha channel image
+
+
+ +Expand source code + +
def extract_alpha_channel(image: PIL.Image.Image) -> PIL.Image.Image:
+    """
+    Extracts alpha channel from the RGBA image.
+
+    Args:
+        image: RGBA PIL image
+
+    Returns:
+        PIL.Image.Image: RGBA alpha channel image
+    """
+    alpha = image.split()[-1]
+    bg = PIL.Image.new("RGBA", image.size, (0, 0, 0, 255))
+    bg.paste(alpha, mask=alpha)
+    return bg.convert("RGBA")
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/models_utils.html b/docs/api/carvekit/utils/models_utils.html new file mode 100644 index 0000000..4002420 --- /dev/null +++ b/docs/api/carvekit/utils/models_utils.html @@ -0,0 +1,399 @@ + + + + + + +carvekit.utils.models_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils.models_utils

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+
+import random
+import warnings
+from typing import Union, Tuple, Any
+
+import torch
+from torch import autocast
+
+
+class EmptyAutocast(object):
+    """
+    No-op context manager used to disable auto-casting.
+    """
+
+    def __enter__(self):
+        return None
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return
+
+    def __call__(self, func):
+        return
+
+
+def get_precision_autocast(
+    device="cpu", fp16=True, override_dtype=None
+) -> Union[
+    Tuple[EmptyAutocast, Union[torch.dtype, Any]],
+    Tuple[autocast, Union[torch.dtype, Any]],
+]:
+    """
+    Returns precision and auto-cast settings for given device and fp16 settings.
+
+    Args:
+        device (Literal[cpu, cuda]): Device to get precision and auto-cast settings for.
+        fp16 (bool): Whether to use fp16 precision.
+        override_dtype (Optional[torch.dtype]): Override dtype for auto-cast.
+
+    Returns:
+        Union[Tuple[EmptyAutocast, Union[torch.dtype, Any]],Tuple[autocast, Union[torch.dtype, Any]]]: Autocast object, dtype
+    """
+    dtype = torch.float32
+    cache_enabled = None
+
+    if device == "cpu" and fp16:
+        warnings.warn("FP16 is not supported on CPU. Using FP32 instead.")
+        dtype = torch.float32
+
+        # TODO: Implement BFP16 on CPU. There are unexpected slowdowns on cpu on a clean environment.
+        # warnings.warn(
+        #     "Accuracy BFP16 has experimental support on the CPU. "
+        #     "This may result in an unexpected reduction in quality."
+        # )
+        # dtype = (
+        #     torch.bfloat16
+        # )  # Using bfloat16 for CPU, since autocast is not supported for float16
+
+    if "cuda" in device and fp16:
+        dtype = torch.float16
+        cache_enabled = True
+
+    if override_dtype is not None:
+        dtype = override_dtype
+
+    if dtype == torch.float32 and device == "cpu":
+        return EmptyAutocast(), dtype
+
+    return (
+        torch.autocast(
+            device_type=device, dtype=dtype, enabled=True, cache_enabled=cache_enabled
+        ),
+        dtype,
+    )
+
+
+def cast_network(network: torch.nn.Module, dtype: torch.dtype):
+    """
+    Cast network to given dtype
+
+    Args:
+        network (torch.nn.Module): Network to be cast
+        dtype (torch.dtype): Dtype to cast network to
+    """
+    if dtype == torch.float16:
+        network.half()
+    elif dtype == torch.bfloat16:
+        network.bfloat16()
+    elif dtype == torch.float32:
+        network.float()
+    else:
+        raise ValueError(f"Unknown dtype {dtype}")
+
+
+def fix_seed(seed: int = 42):
+    """
+    Sets fixed random seed
+
+    Args:
+        seed (int, default=42): Random seed to be set
+    """
+    random.seed(seed)
+    torch.manual_seed(seed)
+    if torch.cuda.is_available():
+        torch.cuda.manual_seed(seed)
+        torch.cuda.manual_seed_all(seed)
+        # noinspection PyUnresolvedReferences
+        torch.backends.cudnn.deterministic = True
+        # noinspection PyUnresolvedReferences
+        torch.backends.cudnn.benchmark = False
+    return True
+
+
+def suppress_warnings():
+    # Suppress PyTorch 1.11.0 warning associated with changing order of args in nn.MaxPool2d layer,
+    # since the source code is not affected by this issue and there is no other correct way to hide this message.
+    warnings.filterwarnings(
+        "ignore",
+        category=UserWarning,
+        message="Note that order of the arguments: ceil_mode and "
+        "return_indices will changeto match the args list "
+        "in nn.MaxPool2d in a future release.",
+        module="torch",
+    )
+
+
+
+
+
+
+
+

Functions

+
+
+def cast_network(network:Β torch.nn.modules.module.Module, dtype:Β torch.dtype) +
+
+

Cast network to given dtype

+

Args

+
+
network : torch.nn.Module
+
Network to be cast
+
dtype : torch.dtype
+
Dtype to cast network to
+
+
+ +Expand source code + +
def cast_network(network: torch.nn.Module, dtype: torch.dtype):
+    """
+    Cast network to given dtype
+
+    Args:
+        network (torch.nn.Module): Network to be cast
+        dtype (torch.dtype): Dtype to cast network to
+    """
+    if dtype == torch.float16:
+        network.half()
+    elif dtype == torch.bfloat16:
+        network.bfloat16()
+    elif dtype == torch.float32:
+        network.float()
+    else:
+        raise ValueError(f"Unknown dtype {dtype}")
+
+
+
+def fix_seed(seed:Β intΒ =Β 42) +
+
+

Sets fixed random seed

+

Args

+
+
seed : int, default=42
+
Random seed to be set
+
+
+ +Expand source code + +
def fix_seed(seed: int = 42):
+    """
+    Sets fixed random seed
+
+    Args:
+        seed (int, default=42): Random seed to be set
+    """
+    random.seed(seed)
+    torch.manual_seed(seed)
+    if torch.cuda.is_available():
+        torch.cuda.manual_seed(seed)
+        torch.cuda.manual_seed_all(seed)
+        # noinspection PyUnresolvedReferences
+        torch.backends.cudnn.deterministic = True
+        # noinspection PyUnresolvedReferences
+        torch.backends.cudnn.benchmark = False
+    return True
+
+
+
+def get_precision_autocast(device='cpu', fp16=True, override_dtype=None) ‑>Β Union[Tuple[EmptyAutocast,Β Union[torch.dtype,Β Any]],Β Tuple[torch.autocast_mode.autocast,Β Union[torch.dtype,Β Any]]] +
+
+

Returns precision and auto-cast settings for given device and fp16 settings.

+

Args

+
+
device : Literal[cpu, cuda]
+
Device to get precision and auto-cast settings for.
+
fp16 : bool
+
Whether to use fp16 precision.
+
override_dtype : Optional[torch.dtype]
+
Override dtype for auto-cast.
+
+

Returns

+
+
Union[Tuple[EmptyAutocast, Union[torch.dtype, Any]],Tuple[autocast, Union[torch.dtype, Any]]]
+
Autocast object, dtype
+
+
+ +Expand source code + +
def get_precision_autocast(
+    device="cpu", fp16=True, override_dtype=None
+) -> Union[
+    Tuple[EmptyAutocast, Union[torch.dtype, Any]],
+    Tuple[autocast, Union[torch.dtype, Any]],
+]:
+    """
+    Returns precision and auto-cast settings for given device and fp16 settings.
+
+    Args:
+        device (Literal[cpu, cuda]): Device to get precision and auto-cast settings for.
+        fp16 (bool): Whether to use fp16 precision.
+        override_dtype (Optional[torch.dtype]): Override dtype for auto-cast.
+
+    Returns:
+        Union[Tuple[EmptyAutocast, Union[torch.dtype, Any]],Tuple[autocast, Union[torch.dtype, Any]]]: Autocast object, dtype
+    """
+    dtype = torch.float32
+    cache_enabled = None
+
+    if device == "cpu" and fp16:
+        warnings.warn("FP16 is not supported on CPU. Using FP32 instead.")
+        dtype = torch.float32
+
+        # TODO: Implement BFP16 on CPU. There are unexpected slowdowns on cpu on a clean environment.
+        # warnings.warn(
+        #     "Accuracy BFP16 has experimental support on the CPU. "
+        #     "This may result in an unexpected reduction in quality."
+        # )
+        # dtype = (
+        #     torch.bfloat16
+        # )  # Using bfloat16 for CPU, since autocast is not supported for float16
+
+    if "cuda" in device and fp16:
+        dtype = torch.float16
+        cache_enabled = True
+
+    if override_dtype is not None:
+        dtype = override_dtype
+
+    if dtype == torch.float32 and device == "cpu":
+        return EmptyAutocast(), dtype
+
+    return (
+        torch.autocast(
+            device_type=device, dtype=dtype, enabled=True, cache_enabled=cache_enabled
+        ),
+        dtype,
+    )
+
+
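A hedged sketch tying these helpers together; the linear layer is a stand-in for a real CarveKit network.

``` python
import torch

from carvekit.utils.models_utils import cast_network, fix_seed, get_precision_autocast

fix_seed(42)                                             # reproducible runs
device = "cuda" if torch.cuda.is_available() else "cpu"
autocast_ctx, dtype = get_precision_autocast(device=device, fp16=True)

model = torch.nn.Linear(8, 2).to(device)
cast_network(model, dtype)                               # fp16 on CUDA, fp32 on CPU

with autocast_ctx:
    out = model(torch.rand(1, 8, device=device))
print(out.dtype)                                         # torch.float16 on CUDA, torch.float32 on CPU
```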
+
+def suppress_warnings() +
+
+
+
+ +Expand source code + +
def suppress_warnings():
+    # Suppress PyTorch 1.11.0 warning associated with changing order of args in nn.MaxPool2d layer,
+    # since the source code is not affected by this issue and there is no other correct way to hide this message.
+    warnings.filterwarnings(
+        "ignore",
+        category=UserWarning,
+        message="Note that order of the arguments: ceil_mode and "
+        "return_indices will changeto match the args list "
+        "in nn.MaxPool2d in a future release.",
+        module="torch",
+    )
+
+
+
+
+
+

Classes

+
+
+class EmptyAutocast +
+
+

No-op context manager used to disable auto-casting.

+
+ +Expand source code + +
class EmptyAutocast(object):
+    """
+    No-op context manager used to disable auto-casting.
+    """
+
+    def __enter__(self):
+        return None
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return
+
+    def __call__(self, func):
+        return
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/utils/pool_utils.html b/docs/api/carvekit/utils/pool_utils.html new file mode 100644 index 0000000..abcae5f --- /dev/null +++ b/docs/api/carvekit/utils/pool_utils.html @@ -0,0 +1,189 @@ + + + + + + +carvekit.utils.pool_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.utils.pool_utils

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any, Iterable, Callable, Collection, List
+
+
+def thread_pool_processing(func: Callable[[Any], Any], data: Iterable, workers=18):
+    """
+    Passes all iterator data through the given function
+
+    Args:
+        workers (int, default=18): Count of workers.
+        func (Callable[[Any], Any]): function to pass data through
+        data (Iterable): input iterator
+
+    Returns:
+        List[Any]: list of results
+
+    """
+    with ThreadPoolExecutor(workers) as p:
+        return list(p.map(func, data))
+
+
+def batch_generator(iterable: Collection, n: int = 1) -> Iterable[Collection]:
+    """
+    Splits any iterable into n-size packets
+
+    Args:
+        iterable (Collection): iterator
+        n (int, default=1): size of packets
+
+    Returns:
+        Iterable[Collection]: new n-size packet
+    """
+    it = len(iterable)
+    for ndx in range(0, it, n):
+        yield iterable[ndx : min(ndx + n, it)]
+
+
+
+
+
+
+
+

Functions

+
+
+def batch_generator(iterable:Β Collection, n:Β intΒ =Β 1) ‑>Β Iterable[Collection] +
+
+

Splits any iterable into n-size packets

+

Args

+
+
iterable : Collection
+
iterator
+
n : int, default=1
+
size of packets
+
+

Returns

+
+
Iterable[Collection]
+
new n-size packet
+
+
+ +Expand source code + +
def batch_generator(iterable: Collection, n: int = 1) -> Iterable[Collection]:
+    """
+    Splits any iterable into n-size packets
+
+    Args:
+        iterable (Collection): iterator
+        n (int, default=1): size of packets
+
+    Returns:
+        Iterable[Collection]: new n-size packet
+    """
+    it = len(iterable)
+    for ndx in range(0, it, n):
+        yield iterable[ndx : min(ndx + n, it)]
+
+
+
+def thread_pool_processing(func:Β Callable[[Any],Β Any], data:Β Iterable, workers=18) +
+
+

Passes all iterator data through the given function

+

Args

+
+
workers : int, default=18
+
Count of workers.
+
func : Callable[[Any], Any]
+
function to pass data through
+
data : Iterable
+
input iterator
+
+

Returns

+
+
List[Any]
+
list of results
+
+
+ +Expand source code + +
def thread_pool_processing(func: Callable[[Any], Any], data: Iterable, workers=18):
+    """
+    Passes all iterator data through the given function
+
+    Args:
+        workers (int, default=18): Count of workers.
+        func (Callable[[Any], Any]): function to pass data through
+        data (Iterable): input iterator
+
+    Returns:
+        List[Any]: list of results
+
+    """
+    with ThreadPoolExecutor(workers) as p:
+        return list(p.map(func, data))
+
+
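A small sketch showing how the two helpers compose. batch_generator slices its input, so it needs an indexable collection such as a list.

``` python
from carvekit.utils.pool_utils import batch_generator, thread_pool_processing

paths = [f"image_{i}.jpg" for i in range(10)]   # illustrative file names
for batch in batch_generator(paths, n=4):       # yields chunks of size <= 4
    name_lengths = thread_pool_processing(len, batch, workers=4)
    print(batch, name_lengths)
```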
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/app.html b/docs/api/carvekit/web/app.html new file mode 100644 index 0000000..72495e0 --- /dev/null +++ b/docs/api/carvekit/web/app.html @@ -0,0 +1,88 @@ + + + + + + +carvekit.web.app API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.app

+
+
+
+ +Expand source code + +
from pathlib import Path
+
+import uvicorn
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from starlette.staticfiles import StaticFiles
+
+from carvekit import version
+from carvekit.web.deps import config
+from carvekit.web.routers.api_router import api_router
+
+app = FastAPI(title="CarveKit Web API", version=version)
+
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+app.include_router(api_router, prefix="/api")
+app.mount(
+    "/",
+    StaticFiles(directory=Path(__file__).parent.joinpath("static"), html=True),
+    name="static",
+)
+
+if __name__ == "__main__":
+    uvicorn.run(app, host=config.host, port=config.port)
+
+
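A minimal sketch for serving the documented app from your own script; the host and port below are illustrative, whereas the module's own __main__ block takes them from the web config.

``` python
import uvicorn

from carvekit.web.app import app

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5000)
```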
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/deps.html b/docs/api/carvekit/web/deps.html new file mode 100644 index 0000000..e8b94c3 --- /dev/null +++ b/docs/api/carvekit/web/deps.html @@ -0,0 +1,64 @@ + + + + + + +carvekit.web.deps API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.deps

+
+
+
+ +Expand source code + +
from carvekit.web.schemas.config import WebAPIConfig
+from carvekit.web.utils.init_utils import init_config
+from carvekit.web.utils.task_queue import MLProcessor
+
+config: WebAPIConfig = init_config()
+ml_processor = MLProcessor(api_config=config)
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/handlers/index.html b/docs/api/carvekit/web/handlers/index.html new file mode 100644 index 0000000..82a3996 --- /dev/null +++ b/docs/api/carvekit/web/handlers/index.html @@ -0,0 +1,65 @@ + + + + + + +carvekit.web.handlers API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.handlers

+
+
+
+
+

Sub-modules

+
+
carvekit.web.handlers.response
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/handlers/response.html b/docs/api/carvekit/web/handlers/response.html new file mode 100644 index 0000000..ced0e8f --- /dev/null +++ b/docs/api/carvekit/web/handlers/response.html @@ -0,0 +1,205 @@ + + + + + + +carvekit.web.handlers.response API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.handlers.response

+
+
+
+ +Expand source code + +
from typing import Union
+
+from fastapi import Header
+from fastapi.responses import Response, JSONResponse
+from carvekit.web.deps import config
+
+
+def Authenticate(x_api_key: Union[str, None] = Header(None)) -> Union[bool, str]:
+    if x_api_key in config.auth.allowed_tokens:
+        return "allowed"
+    elif x_api_key == config.auth.admin_token:
+        return "admin"
+    elif config.auth.auth is False:
+        return "allowed"
+    else:
+        return False
+
+
+def handle_response(response, original_image) -> Response:
+    """
+    Response handler from TaskQueue
+    :param response: TaskQueue response
+    :param original_image: Original PIL image
+    :return: Complete FastAPI response
+    """
+    response_object = None
+    if isinstance(response, dict):
+        if response["type"] == "jpg":
+            response_object = Response(
+                content=response["data"][0].read(), media_type="image/jpeg"
+            )
+        elif response["type"] == "png":
+            response_object = Response(
+                content=response["data"][0].read(), media_type="image/png"
+            )
+        elif response["type"] == "zip":
+            response_object = Response(
+                content=response["data"][0], media_type="application/zip"
+            )
+            response_object.headers[
+                "Content-Disposition"
+            ] = "attachment; filename='no-bg.zip'"
+
+        # Add headers to output result
+        response_object.headers["X-Credits-Charged"] = "0"
+        response_object.headers["X-Type"] = "other"  # TODO Make support for this
+        response_object.headers["X-Max-Width"] = str(original_image.size[0])
+        response_object.headers["X-Max-Height"] = str(original_image.size[1])
+        response_object.headers[
+            "X-Ratelimit-Limit"
+        ] = "500"  # TODO Make ratelimit support
+        response_object.headers["X-Ratelimit-Remaining"] = "500"
+        response_object.headers["X-Ratelimit-Reset"] = "1"
+        response_object.headers["X-Width"] = str(response["data"][1][0])
+        response_object.headers["X-Height"] = str(response["data"][1][1])
+
+    else:
+        response_object = JSONResponse(content=response[0])
+        response_object.headers["X-Credits-Charged"] = "0"
+
+    return response_object
+
+
+
+
+
+
+
+

Functions

+
+
+def Authenticate(x_api_key:Β Optional[str]Β =Β Header(None)) ‑>Β Union[bool,Β str] +
+
+
+
+ +Expand source code + +
def Authenticate(x_api_key: Union[str, None] = Header(None)) -> Union[bool, str]:
+    if x_api_key in config.auth.allowed_tokens:
+        return "allowed"
+    elif x_api_key == config.auth.admin_token:
+        return "admin"
+    elif config.auth.auth is False:
+        return "allowed"
+    else:
+        return False
+
+
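A hedged sketch of reusing Authenticate as a FastAPI dependency in a hypothetical route of your own; it returns "allowed", "admin", or False depending on the x-api-key header and the auth config.

``` python
from fastapi import Depends, FastAPI, HTTPException

from carvekit.web.handlers.response import Authenticate

app = FastAPI()

@app.get("/whoami")
def whoami(role=Depends(Authenticate)):
    if role is False:
        raise HTTPException(status_code=403, detail="Invalid or missing API key")
    return {"role": role}
```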
+
+def handle_response(response, original_image) ‑>Β starlette.responses.Response +
+
+

Response handler from TaskQueue +:param response: TaskQueue response +:param original_image: Original PIL image +:return: Complete FastAPI response

+
+ +Expand source code + +
def handle_response(response, original_image) -> Response:
+    """
+    Response handler from TaskQueue
+    :param response: TaskQueue response
+    :param original_image: Original PIL image
+    :return: Complete FastAPI response
+    """
+    response_object = None
+    if isinstance(response, dict):
+        if response["type"] == "jpg":
+            response_object = Response(
+                content=response["data"][0].read(), media_type="image/jpeg"
+            )
+        elif response["type"] == "png":
+            response_object = Response(
+                content=response["data"][0].read(), media_type="image/png"
+            )
+        elif response["type"] == "zip":
+            response_object = Response(
+                content=response["data"][0], media_type="application/zip"
+            )
+            response_object.headers[
+                "Content-Disposition"
+            ] = "attachment; filename='no-bg.zip'"
+
+        # Add headers to output result
+        response_object.headers["X-Credits-Charged"] = "0"
+        response_object.headers["X-Type"] = "other"  # TODO Make support for this
+        response_object.headers["X-Max-Width"] = str(original_image.size[0])
+        response_object.headers["X-Max-Height"] = str(original_image.size[1])
+        response_object.headers[
+            "X-Ratelimit-Limit"
+        ] = "500"  # TODO Make ratelimit support
+        response_object.headers["X-Ratelimit-Remaining"] = "500"
+        response_object.headers["X-Ratelimit-Reset"] = "1"
+        response_object.headers["X-Width"] = str(response["data"][1][0])
+        response_object.headers["X-Height"] = str(response["data"][1][1])
+
+    else:
+        response_object = JSONResponse(content=response[0])
+        response_object.headers["X-Credits-Charged"] = "0"
+
+    return response_object
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/index.html b/docs/api/carvekit/web/index.html new file mode 100644 index 0000000..59bd3d6 --- /dev/null +++ b/docs/api/carvekit/web/index.html @@ -0,0 +1,100 @@ + + + + + + +carvekit.web API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web

+
+
+
+
+

Sub-modules

+
+
carvekit.web.app
+
+
+
+
carvekit.web.deps
+
+
+
+
carvekit.web.handlers
+
+
+
+
carvekit.web.other
+
+
+
+
carvekit.web.responses
+
+
+
+
carvekit.web.routers
+
+
+
+
carvekit.web.schemas
+
+
+
+
carvekit.web.utils
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/other/index.html b/docs/api/carvekit/web/other/index.html new file mode 100644 index 0000000..6d6bdb0 --- /dev/null +++ b/docs/api/carvekit/web/other/index.html @@ -0,0 +1,65 @@ + + + + + + +carvekit.web.other API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.other

+
+
+
+
+

Sub-modules

+
+
carvekit.web.other.removebg
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/other/removebg.html b/docs/api/carvekit/web/other/removebg.html new file mode 100644 index 0000000..e657a22 --- /dev/null +++ b/docs/api/carvekit/web/other/removebg.html @@ -0,0 +1,571 @@ + + + + + + +carvekit.web.other.removebg API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.other.removebg

+
+
+
+ +Expand source code + +
import io
+import time
+import zipfile
+
+import requests
+from PIL import Image, ImageColor
+
+from carvekit.utils.image_utils import transparency_paste, add_margin
+from carvekit.utils.mask_utils import extract_alpha_channel
+from carvekit.web.responses.api import error_dict
+from carvekit.api.interface import Interface
+
+
+def process_remove_bg(
+    interface: Interface, params, image, bg, is_json_or_www_encoded=False
+):
+    """
+    Handles a request to the removebg api method
+
+    Args:
+        interface: CarveKit interface
+        bg: background pil image
+        is_json_or_www_encoded: is "json" or "x-www-form-urlencoded" content-type
+        image: foreground pil image
+        params: parameters
+    """
+    h, w = image.size
+    if h < 2 or w < 2:
+        return error_dict("Image is too small. Minimum size 2x2"), 400
+
+    if "size" in params.keys():
+        value = params["size"]
+        if value == "preview" or value == "small" or value == "regular":
+            image.thumbnail((625, 400), resample=3)  # 0.25 mp
+        elif value == "medium":
+            image.thumbnail((1504, 1000), resample=3)  # 1.5 mp
+        elif value == "hd":
+            image.thumbnail((2000, 2000), resample=3)  # 2.5 mp
+        else:
+            image.thumbnail((6250, 4000), resample=3)  # 25 mp
+
+    roi_box = [0, 0, image.size[0], image.size[1]]
+    if "type" in params.keys():
+        value = params["type"]
+        pass
+
+    if "roi" in params.keys():
+        value = params["roi"].split(" ")
+        if len(value) == 4:
+            for i, coord in enumerate(value):
+                if "px" in coord:
+                    coord = coord.replace("px", "")
+                    try:
+                        coord = int(coord)
+                    except BaseException:
+                        return (
+                            error_dict(
+                                "Error converting roi coordinate string to number!"
+                            ),
+                            400,
+                        )
+                    if coord < 0:
+                        return error_dict("Bad roi coordinate."), 400
+                    if (i == 0 or i == 2) and coord > image.size[0]:
+                        return (
+                            error_dict(
+                                "The roi coordinate cannot be larger than the image size."
+                            ),
+                            400,
+                        )
+                    elif (i == 1 or i == 3) and coord > image.size[1]:
+                        return (
+                            error_dict(
+                                "The roi coordinate cannot be larger than the image size."
+                            ),
+                            400,
+                        )
+                    roi_box[i] = int(coord)
+                elif "%" in coord:
+                    coord = coord.replace("%", "")
+                    try:
+                        coord = int(coord)
+                    except BaseException:
+                        return (
+                            error_dict(
+                                "Error converting roi coordinate string to number!"
+                            ),
+                            400,
+                        )
+                    if coord > 100:
+                        return (
+                            error_dict("The coordinate cannot be more than 100%"),
+                            400,
+                        )
+                    elif coord < 0:
+                        return error_dict("Coordinate cannot be less than 0%"), 400
+                    if i == 0 or i == 2:
+                        coord = int(image.size[0] * coord / 100)
+                    elif i == 1 or i == 3:
+                        coord = int(image.size[1] * coord / 100)
+                    roi_box[i] = coord
+                else:
+                    return error_dict("Something wrong with roi coordinates!"), 400
+
+    new_image = image.copy()
+    new_image = new_image.crop(roi_box)
+    h, w = new_image.size
+    if h < 2 or w < 2:
+        return error_dict("Image is too small. Minimum size 2x2"), 400
+    new_image = interface([new_image])[0]
+
+    scaled = False
+    if "scale" in params.keys() and params["scale"] != 100:
+        value = params["scale"]
+        new_image.thumbnail(
+            (int(image.size[0] * value / 100), int(image.size[1] * value / 100)),
+            resample=3,
+        )
+        scaled = True
+    if "crop" in params.keys():
+        value = params["crop"]
+        if value:
+            new_image = new_image.crop(new_image.getbbox())
+            if "crop_margin" in params.keys():
+                crop_margin = params["crop_margin"]
+                if "px" in crop_margin:
+                    crop_margin = crop_margin.replace("px", "")
+                    crop_margin = abs(int(crop_margin))
+                    if crop_margin > 500:
+                        return (
+                            error_dict(
+                                "The crop_margin cannot be larger than the original image size."
+                            ),
+                            400,
+                        )
+                    new_image = add_margin(
+                        new_image,
+                        crop_margin,
+                        crop_margin,
+                        crop_margin,
+                        crop_margin,
+                        (0, 0, 0, 0),
+                    )
+                elif "%" in crop_margin:
+                    crop_margin = crop_margin.replace("%", "")
+                    crop_margin = int(crop_margin)
+                    new_image = add_margin(
+                        new_image,
+                        int(new_image.size[1] * crop_margin / 100),
+                        int(new_image.size[0] * crop_margin / 100),
+                        int(new_image.size[1] * crop_margin / 100),
+                        int(new_image.size[0] * crop_margin / 100),
+                        (0, 0, 0, 0),
+                    )
+        else:
+            if "position" in params.keys() and scaled is False:
+                value = params["position"]
+                if len(value) == 2:
+                    new_image = transparency_paste(
+                        Image.new("RGBA", image.size),
+                        new_image,
+                        (
+                            int(image.size[0] * value[0] / 100),
+                            int(image.size[1] * value[1] / 100),
+                        ),
+                    )
+                else:
+                    new_image = transparency_paste(
+                        Image.new("RGBA", image.size), new_image, roi_box
+                    )
+            elif scaled is False:
+                new_image = transparency_paste(
+                    Image.new("RGBA", image.size), new_image, roi_box
+                )
+
+    if "channels" in params.keys():
+        value = params["channels"]
+        if value == "alpha":
+            new_image = extract_alpha_channel(new_image)
+        else:
+            bg_changed = False
+            if "bg_color" in params.keys():
+                value = params["bg_color"]
+                if len(value) > 0:
+                    color = ImageColor.getcolor(value, "RGB")
+                    bg = Image.new("RGBA", new_image.size, color)
+                    bg = transparency_paste(bg, new_image, (0, 0))
+                    new_image = bg.copy()
+                    bg_changed = True
+            if "bg_image_url" in params.keys() and bg_changed is False:
+                value = params["bg_image_url"]
+                if len(value) > 0:
+                    try:
+                        bg = Image.open(io.BytesIO(requests.get(value).content))
+                    except BaseException:
+                        return error_dict("Error downloading background image!"), 400
+                    bg = bg.resize(new_image.size)
+                    bg = bg.convert("RGBA")
+                    bg = transparency_paste(bg, new_image, (0, 0))
+                    new_image = bg.copy()
+                    bg_changed = True
+            if not is_json_or_www_encoded:
+                if bg and bg_changed is False:
+                    bg = bg.resize(new_image.size)
+                    bg = bg.convert("RGBA")
+                    bg = transparency_paste(bg, new_image, (0, 0))
+                    new_image = bg.copy()
+    if "format" in params.keys():
+        value = params["format"]
+        if value == "jpg":
+            new_image = new_image.convert("RGB")
+            img_io = io.BytesIO()
+            new_image.save(img_io, "JPEG", quality=100)
+            img_io.seek(0)
+            return {"type": "jpg", "data": [img_io, new_image.size]}
+        elif value == "zip":
+            mask = extract_alpha_channel(new_image)
+            mask_buff = io.BytesIO()
+            mask.save(mask_buff, "PNG")
+            mask_buff.seek(0)
+            image_buff = io.BytesIO()
+            image.convert("RGB").save(image_buff, "JPEG")
+            image_buff.seek(0)
+            fileobj = io.BytesIO()
+            with zipfile.ZipFile(fileobj, "w") as zip_file:
+                zip_info = zipfile.ZipInfo(filename="color.jpg")
+                zip_info.date_time = time.localtime(time.time())[:6]
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zip_file.writestr(zip_info, image_buff.getvalue())
+                zip_info = zipfile.ZipInfo(filename="alpha.png")
+                zip_info.date_time = time.localtime(time.time())[:6]
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zip_file.writestr(zip_info, mask_buff.getvalue())
+            fileobj.seek(0)
+            return {"type": "zip", "data": [fileobj.read(), new_image.size]}
+        else:
+            buff = io.BytesIO()
+            new_image.save(buff, "PNG")
+            buff.seek(0)
+            return {"type": "png", "data": [buff, new_image.size]}
+    return (
+        error_dict(
+            "Something is wrong with the request or the HTTP API. Please open a new issue on GitHub! "
+            "This is an error in the code."
+        ),
+        400,
+    )
+
+
+
+
+
+
+
+

Functions

+
+
+def process_remove_bg(interface: Interface, params, image, bg, is_json_or_www_encoded=False) +
+
+

Handles a request to the removebg api method

+

Args

+
+
interface
+
CarveKit interface
+
bg
+
background pil image
+
is_json_or_www_encoded
+
is "json" or "x-www-form-urlencoded" content-type
+
image
+
foreground pil image
+
params
+
request parameters (dict form of the Parameters schema)
+
+
+ +Expand source code + +
def process_remove_bg(
+    interface: Interface, params, image, bg, is_json_or_www_encoded=False
+):
+    """
+    Handles a request to the removebg api method
+
+    Args:
+        interface: CarveKit interface
+        bg: background pil image
+        is_json_or_www_encoded: is "json" or "x-www-form-urlencoded" content-type
+        image: foreground pil image
+        params: request parameters (dict form of the Parameters schema)
+    """
+    w, h = image.size
+    if h < 2 or w < 2:
+        return error_dict("Image is too small. Minimum size 2x2"), 400
+
+    if "size" in params.keys():
+        value = params["size"]
+        if value == "preview" or value == "small" or value == "regular":
+            image.thumbnail((625, 400), resample=3)  # 0.25 mp
+        elif value == "medium":
+            image.thumbnail((1504, 1000), resample=3)  # 1.5 mp
+        elif value == "hd":
+            image.thumbnail((2000, 2000), resample=3)  # 2.5 mp
+        else:
+            image.thumbnail((6250, 4000), resample=3)  # 25 mp
+
+    roi_box = [0, 0, image.size[0], image.size[1]]
+    if "type" in params.keys():
+        value = params["type"]  # object type hint; currently not used
+        pass
+
+    if "roi" in params.keys():
+        value = params["roi"].split(" ")
+        if len(value) == 4:
+            for i, coord in enumerate(value):
+                if "px" in coord:
+                    coord = coord.replace("px", "")
+                    try:
+                        coord = int(coord)
+                    except BaseException:
+                        return (
+                            error_dict(
+                                "Error converting roi coordinate string to number!"
+                            ),
+                            400,
+                        )
+                    if coord < 0:
+                        return error_dict("Bad roi coordinate."), 400
+                    if (i == 0 or i == 2) and coord > image.size[0]:
+                        return (
+                            error_dict(
+                                "The roi coordinate cannot be larger than the image size."
+                            ),
+                            400,
+                        )
+                    elif (i == 1 or i == 3) and coord > image.size[1]:
+                        return (
+                            error_dict(
+                                "The roi coordinate cannot be larger than the image size."
+                            ),
+                            400,
+                        )
+                    roi_box[i] = int(coord)
+                elif "%" in coord:
+                    coord = coord.replace("%", "")
+                    try:
+                        coord = int(coord)
+                    except BaseException:
+                        return (
+                            error_dict(
+                                "Error converting roi coordinate string to number!"
+                            ),
+                            400,
+                        )
+                    if coord > 100:
+                        return (
+                            error_dict("The coordinate cannot be more than 100%"),
+                            400,
+                        )
+                    elif coord < 0:
+                        return error_dict("Coordinate cannot be less than 0%"), 400
+                    if i == 0 or i == 2:
+                        coord = int(image.size[0] * coord / 100)
+                    elif i == 1 or i == 3:
+                        coord = int(image.size[1] * coord / 100)
+                    roi_box[i] = coord
+                else:
+                    return error_dict("Something is wrong with the roi coordinates!"), 400
+
+    new_image = image.copy()
+    new_image = new_image.crop(roi_box)
+    w, h = new_image.size
+    if h < 2 or w < 2:
+        return error_dict("Image is too small. Minimum size 2x2"), 400
+    new_image = interface([new_image])[0]
+
+    scaled = False
+    if "scale" in params.keys() and params["scale"] != 100:
+        value = params["scale"]
+        new_image.thumbnail(
+            (int(image.size[0] * value / 100), int(image.size[1] * value / 100)),
+            resample=3,
+        )
+        scaled = True
+    if "crop" in params.keys():
+        value = params["crop"]
+        if value:
+            new_image = new_image.crop(new_image.getbbox())
+            if "crop_margin" in params.keys():
+                crop_margin = params["crop_margin"]
+                if "px" in crop_margin:
+                    crop_margin = crop_margin.replace("px", "")
+                    crop_margin = abs(int(crop_margin))
+                    if crop_margin > 500:
+                        return (
+                            error_dict(
+                                "The crop_margin cannot be larger than the original image size."
+                            ),
+                            400,
+                        )
+                    new_image = add_margin(
+                        new_image,
+                        crop_margin,
+                        crop_margin,
+                        crop_margin,
+                        crop_margin,
+                        (0, 0, 0, 0),
+                    )
+                elif "%" in crop_margin:
+                    crop_margin = crop_margin.replace("%", "")
+                    crop_margin = int(crop_margin)
+                    new_image = add_margin(
+                        new_image,
+                        int(new_image.size[1] * crop_margin / 100),
+                        int(new_image.size[0] * crop_margin / 100),
+                        int(new_image.size[1] * crop_margin / 100),
+                        int(new_image.size[0] * crop_margin / 100),
+                        (0, 0, 0, 0),
+                    )
+        else:
+            if "position" in params.keys() and scaled is False:
+                value = params["position"]
+                if len(value) == 2:
+                    new_image = transparency_paste(
+                        Image.new("RGBA", image.size),
+                        new_image,
+                        (
+                            int(image.size[0] * value[0] / 100),
+                            int(image.size[1] * value[1] / 100),
+                        ),
+                    )
+                else:
+                    new_image = transparency_paste(
+                        Image.new("RGBA", image.size), new_image, roi_box
+                    )
+            elif scaled is False:
+                new_image = transparency_paste(
+                    Image.new("RGBA", image.size), new_image, roi_box
+                )
+
+    if "channels" in params.keys():
+        value = params["channels"]
+        if value == "alpha":
+            new_image = extract_alpha_channel(new_image)
+        else:
+            bg_changed = False
+            if "bg_color" in params.keys():
+                value = params["bg_color"]
+                if len(value) > 0:
+                    color = ImageColor.getcolor(value, "RGB")
+                    bg = Image.new("RGBA", new_image.size, color)
+                    bg = transparency_paste(bg, new_image, (0, 0))
+                    new_image = bg.copy()
+                    bg_changed = True
+            if "bg_image_url" in params.keys() and bg_changed is False:
+                value = params["bg_image_url"]
+                if len(value) > 0:
+                    try:
+                        bg = Image.open(io.BytesIO(requests.get(value).content))
+                    except BaseException:
+                        return error_dict("Error downloading background image!"), 400
+                    bg = bg.resize(new_image.size)
+                    bg = bg.convert("RGBA")
+                    bg = transparency_paste(bg, new_image, (0, 0))
+                    new_image = bg.copy()
+                    bg_changed = True
+            if not is_json_or_www_encoded:
+                if bg and bg_changed is False:
+                    bg = bg.resize(new_image.size)
+                    bg = bg.convert("RGBA")
+                    bg = transparency_paste(bg, new_image, (0, 0))
+                    new_image = bg.copy()
+    if "format" in params.keys():
+        value = params["format"]
+        if value == "jpg":
+            new_image = new_image.convert("RGB")
+            img_io = io.BytesIO()
+            new_image.save(img_io, "JPEG", quality=100)
+            img_io.seek(0)
+            return {"type": "jpg", "data": [img_io, new_image.size]}
+        elif value == "zip":
+            mask = extract_alpha_channel(new_image)
+            mask_buff = io.BytesIO()
+            mask.save(mask_buff, "PNG")
+            mask_buff.seek(0)
+            image_buff = io.BytesIO()
+            image.convert("RGB").save(image_buff, "JPEG")
+            image_buff.seek(0)
+            fileobj = io.BytesIO()
+            with zipfile.ZipFile(fileobj, "w") as zip_file:
+                zip_info = zipfile.ZipInfo(filename="color.jpg")
+                zip_info.date_time = time.localtime(time.time())[:6]
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zip_file.writestr(zip_info, image_buff.getvalue())
+                zip_info = zipfile.ZipInfo(filename="alpha.png")
+                zip_info.date_time = time.localtime(time.time())[:6]
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zip_file.writestr(zip_info, mask_buff.getvalue())
+            fileobj.seek(0)
+            return {"type": "zip", "data": [fileobj.read(), new_image.size]}
+        else:
+            buff = io.BytesIO()
+            new_image.save(buff, "PNG")
+            buff.seek(0)
+            return {"type": "png", "data": [buff, new_image.size]}
+    return (
+        error_dict(
+            "Something is wrong with the request or the HTTP API. Please open a new issue on GitHub! "
+            "This is an error in the code."
+        ),
+        400,
+    )
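+
+A minimal sketch of calling this handler directly, outside the web app. The interface
+argument is assumed to be an already constructed carvekit Interface, the image path is
+a placeholder from the test data, and params mirrors the dict produced by the Parameters
+schema; this is illustrative only:
+
+from PIL import Image
+
+img = Image.open("./tests/data/cat.jpg")
+params = {"size": "preview", "roi": "0% 0% 100% 100%", "crop": False,
+          "channels": "rgba", "format": "png", "bg_color": ""}
+result = process_remove_bg(interface, params, img, bg=None, is_json_or_www_encoded=True)
+if isinstance(result, tuple):   # (error_dict, status_code) on failure
+    print(result)
+else:                           # {"type": ..., "data": [buffer, size]} on success
+    buffer, _size = result["data"]
+    with open("cat_no_bg.png", "wb") as f:
+        f.write(buffer.getvalue())  # "png"/"jpg" return a BytesIO, "zip" returns raw bytes
+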
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/responses/api.html b/docs/api/carvekit/web/responses/api.html new file mode 100644 index 0000000..f1507e0 --- /dev/null +++ b/docs/api/carvekit/web/responses/api.html @@ -0,0 +1,95 @@ + + + + + + +carvekit.web.responses.api API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.responses.api

+
+
+
+ +Expand source code + +
def error_dict(error_text: str):
+    """
+    Generates an error dictionary containing the given error text
+    :param error_text: Error text
+    :return: error dictionary
+    """
+    resp = {"errors": [{"title": error_text}]}
+    return resp
+
+
+
+
+
+
+
+

Functions

+
+
+def error_dict(error_text: str) +
+
+

Generates an error dictionary containing the given error text +:param error_text: Error text +:return: error dictionary

+
+ +Expand source code + +
def error_dict(error_text: str):
+    """
+    Generates an error dictionary containing the given error text
+    :param error_text: Error text
+    :return: error dictionary
+    """
+    resp = {"errors": [{"title": error_text}]}
+    return resp
+
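+
+A quick illustration of the response shape this helper produces:
+
+resp = error_dict("File not found")
+assert resp == {"errors": [{"title": "File not found"}]}
+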
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/responses/index.html b/docs/api/carvekit/web/responses/index.html new file mode 100644 index 0000000..021527c --- /dev/null +++ b/docs/api/carvekit/web/responses/index.html @@ -0,0 +1,65 @@ + + + + + + +carvekit.web.responses API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.responses

+
+
+
+
+

Sub-modules

+
+
carvekit.web.responses.api
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/routers/api_router.html b/docs/api/carvekit/web/routers/api_router.html new file mode 100644 index 0000000..d99ca0e --- /dev/null +++ b/docs/api/carvekit/web/routers/api_router.html @@ -0,0 +1,517 @@ + + + + + + +carvekit.web.routers.api_router API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.routers.api_router

+
+
+
+ +Expand source code + +
import base64
+import http
+import io
+import time
+from json import JSONDecodeError
+from typing import Optional
+
+import requests
+from PIL import Image
+from fastapi import Header, Depends, Form, File, Request, APIRouter, UploadFile
+from fastapi import Response
+from pydantic import ValidationError
+from starlette.responses import JSONResponse
+
+from carvekit.web.deps import config, ml_processor
+from carvekit.web.handlers.response import handle_response, Authenticate
+from carvekit.web.responses.api import error_dict
+from carvekit.web.schemas.request import Parameters
+from carvekit.web.utils.net_utils import is_loopback
+
+api_router = APIRouter(prefix="", tags=["api"])
+
+
+# noinspection PyBroadException
+@api_router.post("/removebg")
+async def removebg(
+    request: Request,
+    image_file: Optional[bytes] = File(None),
+    auth: bool = Depends(Authenticate),
+    content_type: str = Header(""),
+    image_file_b64: Optional[str] = Form(None),
+    image_url: Optional[str] = Form(None),
+    bg_image_file: Optional[bytes] = File(None),
+    size: Optional[str] = Form("full"),
+    type: Optional[str] = Form("auto"),
+    format: Optional[str] = Form("auto"),
+    roi: str = Form("0% 0% 100% 100%"),
+    crop: bool = Form(False),
+    crop_margin: Optional[str] = Form("0px"),
+    scale: Optional[str] = Form("original"),
+    position: Optional[str] = Form("original"),
+    channels: Optional[str] = Form("rgba"),
+    add_shadow: bool = Form(False),  # Not supported at the moment
+    semitransparency: bool = Form(False),  # Not supported at the moment
+    bg_color: Optional[str] = Form(""),
+):
+    if auth is False:
+        return JSONResponse(content=error_dict("Missing API Key"), status_code=403)
+    if (
+        content_type not in ["application/x-www-form-urlencoded", "application/json"]
+        and "multipart/form-data" not in content_type
+    ):
+        return JSONResponse(
+            content=error_dict("Invalid request content type"), status_code=400
+        )
+
+    if image_url:
+        if not (
+            image_url.startswith("http://") or image_url.startswith("https://")
+        ) or is_loopback(image_url):
+            print(
+                f"Possible ssrf attempt to /api/removebg endpoint with image url: {image_url}"
+            )
+            return JSONResponse(
+                content=error_dict("Invalid image url."), status_code=400
+            )  # possible ssrf attempt
+
+    image = None
+    bg = None
+    parameters = None
+    if (
+        content_type == "application/x-www-form-urlencoded"
+        or "multipart/form-data" in content_type
+    ):
+        if image_file_b64 is None and image_url is None and image_file is None:
+            return JSONResponse(content=error_dict("File not found"), status_code=400)
+
+        if image_file_b64:
+            if len(image_file_b64) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            try:
+                image = Image.open(io.BytesIO(base64.b64decode(image_file_b64)))
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error decoding image!"), status_code=400
+                )
+        elif image_url:
+            try:
+                image = Image.open(io.BytesIO(requests.get(image_url).content))
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error downloading image!"), status_code=400
+                )
+        elif image_file:
+            if len(image_file) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            image = Image.open(io.BytesIO(image_file))
+
+        if bg_image_file:
+            if len(bg_image_file) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            bg = Image.open(io.BytesIO(bg_image_file))
+        try:
+            parameters = Parameters(
+                image_file_b64=image_file_b64,
+                image_url=image_url,
+                size=size,
+                type=type,
+                format=format,
+                roi=roi,
+                crop=crop,
+                crop_margin=crop_margin,
+                scale=scale,
+                position=position,
+                channels=channels,
+                add_shadow=add_shadow,
+                semitransparency=semitransparency,
+                bg_color=bg_color,
+            )
+        except ValidationError as e:
+            return JSONResponse(
+                content=e.json(), status_code=400, media_type="application/json"
+            )
+
+    else:
+        payload = None
+        try:
+            payload = await request.json()
+        except JSONDecodeError:
+            return JSONResponse(content=error_dict("Empty json"), status_code=400)
+        try:
+            parameters = Parameters(**payload)
+        except ValidationError as e:
+            return Response(
+                content=e.json(), status_code=400, media_type="application/json"
+            )
+        if parameters.image_file_b64 is None and parameters.image_url is None:
+            return JSONResponse(content=error_dict("File not found"), status_code=400)
+
+        if parameters.image_file_b64:
+            if len(parameters.image_file_b64) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            try:
+                image = Image.open(
+                    io.BytesIO(base64.b64decode(parameters.image_file_b64))
+                )
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error decoding image!"), status_code=400
+                )
+        elif parameters.image_url:
+            if not (
+                parameters.image_url.startswith("http://")
+                or parameters.image_url.startswith("https://")
+            ) or is_loopback(parameters.image_url):
+                print(
+                    f"Possible ssrf attempt to /api/removebg endpoint with image url: {parameters.image_url}"
+                )
+                return JSONResponse(
+                    content=error_dict("Invalid image url."), status_code=400
+                )  # possible ssrf attempt
+            try:
+                image = Image.open(
+                    io.BytesIO(requests.get(parameters.image_url).content)
+                )
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error downloading image!"), status_code=400
+                )
+        if image is None:
+            return JSONResponse(
+                content=error_dict("Error downloading image!"), status_code=400
+            )
+
+    job_id = ml_processor.job_create([parameters.dict(), image, bg, False])
+
+    while ml_processor.job_status(job_id) != "finished":
+        if ml_processor.job_status(job_id) == "not_found":
+            return JSONResponse(
+                content=error_dict("Job ID not found!"), status_code=500
+            )
+        time.sleep(5)
+
+    result = ml_processor.job_result(job_id)
+    return handle_response(result, image)
+
+
+@api_router.get("/account")
+def account():
+    """
+    Stub for compatibility with remove.bg api libraries
+    """
+    return JSONResponse(
+        content={
+            "data": {
+                "attributes": {
+                    "credits": {
+                        "total": 99999,
+                        "subscription": 99999,
+                        "payg": 99999,
+                        "enterprise": 99999,
+                    },
+                    "api": {"free_calls": 99999, "sizes": "all"},
+                }
+            }
+        },
+        status_code=200,
+    )
+
+
+@api_router.get("/admin/config")
+def status(auth: str = Depends(Authenticate)):
+    """
+    Returns the current server config.
+    """
+    if not auth or auth != "admin":
+        return JSONResponse(
+            content=error_dict("Authentication failed"), status_code=403
+        )
+    resp = JSONResponse(content=config.json(), status_code=200)
+    resp.headers["X-Credits-Charged"] = "0"
+    return resp
+
+
+
+
+
+
+
+

Functions

+
+
+def account() +
+
+

Stub for compatibility with remove.bg api libraries

+
+ +Expand source code + +
@api_router.get("/account")
+def account():
+    """
+    Stub for compatibility with remove.bg api libraries
+    """
+    return JSONResponse(
+        content={
+            "data": {
+                "attributes": {
+                    "credits": {
+                        "total": 99999,
+                        "subscription": 99999,
+                        "payg": 99999,
+                        "enterprise": 99999,
+                    },
+                    "api": {"free_calls": 99999, "sizes": "all"},
+                }
+            }
+        },
+        status_code=200,
+    )
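+
+A quick sanity check against a locally running server; the /api prefix and the default
+port 5000 are assumptions taken from the log messages in this router and the
+WebAPIConfig defaults:
+
+import requests
+
+r = requests.get("http://localhost:5000/api/account")
+print(r.json()["data"]["attributes"]["credits"]["total"])  # always 99999 for this stub
+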
+
+
+
+async def removebg(request: starlette.requests.Request, image_file: Optional[bytes] = File(None), auth: bool = Depends(Authenticate), content_type: str = Header(), image_file_b64: Optional[str] = Form(None), image_url: Optional[str] = Form(None), bg_image_file: Optional[bytes] = File(None), size: Optional[str] = Form(full), type: Optional[str] = Form(auto), format: Optional[str] = Form(auto), roi: str = Form(0% 0% 100% 100%), crop: bool = Form(False), crop_margin: Optional[str] = Form(0px), scale: Optional[str] = Form(original), position: Optional[str] = Form(original), channels: Optional[str] = Form(rgba), add_shadow: bool = Form(False), semitransparency: bool = Form(False), bg_color: Optional[str] = Form()) +
+
+
+
+ +Expand source code + +
@api_router.post("/removebg")
+async def removebg(
+    request: Request,
+    image_file: Optional[bytes] = File(None),
+    auth: bool = Depends(Authenticate),
+    content_type: str = Header(""),
+    image_file_b64: Optional[str] = Form(None),
+    image_url: Optional[str] = Form(None),
+    bg_image_file: Optional[bytes] = File(None),
+    size: Optional[str] = Form("full"),
+    type: Optional[str] = Form("auto"),
+    format: Optional[str] = Form("auto"),
+    roi: str = Form("0% 0% 100% 100%"),
+    crop: bool = Form(False),
+    crop_margin: Optional[str] = Form("0px"),
+    scale: Optional[str] = Form("original"),
+    position: Optional[str] = Form("original"),
+    channels: Optional[str] = Form("rgba"),
+    add_shadow: bool = Form(False),  # Not supported at the moment
+    semitransparency: bool = Form(False),  # Not supported at the moment
+    bg_color: Optional[str] = Form(""),
+):
+    if auth is False:
+        return JSONResponse(content=error_dict("Missing API Key"), status_code=403)
+    if (
+        content_type not in ["application/x-www-form-urlencoded", "application/json"]
+        and "multipart/form-data" not in content_type
+    ):
+        return JSONResponse(
+            content=error_dict("Invalid request content type"), status_code=400
+        )
+
+    if image_url:
+        if not (
+            image_url.startswith("http://") or image_url.startswith("https://")
+        ) or is_loopback(image_url):
+            print(
+                f"Possible ssrf attempt to /api/removebg endpoint with image url: {image_url}"
+            )
+            return JSONResponse(
+                content=error_dict("Invalid image url."), status_code=400
+            )  # possible ssrf attempt
+
+    image = None
+    bg = None
+    parameters = None
+    if (
+        content_type == "application/x-www-form-urlencoded"
+        or "multipart/form-data" in content_type
+    ):
+        if image_file_b64 is None and image_url is None and image_file is None:
+            return JSONResponse(content=error_dict("File not found"), status_code=400)
+
+        if image_file_b64:
+            if len(image_file_b64) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            try:
+                image = Image.open(io.BytesIO(base64.b64decode(image_file_b64)))
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error decoding image!"), status_code=400
+                )
+        elif image_url:
+            try:
+                image = Image.open(io.BytesIO(requests.get(image_url).content))
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error downloading image!"), status_code=400
+                )
+        elif image_file:
+            if len(image_file) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            image = Image.open(io.BytesIO(image_file))
+
+        if bg_image_file:
+            if len(bg_image_file) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            bg = Image.open(io.BytesIO(bg_image_file))
+        try:
+            parameters = Parameters(
+                image_file_b64=image_file_b64,
+                image_url=image_url,
+                size=size,
+                type=type,
+                format=format,
+                roi=roi,
+                crop=crop,
+                crop_margin=crop_margin,
+                scale=scale,
+                position=position,
+                channels=channels,
+                add_shadow=add_shadow,
+                semitransparency=semitransparency,
+                bg_color=bg_color,
+            )
+        except ValidationError as e:
+            return JSONResponse(
+                content=e.json(), status_code=400, media_type="application/json"
+            )
+
+    else:
+        payload = None
+        try:
+            payload = await request.json()
+        except JSONDecodeError:
+            return JSONResponse(content=error_dict("Empty json"), status_code=400)
+        try:
+            parameters = Parameters(**payload)
+        except ValidationError as e:
+            return Response(
+                content=e.json(), status_code=400, media_type="application/json"
+            )
+        if parameters.image_file_b64 is None and parameters.image_url is None:
+            return JSONResponse(content=error_dict("File not found"), status_code=400)
+
+        if parameters.image_file_b64:
+            if len(parameters.image_file_b64) == 0:
+                return JSONResponse(content=error_dict("Empty image"), status_code=400)
+            try:
+                image = Image.open(
+                    io.BytesIO(base64.b64decode(parameters.image_file_b64))
+                )
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error decoding image!"), status_code=400
+                )
+        elif parameters.image_url:
+            if not (
+                parameters.image_url.startswith("http://")
+                or parameters.image_url.startswith("https://")
+            ) or is_loopback(parameters.image_url):
+                print(
+                    f"Possible ssrf attempt to /api/removebg endpoint with image url: {parameters.image_url}"
+                )
+                return JSONResponse(
+                    content=error_dict("Invalid image url."), status_code=400
+                )  # possible ssrf attempt
+            try:
+                image = Image.open(
+                    io.BytesIO(requests.get(parameters.image_url).content)
+                )
+            except BaseException:
+                return JSONResponse(
+                    content=error_dict("Error downloading image!"), status_code=400
+                )
+        if image is None:
+            return JSONResponse(
+                content=error_dict("Error downloading image!"), status_code=400
+            )
+
+    job_id = ml_processor.job_create([parameters.dict(), image, bg, False])
+
+    while ml_processor.job_status(job_id) != "finished":
+        if ml_processor.job_status(job_id) == "not_found":
+            return JSONResponse(
+                content=error_dict("Job ID not found!"), status_code=500
+            )
+        time.sleep(5)
+
+    result = ml_processor.job_result(job_id)
+    return handle_response(result, image)
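+
+A client-side sketch for this endpoint. The /api prefix, the port and the X-Api-Key
+header name are assumptions (based on remove.bg compatibility and the log messages
+above), not something this module guarantees:
+
+import requests
+
+with open("./tests/data/cat.jpg", "rb") as f:
+    r = requests.post(
+        "http://localhost:5000/api/removebg",
+        files={"image_file": f},
+        data={"size": "preview", "format": "png"},
+        headers={"X-Api-Key": "<one of AuthConfig.allowed_tokens>"},
+    )
+r.raise_for_status()
+with open("cat_no_bg.png", "wb") as out:
+    out.write(r.content)  # response body is assumed to be the processed image
+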
+
+
+
+def status(auth: str = Depends(Authenticate)) +
+
+

Returns the current server config.

+
+ +Expand source code + +
@api_router.get("/admin/config")
+def status(auth: str = Depends(Authenticate)):
+    """
+    Returns the current server config.
+    """
+    if not auth or auth != "admin":
+        return JSONResponse(
+            content=error_dict("Authentication failed"), status_code=403
+        )
+    resp = JSONResponse(content=config.json(), status_code=200)
+    resp.headers["X-Credits-Charged"] = "0"
+    return resp
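+
+A minimal sketch of querying the admin endpoint; the URL prefix and the header name
+are assumptions, the token comes from AuthConfig.admin_token:
+
+import requests
+
+r = requests.get("http://localhost:5000/api/admin/config",
+                 headers={"X-Api-Key": "<AuthConfig.admin_token>"})
+print(r.headers.get("X-Credits-Charged"))  # "0"
+print(r.text)  # current WebAPIConfig serialized as JSON
+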
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/routers/index.html b/docs/api/carvekit/web/routers/index.html new file mode 100644 index 0000000..1da91f5 --- /dev/null +++ b/docs/api/carvekit/web/routers/index.html @@ -0,0 +1,65 @@ + + + + + + +carvekit.web.routers API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.routers

+
+
+
+
+

Sub-modules

+
+
carvekit.web.routers.api_router
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/schemas/config.html b/docs/api/carvekit/web/schemas/config.html new file mode 100644 index 0000000..3e5285e --- /dev/null +++ b/docs/api/carvekit/web/schemas/config.html @@ -0,0 +1,550 @@ + + + + + + +carvekit.web.schemas.config API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.schemas.config

+
+
+
+ +Expand source code + +
import secrets
+from typing import List
+from typing_extensions import Literal
+
+import torch.cuda
+from pydantic import BaseModel, validator
+
+
+class AuthConfig(BaseModel):
+    """Config for web api token authentication"""
+
+    auth: bool = True
+    """Enables Token Authentication for API"""
+    admin_token: str = secrets.token_hex(32)
+    """Admin Token"""
+    allowed_tokens: List[str] = [secrets.token_hex(32)]
+    """All allowed tokens"""
+
+
+class MLConfig(BaseModel):
+    """Config for ml part of framework"""
+
+    segmentation_network: Literal[
+        "u2net", "deeplabv3", "basnet", "tracer_b7"
+    ] = "tracer_b7"
+    """Segmentation Network"""
+    preprocessing_method: Literal["none", "stub", "autoscene", "auto"] = "autoscene"
+    """Pre-processing Method"""
+    postprocessing_method: Literal["fba", "cascade_fba", "none"] = "cascade_fba"
+    """Post-Processing Network"""
+    device: str = "cpu"
+    """Processing device"""
+    batch_size_pre: int = 5
+    """Batch size for preprocessing method"""
+    batch_size_seg: int = 5
+    """Batch size for segmentation network"""
+    batch_size_matting: int = 1
+    """Batch size for matting network"""
+    batch_size_refine: int = 1
+    """Batch size for refine network"""
+    seg_mask_size: int = 640
+    """The size of the input image for the segmentation neural network."""
+    matting_mask_size: int = 2048
+    """The size of the input image for the matting neural network."""
+    refine_mask_size: int = 900
+    """The size of the input image for the refine neural network."""
+    fp16: bool = False
+    """Use half precision for inference"""
+    trimap_dilation: int = 30
+    """Dilation size for trimap"""
+    trimap_erosion: int = 5
+    """Erosion levels for trimap"""
+    trimap_prob_threshold: int = 231
+    """Probability threshold for trimap generation"""
+
+    @validator("seg_mask_size")
+    def seg_mask_size_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect seg_mask_size!")
+
+    @validator("matting_mask_size")
+    def matting_mask_size_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect matting_mask_size!")
+
+    @validator("batch_size_seg")
+    def batch_size_seg_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect batch size!")
+
+    @validator("batch_size_matting")
+    def batch_size_matting_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect batch size!")
+
+    @validator("device")
+    def device_validator(cls, value):
+        if torch.cuda.is_available() is False and "cuda" in value:
+            raise ValueError(
+                "GPU is not available, but specified as processing device!"
+            )
+        if "cuda" not in value and "cpu" != value:
+            raise ValueError("Unknown processing device! It should be cpu or cuda!")
+        return value
+
+
+class WebAPIConfig(BaseModel):
+    """FastAPI app config"""
+
+    port: int = 5000
+    """Web API port"""
+    host: str = "0.0.0.0"
+    """Web API host"""
+    ml: MLConfig = MLConfig()
+    """Config for ml part of framework"""
+    auth: AuthConfig = AuthConfig()
+    """Config for web api token authentication """
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class AuthConfig +(**data: Any) +
+
+

Config for web api token authentication

+

Create a new model by parsing and validating input data from keyword arguments.

+

Raises ValidationError if the input data cannot be parsed to form a valid model.

+
+ +Expand source code + +
class AuthConfig(BaseModel):
+    """Config for web api token authentication"""
+
+    auth: bool = True
+    """Enables Token Authentication for API"""
+    admin_token: str = secrets.token_hex(32)
+    """Admin Token"""
+    allowed_tokens: List[str] = [secrets.token_hex(32)]
+    """All allowed tokens"""
+
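+
+A minimal usage sketch with explicit tokens instead of the auto-generated
+secrets.token_hex defaults; the token values are placeholders:
+
+from carvekit.web.schemas.config import AuthConfig
+
+auth = AuthConfig(
+    auth=True,
+    admin_token="replace-with-a-long-random-hex-string",
+    allowed_tokens=["client-token-1", "client-token-2"],
+)
+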
+

Ancestors

+
    +
  • pydantic.main.BaseModel
  • +
  • pydantic.utils.Representation
  • +
+

Class variables

+
+
var admin_token :Β str
+
+

Admin Token

+
+
var allowed_tokens :Β List[str]
+
+

All allowed tokens

+
+
var auth :Β bool
+
+

Enables Token Authentication for API

+
+
+
+
+class MLConfig +(**data: Any) +
+
+

Config for ml part of framework

+

Create a new model by parsing and validating input data from keyword arguments.

+

Raises ValidationError if the input data cannot be parsed to form a valid model.

+
+ +Expand source code + +
class MLConfig(BaseModel):
+    """Config for ml part of framework"""
+
+    segmentation_network: Literal[
+        "u2net", "deeplabv3", "basnet", "tracer_b7"
+    ] = "tracer_b7"
+    """Segmentation Network"""
+    preprocessing_method: Literal["none", "stub", "autoscene", "auto"] = "autoscene"
+    """Pre-processing Method"""
+    postprocessing_method: Literal["fba", "cascade_fba", "none"] = "cascade_fba"
+    """Post-Processing Network"""
+    device: str = "cpu"
+    """Processing device"""
+    batch_size_pre: int = 5
+    """Batch size for preprocessing method"""
+    batch_size_seg: int = 5
+    """Batch size for segmentation network"""
+    batch_size_matting: int = 1
+    """Batch size for matting network"""
+    batch_size_refine: int = 1
+    """Batch size for refine network"""
+    seg_mask_size: int = 640
+    """The size of the input image for the segmentation neural network."""
+    matting_mask_size: int = 2048
+    """The size of the input image for the matting neural network."""
+    refine_mask_size: int = 900
+    """The size of the input image for the refine neural network."""
+    fp16: bool = False
+    """Use half precision for inference"""
+    trimap_dilation: int = 30
+    """Dilation size for trimap"""
+    trimap_erosion: int = 5
+    """Erosion levels for trimap"""
+    trimap_prob_threshold: int = 231
+    """Probability threshold for trimap generation"""
+
+    @validator("seg_mask_size")
+    def seg_mask_size_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect seg_mask_size!")
+
+    @validator("matting_mask_size")
+    def matting_mask_size_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect matting_mask_size!")
+
+    @validator("batch_size_seg")
+    def batch_size_seg_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect batch size!")
+
+    @validator("batch_size_matting")
+    def batch_size_matting_validator(cls, value: int, values):
+        if value > 0:
+            return value
+        else:
+            raise ValueError("Incorrect batch size!")
+
+    @validator("device")
+    def device_validator(cls, value):
+        if torch.cuda.is_available() is False and "cuda" in value:
+            raise ValueError(
+                "GPU is not available, but specified as processing device!"
+            )
+        if "cuda" not in value and "cpu" != value:
+            raise ValueError("Unknown processing device! It should be cpu or cuda!")
+        return value
+
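+
+A minimal usage sketch; values that fail the validators above (for example a
+non-positive batch size, or a cuda device without an available GPU) raise
+pydantic.ValidationError:
+
+from carvekit.web.schemas.config import MLConfig
+
+ml = MLConfig(
+    segmentation_network="tracer_b7",
+    preprocessing_method="autoscene",
+    postprocessing_method="cascade_fba",
+    device="cpu",
+    seg_mask_size=640,
+    refine_mask_size=900,
+    fp16=False,
+)
+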
+

Ancestors

+
    +
  • pydantic.main.BaseModel
  • +
  • pydantic.utils.Representation
  • +
+

Class variables

+
+
var batch_size_matting :Β int
+
+

Batch size for matting network

+
+
var batch_size_pre :Β int
+
+

Batch size for preprocessing method

+
+
var batch_size_refine :Β int
+
+

Batch size for refine network

+
+
var batch_size_seg :Β int
+
+

Batch size for segmentation network

+
+
var device :Β str
+
+

Processing device

+
+
var fp16 :Β bool
+
+

Use half precision for inference

+
+
var matting_mask_size :Β int
+
+

The size of the input image for the matting neural network.

+
+
var postprocessing_method :Β Literal['fba',Β 'cascade_fba',Β 'none']
+
+

Post-Processing Network

+
+
var preprocessing_method :Β Literal['none',Β 'stub',Β 'autoscene',Β 'auto']
+
+

Pre-processing Method

+
+
var refine_mask_size :Β int
+
+

The size of the input image for the refine neural network.

+
+
var seg_mask_size :Β int
+
+

The size of the input image for the segmentation neural network.

+
+
var segmentation_network :Β Literal['u2net',Β 'deeplabv3',Β 'basnet',Β 'tracer_b7']
+
+

Segmentation Network

+
+
var trimap_dilation :Β int
+
+

Dilation size for trimap

+
+
var trimap_erosion :Β int
+
+

Erosion levels for trimap

+
+
var trimap_prob_threshold :Β int
+
+

Probability threshold for trimap generation

+
+
+

Static methods

+
+
+def batch_size_matting_validator(value:Β int, values) +
+
+
+
+ +Expand source code + +
@validator("batch_size_matting")
+def batch_size_matting_validator(cls, value: int, values):
+    if value > 0:
+        return value
+    else:
+        raise ValueError("Incorrect batch size!")
+
+
+
+def batch_size_seg_validator(value:Β int, values) +
+
+
+
+ +Expand source code + +
@validator("batch_size_seg")
+def batch_size_seg_validator(cls, value: int, values):
+    if value > 0:
+        return value
+    else:
+        raise ValueError("Incorrect batch size!")
+
+
+
+def device_validator(value) +
+
+
+
+ +Expand source code + +
@validator("device")
+def device_validator(cls, value):
+    if torch.cuda.is_available() is False and "cuda" in value:
+        raise ValueError(
+            "GPU is not available, but specified as processing device!"
+        )
+    if "cuda" not in value and "cpu" != value:
+        raise ValueError("Unknown processing device! It should be cpu or cuda!")
+    return value
+
+
+
+def matting_mask_size_validator(value:Β int, values) +
+
+
+
+ +Expand source code + +
@validator("matting_mask_size")
+def matting_mask_size_validator(cls, value: int, values):
+    if value > 0:
+        return value
+    else:
+        raise ValueError("Incorrect matting_mask_size!")
+
+
+
+def seg_mask_size_validator(value:Β int, values) +
+
+
+
+ +Expand source code + +
@validator("seg_mask_size")
+def seg_mask_size_validator(cls, value: int, values):
+    if value > 0:
+        return value
+    else:
+        raise ValueError("Incorrect seg_mask_size!")
+
+
+
+
+
+class WebAPIConfig +(**data: Any) +
+
+

FastAPI app config

+

Create a new model by parsing and validating input data from keyword arguments.

+

Raises ValidationError if the input data cannot be parsed to form a valid model.

+
+ +Expand source code + +
class WebAPIConfig(BaseModel):
+    """FastAPI app config"""
+
+    port: int = 5000
+    """Web API port"""
+    host: str = "0.0.0.0"
+    """Web API host"""
+    ml: MLConfig = MLConfig()
+    """Config for ml part of framework"""
+    auth: AuthConfig = AuthConfig()
+    """Config for web api token authentication """
+
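+
+A minimal sketch assembling a full server config; unspecified fields fall back to
+the defaults shown above:
+
+from carvekit.web.schemas.config import WebAPIConfig, MLConfig, AuthConfig
+
+config = WebAPIConfig(
+    port=5000,
+    host="0.0.0.0",
+    ml=MLConfig(device="cpu", fp16=False),
+    auth=AuthConfig(auth=False),
+)
+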
+

Ancestors

+
    +
  • pydantic.main.BaseModel
  • +
  • pydantic.utils.Representation
  • +
+

Class variables

+
+
var auth :Β AuthConfig
+
+

Config for web api token authentication

+
+
var host :Β str
+
+

Web API host

+
+
var ml :Β MLConfig
+
+

Config for ml part of framework

+
+
var port :Β int
+
+

Web API port

+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/schemas/index.html b/docs/api/carvekit/web/schemas/index.html new file mode 100644 index 0000000..20ae6cd --- /dev/null +++ b/docs/api/carvekit/web/schemas/index.html @@ -0,0 +1,70 @@ + + + + + + +carvekit.web.schemas API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.schemas

+
+
+
+
+

Sub-modules

+
+
carvekit.web.schemas.config
+
+
+
+
carvekit.web.schemas.request
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/schemas/request.html b/docs/api/carvekit/web/schemas/request.html new file mode 100644 index 0000000..faaa723 --- /dev/null +++ b/docs/api/carvekit/web/schemas/request.html @@ -0,0 +1,397 @@ + + + + + + +carvekit.web.schemas.request API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.schemas.request

+
+
+
+ +Expand source code + +
import re
+from typing import Optional
+
+from pydantic import BaseModel, validator
+from typing_extensions import Literal
+
+
+class Parameters(BaseModel):
+    image_file_b64: Optional[str] = ""
+    image_url: Optional[str] = ""
+    size: Optional[Literal["preview", "full", "auto"]] = "preview"
+    type: Optional[
+        Literal["auto", "product", "person", "car"]
+    ] = "auto"  # Not supported at the moment
+    format: Optional[Literal["auto", "jpg", "png", "zip"]] = "auto"
+    roi: str = "0% 0% 100% 100%"
+    crop: bool = False
+    crop_margin: Optional[str] = "0px"
+    scale: Optional[str] = "original"
+    position: Optional[str] = "original"
+    channels: Optional[Literal["rgba", "alpha"]] = "rgba"
+    add_shadow: str = "false"  # Not supported at the moment
+    semitransparency: str = "false"  # Not supported at the moment
+    bg_color: Optional[str] = ""
+    bg_image_url: Optional[str] = ""
+
+    @validator("crop_margin")
+    def crop_margin_validator(cls, value):
+        if not re.match(r"[0-9]+(px|%)$", value):
+            raise ValueError(
+                "crop_margin parameter is not valid"
+            )  # TODO: Add support for several values
+        if "%" in value and (int(value[:-1]) < 0 or int(value[:-1]) > 100):
+            raise ValueError("crop_margin must be in the range between 0% and 100%")
+        return value
+
+    @validator("scale")
+    def scale_validator(cls, value):
+        if value != "original" and (
+            not re.match(r"[0-9]+%$", value)
+            or not int(value[:-1]) <= 100
+            or not int(value[:-1]) >= 10
+        ):
+            raise ValueError("scale must be original or between 10% and 100%")
+
+        if value == "original":
+            return 100
+
+        return int(value[:-1])
+
+    @validator("position")
+    def position_validator(cls, value, values):
+        if len(value.split(" ")) > 2:
+            raise ValueError(
+                "Position must be a value from 0 to 100 "
+                "for both the vertical and horizontal axes, or two values, one per axis"
+            )
+
+        if value == "original":
+            return "original"
+        elif len(value.split(" ")) == 1:
+            return [int(value[:-1]), int(value[:-1])]
+        else:
+            return [int(value.split(" ")[0][:-1]), int(value.split(" ")[1][:-1])]
+
+    @validator("bg_color")
+    def bg_color_validator(cls, value):
+        if not re.match(r"(#{0,1}[0-9a-f]{3}){0,2}$", value):
+            raise ValueError("bg_color is not in hex")
+        if len(value) and value[0] != "#":
+            value = "#" + value
+        return value
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class Parameters +(**data: Any) +
+
+

Create a new model by parsing and validating input data from keyword arguments.

+

Raises ValidationError if the input data cannot be parsed to form a valid model.

+
+ +Expand source code + +
class Parameters(BaseModel):
+    image_file_b64: Optional[str] = ""
+    image_url: Optional[str] = ""
+    size: Optional[Literal["preview", "full", "auto"]] = "preview"
+    type: Optional[
+        Literal["auto", "product", "person", "car"]
+    ] = "auto"  # Not supported at the moment
+    format: Optional[Literal["auto", "jpg", "png", "zip"]] = "auto"
+    roi: str = "0% 0% 100% 100%"
+    crop: bool = False
+    crop_margin: Optional[str] = "0px"
+    scale: Optional[str] = "original"
+    position: Optional[str] = "original"
+    channels: Optional[Literal["rgba", "alpha"]] = "rgba"
+    add_shadow: str = "false"  # Not supported at the moment
+    semitransparency: str = "false"  # Not supported at the moment
+    bg_color: Optional[str] = ""
+    bg_image_url: Optional[str] = ""
+
+    @validator("crop_margin")
+    def crop_margin_validator(cls, value):
+        if not re.match(r"[0-9]+(px|%)$", value):
+            raise ValueError(
+                "crop_margin parameter is not valid"
+            )  # TODO: Add support for several values
+        if "%" in value and (int(value[:-1]) < 0 or int(value[:-1]) > 100):
+            raise ValueError("crop_margin must be in the range between 0% and 100%")
+        return value
+
+    @validator("scale")
+    def scale_validator(cls, value):
+        if value != "original" and (
+            not re.match(r"[0-9]+%$", value)
+            or not int(value[:-1]) <= 100
+            or not int(value[:-1]) >= 10
+        ):
+            raise ValueError("scale must be original or between 10% and 100%")
+
+        if value == "original":
+            return 100
+
+        return int(value[:-1])
+
+    @validator("position")
+    def position_validator(cls, value, values):
+        if len(value.split(" ")) > 2:
+            raise ValueError(
+                "Position must be a value from 0 to 100 "
+                "for both the vertical and horizontal axes, or two values, one per axis"
+            )
+
+        if value == "original":
+            return "original"
+        elif len(value.split(" ")) == 1:
+            return [int(value[:-1]), int(value[:-1])]
+        else:
+            return [int(value.split(" ")[0][:-1]), int(value.split(" ")[1][:-1])]
+
+    @validator("bg_color")
+    def bg_color_validator(cls, value):
+        if not re.match(r"(#{0,1}[0-9a-f]{3}){0,2}$", value):
+            raise ValueError("bg_color is not in hex")
+        if len(value) and value[0] != "#":
+            value = "#" + value
+        return value
+
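+
+A minimal usage sketch; note how the validators above normalize several fields
+(scale "50%" becomes the integer 50, and bg_color gains a leading "#"). The URL is
+a placeholder and is not fetched at validation time:
+
+from carvekit.web.schemas.request import Parameters
+
+p = Parameters(
+    image_url="https://example.com/cat.jpg",
+    size="preview",
+    format="png",
+    scale="50%",
+    bg_color="fff",
+)
+print(p.scale, p.bg_color)  # 50 #fff
+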
+

Ancestors

+
    +
  • pydantic.main.BaseModel
  • +
  • pydantic.utils.Representation
  • +
+

Class variables

+
+
var add_shadow :Β str
+
+
+
+
var bg_color :Β Optional[str]
+
+
+
+
var bg_image_url :Β Optional[str]
+
+
+
+
var channels :Β Optional[Literal['rgba',Β 'alpha']]
+
+
+
+
var crop :Β bool
+
+
+
+
var crop_margin :Β Optional[str]
+
+
+
+
var format :Β Optional[Literal['auto',Β 'jpg',Β 'png',Β 'zip']]
+
+
+
+
var image_file_b64 :Β Optional[str]
+
+
+
+
var image_url :Β Optional[str]
+
+
+
+
var position :Β Optional[str]
+
+
+
+
var roi :Β str
+
+
+
+
var scale :Β Optional[str]
+
+
+
+
var semitransparency :Β str
+
+
+
+
var size :Β Optional[Literal['preview',Β 'full',Β 'auto']]
+
+
+
+
var type :Β Optional[Literal['auto',Β 'product',Β 'person',Β 'car']]
+
+
+
+
+

Static methods

+
+
+def bg_color_validator(value) +
+
+
+
+ +Expand source code + +
@validator("bg_color")
+def bg_color_validator(cls, value):
+    if not re.match(r"(#{0,1}[0-9a-f]{3}){0,2}$", value):
+        raise ValueError("bg_color is not in hex")
+    if len(value) and value[0] != "#":
+        value = "#" + value
+    return value
+
+
+
+def crop_margin_validator(value) +
+
+
+
+ +Expand source code + +
@validator("crop_margin")
+def crop_margin_validator(cls, value):
+    if not re.match(r"[0-9]+(px|%)$", value):
+        raise ValueError(
+            "crop_margin parameter is not valid"
+        )  # TODO: Add support for several values
+    if "%" in value and (int(value[:-1]) < 0 or int(value[:-1]) > 100):
+        raise ValueError("crop_margin must be in the range between 0% and 100%")
+    return value
+
+
+
+def position_validator(value, values) +
+
+
+
+ +Expand source code + +
@validator("position")
+def position_validator(cls, value, values):
+    if len(value.split(" ")) > 2:
+        raise ValueError(
+            "Position must be a value from 0 to 100 "
+            "for both the vertical and horizontal axes, or two values, one per axis"
+        )
+
+    if value == "original":
+        return "original"
+    elif len(value.split(" ")) == 1:
+        return [int(value[:-1]), int(value[:-1])]
+    else:
+        return [int(value.split(" ")[0][:-1]), int(value.split(" ")[1][:-1])]
+
+
+
+def scale_validator(value) +
+
+
+
+ +Expand source code + +
@validator("scale")
+def scale_validator(cls, value):
+    if value != "original" and (
+        not re.match(r"[0-9]+%$", value)
+        or not int(value[:-1]) <= 100
+        or not int(value[:-1]) >= 10
+    ):
+        raise ValueError("scale must be original or between 10% and 100%")
+
+    if value == "original":
+        return 100
+
+    return int(value[:-1])
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/utils/index.html b/docs/api/carvekit/web/utils/index.html new file mode 100644 index 0000000..94dd635 --- /dev/null +++ b/docs/api/carvekit/web/utils/index.html @@ -0,0 +1,75 @@ + + + + + + +carvekit.web.utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.utils

+
+
+
+
+

Sub-modules

+
+
carvekit.web.utils.init_utils
+
+
+
+
carvekit.web.utils.net_utils
+
+
+
+
carvekit.web.utils.task_queue
+
+
+
+
+
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/utils/init_utils.html b/docs/api/carvekit/web/utils/init_utils.html new file mode 100644 index 0000000..da42da3 --- /dev/null +++ b/docs/api/carvekit/web/utils/init_utils.html @@ -0,0 +1,551 @@ + + + + + + +carvekit.web.utils.init_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.utils.init_utils

+
+
+
+ +Expand source code + +
import warnings
+from os import getenv
+from typing import Union
+
+from loguru import logger
+
+from carvekit.ml.wrap.cascadepsp import CascadePSP
+from carvekit.ml.wrap.scene_classifier import SceneClassifier
+from carvekit.web.schemas.config import WebAPIConfig, MLConfig, AuthConfig
+
+from carvekit.api.interface import Interface
+from carvekit.api.autointerface import AutoInterface
+
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from carvekit.ml.wrap.u2net import U2NET
+from carvekit.ml.wrap.deeplab_v3 import DeepLabV3
+from carvekit.ml.wrap.basnet import BASNET
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4
+
+
+from carvekit.pipelines.postprocessing import MattingMethod, CasMattingMethod
+from carvekit.pipelines.preprocessing import PreprocessingStub, AutoScene
+from carvekit.trimap.generator import TrimapGenerator
+
+
+def init_config() -> WebAPIConfig:
+    default_config = WebAPIConfig()
+    config = WebAPIConfig(
+        **dict(
+            port=int(getenv("CARVEKIT_PORT", default_config.port)),
+            host=getenv("CARVEKIT_HOST", default_config.host),
+            ml=MLConfig(
+                segmentation_network=getenv(
+                    "CARVEKIT_SEGMENTATION_NETWORK",
+                    default_config.ml.segmentation_network,
+                ),
+                preprocessing_method=getenv(
+                    "CARVEKIT_PREPROCESSING_METHOD",
+                    default_config.ml.preprocessing_method,
+                ),
+                postprocessing_method=getenv(
+                    "CARVEKIT_POSTPROCESSING_METHOD",
+                    default_config.ml.postprocessing_method,
+                ),
+                device=getenv("CARVEKIT_DEVICE", default_config.ml.device),
+                batch_size_pre=int(
+                    getenv("CARVEKIT_BATCH_SIZE_PRE", default_config.ml.batch_size_pre)
+                ),
+                batch_size_seg=int(
+                    getenv("CARVEKIT_BATCH_SIZE_SEG", default_config.ml.batch_size_seg)
+                ),
+                batch_size_matting=int(
+                    getenv(
+                        "CARVEKIT_BATCH_SIZE_MATTING",
+                        default_config.ml.batch_size_matting,
+                    )
+                ),
+                batch_size_refine=int(
+                    getenv(
+                        "CARVEKIT_BATCH_SIZE_REFINE",
+                        default_config.ml.batch_size_refine,
+                    )
+                ),
+                seg_mask_size=int(
+                    getenv("CARVEKIT_SEG_MASK_SIZE", default_config.ml.seg_mask_size)
+                ),
+                matting_mask_size=int(
+                    getenv(
+                        "CARVEKIT_MATTING_MASK_SIZE",
+                        default_config.ml.matting_mask_size,
+                    )
+                ),
+                refine_mask_size=int(
+                    getenv(
+                        "CARVEKIT_REFINE_MASK_SIZE",
+                        default_config.ml.refine_mask_size,
+                    )
+                ),
+                fp16=bool(int(getenv("CARVEKIT_FP16", default_config.ml.fp16))),
+                trimap_prob_threshold=int(
+                    getenv(
+                        "CARVEKIT_TRIMAP_PROB_THRESHOLD",
+                        default_config.ml.trimap_prob_threshold,
+                    )
+                ),
+                trimap_dilation=int(
+                    getenv(
+                        "CARVEKIT_TRIMAP_DILATION", default_config.ml.trimap_dilation
+                    )
+                ),
+                trimap_erosion=int(
+                    getenv("CARVEKIT_TRIMAP_EROSION", default_config.ml.trimap_erosion)
+                ),
+            ),
+            auth=AuthConfig(
+                auth=bool(
+                    int(getenv("CARVEKIT_AUTH_ENABLE", default_config.auth.auth))
+                ),
+                admin_token=getenv(
+                    "CARVEKIT_ADMIN_TOKEN", default_config.auth.admin_token
+                ),
+                allowed_tokens=default_config.auth.allowed_tokens
+                if getenv("CARVEKIT_ALLOWED_TOKENS") is None
+                else getenv("CARVEKIT_ALLOWED_TOKENS").split(","),
+            ),
+        )
+    )
+
+    logger.info(f"Admin token for Web API is {config.auth.admin_token}")
+    logger.debug(f"Running Web API with this config: {config.json()}")
+    return config
+
+
+def init_interface(config: Union[WebAPIConfig, MLConfig]) -> Interface:
+    if isinstance(config, WebAPIConfig):
+        config = config.ml
+    if config.preprocessing_method == "auto":
+        warnings.warn(
+            "Preprocessing_method is set to `auto`."
+            "We will use automatic methods to determine the best methods for your images! "
+            "Please note that this is not always the best option and all other options will be ignored!"
+        )
+        scene_classifier = SceneClassifier(
+            device=config.device, batch_size=config.batch_size_pre, fp16=config.fp16
+        )
+        object_classifier = SimplifiedYoloV4(
+            device=config.device, batch_size=config.batch_size_pre, fp16=config.fp16
+        )
+        return AutoInterface(
+            scene_classifier=scene_classifier,
+            object_classifier=object_classifier,
+            segmentation_batch_size=config.batch_size_seg,
+            postprocessing_batch_size=config.batch_size_matting,
+            postprocessing_image_size=config.matting_mask_size,
+            segmentation_device=config.device,
+            postprocessing_device=config.device,
+            fp16=config.fp16,
+        )
+
+    else:
+        if config.segmentation_network == "u2net":
+            seg_net = U2NET(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        elif config.segmentation_network == "deeplabv3":
+            seg_net = DeepLabV3(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        elif config.segmentation_network == "basnet":
+            seg_net = BASNET(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        elif config.segmentation_network == "tracer_b7":
+            seg_net = TracerUniversalB7(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        else:
+            seg_net = TracerUniversalB7(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+
+        if config.preprocessing_method == "stub":
+            preprocessing = PreprocessingStub()
+        elif config.preprocessing_method == "none":
+            preprocessing = None
+        elif config.preprocessing_method == "autoscene":
+            preprocessing = AutoScene(
+                scene_classifier=SceneClassifier(
+                    device=config.device,
+                    batch_size=config.batch_size_pre,
+                    fp16=config.fp16,
+                )
+            )
+        else:
+            preprocessing = None
+
+        if config.postprocessing_method == "fba":
+            fba = FBAMatting(
+                device=config.device,
+                batch_size=config.batch_size_matting,
+                input_tensor_size=config.matting_mask_size,
+                fp16=config.fp16,
+            )
+            trimap_generator = TrimapGenerator(
+                prob_threshold=config.trimap_prob_threshold,
+                kernel_size=config.trimap_dilation,
+                erosion_iters=config.trimap_erosion,
+            )
+            postprocessing = MattingMethod(
+                device=config.device,
+                matting_module=fba,
+                trimap_generator=trimap_generator,
+            )
+        elif config.postprocessing_method == "cascade_fba":
+            cascadepsp = CascadePSP(
+                device=config.device,
+                batch_size=config.batch_size_refine,
+                input_tensor_size=config.refine_mask_size,
+                fp16=config.fp16,
+            )
+            fba = FBAMatting(
+                device=config.device,
+                batch_size=config.batch_size_matting,
+                input_tensor_size=config.matting_mask_size,
+                fp16=config.fp16,
+            )
+            trimap_generator = TrimapGenerator(
+                prob_threshold=config.trimap_prob_threshold,
+                kernel_size=config.trimap_dilation,
+                erosion_iters=config.trimap_erosion,
+            )
+            postprocessing = CasMattingMethod(
+                device=config.device,
+                matting_module=fba,
+                trimap_generator=trimap_generator,
+                refining_module=cascadepsp,
+            )
+        elif config.postprocessing_method == "none":
+            postprocessing = None
+        else:
+            postprocessing = None
+
+        interface = Interface(
+            pre_pipe=preprocessing,
+            post_pipe=postprocessing,
+            seg_pipe=seg_net,
+            device=config.device,
+        )
+    return interface
+
+
+
+
+
+
+
+

Functions

+
+
+def init_config() -> WebAPIConfig +
+
+
+
+ +Expand source code + +
def init_config() -> WebAPIConfig:
+    default_config = WebAPIConfig()
+    config = WebAPIConfig(
+        **dict(
+            port=int(getenv("CARVEKIT_PORT", default_config.port)),
+            host=getenv("CARVEKIT_HOST", default_config.host),
+            ml=MLConfig(
+                segmentation_network=getenv(
+                    "CARVEKIT_SEGMENTATION_NETWORK",
+                    default_config.ml.segmentation_network,
+                ),
+                preprocessing_method=getenv(
+                    "CARVEKIT_PREPROCESSING_METHOD",
+                    default_config.ml.preprocessing_method,
+                ),
+                postprocessing_method=getenv(
+                    "CARVEKIT_POSTPROCESSING_METHOD",
+                    default_config.ml.postprocessing_method,
+                ),
+                device=getenv("CARVEKIT_DEVICE", default_config.ml.device),
+                batch_size_pre=int(
+                    getenv("CARVEKIT_BATCH_SIZE_PRE", default_config.ml.batch_size_pre)
+                ),
+                batch_size_seg=int(
+                    getenv("CARVEKIT_BATCH_SIZE_SEG", default_config.ml.batch_size_seg)
+                ),
+                batch_size_matting=int(
+                    getenv(
+                        "CARVEKIT_BATCH_SIZE_MATTING",
+                        default_config.ml.batch_size_matting,
+                    )
+                ),
+                batch_size_refine=int(
+                    getenv(
+                        "CARVEKIT_BATCH_SIZE_REFINE",
+                        default_config.ml.batch_size_refine,
+                    )
+                ),
+                seg_mask_size=int(
+                    getenv("CARVEKIT_SEG_MASK_SIZE", default_config.ml.seg_mask_size)
+                ),
+                matting_mask_size=int(
+                    getenv(
+                        "CARVEKIT_MATTING_MASK_SIZE",
+                        default_config.ml.matting_mask_size,
+                    )
+                ),
+                refine_mask_size=int(
+                    getenv(
+                        "CARVEKIT_REFINE_MASK_SIZE",
+                        default_config.ml.refine_mask_size,
+                    )
+                ),
+                fp16=bool(int(getenv("CARVEKIT_FP16", default_config.ml.fp16))),
+                trimap_prob_threshold=int(
+                    getenv(
+                        "CARVEKIT_TRIMAP_PROB_THRESHOLD",
+                        default_config.ml.trimap_prob_threshold,
+                    )
+                ),
+                trimap_dilation=int(
+                    getenv(
+                        "CARVEKIT_TRIMAP_DILATION", default_config.ml.trimap_dilation
+                    )
+                ),
+                trimap_erosion=int(
+                    getenv("CARVEKIT_TRIMAP_EROSION", default_config.ml.trimap_erosion)
+                ),
+            ),
+            auth=AuthConfig(
+                auth=bool(
+                    int(getenv("CARVEKIT_AUTH_ENABLE", default_config.auth.auth))
+                ),
+                admin_token=getenv(
+                    "CARVEKIT_ADMIN_TOKEN", default_config.auth.admin_token
+                ),
+                allowed_tokens=default_config.auth.allowed_tokens
+                if getenv("CARVEKIT_ALLOWED_TOKENS") is None
+                else getenv("CARVEKIT_ALLOWED_TOKENS").split(","),
+            ),
+        )
+    )
+
+    logger.info(f"Admin token for Web API is {config.auth.admin_token}")
+    logger.debug(f"Running Web API with this config: {config.json()}")
+    return config
+
+
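As the source above shows, `init_config()` builds the whole `WebAPIConfig` from `CARVEKIT_*` environment variables and falls back to the pydantic defaults for anything unset. A minimal sketch follows; the chosen values are illustrative, not recommendations.

``` python
# Hedged sketch: driving init_config() purely through environment variables.
# Unset variables fall back to the WebAPIConfig defaults.
import os

from carvekit.web.utils.init_utils import init_config

os.environ["CARVEKIT_SEGMENTATION_NETWORK"] = "tracer_b7"
os.environ["CARVEKIT_PREPROCESSING_METHOD"] = "autoscene"
os.environ["CARVEKIT_POSTPROCESSING_METHOD"] = "cascade_fba"
os.environ["CARVEKIT_DEVICE"] = "cpu"
os.environ["CARVEKIT_FP16"] = "0"

config = init_config()  # logs the admin token and the resulting config
print(config.ml.segmentation_network, config.ml.postprocessing_method)
```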
+
+def init_interface(config: Union[WebAPIConfig, MLConfig]) -> Interface +
+
+
+
+ +Expand source code + +
def init_interface(config: Union[WebAPIConfig, MLConfig]) -> Interface:
+    if isinstance(config, WebAPIConfig):
+        config = config.ml
+    if config.preprocessing_method == "auto":
+        warnings.warn(
+            "Preprocessing_method is set to `auto`."
+            "We will use automatic methods to determine the best methods for your images! "
+            "Please note that this is not always the best option and all other options will be ignored!"
+        )
+        scene_classifier = SceneClassifier(
+            device=config.device, batch_size=config.batch_size_pre, fp16=config.fp16
+        )
+        object_classifier = SimplifiedYoloV4(
+            device=config.device, batch_size=config.batch_size_pre, fp16=config.fp16
+        )
+        return AutoInterface(
+            scene_classifier=scene_classifier,
+            object_classifier=object_classifier,
+            segmentation_batch_size=config.batch_size_seg,
+            postprocessing_batch_size=config.batch_size_matting,
+            postprocessing_image_size=config.matting_mask_size,
+            segmentation_device=config.device,
+            postprocessing_device=config.device,
+            fp16=config.fp16,
+        )
+
+    else:
+        if config.segmentation_network == "u2net":
+            seg_net = U2NET(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        elif config.segmentation_network == "deeplabv3":
+            seg_net = DeepLabV3(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        elif config.segmentation_network == "basnet":
+            seg_net = BASNET(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        elif config.segmentation_network == "tracer_b7":
+            seg_net = TracerUniversalB7(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+        else:
+            seg_net = TracerUniversalB7(
+                device=config.device,
+                batch_size=config.batch_size_seg,
+                input_image_size=config.seg_mask_size,
+                fp16=config.fp16,
+            )
+
+        if config.preprocessing_method == "stub":
+            preprocessing = PreprocessingStub()
+        elif config.preprocessing_method == "none":
+            preprocessing = None
+        elif config.preprocessing_method == "autoscene":
+            preprocessing = AutoScene(
+                scene_classifier=SceneClassifier(
+                    device=config.device,
+                    batch_size=config.batch_size_pre,
+                    fp16=config.fp16,
+                )
+            )
+        else:
+            preprocessing = None
+
+        if config.postprocessing_method == "fba":
+            fba = FBAMatting(
+                device=config.device,
+                batch_size=config.batch_size_matting,
+                input_tensor_size=config.matting_mask_size,
+                fp16=config.fp16,
+            )
+            trimap_generator = TrimapGenerator(
+                prob_threshold=config.trimap_prob_threshold,
+                kernel_size=config.trimap_dilation,
+                erosion_iters=config.trimap_erosion,
+            )
+            postprocessing = MattingMethod(
+                device=config.device,
+                matting_module=fba,
+                trimap_generator=trimap_generator,
+            )
+        elif config.postprocessing_method == "cascade_fba":
+            cascadepsp = CascadePSP(
+                device=config.device,
+                batch_size=config.batch_size_refine,
+                input_tensor_size=config.refine_mask_size,
+                fp16=config.fp16,
+            )
+            fba = FBAMatting(
+                device=config.device,
+                batch_size=config.batch_size_matting,
+                input_tensor_size=config.matting_mask_size,
+                fp16=config.fp16,
+            )
+            trimap_generator = TrimapGenerator(
+                prob_threshold=config.trimap_prob_threshold,
+                kernel_size=config.trimap_dilation,
+                erosion_iters=config.trimap_erosion,
+            )
+            postprocessing = CasMattingMethod(
+                device=config.device,
+                matting_module=fba,
+                trimap_generator=trimap_generator,
+                refining_module=cascadepsp,
+            )
+        elif config.postprocessing_method == "none":
+            postprocessing = None
+        else:
+            postprocessing = None
+
+        interface = Interface(
+            pre_pipe=preprocessing,
+            post_pipe=postprocessing,
+            seg_pipe=seg_net,
+            device=config.device,
+        )
+    return interface
+
+
+
+
+
+
+
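`init_interface()` accepts either a full `WebAPIConfig` or just its `ml` section. A hedged sketch of calling it directly with an `MLConfig` follows; it assumes that `MLConfig`, like `WebAPIConfig` above, supplies defaults for any omitted field, and that the returned `Interface` is callable on a list of image paths as elsewhere in the project.

``` python
# Hedged sketch: building an Interface from an MLConfig without environment variables.
# Assumes MLConfig has defaults for omitted fields; the values below are illustrative.
from carvekit.web.schemas.config import MLConfig
from carvekit.web.utils.init_utils import init_interface

ml_config = MLConfig(
    segmentation_network="tracer_b7",
    preprocessing_method="none",
    postprocessing_method="cascade_fba",
    device="cpu",
    fp16=False,
)
interface = init_interface(ml_config)

# The returned Interface is used like any other carvekit interface:
images_without_background = interface(["./tests/data/cat.jpg"])
images_without_background[0].save("cat_no_bg.png")
```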
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/utils/net_utils.html b/docs/api/carvekit/web/utils/net_utils.html new file mode 100644 index 0000000..4df1b17 --- /dev/null +++ b/docs/api/carvekit/web/utils/net_utils.html @@ -0,0 +1,139 @@ + + + + + + +carvekit.web.utils.net_utils API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.utils.net_utils

+
+
+
+ +Expand source code + +
import socket
+import struct
+from typing import Optional
+from urllib.parse import urlparse
+
+
+def is_loopback(address):
+    host: Optional[str] = None
+
+    try:
+        parsed_url = urlparse(address)
+        host = parsed_url.hostname
+    except ValueError:
+        return False  # url is not even a url
+
+    loopback_checker = {
+        socket.AF_INET: lambda x: struct.unpack("!I", socket.inet_aton(x))[0]
+        >> (32 - 8)
+        == 127,
+        socket.AF_INET6: lambda x: x == "::1",
+    }
+    for family in (socket.AF_INET, socket.AF_INET6):
+        try:
+            r = socket.getaddrinfo(host, None, family, socket.SOCK_STREAM)
+        except socket.gaierror:
+            continue
+        for family, _, _, _, sockaddr in r:
+            if loopback_checker[family](sockaddr[0]):
+                return True
+
+    if host in ("localhost",):
+        return True
+
+    return False
+
+
+
+
+
+
+
+

Functions

+
+
+def is_loopback(address) +
+
+
+
+ +Expand source code + +
def is_loopback(address):
+    host: Optional[str] = None
+
+    try:
+        parsed_url = urlparse(address)
+        host = parsed_url.hostname
+    except ValueError:
+        return False  # url is not even a url
+
+    loopback_checker = {
+        socket.AF_INET: lambda x: struct.unpack("!I", socket.inet_aton(x))[0]
+        >> (32 - 8)
+        == 127,
+        socket.AF_INET6: lambda x: x == "::1",
+    }
+    for family in (socket.AF_INET, socket.AF_INET6):
+        try:
+            r = socket.getaddrinfo(host, None, family, socket.SOCK_STREAM)
+        except socket.gaierror:
+            continue
+        for family, _, _, _, sockaddr in r:
+            if loopback_checker[family](sockaddr[0]):
+                return True
+
+    if host in ("localhost",):
+        return True
+
+    return False
+
+
+
+
+
+
+
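A quick sketch of how `is_loopback()` behaves: it extracts the hostname from the given URL, resolves it over IPv4 and IPv6, and reports whether it resolves to a loopback address. Results for non-local hosts depend on name resolution.

``` python
# Minimal usage sketch of is_loopback(); outputs depend on local name resolution.
from carvekit.web.utils.net_utils import is_loopback

print(is_loopback("http://127.0.0.1:5000"))  # True
print(is_loopback("http://localhost:5000"))  # True
print(is_loopback("https://example.com"))    # False, unless it resolves to 127.0.0.0/8 or ::1
```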
+ +
+ + + \ No newline at end of file diff --git a/docs/api/carvekit/web/utils/task_queue.html b/docs/api/carvekit/web/utils/task_queue.html new file mode 100644 index 0000000..f685f87 --- /dev/null +++ b/docs/api/carvekit/web/utils/task_queue.html @@ -0,0 +1,481 @@ + + + + + + +carvekit.web.utils.task_queue API documentation + + + + + + + + + + + +
+
+
+

Module carvekit.web.utils.task_queue

+
+
+
+ +Expand source code + +
import gc
+import threading
+import time
+import uuid
+from typing import Optional
+
+from loguru import logger
+
+from carvekit.api.interface import Interface
+from carvekit.web.schemas.config import WebAPIConfig
+from carvekit.web.utils.init_utils import init_interface
+from carvekit.web.other.removebg import process_remove_bg
+
+
+class MLProcessor(threading.Thread):
+    """Simple ml task queue processor"""
+
+    def __init__(self, api_config: WebAPIConfig):
+        super().__init__()
+        self.api_config = api_config
+        self.interface: Optional[Interface] = None
+        self.jobs = {}
+        self.completed_jobs = {}
+
+    def run(self):
+        """Starts listening for new jobs."""
+        unused_completed_jobs_timer = time.time()
+        if self.interface is None:
+            self.interface = init_interface(self.api_config)
+        while True:
+            # Every 60 seconds, clear completed jobs that are older than an hour
+            if time.time() - unused_completed_jobs_timer > 60:
+                self.clear_old_completed_jobs()
+                unused_completed_jobs_timer = time.time()
+
+            if len(self.jobs.keys()) >= 1:
+                id = list(self.jobs.keys())[0]
+                data = self.jobs[id]
+                # TODO add pydantic scheme here
+                response = process_remove_bg(
+                    self.interface, data[0], data[1], data[2], data[3]
+                )
+                self.completed_jobs[id] = [response, time.time()]
+                try:
+                    del self.jobs[id]
+                except (KeyError, NameError) as e:
+                    logger.error(f"Something went wrong with Task Queue: {str(e)}")
+                gc.collect()
+            else:
+                time.sleep(1)
+                continue
+
+    def clear_old_completed_jobs(self):
+        """Clears old completed jobs"""
+
+        if len(self.completed_jobs.keys()) >= 1:
+            for job_id in list(self.completed_jobs.keys()):
+                job_finished_time = self.completed_jobs[job_id][1]
+                if time.time() - job_finished_time > 3600:
+                    try:
+                        del self.completed_jobs[job_id]
+                    except (KeyError, NameError) as e:
+                        logger.error(f"Something went wrong with Task Queue: {str(e)}")
+            gc.collect()
+
+    def job_status(self, id: str) -> str:
+        """
+        Returns current job status
+
+        Args:
+            id: id of the job
+
+        Returns:
+            Current job status for specified id. Job status can be [finished, wait, not_found]
+        """
+        if id in self.completed_jobs.keys():
+            return "finished"
+        elif id in self.jobs.keys():
+            return "wait"
+        else:
+            return "not_found"
+
+    def job_result(self, id: str):
+        """
+        Returns job processing result.
+
+        Args:
+            id: id of the job
+
+        Returns:
+            job processing result.
+        """
+        if id in self.completed_jobs.keys():
+            data = self.completed_jobs[id][0]
+            try:
+                del self.completed_jobs[id]
+            except (KeyError, NameError):
+                pass
+            return data
+        else:
+            return False
+
+    def job_create(self, data: list):
+        """
+        Send job to ML Processor
+
+        Args:
+            data: data object
+        """
+        if self.is_alive() is False:
+            self.start()
+        id = uuid.uuid4().hex
+        self.jobs[id] = data
+        return id
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class MLProcessor +(api_config: WebAPIConfig) +
+
+

Simple ml task queue processor

+

This constructor should always be called with keyword arguments. Arguments are:

+

group should be None; reserved for future extension when a ThreadGroup +class is implemented.

+

target is the callable object to be invoked by the run() +method. Defaults to None, meaning nothing is called.

+

name is the thread name. By default, a unique name is constructed of +the form "Thread-N" where N is a small decimal number.

+

args is the argument tuple for the target invocation. Defaults to ().

+

kwargs is a dictionary of keyword arguments for the target +invocation. Defaults to {}.

+

If a subclass overrides the constructor, it must make sure to invoke +the base class constructor (Thread.__init__()) before doing anything +else to the thread.

+
+ +Expand source code + +
class MLProcessor(threading.Thread):
+    """Simple ml task queue processor"""
+
+    def __init__(self, api_config: WebAPIConfig):
+        super().__init__()
+        self.api_config = api_config
+        self.interface: Optional[Interface] = None
+        self.jobs = {}
+        self.completed_jobs = {}
+
+    def run(self):
+        """Starts listening for new jobs."""
+        unused_completed_jobs_timer = time.time()
+        if self.interface is None:
+            self.interface = init_interface(self.api_config)
+        while True:
+            # Every 60 seconds, clear completed jobs that are older than an hour
+            if time.time() - unused_completed_jobs_timer > 60:
+                self.clear_old_completed_jobs()
+                unused_completed_jobs_timer = time.time()
+
+            if len(self.jobs.keys()) >= 1:
+                id = list(self.jobs.keys())[0]
+                data = self.jobs[id]
+                # TODO add pydantic scheme here
+                response = process_remove_bg(
+                    self.interface, data[0], data[1], data[2], data[3]
+                )
+                self.completed_jobs[id] = [response, time.time()]
+                try:
+                    del self.jobs[id]
+                except (KeyError, NameError) as e:
+                    logger.error(f"Something went wrong with Task Queue: {str(e)}")
+                gc.collect()
+            else:
+                time.sleep(1)
+                continue
+
+    def clear_old_completed_jobs(self):
+        """Clears old completed jobs"""
+
+        if len(self.completed_jobs.keys()) >= 1:
+            for job_id in list(self.completed_jobs.keys()):
+                job_finished_time = self.completed_jobs[job_id][1]
+                if time.time() - job_finished_time > 3600:
+                    try:
+                        del self.completed_jobs[job_id]
+                    except (KeyError, NameError) as e:
+                        logger.error(f"Something went wrong with Task Queue: {str(e)}")
+            gc.collect()
+
+    def job_status(self, id: str) -> str:
+        """
+        Returns current job status
+
+        Args:
+            id: id of the job
+
+        Returns:
+            Current job status for specified id. Job status can be [finished, wait, not_found]
+        """
+        if id in self.completed_jobs.keys():
+            return "finished"
+        elif id in self.jobs.keys():
+            return "wait"
+        else:
+            return "not_found"
+
+    def job_result(self, id: str):
+        """
+        Returns job processing result.
+
+        Args:
+            id: id of the job
+
+        Returns:
+            job processing result.
+        """
+        if id in self.completed_jobs.keys():
+            data = self.completed_jobs[id][0]
+            try:
+                del self.completed_jobs[id]
+            except (KeyError, NameError):
+                pass
+            return data
+        else:
+            return False
+
+    def job_create(self, data: list):
+        """
+        Send job to ML Processor
+
+        Args:
+            data: data object
+        """
+        if self.is_alive() is False:
+            self.start()
+        id = uuid.uuid4().hex
+        self.jobs[id] = data
+        return id
+
+

Ancestors

+
    +
  • threading.Thread
  • +
+

Methods

+
+
+def clear_old_completed_jobs(self) +
+
+

Clears old completed jobs

+
+ +Expand source code + +
def clear_old_completed_jobs(self):
+    """Clears old completed jobs"""
+
+    if len(self.completed_jobs.keys()) >= 1:
+        for job_id in list(self.completed_jobs.keys()):
+            job_finished_time = self.completed_jobs[job_id][1]
+            if time.time() - job_finished_time > 3600:
+                try:
+                    del self.completed_jobs[job_id]
+                except (KeyError, NameError) as e:
+                    logger.error(f"Something went wrong with Task Queue: {str(e)}")
+        gc.collect()
+
+
+
+def job_create(self, data: list) +
+
+

Send job to ML Processor

+

Args

+
+
data
+
data object
+
+
+ +Expand source code + +
def job_create(self, data: list):
+    """
+    Send job to ML Processor
+
+    Args:
+        data: data object
+    """
+    if self.is_alive() is False:
+        self.start()
+    id = uuid.uuid4().hex
+    self.jobs[id] = data
+    return id
+
+
+
+def job_result(self, id: str) +
+
+

Returns job processing result.

+

Args

+
+
id
+
id of the job
+
+

Returns

+

job processing result.

+
+ +Expand source code + +
def job_result(self, id: str):
+    """
+    Returns job processing result.
+
+    Args:
+        id: id of the job
+
+    Returns:
+        job processing result.
+    """
+    if id in self.completed_jobs.keys():
+        data = self.completed_jobs[id][0]
+        try:
+            del self.completed_jobs[id]
+        except (KeyError, NameError):
+            pass
+        return data
+    else:
+        return False
+
+
+
+def job_status(self, id: str) -> str +
+
+

Returns current job status

+

Args

+
+
id
+
id of the job
+
+

Returns

+

Current job status for specified id. Job status can be [finished, wait, not_found]

+
+ +Expand source code + +
def job_status(self, id: str) -> str:
+    """
+    Returns current job status
+
+    Args:
+        id: id of the job
+
+    Returns:
+        Current job status for specified id. Job status can be [finished, wait, not_found]
+    """
+    if id in self.completed_jobs.keys():
+        return "finished"
+    elif id in self.jobs.keys():
+        return "wait"
+    else:
+        return "not_found"
+
+
+
+def run(self) +
+
+

Starts listening for new jobs.

+
+ +Expand source code + +
def run(self):
+    """Starts listening for new jobs."""
+    unused_completed_jobs_timer = time.time()
+    if self.interface is None:
+        self.interface = init_interface(self.api_config)
+    while True:
+        # Every 60 seconds, clear completed jobs that are older than an hour
+        if time.time() - unused_completed_jobs_timer > 60:
+            self.clear_old_completed_jobs()
+            unused_completed_jobs_timer = time.time()
+
+        if len(self.jobs.keys()) >= 1:
+            id = list(self.jobs.keys())[0]
+            data = self.jobs[id]
+            # TODO add pydantic scheme here
+            response = process_remove_bg(
+                self.interface, data[0], data[1], data[2], data[3]
+            )
+            self.completed_jobs[id] = [response, time.time()]
+            try:
+                del self.jobs[id]
+            except (KeyError, NameError) as e:
+                logger.error(f"Something went wrong with Task Queue: {str(e)}")
+            gc.collect()
+        else:
+            time.sleep(1)
+            continue
+
+
+
+
+
+
+
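The documented methods form a simple job lifecycle: `job_create()` enqueues work and starts the worker thread on first use, `job_status()` is polled until it reports `finished`, and `job_result()` hands the response back once. A hedged sketch follows; the exact layout of the four-element `data` list is whatever `process_remove_bg` expects, so the items below are placeholders.

``` python
# Hedged sketch of the MLProcessor job lifecycle.
# The contents of `data` are placeholders; process_remove_bg defines the real layout.
import time

from carvekit.web.schemas.config import WebAPIConfig
from carvekit.web.utils.task_queue import MLProcessor

processor = MLProcessor(api_config=WebAPIConfig())

data = [None, None, None, None]      # placeholder: four items consumed by process_remove_bg
job_id = processor.job_create(data)  # starts the worker thread on the first job

while processor.job_status(job_id) == "wait":
    time.sleep(1)

result = processor.job_result(job_id)  # the processed response, or False if the id is unknown
```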
+ +
+ + + \ No newline at end of file diff --git a/docs/api/cascadepsp.html b/docs/api/cascadepsp.html new file mode 100644 index 0000000..62d0a40 --- /dev/null +++ b/docs/api/cascadepsp.html @@ -0,0 +1,902 @@ + + + + + + +cascadepsp API documentation + + + + + + + + + + + +
+
+
+

Module cascadepsp

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+import warnings
+
+import PIL
+import numpy as np
+import torch
+from PIL import Image
+from torchvision import transforms
+from typing import Union, List
+
+from carvekit.ml.arch.cascadepsp.pspnet import RefinementModule
+from carvekit.ml.arch.cascadepsp.utils import (
+    process_im_single_pass,
+    process_high_res_im,
+)
+from carvekit.ml.files.models_loc import cascadepsp_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["CascadePSP"]
+
+
+class CascadePSP(RefinementModule):
+    """
+    CascadePSP to refine the mask from segmentation network
+    """
+
+    def __init__(
+        self,
+        device="cpu",
+        input_tensor_size: int = 900,
+        batch_size: int = 1,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        mask_binary_threshold=127,
+        global_step_only=False,
+        processing_accelerate_image_size=2048,
+    ):
+        """
+        Initialize the CascadePSP model
+
+        Args:
+            device: processing device
+            input_tensor_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use half precision
+            global_step_only: if True, only global step will be used for prediction. See paper for details.
+            mask_binary_threshold: threshold for binary mask, default 127, set to 0 for no threshold
+            processing_accelerate_image_size: thumbnail size for image processing acceleration. Set to 0 to disable
+
+        """
+        super().__init__()
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        self.mask_binary_threshold = mask_binary_threshold
+        self.global_step_only = global_step_only
+        self.processing_accelerate_image_size = processing_accelerate_image_size
+        self.input_tensor_size = input_tensor_size
+
+        self.to(device)
+        if batch_size > 1:
+            warnings.warn(
+                "Batch size > 1 is experimental feature for CascadePSP."
+                " Please, don't use it if you have GPU with small memory!"
+            )
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(cascadepsp_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+        self._image_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+        self._seg_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(mean=[0.5], std=[0.5]),
+            ]
+        )
+
+    def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+        preprocessed_data = data.copy()
+        if self.batch_size == 1 and self.processing_accelerate_image_size > 0:
+            # We have only one image, so we can use image processing
+            # acceleration to speed up high-resolution image processing
+            preprocessed_data.thumbnail(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif self.batch_size == 1:
+            pass  # No need to do anything
+        elif self.batch_size > 1 and self.global_step_only is True:
+            # If we have more than one image and use only the global step,
+            # there is no reason to use image processing acceleration,
+            # because the image will be resized to input_tensor_size for the global step anyway
+            preprocessed_data = preprocessed_data.resize(
+                (self.input_tensor_size, self.input_tensor_size)
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and self.processing_accelerate_image_size > 0
+        ):
+            # If we have more than one image and use the local step,
+            # we can use image processing acceleration to speed up high-resolution processing,
+            # but we need to resize the image to processing_accelerate_image_size so it can be stacked with the other images
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and not (self.processing_accelerate_image_size > 0)
+        ):
+            raise ValueError(
+                "If you use local step with batch_size > 2, "
+                "you need to set processing_accelerate_image_size > 0,"
+                "since we cannot stack images with different sizes to one batch"
+            )
+        else:  # some extra cases
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+
+        if data.mode == "RGB":
+            preprocessed_data = self._image_transform(
+                np.array(preprocessed_data)
+            ).unsqueeze(0)
+        elif data.mode == "L":
+            preprocessed_data = np.array(preprocessed_data)
+            if 0 < self.mask_binary_threshold <= 255:
+                preprocessed_data = (
+                    preprocessed_data > self.mask_binary_threshold
+                ).astype(np.uint8) * 255
+            elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0:
+                warnings.warn(
+                    "mask_binary_threshold should be in range [0, 255], "
+                    "but got {}. Disabling mask_binary_threshold!".format(
+                        self.mask_binary_threshold
+                    )
+                )
+
+            preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze(
+                0
+            )  # [H,W,1]
+
+        return preprocessed_data
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, mask: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            mask: input mask
+
+        Returns:
+            Segmentation mask as PIL Image instance
+
+        """
+        refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8")
+        return Image.fromarray(refined_mask).convert("L").resize(mask.size)
+
+    def safe_forward(self, im, seg, inter_s8=None, inter_s4=None):
+        """
+        Slightly pads the input image such that its length is a multiple of 8
+        """
+        b, _, ph, pw = seg.shape
+        if (ph % 8 != 0) or (pw % 8 != 0):
+            newH = (ph // 8 + 1) * 8
+            newW = (pw // 8 + 1) * 8
+            p_im = torch.zeros(b, 3, newH, newW, device=im.device)
+            p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+
+            p_im[:, :, 0:ph, 0:pw] = im
+            p_seg[:, :, 0:ph, 0:pw] = seg
+            im = p_im
+            seg = p_seg
+
+            if inter_s8 is not None:
+                p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8
+                inter_s8 = p_inter_s8
+            if inter_s4 is not None:
+                p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4
+                inter_s4 = p_inter_s4
+
+        images = super().__call__(im, seg, inter_s8, inter_s4)
+        return_im = {}
+
+        for key in ["pred_224", "pred_28_3", "pred_56_2"]:
+            return_im[key] = images[key][:, :, 0:ph, 0:pw]
+        del images
+
+        return return_im
+
+    def __call__(
+        self,
+        images: List[Union[str, pathlib.Path, PIL.Image.Image]],
+        masks: List[Union[str, pathlib.Path, PIL.Image.Image]],
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+            masks: Segmentation masks to refine
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+
+        if len(images) != len(masks):
+            raise ValueError(
+                "Len of specified arrays of images and trimaps should be equal!"
+            )
+
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for idx_batch in batch_generator(range(len(images)), self.batch_size):
+                inpt_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(images[x])), idx_batch
+                )
+
+                inpt_masks = thread_pool_processing(
+                    lambda x: convert_image(load_image(masks[x]), mode="L"), idx_batch
+                )
+
+                inpt_img_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_images
+                )
+                inpt_masks_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_masks
+                )
+                if self.batch_size > 1:  # We need to stack images, if batch_size > 1
+                    inpt_img_batches = torch.vstack(inpt_img_batches)
+                    inpt_masks_batches = torch.vstack(inpt_masks_batches)
+                else:
+                    inpt_img_batches = inpt_img_batches[
+                        0
+                    ]  # Get only one image from list
+                    inpt_masks_batches = inpt_masks_batches[0]
+
+                with torch.no_grad():
+                    inpt_img_batches = inpt_img_batches.to(self.device)
+                    inpt_masks_batches = inpt_masks_batches.to(self.device)
+                    if self.global_step_only:
+                        refined_batches = process_im_single_pass(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    else:
+                        refined_batches = process_high_res_im(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    refined_masks = refined_batches.cpu()
+                    del (inpt_img_batches, inpt_masks_batches, refined_batches)
+                collect_masks += thread_pool_processing(
+                    lambda x: self.data_postprocessing(refined_masks[x], inpt_masks[x]),
+                    range(len(inpt_masks)),
+                )
+            return collect_masks
+
+
+
+
+
+
+
+
+
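In practice the class is called with matching lists of source images and coarse masks and returns the refined masks as PIL images. A hedged usage sketch follows; the mask path below is a placeholder for any `L`-mode mask produced by a segmentation network.

``` python
# Hedged usage sketch: refining a coarse segmentation mask with CascadePSP.
# "cat_mask.png" is a placeholder for a mask produced by a segmentation network.
from carvekit.ml.wrap.cascadepsp import CascadePSP

refiner = CascadePSP(device="cpu", input_tensor_size=900, batch_size=1, fp16=False)

refined_masks = refiner(
    images=["./tests/data/cat.jpg"],
    masks=["./cat_mask.png"],
)
refined_masks[0].save("cat_mask_refined.png")  # PIL.Image.Image, same size as the input mask
```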
+

Classes

+
+
+class CascadePSP +(device='cpu', input_tensor_size: int = 900, batch_size: int = 1, load_pretrained: bool = True, fp16: bool = False, mask_binary_threshold=127, global_step_only=False, processing_accelerate_image_size=2048) +
+
+

CascadePSP to refine the mask from segmentation network

+

Initialize the CascadePSP model

+

Args

+
+
device
+
processing device
+
input_tensor_size
+
input image size
+
batch_size
+
the number of images that the neural network processes in one run
+
load_pretrained
+
loading pretrained model
+
fp16
+
use half precision
+
global_step_only
+
if True, only global step will be used for prediction. See paper for details.
+
mask_binary_threshold
+
threshold for binary mask, default 127, set to 0 for no threshold
+
processing_accelerate_image_size
+
thumbnail size for image processing acceleration. Set to 0 to disable
+
+
+ +Expand source code + +
class CascadePSP(RefinementModule):
+    """
+    CascadePSP to refine the mask from segmentation network
+    """
+
+    def __init__(
+        self,
+        device="cpu",
+        input_tensor_size: int = 900,
+        batch_size: int = 1,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        mask_binary_threshold=127,
+        global_step_only=False,
+        processing_accelerate_image_size=2048,
+    ):
+        """
+        Initialize the CascadePSP model
+
+        Args:
+            device: processing device
+            input_tensor_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use half precision
+            global_step_only: if True, only global step will be used for prediction. See paper for details.
+            mask_binary_threshold: threshold for binary mask, default 127, set to 0 for no threshold
+            processing_accelerate_image_size: thumbnail size for image processing acceleration. Set to 0 to disable
+
+        """
+        super().__init__()
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        self.mask_binary_threshold = mask_binary_threshold
+        self.global_step_only = global_step_only
+        self.processing_accelerate_image_size = processing_accelerate_image_size
+        self.input_tensor_size = input_tensor_size
+
+        self.to(device)
+        if batch_size > 1:
+            warnings.warn(
+                "Batch size > 1 is experimental feature for CascadePSP."
+                " Please, don't use it if you have GPU with small memory!"
+            )
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(cascadepsp_pretrained(), map_location=self.device)
+            )
+        self.eval()
+
+        self._image_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+        self._seg_transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(mean=[0.5], std=[0.5]),
+            ]
+        )
+
+    def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+        preprocessed_data = data.copy()
+        if self.batch_size == 1 and self.processing_accelerate_image_size > 0:
+            # We have only one image, so we can use image processing
+            # acceleration to speed up high-resolution image processing
+            preprocessed_data.thumbnail(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif self.batch_size == 1:
+            pass  # No need to do anything
+        elif self.batch_size > 1 and self.global_step_only is True:
+            # If we have more than one image and use only the global step,
+            # there is no reason to use image processing acceleration,
+            # because the image will be resized to input_tensor_size for the global step anyway
+            preprocessed_data = preprocessed_data.resize(
+                (self.input_tensor_size, self.input_tensor_size)
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and self.processing_accelerate_image_size > 0
+        ):
+            # If we have more than one image and use the local step,
+            # we can use image processing acceleration to speed up high-resolution processing,
+            # but we need to resize the image to processing_accelerate_image_size so it can be stacked with the other images
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+        elif (
+            self.batch_size > 1
+            and self.global_step_only is False
+            and not (self.processing_accelerate_image_size > 0)
+        ):
+            raise ValueError(
+                "If you use local step with batch_size > 2, "
+                "you need to set processing_accelerate_image_size > 0,"
+                "since we cannot stack images with different sizes to one batch"
+            )
+        else:  # some extra cases
+            preprocessed_data = preprocessed_data.resize(
+                (
+                    self.processing_accelerate_image_size,
+                    self.processing_accelerate_image_size,
+                )
+            )
+
+        if data.mode == "RGB":
+            preprocessed_data = self._image_transform(
+                np.array(preprocessed_data)
+            ).unsqueeze(0)
+        elif data.mode == "L":
+            preprocessed_data = np.array(preprocessed_data)
+            if 0 < self.mask_binary_threshold <= 255:
+                preprocessed_data = (
+                    preprocessed_data > self.mask_binary_threshold
+                ).astype(np.uint8) * 255
+            elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0:
+                warnings.warn(
+                    "mask_binary_threshold should be in range [0, 255], "
+                    "but got {}. Disabling mask_binary_threshold!".format(
+                        self.mask_binary_threshold
+                    )
+                )
+
+            preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze(
+                0
+            )  # [H,W,1]
+
+        return preprocessed_data
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, mask: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+            mask: input mask
+
+        Returns:
+            Segmentation mask as PIL Image instance
+
+        """
+        refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8")
+        return Image.fromarray(refined_mask).convert("L").resize(mask.size)
+
+    def safe_forward(self, im, seg, inter_s8=None, inter_s4=None):
+        """
+        Slightly pads the input image such that its length is a multiple of 8
+        """
+        b, _, ph, pw = seg.shape
+        if (ph % 8 != 0) or (pw % 8 != 0):
+            newH = (ph // 8 + 1) * 8
+            newW = (pw // 8 + 1) * 8
+            p_im = torch.zeros(b, 3, newH, newW, device=im.device)
+            p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+
+            p_im[:, :, 0:ph, 0:pw] = im
+            p_seg[:, :, 0:ph, 0:pw] = seg
+            im = p_im
+            seg = p_seg
+
+            if inter_s8 is not None:
+                p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8
+                inter_s8 = p_inter_s8
+            if inter_s4 is not None:
+                p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+                p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4
+                inter_s4 = p_inter_s4
+
+        images = super().__call__(im, seg, inter_s8, inter_s4)
+        return_im = {}
+
+        for key in ["pred_224", "pred_28_3", "pred_56_2"]:
+            return_im[key] = images[key][:, :, 0:ph, 0:pw]
+        del images
+
+        return return_im
+
+    def __call__(
+        self,
+        images: List[Union[str, pathlib.Path, PIL.Image.Image]],
+        masks: List[Union[str, pathlib.Path, PIL.Image.Image]],
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+            masks: Segmentation masks to refine
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+
+        if len(images) != len(masks):
+            raise ValueError(
+                "Len of specified arrays of images and trimaps should be equal!"
+            )
+
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for idx_batch in batch_generator(range(len(images)), self.batch_size):
+                inpt_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(images[x])), idx_batch
+                )
+
+                inpt_masks = thread_pool_processing(
+                    lambda x: convert_image(load_image(masks[x]), mode="L"), idx_batch
+                )
+
+                inpt_img_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_images
+                )
+                inpt_masks_batches = thread_pool_processing(
+                    self.data_preprocessing, inpt_masks
+                )
+                if self.batch_size > 1:  # We need to stack images, if batch_size > 1
+                    inpt_img_batches = torch.vstack(inpt_img_batches)
+                    inpt_masks_batches = torch.vstack(inpt_masks_batches)
+                else:
+                    inpt_img_batches = inpt_img_batches[
+                        0
+                    ]  # Get only one image from list
+                    inpt_masks_batches = inpt_masks_batches[0]
+
+                with torch.no_grad():
+                    inpt_img_batches = inpt_img_batches.to(self.device)
+                    inpt_masks_batches = inpt_masks_batches.to(self.device)
+                    if self.global_step_only:
+                        refined_batches = process_im_single_pass(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    else:
+                        refined_batches = process_high_res_im(
+                            self,
+                            inpt_img_batches,
+                            inpt_masks_batches,
+                            self.input_tensor_size,
+                        )
+
+                    refined_masks = refined_batches.cpu()
+                    del (inpt_img_batches, inpt_masks_batches, refined_batches)
+                collect_masks += thread_pool_processing(
+                    lambda x: self.data_postprocessing(refined_masks[x], inpt_masks[x]),
+                    range(len(inpt_masks)),
+                )
+            return collect_masks
+
+

Ancestors

+ +

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, mask: PIL.Image.Image) -> PIL.Image.Image
+
+

Transforms output data from the neural network into a format suitable for use with other components of this framework.

+

Args

+
+
data
+
output data from neural network
+
mask
+
input mask
+
+

Returns

+

Segmentation mask as PIL Image instance

+
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, mask: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data: output data from neural network
+        mask: input mask
+
+    Returns:
+        Segmentation mask as PIL Image instance
+
+    """
+    refined_mask = (data[0, :, :].cpu().numpy() * 255).astype("uint8")
+    return Image.fromarray(refined_mask).convert("L").resize(mask.size)
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data
+
input image
+
+

Returns

+

input for neural network

+
def data_preprocessing(self, data: Union[PIL.Image.Image]) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data: input image
+
+    Returns:
+        input for neural network
+
+    """
+    preprocessed_data = data.copy()
+    if self.batch_size == 1 and self.processing_accelerate_image_size > 0:
+        # Okay, we have only one image, so
+        # we can use image processing acceleration for accelerate high resolution image processing
+        preprocessed_data.thumbnail(
+            (
+                self.processing_accelerate_image_size,
+                self.processing_accelerate_image_size,
+            )
+        )
+    elif self.batch_size == 1:
+        pass  # No need to do anything
+    elif self.batch_size > 1 and self.global_step_only is True:
+        # If we have more than one image and we use only global step,
+        # there aren't any reason to use image processing acceleration,
+        # because we will use only global step for prediction and anyway it will be resized to input_tensor_size
+        preprocessed_data = preprocessed_data.resize(
+            (self.input_tensor_size, self.input_tensor_size)
+        )
+    elif (
+        self.batch_size > 1
+        and self.global_step_only is False
+        and self.processing_accelerate_image_size > 0
+    ):
+        # If we have more than one image and we use local step,
+        # we can use image processing acceleration for accelerate high resolution image processing
+        # but we need to resize image to processing_accelerate_image_size to stack it with other images
+        preprocessed_data = preprocessed_data.resize(
+            (
+                self.processing_accelerate_image_size,
+                self.processing_accelerate_image_size,
+            )
+        )
+    elif (
+        self.batch_size > 1
+        and self.global_step_only is False
+        and not (self.processing_accelerate_image_size > 0)
+    ):
+        raise ValueError(
+            "If you use local step with batch_size > 1, "
+            "you need to set processing_accelerate_image_size > 0, "
+            "since we cannot stack images with different sizes into one batch"
+        )
+    else:  # some extra cases
+        preprocessed_data = preprocessed_data.resize(
+            (
+                self.processing_accelerate_image_size,
+                self.processing_accelerate_image_size,
+            )
+        )
+
+    if data.mode == "RGB":
+        preprocessed_data = self._image_transform(
+            np.array(preprocessed_data)
+        ).unsqueeze(0)
+    elif data.mode == "L":
+        preprocessed_data = np.array(preprocessed_data)
+        if 0 < self.mask_binary_threshold <= 255:
+            preprocessed_data = (
+                preprocessed_data > self.mask_binary_threshold
+            ).astype(np.uint8) * 255
+        elif self.mask_binary_threshold > 255 or self.mask_binary_threshold < 0:
+            warnings.warn(
+                "mask_binary_threshold should be in range [0, 255], "
+                "but got {}. Disabling mask_binary_threshold!".format(
+                    self.mask_binary_threshold
+                )
+            )
+
+        preprocessed_data = self._seg_transform(preprocessed_data).unsqueeze(
+            0
+        )  # [H,W,1]
+
+    return preprocessed_data
+
+
+
+def safe_forward(self, im, seg, inter_s8=None, inter_s4=None) +
+
+

Slightly pads the input image such that its length is a multiple of 8

+
def safe_forward(self, im, seg, inter_s8=None, inter_s4=None):
+    """
+    Slightly pads the input image such that its length is a multiple of 8
+    """
+    b, _, ph, pw = seg.shape
+    if (ph % 8 != 0) or (pw % 8 != 0):
+        newH = (ph // 8 + 1) * 8
+        newW = (pw // 8 + 1) * 8
+        p_im = torch.zeros(b, 3, newH, newW, device=im.device)
+        p_seg = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+
+        p_im[:, :, 0:ph, 0:pw] = im
+        p_seg[:, :, 0:ph, 0:pw] = seg
+        im = p_im
+        seg = p_seg
+
+        if inter_s8 is not None:
+            p_inter_s8 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+            p_inter_s8[:, :, 0:ph, 0:pw] = inter_s8
+            inter_s8 = p_inter_s8
+        if inter_s4 is not None:
+            p_inter_s4 = torch.zeros(b, 1, newH, newW, device=im.device) - 1
+            p_inter_s4[:, :, 0:ph, 0:pw] = inter_s4
+            inter_s4 = p_inter_s4
+
+    images = super().__call__(im, seg, inter_s8, inter_s4)
+    return_im = {}
+
+    for key in ["pred_224", "pred_28_3", "pred_56_2"]:
+        return_im[key] = images[key][:, :, 0:ph, 0:pw]
+    del images
+
+    return return_im
+
+
+
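For intuition, the padding above only ever rounds each spatial dimension up to the next multiple of 8 (new pixels are filled with 0 for the image and -1 for the mask), and the predictions are cropped back to the original size afterwards. A tiny sketch of the size arithmetic with a hypothetical 450x601 input:

``` python
# Hypothetical mask shape (ph, pw) = (450, 601); neither side is divisible by 8
ph, pw = 450, 601
newH = (ph // 8 + 1) * 8   # 456
newW = (pw // 8 + 1) * 8   # 608
print(newH, newW)          # predictions are later cropped back to 450x601
```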
+

Inherited members

+ +
+
+
+
+ +
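For orientation, a minimal sketch of calling CascadePSP on its own to refine an existing rough mask; the constructor arguments mirror the ones documented above, and the file paths are hypothetical:

``` python
from carvekit.ml.wrap.cascadepsp import CascadePSP

refiner = CascadePSP(device="cpu",
                     batch_size=1,
                     input_tensor_size=900,
                     fp16=False,
                     processing_accelerate_image_size=2048,
                     global_step_only=False)

# Hypothetical inputs: an RGB photo and a rough grayscale mask from any segmentation network
refined_masks = refiner(["./photo.jpg"], ["./photo_rough_mask.png"])
refined_masks[0].save("photo_refined_mask.png")  # refined mask, resized back to the input mask size
```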
\ No newline at end of file
diff --git a/docs/api/deeplab_v3.html b/docs/api/deeplab_v3.html
new file mode 100644
index 0000000..65e14cc
--- /dev/null
+++ b/docs/api/deeplab_v3.html
@@ -0,0 +1,485 @@
+deeplab_v3 API documentation
+
+
+

Module deeplab_v3

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import pathlib
+from typing import List, Union
+
+import PIL.Image
+import torch
+from PIL import Image
+from torchvision import transforms
+from torchvision.models.segmentation import deeplabv3_resnet101
+from carvekit.ml.files.models_loc import deeplab_pretrained
+from carvekit.utils.image_utils import convert_image, load_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import batch_generator, thread_pool_processing
+
+__all__ = ["DeepLabV3"]
+
+
+class DeepLabV3:
+    def __init__(
+        self,
+        device="cpu",
+        batch_size: int = 10,
+        input_image_size: Union[List[int], int] = 1024,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the `DeepLabV3` model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=1024): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use half precision
+
+        """
+        self.device = device
+        self.batch_size = batch_size
+        self.network = deeplabv3_resnet101(
+            pretrained=False, pretrained_backbone=False, aux_loss=True
+        )
+        self.network.to(self.device)
+        if load_pretrained:
+            self.network.load_state_dict(
+                torch.load(deeplab_pretrained(), map_location=self.device)
+            )
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.network.eval()
+        self.fp16 = fp16
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+    def to(self, device: str):
+        """
+        Moves neural network to specified processing device
+
+        Args:
+            device (Literal[cpu, cuda]): the desired device.
+
+        """
+        self.network.to(device)
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        copy = data.copy()
+        copy.thumbnail(self.input_image_size, resample=3)
+        return self.transform(copy)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        return (
+            Image.fromarray(data.numpy() * 255).convert("L").resize(original_image.size)
+        )
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.network, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = thread_pool_processing(
+                    self.data_preprocessing, converted_images
+                )
+                with torch.no_grad():
+                    masks = [
+                        self.network(i.to(self.device).unsqueeze(0))["out"][0]
+                        .argmax(0)
+                        .byte()
+                        .cpu()
+                        for i in batches
+                    ]
+                    del batches
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks[x], converted_images[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class DeepLabV3(device='cpu', batch_size: int = 10, input_image_size: Union[List[int], int] = 1024, load_pretrained: bool = True, fp16: bool = False)
+
+

Initialize the DeepLabV3 model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_image_size : Union[List[int], int], default=1024
+
input image size
+
batch_size : int, default=10
+
the number of images that the neural network processes in one run
+
load_pretrained : bool, default=True
+
loading pretrained model
+
fp16 : bool, default=False
+
use half precision
+
+
class DeepLabV3:
+    def __init__(
+        self,
+        device="cpu",
+        batch_size: int = 10,
+        input_image_size: Union[List[int], int] = 1024,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the `DeepLabV3` model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=1024): input image size
+            batch_size (int, default=10): the number of images that the neural network processes in one run
+            load_pretrained (bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use half precision
+
+        """
+        self.device = device
+        self.batch_size = batch_size
+        self.network = deeplabv3_resnet101(
+            pretrained=False, pretrained_backbone=False, aux_loss=True
+        )
+        self.network.to(self.device)
+        if load_pretrained:
+            self.network.load_state_dict(
+                torch.load(deeplab_pretrained(), map_location=self.device)
+            )
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.network.eval()
+        self.fp16 = fp16
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+
+    def to(self, device: str):
+        """
+        Moves neural network to specified processing device
+
+        Args:
+            device (Literal[cpu, cuda]): the desired device.
+
+        """
+        self.network.to(device)
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.Tensor: input for neural network
+
+        """
+        copy = data.copy()
+        copy.thumbnail(self.input_image_size, resample=3)
+        return self.transform(copy)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        return (
+            Image.fromarray(data.numpy() * 255).convert("L").resize(original_image.size)
+        )
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as `PIL.Image.Image` instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images, as `PIL.Image.Image` instances
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.network, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = thread_pool_processing(
+                    self.data_preprocessing, converted_images
+                )
+                with torch.no_grad():
+                    masks = [
+                        self.network(i.to(self.device).unsqueeze(0))["out"][0]
+                        .argmax(0)
+                        .byte()
+                        .cpu()
+                        for i in batches
+                    ]
+                    del batches
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks[x], converted_images[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+        return collect_masks
+
+

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) -> PIL.Image.Image
+
+

Transforms output data from the neural network into a format suitable for use with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask as PIL Image instance
+
+
@staticmethod
+def data_postprocessing(
+    data: torch.Tensor, original_image: PIL.Image.Image
+) -> PIL.Image.Image:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data (torch.Tensor): output data from neural network
+        original_image (PIL.Image.Image): input image which was used for predicted data
+
+    Returns:
+        PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+    """
+    return (
+        Image.fromarray(data.numpy() * 255).convert("L").resize(original_image.size)
+    )
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.Tensor
+
input for neural network
+
+
def data_preprocessing(self, data: PIL.Image.Image) -> torch.Tensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data (PIL.Image.Image): input image
+
+    Returns:
+        torch.Tensor: input for neural network
+
+    """
+    copy = data.copy()
+    copy.thumbnail(self.input_image_size, resample=3)
+    return self.transform(copy)
+
+
+
+def to(self, device: str)
+
+

Moves neural network to specified processing device

+

Args

+
+
device : Literal[cpu, cuda]
+
the desired device.
+
+
def to(self, device: str):
+    """
+    Moves neural network to specified processing device
+
+    Args:
+        device (Literal[cpu, cuda]): the desired device.
+
+    """
+    self.network.to(device)
+
+
+
+
+
+
+
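As a quick usage sketch, assuming the wrapper is importable as carvekit.ml.wrap.deeplab_v3 (by analogy with the other model wrappers) and with a hypothetical input image:

``` python
# Assumed import path, matching the other carvekit.ml.wrap.* modules
from carvekit.ml.wrap.deeplab_v3 import DeepLabV3

segnet = DeepLabV3(device="cpu", batch_size=1, input_image_size=1024, fp16=False)

# __call__ accepts file paths, pathlib.Path objects or PIL.Image.Image instances
masks = segnet(["./person.jpg"])     # hypothetical input image
masks[0].save("person_mask.png")     # grayscale mask resized back to the input image size
```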
+ +
\ No newline at end of file
diff --git a/docs/api/download_models.html b/docs/api/download_models.html
new file mode 100644
index 0000000..643813d
--- /dev/null
+++ b/docs/api/download_models.html
@@ -0,0 +1,775 @@
+download_models API documentation
+
+
+

Module download_models

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

License: Apache License 2.0

+
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import hashlib
+import os
+import warnings
+from abc import ABCMeta, abstractmethod, ABC
+from pathlib import Path
+from typing import Optional
+
+import carvekit
+from carvekit.ml.files import checkpoints_dir
+
+import requests
+import tqdm
+
+requests = requests.Session()
+requests.headers.update({"User-Agent": f"Carvekit/{carvekit.version}"})
+
+MODELS_URLS = {
+    "basnet.pth": {
+        "repository": "Carve/basnet-universal",
+        "revision": "870becbdb364fda6d8fdb2c10b072542f8d08701",
+        "filename": "basnet.pth",
+    },
+    "deeplab.pth": {
+        "repository": "Carve/deeplabv3-resnet101",
+        "revision": "d504005392fc877565afdf58aad0cd524682d2b0",
+        "filename": "deeplab.pth",
+    },
+    "fba_matting.pth": {
+        "repository": "Carve/fba",
+        "revision": "a5d3457df0fb9c88ea19ed700d409756ca2069d1",
+        "filename": "fba_matting.pth",
+    },
+    "u2net.pth": {
+        "repository": "Carve/u2net-universal",
+        "revision": "10305d785481cf4b2eee1d447c39cd6e5f43d74b",
+        "filename": "full_weights.pth",
+    },
+    "tracer_b7.pth": {
+        "repository": "Carve/tracer_b7",
+        "revision": "d8a8fd9e7b3fa0d2f1506fe7242966b34381e9c5",
+        "filename": "tracer_b7.pth",
+    },
+    "scene_classifier.pth": {
+        "repository": "Carve/scene_classifier",
+        "revision": "71c8e4c771dd5a20ff0c5c9e3c8f1c9cf8082740",
+        "filename": "scene_classifier.pth",
+    },
+    "yolov4_coco_with_classes.pth": {
+        "repository": "Carve/yolov4_coco",
+        "revision": "e3fc9cd22f86e456d2749d1ae148400f2f950fb3",
+        "filename": "yolov4_coco_with_classes.pth",
+    },
+    "cascadepsp.pth": {
+        "repository": "Carve/cascadepsp",
+        "revision": "3ca1e5e432344b1277bc88d1c6d4265c46cff62f",
+        "filename": "cascadepsp.pth",
+    },
+}
+"""
+All data needed to build path relative to huggingface.co for model download
+"""
+
+MODELS_CHECKSUMS = {
+    "basnet.pth": "e409cb709f4abca87cb11bd44a9ad3f909044a917977ab65244b4c94dd33"
+    "8b1a37755c4253d7cb54526b7763622a094d7b676d34b5e6886689256754e5a5e6ad",
+    "deeplab.pth": "9c5a1795bc8baa267200a44b49ac544a1ba2687d210f63777e4bd715387324469a59b072f8a28"
+    "9cc471c637b367932177e5b312e8ea6351c1763d9ff44b4857c",
+    "fba_matting.pth": "890906ec94c1bfd2ad08707a63e4ccb0955d7f5d25e32853950c24c78"
+    "4cbad2e59be277999defc3754905d0f15aa75702cdead3cfe669ff72f08811c52971613",
+    "u2net.pth": "16f8125e2fedd8c85db0e001ee15338b4aa2fda77bab8ba70c25e"
+    "bea1533fda5ee70a909b934a9bd495b432cef89d629f00a07858a517742476fa8b346de24f7",
+    "tracer_b7.pth": "c439c5c12d4d43d5f9be9ec61e68b2e54658a541bccac2577ef5a54fb252b6e8415d41f7e"
+    "c2487033d0c02b4dd08367958e4e62091318111c519f93e2632be7b",
+    "scene_classifier.pth": "6d8692510abde453b406a1fea557afdea62fd2a2a2677283a3ecc2"
+    "341a4895ee99ed65cedcb79b80775db14c3ffcfc0aad2caec1d85140678852039d2d4e76b4",
+    "yolov4_coco_with_classes.pth": "44b6ec2dd35dc3802bf8c512002f76e00e97bfbc86bc7af6de2fafce229a41b4ca"
+    "12c6f3d7589278c71cd4ddd62df80389b148c19b84fa03216905407a107fff",
+    "cascadepsp.pth": "3f895f5126d80d6f73186f045557ea7c8eab4dfa3d69a995815bb2c03d564573f36c474f04d7bf0022a27829f583a1a793b036adf801cb423e41a4831b830122",
+}
+"""
+Model -> checksum dictionary
+"""
+
+
+def sha512_checksum_calc(file: Path) -> str:
+    """
+    Calculates the SHA512 hash digest of a file on fs
+
+    Args:
+        file (Path): Path to the file
+
+    Returns:
+        SHA512 hash digest of a file.
+    """
+    dd = hashlib.sha512()
+    with file.open("rb") as f:
+        for chunk in iter(lambda: f.read(4096), b""):
+            dd.update(chunk)
+    return dd.hexdigest()
+
+
+class CachedDownloader:
+    """
+    Base class for model downloaders.
+    """
+
+    __metaclass__ = ABCMeta
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        return self.__class__.__name__
+
+    @property
+    @abstractmethod
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        """
+        Property MAY be overridden in subclasses.
+        Used in case the subclass fails to download a model. The preferred downloader SHOULD be placed higher in the hierarchy.
+        The less preferred downloader SHOULD be provided by this property.
+        """
+        pass
+
+    def download_model(self, file_name: str) -> Path:
+        """
+        Downloads model from the internet and saves it to the cache.
+
+        Behavior:
+            If model is already downloaded it will be loaded from the cache.
+
+            If model is already downloaded, but checksum is invalid, it will be downloaded again.
+
+            If model download failed, fallback downloader will be used.
+        """
+        try:
+            return self.download_model_base(file_name)
+        except BaseException as e:
+            if self.fallback_downloader is not None:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" Trying to download from {self.fallback_downloader.name} downloader."
+                )
+                return self.fallback_downloader.download_model(file_name)
+            else:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" No fallback downloader available."
+                )
+                raise e
+
+    @abstractmethod
+    def download_model_base(self, model_name: str) -> Path:
+        """
+        Download model from any source if not cached.
+        Returns:
+            pathlib.Path: Path to the downloaded model.
+        """
+
+    def __call__(self, model_name: str):
+        return self.download_model(model_name)
+
+
+class HuggingFaceCompatibleDownloader(CachedDownloader, ABC):
+    """
+    Downloader for models from HuggingFace Hub.
+    Private models are not supported.
+    """
+
+    def __init__(
+        self,
+        name: str = "Huggingface.co",
+        base_url: str = "https://huggingface.co",
+        fb_downloader: Optional["CachedDownloader"] = None,
+    ):
+        self.cache_dir = checkpoints_dir
+        """SHOULD be same for all instances to prevent downloading same model multiple times
+        Points to ~/.cache/carvekit/checkpoints"""
+        self.base_url = base_url
+        """MUST be a base url with protocol and domain name to huggingface or another, compatible in terms of models downloading API source"""
+        self._name = name
+        self._fallback_downloader = fb_downloader
+
+    @property
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        return self._fallback_downloader
+
+    @property
+    def name(self):
+        return self._name
+
+    def check_for_existence(self, model_name: str) -> Optional[Path]:
+        """
+        Checks if model is already downloaded and cached. Verifies file integrity by checksum.
+        Returns:
+            Optional[pathlib.Path]: Path to the cached model if cached.
+        """
+        if model_name not in MODELS_URLS.keys():
+            raise FileNotFoundError("Unknown model!")
+        path = (
+            self.cache_dir
+            / MODELS_URLS[model_name]["repository"].split("/")[1]
+            / model_name
+        )
+
+        if not path.exists():
+            return None
+
+        if MODELS_CHECKSUMS[path.name] != sha512_checksum_calc(path):
+            warnings.warn(
+                f"Invalid checksum for model {path.name}. Downloading correct model!"
+            )
+            os.remove(path)
+            return None
+        return path
+
+    def download_model_base(self, model_name: str) -> Path:
+        cached_path = self.check_for_existence(model_name)
+        if cached_path is not None:
+            return cached_path
+        else:
+            cached_path = (
+                self.cache_dir
+                / MODELS_URLS[model_name]["repository"].split("/")[1]
+                / model_name
+            )
+            cached_path.parent.mkdir(parents=True, exist_ok=True)
+            url = MODELS_URLS[model_name]
+            hugging_face_url = f"{self.base_url}/{url['repository']}/resolve/{url['revision']}/{url['filename']}"
+
+            try:
+                r = requests.get(hugging_face_url, stream=True, timeout=10)
+                if r.status_code < 400:
+                    with open(cached_path, "wb") as f:
+                        r.raw.decode_content = True
+                        for chunk in tqdm.tqdm(
+                            r,
+                            desc="Downloading " + cached_path.name + " model",
+                            colour="blue",
+                        ):
+                            f.write(chunk)
+                else:
+                    if r.status_code == 404:
+                        raise FileNotFoundError(f"Model {model_name} not found!")
+                    else:
+                        raise ConnectionError(
+                            f"Error {r.status_code} while downloading model {model_name}!"
+                        )
+            except BaseException as e:
+                if cached_path.exists():
+                    os.remove(cached_path)
+                raise ConnectionError(
+                    f"Exception caught when downloading model! "
+                    f"Model name: {cached_path.name}. Exception: {str(e)}."
+                )
+            return cached_path
+
+
+fallback_downloader: CachedDownloader = HuggingFaceCompatibleDownloader()
+downloader: CachedDownloader = HuggingFaceCompatibleDownloader(
+    base_url="https://cdn.carve.photos",
+    fb_downloader=fallback_downloader,
+    name="Carve CDN",
+)
+downloader._fallback_downloader = fallback_downloader
+
+
+
+
+
+

Global variables

+
+
var MODELS_CHECKSUMS
+
+

Model -> checksum dictionary

+
+
var MODELS_URLS
+
+

All data needed to build path relative to huggingface.co for model download

+
+
+
+
+
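To make the layout of MODELS_URLS concrete: the downloader below joins base_url, repository, revision and filename into a huggingface-style resolve URL. A small sketch reproducing that formatting for the tracer_b7.pth entry:

``` python
entry = {
    "repository": "Carve/tracer_b7",
    "revision": "d8a8fd9e7b3fa0d2f1506fe7242966b34381e9c5",
    "filename": "tracer_b7.pth",
}
base_url = "https://huggingface.co"

# Same f-string layout as HuggingFaceCompatibleDownloader.download_model_base
url = f"{base_url}/{entry['repository']}/resolve/{entry['revision']}/{entry['filename']}"
print(url)
```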

Functions

+
+
+def sha512_checksum_calc(file: pathlib.Path) -> str
+
+

Calculates the SHA512 hash digest of a file on fs

+

Args

+
+
file : Path
+
Path to the file
+
+

Returns

+

SHA512 hash digest of a file.

+
def sha512_checksum_calc(file: Path) -> str:
+    """
+    Calculates the SHA512 hash digest of a file on fs
+
+    Args:
+        file (Path): Path to the file
+
+    Returns:
+        SHA512 hash digest of a file.
+    """
+    dd = hashlib.sha512()
+    with file.open("rb") as f:
+        for chunk in iter(lambda: f.read(4096), b""):
+            dd.update(chunk)
+    return dd.hexdigest()
+
+
+
+
+
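A short sketch of checking a cached checkpoint against MODELS_CHECKSUMS with this function; the import path and cache location are assumptions based on the module shown above (checkpoints_dir points to ~/.cache/carvekit/checkpoints):

``` python
from pathlib import Path

# Assumed import path for the module documented on this page
from carvekit.ml.files.download_models import MODELS_CHECKSUMS, sha512_checksum_calc

# Hypothetical cached file: <checkpoints_dir>/<repository name>/<model file>
ckpt = Path.home() / ".cache" / "carvekit" / "checkpoints" / "tracer_b7" / "tracer_b7.pth"

if ckpt.exists() and sha512_checksum_calc(ckpt) == MODELS_CHECKSUMS[ckpt.name]:
    print("checksum ok")
else:
    print("missing or corrupted - the downloader would fetch it again")
```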
+

Classes

+
+
+class CachedDownloader +
+
+

Base class for model downloaders.

+
class CachedDownloader:
+    """
+    Base class for model downloaders.
+    """
+
+    __metaclass__ = ABCMeta
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        return self.__class__.__name__
+
+    @property
+    @abstractmethod
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        """
+        Property MAY be overridden in subclasses.
+        Used in case the subclass fails to download a model. The preferred downloader SHOULD be placed higher in the hierarchy.
+        The less preferred downloader SHOULD be provided by this property.
+        """
+        pass
+
+    def download_model(self, file_name: str) -> Path:
+        """
+        Downloads model from the internet and saves it to the cache.
+
+        Behavior:
+            If model is already downloaded it will be loaded from the cache.
+
+            If model is already downloaded, but checksum is invalid, it will be downloaded again.
+
+            If model download failed, fallback downloader will be used.
+        """
+        try:
+            return self.download_model_base(file_name)
+        except BaseException as e:
+            if self.fallback_downloader is not None:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" Trying to download from {self.fallback_downloader.name} downloader."
+                )
+                return self.fallback_downloader.download_model(file_name)
+            else:
+                warnings.warn(
+                    f"Failed to download model from {self.name} downloader."
+                    f" No fallback downloader available."
+                )
+                raise e
+
+    @abstractmethod
+    def download_model_base(self, model_name: str) -> Path:
+        """
+        Download model from any source if not cached.
+        Returns:
+            pathlib.Path: Path to the downloaded model.
+        """
+
+    def __call__(self, model_name: str):
+        return self.download_model(model_name)
+
+

Subclasses

+ +

Instance variables

+
+
var fallback_downloader : Optional[CachedDownloader]
+
+

Property MAY be overridden in subclasses. Used in case the subclass fails to download a model. The preferred downloader SHOULD be placed higher in the hierarchy; the less preferred downloader SHOULD be provided by this property.

+
@property
+@abstractmethod
+def fallback_downloader(self) -> Optional["CachedDownloader"]:
+    """
+    Property MAY be overridden in subclasses.
+    Used in case the subclass fails to download a model. The preferred downloader SHOULD be placed higher in the hierarchy.
+    The less preferred downloader SHOULD be provided by this property.
+    """
+    pass
+
+
+
var name : str
+
+
+
@property
+@abstractmethod
+def name(self) -> str:
+    return self.__class__.__name__
+
+
+
+

Methods

+
+
+def download_model(self, file_name: str) -> pathlib.Path
+
+

Downloads model from the internet and saves it to the cache.

+

Behavior

+

If model is already downloaded it will be loaded from the cache.

+

If model is already downloaded, but checksum is invalid, it will be downloaded again.

+

If model download failed, fallback downloader will be used.

+
def download_model(self, file_name: str) -> Path:
+    """
+    Downloads model from the internet and saves it to the cache.
+
+    Behavior:
+        If model is already downloaded it will be loaded from the cache.
+
+        If model is already downloaded, but checksum is invalid, it will be downloaded again.
+
+        If model download failed, fallback downloader will be used.
+    """
+    try:
+        return self.download_model_base(file_name)
+    except BaseException as e:
+        if self.fallback_downloader is not None:
+            warnings.warn(
+                f"Failed to download model from {self.name} downloader."
+                f" Trying to download from {self.fallback_downloader.name} downloader."
+            )
+            return self.fallback_downloader.download_model(file_name)
+        else:
+            warnings.warn(
+                f"Failed to download model from {self.name} downloader."
+                f" No fallback downloader available."
+            )
+            raise e
+
+
+
+def download_model_base(self, model_name: str) -> pathlib.Path
+
+

Download model from any source if not cached.

+

Returns

+
+
pathlib.Path
+
Path to the downloaded model.
+
+
@abstractmethod
+def download_model_base(self, model_name: str) -> Path:
+    """
+    Download model from any source if not cached.
+    Returns:
+        pathlib.Path: Path to the downloaded model.
+    """
+
+
+
+
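To illustrate the contract, here is a hypothetical minimal subclass: download_model_base performs the actual fetch and raises on failure, and the inherited download_model then falls back to fallback_downloader (the import path is assumed):

``` python
from pathlib import Path
from typing import Optional

# Assumed import path for the class documented on this page
from carvekit.ml.files.download_models import CachedDownloader

class LocalMirrorDownloader(CachedDownloader):
    """Hypothetical downloader that serves checkpoints from a local mirror directory."""

    def __init__(self, mirror: Path, fb_downloader: Optional[CachedDownloader] = None):
        self.mirror = mirror
        self._fallback = fb_downloader

    @property
    def name(self) -> str:
        return "LocalMirror"

    @property
    def fallback_downloader(self) -> Optional[CachedDownloader]:
        return self._fallback

    def download_model_base(self, model_name: str) -> Path:
        path = self.mirror / model_name
        if not path.exists():
            # Raising here makes download_model() try the fallback downloader
            raise FileNotFoundError(f"{model_name} is not mirrored locally")
        return path
```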
+
+class HuggingFaceCompatibleDownloader(name: str = 'Huggingface.co', base_url: str = 'https://huggingface.co', fb_downloader: Optional[ForwardRef('CachedDownloader')] = None)
+
+

Downloader for models from HuggingFace Hub. Private models are not supported.

+
class HuggingFaceCompatibleDownloader(CachedDownloader, ABC):
+    """
+    Downloader for models from HuggingFace Hub.
+    Private models are not supported.
+    """
+
+    def __init__(
+        self,
+        name: str = "Huggingface.co",
+        base_url: str = "https://huggingface.co",
+        fb_downloader: Optional["CachedDownloader"] = None,
+    ):
+        self.cache_dir = checkpoints_dir
+        """SHOULD be same for all instances to prevent downloading same model multiple times
+        Points to ~/.cache/carvekit/checkpoints"""
+        self.base_url = base_url
+        """MUST be a base url with protocol and domain name to huggingface or another, compatible in terms of models downloading API source"""
+        self._name = name
+        self._fallback_downloader = fb_downloader
+
+    @property
+    def fallback_downloader(self) -> Optional["CachedDownloader"]:
+        return self._fallback_downloader
+
+    @property
+    def name(self):
+        return self._name
+
+    def check_for_existence(self, model_name: str) -> Optional[Path]:
+        """
+        Checks if model is already downloaded and cached. Verifies file integrity by checksum.
+        Returns:
+            Optional[pathlib.Path]: Path to the cached model if cached.
+        """
+        if model_name not in MODELS_URLS.keys():
+            raise FileNotFoundError("Unknown model!")
+        path = (
+            self.cache_dir
+            / MODELS_URLS[model_name]["repository"].split("/")[1]
+            / model_name
+        )
+
+        if not path.exists():
+            return None
+
+        if MODELS_CHECKSUMS[path.name] != sha512_checksum_calc(path):
+            warnings.warn(
+                f"Invalid checksum for model {path.name}. Downloading correct model!"
+            )
+            os.remove(path)
+            return None
+        return path
+
+    def download_model_base(self, model_name: str) -> Path:
+        cached_path = self.check_for_existence(model_name)
+        if cached_path is not None:
+            return cached_path
+        else:
+            cached_path = (
+                self.cache_dir
+                / MODELS_URLS[model_name]["repository"].split("/")[1]
+                / model_name
+            )
+            cached_path.parent.mkdir(parents=True, exist_ok=True)
+            url = MODELS_URLS[model_name]
+            hugging_face_url = f"{self.base_url}/{url['repository']}/resolve/{url['revision']}/{url['filename']}"
+
+            try:
+                r = requests.get(hugging_face_url, stream=True, timeout=10)
+                if r.status_code < 400:
+                    with open(cached_path, "wb") as f:
+                        r.raw.decode_content = True
+                        for chunk in tqdm.tqdm(
+                            r,
+                            desc="Downloading " + cached_path.name + " model",
+                            colour="blue",
+                        ):
+                            f.write(chunk)
+                else:
+                    if r.status_code == 404:
+                        raise FileNotFoundError(f"Model {model_name} not found!")
+                    else:
+                        raise ConnectionError(
+                            f"Error {r.status_code} while downloading model {model_name}!"
+                        )
+            except BaseException as e:
+                if cached_path.exists():
+                    os.remove(cached_path)
+                raise ConnectionError(
+                    f"Exception caught when downloading model! "
+                    f"Model name: {cached_path.name}. Exception: {str(e)}."
+                )
+            return cached_path
+
+

Ancestors

+ +

Instance variables

+
+
var base_url
+
+

MUST be a base url with protocol and domain name to huggingface or another, compatible in terms of models downloading API source

+
+
var cache_dir
+
+

SHOULD be the same for all instances to prevent downloading the same model multiple times. Points to ~/.cache/carvekit/checkpoints

+
+
var name
+
+
+
@property
+def name(self):
+    return self._name
+
+
+
+

Methods

+
+
+def check_for_existence(self, model_name: str) -> Optional[pathlib.Path]
+
+

Checks if model is already downloaded and cached. Verifies file integrity by checksum.

+

Returns

+
+
Optional[pathlib.Path]
+
Path to the cached model if cached.
+
+
def check_for_existence(self, model_name: str) -> Optional[Path]:
+    """
+    Checks if model is already downloaded and cached. Verifies file integrity by checksum.
+    Returns:
+        Optional[pathlib.Path]: Path to the cached model if cached.
+    """
+    if model_name not in MODELS_URLS.keys():
+        raise FileNotFoundError("Unknown model!")
+    path = (
+        self.cache_dir
+        / MODELS_URLS[model_name]["repository"].split("/")[1]
+        / model_name
+    )
+
+    if not path.exists():
+        return None
+
+    if MODELS_CHECKSUMS[path.name] != sha512_checksum_calc(path):
+        warnings.warn(
+            f"Invalid checksum for model {path.name}. Downloading correct model!"
+        )
+        os.remove(path)
+        return None
+    return path
+
+
+
+

Inherited members

+ +
+
+
+
+ +
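In practice the module already wires two such downloaders together (the Carve CDN first, huggingface.co as fallback), so fetching a known checkpoint is a single call; a brief sketch, with the import path assumed as above:

``` python
# Assumed import path for the module documented on this page
from carvekit.ml.files.download_models import downloader

path = downloader("cascadepsp.pth")  # tries the Carve CDN, then falls back to huggingface.co
print(path)                          # cached under ~/.cache/carvekit/checkpoints/cascadepsp/cascadepsp.pth
```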
\ No newline at end of file
diff --git a/docs/api/high.html b/docs/api/high.html
new file mode 100644
index 0000000..ee982a1
--- /dev/null
+++ b/docs/api/high.html
@@ -0,0 +1,377 @@
+high API documentation
+
+
+

Module high

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import warnings
+
+from carvekit.api.interface import Interface
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.ml.wrap.cascadepsp import CascadePSP
+from carvekit.ml.wrap.scene_classifier import SceneClassifier
+from carvekit.pipelines.preprocessing import AutoScene
+from carvekit.ml.wrap.u2net import U2NET
+from carvekit.pipelines.postprocessing import CasMattingMethod
+from carvekit.trimap.generator import TrimapGenerator
+
+
+class HiInterface(Interface):
+    def __init__(
+        self,
+        object_type: str = "auto",
+        batch_size_pre=5,
+        batch_size_seg=2,
+        batch_size_matting=1,
+        batch_size_refine=1,
+        device="cpu",
+        seg_mask_size=640,
+        matting_mask_size=2048,
+        refine_mask_size=900,
+        trimap_prob_threshold=231,
+        trimap_dilation=30,
+        trimap_erosion_iters=5,
+        fp16=False,
+    ):
+        """
+        Initializes High Level interface.
+
+        Args:
+            object_type (str, default=auto): Interest object type. Can be "object", "hairs-like" or "auto".
+            matting_mask_size (int, default=2048): The size of the input image for the matting neural network.
+            seg_mask_size (int, default=640): The size of the input image for the segmentation neural network.
+            batch_size_pre (int, default=5): Number of images processed per one preprocessing method call.
+            batch_size_seg (int, default=2): Number of images processed per one segmentation neural network call.
+            batch_size_matting (int, default=1): Number of images processed per one matting neural network call.
+            device (Literal[cpu, cuda], default=cpu): Processing device
+            fp16 (bool, default=False): Use half precision. Reduce memory usage and increase speed.
+            .. CAUTION:: ⚠️ **Experimental support**
+            trimap_prob_threshold (int, default=231): Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied
+            trimap_dilation (int, default=30): The size of the offset radius from the object mask in pixels when forming an unknown area
+            trimap_erosion_iters (int, default=5): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area
+            refine_mask_size (int, default=900): The size of the input image for the refinement neural network.
+            batch_size_refine (int, default=1): Number of images processed per one refinement neural network call.
+
+
+        .. NOTE::
+            1. Changing seg_mask_size may cause an `out-of-memory` error if the value is too large, and it may also
+            result in reduced precision. I do not recommend changing this value. You can change `matting_mask_size` in
+            range from `(1024 to 4096)` to improve object edge refining quality, but it will cause very high RAM and
+            video memory consumption. Also, you can change batch size to accelerate background removal, but it also causes
+            very high video memory consumption if the value is too big.
+            2. Changing `trimap_prob_threshold`, `trimap_kernel_size`, `trimap_erosion_iters` may improve object edge
+            refining quality.
+        """
+        preprocess_pipeline = None
+
+        if object_type == "object":
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "hairs-like":
+            self._segnet = U2NET(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "auto":
+            # Using Tracer by default,
+            # but it will dynamically switch to other if needed
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+            self._scene_classifier = SceneClassifier(
+                device=device, fp16=fp16, batch_size=batch_size_pre
+            )
+            preprocess_pipeline = AutoScene(scene_classifier=self._scene_classifier)
+
+        else:
+            warnings.warn(
+                f"Unknown object type: {object_type}. Using default object type: object"
+            )
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+
+        self._cascade_psp = CascadePSP(
+            device=device,
+            batch_size=batch_size_refine,
+            input_tensor_size=refine_mask_size,
+            fp16=fp16,
+        )
+        self._fba = FBAMatting(
+            batch_size=batch_size_matting,
+            device=device,
+            input_tensor_size=matting_mask_size,
+            fp16=fp16,
+        )
+        self._trimap_generator = TrimapGenerator(
+            prob_threshold=trimap_prob_threshold,
+            kernel_size=trimap_dilation,
+            erosion_iters=trimap_erosion_iters,
+        )
+        super(HiInterface, self).__init__(
+            pre_pipe=preprocess_pipeline,
+            seg_pipe=self._segnet,
+            post_pipe=CasMattingMethod(
+                refining_module=self._cascade_psp,
+                matting_module=self._fba,
+                trimap_generator=self._trimap_generator,
+                device=device,
+            ),
+            device=device,
+        )
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class HiInterface(object_type: str = 'auto', batch_size_pre=5, batch_size_seg=2, batch_size_matting=1, batch_size_refine=1, device='cpu', seg_mask_size=640, matting_mask_size=2048, refine_mask_size=900, trimap_prob_threshold=231, trimap_dilation=30, trimap_erosion_iters=5, fp16=False)
+
+

Initializes High Level interface.

+

Args

+
+
object_type : str, default=auto
+
Interest object type. Can be "object", "hairs-like" or "auto".
+
matting_mask_size : int, default=2048
+
The size of the input image for the matting neural network.
+
seg_mask_size : int, default=640
+
The size of the input image for the segmentation neural network.
+
batch_size_pre : int, default=5
+
Number of images processed per one preprocessing method call.
+
batch_size_seg : int, default=2
+
Number of images processed per one segmentation neural network call.
+
batch_size_matting : int, default=1
+
Number of images processed per one matting neural network call.
+
device : Literal[cpu, cuda], default=cpu
+
Processing device
+
fp16 : bool, default=False
+
Use half precision. Reduce memory usage and increase speed.
+
+
+

Caution: ⚠️ Experimental support

+
+
+
trimap_prob_threshold : int, default=231
+
Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied
+
trimap_dilation : int, default=30
+
The size of the offset radius from the object mask in pixels when forming an unknown area
+
trimap_erosion_iters : int, default=5
+
The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area
+
refine_mask_size : int, default=900
+
The size of the input image for the refinement neural network.
+
batch_size_refine : int, default=1
+
Number of images processed per one refinement neural network call.
+
+
+

Note

+
1. Changing seg_mask_size may cause an out-of-memory error if the value is too large, and it may also result in reduced precision. I do not recommend changing this value. You can change matting_mask_size in the range (1024 to 4096) to improve object edge refining quality, but it will cause very high RAM and video memory consumption. Also, you can change batch size to accelerate background removal, but this also causes very high video memory consumption if the value is too big.
2. Changing trimap_prob_threshold, trimap_kernel_size, trimap_erosion_iters may improve object edge refining quality.
+
+
class HiInterface(Interface):
+    def __init__(
+        self,
+        object_type: str = "auto",
+        batch_size_pre=5,
+        batch_size_seg=2,
+        batch_size_matting=1,
+        batch_size_refine=1,
+        device="cpu",
+        seg_mask_size=640,
+        matting_mask_size=2048,
+        refine_mask_size=900,
+        trimap_prob_threshold=231,
+        trimap_dilation=30,
+        trimap_erosion_iters=5,
+        fp16=False,
+    ):
+        """
+        Initializes High Level interface.
+
+        Args:
+            object_type (str, default=auto): Interest object type. Can be "object", "hairs-like" or "auto".
+            matting_mask_size (int, default=2048): The size of the input image for the matting neural network.
+            seg_mask_size (int, default=640): The size of the input image for the segmentation neural network.
+            batch_size_pre (int, default=5): Number of images processed per one preprocessing method call.
+            batch_size_seg (int, default=2): Number of images processed per one segmentation neural network call.
+            batch_size_matting (int, default=1): Number of images processed per one matting neural network call.
+            device (Literal[cpu, cuda], default=cpu): Processing device
+            fp16 (bool, default=False): Use half precision. Reduce memory usage and increase speed.
+            .. CAUTION:: ⚠️ **Experimental support**
+            trimap_prob_threshold (int, default=231): Probability threshold at which the prob_filter and prob_as_unknown_area operations will be applied
+            trimap_dilation (int, default=30): The size of the offset radius from the object mask in pixels when forming an unknown area
+            trimap_erosion_iters (int, default=5): The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area
+            refine_mask_size (int, default=900): The size of the input image for the refinement neural network.
+            batch_size_refine (int, default=1): Number of images processed per one refinement neural network call.
+
+
+        .. NOTE::
+            1. Changing seg_mask_size may cause an `out-of-memory` error if the value is too large, and it may also
+            result in reduced precision. I do not recommend changing this value. You can change `matting_mask_size` in
+            range from `(1024 to 4096)` to improve object edge refining quality, but it will cause very high RAM and
+            video memory consumption. Also, you can change batch size to accelerate background removal, but it also causes
+            very high video memory consumption if the value is too big.
+            2. Changing `trimap_prob_threshold`, `trimap_kernel_size`, `trimap_erosion_iters` may improve object edge
+            refining quality.
+        """
+        preprocess_pipeline = None
+
+        if object_type == "object":
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "hairs-like":
+            self._segnet = U2NET(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+        elif object_type == "auto":
+            # Using Tracer by default,
+            # but it will dynamically switch to other if needed
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+            self._scene_classifier = SceneClassifier(
+                device=device, fp16=fp16, batch_size=batch_size_pre
+            )
+            preprocess_pipeline = AutoScene(scene_classifier=self._scene_classifier)
+
+        else:
+            warnings.warn(
+                f"Unknown object type: {object_type}. Using default object type: object"
+            )
+            self._segnet = TracerUniversalB7(
+                device=device,
+                batch_size=batch_size_seg,
+                input_image_size=seg_mask_size,
+                fp16=fp16,
+            )
+
+        self._cascade_psp = CascadePSP(
+            device=device,
+            batch_size=batch_size_refine,
+            input_tensor_size=refine_mask_size,
+            fp16=fp16,
+        )
+        self._fba = FBAMatting(
+            batch_size=batch_size_matting,
+            device=device,
+            input_tensor_size=matting_mask_size,
+            fp16=fp16,
+        )
+        self._trimap_generator = TrimapGenerator(
+            prob_threshold=trimap_prob_threshold,
+            kernel_size=trimap_dilation,
+            erosion_iters=trimap_erosion_iters,
+        )
+        super(HiInterface, self).__init__(
+            pre_pipe=preprocess_pipeline,
+            seg_pipe=self._segnet,
+            post_pipe=CasMattingMethod(
+                refining_module=self._cascade_psp,
+                matting_module=self._fba,
+                trimap_generator=self._trimap_generator,
+                device=device,
+            ),
+            device=device,
+        )
+
+

Ancestors

+ +
+
+
+
+ +
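As a small complementary sketch, explicitly selecting the hairs-like branch (which swaps TracerUniversalB7 for U2NET and skips the AutoScene preprocessing); the input path is hypothetical and all other parameters keep their defaults:

``` python
from carvekit.api.high import HiInterface

# "hairs-like" forces U2NET segmentation instead of the default auto selection
interface = HiInterface(object_type="hairs-like", device="cpu", fp16=False)

images_wo_bg = interface(["./portrait.jpg"])   # hypothetical input photo
images_wo_bg[0].save("portrait_no_bg.png")
```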
\ No newline at end of file
diff --git a/docs/api/image_utils.html b/docs/api/image_utils.html
new file mode 100644
index 0000000..c384714
--- /dev/null
+++ b/docs/api/image_utils.html
@@ -0,0 +1,510 @@
+image_utils API documentation
+
+
+

Module image_utils

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
"""
+    Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+    Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+    License: Apache License 2.0
+"""
+
+import pathlib
+from typing import Union, Any, Tuple
+
+import PIL.Image
+import numpy as np
+import torch
+
+ALLOWED_SUFFIXES = [".jpg", ".jpeg", ".bmp", ".png", ".webp"]
+
+
+def to_tensor(x: Any) -> torch.Tensor:
+    """
+    Returns a PIL.Image.Image as a torch tensor without swapping tensor dims.
+
+    Args:
+        x (PIL.Image.Image): image
+
+    Returns:
+        torch.Tensor: image as torch tensor
+    """
+    return torch.tensor(np.array(x, copy=True))
+
+
+def load_image(file: Union[str, pathlib.Path, PIL.Image.Image]) -> PIL.Image.Image:
+    """Returns a `PIL.Image.Image` class by string path or `pathlib.Path` or `PIL.Image.Image` instance
+
+    Args:
+        file (Union[str, pathlib.Path, PIL.Image.Image]): File path or `PIL.Image.Image` instance
+
+    Returns:
+        PIL.Image.Image: image instance loaded from `file` location
+
+    Raises:
+        ValueError: If the file does not exist, is a directory, isn't an image, or is not a correct PIL Image
+
+    """
+    if isinstance(file, str) and is_image_valid(pathlib.Path(file)):
+        return PIL.Image.open(file)
+    elif isinstance(file, PIL.Image.Image):
+        return file
+    elif isinstance(file, pathlib.Path) and is_image_valid(file):
+        return PIL.Image.open(str(file))
+    else:
+        raise ValueError("Unknown input file type")
+
+
+def convert_image(image: PIL.Image.Image, mode="RGB") -> PIL.Image.Image:
+    """Performs image conversion to correct color mode
+
+    Args:
+        image (PIL.Image.Image): `PIL.Image.Image` instance
+        mode (str, default=RGB): Color mode to convert
+
+    Returns:
+        PIL.Image.Image: converted image
+
+    Raises:
+        ValueError: If the image doesn't have a convertible color mode, or it is too small
+    """
+    if is_image_valid(image):
+        return image.convert(mode)
+
+
+def is_image_valid(image: Union[pathlib.Path, PIL.Image.Image]) -> bool:
+    """This function performs image validation.
+
+    Args:
+        image (Union[pathlib.Path, PIL.Image.Image]): Path to the image or `PIL.Image.Image` instance being checked.
+
+    Returns:
+        bool: True if image is valid, False otherwise.
+
+    Raises:
+        ValueError: If the file is not a valid image path, the image doesn't have a convertible color mode, or it is too small
+
+    """
+    if isinstance(image, pathlib.Path):
+        if not image.exists():
+            raise ValueError("File is not exists")
+        elif image.is_dir():
+            raise ValueError("File is a directory")
+        elif image.suffix.lower() not in ALLOWED_SUFFIXES:
+            raise ValueError(
+                f"Unsupported image format. Supported file formats: {', '.join(ALLOWED_SUFFIXES)}"
+            )
+    elif isinstance(image, PIL.Image.Image):
+        if not (image.size[0] > 32 and image.size[1] > 32):
+            raise ValueError("Image should be bigger then (32x32) pixels.")
+        elif image.mode not in [
+            "RGB",
+            "RGBA",
+            "L",
+        ]:
+            raise ValueError("Wrong image color mode.")
+    else:
+        raise ValueError("Unknown input file type")
+    return True
+
+
+def transparency_paste(
+    bg_img: PIL.Image.Image, fg_img: PIL.Image.Image, box=(0, 0)
+) -> PIL.Image.Image:
+    """
+    Inserts an image into another image while maintaining transparency.
+
+    Args:
+        bg_img (PIL.Image.Image): background image
+        fg_img (PIL.Image.Image): foreground image
+        box (tuple[int, int]): place to paste
+
+    Returns:
+        PIL.Image.Image: Background image with pasted foreground image at point or in the specified box
+    """
+    fg_img_trans = PIL.Image.new("RGBA", bg_img.size)
+    fg_img_trans.paste(fg_img, box, mask=fg_img)
+    new_img = PIL.Image.alpha_composite(bg_img, fg_img_trans)
+    return new_img
+
+
+def add_margin(
+    pil_img: PIL.Image.Image,
+    top: int,
+    right: int,
+    bottom: int,
+    left: int,
+    color: Tuple[int, int, int, int],
+) -> PIL.Image.Image:
+    """
+    Adds margin to the image.
+
+    Args:
+        pil_img (PIL.Image.Image): Image to which the margin should be added.
+        top (int): pixels count at top side
+        right (int): pixels count at right side
+        bottom (int): pixels count at bottom side
+        left (int): pixels count at left side
+        color (Tuple[int, int, int, int]): color of margin
+
+    Returns:
+        PIL.Image.Image: Image with margin.
+    """
+    width, height = pil_img.size
+    new_width = width + right + left
+    new_height = height + top + bottom
+    # noinspection PyTypeChecker
+    result = PIL.Image.new(pil_img.mode, (new_width, new_height), color)
+    result.paste(pil_img, (left, top))
+    return result
+
+
+
+
+
+
+
+
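As an illustrative sketch (not part of the generated docs; `photo.jpg` is a placeholder path), the helpers above compose naturally when preparing an image for a network:

``` python
import pathlib

from carvekit.utils.image_utils import load_image, convert_image, to_tensor

image = load_image(pathlib.Path("photo.jpg"))  # placeholder path; a PIL.Image.Image also works
rgb = convert_image(image, mode="RGB")         # validate and force a 3-channel image
tensor = to_tensor(rgb)                        # uint8 tensor, dims kept as (H, W, C)
print(tensor.shape)
```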

Functions

+
+
+def add_margin(pil_img:Β PIL.Image.Image, top:Β int, right:Β int, bottom:Β int, left:Β int, color:Β Tuple[int,Β int,Β int,Β int]) ‑>Β PIL.Image.Image +
+
+

Adds margin to the image.

+

Args

+
+
pil_img : PIL.Image.Image
+
Image to which the margin should be added.
+
top : int
+
pixels count at top side
+
right : int
+
pixels count at right side
+
bottom : int
+
pixels count at bottom side
+
left : int
+
pixels count at left side
+
color : Tuple[int, int, int, int]
+
color of margin
+
+

Returns

+
+
PIL.Image.Image
+
Image with margin.
+
+
+ +Expand source code + +
def add_margin(
+    pil_img: PIL.Image.Image,
+    top: int,
+    right: int,
+    bottom: int,
+    left: int,
+    color: Tuple[int, int, int, int],
+) -> PIL.Image.Image:
+    """
+    Adds margin to the image.
+
+    Args:
+        pil_img (PIL.Image.Image): Image to which the margin should be added.
+        top (int): pixels count at top side
+        right (int): pixels count at right side
+        bottom (int): pixels count at bottom side
+        left (int): pixels count at left side
+        color (Tuple[int, int, int, int]): color of margin
+
+    Returns:
+        PIL.Image.Image: Image with margin.
+    """
+    width, height = pil_img.size
+    new_width = width + right + left
+    new_height = height + top + bottom
+    # noinspection PyTypeChecker
+    result = PIL.Image.new(pil_img.mode, (new_width, new_height), color)
+    result.paste(pil_img, (left, top))
+    return result
+
+
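For illustration, a small sketch that pads a synthetic RGBA image with a transparent 10-pixel border (the input is generated on the fly rather than loaded from disk):

``` python
import PIL.Image

from carvekit.utils.image_utils import add_margin

img = PIL.Image.new("RGBA", (64, 64), (255, 0, 0, 255))  # synthetic red square
padded = add_margin(img, top=10, right=10, bottom=10, left=10,
                    color=(0, 0, 0, 0))                  # fully transparent border
print(padded.size)  # (84, 84)
```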
+
+def convert_image(image:Β PIL.Image.Image, mode='RGB') ‑>Β PIL.Image.Image +
+
+

Performs image conversion to correct color mode

+

Args

+
+
image : PIL.Image.Image
+
PIL.Image.Image instance
+
mode : str, default=RGB
+
Color mode to convert
+
+

Returns

+
+
PIL.Image.Image
+
converted image
+
+

Raises

+
+
ValueError
+
If the image doesn't have a convertible color mode, or it is too small
+
+
+ +Expand source code + +
def convert_image(image: PIL.Image.Image, mode="RGB") -> PIL.Image.Image:
+    """Performs image conversion to correct color mode
+
+    Args:
+        image (PIL.Image.Image): `PIL.Image.Image` instance
+        mode (str, default=RGB): Color mode to convert
+
+    Returns:
+        PIL.Image.Image: converted image
+
+    Raises:
+        ValueError: If the image doesn't have a convertible color mode, or it is too small
+    """
+    if is_image_valid(image):
+        return image.convert(mode)
+
+
+
+def is_image_valid(image:Β Union[pathlib.Path,Β PIL.Image.Image]) ‑>Β bool +
+
+

This function performs image validation.

+

Args

+
+
image : Union[pathlib.Path, PIL.Image.Image]
+
Path to the image or PIL.Image.Image instance being checked.
+
+

Returns

+
+
bool
+
True if image is valid, False otherwise.
+
+

Raises

+
+
ValueError
+
If the file is not a valid image path, the image doesn't have a convertible color mode, or it is too small
+
+
+ +Expand source code + +
def is_image_valid(image: Union[pathlib.Path, PIL.Image.Image]) -> bool:
+    """This function performs image validation.
+
+    Args:
+        image (Union[pathlib.Path, PIL.Image.Image]): Path to the image or `PIL.Image.Image` instance being checked.
+
+    Returns:
+        bool: True if image is valid, False otherwise.
+
+    Raises:
+        ValueError: If the file is not a valid image path, the image doesn't have a convertible color mode, or it is too small
+
+    """
+    if isinstance(image, pathlib.Path):
+        if not image.exists():
+            raise ValueError("File is not exists")
+        elif image.is_dir():
+            raise ValueError("File is a directory")
+        elif image.suffix.lower() not in ALLOWED_SUFFIXES:
+            raise ValueError(
+                f"Unsupported image format. Supported file formats: {', '.join(ALLOWED_SUFFIXES)}"
+            )
+    elif isinstance(image, PIL.Image.Image):
+        if not (image.size[0] > 32 and image.size[1] > 32):
+            raise ValueError("Image should be bigger then (32x32) pixels.")
+        elif image.mode not in [
+            "RGB",
+            "RGBA",
+            "L",
+        ]:
+            raise ValueError("Wrong image color mode.")
+    else:
+        raise ValueError("Unknown input file type")
+    return True
+
+
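A hedged sketch of how validation surfaces problems: the function returns `True` for acceptable inputs and raises `ValueError` otherwise (here triggered by an image below the 32x32 minimum):

``` python
import PIL.Image

from carvekit.utils.image_utils import is_image_valid

print(is_image_valid(PIL.Image.new("RGB", (64, 64))))  # True

try:
    is_image_valid(PIL.Image.new("RGB", (16, 16)))  # too small, below 32x32
except ValueError as err:
    print(err)
```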
+
+def load_image(file:Β Union[str,Β pathlib.Path,Β PIL.Image.Image]) ‑>Β PIL.Image.Image +
+
+

Returns a PIL.Image.Image instance from a string path, a pathlib.Path, or an existing PIL.Image.Image instance

+

Args

+
+
file : Union[str, pathlib.Path, PIL.Image.Image]
+
File path or PIL.Image.Image instance
+
+

Returns

+
+
PIL.Image.Image
+
image instance loaded from file location
+
+

Raises

+
+
ValueError
+
If the file does not exist, is a directory, isn't an image, or is not a correct PIL Image
+
+
+ +Expand source code + +
def load_image(file: Union[str, pathlib.Path, PIL.Image.Image]) -> PIL.Image.Image:
+    """Returns a `PIL.Image.Image` class by string path or `pathlib.Path` or `PIL.Image.Image` instance
+
+    Args:
+        file (Union[str, pathlib.Path, PIL.Image.Image]): File path or `PIL.Image.Image` instance
+
+    Returns:
+        PIL.Image.Image: image instance loaded from `file` location
+
+    Raises:
+        ValueError: If the file does not exist, is a directory, isn't an image, or is not a correct PIL Image
+
+    """
+    if isinstance(file, str) and is_image_valid(pathlib.Path(file)):
+        return PIL.Image.open(file)
+    elif isinstance(file, PIL.Image.Image):
+        return file
+    elif isinstance(file, pathlib.Path) and is_image_valid(file):
+        return PIL.Image.open(str(file))
+    else:
+        raise ValueError("Unknown input file type")
+
+
+
+def to_tensor(x:Β Any) ‑>Β torch.Tensor +
+
+

Returns a PIL.Image.Image as a torch tensor without swapping tensor dims.

+

Args

+
+
x : PIL.Image.Image
+
image
+
+

Returns

+
+
torch.Tensor
+
image as torch tensor
+
+
+ +Expand source code + +
def to_tensor(x: Any) -> torch.Tensor:
+    """
+    Returns a PIL.Image.Image as a torch tensor without swapping tensor dims.
+
+    Args:
+        x (PIL.Image.Image): image
+
+    Returns:
+        torch.Tensor: image as torch tensor
+    """
+    return torch.tensor(np.array(x, copy=True))
+
+
+
+def transparency_paste(bg_img:Β PIL.Image.Image, fg_img:Β PIL.Image.Image, box=(0, 0)) ‑>Β PIL.Image.Image +
+
+

Inserts an image into another image while maintaining transparency.

+

Args

+
+
bg_img : PIL.Image.Image
+
background image
+
fg_img : PIL.Image.Image
+
foreground image
+
box : tuple[int, int]
+
place to paste
+
+

Returns

+
+
PIL.Image.Image
+
Background image with pasted foreground image at point or in the specified box
+
+
+ +Expand source code + +
def transparency_paste(
+    bg_img: PIL.Image.Image, fg_img: PIL.Image.Image, box=(0, 0)
+) -> PIL.Image.Image:
+    """
+    Inserts an image into another image while maintaining transparency.
+
+    Args:
+        bg_img (PIL.Image.Image): background image
+        fg_img (PIL.Image.Image): foreground image
+        box (tuple[int, int]): place to paste
+
+    Returns:
+        PIL.Image.Image: Background image with pasted foreground image at point or in the specified box
+    """
+    fg_img_trans = PIL.Image.new("RGBA", bg_img.size)
+    fg_img_trans.paste(fg_img, box, mask=fg_img)
+    new_img = PIL.Image.alpha_composite(bg_img, fg_img_trans)
+    return new_img
+
+
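An illustrative sketch with two synthetic images; `composite.png` is just a placeholder output name:

``` python
import PIL.Image

from carvekit.utils.image_utils import transparency_paste

bg = PIL.Image.new("RGBA", (128, 128), (255, 255, 255, 255))  # opaque white background
fg = PIL.Image.new("RGBA", (32, 32), (0, 128, 0, 180))        # semi-transparent square
combined = transparency_paste(bg, fg, box=(48, 48))
combined.save("composite.png")
```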
+
+
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/interface.html b/docs/api/interface.html
new file mode 100644
index 0000000..39c5799
--- /dev/null
+++ b/docs/api/interface.html
@@ -0,0 +1,234 @@
+interface API documentation
+
+
+

Module interface

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from pathlib import Path
+from typing import Union, List, Optional
+
+from PIL import Image
+
+from carvekit.ml.wrap.basnet import BASNET
+from carvekit.ml.wrap.deeplab_v3 import DeepLabV3
+from carvekit.ml.wrap.u2net import U2NET
+from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
+from carvekit.pipelines.preprocessing import PreprocessingStub, AutoScene
+from carvekit.pipelines.postprocessing import MattingMethod, CasMattingMethod
+from carvekit.utils.image_utils import load_image
+from carvekit.utils.mask_utils import apply_mask
+from carvekit.utils.pool_utils import thread_pool_processing
+
+
+class Interface:
+    def __init__(
+        self,
+        seg_pipe: Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]],
+        pre_pipe: Optional[Union[PreprocessingStub, AutoScene]] = None,
+        post_pipe: Optional[Union[MattingMethod, CasMattingMethod]] = None,
+        device="cpu",
+    ):
+        """
+        Initializes an object for interacting with pipelines and other components of the CarveKit framework.
+
+        Args:
+            seg_pipe (Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]]): Initialized segmentation network object
+            pre_pipe (Optional[Union[PreprocessingStub, AutoScene]]): Initialized pre-processing pipeline object
+            post_pipe (Optional[Union[MattingMethod, CasMattingMethod]]): Initialized postprocessing pipeline object
+            device (Literal[cpu, cuda], default=cpu): The processing device that will be used to apply the masks to the images.
+        """
+        self.device = device
+        self.preprocessing_pipeline = pre_pipe
+        self.segmentation_pipeline = seg_pipe
+        self.postprocessing_pipeline = post_pipe
+
+    def __call__(
+        self, images: List[Union[str, Path, Image.Image]]
+    ) -> List[Image.Image]:
+        """
+        Removes the background from the specified images.
+
+        Args:
+            images: list of input images
+
+        Returns:
+            List of images without background as PIL.Image.Image instances
+        """
+        if self.segmentation_pipeline is None:
+            raise ValueError(
+                "Segmentation pipeline is not initialized."
+                "Override the class or pass the pipeline to the constructor."
+            )
+        images = thread_pool_processing(load_image, images)
+        if self.preprocessing_pipeline is not None:
+            masks: List[Image.Image] = self.preprocessing_pipeline(
+                interface=self, images=images
+            )
+        else:
+            masks: List[Image.Image] = self.segmentation_pipeline(images=images)
+
+        if self.postprocessing_pipeline is not None:
+            images: List[Image.Image] = self.postprocessing_pipeline(
+                images=images, masks=masks
+            )
+        else:
+            images = list(
+                map(
+                    lambda x: apply_mask(
+                        image=images[x], mask=masks[x], device=self.device
+                    ),
+                    range(len(images)),
+                )
+            )
+        return images
+
+
+
+
+
+
+
+
+
+
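As a minimal sketch of the flow described above (file names are placeholders): with no pre- or post-processing pipeline, the raw segmentation mask is applied directly to the input image.

``` python
from carvekit.api.interface import Interface
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7

seg_net = TracerUniversalB7(device="cpu", batch_size=1, fp16=False)
interface = Interface(seg_pipe=seg_net, pre_pipe=None, post_pipe=None, device="cpu")

result = interface(["photo.jpg"])   # placeholder path
result[0].save("photo_no_bg.png")   # mask applied without matting refinement
```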

Classes

+
+
+class Interface +(seg_pipe:Β Union[U2NET,Β BASNET,Β DeepLabV3,Β TracerUniversalB7,Β ForwardRef(None)], pre_pipe:Β Union[PreprocessingStub,Β AutoScene,Β ForwardRef(None)]Β =Β None, post_pipe:Β Union[MattingMethod,Β CasMattingMethod,Β ForwardRef(None)]Β =Β None, device='cpu') +
+
+

Initializes an object for interacting with pipelines and other components of the CarveKit framework.

+

Args

+
+
seg_pipe : Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]]
+
Initialized segmentation network object
+
pre_pipe : Optional[Union[PreprocessingStub, AutoScene]]
+
Initialized pre-processing pipeline object
+
post_pipe : Optional[Union[MattingMethod, CasMattingMethod]]
+
Initialized postprocessing pipeline object
+
device : Literal[cpu, cuda], default=cpu
+
The processing device that will be used to apply the masks to the images.
+
+
+ +Expand source code + +
class Interface:
+    def __init__(
+        self,
+        seg_pipe: Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]],
+        pre_pipe: Optional[Union[PreprocessingStub, AutoScene]] = None,
+        post_pipe: Optional[Union[MattingMethod, CasMattingMethod]] = None,
+        device="cpu",
+    ):
+        """
+        Initializes an object for interacting with pipelines and other components of the CarveKit framework.
+
+        Args:
+            seg_pipe (Optional[Union[U2NET, BASNET, DeepLabV3, TracerUniversalB7]]): Initialized segmentation network object
+            pre_pipe (Optional[Union[PreprocessingStub, AutoScene]]): Initialized pre-processing pipeline object
+            post_pipe (Optional[Union[MattingMethod, CasMattingMethod]]): Initialized postprocessing pipeline object
+            device (Literal[cpu, cuda], default=cpu): The processing device that will be used to apply the masks to the images.
+        """
+        self.device = device
+        self.preprocessing_pipeline = pre_pipe
+        self.segmentation_pipeline = seg_pipe
+        self.postprocessing_pipeline = post_pipe
+
+    def __call__(
+        self, images: List[Union[str, Path, Image.Image]]
+    ) -> List[Image.Image]:
+        """
+        Removes the background from the specified images.
+
+        Args:
+            images: list of input images
+
+        Returns:
+            List of images without background as PIL.Image.Image instances
+        """
+        if self.segmentation_pipeline is None:
+            raise ValueError(
+                "Segmentation pipeline is not initialized."
+                "Override the class or pass the pipeline to the constructor."
+            )
+        images = thread_pool_processing(load_image, images)
+        if self.preprocessing_pipeline is not None:
+            masks: List[Image.Image] = self.preprocessing_pipeline(
+                interface=self, images=images
+            )
+        else:
+            masks: List[Image.Image] = self.segmentation_pipeline(images=images)
+
+        if self.postprocessing_pipeline is not None:
+            images: List[Image.Image] = self.postprocessing_pipeline(
+                images=images, masks=masks
+            )
+        else:
+            images = list(
+                map(
+                    lambda x: apply_mask(
+                        image=images[x], mask=masks[x], device=self.device
+                    ),
+                    range(len(images)),
+                )
+            )
+        return images
+
+
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/matting.html b/docs/api/matting.html
new file mode 100644
index 0000000..77ca37f
--- /dev/null
+++ b/docs/api/matting.html
@@ -0,0 +1,221 @@
+matting API documentation
+
+
+

Module matting

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from carvekit.ml.wrap.fba_matting import FBAMatting
+from typing import Union, List
+from PIL import Image
+from pathlib import Path
+from carvekit.trimap.cv_gen import CV2TrimapGenerator
+from carvekit.trimap.generator import TrimapGenerator
+from carvekit.utils.mask_utils import apply_mask
+from carvekit.utils.pool_utils import thread_pool_processing
+from carvekit.utils.image_utils import load_image, convert_image
+
+__all__ = ["MattingMethod"]
+
+
+class MattingMethod:
+    """
+    Improving the edges of the object mask using neural networks for matting and algorithms for creating trimap.
+    Neural network for matting performs accurate object edge detection by using a special map called trimap,
+    with unknown area that we scan for boundary, already known general object area and the background."""
+
+    def __init__(
+        self,
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes Matting Method class.
+
+        Args:
+        - `matting_module`: Initialized matting neural network class
+        - `trimap_generator`: Initialized trimap generator class
+        - `device`: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+        - `images`: list of images
+        - `masks`: list of masks
+
+        Returns:
+        list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(original_image=images[x], mask=masks[x]),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class MattingMethod +(matting_module:Β FBAMatting, trimap_generator:Β Union[TrimapGenerator,Β CV2TrimapGenerator], device='cpu') +
+
+

Improving the edges of the object mask using neural networks for matting and algorithms for creating trimap. +Neural network for matting performs accurate object edge detection by using a special map called trimap, +with unknown area that we scan for boundary, already known general object area and the background.

+

Initializes Matting Method class.

+

Args: +- matting_module: Initialized matting neural network class +- trimap_generator: Initialized trimap generator class +- device: Processing device used for applying mask to image

+
+ +Expand source code + +
class MattingMethod:
+    """
+    Improving the edges of the object mask using neural networks for matting and algorithms for creating trimap.
+    Neural network for matting performs accurate object edge detection by using a special map called trimap,
+    with unknown area that we scan for boundary, already known general object area and the background."""
+
+    def __init__(
+        self,
+        matting_module: Union[FBAMatting],
+        trimap_generator: Union[TrimapGenerator, CV2TrimapGenerator],
+        device="cpu",
+    ):
+        """
+        Initializes Matting Method class.
+
+        Args:
+        - `matting_module`: Initialized matting neural network class
+        - `trimap_generator`: Initialized trimap generator class
+        - `device`: Processing device used for applying mask to image
+        """
+        self.device = device
+        self.matting_module = matting_module
+        self.trimap_generator = trimap_generator
+
+    def __call__(
+        self,
+        images: List[Union[str, Path, Image.Image]],
+        masks: List[Union[str, Path, Image.Image]],
+    ):
+        """
+        Passes data through apply_mask function
+
+        Args:
+        - `images`: list of images
+        - `masks`: list of masks
+
+        Returns:
+        list of images
+        """
+        if len(images) != len(masks):
+            raise ValueError("Images and Masks lists should have same length!")
+        images = thread_pool_processing(lambda x: convert_image(load_image(x)), images)
+        masks = thread_pool_processing(
+            lambda x: convert_image(load_image(x), mode="L"), masks
+        )
+        trimaps = thread_pool_processing(
+            lambda x: self.trimap_generator(original_image=images[x], mask=masks[x]),
+            range(len(images)),
+        )
+        alpha = self.matting_module(images=images, trimaps=trimaps)
+        return list(
+            map(
+                lambda x: apply_mask(
+                    image=images[x], mask=alpha[x], device=self.device
+                ),
+                range(len(images)),
+            )
+        )
+
+
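A hedged construction sketch (file names are placeholders; the mask is assumed to come from one of the segmentation networks):

``` python
from carvekit.ml.wrap.fba_matting import FBAMatting
from carvekit.pipelines.postprocessing import MattingMethod
from carvekit.trimap.generator import TrimapGenerator

fba = FBAMatting(device="cpu", batch_size=1, input_tensor_size=2048, fp16=False)
trimap_generator = TrimapGenerator(prob_threshold=231, kernel_size=30, erosion_iters=5)
matting = MattingMethod(matting_module=fba, trimap_generator=trimap_generator, device="cpu")

result = matting(images=["photo.jpg"], masks=["photo_mask.png"])  # placeholder paths
result[0].save("photo_refined.png")
```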
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/models_loc.html b/docs/api/models_loc.html
new file mode 100644
index 0000000..34ecdec
--- /dev/null
+++ b/docs/api/models_loc.html
@@ -0,0 +1,412 @@
+models_loc API documentation
+
+
+

Module models_loc

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import pathlib
+from carvekit.ml.files import checkpoints_dir
+from carvekit.utils.download_models import downloader
+
+
+def u2net_full_pretrained() -> pathlib.Path:
+    """Returns u2net pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("u2net.pth")
+
+
+def basnet_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("basnet.pth")
+
+
+def deeplab_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("deeplab.pth")
+
+
+def fba_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("fba_matting.pth")
+
+
+def tracer_b7_pretrained() -> pathlib.Path:
+    """Returns TRACER with EfficientNet v1 b7 encoder pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("tracer_b7.pth")
+
+
+def scene_classifier_pretrained() -> pathlib.Path:
+    """Returns scene classifier pretrained model location
+    This model is used to classify scenes into 3 categories: hard, soft, digital
+
+    hard - scenes with hard edges, such as objects, buildings, etc.
+    soft - scenes with soft edges, such as portraits, hair, animals, etc.
+    digital - digital scenes, such as screenshots, graphics, etc.
+
+    more info: https://huggingface.co/Carve/scene_classifier
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("scene_classifier.pth")
+
+
+def yolov4_coco_pretrained() -> pathlib.Path:
+    """Returns yolov4 classifier pretrained model location
+    This model is used to classify objects in images.
+
+    Training dataset: COCO 2017
+    Training classes: 80
+
+    It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch)
+    We have only added coco classnames to the model.
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("yolov4_coco_with_classes.pth")
+
+
+def cascadepsp_pretrained() -> pathlib.Path:
+    """Returns cascade psp pretrained model location
+    This model is used to refine segmentation masks.
+
+    Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000
+    more info: https://huggingface.co/Carve/cascadepsp
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("cascadepsp.pth")
+
+
+def download_all():
+    u2net_full_pretrained()
+    fba_pretrained()
+    deeplab_pretrained()
+    basnet_pretrained()
+    tracer_b7_pretrained()
+    scene_classifier_pretrained()
+    yolov4_coco_pretrained()
+    cascadepsp_pretrained()
+
+
+
+
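An illustrative sketch, assuming `downloader` fetches a checkpoint on first use and returns its cached location (as the import above suggests):

``` python
from carvekit.ml.files.models_loc import download_all, tracer_b7_pretrained

checkpoint = tracer_b7_pretrained()  # resolves tracer_b7.pth, downloading it if needed
print(checkpoint)                    # pathlib.Path to the cached checkpoint

download_all()                       # prefetch every checkpoint used by the framework
```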
+
+
+
+

Functions

+
+
+def basnet_pretrained() ‑>Β pathlib.Path +
+
+

Returns basnet pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def basnet_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("basnet.pth")
+
+
+
+def cascadepsp_pretrained() ‑>Β pathlib.Path +
+
+

Returns cascade psp pretrained model location +This model is used to refine segmentation masks.

+

Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000 +more info: https://huggingface.co/Carve/cascadepsp

+

Returns

+

pathlib.Path to model location

+
+ +Expand source code + +
def cascadepsp_pretrained() -> pathlib.Path:
+    """Returns cascade psp pretrained model location
+    This model is used to refine segmentation masks.
+
+    Training dataset: MSRA-10K, DUT-OMRON, ECSSD and FSS-1000
+    more info: https://huggingface.co/Carve/cascadepsp
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("cascadepsp.pth")
+
+
+
+def deeplab_pretrained() ‑>Β pathlib.Path +
+
+

Returns DeepLabV3 pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def deeplab_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("deeplab.pth")
+
+
+
+def download_all() +
+
+
+
+ +Expand source code + +
def download_all():
+    u2net_full_pretrained()
+    fba_pretrained()
+    deeplab_pretrained()
+    basnet_pretrained()
+    tracer_b7_pretrained()
+    scene_classifier_pretrained()
+    yolov4_coco_pretrained()
+    cascadepsp_pretrained()
+
+
+
+def fba_pretrained() ‑>Β pathlib.Path +
+
+

Returns FBA Matting pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def fba_pretrained() -> pathlib.Path:
+    """Returns basnet pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("fba_matting.pth")
+
+
+
+def scene_classifier_pretrained() ‑>Β pathlib.Path +
+
+

Returns scene classifier pretrained model location +This model is used to classify scenes into 3 categories: hard, soft, digital

+

hard - scenes with hard edges, such as objects, buildings, etc. +soft - scenes with soft edges, such as portraits, hair, animals, etc. +digital - digital scenes, such as screenshots, graphics, etc.

+

more info: https://huggingface.co/Carve/scene_classifier

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def scene_classifier_pretrained() -> pathlib.Path:
+    """Returns scene classifier pretrained model location
+    This model is used to classify scenes into 3 categories: hard, soft, digital
+
+    hard - scenes with hard edges, such as objects, buildings, etc.
+    soft - scenes with soft edges, such as portraits, hairs, animal, etc.
+    digital - digital scenes, such as screenshots, graphics, etc.
+
+    more info: https://huggingface.co/Carve/scene_classifier
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("scene_classifier.pth")
+
+
+
+def tracer_b7_pretrained() ‑>Β pathlib.Path +
+
+

Returns TRACER with EfficientNet v1 b7 encoder pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def tracer_b7_pretrained() -> pathlib.Path:
+    """Returns TRACER with EfficientNet v1 b7 encoder pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("tracer_b7.pth")
+
+
+
+def u2net_full_pretrained() ‑>Β pathlib.Path +
+
+

Returns u2net pretrained model location

+

Returns

+
+
pathlib.Path
+
model location
+
+
+ +Expand source code + +
def u2net_full_pretrained() -> pathlib.Path:
+    """Returns u2net pretrained model location
+
+    Returns:
+        pathlib.Path: model location
+    """
+    return downloader("u2net.pth")
+
+
+
+def yolov4_coco_pretrained() ‑>Β pathlib.Path +
+
+

Returns yolov4 classifier pretrained model location +This model is used to classify objects in images.

+

Training dataset: COCO 2017 +Training classes: 80

+

It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch) +We have only added coco classnames to the model.

+

Returns

+

pathlib.Path to model location

+
+ +Expand source code + +
def yolov4_coco_pretrained() -> pathlib.Path:
+    """Returns yolov4 classifier pretrained model location
+    This model is used to classify objects in images.
+
+    Training dataset: COCO 2017
+    Training classes: 80
+
+    It's a modified version of the original model from https://github.com/Tianxiaomo/pytorch-YOLOv4 (pytorch)
+    We have only added coco classnames to the model.
+
+    Returns:
+        pathlib.Path to model location
+    """
+    return downloader("yolov4_coco_with_classes.pth")
+
+
+
+
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/scene_classifier.html b/docs/api/scene_classifier.html
new file mode 100644
index 0000000..6936110
--- /dev/null
+++ b/docs/api/scene_classifier.html
@@ -0,0 +1,454 @@
+scene_classifier API documentation
+
+
+

Module scene_classifier

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+
+import PIL.Image
+import torch
+import torch.nn.functional as F
+import torchvision.transforms as transforms
+from typing import List, Union, Tuple
+from torch.autograd import Variable
+
+from carvekit.ml.files.models_loc import scene_classifier_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["SceneClassifier"]
+
+
+class SceneClassifier:
+    """
+    SceneClassifier model interface
+
+    Description:
+        Performs a primary analysis of the image in order to select the necessary method for removing the background.
+        The choice is made by classifying the scene type.
+
+        The output can be the following types:
+        - hard
+        - soft
+        - digital
+
+    """
+
+    def __init__(
+        self,
+        topk: int = 1,
+        device="cpu",
+        batch_size: int = 4,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the Scene Classifier.
+
+        Args:
+            topk: number of top classes to return
+            device: processing device
+            batch_size: the number of images that the neural network processes in one run
+            fp16: use fp16 precision
+
+        """
+        if model_path is None:
+            model_path = scene_classifier_pretrained()
+        self.topk = topk
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+
+        self.transform = transforms.Compose(
+            [
+                transforms.Resize(256),
+                transforms.CenterCrop(224),
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+        state_dict = torch.load(model_path, map_location=device)
+        self.model = state_dict["model"]
+        self.class_to_idx = state_dict["class_to_idx"]
+        self.idx_to_class = {v: k for k, v in self.class_to_idx.items()}
+        self.model.to(device)
+        self.model.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+
+        Returns:
+            Top-k class of scene type, probability of these classes
+
+        """
+        ps = F.softmax(data.float(), dim=0)
+        topk = ps.cpu().topk(self.topk)
+
+        probs, classes = (e.data.numpy().squeeze().tolist() for e in topk)
+        if isinstance(classes, int):
+            classes = [classes]
+            probs = [probs]
+        return list(map(lambda x: self.idx_to_class[x], classes)), probs
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> Tuple[List[str], List[float]]:
+        """
+        Passes input images through the neural network and returns class predictions.
+
+        Args:
+            images: input images
+
+        Returns:
+            Top-k class of scene type, probability of these classes for every passed image
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.model, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = Variable(batches).to(self.device)
+                    masks = self.model.forward(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks_cpu[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+
+
+
+
+
+
+
+
+

Classes

+
+
+class SceneClassifier +(topk:Β intΒ =Β 1, device='cpu', batch_size:Β intΒ =Β 4, fp16:Β boolΒ =Β False, model_path:Β Union[str,Β pathlib.Path]Β =Β None) +
+
+

SceneClassifier model interface

+

Description

+

Performs a primary analysis of the image in order to select the necessary method for removing the background. +The choice is made by classifying the scene type.

+

The output can be the following types: +- hard +- soft +- digital

+

Initialize the Scene Classifier.

+

Args

+
+
topk
+
number of top classes to return
+
device
+
processing device
+
batch_size
+
the number of images that the neural network processes in one run
+
fp16
+
use fp16 precision
+
+
+ +Expand source code + +
class SceneClassifier:
+    """
+    SceneClassifier model interface
+
+    Description:
+        Performs a primary analysis of the image in order to select the necessary method for removing the background.
+        The choice is made by classifying the scene type.
+
+        The output can be the following types:
+        - hard
+        - soft
+        - digital
+
+    """
+
+    def __init__(
+        self,
+        topk: int = 1,
+        device="cpu",
+        batch_size: int = 4,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the Scene Classifier.
+
+        Args:
+            topk: number of top classes to return
+            device: processing device
+            batch_size: the number of images that the neural network processes in one run
+            fp16: use fp16 precision
+
+        """
+        if model_path is None:
+            model_path = scene_classifier_pretrained()
+        self.topk = topk
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+
+        self.transform = transforms.Compose(
+            [
+                transforms.Resize(256),
+                transforms.CenterCrop(224),
+                transforms.ToTensor(),
+                transforms.Normalize(
+                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+                ),
+            ]
+        )
+        state_dict = torch.load(model_path, map_location=device)
+        self.model = state_dict["model"]
+        self.class_to_idx = state_dict["class_to_idx"]
+        self.idx_to_class = {v: k for k, v in self.class_to_idx.items()}
+        self.model.to(device)
+        self.model.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data: input image
+
+        Returns:
+            input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data: output data from neural network
+
+        Returns:
+            Top-k class of scene type, probability of these classes
+
+        """
+        ps = F.softmax(data.float(), dim=0)
+        topk = ps.cpu().topk(self.topk)
+
+        probs, classes = (e.data.numpy().squeeze().tolist() for e in topk)
+        if isinstance(classes, int):
+            classes = [classes]
+            probs = [probs]
+        return list(map(lambda x: self.idx_to_class[x], classes)), probs
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> Tuple[List[str], List[float]]:
+        """
+        Passes input images through the neural network and returns class predictions.
+
+        Args:
+            images: input images
+
+        Returns:
+            Top-k class of scene type, probability of these classes for every passed image
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self.model, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = Variable(batches).to(self.device)
+                    masks = self.model.forward(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(masks_cpu[x]),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+
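A short usage sketch (the image path is a placeholder); the call returns one `(classes, probabilities)` tuple per input image:

``` python
from carvekit.ml.wrap.scene_classifier import SceneClassifier

classifier = SceneClassifier(device="cpu", batch_size=1, fp16=False, topk=2)
predictions = classifier(["photo.jpg"])  # placeholder path

classes, probs = predictions[0]  # top-2 scene labels and their probabilities
print(classes, probs)
```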

Methods

+
+
+def data_postprocessing(self, data:Β torch.Tensor) ‑>Β Tuple[List[str],Β List[float]] +
+
+

Transforms output data from neural network to suitable data +format for using with other components of this framework.

+

Args

+
+
data
+
output data from neural network
+
+

Returns

+

Top-k class of scene type, probability of these classes

+
+ +Expand source code + +
def data_postprocessing(self, data: torch.Tensor) -> Tuple[List[str], List[float]]:
+    """
+    Transforms output data from neural network to suitable data
+    format for using with other components of this framework.
+
+    Args:
+        data: output data from neural network
+
+    Returns:
+        Top-k class of scene type, probability of these classes
+
+    """
+    ps = F.softmax(data.float(), dim=0)
+    topk = ps.cpu().topk(self.topk)
+
+    probs, classes = (e.data.numpy().squeeze().tolist() for e in topk)
+    if isinstance(classes, int):
+        classes = [classes]
+        probs = [probs]
+    return list(map(lambda x: self.idx_to_class[x], classes)), probs
+
+
+
+def data_preprocessing(self, data:Β PIL.Image.Image) ‑>Β torch.FloatTensor +
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data
+
input image
+
+

Returns

+

input for neural network

+
+ +Expand source code + +
def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+    """
+    Transform input image to suitable data format for neural network
+
+    Args:
+        data: input image
+
+    Returns:
+        input for neural network
+
+    """
+
+    return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+
+
+
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/stub.html b/docs/api/stub.html
new file mode 100644
index 0000000..8b15793
--- /dev/null
+++ b/docs/api/stub.html
@@ -0,0 +1,122 @@
+stub API documentation
+
+
+

Module stub

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+from pathlib import Path
+from typing import Union, List
+
+from PIL import Image
+
+__all__ = ["PreprocessingStub"]
+
+
+class PreprocessingStub:
+    """Stub for future preprocessing methods"""
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Passes data through `interface.segmentation_pipeline()` method
+
+        Args:
+        - `interface`: Interface instance
+        - `images`: list of images
+
+        Returns:
+            the result of passing data through segmentation_pipeline method of interface
+        """
+        return interface.segmentation_pipeline(images=images)
+
+
+
+
+
+
+
+
+
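For illustration (placeholder image path): plugging the stub into an `Interface` is equivalent to passing no pre-processing pipeline at all, since it simply forwards the images to the segmentation pipeline:

``` python
from carvekit.api.interface import Interface
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7
from carvekit.pipelines.preprocessing import PreprocessingStub

seg_net = TracerUniversalB7(device="cpu", batch_size=1, fp16=False)
interface = Interface(seg_pipe=seg_net, pre_pipe=PreprocessingStub(), device="cpu")
result = interface(["photo.jpg"])  # same behavior as pre_pipe=None
```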
+

Classes

+
+
+class PreprocessingStub +
+
+

Stub for future preprocessing methods

+
+ +Expand source code + +
class PreprocessingStub:
+    """Stub for future preprocessing methods"""
+
+    def __call__(self, interface, images: List[Union[str, Path, Image.Image]]):
+        """
+        Passes data through `interface.segmentation_pipeline()` method
+
+        Args:
+        - `interface`: Interface instance
+        - `images`: list of images
+
+        Returns:
+            the result of passing data through segmentation_pipeline method of interface
+        """
+        return interface.segmentation_pipeline(images=images)
+
+
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/test_trimap.html b/docs/api/test_trimap.html
new file mode 100644
index 0000000..339bf27
--- /dev/null
+++ b/docs/api/test_trimap.html
@@ -0,0 +1,172 @@
+test_trimap API documentation
+
+
+

Module test_trimap

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool

+

Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].

+

License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+
+License: Apache License 2.0
+"""
+import PIL.Image
+import pytest
+
+from carvekit.trimap.add_ops import prob_as_unknown_area
+
+
+def test_trimap_generator(trimap_instance, image_mask, image_pil):
+    te = trimap_instance()
+    assert isinstance(te(image_pil, image_mask), PIL.Image.Image)
+    assert isinstance(
+        te(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("L", (512, 512))),
+        PIL.Image.Image,
+    )
+    assert isinstance(
+        te(
+            PIL.Image.new("RGB", (512, 512), color=(255, 255, 255)),
+            PIL.Image.new("L", (512, 512), color=255),
+        ),
+        PIL.Image.Image,
+    )
+    with pytest.raises(ValueError):
+        te(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("RGB", (512, 512)))
+    with pytest.raises(ValueError):
+        te(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("RGB", (512, 512)))
+
+
+def test_cv2_generator(cv2_trimap_instance, image_pil, image_mask):
+    cv2trimapgen = cv2_trimap_instance()
+    assert isinstance(cv2trimapgen(image_pil, image_mask), PIL.Image.Image)
+    with pytest.raises(ValueError):
+        cv2trimapgen(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("RGB", (512, 512)))
+    with pytest.raises(ValueError):
+        cv2trimapgen(PIL.Image.new("L", (256, 256)), PIL.Image.new("L", (512, 512)))
+
+
+def test_prob_as_unknown_area(image_pil, image_mask):
+    with pytest.raises(ValueError):
+        prob_as_unknown_area(image_pil, image_mask)
+
+
+
+
+
+
+
+

Functions

+
+
+def test_cv2_generator(cv2_trimap_instance, image_pil, image_mask) +
+
+
+
+ +Expand source code + +
def test_cv2_generator(cv2_trimap_instance, image_pil, image_mask):
+    cv2trimapgen = cv2_trimap_instance()
+    assert isinstance(cv2trimapgen(image_pil, image_mask), PIL.Image.Image)
+    with pytest.raises(ValueError):
+        cv2trimapgen(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("RGB", (512, 512)))
+    with pytest.raises(ValueError):
+        cv2trimapgen(PIL.Image.new("L", (256, 256)), PIL.Image.new("L", (512, 512)))
+
+
+
+def test_prob_as_unknown_area(image_pil, image_mask) +
+
+
+
+ +Expand source code + +
def test_prob_as_unknown_area(image_pil, image_mask):
+    with pytest.raises(ValueError):
+        prob_as_unknown_area(image_pil, image_mask)
+
+
+
+def test_trimap_generator(trimap_instance, image_mask, image_pil) +
+
+
+
+ +Expand source code + +
def test_trimap_generator(trimap_instance, image_mask, image_pil):
+    te = trimap_instance()
+    assert isinstance(te(image_pil, image_mask), PIL.Image.Image)
+    assert isinstance(
+        te(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("L", (512, 512))),
+        PIL.Image.Image,
+    )
+    assert isinstance(
+        te(
+            PIL.Image.new("RGB", (512, 512), color=(255, 255, 255)),
+            PIL.Image.new("L", (512, 512), color=255),
+        ),
+        PIL.Image.Image,
+    )
+    with pytest.raises(ValueError):
+        te(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("RGB", (512, 512)))
+    with pytest.raises(ValueError):
+        te(PIL.Image.new("RGB", (512, 512)), PIL.Image.new("RGB", (512, 512)))
+
+
+
+
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/tracer_b7.html b/docs/api/tracer_b7.html
new file mode 100644
index 0000000..f104f77
--- /dev/null
+++ b/docs/api/tracer_b7.html
@@ -0,0 +1,487 @@
+tracer_b7 API documentation
+
+
+

Module tracer_b7

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0

+
+ +Expand source code + +
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+from typing import List, Union
+
+import PIL.Image
+import numpy as np
+import torch
+import torchvision.transforms as transforms
+from PIL import Image
+
+from carvekit.ml.arch.tracerb7.efficientnet import EfficientEncoderB7
+from carvekit.ml.arch.tracerb7.tracer import TracerDecoder
+from carvekit.ml.files.models_loc import tracer_b7_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.models_utils import get_precision_autocast, cast_network
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["TracerUniversalB7"]
+
+
+class TracerUniversalB7(TracerDecoder):
+    """TRACER B7 model interface"""
+
+    def __init__(
+        self,
+        device="cpu",
+        input_image_size: Union[List[int], int] = 640,
+        batch_size: int = 4,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+        model_path: Union[str, pathlib.Path] = None,
+    ):
+        """
+        Initialize the TRACER model
+
+        Args:
+            device (Literal[cpu, cuda], default=cpu): processing device
+            input_image_size (Union[List[int], int], default=640): input image size
+            batch_size(int, default=4): the number of images that the neural network processes in one run
+            load_pretrained(bool, default=True): loading pretrained model
+            fp16 (bool, default=False): use fp16 precision
+            model_path (Union[str, pathlib.Path], default=None): path to the model
+            .. note:: REDO
+        """
+        if model_path is None:
+            model_path = tracer_b7_pretrained()
+        super(TracerUniversalB7, self).__init__(
+            encoder=EfficientEncoderB7(),
+            rfb_channel=[32, 64, 128],
+            features_channels=[48, 80, 224, 640],
+        )
+
+        self.fp16 = fp16
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+
+        self.transform = transforms.Compose(
+            [
+                transforms.ToTensor(),
+                transforms.Resize(self.input_image_size),
+                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+            ]
+        )
+        self.to(device)
+        if load_pretrained:
+            # TODO remove edge detector from weights. It doesn't work well with this model!
+            self.load_state_dict(
+                torch.load(model_path, map_location=self.device), strict=False
+            )
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.FloatTensor: input for neural network
+
+        """
+
+        return torch.unsqueeze(self.transform(data), 0).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask
+
+        """
+        output = (data.type(torch.FloatTensor).detach().cpu().numpy() * 255.0).astype(
+            np.uint8
+        )
+        output = output.squeeze(0)
+        mask = Image.fromarray(output).convert("L")
+        mask = mask.resize(original_image.size, resample=Image.BILINEAR)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images (List[Union[str, pathlib.Path, PIL.Image.Image]]): input images
+
+        Returns:
+            List[PIL.Image.Image]: segmentation masks for the input images
+
+        """
+        collect_masks = []
+        autocast, dtype = get_precision_autocast(device=self.device, fp16=self.fp16)
+        with autocast:
+            cast_network(self, dtype)
+            for image_batch in batch_generator(images, self.batch_size):
+                converted_images = thread_pool_processing(
+                    lambda x: convert_image(load_image(x)), image_batch
+                )
+                batches = torch.vstack(
+                    thread_pool_processing(self.data_preprocessing, converted_images)
+                )
+                with torch.no_grad():
+                    batches = batches.to(self.device)
+                    masks = super(TracerDecoder, self).__call__(batches)
+                    masks_cpu = masks.cpu()
+                    del batches, masks
+                masks = thread_pool_processing(
+                    lambda x: self.data_postprocessing(
+                        masks_cpu[x], converted_images[x]
+                    ),
+                    range(len(converted_images)),
+                )
+                collect_masks += masks
+
+        return collect_masks
+
+
+
+
+
+
+
+
+
+
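Below is a minimal usage sketch for the wrapper defined above. It assumes the class is importable as `carvekit.ml.wrap.tracer_b7.TracerUniversalB7` and uses a sample image path from the project's tests; with `load_pretrained=True` the weights are fetched via `tracer_b7_pretrained()` on first use.

``` python
import torch
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7

# Instantiate the wrapper; pretrained weights are loaded automatically.
tracer = TracerUniversalB7(
    device='cuda' if torch.cuda.is_available() else 'cpu',
    input_image_size=640,   # TRACER B7 default
    batch_size=1,
    fp16=False,
)

# __call__ accepts file paths, pathlib.Path objects or PIL images and
# returns one "L"-mode segmentation mask per input image.
masks = tracer(['./tests/data/cat.jpg'])
masks[0].save('cat_mask.png')
```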

Classes

+
+
+class TracerUniversalB7(device='cpu', input_image_size: Union[List[int], int] = 640, batch_size: int = 4, load_pretrained: bool = True, fp16: bool = False, model_path: Union[str, pathlib.Path] = None)
+
+

TRACER B7 model interface

+

Initialize the TRACER model

+

Args

+
+
device : Literal[cpu, cuda], default=cpu
+
processing device
+
input_image_size : Union[List[int], int], default=640
+
input image size
+
batch_size : int, default=4
+
the number of images that the neural network processes in one run
+
load_pretrained : bool, default=True
+
whether to load pretrained model weights
+
fp16 : bool, default=False
+
use fp16 precision
+
model_path : Union[str, pathlib.Path], default=None
+
path to the model
+
+
+

Note: REDO

+
+

Ancestors

+ +

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) -> PIL.Image.Image
+
+

Transforms output data from neural network to suitable data format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask
+
+
+
+
+
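As a side note, the "L"-mode mask produced by `data_postprocessing` is already resized to the original image, so it can be applied directly as an alpha channel with plain PIL. This is only an illustrative sketch (the sample image path is assumed), not part of the generated API:

``` python
from PIL import Image
from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7

original = Image.open('./tests/data/cat.jpg').convert('RGB')
mask = TracerUniversalB7(device='cpu', batch_size=1)(['./tests/data/cat.jpg'])[0]

cutout = original.copy()
cutout.putalpha(mask)          # use the segmentation mask as the alpha channel
cutout.save('cat_cutout.png')  # background becomes transparent
```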

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.FloatTensor
+
input for neural network
+
+
+
+
+

Inherited members

+ +
+
+
+
+ +
\ No newline at end of file
diff --git a/docs/api/u2net.html b/docs/api/u2net.html
new file mode 100644
index 0000000..724e75b
--- /dev/null
+++ b/docs/api/u2net.html
@@ -0,0 +1,480 @@
+u2net API documentation
+
+
+

Module u2net

+
+
+

Source url: https://github.com/OPHoperHPO/image-background-remove-tool
Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
License: Apache License 2.0

+
"""
+Source url: https://github.com/OPHoperHPO/image-background-remove-tool
+Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO].
+License: Apache License 2.0
+"""
+import pathlib
+import warnings
+
+from typing import List, Union
+import PIL.Image
+import numpy as np
+import torch
+from PIL import Image
+
+from carvekit.ml.arch.u2net.u2net import U2NETArchitecture
+from carvekit.ml.files.models_loc import u2net_full_pretrained
+from carvekit.utils.image_utils import load_image, convert_image
+from carvekit.utils.pool_utils import thread_pool_processing, batch_generator
+
+__all__ = ["U2NET"]
+
+
+class U2NET(U2NETArchitecture):
+    """U^2-Net model interface"""
+
+    def __init__(
+        self,
+        layers_cfg="full",
+        device="cpu",
+        input_image_size: Union[List[int], int] = 320,
+        batch_size: int = 10,
+        load_pretrained: bool = True,
+        fp16: bool = False,
+    ):
+        """
+        Initialize the U2NET model
+
+        Args:
+            layers_cfg: neural network layers configuration
+            device: processing device
+            input_image_size: input image size
+            batch_size: the number of images that the neural network processes in one run
+            load_pretrained: loading pretrained model
+            fp16: use fp16 precision (not supported for this model at the moment)
+
+        """
+        super(U2NET, self).__init__(cfg_type=layers_cfg, out_ch=1)
+        if fp16:
+            warnings.warn("FP16 is not supported at this moment for U2NET model")
+        self.device = device
+        self.batch_size = batch_size
+        if isinstance(input_image_size, list):
+            self.input_image_size = input_image_size[:2]
+        else:
+            self.input_image_size = (input_image_size, input_image_size)
+        self.to(device)
+        if load_pretrained:
+            self.load_state_dict(
+                torch.load(u2net_full_pretrained(), map_location=self.device)
+            )
+
+        self.eval()
+
+    def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor:
+        """
+        Transform input image to suitable data format for neural network
+
+        Args:
+            data (PIL.Image.Image): input image
+
+        Returns:
+            torch.FloatTensor: input for neural network
+
+        """
+        resized = data.resize(self.input_image_size, resample=3)
+        # noinspection PyTypeChecker
+        resized_arr = np.array(resized, dtype=float)
+        temp_image = np.zeros((resized_arr.shape[0], resized_arr.shape[1], 3))
+        if np.max(resized_arr) != 0:
+            resized_arr /= np.max(resized_arr)
+        temp_image[:, :, 0] = (resized_arr[:, :, 0] - 0.485) / 0.229
+        temp_image[:, :, 1] = (resized_arr[:, :, 1] - 0.456) / 0.224
+        temp_image[:, :, 2] = (resized_arr[:, :, 2] - 0.406) / 0.225
+        temp_image = temp_image.transpose((2, 0, 1))
+        temp_image = np.expand_dims(temp_image, 0)
+        return torch.from_numpy(temp_image).type(torch.FloatTensor)
+
+    @staticmethod
+    def data_postprocessing(
+        data: torch.Tensor, original_image: PIL.Image.Image
+    ) -> PIL.Image.Image:
+        """
+        Transforms output data from neural network to suitable data
+        format for using with other components of this framework.
+
+        Args:
+            data (torch.Tensor): output data from neural network
+            original_image (PIL.Image.Image): input image which was used for predicted data
+
+        Returns:
+            PIL.Image.Image: Segmentation mask as `PIL Image` instance
+
+        """
+        data = data.unsqueeze(0)
+        mask = data[:, 0, :, :]
+        ma = torch.max(mask)  # Normalizes prediction
+        mi = torch.min(mask)
+        predict = ((mask - mi) / (ma - mi)).squeeze()
+        predict_np = predict.cpu().data.numpy() * 255
+        mask = Image.fromarray(predict_np).convert("L")
+        mask = mask.resize(original_image.size, resample=3)
+        return mask
+
+    def __call__(
+        self, images: List[Union[str, pathlib.Path, PIL.Image.Image]]
+    ) -> List[PIL.Image.Image]:
+        """
+        Passes input images through the neural network and returns segmentation masks as PIL.Image.Image instances
+
+        Args:
+            images: input images
+
+        Returns:
+            segmentation masks for the input images, as PIL.Image.Image instances
+
+        """
+        collect_masks = []
+        for image_batch in batch_generator(images, self.batch_size):
+            converted_images = thread_pool_processing(
+                lambda x: convert_image(load_image(x)), image_batch
+            )
+            batches = torch.vstack(
+                thread_pool_processing(self.data_preprocessing, converted_images)
+            )
+            with torch.no_grad():
+                batches = batches.to(self.device)
+                masks, d2, d3, d4, d5, d6, d7 = super(U2NET, self).__call__(batches)
+                masks_cpu = masks.cpu()
+                del d2, d3, d4, d5, d6, d7, batches, masks
+            masks = thread_pool_processing(
+                lambda x: self.data_postprocessing(masks_cpu[x], converted_images[x]),
+                range(len(converted_images)),
+            )
+            collect_masks += masks
+        return collect_masks
+
+
+
+
+
+
+
+
+
+
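A minimal usage sketch for the U^2-Net wrapper above; the import path `carvekit.ml.wrap.u2net` is assumed from the module name, and the image path is the project's sample. With `load_pretrained=True` the weights come from `u2net_full_pretrained()`, and `fp16=True` only emits a warning for this model.

``` python
from carvekit.ml.wrap.u2net import U2NET  # assumed import path for this module

u2net = U2NET(device='cpu', input_image_size=320, batch_size=2)

# Returns one "L"-mode mask per input, resized back to the original image size.
masks = u2net(['./tests/data/cat.jpg'])
masks[0].save('cat_mask_u2net.png')
```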

Classes

+
+
+class U2NET(layers_cfg='full', device='cpu', input_image_size: Union[List[int], int] = 320, batch_size: int = 10, load_pretrained: bool = True, fp16: bool = False)
+
+

U^2-Net model interface

+

Initialize the U2NET model

+

Args

+
+
layers_cfg
+
neural network layers configuration
+
device
+
processing device
+
input_image_size
+
input image size
+
batch_size
+
the number of images that the neural network processes in one run
+
load_pretrained
+
loading pretrained model
+
fp16
+
use fp16 precision (not supported for this model at the moment)
+
+
+

Ancestors

+ +

Static methods

+
+
+def data_postprocessing(data: torch.Tensor, original_image: PIL.Image.Image) -> PIL.Image.Image
+
+

Transforms output data from neural network to suitable data format for using with other components of this framework.

+

Args

+
+
data : torch.Tensor
+
output data from neural network
+
original_image : PIL.Image.Image
+
input image which was used for predicted data
+
+

Returns

+
+
PIL.Image.Image
+
Segmentation mask as PIL Image instance
+
+
+
+
+

Methods

+
+
+def data_preprocessing(self, data: PIL.Image.Image) -> torch.FloatTensor
+
+

Transform input image to suitable data format for neural network

+

Args

+
+
data : PIL.Image.Image
+
input image
+
+

Returns

+
+
torch.FloatTensor
+
input for neural network
+
+
+
+
+
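To make the preprocessing contract concrete, the sketch below (import path and image path assumed as before) runs only `data_preprocessing` and checks the tensor shape; `load_pretrained=False` avoids downloading weights.

``` python
from PIL import Image
from carvekit.ml.wrap.u2net import U2NET  # assumed import path

model = U2NET(device='cpu', load_pretrained=False)  # preprocessing only, no weights

image = Image.open('./tests/data/cat.jpg').convert('RGB')
tensor = model.data_preprocessing(image)
print(tensor.shape)  # torch.Size([1, 3, 320, 320]) with the default input_image_size
```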

Inherited members

+ +
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/imgs/input/1_bg_removed.png b/docs/imgs/input/1_bg_removed.png index a1e44f6..c0443fd 100644 Binary files a/docs/imgs/input/1_bg_removed.png and b/docs/imgs/input/1_bg_removed.png differ diff --git a/docs/imgs/input/2_bg_removed.png b/docs/imgs/input/2_bg_removed.png index a30c041..86e7097 100644 Binary files a/docs/imgs/input/2_bg_removed.png and b/docs/imgs/input/2_bg_removed.png differ diff --git a/docs/imgs/input/3_bg_removed.png b/docs/imgs/input/3_bg_removed.png index 298e17f..5157287 100644 Binary files a/docs/imgs/input/3_bg_removed.png and b/docs/imgs/input/3_bg_removed.png differ diff --git a/docs/imgs/input/4_bg_removed.png b/docs/imgs/input/4_bg_removed.png index 32b6a1c..89097c1 100644 Binary files a/docs/imgs/input/4_bg_removed.png and b/docs/imgs/input/4_bg_removed.png differ diff --git a/docs/other/carvekit_try.ipynb b/docs/other/carvekit_try.ipynb index 484ee9c..35989bd 100644 --- a/docs/other/carvekit_try.ipynb +++ b/docs/other/carvekit_try.ipynb @@ -1,204 +1,180 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "carvekit-try.ipynb", - "provenance": [], - "collapsed_sections": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "accelerator": "GPU", - "gpuClass": "standard" + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "carvekit-try.ipynb", + "provenance": [], + "collapsed_sections": [] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "![logo.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABQAAAADWCAYAAACOuy9iAAASgnpUWHRSYXcgcHJvZmlsZSB0eXBlIGV4aWYAAHjapZpZciS5EUT/cQodAXsAx8Fqphvo+HqeWWxNc2gya6kpkjW1ZAIRHr6Acudf/7zuH/zLOUeXi7Xaa/X8yz33OHjQ/PtvPD+Dz8/Pr3/l8+xvz7tdPy9Hnkr8Tu9/ts/z4ev5rwt9fofBo/KXC7X1eWH+/kLPn+u3bxf63ChpRZEH+3Oh/rlQiu8L4XOB8W7L197sr1uY5/29vzba3m+nH+F+3vb58Pf/zkb1duE+KcaTQvL8TOmzgKTv6NLgQeGnXmbdKfNYL470vje8BfmpTr/+dVZ0tdT845u+dyv81K34WbX73q0cP29J34pcf/3+8XkXys9deUr/lzvn9nkUf3/ez/BZ0bfq6/ve3e6zZ3YxcqXU9bOpry0+j3jf5Ba6dXMsrXrju3AJe746X42OLaCw/fKTrxV6iPTjhhx2GOGG8/xeYbHEHI+LxoMYV0zPky1Z7HHRtUD/+Ao3Wuppp0Yv19P2nOKvtYTntt0v99ytcecdeGsMXCwIF3/65f70A/dqFEJQLeN4asW6YlSxWYY6p5+8LT/IfotangJ/fX3/Fx4EZ95FlTUincLO9xKzhP8wQXoanXhj4fc7g8H25wKUiFsXFhMSHaBrIZVQg7cYLQQK2WjQYOkx5TjpQCglbhYZc0qV3rSoW/MRC89bY4k87XgeMqMTJdVk9KanQbNyLuDHcgNDo6SSSym1WGmll1FTzbXUWq2KFIcly86KVTNr1m201HIrrTZrrfU2euwJ0iy9duut9z4G9xxcefDpwRvGmHGmmWdxs06bbfY5FvBZeZVVl622+ho77rThj1237bb7HiccoHTyKaceO+30My5Qu8ndfMut1267/Y5fXfu09W9ff9C18OlafDqlN9qvrvGs2dclguikqGc0DBUJdNzUAgAd1TPfAvKizqlnvkemokQWWdSzHdQxOphPiOWGr965+HZUnfu/+uYs/9a3+L92zql1f9i5v/ftp65tcfB6OvZOoYrqE9N347zN7vBlnHnHWT3sM86d1gbFXoWyrJ6WH2vdkNjisev3mdSyrzn7iOmKg0K4tu2uOtZtUU2ekNuld2YpQf51jxxOWbud5VnailPvL3Agd7uhnFZOoS63D2+9150MST7p0izQGAoLOY72WWANJ6xe5g5suQJIKDbX0KHHVXu/6ZZ8aRFUeffJhU7x1h5rLy2O2M5xtcczdq1UfvAqpUyQ7gZmtDqm7rErY9tK7BdFqLYHJUrGdYIhrPecEiidGzNzlzPsdmBwkgUIGNhZCXfNtU6GqMMoRc2Jt/d1rPFC6PWqRH7WPkGiEwWA0Mg8LjAV1yjzZsoG/m6r5TA2l5faHADh8NmTZmzd1pmt5jMK+jAHkl3XZWu8wBiI2OhM8afxPB+Ku2hZK11av5jmxrTdNFgegKAgLacqLGRH8dasjT4xICy2JK45xppIWC/1tr7ZeSw1lZLarsY2Z0zbnjfrcs3PHKbzNulKLjtVD7RCSjFf83gUes1O6/T9lnQRPEvROrgElm+hMGHzdnwOy3Z2dkxjlb0CPYunB0+ZIu1La+MtVfNa8mhlgapY57E+zqad1xrbBSrrsnU3GNdSqFA6YeZ2QhuUP8SRIzWETUI7gGqeZhEuuGVcDx7upKJWB4g6Iew83Ur0bO0NIqxCYyrUYESBD8W7RbAPB1Av+bfYBoN5G3v1ljLAzle9g7PHneXQTlYQNw9qfgyEALUaNUl92RQRLQDfIIp0PMCZWkUuFxQyIWNVFxke4Ao8LyqCNT2+rcbja0MTX4ZWMbx2bou1qSXD4IdRWMjofrLPsh0QNcY+DmHf6hoHalumj3Odfo8u00UcleJ6baM17kw/9uQnY3KvaKSPm6CPgzfok3fu2m5i+gaDw70KtNf2jcB1NOSZq
yLB3Eu3CWvAFMs0FA5jRN1uO8Yjf6DWOsKYi2ay8zh7OxFK8rzgb/YVLCH6NI6ts4/LwDar+To/bhuzYI8o92JcmdJcz95F2GOWsQCedz/U0XgTc3i1UXgbEJ0JeVTq7QK3sdHZWc3bJiPV6Qv1iBPqYO4n1zyHUrBpcLpAS+8Hbk+IGFC5swsZjoLuIXSwsgIrAH+QJPbpfVMhrreRMO4kJNeNWAQYyYPNuVqGEgNAh494j9hla/xyeIdRj7mcftbR6HylANXf3a/lsaFLKSMd4TtUhHjj2MoYvZTeZZ1Qt7q2WY92FjpDdRn+Jv2idIhogtK9bnC1qvRSCWvoobtbINDB+KNEntvz5j00dRu3wubknCC0hT3gfjH0DI7IN+cshh5WYS5uOdfpqlMt6UhWE4Bwd+q89K+iQudAsREIzjVuoa43enRkoViRG40DY9YJsZGNWB/ittetZ7UGHGVMIJv0Vk60D96tY//UEG2I1WG1IbLcA8IcmmsVV2HoRxPnimxup/Vn6V3y9+K0aTAk9kVKnq0X5pmtoTdM9Enw+gaQhZ7hKA22LYGlhg20O1wGAY9U20IROtiLueH/DxEVNaUYhiahAVKSBqM45JdPe6J23RBEsTznmkTGiHHpvaHcmUqLsjIMBa/l+TB1gbHfDh7bZBH1MIUJnwowyAvbmuGnt/7lnaK290VefV98uvb7lS7E11A/bAiS1AeoIzmc0/eE1gk/B1rkU+bvDGcfWlkMgZSWdIBaASoGiBzHf0KYPUDZ8doDO+rGR/aFvSj/hB3CsNIZXmQO1Q/HDVoL5892IGwwua3v3jKyButgB+vdV5Wl+fihjaVjePEbZRirncgwHYQ+nOZU6j0HowR3psxsRthqo/c2kvZL72+GN9GjTSLCyqDgtAVg4ii3CPEAyIlkRYkHE2DXplQKFccRJc1kebm5CvsSNpAmI9oK8nIpXsDqYX+dyM7SOhUulNltWRamQlGZ++MKgEsILBFyoqURFsEqQlDYQbLnlOsc6AiudtdIuXnH3jvMKX9ojCEEUOxQD1FBXeweBQCkKNTEedNB3PbES7EP/KG7VigOHxVPnPggCIZ48HMaynoKHmJdj5wzOnhbUE8r68QtIL8sE1fVkmvroIaIw8Y037O2KA0aXJkl4mdIndQAa0xqG5jGHgoqink+GAKD1CO1hSocSwFkVoRIo1GoU2uTcWXBtnipPTjqdUpT6NCtMAB3pjGtUd+pSL4UITYyQSX86jXS+VAB7SS1XM01MYQ0iZ9MsBzklGh3wYxkyLvgGJjAwTj66yyiT9sbDfaydlh5yI2WPlWqKUTuu8AVlhszgEfZ5CPqsKjDZfi7Mc2rOMOCQaQkf1wl/LMy7oXcMgkusBlm+hFVeUgcCRadhz70TQKrF9vCyMAryRws1YLE7K4j0GKdYTn8vJgmX49qElOAJFgiBtDbeRJhYsjJV/WFiWUPLsmi4LrKTPJoF+Eg27AyUhJzpJpSIrznhHgb5pgZCwgASsHqAH+fEUQt18khJJDICkjsPP+6kI5JjI+DYH3HpEWLRBS2R9R0KiMjZtwEmju5++Mw7Soqg3gHdMSK2eihFrzR2yIyMgAZpky7BKGHW/oZPRvH48tgTrZ8vEOFALnuBKDEgrRAvHwlMlTfgx8PXOVwGEYPzhc7w1sG9JbVozjoiLmBSgCo8ohkRGw0tVAVColSCnAMI/akNdlBzClgpd/E54PfJI1t3McN1ZGqYCsoEYlWLbKsQedFJmlETPoG8WilRIdIVbmnqhZVkPCWhY/G6YgMuJUNnhseY4SsASMvJzZCUZljFjyiWI+sR0dxPJMLIzHyrxFkw+KlOQTt0jZQeCJuRBkJaX3QgVAQaaAu5oEQBzpoowLoxnIP6sxj5k1i4avbTBPuEbdSmLpCaqIQCQUkITBl5jXhCS4zRkvg15aEVm6Gb2Lu8GAze7co6IHbEl6fqF2lYkFqChJnwsBRksuF8XI4fGQX5uS6zDqdu7A7tnAtk2TT84PjB3wwBAY3Pz4g4e1AW4uo+y4mzieaYuYhBeapIQzkORQ4ERIJNzg2AIJiwBHKVgsKJGMo81DoIicRsGiYmU6wZpTRbi02lEpcVsIs8xBck0MEmNdEdAnUG1dJgCawarZeC9jT02RGkoWMYh0/jChcQQ9PDT8ZiZOtHYCCJnd4T2LXoGZiB/k24ncLMByi5ycW64KYi/sOFUymhjHpfM4eE9EYIhOFoPKXCyCfHcGID7Hq/CInIj9uoeJk8YbEzHzEgpGoRJqsEUAWMkofuFEoBLIhg+hYgk1AE7psfdeBPy4Pe792Uyfs4zkC4fqYvrvdFIEpMpFfVrzQFmiZMMhDJZqOi9fuS3NFZp3woipHewdNh3LjK1yuCDNQiBZmOXEjLEWSVdeZQgA2gU6QezsxZMHtixlNZaFnmG002oN6EOgGowtfZ5IVLpBMwrw87mcRgcQ2awtfIjZuLkeJ5RA9ixcIK5XIhGcqDpMhw6uJIRXVB2OxIEJASwwZsNUDjxYbNnkmv5RgoUkdOFT0AfKP8G90iBFBh5xGfDwYqyX/o3jeUQAdiuTL0glatQnpAAJtYnOLQK8zDkKFh+C3q2/l1342FINYUEWcj7kJCQSSfyeNNFwsg8Q3QYO1LsqG0AHnTAZ3cqpGqf/bpSTRr+zjFsRIz5Ef/EfhZ8EswdLErLg6pc24pR51aph6sEx/CdypKccwZTCEDgbAErFDhzVVFgo2kAcje6O0afCZSsslTgmvLhO1B7oKZSO0CEssKAQ8KmnAQcWNLi6/3oGZpG09cgS0h6wAbFL8oWYLB+oLgyj1ABidcLHy08dA2GWHLW/JgKktQJVlHcfVN1fFaW2vEXxj38SzUKUtz/PcBiG7/I/SkWaglILEMdSIG05xMW1QLX7FNu+vwhcmsLWFqpPEABw+wbCRVUTRsLS1F62YrIjzVvNY5dCJRXH0MK80vbg5YW0s48mx0FxrowDEnJV0xNrz0PHmWvgZZBrBhO1Jn0e4bqO5BRXCzdjXBcU04I9L7SAHI0lCoiJtcM95FHgpOANrmD0Nj3ydrzoxwP07mHiAp2tGrC5VfxjZsD1qHzF2d96khEt/lekCPgICJDEQlshqdKDAxRkD7TTBajbd0imu4X5Ia1u04jslwZIVHUeQTHmYFun+pLoYTLzjwjHITzOnihDg640unqq1AXFl6Jzgp2NGBDM2YrrUFUdYIMQ4dQQ4IWiMb4c9qcnmQmv3Tg86nSa8IBSmJMIIc5mgRMi+SCNKEnt4/SD3cDN+Yg9Q+tJzRmmZYK/cgaurDHInKitxX7ETKQYy0dGpYV5pSZ8HzSe+0DbWeuSpZRAIfk1nuQMXyHhdCFoHuBHvSLTAZiqXw7GBZMrCcRZLZp6ZGS3oD5OQKRkf5zCd/JvOSd9jDHtSNZm6kBJJYaxbVqET9VqhgfS2IrgLb6u/CE1GeciuWHJLUYMWoXQnsvJda2XQQHlA2D3uc+Ww4iCF6QQVptOhEXO6YkTtJ8QObMDR7hullo5iPz3K4Mm3Ht65
eaHeKT/KSychGaCXpgQbNQKjUtQsKQewzUHJ8phgh1Ydj0FDiNGVNuSaYbe8icciCBRNTgNhHTtSb/j26pg4D0+2cCTC95gPc8nQblmaCS1uhjGdeISZjluZmogJca0dWDsmEd0xGSSQjNdwXia9JUYUijuHjBcaEAKncUydiaG34JBtNlm4paO3MBAeJLKT8wjskB/TX3GYWb6h4ATzTuxgMFwejJP3lNAhbfSUNE9taBeSYralmOvRHIhaROrwvspT2Grutxu+jwHBIJ8ri8mke1I7yTiMxvwv7Htu8T0s8QQ47JBmYbA1zIWug4d7G5T0xz1ATebLRyfO1U+lMloPAhANnRbOCMmCxEm0kvDu6NAG0+RDwzqJwh3JspWEs4gP9VJUFEpsUXQEpWbcQlQGoCcE5WZUtxGzYCUCHAFi4eQonO8EoEHZIBQcJahuOr6ZOgEcFEToDrmFGJb3NFIpizlweJH1Ho/RapyBVofWV2a8nIXCTYgHjksgB5pCf4jToDnKaDQ8ZJsWEU3RSG9bxxZQj/4WDse9JzhFf9Uo9A5NJRYny6fOXPU91WcdTzIP+msiJO5ArMm9d/kWFIMts3kFHEKo5x6TdZLFKnr1OD4vF2zP8Vcmg3nKmSsjglnRiYH+Bvv8RUc0VgFdwndElLAxC1XnsyQEXtEhA03IDHhKW8g50qU03LiioWjylloqTjxvgiZDpj+WowMYDhqtoy5oQ4NQ1HJ5LjJKJ/a1RtLD+SMYiEZMlYjYG/uhFBiyyHBBNwqDqj0aOUn8iBy4Bb0iDJqCMjWdlNO1TdWxiV3nW+mDSpz01v/z4t83RmaqXjgqCgAAAYRpQ0NQSUNDIHByb2ZpbGUAAHicfZE9SMNAHMVfU0tVKoJ2kOKQoTpZEBVx1CoUoUKoFVp1MLn0C5o0JCkujoJrwcGPxaqDi7OuDq6CIPgB4ujkpOgiJf4vLbSI8eC4H+/uPe7eAUK9zDSraxzQdNtMJeJiJrsqBl8RQA8GMIiIzCxjTpKS8Bxf9/Dx9S7Gs7zP/Tn61JzFAJ9IPMsM0ybeIJ7etA3O+8RhVpRV4nPiMZMuSPzIdaXJb5wLLgs8M2ymU/PEYWKx0MFKB7OiqRFPEUdVTad8IdNklfMWZ61cZa178heGcvrKMtdpDiOBRSxBgggFVZRQho0YrTopFlK0H/fwR1y/RC6FXCUwciygAg2y6wf/g9/dWvnJiWZSKA4EXhznYwQI7gKNmuN8HztO4wTwPwNXettfqQMzn6TX2lr0COjfBi6u25qyB1zuAENPhmzKruSnKeTzwPsZfVMWGLwFeteavbX2cfoApKmr5A1wcAiMFih73ePd3Z29/Xum1d8PL5JyjPoBHfAAAA39aVRYdFhNTDpjb20uYWRvYmUueG1wAAAAAAA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/Pgo8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA0LjQuMC1FeGl2MiI+CiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogIDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiCiAgICB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIKICAgIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiCiAgICB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iCiAgICB4bWxuczpHSU1QPSJodHRwOi8vd3d3LmdpbXAub3JnL3htcC8iCiAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyIKICAgIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyIKICAgeG1wTU06RG9jdW1lbnRJRD0iZ2ltcDpkb2NpZDpnaW1wOjcyMjdlZWUzLTQ5MTEtNDE4MS1hNjBlLWVlYmRjOWFlOWQzYiIKICAgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDphMDg1Mjk3OC1kZTdjLTQyNjAtODdiZS0zNTQzZTA1ZGIwNGEiCiAgIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDoxMTJiY2Y4Yy0xMjcxLTQ4OTAtOTQ5My1kNDJhMzE3OTcwNzkiCiAgIGRjOkZvcm1hdD0iaW1hZ2UvcG5nIgogICBHSU1QOkFQST0iMi4wIgogICBHSU1QOlBsYXRmb3JtPSJMaW51eCIKICAgR0lNUDpUaW1lU3RhbXA9IjE2NTIxMjA0MzM5NzMwOTMiCiAgIEdJTVA6VmVyc2lvbj0iMi4xMC4zMCIKICAgdGlmZjpPcmllbnRhdGlvbj0iMSIKICAgeG1wOkNyZWF0b3JUb29sPSJHSU1QIDIuMTAiPgogICA8eG1wTU06SGlzdG9yeT4KICAgIDxyZGY6U2VxPgogICAgIDxyZGY6bGkKICAgICAgc3RFdnQ6YWN0aW9uPSJzYXZlZCIKICAgICAgc3RFdnQ6Y2hhbmdlZD0iLyIKICAgICAgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDphMTExYjYyNi00ODM0LTQ1OGYtYjc4Yy01ODZiMTIyYTY2MmYiCiAgICAgIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkdpbXAgMi4xMCAoTGludXgpIgogICAgICBzdEV2dDp3aGVuPSIyMDIyLTA1LTA5VDAyOjQzOjM2KzEwOjAwIi8+CiAgICAgPHJkZjpsaQogICAgICBzdEV2dDphY3Rpb249InNhdmVkIgogICAgICBzdEV2dDpjaGFuZ2VkPSIvIgogICAgICBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmExYTVlY2M2LTJlN2UtNDAxOS05ZDRlLTdjMDNiYWZmNjY3OCIKICAgICAgc3RFdnQ6c29mdHdhcmVBZ2VudD0iR2ltcCAyLjEwIChMaW51eCkiCiAgICAgIHN0RXZ0OndoZW49IjIwMjItMDUtMTBUMDQ6MjA6MzMrMTA6MDAiLz4KICAgIDwvcmRmOlNlcT4KICAgPC94bXBNTTpIaXN0b3J5PgogIDwvcmRmOkRlc2NyaXB0aW9uPgogPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICA
gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgIAo8P3hwYWNrZXQgZW5kPSJ3Ij8+r4gMfgAAAAZiS0dEAAAAAAAA+UO7fwAAAAlwSFlzAAALNgAACzYBvwjYegAAAAd0SU1FB+YFCRIUIcREItAAACAASURBVHja7J13mFTV/f/ft03bKoKCiNjosoZQVgRUVMRgQYrEQmzRxBgVkxgLFogo4ld/KpqIGCvWIIJAFBAxIqgBggi6gBCxRpDiwu6U28/vj+Uc7iwLLLBz587M5/U8+zA7O8w997R7zvt8CkAQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQRMNIVAUEQWSTNWvWtK6pqelTW1vb1bbt0nA4vCUSiXwbiUS+7tat2wdUQwRBEARBEEQmcF2XAYAkSdixYwcikQhCoVBmN+CSBMdxAABvvvnm7OHDh59PLUEQhB+oVAUEQTSWVatWdf3mm2/+uHTp0pGqqqpt2rRBcXExysvLUVpaiuLiYpSWlqK8vBzFxcWQpIM7Y2CM7fU9/v38PcdxkEwmUVtbi0QigR9//BE1NTXYsmULtm7dim3btqF3796vnH/++SMlSWLUogRBEARBEIULYwyyLEPXdZSVlfl2XcdxEAqF0LJly39SK+yzjRgAxONxFBcXN0mbp1IpxGIx8Z6u64hEIpAOdvNCEAGHOjhBEIJ7772X9ejRAx06dMChhx4KSZIgSRIURYGiKJBlGYqi5PQ9uq4L13XBGIPrupAkCbqu44cffsCKFSsQjUbfGDJkyHDqDQRBEARBEPkNqwN+6z6u60KWZSxevHhUv379HqOW2DOWZTFVVdHU7WRZFjRNAwDYtg1VVUkAJPIesgAkiALjrrvuskpKStQOHTqge/fuiMViKC4uFg9AvigBAFmW97RYAgAE/RnZ0EJBki
TIsiwEQAAoKipChw4d0KFDBwAYZts2Y4xh1apV+Pjjj7F161Z98ODBA8klmSAIgiAIgiByF74/0DQNjDHxAwDjx49no0ePJhGQIAiCyF2efPJJZlkWY4yxRCLBdF1njDG2U+gSGIbBDMNgruuyveG67j4/EwR4Off1s6/7S6VSos4WLFjAGGO0MCAIgiAIgshx+LrPbxzHYYwxtmjRohupFfYO3680VTt5v8e2bWbbNnNdl1mWxZYvX04hgoi8Rg5CId54bcpMd2KUWY/G2JKPPxpCzULsD8v/s+xUd2KUuROj7NZzD2dr166NUa0AK1asOGXWrFls48aN7Le//S0sywIAxGIxOI4D13WhKAoYY3AcB47jQNM0hEIhSJIExhhs24Zt2+LzXsu/XLWQ52X33oPruqIOuFWgJEmwLAumaSISiSAcDqO2thann346ALjr169nM2fOZGvWrGlNvY0gCIIgCIIgMrN2b+rvcxxHxH9UFAWSJMF1XbRq1YoqnMjv8ZTtAjgTY0xCutAuj0qRdQ3RKBYt/Ne1fT4dNMn7HkOEKaOq5UKul/Xr17O2bdtC0zQR0wKoCzjMY/gZhgFN04TgxT/D8cbFSKvfnSIgf2jmCg0lD/H+jbsD7GmR4a0Pr2ux67pYsmQJJEm6vHfv3lNoVBIEQRAEQeTM+pBiAAYc13UZN05oqnbih/7cDZiLgj/++CNat25NWgSRt2R9915f/AMAd2KUTG+JRlFf/KvrU3rBTtqzZ89myWSSHXfccVBVVZxucfHLdV3Ytg0ACIfDkGUZqqqCB9b1Wvo1JP4Bu8SzXLMA9Ip7vE4sy4LjOCIuIP+74ziwLAu2bYukIbw+uFUgt5CUZRmVlZXo2bPnC9u2bWMzZ858mUYmQRAEQRAEQQQTnuCQr/u5kcSe9j8EkS9kVQB85YWn9hhQ/5/Tpz1NzUPsjeefeuyTPf3NnRgrOBF506ZNbMCAAYhGo7BtW2Tv1TRNiFaapgmxDwBSqZQQBPnneXKM+m6x9S3ockEA9JbZW3aOpmlp98w/z+vNaxXpOI6ow3A4LCwrU6kUZFmG67qIxWI4//zzL1m0aBEdYhAEQRAEQRDEQcKt/5oKb7JDx3GERxNjDMXFxVThRF6TVQHwl9v/0G9Pfxv03a9+Tc1D7InVq1c3vyx1a7c9f6Jw9Jd33313HGOMNWvWLO0kiwta3JovFArBdV1YliXM3KPR6G6uv4qiiAchj4vhtY4TNcyCX8de68f6Cwm+AHBdN80CkIuBjuMICz9eD7wOebxE0zQRjUYBQAiDAFBZWQnGGHvuuedICCQIgiAIgiCIAKLruljn67qOUChElULkNVkTAOti/7l7/Qy5AhMNseKT5b06zu++ZV+fKwQrwKeeeoqdfvrpdwJ1Yha3SItEImkx7bjgJcuyMG3nYmFD1HeJ3dNnAj/B7eUe+D3Wj2PI31cUZTdxlH8nr7v6iwR+LV7HV1xxBcgakCAIgiAIgiAOHO/hfVPsDzhFRUXidTQa3ev+iCDygawJgFIjLbScAnTlJPbOiYv6LmncJ/O769xzzz3syiuvhCRJqKmpEaITnVwFB9M00adPH2zbto3mMYIgCIIgCILYT7g3Dz+kJwjiwMnKCPpw8QdXNPazEhimPPnYUmoqAgCcR/fPKnTZ0iUD87EeHnrooS133XUXFEWBZVmIRqOQZRmmaULXdQCgE6wAEAqF4DgOysrKsHnzZlZVVdWOaoUgCIIgCIIgGkf9WOQEQRw4WREAey8/+7n9+fxI49aey5ctO5Waq8An/4nFbH+9Trt/3H9uPtbFpZde2pzHqJMkKS1jFTdlT6VS1GmyjGVZUFUVsiyjWbNmiEQi66hWCIIgCIIgCKJx5ELYIYLIFbJkQ7v/Cn63j055n5qr0HF86WtBx3Vd1rJlS5GplieuYIwJ91/TNBGJRKjLZBlN0+C6LnRdh6IoOOqoo/C///2PjjAJgiAIgiAIohF4BUCyBCSIg8N3AXDqlGfnHOj/vfXMFjTiCxTnIBLCzJs985F8qYfJkyczwzBg2zZs20YqlRKZbG3bFll+eWZbIrtYlgVZlkWmYFVVUV5ejocffpjmMoIgCIIgCILYD0gAJIiDw3cBcHj1788+0P97/3lxzHj15anUbIXFbRe0YAcjZQ3YcNFN+VAPS5YsGXH55ZcjEolAVVWoqopIJAJN06Aoishoq2kawuEwuQAHAE3ThEi7bds2AEAsFsNNN90ExhgptARBEARBEASxF7inEwBKAkIQB0nOjaDBm6++8NMVKyqo6QoDY2I7e3z/OFUEgC5duvxD0zQwxkSiD279xxiDoijYU1xAIjuYpoloNArGGJo1awbGGGzbBgAsXrzYpRoiCIIgCIIgiD1DSUAIounISQm94oOTV1LT5T9r166NafieUtkCePvttycVFRVBlmVYloVIJALTNIXlH3f3VRQFqqoCgPg3F+Enfd6HPWMMruvCcZw04ZO/X/89vmDY0/f6sZAIhUJp15EkCaqqwnEcdO/enTo2QRAEQRAEQeyF2tpaKIqy27qeIIj9x1cBcNH7/7q2qb7r23tK6Bggz2k/r1uiqb7rs1Wr2udyXQwcOPDaVCoF27ahKAoMwxAJP3KZ+id6XJiTJAmSJAlRD6gTz2RZTnN39roD8B/+f/n/4fERLctK+24/YyTWv5Ysy1BVFS+88ALNYwRBEARBEASxByzLokogiCbCVwGwz8pzJjXVdx1ZZjepoEgEi2//EmtSYWTdZysezNW6uO+++xhjDNFoFKqqQlEUhMNhGIaRF21dP7OX1zqPi331cRxHLAa4BaDXQtDrCu0VB7346UqwJwHwwgsvpMFOEARBEARBEHuAhz6iBIcEcfD47ALctBvuPisHTaImzD9mvPby1CPLm7avLPvH7efnan1cfPHFAADDMOA4DgBgx44dCIfDuT8BNSDu1RfmGGNC1HMcR8Q71DRtN8s/biHIXaF5vD3+fn2rQj/wWhzWdweORCJYtmzZ+TTqCYIgCIIgCGJ3EomEr9erqqpqR7VO5Cs5n0bntos7kAtdnjH4x6ub3CzqvjOqc7Iuqqqq2h1++OEAgHA4DEmSEI/HUVZWJsTAvJqQPGIeR5IkIeoBSBMCG/rx0tB7DYlxflLf5XnFihUzadQTBEEQBEEQxO7E43G/r9eNap3IV3wTAFcsX94rE987/qRv4U6MkQiYJ1iPRjPSlrlqML5t27Z13HoNqIuBUVxcDAB5EQi3oaxeXvHPsqw0odPrysv/r9fCjgt7POafpmnC6s9xnLTPBUFAlWUZxxxzDA18giAIgiAIgmgAvy0Aq6ur+1CtE/mKbwLglk0bh2Tu2xlmvPryVGrO3ObWARGmZFCpW7t2bSzX6qSyshKRSAQAkEqlEA6Hoeu6ELdyHS7Y1RczeeIO7ubLM//Wd/NtyJKPZ9rVNA2maQrhj4uo3s/5xZ6sEwGgbdu2NPgJgiAIgiAIogFSqZSv19u6dWt3qnUiX1H9utBPG/9XmcnvPz8DbqOEf8yc+tpL5228MqPX2
FH900AAM3KlTqZMmbJEVeuGaDweR1FREQAgEomkZcvNZbzZfvn9cAs+27axZcsW1NbWorq6GolEAqZpwrIsGIYB0zTTLAgVRUEoFEIkEkEkEkEoFMKRRx6J8vJylJeXp4mNPAmHH9TPbOzFcRy0a0dhRgiCIAiCIAiiIfzOAlxdXX0U1TqRr/gmADY7ovUSrEf/zAkJgDsxyuRRKUoPlGMsev9f1/ZZOejSTF8nmYh3yKV6sW27F3d35W6/u/q7FJQyQlXVNDHScRwoigLDMESiEm6xyP9m2zYURcGmTZuwdu1arFy5EsXFxVXXXHPNCZkq67Rp02Ydcsgh51VWVqKoqEiUhS8quEUlvxfbtsEYg9cF23tPAISYyP8f/3z9tmqovbhVouu6WLlyZc8TTzxxGc0GBEEQBEEQBLELbgHol/FDJBKpoVon8hXfBEBTTx7mx3Vm/OPlqUN+eekIatrcoe3CIZNQ7sOFcixS5Mknnxz4MnKxS5IkmKaJUCgEoE4EDIfDcF0XpmkiEongb3/7G7p373557969p2SjrMOHD0/LtltVVdVuzpw5H/Xq1at57969hXUgF/u8FoLcBZmLf4ZhQFXVNLfi+kJfYxcpjDG89dZbS5G7oSoJgiAIgiAIIiP4bQH43//+twvVOpGvqD4O3BI/rjN409UX/nP6tKfPHTr8amre4ONOLGKA4cu1ZEU2cqluOnQIvsEiFwBra2tRUlI3xB3HgaZpSCQSUBQFH3zwAQYOHBg4catLly7rAbTgv8+ePfu5Vq1aXdGuXTvYto1mzZohHo8jFouJeIMA0oTAvYl8+yMAnnrqqTQZEARBEARBEEQ9dF339XrRaJQqnchb5Hy8qUHf/erX1LTB5/PPPjsScKkiGmD+/PkP5kI5eSbdkpKSNAs6SZKwadMmRKNRKYjiX0Ocd955V/bo0UMqKyuTNmzYMPitt95CcXExZFkW8QYBpP3L3X4bSmTSWBcFxhg6depEnZ4gCIIgCIIg6hGPx/drbX2w1A/nQxD5hG+9W5YV088bcydGGTVvcPl0xYqKzu/1+s7Pa6qatj1X6kdRlJtt286FcsIwDOEiyzP2jh8/Hscff3zOurT27Nlz1rnnnitJkiRNmDABGzZsgKqq0HUdqqrCcRzIsgxZltMWI14RsLGLFFVVhfUkQRAEQRAEQRC74AKgX5AASOQzvvVuRVF1v2/OmdiczMsCygkLT17p9zVDocgPuVI/lZWVOfPwCYfDME1TJNT473//2/6OO+7Im3h2t99+u9SpUydJkiSpuroauq6DMZYWj0SSpLRkIPtDrmdyJgiCIAiCIIhMkUgkfL1eJBKhSifyFt8UhmhR0fd+35yEhLRg7pyx1MzBYt4/Zz4iZ0Hz0EKhjblSR6qq5oQwxB/IiqJgw4YNCIVC0s7YennJEUccId1zzz1YuXIlXNdtMCix1y24MTDG0pKJEARBEARBEARRx+GHH77Jz+tRDEAin/FNAIzEYt9k4wb7fzF0DDVzsBjw5UU3ZeO6kWj0v7lSR7qu54QAWFRUhHg8jg0bNuC4444rCFO2+++/X+rRo4c0YcIEPZVKwTB2zy2zP+KfbdtkBUgQBEEQBEEQDVBeXu6rAKhpGlU6kbf4JgCqavbir1mPUjzAoJDN2IwdO3ZM5kIdTZs2bVZpael+u5JmCp55y3EckfSD/+u6LtauXYtOnToVnII1duzYaFlZmfTcc88BqBPzdF0XsQH5e67rpsUGtG0bjuOI9iXrP4IgCIIgCIJomKKioh8dx4FlWbsl3cvIftWlKGJE/uKbANjrpN5vZusmFQlYtnTJQGru7GKTENsoqqqqzgOCExsuEomI7L7c5VWWZRiGgdWrV6Nnz54Fbb72u9/9Tnr33Xcf2rhxI2KxGIA64c80TdGGXndhy7KgKIoQAMn6jyAIgiAIgiAa5phjjnlCURRomuZLjHRamxP5jG8CoCRJWRV/un982tzPP/vsSGry7DBl0sSlchbnUpZD0mOXLl0CUxbHcWCauxJ4c5N40zSRSqXQtWtXekICGDBgwJ9bt24tTZ8+XSQICYfDsCwLpmlCVVVomgbDMBCNRmEYBi0uCIIgCIIgCGIf9OzZc5brur6tnck7h8hnfE0zmm0NpvN7vb6jJs8OI83bembz+rkktnTp0iUw7r+KogiXVtu24bouuAn+IYccQgpWPYYNGyaNGzcOoVAIyWQSmqZBkiSRLEWSJNi2jXA4DEmSxA9BEARBEARBEA3DY277sUeiGIBEPiP7e7nsb3TdiTFyQ/W9zrPv+jvz8L+/niv11aZNm8DEnrBtG6qqIh6PQ5ZlaJqGVCqF1157rYp6dsOMGTNGev755z+NxWIwDAPhcBhFRUXQdR2apkFV1d0ShzBG0xJBEARBEARBNAQX5fxYM4dCIapwIm/xVQCcdfjTARBhGMUD9JH3Hx+9LgjlOL5z1ztzpc6KiooCUxZJkuC6LoqLiyHLMpLJJB5//HFcc801J1Dv3jNXXnllt08//fRU0zRhWRZ++uknRCIRpFIpAICqqrv9n9WrVzenmiMIgiAIgiCIdPja2Q8jCbIAJPIZXwXAdl26/jEIN93949PmUtNnngXz5ow9xX2kXRDK0rWiYl0u1Nnq1aub84QbQYCXQ9d12LaNRCKB0aNHk89qI+jWrdsHpaWlkuM4aNasGQAgFovBsizYti2yKwN1p5mGYXSmWiMIgiAIgiCIdPz0lqEYgEQ+46sAeELXrt8H5cYdcgXOOP3XDh1DtbB/vPrqqxtt2w5UmSRJgqqq2LZtGw477DAS//aTaDQqxeNxmKYJ0zThui7C4TAikUja50zTbEm1RRAEQRAEQRC7WLFixSnJZBJAw140mdj7EES+IhfqjUtggYhNl69Q3R4YrVu3Vr2ZdgMxViQJsixjyZIlr1ALHRjz5s2bDQCyLCMcDou2pdh/BEEQBEEQBLFn3nrrrYU8RFJQ9kcEkatkQQAkRT3fCVqiFcfNnbo78sgjhSgUlPgT27dvxyuvvILBgwdfSr37wBg+fPj5Dz/8MIA60S8UCkHX9bQTRtd1I1RTBEEQBEEQBLGL/v37i9cUn48gDg7fBcCV/RZVBqkCyFKtaVn43oKbgGBV6fqBy1vkSv21bdsWQN3pVhDMz23bRjwex69+9StS7g+S22+/XZoyZQokSYJpmmkuwJIkQdf1o6iWCIIgCIIgCGIXvXr1guM4Ys1MEMSB47sA2O3n3ZcGrRK+/QvFA2wq+n127iNBKs/tsyLo3Lnz1lypv9LS0kA92GRZxksvvUQdu4n49a9/Lf3www8IhUKIx+PifUmSEI/Hj6MaIgiCIAiCIIhdqKoKHiPdj/A5JDIS+UyWYgAGa1AdWc6w4J05Y6k7HBxBtKYc+cjCDrlUh47jiKQbQUCSJNx+++30FGxCWrduLf3www8oLi5Oez8ej7el2iEIgiAIgiCIXRiGAVn2T7agGN1EPpMVAXB573+dHbSK6L+GMtYeDG5Asyp3rahYl5MDU5bhutkPXlhVVUWdOwPMnTsX
8XgcjDGxyPjpp5+OppohCIIgCIIgiDoWLVp0YzgchqZpwlAi03B3Y4LIR7JiZtSzV+U8Z2IMUsBixbkTo0welSJrpwMiePrfkp7zLwb65lQtcqswx3F8PenaEzNnzqSunQF+/etfSz169GBdunQBACiKgtra2mZUMwRBEARBEARRx4cffjixb9+6/Zxfrrnc3dgvvvzyS6brOpLJJEpLS6HrekavZ5omNE2DoijQNK2oY8eOyab67jVr1jDK1BxssuZn+O+fz7mq9ydnPxu0CrEeiTLtDyQC7g9BTaTS++S+r+VaXZaWlooHTzgcznp57rzzThoLGeLEE0+ULMsSYyeRSBRRrewfjDFp54KwwTmoqqqq3d7+v67rx8iyrMuynAIA13Wjtm2XG4bR0rbtMsuyDquuru56zDHHPLF9+/Z+mqZt3rp1a7+vv/66980339yCWmD/mD179nOHHHLIx9XV1b0jkchWSZIatcJVVbVGURRD1/U269atG7Z169ZjSktLUVJSgmg0Cv66qKgI0WgUkUgEoVAI0WgUsixDURRxoOK6LhzHERbW3teSJMG2bei6DsMwsH37dsTjcWzfvh01NTWIx+OoqalBWVnZpqOPPvrDI444Ymo0Gl1TUVHxGbUuQRAEQTQ9Xbt2FYKcqqpwXTfjRhJ+WwC2bNkS0WgUkiQhlUohGo1m/JrxeBzFxcV49913/wLgz031ve3btw+EEQuxl3V1ti7cp9+pz7kTo4ETABUZ+Of0aU+fO3T41dQ99o39aFCzKCs5WZ/hcBi6riMSiYAx5ttJl2VZ0DQNruuKa9bU1FAHz7wggiFDhkDXdViWpVCN7B220186kUigqKjI+36mLz2w/ht/+tOf2JtvvokhQ4aQSN5IBg0adIUsy1cczHeceeaZQbiVlgCG7fzZrf/F43Hs2LEDy5Ytw+rVqzFs2LAmPV0PKs8++yy78sorfb1mMpkUmybHcaCqKo3HJkTXdaYoClRVFesE0zQRCoX2+X/5eoKvKRhjcF1X/Oi6Ll7btg3btuE4jhDlU6kUgDpLFcYYamtrsWzZMtxyyy0H3caMAnwFHomyMBA7qaiogOu6CIVCvsXmSyQSvt5jJBIRc6Uf4h8AxGIxfu3vmvJ7SfwLPlnNNPDxz+cG0gpw0He/+jUAEgD3waefrqiQF54cyLLJo+I5u3DgCUD8XPuoqgrGmJi0uRUMkVmGDh0qbdiwgR111FFo1qwZLXb3wosvvshs24ZpmmniXzYh8a+w8VoheF8XFxcjHA7jggsuwODBgyFJUsJ1XSQSCezYsQPbtm3DTz/9hKqqKvzvf//D+PHj5T1ZseYSV111lXTZZZcxWZZ9eX4ZhiE2MPy5NX/+fDZgwAAal03ArFmzWDgcFhayvH83tm3rf06SpLTv4B4PQMOHOIwxKIoirHZTqRT69+9PbUsQBUarVq2gKIqviTnIhZXIZ7Iq0fbpd+pzgV3YT4zS6eA+6Pr+ySuDWK7Rb+emIdWaNWtaAxALXj/gJu6SJKU9WB3Hwfr166mT+8CLL74IRVHQokWLT6k29szZZ58NVVXFht8wjKyXaerUqXOpZQigTqzwxgzSNK1uLeG6wpo7Go3iiCOOQEVFBU455RT8/ve/x7hx42AYhmtZFvvpp5/YnDlz2Ny5cyfmaj3Mnj3bt8Mr7grGn1myLAfFQjQvOO+880S9cjGOrxcaS0MiILco9L4ny/JuP4qiwLIs8XrNmjXUKARRYNxxxx1MURTYti2siv2wMOMWyASRj2TdRnOldM23Qa0cEgH3zK2DD2FBNc6/YuJ/cjKW2mefffZMVicDWRaCoCzLeOedd6ij+8CYMWOkrVu34osvvvgZ1UbD3HXXXax58+bid9M0AxEjs3fv3gOpdQoX76EJt3rj7oveuZSLIFxIYYzBcRwYhgFZlhEOh6GqKg455BCcddZZGDBgwI2MMfb111+ztWvXsueffz5n1iLHHXdcBXflzDSKoghLdV7Ptm3joYceorXbQfLEE08wPtc2VO+WZe3zOxorFNZ3DeY/lmWJMea6Lk466SSy/iOIAuOSSy4R4QH4M9YPyAuKyGeyLgB2u/GxtgzBfaZ/uGjhldRN0nnt2afeu//0YE6Mc9u+PClXYy198sknA/limG8UMw1foHPLFb5x1TQN48aNo8W2T7Ro0UKaMGEC1fceuOqqq9LGQ1DCN7Vu3XqfiUaI/Ib3RW7ZxBOOeF0dueDH51lJkqBpGsLhsBCtTNMUf2eMwbIstG3bFm3btsXFF18Mxhj74Ycf2HPPPceC3OcqKio+27Ztm7AWyzSxWAyGYYh6VlUV55xzDnXMg+SMM86AbdsiFhVjDIZhCGtWrwVfY8fJnubtPVkAapqGUCgEwzCwbt06ahSCKECOPPJIAHUx0mVZFjFBM00ymaTKJ/KWQERpVEYlA7vx7f3J2c+uXbs2Rl2ljhmvvTx1RO2o/sHciAGDLhh6Xa7WbYsWLRrcWGYSb3BuIN1ahSCCQsuWLdP6ZTgcTnO3zBaSJGHt2rW0My1QFEWBJEnCYql+39B1HalUCq7rQlEUaJomhEHDMJBKpWCaJmRZRigUgqqqwt1RURTE43GR0ZgxhsMOOwxXXHEF2rdvv27btm1sxYoVjGfCDhJTp0715TrerJC2bQu366OPPhovvfTSYuqhB06bNm3EuoBbqvJEYbZtH/A6gQuB3h/uJt+QSGiaJjRNw5w5c7ZSqxBEYcHDrHgPHKLRqC9WgO3ataOwPETeEpg0LQu7zLw9qJXUfl63BHWVOgb/ePWFgd2M3ZTKaeWqZcuWadZ/fghxfPPKH658c0qm70RQePnll1kkEgFQF5OFn8rurwVKpvjZz8hzu9Dhc3V9F8ZIJIJIJCKs4bziRjgcRjQaRSgUEvOuVwSRZRnFxcUA6uLbWZYlhBJVVdGsWTN07doVjuO4hwf+HwAAIABJREFUP/74I5s0aVJg3F5vvPFG6fvvv8/4dXgMQG/cXMMwoGka2rVr14d65oHx+eefs4ayUHLLvAOJUezNBlz/+/jBI//xuslrmgbDMHDzzTe3oJYhiMKiefPmA0tKSuC6LgzD2C0hUSY54YQTxlALEPlKYATA/meeNSHIFeVOjBV8TBmHYiJmlEMOOSTNFdcvSzx+LcuyxOJ7+/bt1CBEILjkkksA1Il/0WgUsVgMlmX5Fgdmb9i2jWOPPRZPP/00zY2F+EzcGe9vTy6MDX3e22+5oMfxiiAcLvyFQiFompaWsIkLi82aNcMVV1wBxhh75513AtEX/UoixZOvhEIhxONx4SbWuXNn6qAHSJs2bQBAWPyFw+E067zGhihpyKKvIaGvvvUf/zvv39OnT6dGIYgC5KSTThJx//g8xK3mM03Pnj1nUQsQ+YocqMKMCrIFF8OHHxRuPEBnYjkLauMs7/3+2cHuO41fdHsXwH7hdZ/iln8U+4IIAvfffz/jmd+8bh+qqvoWY6wxY2fIkCHUWAUIj/e3N7xzef1+25iQC5qmCdfW+t/JRRMeU5AxhgEDBmDjxo1ZFwJ
1XX+SC6Q7fwewK3t3U7nwezPKcotJSZJQXFyMf/zjHyTM7ycPPPAAKy0tFck/vLEs+Y+qqo1aozRk9Ze2rtwZ1F+SpLRkI97XNTU1GDlyJMUlIYgC4/nnn2fRaFRYefNnXigUatLr8DVm/WREBJHPyEEr0IxDnwys4t57xdnPFmInWbZ0yUAJRiDLNufYl57p2atyXj7Uc3FxsXjIHaibzYHA3c34ZlOWZWzcuJFmRyLr/OIXvxAiG99Q8k1jYB6isoxmzZrh8ccf/5ZajPAbLiryzKyO46Bly5bo27cvLMvKmmvwoEGDfvfll19ClmURy9B1XSFm+hHj9sILL6QOsp+MGDEiI5vsPfVd3g+4dathGMK1e/v27fjoo4+oUQiiAOndu3faOovPGQAalYV8X3DRz2vR7D3wIIh8JnAC4LCRlw9+KfzAsqBWmFOArsDdPz5tblDLds55w67Ol3ouLy9PsybxM9MpF1Z4UHs/4jcRxL444YQT0oRwr2tlEE5oLcsSi8eTTz65DbUY4Sc8M6tt28ISkI+LaDQKVVUxfPhw1NTUZGXd8vLLL0PXdRQVFaWNXx6nzw8ee+wxsgJsJIsWLbrxqKOO2s0iL5P9t/4Gn792XRfl5eUYNGgQ7cQJosBYsmQJa9++PYBd1uLe8BlNsT/iFvgNiX5BCDFDEJlEDmKhLrv2xl5AMJ/5ElhBiYBBjvs38/CnX8+nui4pKWmyB9v+LMB5LB7vwy8ej9PsSGSVhQsXMqBOZAuFQkLY4P00KDEAeVm6deuGqqqqdtRyhG8LOE9SBu4GzOO2cWvA5s2bQ9M0bNu2jf3tb3/z9Xl+zz33SLqui4zIfAz79YwzTRPnnHMOdZRGUlZWNrG+pY1f6w8+l4bDYTHPf/XVV9QoBFFg3Hrrraxbt24A6mI/e5NkcZraQtn73a7r4qeffqKGIPJ7/RjYgo1KBvbUTwLDsiVLBuZ755jyxMSlwT16lTDkoktH5FN980yGfsYB5NdyXReyLAtLEjr9IrLNKaecAkVRxIYwlUqJPut1JcwmPFMmd2XbvHnzOmo5wi/4OOCbIT42uBjI3YIjkQiaNWuGq6++Gh999JGvIuAXX3wBwzAQiUSE25b3dSYJh8No27Yt3n777UnUW/ZN165dAdQdbPgRZJ9fg683+L+maUJRFKRSqfbUKgRRWNx1111ifRcKhURIgKZ0/+XPTz7n8Gcnf/3dd99RQxB5jRzkwn3cbe5VQS1b93+fNvezVavydnGy4pPlvUZat/UMYtlGLyoJtEB8MHDRjwtyfsGFQB5UniwAiWzy17/+ldXPRO09BfbDPW1/FpE8iHSfPn2o8Qjf4Ak1gF0ZifkBDp/TbdsW1oChUAg9e/aEnwlCYrFYBR+7XkteP+BCUocOHa6l3rJ3HnzwQQYAtbW1UBSlyZK0NHbtoaoqZFkWMV63bNmCLl26rKeWIYjCYd26dayoqEgkjeJx0b3PukzsjbwJvTRNw4YNG6gxiLwm0AJgn1NOfe7tNi8+E9TydX6v8ot87RgnLuq7JKhlmzBtc16KfzwYLbfE83PxzeOY8UU4Y2wrTY9EtujXrx/Ky8vhOA4cx4Ft24hEImnjxE9X+T3hOA5kWUYymRTJGJ588kmKOUb4AreO5RsY/sNjuiaTSRELkLsF27aNAQMG4P333/eln1ZUVHz2xRdfpCXvsSzLFwtebjFyzDHHUGfZB5dccgmSySRKSkpEpl8/8Gb45O7Amqbh9ddfp0YhiAJi8+bNrF27uigqkUgkzTDBK/opitJkMQC551V9UfHLL7+kBiHyGjnoBTx36PCrg1pMSQIWzJszNt86hf1ocOP+LT/p/bPzdTB6LSP8csHlGzJZlmFZlvi9X79+v6fpkcgWFRUVQjBQFAWqqorA9LZt+5Khcn/GD09yIEkSRo4cSQ1I+ALfBDmOI9yiuCWgqqqIxWJIJpPCJViSJLGxOvXUU/HBBx/48qx/7bXXwBiDruu+WrZ7n2uzZs0iYX4PPPnkk1+0aNECsVgMQJ3lpB8HLIwxIVjXD/T/+9//npJ/EEQB8Pjjj39bW1vLDj30UDEPeK3ZJUkSh13cErAp90jeEASGYYAxhtatWy+lliHyGTknCjkqEdiFQP+1Q8dMfeHZOfnSIdyJRUwOaG3PPfrlST0rK+cVwsD06/Td63LsjSNVWVk5laZHIhv85z//Ydx1r75YEAqF0oLEZxteBu9CtaioCLfddhuJDUTG4ZZTiqIIizrvawBC1PE+VzwHPXj88ccz3lfvu+8+yWtVpmmaLzEA+T0zxsg9f2/r2P7929fPtu7H/OrdeNu2Ddu2EQ6HMX36dGoUoiDg4yyRSBxXiPf/yiuvsOuuu65NcXGx8ELiayluoeeFr/+aYo/kDZPB5z1+IHHZZZdVUu8k8hk5Zwo6KhVYEXD49t/nhVXa6tWrmwNuIMvmooQNGjz0ukJbFBTatQmioqICoVAoJxLR8OyV3P2Xnx4PHz6cGpIIPLqu4/rrr8f8+fMfzPS1Zs6cCVVVhfDnhwswT4hi2zbKy8vx0ksvkTDfAO3bt0coFBLxVf1aA/DNvuu6iEQiUFUV1dXVGDZsGC1CiIKAHx6qqlpTSPf98MMPs5qaGnbxxReDMYZUKiXEPj5v+7XX4dfla7mtWykCEpH/yLlU2BmHPjkrqGX79i/lOb+w7Di/+5Yglmv0BwbUUZvlvB+MHmsnv0U4HnsnCHHViMLlwQcfZH5mwW6KMctPog3DQDgchmma+PnPf47333//z9SiRJAJh8MwDANHH330zZm+1rBhw6SamhpomuabuO8N6i5JEs477zxq9Hr8/e9/Z3y+5bGwmirG1r7gmd25GFxbW4vNmzdToxAFgTfut6IoRiHc85gxY9jXX3/NbrjhBoTDYTDG0lx8OX48I7ibMZ9/+DV5AhKCyGvNIZcKO2zk5YODWrYjyw3Mn/PW/bnaEdyJscAqPxPecAviNJiLcN6Ni9+LEYLIJhdeeKHo+7lkicqzrAJ1p8mO4+DQQw/9P2pRIheeOUcffTTeeOONjD8A1qxZI55vfomA3L3LMAxEIhHcdddd9KDzcPXVVyOZTAoxwmuFk2mi0WjaPB8Oh7Fo0aIqahWiEDBNU8QojcVi6/L1Ph966KEts2fPZjt27GBjx45FmzZtoKoqQqGQcPW1LGu3AwG/1m7eUC6WZWH79u3UOYm8J+esqoLsCnzGuuG3McZyTqx64+UXZgLBXBPnc9IPLytWrDjFu+jOhgUg/yGIbLBo0aIb27ZtK9wEc6Uv2rYNTdMQi8W4Kw8URUGHDh2oUYnAw10vhw4dmvFrzZ07F7Ztpx12ZRKeYdZxHEQiEYRCIVx44YXU6Du58847meu6Yu6SJAnJZNLXNQi/Tjweh2mauOaaa06gliEKgXA4LA5Cqqur8yZI6dq1a2N/+ctf2FtvvcW++eYbNmrUqObnnnsuioqKhJUxj/mZSCSE90Q0GhXf4cfzoX6MQR53et68edQ5ibwnJ90qgywCssdibi7V5cIF7/5hyNZrzw9i2V
4t+n8fFkrSj+3bt6cJgH49AL2LfG9CEILwmxYtWkwE6tzCNE3LmX7otWTicQB59uJnnnmGrI2IwMIYg2EYIi7fq6++mtH+OnbsWIlfy49EV/Ut6R3HQfv27bF48eLfUOsDl1xyCRKJRFpd8XWAHwJgIpEQ1wmHw3jhhReoUYiCmn95kqJIJPJ9rt7HihUrTnn00UfZ0qVL2fbt21n79u0Td999N84880xh7ee6rjj84fOLqqooKioS7r/eLLx+JULkB0SWZSEUCkFRFNx8881kCUHkPTkbV+3jn8+9Kqhlsx+N5symr9/n5z0cxHKNfrctLr36ur6FMhDj8fhxQLr45pcAUl909Cs7I0F4Of7449OEAdM0c6LciqLANE0YhpE2jmRZxrBhw6hhicBiWRbC4bBwuTrrrLMyfs23337b17HNrQCBusOFcDgM27YnU+sDRx11FEpKSuA4jhAAedZoPw4gi4qKwBgTbXT99dfTxpsoGEzTFB4P69evvzyo5Vy9enXzO+64gz3//PNswYIF7NNPP2Xr1q1j27dvZ6lUilVUVCy88cYb0aNHD5SWlgqRj7v4MsYQj8eF0KfrOmRZhmmaME0Tuq7DMAwoioJwOAxJknzdhzDGxDNiw4YN1DGJgkDN1YL36Xfqc87E2LNSAF1XZQn4bNWq9l0rKtZRFzswLntgeosJszsXzP0ahnEofxDxoLh+PvyAXfGgeMwkgvCLp59+mpmmCUVREAqFRLyuXIAxhlAoJBa2kUhEnCiXlJTggQceYLfeeittbInAEQqFxAGQ4zho1qwZXnnlFXbJJZdkrL8OHz5cSqVSzI/nnOM4IqGFrusoLi4GAHTs2LHg237atGksEomI9YbjOL5a3njbx3VdLFu2jAYkUVB4k2D079+/yz//+U9mWRYsy8KOHTuEQJZMJpFIJNCqVavvWrVq9WlZWdkXmqZtPvzww19pxPrEAADbtlslEolu1dXVvbdu3frzHTt2tKqurm5TXFyMsrIyxGIxhMNhlJaWomXLlmjRogVatGgh9gb33nvvbt/Ns+bydZD3sAWoO2BSVRXJZBKlpaUwDAOhUEis7XhypvrfKUmSb3EAbdsWiVhM08TixYupYxIFQc5vSqZOeXbO8OrfBzJOXJBdlefNnvnIgA0X3UT1Fgz+9re/bbjuuuuO4QKgX0lAGGMibhkX/7755hu0a9eOBAvCN3jqX74YNk1TiGq5jOu62Lp1Kw4//HAaT3UbfpaNBEd7m/94n6u/EeFWUXxjw60E9lR+72f473uKq+rNuM6/j18HgPgOrzhjWZbYODXl+ODXsCwLmqbhhx9+QOvWrTPaXz/99FPWuXNnqKqaln3Wtu20ez/YvsI3qDzGlGVZIu7T5MmTce211xbsuGQByPrF+9xO9zspH++fMYZkMomioiLYtg3TNBGLxUSfJ/ayQfXxJJzVQXGwC3Mu9D5vZEmSsjI3ZmN9xOehxYsXj+rXr99jTT2eiOCi5voNjLjsql/cem4Ldv+AeODKNuWpx5Ze9psbewWx3oIq/n3Y7e3fAf0LbiCmUqmSbEyW9Teomqbhu+++o5mR8G+enDJlCQCxSQd2WSbl+gZJlmVEIhEsWrToxqZcXBFNP//xGEUARCIXviH0WkVZlgXGGDRNS7Me8IqFvO25VSv/Li4KyrK8WyIMHjeyfvm42yq3LJUkCaFQqMlEQB77iJe3VatWGa/3uXPn7jjxxBPL+MaLC3+8nr0uqQdD/frk9Q4AZ599dsH2+zvvvJN5rXeyhaZp2LFjBz7//PO8rGc+XmOxGJLJJGKxGFRVRW1tLcLhcF4cchFELsPnQf7MyZb4l2+8+OKLS7dt23YU1URwUfPhJh745xbp/gHBi7s3MnVrz5J/vDx1yC8vHRGkcrkTgxmjcPTCFpgwqv+ThTgQt27demg2RQqvJcx///tfmhkJ3zj22GN78X4I7LJuaioRINsUFRXBsqyJAEgADBg84YyqqpBlebcNObeO5hsFb5ZnLmA0tOnnf+ffxxPD8N9d14VhGMICzisKArusAWVZTsuMCCAtvlJTCBReMZF/95tvvskuuOCCjJnC3HbbbeWXX345a9myZd1CVFXTrP+8dXgw8Ocaz7bJv9M0TbRt2xZ33HEHu++++wrO5Gfo0KGBmFtN00RZWRkURbk8H+uZu0R6DxqSySRKSkrE3wmCyB78mS5JEv76179ShTQRl112WSXVQrDJG/vzlf0WB7KzDd509YXzZs98JDCTXUATlIyeU4oJ078tWNt7VVWzdu/1XeA2bdpEMyPhGxUVFUIEACCssPLBfYCLmP369aOGDiDRaFSEP3AcR/zYti3iF3nmaITDYSEi2bYtXPp4wHKvtSDPsOs4jrD2cV1X9IloNCrER6/bMO/3XDhIpVJiTHj/nkqlmvQZwMul6zrOOOOMjNf9G2+8Ie5JlmUhptaPI3UwcGGVj0WOZVlwHKdgk/S0a9cuMIcrpmmid+/eU/KxnrlbodflPxaL8ThnkqIo9LOXH3pCEZmGi/A7duzADTfcQH2OKBjyRgDs9vPuSxd2mXl7EMsWFHfbz1atah/U8BYT5v5Y0BMvd330CnF+CSDeJCA7LUJ20NRI+MFLL73ESkpKYJqmWIjxsZBt97QmecDKsjhhnjRpErmWBHDxz0U1HhuOu6JyQcowjLQMz15xUFVVhEIhKIoiRDouOnHLs/rx/LzCSyKREBlxvX/jFoCWZSEajYq4mJqmiVh59S0DDwRFUYTrPb8mFyYzzfXXXy/xeuWukl7xsynwundLkiTaiGef7dChA1atWtW1kPr8e++9x3gylGyjqioefPDBvK1rLvrrui6yK9fU1OTFs40g8gFN0+C6Lt59912qDKKgyKsItP3PPGtCUPOa3HZW9i3vuvyr8osg1s3aActbFPpADIVCWbF48p5M89/79+//G5oaCT8YMmQIXNeFpmlpm34/E+FkEm9ct1/84hfU4AHDG68PQJpFH38dDoeFQMbFXE3T4DiOEO9kWRabesMw8MMPP2D16tWYPXs2nnrqKYwePRpvvfXW88uXLx8sy7LEKS4ulsLhsORFlmVp/vz5j40bN06fMmUKvvnmG5F8xDRNMWfzax/s/YdCISFocktGAHjwwQcz/kBat27dbq7PvC28ouvB3iOfY3gdArssOrdv376qkPp837590/p+NtmxYwfuvPPOvD385UlnotEokskkHMdBUMRXgih0+HNP13X88pe/JOs/oqDIu2MoeVRSunVwlN1/erDKNf4c4Ocjn50z4rKrsrIL/PaecgYYgWuv5b3fP7tn585bC30gNmRx4UdGsvpJQBRFQWVl5VSaGolMM3r0aMbdoepbRNi27YsVUqbxjuG2bdti9OjRbPz48bTQDMoCaGe/4xZi3n7IxT7XdVFbW4tNmzZhy5YtqK6uxqZNm7Bp0yaUlJRs6tSp09Szzz57VFOWa+f37fadd911l3XuueeqvXo1TW4x13WF8MfdkV3XRTgcxs9+9rOM1/+CBQs2derUqSUXyb0WkE1xA
MDdfhv6fn6NysrCCVX00EMPbVEUpcliLB4s7733Xt6v60zTTLMUdl23ScR7giAOfn3GGMOLL75IlUEUHHm7EQlqogt5VMr3On/575MWXZz8Y98gdj95VJI2wwAeffRRdsMNNwjXMUmSfMuC6nUB3vkvtQmRcRYvXsxOPvlk4XpuWRYikYgQJuqL07mKbdtiTOu6jpKSkoIdX47jsCBZdnIrPi4UpVIpfP311/j000/x3Xff2ePGjdOCPH769Olz0N+TSCRQVFQEXdcRDoeFK+7atWvRuXPnjPfVb7/9lrVs2TJN8OeC5EGvAz3P0FQqJUQYrwBmGAaefvppXH/99Xk/Lr/88kt27LHHpmVcz+oKMIsTPPPB5aJ+eBWvmz+ts4IFqwPULIXFV199hWOPPTYQjZ6N9RF/Ri5evHhUv379KFFdASHn641V9V/SIYjlyoYwSeJf8Km/GPdL/PMuTvlmkCD8oE+fPmKxrSiKEP8ApLkF5jr8PlRVRTQaxZQpU5ZQ6/sDd2nlMSY/+ugjPPbYYxg9ejQ++eSTgaqqSrFYTCopKZFKSkqkww47TOrVq5f0m9/8Rgqy+AcAffv2lSRJkqqqqgDUCc08bmC9TUWDr7mLbVFRkXAF5uKfYRg44ogjfLmPadOmQdO0tHAU3MX6oBe4nmdoNBoVol/9uIxXXHFF3o+FFStWnHLMMceI+/fDBZi3oWEYSCaT4n3TNPHvf/877+vce4gly7Loj5T9lyD8wXu4551/GGNIpVJ45plnqJKIgiRvBcCuFRXrVkrXfBvEst121hG+iYDBtYQk8a8hkcBv6sf/I9cUwg+mT59eMAkxePw2/rpVq1a9qAf4A3fpDYVCAIA+ffpIo0aNku6//36pe/fu7+TDPZ5wwgnS5MmThUsrF9P4XO4V1hzH2S3ZTn2hQpIkhMPhJkky0hj++Mc/St7nEI+V5oeLqiRJCIVCkGUZ48ePz+s5KR6PL+SJUFRV9eWAkfepcDiMWCwGwzBElueysrIWNEMRBJEpEokEFEVBPB4Xz7OtW7ciFArBcRy89dZbuO+++2gvShQkcj7fXLcbH2vLArikG39ONWa89nLG46w5E2OBXNAu67VgKA29hhfK2YQxRhaAhC+cd955WUl64zf1Y5C5rot+/fpRB/BxXk0mk9i+fXteJJXZE9dee6308ssvf8p/98bV9FrTKorS4Lir//zhwtiaNWta+1H+lStXQtd1SJKEWCy2W3KqTD7zgDrrwAsvvDCvx8Jxxx0n4tE5jtMkFpb73GDszILuui7i8bgQHrds2YLOFPuZIIgMwkU/nnjHcRw0b94cpmli48aNuPDCC0n8IwoWOd9vULkpFcgBPvjHqy9cMHfO2Ex9/5TJjy2VEMQNtoTK3ifPoKG37w2YrxPBzs1xbW0tNQaRUV566aXFfmw+gwBjDLZti98dx0E4HMY//vEPRj0h8+i6jlgshvLy8ry/1yuvvLLb+++/f1cqlYKmaQ1m0eXWqA0JbIwxuK4r3meMYfbs2d/7sk5TlApvCADLsnx7BvLx6ZfLcza44447WKtWrRAKhZBKpUQcRJ/aFrZto7i4GIqiIJFI4LXXXqPJiSCIjO9rdF1PO4itra3Fli1bcNRRR5H4RxT2+CiEm1x9+tI2QSxX/y+GjsnUd4/Ub+0ZtPsd/U+LXH/3gHez46f4541PI0kSCYBExuncuXOfIASg9wNVVdPGM09scNZZZ1FH8AHu+muaZkFYN/fv3//eWCwm8b7muq6IC8hdf72uvl4R0Jshl/+/9u3b+1LuioqKz77//nsYhgHDMBCNRn0RqHiyLcYYYrEYPvroo7wU5u+9914Ade7V0WgUuq6LsZFJeDzJUCgk3H81TcMf/vAHWgcSBJFxIpGI8MCIx+MoKSnBli1bTqWaIQqdghAAT+ja9ft3270+IYhlm/7ai2829Xfeen7LwC1iHVfChPk2Lfr2shHZ2+9+QQIg4cNmH0BhBELn8a6AOksjLmqUl5fjnnvuISvATD93dta3pmkoKioqmPu+4447RMZRVVWFxZfXus/7r1gQ7kxUwH86derkW5lnzZqFcDgsXJfrJzPJFKqqirnopJNOyru+UFVV1Q6oS8TB4/BFIpE0y+RMr2t41mHXdcn6jyAIX+CJPxzHgWVZMAwDt912G7p16/YB1Q5R6MiFcqNnDTr3dgtHBs7v7IIffzN4yccfDWmyDefEInb/GTsCV//aH8jyrzEL5Wzg3QxSDEAik7zzzjuMWxjlc0w2MR97RE6emIG/96tf/Yo6RIbh4mu+ZJRuLOPHj5fefPNNsQGqn1Wbj8G91YskSeBZY/3g+uuvl0zThOM4SKVSaVnBM4XjOJBlWbipGoaBadOm5ZUw//33368Ddlkfu66bFiMy0/NfOBwW4p+qqrj88stpLUgQRMbhMQAVRYFlWXjqqafwwAMP0PxDECggARAAwqPWq0Es1/S/XTm9Kb5nwTtzxgLBs6qZWjrxXzTUgok3FhRlASYyTbdu3aCqqq8xvrKJqqowDAOO40DTNESjUXHfbdq0wdSpU+dSr8jgAmenyGxZlm8WZUFh+PDh0o8//ghd1/f4Ge8Y9Mb/488DPzLxelm4cCEURUE0GvWlvfj9clE+EonknRVgjx49hCWsZVnCytIPF2u+vrAsC7IsY82aNTQpEQThG6ZpIplM4tlnn/1u9OjRJP4RBF8fF9wNjwpeUpD7e34L9yAz9n62alX7/msyF1PwQLn9jQguuvI3p9NQ2/dGNRsWUYwxcV2enY8gMsGMGTOmNW/eHIZh+JYBmLu5cWHba+FaXV2d9hkuAnDhoakEiHA4nCakeDOyHnPMMQOpZ2QeTdN8sXgKGqtWrXqFW355+148Ht/teVPfSlCSJN9F+rKysl/ycnnLnSlUVUUikYAsy2L8t2rVCmPGjMkLK8CxY8eyZs2aiflM0zSRAdiP9YbjOMIVfee8355mI4IgGrs/8b7mh1T8NZ/XvIdX3uRXPLP8e++99/wNN9xwFNUoQXjWfIV400t6zL84gFPdQYmAHd+r/CKIdf3AB9V04hJg6m/w/BJmiMKjS5cuw7gFSiQS8cXalIs+3AWkqKgItm3jiy++wOuvv45kMims9GRZhuM4QqzzQ4Do2LEjdQwiYwwePPjS6dPrHAy40O04DsrKyg7o+ZBpKisrp65Zs8bX+KA8NmQoFILrunAcB3/605/yov2S+pBKAAAgAElEQVTvuusuALtccVOplJjj/KhjnmhElmV89dVX6NKly3oalQRBHMhehR9SeQ+neHxbntQqHA6juroalmUhEokgFApJ55133pVUgwSRTkEKgL379H3t7TYvPBu8kjG8PWP6E/v7v2a98Y8XlQDKbEG0tiR2f6h6XYEKzU2O8IeVK1f2bNeunfjdNE1fslByaxe+CQbqRMGpU6fit7/9rcTdI7lQyC1i/RIgiouL8dJLL5HqTmSMESNGSF9//TUikQhSqRQURUEqlQrsXP/xxx/7kqCCzwk8OYqu65BlGYwxlJSUYPbs2c/lcruv
Xr26uSzLadaeqqqK2Ip+HfYlEglIkoSpU6fSYCQIYv93xp5QRd73HMeBruvCsplbAh5yyCH46quvIBVa8F+C2A/kQr3xc4eO+PXMlk+/HrRynf3tpb9bvXp188Z+/u03pz9x7vdXjAzafawdsLwFDa/9e8BlCy52MMaQTCapMYgm55NPPlnK+5pfAeg5iqJAURSEw2GxkLz77rslANiwYQMMwxBWf163OL9EwCFDhlAHITLKlClTAOwKih6NRn2xcD0Qrr76amlvcQub+rmrKAokSRIHEqFQCNu3b0d5efkVudzmK1eu3BKPx1FcXAxFUZBMJsU8ZxiGL3Mwt7p2XRdjxoyhzThBEI2G63fcuo8f1vD9kqIoiMVicF0XlmVBURTouo4ffvgBHTp0oPmGIPaCWsg3P+SXl45wJ0YDZ30x5YYztgBo1OR19jeX/i54NSujc+fOW2l4HfzmxI8DLO81UqmUTTVPNDW9e/cGUBcPjweE5zGpMomiKGLhyDe/06ZNE3+fNWuW3a1bNxVIt0r0M/5ZNBrF//3f/7FbbrmFFqxERhgzZozUo0cPNmjQIJFhtzH9mzGGRYsW3divX7/H/CzvqlWrcOKJJ6KkpMSX6/FDCdM0oSgKysvL0b1795xu8/PPPx+xWAxA3WEGT0DEExL5uYF/4oknaBASBHFQeyAe+48f2njnb752e+aZZ3DDDTfQWorYjbfffpvxw0XTNIU1fKbQdV3EAE8mk7jooosC1S/VQu8QK/surjxxcd8lQSrT+PO342cj/77goiuvOWNvn7MeiTEgWPolY4ByU4Im3yZ86GUabvXEGEOfPn1opU40KTNmzJjWoUMHpFIpRKNRqKoK27Z924R6rV1M08SIESPE4Bo3bpx2yy23sGg0KsYBdyPxIwMqv9b555+PW265hToLkTHOOeccqbq6mpWXlwtLin1ZgTHGsHbt2t8A8FUA/Oabbz48+eST+/h1PS4A8nig0WgUoVAI06ZNY8OHD8+59cwTTzzBYrEYdF0XcxnfINu2jXA47IslNk+yQhtygiAOBh4D0OulUVNTg+LiYoRCIaxduxb/+9//7r3hhhvuotoiGmLAgAFpfSjTnnd8H2+aJoLojS4Xeofo1r370rfbvPhM0Mo1oubG09euXRvb22cUOXiho5SbKO5fLuG6roh7BAADBw68iWqFaEr69es3zHEcRKNRxONxXx68aXOSokBVVUiShE8++WS3v7/++uvic17hw48ycsvE4447DvPnz3+QeguRSb788kvR7xoj/kiShK+//rqL3+UcOXJkXz9c8BVFgW3biEQi4lkYiURQU1MDRVFwxhln5GQ7X3HFFUgmk9A0TYiZuq7DdV2xEfErDMP69ZT3gyCIA4evxbyHtIZhoLS0FLIs47nnnkOnTp2kM888k8Q/Yo+Ypin6EBflMvnDM1SHQqFAhlyRqUsA5w4dfnUjPW59pf28bolcqkdK+pF7eOP/7dzwUUICokkpLy8XiQeKi4vFw9GPDT7f8HJLu/feey9e/zNXXXWVxN2E+SLTD+s/ACJ4NWMMJ5xwws3UW4hM0qNHD2n79u0IhUJpyZ/2hCRJIm6g30yePDnj16gvhPLkI6WlpeLfu+++O+eeibquIxaLQVEUGIYBoE7wk2VZbER8TAJzKo08giD2Fx77j6/fOIZhIB6PY/LkyZAkSbrqqqto70nsk1gsBtu2YVmWL0kINU2DLMu+JTXbX0gA5BUxKhnICeS2cw/b4+JzRZ8PTgtKOSnpx8Fh27YQRPhDz283YL+SHhCFw9NPP824mMaFhMZaHzXVApJb/7muizvvvLPBoGLff/+9GG9+jj3unqdpGpo1a0Ydhsg4r776KmzbFnE4+fMHqBOF+GvGGGzbFmKY31x//fXSjh07hFDJF+48c29T4BX6ZVkWc4X3vUsuuSSn2nfq1KnskEMOEe0YDocB7LL44/fXFBYJ9Tc2PBMnUHf4UlVVhW7dun1Ao44g8g+vKOc4DkzThG3bwoPCNE3x2vtZLuilUqkGs/t6Dyfi8TgURYEsy6itrQUALFq0CM2bN5euvfZaEv6I/UJVVd+t8VRVDeT+mgRAb2UE0IJt/IBavPHSCzMb+lv3Hj0XsgCcTc875pUnKOlHbpIN0YMoHLgLnXdBxx+E3lgumezfPEPc3XffvcfPvfHGG1sVRRELVr6gzTS8XkzThKqqeOedd8gCl8go1113ncSFLq9Lleu60DRNCEWMMciyLASkbLBixQoxHvnCnVv1+uGib1kWjj/+eDz00ENbcqV9zznnHN/mV1VVhejHDzNkWYZhGJBlGdOmTdNpxBFEfmJZllgn8Tij/NnCM6t7E6rZto1UKgXHcYR1uXcPwmOWapomBMSSkhIkk0msXr0aU6ZMgSRJ0sCBA2mzQhAHCQmA9VjWa8HQoJVpyLZrz//8s8+ObOhv2Y+5J+EX5w/5PfWcHJ0APBtAPzYMROFw++23s6OPPjrNYsebXMMvF2BZlqHrOsaPH7/HufLmm29usXHjxjQLID8Eca8bgm3bOOmkk6jjEBnnvvvuE9YU/DngFbz5Bk2WZRQVFWWtnKWlpQO5yMTFcj6m/RifmqZBURT079+/eS6064QJExjP/OvX85wLxfy199pjx46N0mgjiPwkFApBURRxgOS6LmzbFj+6rsOyLGG5raoqotEoNE2DZVlIpVJp7r2RSEQ8i1zXha7rWLhwIVasWPHbLl26SNdffz0JfwTRVPt/qoJ0KnufPGPGoU/OClq5Or/X67s9/a2q/5IOWetAAXWdJhqH192YBECiKbn00kvhui5M0xRWRI7jpInOfvRvAHjvvff2+dnp06enxevwIw4gFzVCoZA4PZ80aRJZARIZ5c4775QsyxKbN265BUC4cPGxU1xcnLVydu/e/R1d19My93F3Gr9cagzDQIcOHXKiXQcPHgzTNJFMJn25nvfg0BvXVZZlzJw5kwYaQRTIPoLP0TzpmqqqiEQi0DRNHKRwF2EAIkERt+iWJEkcQn3yySd46623XikrK5NOO+00qW/fvk9RLRNE00I7/gYYNvLywaP/aQauXO7EWIMbw64VFeuykcRk3cAVRdRb8mQi8GQCJoim4MgjjxRZNXlGLI5fcQAlScJPP/2EIUOG7HOC5KfL2RgH3nhdQ4YMoc5DZJwHH3xQzP1AneDNN2CqqopxkE0BEAAmTZqEVColysljF/pxYOW6LsLhMIqKijBr1qzAPyA7duwInnHdD4GUb/q9boAAsGXLFlxwwQV0OEwQeYxt28JanMMTqvH5J5FIQNfrIgGEQqG0v/NMrD/++CNmzJiBd95550lJkqRevXpJgwcPvpRqmCAy+PymKmiYCfOdAC5eGD7+cPFFDTakz5Z4yyoXDO3YsWOSekpu4w3Oa5omVQjRJDzzzDOsIddBLiz4mRXr888/b/Rn169fL9xy/ciSyV1hJEkS8QcPO+wwPPLIIxupFxEZXeNMmCDxscEzUfMxCuyy0M22APjHP/5R4s8m/q9fcQm9hwGnn356oNvz7bf
fZqZpirhafsxf9Q9LuBC4cuVKGmAEkecoipIW0oWPf2+IhlgshkgkItZUPL7fli1bMHnyZKiqKrVs2VIaOnSoNGjQoN9RrRKEP5AAuLfKCWBSkMr/DHh10fv/urahvy0/6f2z/SiDhSOdypNOnkE9JPfxntz55TZE5D8XXHBBmtjH47/whSIP5p9pDMPAunXrqhr7+VdeeUXni1i/kuJwV0xvwOxTTjmlJfUiItN8/vnn//rpp5+ES63XDZi/zmYMQE5VVZVwMfUzSQ93keaxS//6178G1gqwf//+CIVCSCQSvl2TtwUXAWzbhmmaUBTlIRpdBFEYMMbSQknwAwgeu3XHjh1YunQpHn/8cSxevHiUpmlSy5YtpRtvvJGshAkiS5AAuA9mt3ru5aCVqc/KQZMaer9nZeW8+ce+9mimrx8etV6lnpF/D3DDMKgiiINmzJgxrLy8XMR2UVVVuJjzBaFfLueMMVxzzTUnNPbzY8eOjW7atAmJRMIXF2XXdcGD9juOI0SNE088EYsXL/4N9SaiIebOndskg+fiiy8+ffPmzSLmHxe9dV0Xr7OZBZhTU1PzpGEYiEQiYh7xI0YnT3AhSRIikQjOOuusQPaH0aNHs0gkAtd1UVRUBF3XfWm3+sljFEWBrusYMGDAn2mUEkR+U/+w1DAM1NTUYP369fj3v/+NO++8E+FwWCovL5cqKyulP/3pT1K/fv0eo5ojiOxDAuA+GDziopGvxh5eHLiJ99FogxuAgecN/sOMwyZnLIlJEK0i8wHDMCDLsrDE4Mk5/ILH4SCIg+WXv/xlg1mleYBo7g7SVBt4LlxbliWsDfnvDz/88H5/34cffrhbVkv+ffy9eDzeZOPOa0XDX+/MojeZehNRnxkzZkw77bTTsHz58iZRozp16rTbM93r1nX88cdn/Z4HDRr0u+rqaiFUKorii4srD0zPr9W2bVvMmDFjWtD6xDnnnLPbXOsXXJDlc+S8efPm0SgliEbs43aOGx4SpX5yI6/FsxeebXdvewTv3xr6nDcWHw8FxK+/Jytrr5svAGzevBkLFy7EQw89hA8//PCWSCQilZWVSe3bt5dOPfVUacKECbRfJIiAQgJgI7j0mt/1C1qZJAl47enJDaa2HHbxZYMzcc1lvRYMpd7gR9v6+8zkm4WmEjWIwqZVq1a+WfglEgmEw2ERW0ZVVREnLJFI4I477tjvwXTiiScWRaNRmKYpREXvIlqSJBHMuinGuncDzcdiOBwOhPBCBI8zzjhjmCRJmD9//tSm+s5x48aJfs7jyHHRy69su/vi+eefh2EYkCQJtm1D0zRfNug8q6Vt2wiFQjj66KOHBa1PnHzyyQB2HYaEQiFfLPpt2xZZmQFg06ZNGDFixNk0Sgli3/Bxo6qqOGjgwh4/7HBdF5ZlwTAMmKYJx3EgyzJUVW30XmFPn/Nmf+frD752s20bNTU12LBhA5YtW4YFCxZg2rRpGDduHKSdtG7dWjrttNOkP//5z9Jpp532ILUoQeQO5MrZWJFkVEpyJ0YDFf9lROKm/hftacK/MSmzx2JNtnJ/u82Lz5zbm+L+Zax/7XwIe92w+ALAL6qrq6khiIPixRdfZMXFxb5ZoBQVFYlN6M7YU9A0DaZpYtasAzOE7tixY7Kqqgrt2rUTAav52OSWjaFQSFjqHiz8e/lrfj8tW7bExIkT2f9n77zDo6ry//8+986dlkYTUBBFXWkSeicUFQVEATcizYLiiigG2VUha8EVBH+rQljL7uoqogIiUgTRSK8aEEMNSPGrNGlC2vR77/n9Ec71ToiQZGbOzCTn9Tx5CJDce+b08z6fkpGRIW7RBQCAvLw8qigKrFYrEhMTU8L13KlTp5IpU6ZQNoYAGAIbD0u78jBp0iTy2GOPUbvdbowRHgd0NiZZHMDWrVvHVJ/48ccf6fnz55GSkgKHwwFKKQoKClCjRo3IHyAutAGzyPzmm2/EII0SR48epUCJgO/xeFC/fv2Yv9RVVRXMdf2TTz7BpEmTqtVapygKfvvtN9SsWROBQMDwjjCHODAn2mDnArPVntlbwXx+KO1JxP7P/G9nz57F2bNn8fPPP+Pw4cMIBAInn3rqqSvFaBIIqj5CAKyISBODIqCe5aBlueUSQuji+Z98NvDU6HtCfwvBgLvTR4seEDnM7n/RoqioSDSEICRGjhwJACgoKEBKSgqXd/r9fjidTmNzyzbEDzzwQKUPE7m5uZubN2/erfRmnVlJsZvycAgMzDWabdI1TTMEjocffhgZGRmiYwmwYsUKeuONNxp95eqrrw7r8wkh5MyZM7ROnTqGa5iiKJBlGStWrHgnFjI07tq1C2lpaVzEPzbmA4FAUGZwSilmz55NH3zwwZgQKxo3bmxYA3k8HlgsFtSoUYNLnERzghSfz4cxY8aIy4ooUb9+/aB1ye12o1atWjFdZrMYFetljQQulwu1a9cGAOzZs2dghw4dgm4t8/Ly6pw7d+7un3/++f4ff/yxE6XUwmIqS5KElJQUw1rP5/PB5/Ox5Bu0Ro0ahbVr1z5Wt27dnQ0bNvywXbt2Qp0XCAQGQgCsIKubLJx+y4/pE2OpTHqWk0oZ7os2XoOHjhgSmOGgcojn1LKeLQj/QcN8m8e1/1wQIYQLsCAUli1b9gH7nh2YI00gEDCSaDCBTtM0bN26NaTnjhw5snu/fv1oSkqKcZBmroCapgXdqocCcyssy7oQKHEFfv755+nLL78s5uBqzCuvvEL79Olj9EWPxxOR7LwLFizAQw89BLvdbohHkiRh8+bNYwBEXQAsKCiYHQgEHuTh/mtskk1iI8tsPmjQoJjoF5mZmdTtdiMlJQVerxcOhyNIXIk0LG6xqqpYt26dGKhRhIlCbrcbTqcTiqJwDydTGTweD5xOJ2rWrFnt2iwhIcGw1PN6vdeW/v/mzZufBfDfC18CgUAQvjVDVEHF6NP3jkkLkrPWxlapKL747NOPyvof5anQknaIpB98KEsw4bV5Y2KGsAAUhEJiYuKDQMmttsPhuCiAdCRQFMWwVmLWKIFAAN27dw958Kxfvx6yLMNqtSIQCAS56YZrbJrFBSZemvH7/Rg6dKjoXNWYf/zjH3TcuHFwuVzGOkEICbsFIAA8/vjjZO/evXC73Ya1GwBce+21MVEXd95556jjx48b4yXSsKD3gUDAcIVmljevvfZa1L1BBg8ejJSUFLjdbjDXaBYDlZdHAXM97N+/v9grRhFCCAoLC+F0OqHruiFWx/IXUOL9oqpqtbyAZu68Fyz/baIXCwQCXggBsBIMHfWXm2OtTANOPDjyDxu5kiLet22/fki0Nh+YFVDpDR2vjSMgYgAKQqNXr14Afrdm5ZU8gGXQ1jQNiqLgzJkzYXlus2bNGrpcLgAwkgCYM+SFY3yycrPnsTpjoqbT6UTTpk2xf/9+p+hh1Y+VK1f+c/z48UhMTERKSoohEpst9MJN+/btSVFRERRFMfpjixYtYqZOvvzyS25zC7OqkiTpoqQjLNxBNGnfvj0AwOl0wuPxGGX1er0Rd/8FAK/XC0IIjh
w5IgZrlCGEIDk52fg+2iFlyoPf74fNZjPEyuoIuwSklAoBUCAQcEMIgJWtuBi0jAvM+OP4hDqtWFNT2Gm3tJ4fiJbmgyzLQck/uPblC27HtWvXPilaQlAZFi1aRIES4YqJ2TzidDEXYEqpIVgsX748LM9u1qzZ8by8PHi9XlgsFiiKYmTIM2fuDcdnAGBkMTYLDyz5QEFBgUv0surFiy++SDt37vy3pKQkI8aTOeNq/fr1I/bu+vXrk+PHj8ecBSAAPPHEE4RSymV+YWNclmUjGycjJSUFH3zwQW606iE7O5tqmmZY/LH5iVd8RACw2+2QJAn//a/wUIyJM0AgAL/fb8TCNSd+iMUvJlKzC6/qBksw5PV6kZ+f31r0YIFAwO3sL6qg8uTdvPXqWCqPLAHLFy18r6z/s4x3kczs8sfYkDPOi77BkaSkJON7ngIgc3ECgBtuuOFr0RKCytCvXz/4fD7joMw2t5GGZfz1er0ASpKPPPHEE2F78YYNG85KkmQcqphrFRMFwnWINh8IzM9mMQeZpY+g4jDLzXiIh8V45ZVX6F//+lckJiYa/cBmsxl9HkBEYgCaadiwIfF4PPD5fKhTp05s7b3y8oJEePYnE8PCtY7KsmxckLH4nOZx27Jly6gd2tu1a2eEKGB9pKw5JdSxY94rmOuaceDAAUyfPl24/8YAFouFW/zdsJxZTHFGoxH/OiYO4ZIEu92OOnXqbBA9WCAQcJt7RBVUnptatjxGwcdq+1i+hvLsZ/sfve/hbVtzbi/r/6avOFGuTZqI+xe9jVA0Nh+MRo0avS1aQlBRtm3bdpeiKLDZbKCUwufzlXlQjARutxtWqxUOhwPnzp3DkiVLwvr8v/3tb1ewLL1ut9sYLyzmYKShlMJqtcLtduPDDz+kordVDCYMx9PhcsGCBfTpp58OuhSKFsnJyaSgoACFhYUxVUenTp16TVVVo13Z+snEUXPMzkjSunVr7N2790+8P//kyZM9PCz92AUEm4s0TTOEdE3T4HK58M03IrmoQCAQCATxhBAAQ0TOyCcUkdPLvm3z9UNShoc0etFP5PEeImV4yLIrP/jkUr/T7ttef2jJdeD23EuaDZAn3aJPRAGebjsXtfmFDX2rVq22iZYQVJT8/Pylsiwb8aAcDgdUVeUiarMDv6ZpqFGjBh566KGwT8ZLly6FLMuGqzGzdORhqWvOBnzbbbeJzlZB7Ha7EV/K7XbHfHl37NhBBw4cCIvFYgjpl2LhwoVfRLpM9erVI7FWd3369Hn62LFjxhiklBpWuuzvPOYfWZZx5MiRA7w//913321PSUnh8i42BzHrbia6UkqRkJCAcePGiQtjgUAgEAjiCCH2hGMTmOEO+wYoM1uBlOEh3XpcHIdv4JChI79u9Mk7l/r9P4oH2LRpUzeFvcz/20keOUIIEVYmUYC5dwHg7qoWT65xgtiDJf8wu9/xsrhSFMXIVvrjjz9G5B3p6emEiTEsyzC3BVqSoOs6FEVB7dq18fzzzwdEjys/fr/fsFpyOBwxW87//e9/lFJKmzZtCqvVCk3TgtaEPyIvL+9OHuVr1KhRzC0SK1asMJLoaJoWJMjzcoMMBALo0aMH189NKSXNmjXjt781CansexYK4dtvvxWTjEAgEAgEcYYQAMPE5SzrKsr0FYWX3HD3H3z32NU3Lpz+h5s2CdCynGWKeXLGeUm7yDuGoM2Ts64RLRkdrFZr0AGGVxzAeIuNJYgt5s+fT1k/Sk5Ohq7r8Hq9QdltI91/WfIMSZIiFhRt7969RrZhp9MZZAkTSViQcJbheMCAARbR6yo2r7LkDbE4z02dOpX+9NNPdMiQIaCUwmazweVyQZbloPhrf0Tjxo2rbds+8cQTRNM0yLJsxEg0Zx/nMf8oigKn04l//vOf3C5O586dq/MKGRIIBIxLCEqpkQSJEAKfz4euXbuKzYNAIBAIBHGGEADDRNOmTd17e+c0CcezPra9Wi5XzD797ph0ycMjKNau+mZimQfaXltase+PFciQImDFKKjYQTUasMOS2XpLICgvvXv3DupH5sx+vAQXTdOwb98+NG3aNGJ+itnZ2Ya7McvOy+vzsYyJQEnMse3btwtf4HJizpDq8XhiplxTpkyhLpeLPv3007jmmmuQmJhoiL0suUd5LE2vu+66at2+mzZtCrL+M3/PIwYgE2mfeOIJbp/5lltuMT5rxA8IpksO9j42H+3cuVNMMAKBQCAQxCFCAAwjLVNTD2Q3nhtyIoUrG12/Olxl6rl34LSy/r11mza7NrZcNmHS2hpo9EKxEP+ijKIooJRyzQBs3uC7XC7RCIIKsWLFincSEhJgsVggyzKKi4tBCIGiKNws5CRJQiAQwLx589RIviczM5MUFxcbwgIvCxxVVaEoCiRJgtfrhdVqRWFhYbbofeWDudNqmgaHw4FDhw7RNWvW0I8++oi+8sordM6cOTm8yvLuu+/u2bFjB/X7/fSvf/0rADDLVWiaBlVVYbFYkJ+fDwDlclm+4oorqnsTP1NcXGyIUyxhD8AnsZbFYoGu67Db7ZgyZUpRpN/32muvnalXrx4IIVzmV3OWVvOFh6qqyM3NPSBmGIFAIBAI4g8h/EQAPcsRkoqztO57nw0eNmJIuN610/LIkTaPC/feWObgwYP0uuuuC9pk83RZO3HiBBo0aCDmA0G52bFjB23ZsuVFB1EmuDBxI5IwF0DCYbBs2LCBdu7cGYqiGGJNpGHvYW53brcbTqeTy+eNUHtRnhl5Wb0BJZccCQkJRp/Rdd0QWAkh8Pv9OH36NI4cOYLjx48jPz8f//d//4eGDRv+X/Pmzd/v1avX1IrEyM3JyRmyYcOGT3v16oWWLVsGxfQz9x9WHnb5QwhBcXExEhMTL/uOn3/+GY0bN67W8/bZs2dprVq1gtbLQCBgWOzy6GOapiE3NxcdO3aMaFucPHmS1qtXj9sFi7ku2QUlIQQFBQWoWbNmXPc7yvm21TwXXbhEIuH+POZ3xFlb4IMPPsDDDz9MotkfolF/bCxv2rQpIy0tbZbYWVZfeO+Pqlv/4z3nl67ncM/5oSLiCUUAKcNDQhEB7zz9VHo4y9NKfbfRjtzc1NZt2uwSrRObyLIcFF+n9IYx4n1WEsbAggrOK61a/WFfDpf1Dcu6a7fbAQAejyfIMooQgs8//5zL5+3RowdhGwjzYTiiC/QFkYgQgkAgAKfTCQB477336OjRo4VgfxnM7cNca81WTebQC1arFQ0bNkTDhg1LP6YxgJcBvFzR/WPHjh0v2a7m8pjLWh7xDwA3kSuWWbp0KUaNGmVYAbKYgLzXz7Zt2yIvL69O8+bNz0bqPfXq1TPmQdafI3xgMsQ/JlQHAgG8++67YnIRCAQCgSBOEaf+CBFKPEAJRYRSetmd657duxuW95mpG7qKgC0xDDsEmg+YPG8iecQTElQdPv74Y8ojbiQhxHDjpJTC4XAYmV11Xcf58+eRnp7ObaBs2bLFsIjhMT5ZHFR2cKUAACAASURBVDNmqQYAbrcb6enpohMKkJKSUu3r4OGHHyYej
8cQp1RV5RL/j62bLA6gLMtwu91nIvWuuXPnUkopvF4vF/EPKLH+8/v9IIQYoqrP58MzzzwjLh8EAoFAIIhThAAYIVqmph5Y23TRS5XeWGY5L7uDbb6m49GKPHPiHTdQ0TKxidmSIxpWyiIJiKAi3Hrrrdzi4FFKDRdJlg2XvZt3IPo1a9YYFjG8RAZKqRFrDCgRBVNSUjBp0iQxn1dzymspWNX59ttvjVh8FouFW9bnC+EHIEkSfD4fbrrppoi9q3fv3iCEGNbQRUURDzkIq9UKq9UKj8djCIEi+YdAIKgMalZtqs5MEvsWgSAGEAJgBLnl9n6Td/XY0kqrxDlRIoA6s2w34o3r1o6pjIvxK7cdh57lFJNvDFLalYu3COjz+UQjCMrFtm3b7rLb7VwEQF3Xg1ziPR6PkS1VkiRcccUVqTw/+/PPP098Ph8IIVysZs0ZgJmlUUJCAjweD5555hnRGas5wnK7hPz8/GXmrMlWq5WLQK9pmhE+w2KxwO/348033wz74v3cc8/R+vXrG3/3+/1ISkriUrderxc2mw1WqxWEEKSlpQnrP4FAUC4Wf/rJgsBMJ9WzHBTUDUCDJs6hAkHUEQJghGndps0u5SkPkTJKvijKL9xJpCTJhzrTQbWsmvqCj97/auLdV9JuO/u/U/kSUezIzU0VLRNbsFhU0Qrg7PV6RSMIyttXl/I6fAK/Z/rVdR2JiYnQdR2apuHgwYNITU3dzfvzL1myBH6/n2v8NZ/PZ4gahBDYbDYkJSVh2bJlH4geWX05efKkqAQA6enpd50/fx4ejwc+n49rggyWxVmWZSQlJWHQoEFhf8/YsWON71mSJR5QSiFJkjEHr1u3TnQ2gUBwSXbk5qZunaBQPctBB54cfY98IXeWRACJUBBQBGY4hAgoEEQRIQByRs44J0kZHvJ1o0/KLeJJBCDwkvRzj/d9pWd+yGUQ8QBjsF9ccHE0W/zwgsX1EQjKQ4sWLYwDYcQXKEmCruuwWCxG4gSfzwebzYbs7Oyj0fj8Q4cOJbxc5tlcwEQGJmywenE6nQ+KHll9OXz4sKiECyxfvhwOhwM2m42bCGi3241wAGwNrVu3LjIzM8N6uE1OTjbioMqyDIfDwe3Szmq1IhAIQJZlJCYmDhQ9TSAQlMU79zahWpaTpm7ourP9NZbL7G1EfQkE0UQIgFGi/+C7x0oZHnLsXHTer85MELcvsTQQL2QAjhYiBqCgPLzxxhuUCX+8YgCqqmoI436/H7IsIz8/H+PGjWsUrXr49ddfuVnhsKQjZsHV6/VCURS0bdtWdMpqzMGDB0UlXOCRRx4hhYWFxpjhhdvthqIohkWwLMsYMmRI2J7/0UcfUZvNZlwSMtdmm80W8c/m8/mM92mahg4dOnwheppAEBs8m5ZCC/7ZQP/kP29vqkhSyHCxIzc39eCM4b8FZiZQPctBH+16BATlO8eUhLkSrsACQdR0B1EF0aXRSyWuwdwbnugiDkMMQQiJqgAoYkkJykP//v1ht9u5BdkHSmJrsYOo1WqFoihYunRpVOth4cKFXCyMKKWGsGC32w2h3m63w+fzITk5GcuWLRPzeDVFuAAHk5ubC7/fj8TERC6XWpRSI3wHW78lScL111+PvXv3/ikc7xg2bJiR2ITNOV6vl8v8a7fbjXf+5z//ER1MIIghpvzZjyTrOTLM+9duzdd0PKpnOSj7CsxIpHP+/a+cUEM+5eXl1VGzauuBGYlUnZlAtZkJVJvppFqWk7bc0HXn9dLiWjKpXLxViVAhAgoEUUIIgBFg7epvJj47EHRiv7p0xZJFb5erITI8BOCrA5b3pkbAqT2iYBPP3I55WkwI4pN58+atufHGG41MvLysRnVdh81mM5Jg6LqOBx98MKoOJJmZmSQ/vyQcAxPPdV0PGkfhEPRLzwlMbABKLIAkSUKXLl1E56yisD7PcLlcRt/SdR19+vS5V9TS73z33XcFVquVW4xOVVWDLP+YW67D4cCJEycOhGGeobIsQ9d1Yz5h2Y55XhieOXMG48aNE057MYymaSCEBGWLj3XYHsJcbkH5UcZ7yOqj1jL/T5Y0jPQ90zF1Q9edZmHQ/KXOdFJ1ppMGZjppwf9LoHmZCdT7Rsm/aVkOqmU5aNOV7c5IcBNZ0iARHYToIBfi+IVjQpAIxTujG4nDqEDAGSEARoCeewZOm3azA6/0LULfX0Y8pmc56Jy3s7ZetjEy3CSn/cphPMtamWzCgqoDExiICMghuAwdOnTozYLda5rGxQWN9U232w2r1QqPx4NPP/00JupjyZIl8Hg8kGXZsM5hbtHmA3tE529dR82aNfHiiy+KebwKwqxfmbCckJAAv98PQgiKi4vRqVOnBaKWfmfixIk1iouLjYy1ET+AKwp8Pp8h/LH4g7Iso1u3biE/f8CAAcYcyMR/Nv/yWrPPnTuHb7/9VnSuGIeJaSxhFq9EOKGOH7/fb4T2EFScPq8VEI1Wbi6QCIVEKGRCkWTT0bSeDqtMLyTq4GeS8mjLM1i+eOF7ojUFAn4IATDMaDPLFtRGBiZ2YLcul/r9Lt26z5cyPGTSUhu3MgsRsHrDMv0JBJeiUaNGRj/hHTPS6XRC13U4HA6MGDEiJtTqhx56iDgcjuD53+RKz2NMSZIEQgjuv/9+0UGrKIqiwGKxGNaALFzEiRMnROWUwezZs+HxeLhZtcuyHGRtyIQ5h8OBDz/8MKS9VefOnYPmFJYFnKe1VHJyMtLT08UNYYzjcDjg8Xhgs9lAKY2LsC6EECiKAqvVitJrqaACa8R4d9yPz35H7nt4965dN4rWFAj4UKVO/bk/bO+4fNHC9/Qsp2HiPLH/lVSbWfL9swPqXfh3J108/5MF2cuXzti/f78zXO/Xsxy0PJeyepaDzn5zRu6lfubVNflcXYK1rJph3VFu/35bz5VffTlt9lszcvUsJ504sAk98o8aNDDDSY+8lELVC0Fjly9a+N62nJzbxVCMHuY4YwJBWbzwwgtUURSoqmoIcTyyULLsk0BJQPojR47EVL2cOHECPp8PiqIYVn/skM4LVVVx3XXXYenSpZ+Inlq1YJmfWX9i7vdutxuHDh0SFVQG48aNI7quc1nT/H4/LBYLZFmGqqpBMQE9Hg9uueWWSj/7448/pkzsY38yEZiXtZTH48H58+dFp4oD/H4/HA6HYU3HksbE8pdZqGdZtAWVoyLeYxQApbGlGRIAzdd2+lG0pEDAb8zFNc92Ap02PDw3R9+2/fqhbmk9P6jM7855Z9bWkf5nO1T09y6XAETNqqtLKOLSThtbLpvQ8+ZbZ1T093bk5qbuytn4Xt7yVztMvfVcpdO7T8oG0h756H8DBqePrm4D0ePxUKvValgNsdh8vA6Z33//PTp37ixu+QVlbxgppR6PB3a73eiXvPuoruvIzs6efeedd46KlXp5/vnn6XPPPRcUm48d2HlY1jKXbFVVsXv3brRt2zamx7CmaVRYG1fucM/6GBMFn3vuObzyyitizi7rMJyTQ1u2bMnNqoiJf0x0VFXVcN9++eWXMXXq1Aq3k67r
1DyHsPmWiScsM3qE9yX44YcfHu3evft/q+Kaxvl9Rnvpug5ZlsPaeAUFBZQJgGfPnkWtWrViPq4ec213uVyYN29eVONM0hK4h8Nh7tqbNm3KSEtLmxXKsxa+OW333do/birX5wWJyTjwOiWwVAGLxnjZH4Wz/4k5/9L1HO45P+T5Nx4bcf/+/U7HN//4+Wq6+IpIPP9j26vb7h/zZMcKNW4IbrRL67732eBhI4b80f8v+PD9r9LzH+/Lo24rkpF4R25uauqGrjsj1TW3dVx1d6cuXRdXh4k/2gLgnj170Lp1a3GYFFzE4sWLF95xxx1/VhQFHo8HVqsVsizD5/NxiQPI3nn69GlcddVVMddHi4uLKXNjYmP3wmLPRXiwWCyGNUVCQoIQAKvWhtVI0sTEJU3TWAxOMV//AXPmzMkZPnx4Rx5jsHS8NV3XDUtNSilOnz6N+vXrV6itXnjhBTp58mRjD8AsoSVJgqqqkGWZy/7gl19+wbXXXkuq6NiqUgKgIPT+EO8CIACoWbWoBE9ct4UOJ7Vk/FbtNgpCAKxac37peo61OT+uBtjGdWvH6FkOemN2G1ekxD8AGOl71ojXt3Hd2jGXbdgQY+gNPD36nks9Y8gDD/Vb9afPpvOo4yOT7Zf9LPPf+88aPctBIyf+AQBFh623LCrJVJVAw+mqLQiGEMItoYMg/mjUqNGfgRIhzuFwGMIWT7dxWZaxaNGimKyf1atXGwJAIBDgGqOLiX/snXPnzhXxXKsQzOWTud+zZDNz5swRlXMJ7r///k483hMIBIKS/TDrTLbhp5SiXr162LlzZ4W8QwYMGGDEcGPPNM8pzBIw0nz++eeiMwkEcYQl4xzR9PjWliW4yfZt23qK1hQIIjnO4gRtppN229n/Hd7v7baz/zt6loOuX7PqqbL+P5wJNC71rNv6D5ikIyniO76GNQnmv/efNX94IJnpoENc43tz7aREx43ZbVxqVl1dDNnwwg4RFotFVIagTG644QZYLBY4HA7jUFpcXMwlyQWLa6SqKp544omY3NU2btw4lY0fdkjXdZ2LCGh2C0xMTMTdd98tOmwVggl/bI4mhMDr9eKRRx4R1kOX4b333uPSPn6/3xjvzHqIEAJJkoz1NT8/f2tFntu8eXND3GfWxKztzVaBkeavf/2r6GcCQbytG0+5SXkOizol0CkBpQQ0Cg6Blypjq80914mWFAgiR8wLgBNvBi1JrhFdw4a03Xe+oWc5qDlhRV5eXp1wv+dSIqAl47Q0L+GNTZH+rKUFPkopYRaREolmZy0iepaD7sjNTa1qA9Htdgfd6vPK4MayxYkMbIKyeOONN2hycnKQsKWqKhITE8PWR8tKJsIOt2w8fPnllzFbR6mpqbtZchKWoMFms3ERAAkhUFXVEAVsNhteffVVYQUYR6iqCrfb/YdjonQW2F27dolKKwdjxowhhYWFF62pLF5fuCzoHA4HJEkyvoDfL9SYtXTbtm3L/bxPP/2UsvWYZUllz7Lb7cYzzXFHwzkHFxUVQdd1zJ49W3QigSBOkTM8lz01S4RCIhSE0KjEAySXKZsWRgMbgUBQaozFasF279p1o57loK8MjC1hot13vb7Oy8urs2f37oZNV7Y7E4l3TBzQ5A8nvRGjH0s7lh/5ZtOznBQoyaysZSXElOVd6oauO1d/89XkqjQQ2eGEHeR5xYGQJAmyLIf1MCGoOjz66KMAAHaQVhQFFovFiEMWDszJDdgzmeWLzWZDUVERBg0aFNOWKB988IGR9MPpdELTNG5WtUwEZHU3aNAg0XHjaN63WCxwOksiXLBMmIQQ+P3+krX4gpDscrlw6NAhkaypAuTn5xtCX+nYXrzc9AOBAOx2O7788styHWYHDBgASZIuEoUj1f9YTEGgRAhMSkqCJEkYNWqU6GcCQRyzrumil+K5/ARA3ou1hQgoEETi/B+LhVq6cP7HLWI4HXjTle3ONF/T8Wiknv9KnyNYvfKPBa5GL7o4bMwo9CwHbbWxe45EYm/+7b3v7hf1KnQ7VNqdh2cgYkmSkJCQIGZDQfDmcd26p51OJ/x+P2rWrBkkSoRTNDYnvmHfs4D3ALB///6Yr6vJkyeTkydPwuv1Gla1vGAiEquva665BsuWLftA9ODYh4lQXq8XqqoasVglSYLVaoXb7TZc4BVFQYsWLYQoUwHeeecdykRVVVWD3Gl5XbKxS5O0tLTL/uyCBQu+Zn2AicIRPWATAovFAkVRgqyuv//+e9F5BII455bb+01eWPPtr+P5MzSt5UZVM/gQCGKBmBMA9SwHvfP4qBHVvWG++eSRFy/1/zntVw4T3bekv0TCFZs3TFjhDbOOcDgc2LVrV0vRowSM4uLi/+f3+40YZKVFapfLFfI7zEJZWZktKaX44YcfDsRDfR04cMAQcKxWKzweD5fxa64z1kbNmzd/UPTg2EdRFGiaBrvdDovFguLiYsPS9rfffoPT6URxcTGKioqwaNGitaLGKsb06dOlwsJCY35hcfnM8fp4rK9M6H3hhRcueWl51VVX3c4jtl9Z+w9KKex2O6uXgaL3CATxz5D7R/XTqBzXn6H3vrtf/KM4/AKBoHLElACoC39/g1uG/PuSWX+7dOs+/9u22aNETZVYZMb7Z2BWCrwxu0ctWLBABJcSGKSlpcFqtcJqtRr9xGq1Gpkv7XZ7yO9grmeapgUFt2fvO3r0KMaMGdMkHuqLUvp8IBAwDvvhqJ/Lrpm6bsQIYxmIbTYb6tevLzpwHKBpmjEGACAxMdEQcWvXro3i4mIkJiZi1qxZGDZs2M2ixirOkiVLAPyeUVnXdW4XbpRSI3tzUlISRoy49N12u3btYLfboWkalyy/7B2qqsJqtcLlcmH//v3o0KHDF6LnCARVA2V8cdxbjqftvvMN0ZICQfiIGQFQiH/B3NZ/wKTL/Uy3tB6zNZEXt0r0HyYAsg05j80/UCL86boOTdPQokUL0ZEEAIB//etfNDk5GT6fz8g+ao4TZf57yGPXlDGXWeawDJiffvpp3NRZ7969pxw+fBjA72Icj/HLDvDmJARWqxXz5s0Ta2qMw8aQ2+0OivmmaRp8Ph+Ki4vx3XffPTB58mTh+ltJxowZQwAEJejgIc6zd3q9XsMyuEGDBpgzZ05OWT+7aNEiWjp5CI/+Ryk13I0dDgfmzp2ril4jEFSxw36GJ+7XEHWmU+xpBIJwzQmxUIilC+d/LJqichO18pSHUDEllogHcSwCMhec0gd7XgICpRQtWwoPYEEJY8eOBaU0KM4fc9dNTEyErusoLi4Oy7vMbqyUUiN5xunTp/HMM8/E1aZ15cqVkGUZfr8fPF35mGgqSRLOnTsHi8WC2267TXTkWF+zdB0+nw9OpxNOpxNutxtWqxWyLOOzzz7DlVdeSbp06TJH1FRoHDp0yLCUZfAan0xsLCgogCzLaN26dceyfq53796QZdmYZ3mVj70nEAhAkiS8/PLLiugxAkH1PlvGZPkJxcQ/Xy1OvAJBOMZT1DfAWc6IxPzL/Kok/tKCpJlrP1amb4uXBsm7eevVFf2
dLW1WPBazBxwKZC5jsbAIAILMRR5ESrQ88pI1LhcH5p5EOau5THyRZRmNGzcWM6IA69ate9rlcoEQAkKIYZ2qKIphocey3YZDAAGCXYHZYZRZ08UTGRkZpLCwEAkJCRcJDhGZX8vIZFqrVi34/X7UqlULs2bNEpvlGIa5bDMRxu/34z//+Q8IIeS+++4TVn9h4oMPPghaYwOBAJfxqaqqkTU9JSUFdrsdDRo0uOjnFi5c+EWNGjUucsmNNIFAAFarFX6/H4qiYOHChaKzCARVmKVX/u+zeC7/lLSz2LY153bRkgJBiPvPaBcg3FZb61ssndT71tumX+7ntuXk3P7rkZ9HDjjx4MhYapDK3tCoMx1UipHjQuYXdtzy14+n9+l/x6Ty/Pznc+csHXTm0bvCVfzM9fUwfdHPcXV4WrRoER08eLBxOOGx+WcCgiRJ8Pl8sNlsIDzTDwtikjVr1tDu3bsHZeLl0QfN8dC8Xi8cDkdc9sWNGzfS7t27G4d/4PcYh6w+S7vsRooTJ06gQYMGMVOPmqZRXtlXy3g3vvrqK6SkpBjWzizpgSRJhuBt/gJgZHU2u72zsAlmEckcT7V0/zYL5yzsgq7r8Hg8CAQC2LRpEwYOHCjm3gjyyy+/0EaNGhlrHQ8opUa/kCTJEOznz5+PESNGGO29fft22qZNGxBCjLiPPDALoReyiVebPkg537aaLd0vZKMW4z3G+kPppFo89+CbNm3KSEtLm8XjnWpWHV2CK277n04JLOPdVW78RGN/FI3+V13m/NL1HGtzflQLEy7xb3uXdX07dOyUHcoz5ryTtXWkf2KHaNXFpGzg1RWVN8+mlBI6yxnViIALEmauHTr60ZAClWtZTkoQerfY32f7Fc2bNz8bLxPTvHnz6NChQ42NOIvNw2Mz4vV6YbfbQSnF9u3bB4oA4NWb8+fP05SUFC59T9d144DMxBWfz4e9e/eiXbt2cbnB27dvX4MbbrjhGItjaI6zyeIp8sLtduOrr75alp6efld13OCyzS0TO6xWK7nUGvrdd9/dd/To0eG5ubm3JyUloWbNmrjyyivRoEEDOJ1OqKoKQgiSk5NRo0YNuN1u2O12JCQkBF3aeDwe+P1+nDt3Dh6PB/n5+Th9+jSOHz+OU6dOqXfdddefxTzLl/fff5+OGjXKaB9FUQyBPoL9HaqqGpcp7KLD5XIhJSWFlD6YuFwuJCQkAAA3IbCoqAiEEBw8eBBt27YVAmDk3icEwBjvD9VFAASAZ9Pr0Glprrhus6oQ1zCa+6No9r/qMOeXrudYm/Mt0Xrxt5s3DcX3fUJ6xsIab3095IGH+gGdQi7P/Y9ldLwf0YsjN3TKpk6vrmhX6d8nhNCJPYBX/uzgXva1TRe9dMvt/SYDj4b8LDmj5FZHz3JShCAEXsgMHDeLQ35+viH8ma1MeAQDN7sdLVu2bGk81Vs8MmnSJHrNNdcciMXstm+99RatUaNGkPVaRDdwJutToMQFkhCCc+fOvRav7dusWbPje/bsQZMmTQyLL3bwZ2Oa10HD6XSiXr16d1bXsSZJEiilQYkYLrWGAphz4SuivPzyy2Ii5MxDDz1ERowYQa1WKywWC5f5rfR6zvpicnIypkyZUvTcc88lvfXWW5TNewkJCcbP8RD/NE1DUlISAODkyZP/Fr1EIKgevLrwLJmWFt/JEz/635s59z38RCfRmgJBJfbH0Xpxp+/7zKv0AXp1TUgZHlIi/oW5QjI8ZHNr/jH12rRttzXUZ4x8a/fV3DtQhoeUiH/hfq6bbOu4+u5QnrF54/pR8TIQT548acQ/Mx1II/5eVVXhcDiMw3KjRo3ErBhhMjMzkZ+ff2Msli09PR1AiYUMD8xulD6fD4qiwGq1ok+fPk/Hcxt/8cUXRiZjoERkZ2IAcyktK35fJGjfvj3y8vLqVMexxrIxOxwORMv1WBA77Nmzx5hneMDiDrI5QNd1I3bqwIEDEwHggQcegNVqDQoPwMsKye/3AwCOHj2K/v37PyZ6iEBQfdjeeV1foCRWezwyrOjZjl98/ulHoiUFgooTlR1xaFZ2BK9+cSKiu6O0nr3/zdO0OLfrhl7heM5NLVseW3H1h+/zKTWJuPl1py5dF4dSN11+6Pt+vAzELl26zCp9QOVxCGCiI3NT+tOf/iRmxQjy7rvvUlmWceONMan/ISUlpWRh4CSWMOs4r9drHMrfeuutuG/nzMxMwuqRuQGyOmWfmUcdU0qhKAp++eWXM9VxvLGYqgIBACxbtszL3PF5CPDsPZRSY8wTQuByuXDTTTdhxowZlLn8EkLg8XgMMbD0hWAkcDgcCAQC2LBhw2bROwSC6kWHTp2yN7de8RiJU6cfiegYcGzUyNwftncUrSkQVPD8FY2XVlYAnJfw+uYRo8d2j4eyVmgSC7OQFuky60iilozT3MTjHbm5qakbuu6MlfqN4GGVAr/HZOAZj4RlATxx4gQaNmwoXIAj3MbLli3DXXfdFVP1/NVXX9E+fUrCMsiyHJSUI8J1AkIIVFVFYWEhateuXSX637p162jPnj0NIYAd/pklEI+6Ze8uLCxErVq1ol6v0Yhxo6qqMacqiiLmtmrOmTNnaEpKCrckR+Z5zhyCiFkF//bbb0hISDDcknnHIfvtt99Qp06dajcuRAxAQen+UJ1iAJoJzKxFCTyQSLy2HYFcBZKCiBiAVWvOL13PsTbnc7cAnPPvWZV2deUt/gEAedId0TrambYp7PELshvPfTtinZgCPMU/AGjdps0uKcNDKmumnrs9Pm+HeLkIMusEAKhZs2ZYnrlkyZK4ji0SCU6dOkUDgQD8fj86doy9Ltm3b1/Isgyv18s2IxF/p9frNTbcHo8Ha9asqTLt3atXL+JylQTZZq6/5jHNa3zLsoyEhAS8+eab1W5M+nw+WCwWsLhvAsGqVaugKAo38c/n8xlWgOyLWfh5PB7UqFEDdrvd6J9+v/8isTBS+P1+ZGdni04hEFRjlPHnCI3j0N+EUKgzneLMIRBUAO4C4Ejfs5XKtBstKy5CCF2QmLU2Us8PR+y/0vS7a/DjkSqvZXz0rOn29NzSqjK/V/uL7jnxMiB530CyGFmyLBt/hoPOnTvj2WefFQvyBZ5//nlat25daJoGq9UKu90eU+WbNWsWVVUVXq8XCQkJUFU1KKtppLDb7YaFWlJSEu65554qZRXx66+/wu12A/g9CQAbZzwO+IFAwGjLxx6rfiG+WOIPVVURxctfQQwxbNgwwsYGl032BddfZtUvSRL8fr+xDsiyDFVVoWkaAoEAbDabkQwp0siyjBEjRghLNIGgmqPEuQWdRCjUrFpikRcIyjtmeL5swez3v6pUIaPswjn04b/cHIky7O2dE7EsoEvr/++zsHeWKLdD6zZtdmU3nldh68aGteJjMJoPqIFAgIuLoKIoKCwsNA4biqLgww8/DMsimpmZKWZYAOvWrXv6hRdegKqqsNvtUFXViLUXK6SlpcFisRiCHC/rNKDECiU5ORk5OTlVru2XLVsGm80GTdNgsVjALEABPjE+FUUx5hG/34
/MzMxquUEW1n8CM5s2bTL6hDlRh8/nAxBe62dzwhGLxQJCiHG5wuYAi8UCWZaNn71cxuqKEggEjAsIs/C5ZMkS0RkEAoFxxovnDYIED/Y9f50uWlIgKM944Uh6weN9KzMhVdXKb5maeiBSzx587/Ah4XzewhpvfR0LddbvrkGPH8tXqmR/2LNnDwghIIRAURTDFTOSaJqG5OTkINekQYMGhT6xSBKS60vV6QAAIABJREFUk5Oh63q1v5Hr2bPn/wsEArBYLPD7/bBYLFwFtvLQunVrAEBBQYFxEOVVRofDAUopOnfuXOXm+gkTJhCW/be4uBh2ux1WqxWapnFL8sMEDpvNhjvuuEPseqoRn7z3zkY1q644EJVi9erVXp/PZwjzDLPVc1WwGGWiH3N5Ns85BQUFSE9PF9Z/AoHAYHePynlaxQp/qn1SzGkCQXnO6aIKys/X13zyTrieteqGz6ZHvsThmweHPPBQv1hph1N9V1ZYSP5q6eKYTy1KKe3ocrmMG3qebqK6rhuCT3JycsjPKyoqKumBhGDbtm3VVgT0eDwUKBG5WIZHVVUNK7BY4Mcff6RutxuBQAApKSlBSRMijc/nA6UUBw8erLJ9YOXKlUHiArMu4uXixwRAQgjat28vFvIqzvrVq54KzHBQLctBh7kmdJdQRER8pGAmT57sMF+wMatnloyIdyD2iO0ACUFZn1NVVbDswwKBQMBo3abNrqX13/ssXssvEQotS6x3AsFlxwq3Tema1eMr+juxYnXG6D/o7rHhetZtdwyYFOny7u39XVhcjJdd+cEnsdQOHTp1qnDU6tt/HjE21gdjq1attjFRwOyWFGmBwOfzQZZlQwDMz8/H0qVLQ2rznTt3QtM0eL1etG7dGqtWrapWC3Jubm6PI0eOUGb5xeoaKHH3CreLVyjceOONsFgshvuZxWKBJElcLABtNhuKiorw5ZdfVtm+cNtttxEW80vTNG7ZlRlWqxW6rhvxJ1euXCk2x1UUPctB0/bc+YYsBV//SYRCEyJgEFu2bDHGoTnpBrPC550NNBIQQuBwOACUJFnSdR02mw2yLOP1118vEL1AIBCUZvC9I4Z8vit+lwsCisAMh1jvBIJLwE0APPvr8d4V/Z1YsjpjxJoYdilapqYeyPwy9CYeOGToyFj7bCsafvh+xX4jPtaCX3/9FQC4uYnqum64QLEg5TVq1MCePXuGh/jcZZIkwW63Q9d19OrVC//973+rzYLcunXr9VdddRUAIDExEW6323CrZUkhXnvttTPRLufrr79O8/PzYbVaQSk1AtETQrhYKbpcLiQnJ2PChAlV2m3jp59+Miz/mCDHAyY2MusmAOjWrZvY+VRB9KxLH3gIoVg895MFoqZKuOOOO4jX6wULzyDLMndxngfMo8BqtUJRFPh8PuTn52PixIk1RC8QCARlcc//vCSeA/jIEvDsgCuECCgQ/AHcBMDBZx+9qypUWJPU1n8JudI5xjUc+GL2sNCeEJvn8gF/HvJwVRyQW7duDQoaHmlYRtLSSUe6d+8e0nPT09PvAmC4kkqShEceeQQLFiyo0gvytGnT6MmTJykAI5aeqqpwOp2Ga5nT6QQhBCkpKaeiXd7bbrsNNWrUQGFhISilkGXZcBnjkQXY6XTio48+qvIL7fLlyyHLcpBrIQ8rX2bhy6w6KaVwOBx4/fXXxca4ivDNV8unaWWIf2U18MAzo+8RNfY7ubm5QX83C/NVIQYgpRS6rget74qiYM6cOaLxBQLBJbGM9xCdxu/d7LQ+xVj4r2m7RUsKBBcTs4FO5jle3xyL5WratKk7nhq4S7fu80P5/XC5EUeCeF6Y/giHw/E5AHi9Xi4uSIQQUEqD4oS5XC506dIl5GcfP34chBBYLBYUFxeDUop77rkH33zzTZUUH5YtW0affPJJ1K1bF0BJ1tWioiJYLBa43W7DrZZ9FRUVtYhmeVetWvVyixYlRbDb7ZAkCYFAAElJSSWLA4c4WJRS3H///VU+aPPTTz9NTp06BU3TjGQ7vFwMzRlNmTXQqFGjxO6nChCY4aC3HrhnYlk96Y96V2BmLSH+XiAnJ+ek+eKrqsT+M6/vNpvNsPxjF3Ljx48XgfIFAsFlkTNccT0p3q3/4ybRigLBxcTswG7VJW1orJbtwO25lY6efFQazN3tL5SL7EhmKg6VL+r9Z2lFfn7P7t0NY31ADh48OP3XX3+FLMtcBAJVVY04QUwETEhIYFYCOaE8+9ChQyCEQNM0JCUlGRYVN998M/bt21dlDqF5eXl19u/fT/v27WtY96mqCqvViqSkJAQCATidTsiybIg/fr/fiM0ULRo0aPAcE3+tVit8Pp9hfcosRyLN4cOHq81iu3XrVlBKjTHBw9VQ0zQ4nc4gq06/34+aNWvi73//uxCC4pgjU6+hciV2cDLxiMq7wFNPPXUlIcS4lDHHwq0KMQABGDFobTYbdF3H+vXrRcMLBIJyQQihsWqQU14CIv6tQHARMSsA3tSy5bFYLVsoVoDXjJtbl7tAcXNOk6rYeZu36VChOHUF+ef6xMPn+v7776EoCrxeryGalRZjAoFAkGVPZWEiBDv8MBFQ0zSkpqZ2DOXZOTk5UFUVsizD4/EEBVlv2rQpvF4vjXcRIisri9avX/9MkyZNjHbxer2wWCxQVdUQfBisDmw2G2rWrBnVsl933XWQJMmIAWlOTEIICas1jKqq0DTNqBOgJAOwx+PpiGrCtddem8rEBl5xxpgLMMsozjJR67qOhx56SOyA4pDF8z9ZoGc5aMPE05V+RvaXy94QNVnC/PnzjfmOJenhFaOTB+Zsvz6fD7169RLWf5wwJ5ehlBoW2FWpfwmqPiP+Mra7RoMvrONp4y4TCu8bSUIEFAhMSKIKqj6xbMUXChUVYs+fOdU9Hj5XzZo1M/x+P+x2e5DIZxZPWNDyUGEuwEyoYs+UZRl16tQJ6dkTJ04kTFxyOByGBRxzRVIUBS+88AJ8Ph/9+OOPN8Xh5p4+/vjjqFmzJoqLi0EIgaIosNlsQeIf+9ylM00yV9to8Pbbb1MesSbZgZr1KYvFAr/fD7/fj6NHj6JVq1bbqss8nJqautvj8cDn88Hr9XKJAVg6jhkb35IkoXbt2mJxjDPUmQl04KnQ4/i12T36KVGbJQwfPpycO3fOmJcVRYHf768yIg27gPJ4PDhy5IhocI6Yky+pqhpkYS8QxBPK+HPkbPHvdwfxdotglVWoM2uLgScQXEAIgBxZfMW/vxC1ED2K8883jodypqWlzSqduZPFDIvExrG0JSH7+xVXXIG8vLyQVECW8dbv9xvZZZnrK6UUVqsVFosFI0aM6LZlyxb6wgsvxNQCvXbt2udWrVr18qxZs+jixYvp6tWr6ZkzZyillHq9XkNQsdlshsDHPifb7JtFVhYHEEBUXYDvvfdeLgdcSZKMOjLHv5MkCe+//361m4P+9a9/weFwBFlbRvrwbxZ6WSIQAEhKSkJ2drbYEMcBE+++jupZDiqR8IzZOo5CbN+2raeo2RIOHz6MoqKiIKvnquACrGkaf
D4fgBIxasmSJaKxOcIsvimlxjzs9XrD4r0hEPCm7t/dRNNJHI9Ht2hEgYCdB0QV8OPPwx8YGK13z3PGdwyHcHBP0bO946Wsy5cvv2iTaM4iyjaV4dyoAsGx36xWKw4dOhRSzMr//e9/AEoyD7LYZ8zySZZl+P1+SJKEoqIidO7cGS+99BJcLhfdvXs3V/fgjRs3Pjljxgy6efNm+ttvv1FVVanf76c9evR4uUePHs+NHTsWd9xxB26++WbUqVMHHo8HdrvdEDbNn89qtUKW5SDrNyBYDGOxF6PBnDlzcmrVqsXl/ayPskzTzPKTEIJp06ZVO1e0v//976SoqAiUUsP1OuKL/IU5g4mvbAxKkoRbbrlFLMwxyNqV30wMzHBSPctB9SwHfaXnr2F/R5stPdaJmi5hyZIlRow8oMTCvioIgLIsG6E9EhISkJmZKdx/eR6wJAkejweEEBBC4PV6YbfbjZAMAkG8oTzlJvF6a0gAaFkOcekpEACwiCrgQ+b6KwBEz/1ixCNjuw+r4MQnvBSix6BBg4jf76fA766/7FBidtkN1ybVLBJYLBZDuOnfv39Iz37yySfJgw8+SBMTE43EJkwUYxkJdV033GF1XYfdbsf111+Pl19+GRMmTKCFhYU4ceIEDh8+jLy8PDp9+vRyX1zs3bv3T9u3b//4zJkzHa+77jpceeWVSElJgcPhgKIoxveSJKFr166glAa5QZvddgghhtjHkqZYrVZ4vV4oimK4+gIlFo/megzahIQ5vl5Fuf766zuyuudxAFJVFRaLBXa7HYFAALIsY+7cudV2bH/++ecYPnw4rFZrxN+laZoh+rFxzqxSWB9/++236dixY4UwwIn9+/c7r/+6q+sQBp6+AZ/WZQttUEKPvIHiepYj06ZNIy+++CJlVsq8YnTyGP8sC/0///lP0dCcoZQiISEBPp8PkiTBbrcb+wUq/IAvi+lyVaxPMcS+m7de3XxNx6PxWHYC4Mg/atJGL5wXfUpQrRFbTE7ckfHhY6IWossucn9cBcBZt26dETfN7LoXbtdNtrdiyQnYbTUTvN58882QNqr79u0LEiyZJSMTyVhMQF3XDYtAJrDVqlUL1157Lbp06YLhw4dj6tSpRNd1yr5+/fVXmp+fT/1+P2VomkZVVaWaptHmzZsfuO+++zpOmDABgwYNQvv27XH99dejQYMGqF+/PhISEqDrupFwhYl4gUAAPp/PKB9z26GUGgKmxWK5KBkLi2/HRD52kCwd/8/sCsybZs2aGcIcz4MQ++wFBQW4//77q+3ma9SoUcRqtXJxA2Njjo1nc/9jY75fv35iceDA/v37nYGZSfSGr9u6ZOJBEzK/rkwoZKmU+MeRZ+/vKESIC/z3v/8NcpfnOT9GChZKRNM0Yf0XBVjd22w2KIoSlJDJ5/MFxQYWXxd/XRBKRUeKMW5q2fJY9rVz347X8jdM8ULNqiMy8QiqNUIA5ERaz97/jnYZvm2bPapih8eq1QYHk1vnxVN5GzRocKPL5TI28sxyx2yxF05xxoxZ5LrttttCev7WrVuNm0KWBAGA4ULrcDgMgdBut0NVVcNVlGWPZe6LpV2V69Wrh6SkJEOsNLsXS5IEn89niHSs7hRFMURVs3Wa2T1XlmUjTpvNZoPdboeiKIZ7L9vAK4piZFnUdR02mw1Wq9XY7LN/L12vAFBUVMS9T7311luUxR7kIUCaD9G6rsNisWD37t3Vfj3Iy8vjJgCyRDSlY4iyMdWoUSNMmTKlCIKI0rRpUzcBhRRGB6pQnzSl7T7RMBd48sknSVFREUrH341nmKv/gQMHRANHAfPFC4CgJCA2my3oslV8XfzF9n+C2KPfwMGPa3EsoUlwiQsRQbVGCIDViG5pPWZXpc+T892WwRX5eZvT+Vs8fb4WLVocPHHihJFIg4lY4bYgY88xb1QDgQCAEgGwfv362Lhx45OVff64ceMaHT161LCwYwkzmHst+3cW15DFRtM0zYhhxMQ/sxUTEzWYpZ1Z2GPWhOzm3Rw/kYmHTJBi+Hw+o0ySJCEQCMDv9xt1xMQsJlCyz8GeaRZXdF03XDyZ8GgWYPx+P86fP8+9T40dO9bILs3DBZXVL2snr9eLX375pdrHI50zZw4XF2w2Z5gFATaHsAOWJEnIyMhIFCtk5LGML/7DQwelBJRKoFSCTiXolFz4usTcHbKWSLHvHzcJS4gL7Nu3zxgnVcEN2GKxQFVVtG/fXhx2o3HAunBZq2kavF6vMedbLJYqk2U60vXHK1auoOIoT3mIHkUDzVDffWDGMJdoRUG1nV95vagqWXGvXbXy2erSQfbv3++M1bKd++30rRX5+UbX/2lWvNW/x+Pp6HQ6jUDS5rhy4XKNMIs0DOamwuLzuVyurFDe8f333y9jrrEsAYb5C/jdypFt/Mx/N1P6Vr102dnvlyVwsd8ty4rSZrMFbTaZxV/pjSgTGc3vZYKk+f3mMjHxj/2boij48ccfufal6dOn57Pvebq3MTEVAE6fPo2RI0d2r+4L76uvvkpOnjwJAIaFKrN08Hq9xtgLx9g293Mmbpv/3+12w+l0QsAHTS/7QEsIBSE6CNEhER0SoRe+/vjwI4co68hEQ5OUw0IcusBPP/20lq0F8ULp/QCbR1js4L1794qGjTKyLF+U+COacYDj5oBqCn0jiE0s4z1RWz+kEN98g7TEqc10ig4mqJ7zK68XVdSbYv3a1eNjtdJ67Bk4PX4PHxX7+SOHDz0fq5+lz8ERYyvy823atdsab+3VqlWrbZs3bzas2NhG3+v1crFQYCLE7bffHtJz0tPT7yooKAiy0KsucZ2ZcMhiLDKX5vbt2y/jWY5bbrklpbi4GIFAADabjYtrDbO2VBQFmqZhy5Yta8WyW8KGDRvg8XgMV3c2nnmLD2xMZmdni40wB5SnisJiNSGFcfr8dsumoaJlgGHDht38888/GxnL4wVmUc8umZh7PwAcPXp0tmhZgUAQMSEhwxO3m3lCKLxvJIu9j6D6jVteL1rbdNFLFfn5tF0DZsRihe3fv99JKhjDJ7frhl6xUv7sRh++X5Gf731wyMRYbQeJVA8XClmWH2CxUJjLHnOZjfgEccF9RVVVrFmzJqRF8uzZszd6PJ6grKRVHdZG5lts9tnT09Pv4lmW9u3bIzEx0RDjeLmgWiwWaJoGj8eDYcOG3SyW3RJGjBhBWP9gbaFpWlCsTx4wK9e0tDTRKJw4iKGnY6k87bfePk+0SglffvmlkbU11mGCnzmxj3l9JYTgzjvvHCVaVSAQRJKc9iuHxWvZrXIAJ2f2KxatKKhOcNvh3HJ7v8mV2NzE3K3CjdltKxwzoF2HDutjpfwD/jzk4Yr8vExi82Lk+q/aVKgdKGxxO0i7dOky58MPPwxKUqEoChcLOnaokGUZnTp1CulZLVq0OLh06VLjM1QHzFZ2rP0IIYabJy+WLFlCWXnM2Z4jDXuHqqpYsWKFWHFLceDAAXi9XsMKsHSSDl790+fzgRCCBQsWiJtwDjQb/0E9jUql1qjoIRMdmzeuF0IRgCeeeIKY
swHHMuZLQLNHAJs7ZsyYIRpUIBBE/pzSrfv8zJW147b8dcm6BNGKgupETF9x6lnOmDLx2rxh/aiKbtMnrbki7jvJgo/e/yqWyrN//36nXMGeu+/mjVfHcxuMGTOGHDt2rGTQSpIREzDSMIs9AHA6nTh16lRI59Thw4eToqKii+LhVFVYkhNz9mJZlnH8+HGu5Rg4cKDxPUtswmWBMSVkuffee0WssVJs377dCNDFYnyWdbiPFEwokCQJdrsdffr0EY3Ca24YH5yFMNqDo/MPfd8XrVLCTz/9FBfZRwkhRkgJAEYsUYvFgtOnT2PChAlizhUIBFyYvvwY0Wj8Tjl6lkNcgAqqDVwFwO2d1/Wt2OYmtpJQdMmt+Aa5f8YHT8Vao9MKHjXSzz3eN5bK/6fszhW2wrypZctj8T5Yr776asLidVmtViOxQiRxOByGeOXz+ZCQkIAVK1a8E8oz169fP5sdXKoDLLsxi9MkSRJyc3O5vX/evHlrmOjHgpG7XC5uwbV1XcfGjRvFalsGjzzyyE2sbdg4Y8IfjxifFovFsEAEgMTERLz00ktiE1wNEUrR7xw+fHh2PFipmzPbsz+ZS/CWLVtEQwoEAq4o491RzQwcKgGRFERQTeAqAHbo1Cm7or9zY3abmEjTXRl35EnrFfS8+ZaZMdfoT7oq3O5HXqoRE5NiXl5eHQJftT3aTJ061RAHeLgpMZGOZblNSEhAixYtxoTyzDvvvHPU1KlTuQgcMXGwvhCUXdd1WCwW9ufnvN7fokWL3larFS6XCx6PB0BJogmeSViuvvrqVLHcls2SJUuM/sHEYp5tY3bT1nUdo0ePFo3CiY03LZsQS+X57C9XiMPPhTVq37598bORN8X/Yx4CTZo0uUK0pEAg4E00MwOHikwo3hl1jVgHBVUe7i7AmTmNK/w7sWCWS2dV3B256/D/fhKjgkSF67NhDR8WzPkg6q7ATVa2O1PR38nttr5XVRmwzz33HJkzZ07JuODkIsgs15h74hVXXIEVK1bQUD/Hzp07q/wEy5I5sC9CCDweDwYPHpzOqwzNmjUDpRQJCQmw2+0IBAJBrqYRnmvg9XqRmpq6Wyy3ZfPAAw8QNtZYgh9KKRcLX1VVYbPZgvrEVVddhZycnCGiZSJPz1tunaHpsXNW+nMLEQedsX79+rgopznRlCRJoJTi3LlzaN68+VnRigKBICriQhxnBn609Wmszv5qsmhFQZUeo7xfOH1uXqUmhZL4e9HhlzeHVypj38D0oSNjteEXJMxcW9HfST8/tu+3mzcNjdpGN8tZYTNMCoJ27Tusr0qD9oEHHiDbtm0zNv7mP9n3Pp8vLO9iySLMGREtFgv69OmD11577Uwoz27dujU5cOAAACAQCBjWhqUTZJg/i9kti4cAGirsM7HsrgBw8uRJbu/fsmULVVUVhBD4/X4QQqAoCvx+f1hcsM3PMLcNa0NKKRYuXLhDLLWXZuXKlVAUJajeeIi0TGSklMLn88FqtcLj8cDv938qWoUPylPumDoobZ1UX1g/ABg/fjxh6wwbJ2yOY8mUog2lNMgTgBCCQCCAhg0bCo9ugUAQVRbWfOvreC177/13vxiLiUgFgnAhxUtBu+T2fT8ag1HPctCrtcUVdqXIzK4R0/U5dPSjN1fm9zp932de7vbtHfm3QxKtTJ7E7zuuursqDtyOHTuSQ4cOobi42BDnfD4fKKXweDyw2WxwuUL3nje7B7JYZUycePjhh+uE+vwmTZqQn376yciCyhKEsM/h8/lgs9mMMui6Dk3TEAgE4iKTsKIoRtZdr9cLSimOHj36PK/3X3311WUmXGEu3aFS1jM8Hg/sdju8Xi+OHTuGUaNGtRFL7aU5cODAUaDENZsJtjzcgO12uyH82Ww2+Hw+OBwOtGzZUjQKR47lx044hLb1CkSDXGDVqlXQNM0Q2ViSDVmWgy7FogUhBOfPnwfwe3KnggLRfgKBIPoMuf+hfjoccVv+ynj+CQTxQlR2MORJt1TZwbgtJ+d2XuUMxfV4+opfY/7mIKf9ymGV+b1Wm7rn8GyHdf/KPABUXOw5lq+hU5eui6vq4G3WrBnJzc2Fz+czDu6yLMPhcEDTNCQkhJ7VnlnfWa1W498URUFhYSFq1KiBtWvXhmwtcv3115PCwkLYbDYkJSXB6/WCEAK73Q6LxWKIgZRSWK1Ww03SXKaY3kRQCpfLBbvdjh9++AG9e/eewuO9kydP9lx11VXG35lYx9ySw/XZ2DNLC1Z2ux2rV68Wq2w5GDduXCOXy2VkAuaVoKW00MjGVEpKCubMmSMswTjR6MXimNkvSARYvmThe6JVgIYNGzY0W3Gzyyg258UCNWvWBFByeVBYWIgvvvhCNJxAIIgJLBnniB7HhnTqTJEZWFA1iY4AWIkYdIx23/X6modvfnVIB96lW/f5obTDxnVrx0S6jOrMBNpDn/Gnyh2q/FXefLtHjx7k3Xffhc1mg67rUFUVgUAAkiSFxcXTfOCRJAlutxuEEDidTni9XnTt2hXr1q0LeaykpKSQbdu2GdZjTJyQZRm6rsNutxtWGISQuLD+AwC3220kT6GUon379tz65AMPPGCXJMlwDWcCIHPpDofIZD4Es+c5HA7DWnPUqFHChaKczJ8/36hHFgcw0jBRncX5JISguLgYPp8P9913n2gUjmxuteKxWCnL7T8/8LBoEaBZs2bHjx8/DlVVjfVUlmX4/X5uIv3lxm9hYaExFyckJGD06NFizhUIBDGDJY4zA0sEUGfWFiKgoMoRNR+GUAKE9t5/94u/zKpcXL5yHWpDFP/iKfjpPNvrmyv7u9129n/nq2VL3orQxpYcedlKJVK5W/aPba9uqy6DeNy4cWTSpEnweDxGjDfmphQOVFUNSgLCstna7XZomoaePXti1qxZIS+QHTt2JG63G263G0BJfLJAIGDErgNguDVLkmS4PMUyTqcTAFBUVIS8vDxu792+fftt1157rSH+MYs/ZqlHCAmLBQtrF/PzWBbb5cuXixW2AowePZqcO3eOS/IPc/sx4Y+JxQ6HAzabDZqmYerUqWLjy4m0Xr3/HStlkYmOHTtyReZulGTptlgsQRdqTKSPNoQQJCcnG1nEP//8c9FgAoEg5ojnzMAScWPFkkVvi1YUVCWiGsRkZ9qmTpX93avp4iv0LAfN+e7bQeEqz+fz5iwNVfzLvnZuXE0SI8aM7R5KN7j9p2Fj1ZlOunxReFyGKKVEz3JSOsupN0yu3AabguD+MU92rE4Defr06SQxMZHs3LkTmqbBYrGgqKgo5OeyZzGLO6fTCVVV4fV6EQgEYLfboes6xo0bFxYRsE6dOiQrKwuBQACKokBRFONwo+u6YZFosVjiwgWYBYvfs2cPbrrpJm4boNOnT2ezhBLsoMrq0Zj8w+QGzJ7JnhcIBEAIwcCBA4UlSgXZunUrrFZrUNIYHn2UiQmqqhr9xeVyYcgQkQyY64Yshi4PW6zrtlO0CPC3v/2NuN1u48KLrT+xsr6wuaK4uBj33nuvmHMFAkFMsrd3TpN4LXvfX0Y8Jlp
QUKX2m9F8eZu27baG+owOOTcv1rMcdM7bWVsrE5du/ZpVT+lZDqpnOejg04/eFWp5+g0c/Hj8HTpcJPObhpX/fULR/+h9D+tZDrp43icLKvOMb1YsnxaY4aAlQVdD05F2pW3sVF0HdLt27ciUKVNw5swZJCUlhfw8s5UDE5SsVqvhkstEA13XMWbMmLBYDGVmZpKffvop4ejRo/B6vXA6nYbLlcVigc/n42olFQqUUpw9exZdu3blejBr2rQp7HY7JEkyhCRKaZDrb7gEptKHYlmWsWOHSPxbGQghs1hb8YCJ6pRSwx2YJdlJTk7GDTfcgIULF4qgYhxZXPc/MVHfMhHGn4ylS5cacxuzqI6FGICyLENVVXg8HhQXF4uGEggEMUvL1NQDK6+fPzNey18dQoMJqg9RT2MWrhvvkYGJHdp91+trPctBC7O6uzeuWzumrKzBlFKycf3aMUz0S9t95xux9lmiwfQvD4al7ANPj76npG4N/Jj7AAAMPUlEQVQT6batlxdkd+zITdWznPTWg/dMlMPSG0lYhOV4ZvLkyaRu3brko48+Csvz/H4/7Ha7Ye3HMvEykYLFELNYLMjMzMT8+fNDXiSbNm3qbtSoEfnkk08AlCSUYFaIlFIja3Csc/z4cdSrV4/rvDBt2jR67bXXwu12G8IOsxJhVnrhqjtJkoIsCX0+H2RZxtdffy1W10rQt2/fjEOHDl3kchhpzMlcmAs3+7d27drdKVqGH38edv/AWClLYEayOPAAGD58OGHZdUvPpdGEJSax2+04e/ZsT9FSAoEglrl9wMCnjhXY47b8QgQUVBWIGFThIZ7FP8aOHbmpqeu7xq3bz7dts0d1S+sxWwzrYKZOnUqHDRuGxo0bBx1eyooTyAQAs+WYWSC4aAIxCQXmn1+9ejVuvfXWsI2JjRs30u7du8Pj8cDhcAAoEZsURTEOYmWVw2ylUTrjKYCg2IZMFGN/9/v9kGU5qI6YG605kYaqqkZmYq/Xa2RJnj17NiZMmMB9XlBVleq6biRNMcMEQfZ9WXGszG19uT7Akov4/X7Dmuz8+fOoXbu2cEWrJC+++CKdPHlyuX++rHYp3W7msVF6DDDBr6x5gP2OLMthaU9d1yl7l3nsRQq/3x8UKoDw8qsOkY/efee7oa5nOknQQDhZ4lEQkDKs76vC3iYcHDt2jDZo0MAIT1GebOqXs+StTHcsPT+7XC4UFhbiqquuEu1UwXUyFuI4Ci7d1y+sWYRnf/D5fLBYLBGP88n2kpqmYd26da/16dPn6erStqtWrpzSO2/g30tWnvhBpwQ6qaNbM47IEegPXPdH1a3/+Xw+arVajTNbpC/x2F7BdFaKqa4uxUxB4niTWVU2yK1bt9m1vvnSSfFY9gXJWWuF+Fc2f//738l1111HPvroo60FBQWQJAn5+fmG+1DpuHDMzcmcZIMJB6W/mADGXHJ9Ph88Hg969+6NU6dOhe3kmpaWRggh5LvvvgMAFBQUGK6L5jKycgcCAeOAxr7MrrDMitFiscDr9Robvv/f3t3HNnHecQD/3p2d80ucN8JYUxCClWpiUJrxEkrSorSFjpW3NahiW4c2TSisWseLNolNIIiKEBOTkk2gskYwbQhSaUJijUbDm9aIAqJdBM0I64pKUYOGrG4QksX2+Rw/+4M87sW1EzuJDxt/P5KFMH7jntf73e+ex+FwIBwODwkcJAiGxAZNuemKpmmxW7D279+PsrIy5UEE/9ra2n4rMyQNw0h4wimPR7LJrbV8kz0vHwMDA7Fj5XK5oCgK9u3bx0Y3Bg0NDYrceVTW10SP4cplpPKUn2HN6E3UD8jdiA8fPjwubVn+hnA4bMs6agUFBbGNhLLhls1U/WD9TxYqgG3BPwAJg38AsLXWy4wHAAcPHoxl5Uaj0ZSyqJO1SWsbHOkETY4/8UF8+ZkejwdHjx5lAaU7bx+8jZuP7H3Yeb1Gzh9N04Su67Zs8iMDnA6HA263+3o+tb/nlyzZpuRY8A+4v+SVKj5X/9n4I38m6qCd86N8q39yPiiTXDLN6XTGvi8brz1n3S/KtUzAh/HqeMcHHyyuvPDMu7nyez9U139W+drvpnJKm7pdu3b1LVy4sLCqqgqqqsaudqYy6bGekMhByjRNaJoGVVVju4rKAGNLS8v769atG9d1GRsbG2+Xl5d/dc6cOaioqEBZWdmQzDzgi1u15G+VJ03DdfwyKCLfY92ExLpTqtyhuKurC3fv3kVHR8f/tm3b5nvQ5Xr8+HGxatWqWHbnSLeRWnfxHQ3TNFFQUBC79dcwDBQWFjITZYxkxmsqEzjrn4mCD4mCB8myZeMDjE6nE/39/YhGoygqKhpzuYr7oCgKAoFALJs3U8LhcOxCweCJXc7UzbOn3tm5+FrdDvUBr8UnoEDbGGCbvl+fhNPpjPWvIwWV48ea+PaaqN8dri+WbVR+rzx5y5XM1mwsS8peMoPGjvotBhulYRjQdR3BYBAulyvT34lAIACv14v29vbttbW1u/KpfO/tcQmfOze7rqgY/52N7Z4f5Vv9kxmA8hwu0xtJGoYBl8tlayZzOhzZVkDqxqCSK0HAM4/9eQ+w/KFrJHPnz28HoESa3ELN8r6546l3vzV/QdVJTpXSEx+s2rt3r6iqqsLEiRPhdrtRVFSE4uLiIYGzRMGi3t5eFBUVQVXV2JVTGRSU7127du2C0tJSsWLFinGrTZs3b34k/rnm5uarPT0933A6nSgrK8PkyZPh8XhgmmYsM1Fm+4XD4ViAcMKECdB1HYZhwOPxYNKkSdB1HX19feju7oZpmujv78edO3fg8/k+3rBhQ9buZFZdXR07UUwloJssA0U+nyxYal3/Ud6qoGkarl69ysY1DkKh0G+i0ejPrZu2JAoUpBPATfR662cnK2tN0+D1etHc3CzWr18/pjbc09ODkpISAIDb7c74VVHrCX6uxUieW7psp9lUuAMYQHSwmKzjcVQM/XumKBAwmyYI56b/5n2Q6dChQ6ivr4eqqggEAvB4POkdyxTaa7IlGKxt1NqvHzt2jB3mKPsGuzZbotGXUU9Pjy3fJYONcv6q63rGxwxLAB/BYPDRfCvf4q0h5WTrXxqf++R7m9Ld/FFg+AwmMWQMszwn4v8Vw3zS/dcFTAUeJ7D9dCFeXxrB9jMTsWvJHQEEx/V42D0/yrf6ZxgGCgoKoKoqdF3P+PfJCwiKotjWj6U3t8tS2RwE/OWpUvz6r//Oi8lwpKlQqMpAVv62E5P/eGh53cs/5jQpc7q6umbcvn17XXt7+9by8nLHlClTUFFRgfLycni9XsirKdbbQWXgT9M06LqOe/fuwe/3w+/3Y/Xq1cxUyJADBw6I+vr6WHZKMBgcMshZJxPxAaBkE41UMlzk7tCDgyrLd5zcvHlTTJ06dcT1N5NlAVqzXVMp50S3KcigYSQSQXd3N6ZPnz6m8pVZFvFr82VsHmG5pWvw/5Nz9fPw7/dd6jr1hwWvP/0p3v5K09uPz3ryF6FQcJq30NcRHRhwJTnOHgAIhYLTIqZZGg6HHgn09T3W19
szfdrdy7Mqo0cehYAiRBCAgrshN/4T8mBG6R0oEFAUAQUCAgqE0BCJeuDa4s/7tt3V1TVj5syZH8u6NVJ1Gi6YN1x7TLX/FULgxIkTWLlyJfvdNPX394t0A7hkLzmXsaPfNk1TOBwORCKR2N0fmRYOh2WGI86fP19fU1PzJks9f9k9P8q3+ieEENY71OxgPcbZNv/M6knDxfPvra36+5KWbPpNp7/2VtMLy1dtzqdO6ezJd3bWfvTSjmz5PX9y7r7yw1c3V3K4ILK007NnhVyTUK5LGJ/BKYNCchySg6D1eWummCVwknSduZKSEkSjUXR2dmLRokU8ER0ne/bs6ampqSmODyZYj71c5zFZENAwjC+tISZfJ3eHlhmyMivWWmdM04SqqjAMA729vWhoaBhT+ba1tYm+vr4hO4pnkqZpME0T8sSurq6O9ZPG5MaNG8IwDPh8PhQXF49Y/2KT7bh+M/5W/ET960jrFN26dQt+vx/z5s1jvR7lyTZlLxkIsePEuaWlRaiqGhub5JiRSdaN5NasWcM2nOfsnh/lW/1rbW0V1g1A7GjfqqoiGAzC5/Nh2bJlDACmKxuyAYUAtE35uxvePzo7Hz+y/YV/7X72waaxnnuidcvi2ucbOVQQEREREREREaUmpwJaW1/Uxe6l9m9czHXmvnDl8uUnZrUv+tD+tQEVqFyInIiIiIiIiIgobTkZULEjI/BX54rwyo4zU2bNnn2L1eTLLnd0LJjz3tOX0l24Na0yOK9h1ca27z5VXfMWjzgRERERERER0ejkdEbVtWvXyr9+eu7n4/mZ52a3bln8LG8xTdXljo4Fn31y/WcX3/zp93ev6B+Xz2ybduSNb6986VUeXSIiIiIiIiKisXtobqm8dPHCdz796OprL/durB3N+zufuTDnycrKTlaJsTnX/rcN1VdefOP+QvKpvUcIBee0TdeLq+teqfzm3Pd5FImIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiLLe/wGdN/jFkJIJ3QAAAABJRU5ErkJggg==)" - ], - "metadata": { - "id": "-BV5wSJzQ-ev", - "pycharm": { - "name": "#%% md\n" - } - } - }, - { - "cell_type": "markdown", - "source": [ - "### Automated high-quality background removal framework for an image using neural networks\n", - "\n", - "\n", - "\n", - "- 🏒 [Project at GitHub](https://github.com/OPHoperHPO/image-background-remove-tool) 🏒\n", - "- πŸ”— [Author at GitHub](https://github.com/OPHoperHPO) πŸ”—\n", - "\n", - "> Please rate our repository with ⭐ if you like our work! Thanks! πŸ˜€" - ], - "metadata": { - "id": "Yq1sa5BbRV4c", - "pycharm": { - "name": "#%% md\n" - } - } - }, - { - "cell_type": "markdown", - "source": [ - "This notebook supports **Google Colab GPU runtime**. \n", - "\n", - "> **Enabling and testing the GPU** \\\n", - "> Navigate to `Edit β†’ Notebook Settings`. \\\n", - "> Select `GPU` from the `Hardware Accelerator` drop-down." 
- ], - "metadata": { - "id": "lrGOILABYqXx", - "pycharm": { - "name": "#%% md\n" - } - } - }, - { - "cell_type": "markdown", - "metadata": { - "id": "sqwsUfoI3SnG", - "pycharm": { - "name": "#%% md\n" - } - }, - "source": [ - "# Install CarveKit" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "7C4rC_HQi1gq", - "pycharm": { - "name": "#%%\n" - } - }, - "source": [ - "#@title Install colab-ready python package (Click the arrow on the left)\n", - "%cd /content\n", - "!pip install carvekit_colab\n" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "#@title Download all models\n", - "from carvekit.ml.files.models_loc import download_all\n", - "\n", - "download_all();" - ], - "metadata": { - "cellView": "form", - "id": "EPjtRXRpQ2k7", - "pycharm": { - "name": "#%%\n" - } - }, - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "pF-4SVcB3gjK", - "pycharm": { - "name": "#%% md\n" - } - }, - "source": [ - "# Remove background using CarveKit" - ] + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU", + "gpuClass": "standard" + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "![logo.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABQAAAADWCAYAAACOuy9iAAASgnpUWHRSYXcgcHJvZmlsZSB0eXBlIGV4aWYAAHjapZpZciS5EUT/cQodAXsAx8Fqphvo+HqeWWxNc2gya6kpkjW1ZAIRHr6Acudf/7zuH/zLOUeXi7Xaa/X8yz33OHjQ/PtvPD+Dz8/Pr3/l8+xvz7tdPy9Hnkr8Tu9/ts/z4ev5rwt9fofBo/KXC7X1eWH+/kLPn+u3bxf63ChpRZEH+3Oh/rlQiu8L4XOB8W7L197sr1uY5/29vzba3m+nH+F+3vb58Pf/zkb1duE+KcaTQvL8TOmzgKTv6NLgQeGnXmbdKfNYL470vje8BfmpTr/+dVZ0tdT845u+dyv81K34WbX73q0cP29J34pcf/3+8XkXys9deUr/lzvn9nkUf3/ez/BZ0bfq6/ve3e6zZ3YxcqXU9bOpry0+j3jf5Ba6dXMsrXrju3AJe746X42OLaCw/fKTrxV6iPTjhhx2GOGG8/xeYbHEHI+LxoMYV0zPky1Z7HHRtUD/+Ao3Wuppp0Yv19P2nOKvtYTntt0v99ytcecdeGsMXCwIF3/65f70A/dqFEJQLeN4asW6YlSxWYY6p5+8LT/IfotangJ/fX3/Fx4EZ95FlTUincLO9xKzhP8wQXoanXhj4fc7g8H25wKUiFsXFhMSHaBrIZVQg7cYLQQK2WjQYOkx5TjpQCglbhYZc0qV3rSoW/MRC89bY4k87XgeMqMTJdVk9KanQbNyLuDHcgNDo6SSSym1WGmll1FTzbXUWq2KFIcly86KVTNr1m201HIrrTZrrfU2euwJ0iy9duut9z4G9xxcefDpwRvGmHGmmWdxs06bbfY5FvBZeZVVl622+ho77rThj1237bb7HiccoHTyKaceO+30My5Qu8ndfMut1267/Y5fXfu09W9ff9C18OlafDqlN9qvrvGs2dclguikqGc0DBUJdNzUAgAd1TPfAvKizqlnvkemokQWWdSzHdQxOphPiOWGr965+HZUnfu/+uYs/9a3+L92zql1f9i5v/ftp65tcfB6OvZOoYrqE9N347zN7vBlnHnHWT3sM86d1gbFXoWyrJ6WH2vdkNjisev3mdSyrzn7iOmKg0K4tu2uOtZtUU2ekNuld2YpQf51jxxOWbud5VnailPvL3Agd7uhnFZOoS63D2+9150MST7p0izQGAoLOY72WWANJ6xe5g5suQJIKDbX0KHHVXu/6ZZ8aRFUeffJhU7x1h5rLy2O2M5xtcczdq1UfvAqpUyQ7gZmtDqm7rErY9tK7BdFqLYHJUrGdYIhrPecEiidGzNzlzPsdmBwkgUIGNhZCXfNtU6GqMMoRc2Jt/d1rPFC6PWqRH7WPkGiEwWA0Mg8LjAV1yjzZsoG/m6r5TA2l5faHADh8NmTZmzd1pmt5jMK+jAHkl3XZWu8wBiI2OhM8afxPB+Ku2hZK11av5jmxrTdNFgegKAgLacqLGRH8dasjT4xICy2JK45xppIWC/1tr7ZeSw1lZLarsY2Z0zbnjfrcs3PHKbzNulKLjtVD7RCSjFf83gUes1O6/T9lnQRPEvROrgElm+hMGHzdnwOy3Z2dkxjlb0CPYunB0+ZIu1La+MtVfNa8mhlgapY57E+zqad1xrbBSrrsnU3GNdSqFA6YeZ2QhuUP8SRIzWETUI7gGqeZhEuuGVcDx7upKJWB4g6Iew83Ur0bO0NIqxCYyrUYESBD8W7RbAPB1Av+bfYBoN5G3v1ljLAzle9g7PHneXQTlYQNw9qfgyEALUaNUl92RQRLQDfIIp0PMCZWkUuFxQyIWNVFxke4Ao8LyqCNT2+rcbja0MTX4ZWMbx2bou1qSXD4IdRWMjofrLPsh0QNcY+DmHf6hoHalumj3Odfo8u00UcleJ6baM17kw/9uQnY3KvaKSPm6CPgzfok3fu2m5i+gaDw70KtNf2jcB1NOSZqyLB3Eu3CWvAFMs0FA5jRN1uO8Yjf6DWOsKYi2ay8zh7OxFK8rzgb/YVLCH6NI6ts4/LwDar+To/bhuzYI8o92JcmdJcz95F2GOWsQCedz/U0XgTc3i1UXgbEJ0JeVTq7QK3sdHZWc3bJiPV6Qv1iBPqYO4n1zyHUrBpcLpAS+8Hbk+IGFC5swsZjoLuIXSwsgIrAH+QJPbpfVMhrreRMO4kJNeNWAQYyYPNuVqGEgNAh494j9hla/xyeIdRj7mcftbR6HylANXf3a/lsaFLKSMd4TtUhHjj2MoY
vZTeZZ1Qt7q2WY92FjpDdRn+Jv2idIhogtK9bnC1qvRSCWvoobtbINDB+KNEntvz5j00dRu3wubknCC0hT3gfjH0DI7IN+cshh5WYS5uOdfpqlMt6UhWE4Bwd+q89K+iQudAsREIzjVuoa43enRkoViRG40DY9YJsZGNWB/ittetZ7UGHGVMIJv0Vk60D96tY//UEG2I1WG1IbLcA8IcmmsVV2HoRxPnimxup/Vn6V3y9+K0aTAk9kVKnq0X5pmtoTdM9Enw+gaQhZ7hKA22LYGlhg20O1wGAY9U20IROtiLueH/DxEVNaUYhiahAVKSBqM45JdPe6J23RBEsTznmkTGiHHpvaHcmUqLsjIMBa/l+TB1gbHfDh7bZBH1MIUJnwowyAvbmuGnt/7lnaK290VefV98uvb7lS7E11A/bAiS1AeoIzmc0/eE1gk/B1rkU+bvDGcfWlkMgZSWdIBaASoGiBzHf0KYPUDZ8doDO+rGR/aFvSj/hB3CsNIZXmQO1Q/HDVoL5892IGwwua3v3jKyButgB+vdV5Wl+fihjaVjePEbZRirncgwHYQ+nOZU6j0HowR3psxsRthqo/c2kvZL72+GN9GjTSLCyqDgtAVg4ii3CPEAyIlkRYkHE2DXplQKFccRJc1kebm5CvsSNpAmI9oK8nIpXsDqYX+dyM7SOhUulNltWRamQlGZ++MKgEsILBFyoqURFsEqQlDYQbLnlOsc6AiudtdIuXnH3jvMKX9ojCEEUOxQD1FBXeweBQCkKNTEedNB3PbES7EP/KG7VigOHxVPnPggCIZ48HMaynoKHmJdj5wzOnhbUE8r68QtIL8sE1fVkmvroIaIw8Y037O2KA0aXJkl4mdIndQAa0xqG5jGHgoqink+GAKD1CO1hSocSwFkVoRIo1GoU2uTcWXBtnipPTjqdUpT6NCtMAB3pjGtUd+pSL4UITYyQSX86jXS+VAB7SS1XM01MYQ0iZ9MsBzklGh3wYxkyLvgGJjAwTj66yyiT9sbDfaydlh5yI2WPlWqKUTuu8AVlhszgEfZ5CPqsKjDZfi7Mc2rOMOCQaQkf1wl/LMy7oXcMgkusBlm+hFVeUgcCRadhz70TQKrF9vCyMAryRws1YLE7K4j0GKdYTn8vJgmX49qElOAJFgiBtDbeRJhYsjJV/WFiWUPLsmi4LrKTPJoF+Eg27AyUhJzpJpSIrznhHgb5pgZCwgASsHqAH+fEUQt18khJJDICkjsPP+6kI5JjI+DYH3HpEWLRBS2R9R0KiMjZtwEmju5++Mw7Soqg3gHdMSK2eihFrzR2yIyMgAZpky7BKGHW/oZPRvH48tgTrZ8vEOFALnuBKDEgrRAvHwlMlTfgx8PXOVwGEYPzhc7w1sG9JbVozjoiLmBSgCo8ohkRGw0tVAVColSCnAMI/akNdlBzClgpd/E54PfJI1t3McN1ZGqYCsoEYlWLbKsQedFJmlETPoG8WilRIdIVbmnqhZVkPCWhY/G6YgMuJUNnhseY4SsASMvJzZCUZljFjyiWI+sR0dxPJMLIzHyrxFkw+KlOQTt0jZQeCJuRBkJaX3QgVAQaaAu5oEQBzpoowLoxnIP6sxj5k1i4avbTBPuEbdSmLpCaqIQCQUkITBl5jXhCS4zRkvg15aEVm6Gb2Lu8GAze7co6IHbEl6fqF2lYkFqChJnwsBRksuF8XI4fGQX5uS6zDqdu7A7tnAtk2TT84PjB3wwBAY3Pz4g4e1AW4uo+y4mzieaYuYhBeapIQzkORQ4ERIJNzg2AIJiwBHKVgsKJGMo81DoIicRsGiYmU6wZpTRbi02lEpcVsIs8xBck0MEmNdEdAnUG1dJgCawarZeC9jT02RGkoWMYh0/jChcQQ9PDT8ZiZOtHYCCJnd4T2LXoGZiB/k24ncLMByi5ycW64KYi/sOFUymhjHpfM4eE9EYIhOFoPKXCyCfHcGID7Hq/CInIj9uoeJk8YbEzHzEgpGoRJqsEUAWMkofuFEoBLIhg+hYgk1AE7psfdeBPy4Pe792Uyfs4zkC4fqYvrvdFIEpMpFfVrzQFmiZMMhDJZqOi9fuS3NFZp3woipHewdNh3LjK1yuCDNQiBZmOXEjLEWSVdeZQgA2gU6QezsxZMHtixlNZaFnmG002oN6EOgGowtfZ5IVLpBMwrw87mcRgcQ2awtfIjZuLkeJ5RA9ixcIK5XIhGcqDpMhw6uJIRXVB2OxIEJASwwZsNUDjxYbNnkmv5RgoUkdOFT0AfKP8G90iBFBh5xGfDwYqyX/o3jeUQAdiuTL0glatQnpAAJtYnOLQK8zDkKFh+C3q2/l1342FINYUEWcj7kJCQSSfyeNNFwsg8Q3QYO1LsqG0AHnTAZ3cqpGqf/bpSTRr+zjFsRIz5Ef/EfhZ8EswdLErLg6pc24pR51aph6sEx/CdypKccwZTCEDgbAErFDhzVVFgo2kAcje6O0afCZSsslTgmvLhO1B7oKZSO0CEssKAQ8KmnAQcWNLi6/3oGZpG09cgS0h6wAbFL8oWYLB+oLgyj1ABidcLHy08dA2GWHLW/JgKktQJVlHcfVN1fFaW2vEXxj38SzUKUtz/PcBiG7/I/SkWaglILEMdSIG05xMW1QLX7FNu+vwhcmsLWFqpPEABw+wbCRVUTRsLS1F62YrIjzVvNY5dCJRXH0MK80vbg5YW0s48mx0FxrowDEnJV0xNrz0PHmWvgZZBrBhO1Jn0e4bqO5BRXCzdjXBcU04I9L7SAHI0lCoiJtcM95FHgpOANrmD0Nj3ydrzoxwP07mHiAp2tGrC5VfxjZsD1qHzF2d96khEt/lekCPgICJDEQlshqdKDAxRkD7TTBajbd0imu4X5Ia1u04jslwZIVHUeQTHmYFun+pLoYTLzjwjHITzOnihDg640unqq1AXFl6Jzgp2NGBDM2YrrUFUdYIMQ4dQQ4IWiMb4c9qcnmQmv3Tg86nSa8IBSmJMIIc5mgRMi+SCNKEnt4/SD3cDN+Yg9Q+tJzRmmZYK/cgaurDHInKitxX7ETKQYy0dGpYV5pSZ8HzSe+0DbWeuSpZRAIfk1nuQMXyHhdCFoHuBHvSLTAZiqXw7GBZMrCcRZLZp6ZGS3oD5OQKRkf5zCd/JvOSd9jDHtSNZm6kBJJYaxbVqET9VqhgfS2IrgLb6u/CE1GeciuWHJLUYMWoXQnsvJda2XQQHlA2D3uc+Ww4iCF6QQVptOhEXO6YkTtJ8QObMDR7hullo5iPz3K4Mm3Ht65eaHeKT/KSychGaCXpgQbNQKjUtQsKQewzUHJ8phgh1Ydj0FDiNGVNuSaYbe8icciCBRNTgNhHTtSb/j26pg4D0+2cCTC95gPc8nQblmaCS1uhjGdeISZjluZmogJca0dWDsmEd0xGSSQjNdwXia9JUYUijuHjBcaEAKncUydiaG34JBtNlm4paO3MBAeJLKT8wjskB/TX3GYWb6h4ATzTuxgMFwejJP3lNAhbfSUNE9taBeSYralmOvRHIhaROrwvspT2Grutxu+jwHBIJ8ri8mke1I7yTiMxvw
v7Htu8T0s8QQ47JBmYbA1zIWug4d7G5T0xz1ATebLRyfO1U+lMloPAhANnRbOCMmCxEm0kvDu6NAG0+RDwzqJwh3JspWEs4gP9VJUFEpsUXQEpWbcQlQGoCcE5WZUtxGzYCUCHAFi4eQonO8EoEHZIBQcJahuOr6ZOgEcFEToDrmFGJb3NFIpizlweJH1Ho/RapyBVofWV2a8nIXCTYgHjksgB5pCf4jToDnKaDQ8ZJsWEU3RSG9bxxZQj/4WDse9JzhFf9Uo9A5NJRYny6fOXPU91WcdTzIP+msiJO5ArMm9d/kWFIMts3kFHEKo5x6TdZLFKnr1OD4vF2zP8Vcmg3nKmSsjglnRiYH+Bvv8RUc0VgFdwndElLAxC1XnsyQEXtEhA03IDHhKW8g50qU03LiioWjylloqTjxvgiZDpj+WowMYDhqtoy5oQ4NQ1HJ5LjJKJ/a1RtLD+SMYiEZMlYjYG/uhFBiyyHBBNwqDqj0aOUn8iBy4Bb0iDJqCMjWdlNO1TdWxiV3nW+mDSpz01v/z4t83RmaqXjgqCgAAAYRpQ0NQSUNDIHByb2ZpbGUAAHicfZE9SMNAHMVfU0tVKoJ2kOKQoTpZEBVx1CoUoUKoFVp1MLn0C5o0JCkujoJrwcGPxaqDi7OuDq6CIPgB4ujkpOgiJf4vLbSI8eC4H+/uPe7eAUK9zDSraxzQdNtMJeJiJrsqBl8RQA8GMIiIzCxjTpKS8Bxf9/Dx9S7Gs7zP/Tn61JzFAJ9IPMsM0ybeIJ7etA3O+8RhVpRV4nPiMZMuSPzIdaXJb5wLLgs8M2ymU/PEYWKx0MFKB7OiqRFPEUdVTad8IdNklfMWZ61cZa178heGcvrKMtdpDiOBRSxBgggFVZRQho0YrTopFlK0H/fwR1y/RC6FXCUwciygAg2y6wf/g9/dWvnJiWZSKA4EXhznYwQI7gKNmuN8HztO4wTwPwNXettfqQMzn6TX2lr0COjfBi6u25qyB1zuAENPhmzKruSnKeTzwPsZfVMWGLwFeteavbX2cfoApKmr5A1wcAiMFih73ePd3Z29/Xum1d8PL5JyjPoBHfAAAA39aVRYdFhNTDpjb20uYWRvYmUueG1wAAAAAAA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/Pgo8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA0LjQuMC1FeGl2MiI+CiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogIDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiCiAgICB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIKICAgIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiCiAgICB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iCiAgICB4bWxuczpHSU1QPSJodHRwOi8vd3d3LmdpbXAub3JnL3htcC8iCiAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyIKICAgIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyIKICAgeG1wTU06RG9jdW1lbnRJRD0iZ2ltcDpkb2NpZDpnaW1wOjcyMjdlZWUzLTQ5MTEtNDE4MS1hNjBlLWVlYmRjOWFlOWQzYiIKICAgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDphMDg1Mjk3OC1kZTdjLTQyNjAtODdiZS0zNTQzZTA1ZGIwNGEiCiAgIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDoxMTJiY2Y4Yy0xMjcxLTQ4OTAtOTQ5My1kNDJhMzE3OTcwNzkiCiAgIGRjOkZvcm1hdD0iaW1hZ2UvcG5nIgogICBHSU1QOkFQST0iMi4wIgogICBHSU1QOlBsYXRmb3JtPSJMaW51eCIKICAgR0lNUDpUaW1lU3RhbXA9IjE2NTIxMjA0MzM5NzMwOTMiCiAgIEdJTVA6VmVyc2lvbj0iMi4xMC4zMCIKICAgdGlmZjpPcmllbnRhdGlvbj0iMSIKICAgeG1wOkNyZWF0b3JUb29sPSJHSU1QIDIuMTAiPgogICA8eG1wTU06SGlzdG9yeT4KICAgIDxyZGY6U2VxPgogICAgIDxyZGY6bGkKICAgICAgc3RFdnQ6YWN0aW9uPSJzYXZlZCIKICAgICAgc3RFdnQ6Y2hhbmdlZD0iLyIKICAgICAgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDphMTExYjYyNi00ODM0LTQ1OGYtYjc4Yy01ODZiMTIyYTY2MmYiCiAgICAgIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkdpbXAgMi4xMCAoTGludXgpIgogICAgICBzdEV2dDp3aGVuPSIyMDIyLTA1LTA5VDAyOjQzOjM2KzEwOjAwIi8+CiAgICAgPHJkZjpsaQogICAgICBzdEV2dDphY3Rpb249InNhdmVkIgogICAgICBzdEV2dDpjaGFuZ2VkPSIvIgogICAgICBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmExYTVlY2M2LTJlN2UtNDAxOS05ZDRlLTdjMDNiYWZmNjY3OCIKICAgICAgc3RFdnQ6c29mdHdhcmVBZ2VudD0iR2ltcCAyLjEwIChMaW51eCkiCiAgICAgIHN0RXZ0OndoZW49IjIwMjItMDUtMTBUMDQ6MjA6MzMrMTA6MDAiLz4KICAgIDwvcmRmOlNlcT4KICAgPC94bXBNTTpIaXN0b3J5PgogIDwvcmRmOkRlc2NyaXB0aW9uPgogPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgIC
AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgIAo8P3hwYWNrZXQgZW5kPSJ3Ij8+r4gMfgAAAAZiS0dEAAAAAAAA+UO7fwAAAAlwSFlzAAALNgAACzYBvwjYegAAAAd0SU1FB+YFCRIUIcREItAAACAASURBVHja7J13mFTV/f/ft03bKoKCiNjosoZQVgRUVMRgQYrEQmzRxBgVkxgLFogo4ld/KpqIGCvWIIJAFBAxIqgBggi6gBCxRpDiwu6U28/vj+Uc7iwLLLBz587M5/U8+zA7O8w997R7zvt8CkAQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQRMNIVAUEQWSTNWvWtK6pqelTW1vb1bbt0nA4vCUSiXwbiUS+7tat2wdUQwRBEARBEEQmcF2XAYAkSdixYwcikQhCoVBmN+CSBMdxAABvvvnm7OHDh59PLUEQhB+oVAUEQTSWVatWdf3mm2/+uHTp0pGqqqpt2rRBcXExysvLUVpaiuLiYpSWlqK8vBzFxcWQpIM7Y2CM7fU9/v38PcdxkEwmUVtbi0QigR9//BE1NTXYsmULtm7dim3btqF3796vnH/++SMlSWLUogRBEARBEIULYwyyLEPXdZSVlfl2XcdxEAqF0LJly39SK+yzjRgAxONxFBcXN0mbp1IpxGIx8Z6u64hEIpAOdvNCEAGHOjhBEIJ7772X9ejRAx06dMChhx4KSZIgSRIURYGiKJBlGYqi5PQ9uq4L13XBGIPrupAkCbqu44cffsCKFSsQjUbfGDJkyHDqDQRBEARBEPkNqwN+6z6u60KWZSxevHhUv379HqOW2DOWZTFVVdHU7WRZFjRNAwDYtg1VVUkAJPIesgAkiALjrrvuskpKStQOHTqge/fuiMViKC4uFg9AvigBAFmW97RYAgAE/RnZ0EJBkiTIsiwEQAAoKipChw4d0KFDBwAYZts2Y4xh1apV+Pjjj7F161Z98ODBA8klmSAIgiAIgiByF74/0DQNjDHxAwDjx49no0ePJhGQIAiCyF2efPJJZlkWY4yxRCLBdF1njDG2U+gSGIbBDMNgruuyveG67j4/EwR4Off1s6/7S6VSos4WLFjAGGO0MCAIgiAIgshx+LrPbxzHYYwxtmjRohupFfYO3680VTt5v8e2bWbbNnNdl1mWxZYvX04hgoi8Rg5CId54bcpMd2KUWY/G2JKPPxpCzULsD8v/s
+xUd2KUuROj7NZzD2dr166NUa0AK1asOGXWrFls48aN7Le//S0sywIAxGIxOI4D13WhKAoYY3AcB47jQNM0hEIhSJIExhhs24Zt2+LzXsu/XLWQ52X33oPruqIOuFWgJEmwLAumaSISiSAcDqO2thann346ALjr169nM2fOZGvWrGlNvY0gCIIgCIIgMrN2b+rvcxxHxH9UFAWSJMF1XbRq1YoqnMjv8ZTtAjgTY0xCutAuj0qRdQ3RKBYt/Ne1fT4dNMn7HkOEKaOq5UKul/Xr17O2bdtC0zQR0wKoCzjMY/gZhgFN04TgxT/D8cbFSKvfnSIgf2jmCg0lD/H+jbsD7GmR4a0Pr2ux67pYsmQJJEm6vHfv3lNoVBIEQRAEQeTM+pBiAAYc13UZN05oqnbih/7cDZiLgj/++CNat25NWgSRt2R9915f/AMAd2KUTG+JRlFf/KvrU3rBTtqzZ89myWSSHXfccVBVVZxucfHLdV3Ytg0ACIfDkGUZqqqCB9b1Wvo1JP4Bu8SzXLMA9Ip7vE4sy4LjOCIuIP+74ziwLAu2bYukIbw+uFUgt5CUZRmVlZXo2bPnC9u2bWMzZ858mUYmQRAEQRAEQQQTnuCQr/u5kcSe9j8EkS9kVQB85YWn9hhQ/5/Tpz1NzUPsjeefeuyTPf3NnRgrOBF506ZNbMCAAYhGo7BtW2Tv1TRNiFaapgmxDwBSqZQQBPnneXKM+m6x9S3ockEA9JbZW3aOpmlp98w/z+vNaxXpOI6ow3A4LCwrU6kUZFmG67qIxWI4//zzL1m0aBEdYhAEQRAEQRDEQcKt/5oKb7JDx3GERxNjDMXFxVThRF6TVQHwl9v/0G9Pfxv03a9+Tc1D7InVq1c3vyx1a7c9f6Jw9Jd33313HGOMNWvWLO0kiwta3JovFArBdV1YliXM3KPR6G6uv4qiiAchj4vhtY4TNcyCX8de68f6Cwm+AHBdN80CkIuBjuMICz9eD7wOebxE0zQRjUYBQAiDAFBZWQnGGHvuuedICCQIgiAIgiCIAKLruljn67qOUChElULkNVkTAOti/7l7/Qy5AhMNseKT5b06zu++ZV+fKwQrwKeeeoqdfvrpdwJ1Yha3SItEImkx7bjgJcuyMG3nYmFD1HeJ3dNnAj/B7eUe+D3Wj2PI31cUZTdxlH8nr7v6iwR+LV7HV1xxBcgakCAIgiAIgiAOHO/hfVPsDzhFRUXidTQa3ev+iCDygawJgFIjLbScAnTlJPbOiYv6LmncJ/O769xzzz3syiuvhCRJqKmpEaITnVwFB9M00adPH2zbto3mMYIgCIIgCILYT7g3Dz+kJwjiwMnKCPpw8QdXNPazEhimPPnYUmoqAgCcR/fPKnTZ0iUD87EeHnrooS133XUXFEWBZVmIRqOQZRmmaULXdQCgE6wAEAqF4DgOysrKsHnzZlZVVdWOaoUgCIIgCIIgGkf9WOQEQRw4WREAey8/+7n9+fxI49aey5ctO5Waq8An/4nFbH+9Trt/3H9uPtbFpZde2pzHqJMkKS1jFTdlT6VS1GmyjGVZUFUVsiyjWbNmiEQi66hWCIIgCIIgCKJx5ELYIYLIFbJkQ7v/Cn63j055n5qr0HF86WtBx3Vd1rJlS5GplieuYIwJ91/TNBGJRKjLZBlN0+C6LnRdh6IoOOqoo/C///2PjjAJgiAIgiAIohF4BUCyBCSIg8N3AXDqlGfnHOj/vfXMFjTiCxTnIBLCzJs985F8qYfJkyczwzBg2zZs20YqlRKZbG3bFll+eWZbIrtYlgVZlkWmYFVVUV5ejocffpjmMoIgCIIgCILYD0gAJIiDw3cBcHj1788+0P97/3lxzHj15anUbIXFbRe0YAcjZQ3YcNFN+VAPS5YsGXH55ZcjEolAVVWoqopIJAJN06Aoishoq2kawuEwuQAHAE3ThEi7bds2AEAsFsNNN90ExhgptARBEARBEASxF7inEwBKAkIQB0nOjaDBm6++8NMVKyqo6QoDY2I7e3z/OFUEgC5duvxD0zQwxkSiD279xxiDoijYU1xAIjuYpoloNArGGJo1awbGGGzbBgAsXrzYpRoiCIIgCIIgiD1DSUAIounISQm94oOTV1LT5T9r166NafieUtkCePvttycVFRVBlmVYloVIJALTNIXlH3f3VRQFqqoCgPg3F+Enfd6HPWMMruvCcZw04ZO/X/89vmDY0/f6sZAIhUJp15EkCaqqwnEcdO/enTo2QRAEQRAEQeyF2tpaKIqy27qeIIj9x1cBcNH7/7q2qb7r23tK6Bggz2k/r1uiqb7rs1Wr2udyXQwcOPDaVCoF27ahKAoMwxAJP3KZ+id6XJiTJAmSJAlRD6gTz2RZTnN39roD8B/+f/n/4fERLctK+24/YyTWv5Ysy1BVFS+88ALNYwRBEARBEASxByzLokogiCbCVwGwz8pzJjXVdx1ZZjepoEgEi2//EmtSYWTdZysezNW6uO+++xhjDNFoFKqqQlEUhMNhGIaRF21dP7OX1zqPi331cRxHLAa4BaDXQtDrCu0VB7346UqwJwHwwgsvpMFOEARBEARBEHuAhz6iBIcEcfD47ALctBvuPisHTaImzD9mvPby1CPLm7avLPvH7efnan1cfPHFAADDMOA4DgBgx44dCIfDuT8BNSDu1RfmGGNC1HMcR8Q71DRtN8s/biHIXaF5vD3+fn2rQj/wWhzWdweORCJYtmzZ+TTqCYIgCIIgCGJ3EomEr9erqqpqR7VO5Cs5n0bntos7kAtdnjH4x6ub3CzqvjOqc7Iuqqqq2h1++OEAgHA4DEmSEI/HUVZWJsTAvJqQPGIeR5IkIeoBSBMCG/rx0tB7DYlxflLf5XnFihUzadQTBEEQBEEQxO7E43G/r9eNap3IV3wTAFcsX94rE987/qRv4U6MkQiYJ1iPRjPSlrlqML5t27Z13HoNqIuBUVxcDAB5EQi3oaxeXvHPsqw0odPrysv/r9fCjgt7POafpmnC6s9xnLTPBUFAlWUZxxxzDA18giAIgiAIgmgAvy0Aq6ur+1CtE/mKbwLglk0bh2Tu2xlmvPryVGrO3ObWARGmZFCpW7t2bSzX6qSyshKRSAQAkEqlEA6Hoeu6ELdyHS7Y1RczeeIO7ubLM//Wd/NtyJKPZ9rVNA2maQrhj4uo3s/5xZ6sEwGgbdu2NPgJgiAIgiAIogFSqZSv19u6dWt3qnUiX1H9utBPG/9XmcnvPz8DbqOEf8yc+tpL5228MqPX2FH900AAM3KlTqZMmbJEVeuGaDweR1FREQAgEomkZcvNZbzZfvn9cAs+27axZcsW1NbWorq6GolEAqZpwrIsGIYB0zTTLAgVRUEoFEIkEkEkEkEoFMKRRx6J8vJylJeXp4mNPAmHH9TPbOzFcRy0a0dhRgiCIAiCIAiiIfzOAlxdXX0U1TqRr/gmADY7ovUSrEf/zAkJgDsxyuRRKUoPlGMsev9f1/ZZOejSTF8nmYh3yKV6sW27F3d35W6/u/q7FJQyQlXVNDHScRwoigLDMESiEm6xyP9m2zYU
hySg6D1eWummCVwknSduZKSEkSjUXR2dmLRokU8ER0ne/bs6ampqSmODyZYj71c5zFZENAwjC+tISZfJ3eHlhmyMivWWmdM04SqqjAMA729vWhoaBhT+ba1tYm+vr4hO4pnkqZpME0T8sSurq6O9ZPG5MaNG8IwDPh8PhQXF49Y/2KT7bh+M/5W/ET960jrFN26dQt+vx/z5s1jvR7lyTZlLxkIsePEuaWlRaiqGhub5JiRSdaN5NasWcM2nOfsnh/lW/1rbW0V1g1A7GjfqqoiGAzC5/Nh2bJlDACmKxuyAYUAtE35uxvePzo7Hz+y/YV/7X72waaxnnuidcvi2ucbOVQQEREREREREaUmpwJaW1/Uxe6l9m9czHXmvnDl8uUnZrUv+tD+tQEVqFyInIiIiIiIiIgobTkZULEjI/BX54rwyo4zU2bNnn2L1eTLLnd0LJjz3tOX0l24Na0yOK9h1ca27z5VXfMWjzgRERERERER0ejkdEbVtWvXyr9+eu7n4/mZ52a3bln8LG8xTdXljo4Fn31y/WcX3/zp93ev6B+Xz2ybduSNb6986VUeXSIiIiIiIiKisXtobqm8dPHCdz796OprL/durB3N+zufuTDnycrKTlaJsTnX/rcN1VdefOP+QvKpvUcIBee0TdeLq+teqfzm3Pd5FImIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiLLe/wGdN/jFkJIJ3QAAAABJRU5ErkJggg==)" + ], + "metadata": { + "id": "-BV5wSJzQ-ev" + } + }, + { + "cell_type": "markdown", + "source": [ + "### Automated high-quality background removal framework for an image using neural networks\n", + "\n", + "\n", + "\n", + "- 🏒 [Project at GitHub](https://github.com/OPHoperHPO/image-background-remove-tool) 🏒\n", + "- πŸ”— [Author at GitHub](https://github.com/OPHoperHPO) πŸ”—\n", + "\n", + "> Please rate our repository with ⭐ if you like our work! Thanks! πŸ˜€" + ], + "metadata": { + "id": "Yq1sa5BbRV4c" + } + }, + { + "cell_type": "markdown", + "source": [ + "This notebook supports **Google Colab GPU runtime**. \n", + "\n", + "> **Enabling and testing the GPU** \\\n", + "> Navigate to `Edit β†’ Notebook Settings`. \\\n", + "> Select `GPU` from the `Hardware Accelerator` drop-down." + ], + "metadata": { + "id": "lrGOILABYqXx" + } + }, + { + "cell_type": "markdown", + "metadata": { + "id": "sqwsUfoI3SnG" + }, + "source": [ + "# Install CarveKit" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "7C4rC_HQi1gq" + }, + "source": [ + "#@title Install colab-ready python package (Click the arrow on the left)\n", + "%cd /content\n", + "!pip install carvekit_colab\n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#@title Download all models\n", + "from carvekit.ml.files.models_loc import download_all\n", + "\n", + "download_all();" + ], + "metadata": { + "cellView": "form", + "id": "EPjtRXRpQ2k7" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "pF-4SVcB3gjK" + }, + "source": [ + "# Remove background using CarveKit" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "rgm6pR6U22a9", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 529 }, + "cellView": "form", + "outputId": "a908d208-0520-42ec-dbe0-c06e6c4ee260" + }, + "source": [ + "#@title Upload images from your computer\n", + "#@markdown Description of parameters\n", + "#@markdown - `SHOW_FULLSIZE` - Shows image in full size (may take a long time to load)\n", + "#@markdown - `PREPROCESSING_METHOD` - Preprocessing method. `AutoScene` will automatically select needed model depends on your image. If you don't want, disable it.\n", + "#@markdown - `SEGMENTATION_NETWORK` - Segmentation network. Use `u2net` for hairs-like objects and `tracer_b7` for objects\n", + "#@markdown - `POSTPROCESSING_METHOD` - Postprocessing method\n", + "#@markdown - `SEGMENTATION_MASK_SIZE` - Segmentation mask size. 
Use 640 for Tracer B7 and 320 for U2Net\n", + "#@markdown - `TRIMAP_DILATION` - The size of the offset radius from the object mask in pixels when forming an unknown area\n", + "#@markdown - `TRIMAP_EROSION` - The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area\n", + "#@markdown > Look README.md and code for more details on networks and methods\n", + "\n", + "\n", + "import torch\n", + "from IPython import display\n", + "from google.colab import files\n", + "from carvekit.web.schemas.config import MLConfig\n", + "from carvekit.web.utils.init_utils import init_interface\n", + "\n", + "SHOW_FULLSIZE = False #@param {type:\"boolean\"}\n", + "PREPROCESSING_METHOD = \"autoscene\" #@param [\"autoscene\", \"auto\", \"none\"]\n", + "SEGMENTATION_NETWORK = \"tracer_b7\" #@param [\"u2net\", \"deeplabv3\", \"basnet\", \"tracer_b7\"]\n", + "POSTPROCESSING_METHOD = \"cascade_fba\" #@param [\"fba\", \"cascade_fba\", \"none\"]\n", + "SEGMENTATION_MASK_SIZE = 640 #@param [\"640\", \"320\"] {type:\"raw\", allow-input: true}\n", + "TRIMAP_DILATION = 30 #@param {type:\"integer\"}\n", + "TRIMAP_EROSION = 5 #@param {type:\"integer\"}\n", + "DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'\n", + "\n", + "\n", + "config = MLConfig(segmentation_network=SEGMENTATION_NETWORK,\n", + " preprocessing_method=PREPROCESSING_METHOD,\n", + " postprocessing_method=POSTPROCESSING_METHOD,\n", + " seg_mask_size=SEGMENTATION_MASK_SIZE,\n", + " trimap_dilation=TRIMAP_DILATION,\n", + " trimap_erosion=TRIMAP_EROSION,\n", + " device=DEVICE)\n", + "\n", + "\n", + "interface = init_interface(config)\n", + "\n", + "\n", + "\n", + "\n", + "uploaded = files.upload().keys()\n", + "display.clear_output()\n", + "images = interface(uploaded)\n", + "for im in enumerate(images):\n", + " if not SHOW_FULLSIZE:\n", + " im[1].thumbnail((768, 768), resample=3)\n", + " display.display(im[1])\n", + "\n" + ], + "execution_count": 5, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "rgm6pR6U22a9", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 529 - }, - "cellView": "form", - "outputId": "a908d208-0520-42ec-dbe0-c06e6c4ee260", - "pycharm": { - "name": "#%%\n" - } - }, - "source": [ - "#@title Upload images from your computer\n", - "#@markdown Description of parameters\n", - "#@markdown - `SHOW_FULLSIZE` - Shows image in full size (may take a long time to load)\n", - "#@markdown - `PREPROCESSING_METHOD` - Preprocessing method\n", - "#@markdown - `SEGMENTATION_NETWORK` - Segmentation network. Use `u2net` for hairs-like objects and `tracer_b7` for objects\n", - "#@markdown - `POSTPROCESSING_METHOD` - Postprocessing method\n", - "#@markdown - `SEGMENTATION_MASK_SIZE` - Segmentation mask size. 
Use 640 for Tracer B7 and 320 for U2Net\n", - "#@markdown - `TRIMAP_DILATION` - The size of the offset radius from the object mask in pixels when forming an unknown area\n", - "#@markdown - `TRIMAP_EROSION` - The number of iterations of erosion that the object's mask will be subjected to before forming an unknown area\n", - "#@markdown > Look README.md and code for more details on networks and methods\n", - "\n", - "\n", - "import torch\n", - "from IPython import display\n", - "from google.colab import files\n", - "from carvekit.web.schemas.config import MLConfig\n", - "from carvekit.web.utils.init_utils import init_interface\n", - "\n", - "SHOW_FULLSIZE = False #@param {type:\"boolean\"}\n", - "PREPROCESSING_METHOD = \"none\" #@param [\"stub\", \"none\"]\n", - "SEGMENTATION_NETWORK = \"tracer_b7\" #@param [\"u2net\", \"deeplabv3\", \"basnet\", \"tracer_b7\"]\n", - "POSTPROCESSING_METHOD = \"fba\" #@param [\"fba\", \"none\"] \n", - "SEGMENTATION_MASK_SIZE = 640 #@param [\"640\", \"320\"] {type:\"raw\", allow-input: true}\n", - "TRIMAP_DILATION = 30 #@param {type:\"integer\"}\n", - "TRIMAP_EROSION = 5 #@param {type:\"integer\"}\n", - "DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'\n", - "\n", - "\n", - "config = MLConfig(segmentation_network=SEGMENTATION_NETWORK,\n", - " preprocessing_method=PREPROCESSING_METHOD,\n", - " postprocessing_method=POSTPROCESSING_METHOD,\n", - " seg_mask_size=SEGMENTATION_MASK_SIZE,\n", - " trimap_dilation=TRIMAP_DILATION,\n", - " trimap_erosion=TRIMAP_EROSION,\n", - " device=DEVICE)\n", - "\n", - "\n", - "interface = init_interface(config)\n", - "\n", - "\n", - "\n", - "\n", - "uploaded = files.upload().keys()\n", - "display.clear_output()\n", - "images = interface(uploaded)\n", - "for im in enumerate(images):\n", - " if not SHOW_FULLSIZE:\n", - " im[1].thumbnail((768, 768), resample=3)\n", - " display.display(im[1])\n", - "\n" + "output_type": "display_data", + "data": { + "text/plain": [ + "" ], - "execution_count": 5, - "outputs": [ - { - "output_type": "display_data", - "data": { - "text/plain": [ - "" - ], - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAwAAAAIACAYAAAA19gs6AAEAAElEQVR4nOz9eZjv2V3Yd77Pd99+++9Xe91bd+vue293q7slIQmBBGZzbCdecDPGBgQINyBbYDDYCSRzrXGSmUnizGBsHJPBMTAwCbKHiVdmMME2i3apF/V2t9rX37589+XkjxLEfjIzsaUWRbfP65/7VNXz9HPOqd/pOp+zfD6gKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIry1iEuugGKoiiK8kb79V//deOle9vvMQrj9Q996DvOLro9iqIov59oF90ARVEURXmj3d3evlmUkw9nev9b/h+/8subar9LURTlf2FcdAMURVEU5Y2WFJMnHX3+TKvm32R+70/8v/7J3/wXWez8xrd883f/TxfdNkVRlIumtkQURVGUtxjBX/3PfvRvh/39D4gy06oyNZc3m+mjt28cV+bS3xqcrPz0d33XH59cdCsVRVEuigoAFEVRlLeUn/u5n1t66TO/8T/uvvLiu6sypdFokKZzbj2+xNve9VSie2v/aFZ0/stv+ff/zCcvuq2KoigXQb0BUBRFUd5SHuw92Do9OrrZbDYlGqDndJeW+eTHXudf/tNfd4ge/smue/aLf/tn/sa7LrqtiqIoF0EFAIqiKMpbShlmW1kU+45vi95Sj/liQa3lYfktXnl1xD/7Rx/HKgfXNtbS/+znf/6f1C+6vYqiKL/XVACgKIqivKXE8aKazeZ6npesrK5y+fIV4jBha2uN9Y1L7O7OeemzDwmM+dc12w//0kW3V1EU5feaCgAURVGUtxQ38KP5Iip++2OfYu9wl3avSc2vc3w8oNGus3llncEgZnxwjKtPPvQP/ukvvOei26woivJ7SQUAiqIoyltKu7dkPv2u95xtbF7l05/a5tOf3ibLDDyvxSc+8QK249Bottl+cMLZg9datjj6q7/0q7/auOh2K4qi/F5RdQAURVGUtxTbsO+2l7uLxx77QxzsnvHaK3f5/Kv3uHX7Gtp9nXt3j3BthyzLCecDhLf9B9qb7f8YxI+CvOjmK4qifNmpEwBFURTlLeX9H/rQg6PD/sm9u/eptzTe9weeZnPrMkI3yQvJyemUh7t9JpOC1+6dcveFB6KYHHzol37pp/74RbddURTl94IKABRFUZS3lMc1LROYDwfHEwZnfQwr4cojLZotn8ubl0kzyb3tYwzTo6wk80nF7LTv+V7xfc8++0v6RbdfURTly00FAIqiKMpbi5Rowvr03buH1T/+B7/OL/+9X2PYn2FbFU89c5ONjTXyRHJ43MfzG0znIaPxHPL4Hc9+a/T2i26+8nvv1q1ng3/9O8/qzz77Q+7vfvXssyowVN5S1BsARVEU5S2n2Wy81O4szzStah7vD/hE+Rpf/f5bPP62q+wdddh+uM9gOCVwl7Esg9F4jne43V65VvvgnTu//tmPfORri4vug/LG+57v+fNXWp51dXU9WDMtHovm+aC3UnumKIrNLL19Lw6LiUCbhPPiyeksPfqub//Q33cC+XJZ1itgetHtV5Q3igoAFEVRlLecoFkfN5rBXNBoanpJVWn81r98DaFLvvJ9T/DyS3u8/vpdGjWPS2tdHKdOGqdU+fDZW2/b+fSdO/JnPvIRUV10P5Q3hPZXfvwHP9hueM84pnxHvWZsuoG2JEDEdZcwHuG7Buur9fdr+MhCMBwsOOkzcafi6+Zh+tuaXPzzO3fu/L2PfOQjKjBU3hJUAKAoiqK85ZRlWZ6cnjiTwR5+zefGozeI51N27x9z+/FH+JY//bX8nZ8Oubt9yBNve4QKSbPZJJ3OWqZv/8XNzZ/5/wC7F90P5Yv1rP7+93u1p2+b6zeuNH7Yd7VvT6KpGU8zTvcSzs5GzBYhaVKRpSVe4BAEHr7v0+zV6Cw3WF5tNC1bNLPd9PYile3+fv9jqM+E8hahAgBFURTlLafdDsaNZmcez6e9fn/Apz75adqdGvWazf72CYGn803f+B5+67efZ2fngNuPrZCnMZrm0mrnjxad4t2oxd6b0oc//H1Pry81v6/TMd5Wc4xr08Gwe/flU/pnE6IkJ0kKBqMZaZ5SlhJdCNyFhxBzDN3AsAw0Q+PRm9cRQBpLTdfct49m8x/44Hd88H/4mZ/7mU9edB8V5UulAgBFURTlLSeHFctxvaDVpdapI0tJu9nl+PgeYZSgSQeZJdx+9Bpn/T7HJ3NcR6fXs0iiFL/hvB34Hy66H8q/nR/7y9//h69dafxko2FfmY9CPvkbryIrl6PTMXGSogmTLJckaUqcphRFQbfTIysqxuMBpmFSVAW+57NYRNy4fh3XdfB8cVnTat+VFWL5T//p7//wL/7i3xpfdF8V5UuhAgBFURTlLcfx7LU/9q3f6JtawWI+4t6rD0jmAtPQOD4OKToFszBG6Aa67vPqq8fMJhMeu15y2TJoNLOn/i//3S83f+i7/vjkovui/G/74Ac/eHO9F3zHlcved0/O+kv3X4jY2z9BYjJdnHBy2seyHJIoQlLh+3UsBEHdIspiZrMZRVHS7rSZTmdkeY6IIk5Ojlla6mLqUq+5tdZxP/4aoRWPA79x0X1WlC+FCgAURVGUt5w0De1X7u/Y80mf5aUaV652mAwTFqHOfBpy/cYSR2cjeu0azWbA4cEOwz70W1OaI4etW/LJtebiL/3nP/mTf+3HPvzh4UX3R/n/7t3vftb96nct/5GtLe9/71ry1vBoqH3qUzvMFjG2bVOJgoOjE2pendlihmVZBPUaxydHUMHiJEJoAr/m41gW27u7WJZFvVYjSRP29vfJ8gxTu4zEwHXFymKcXkEFAMqbnAoAFEVRlLecxTA5/ORvPR+P+0eWrkvazQaPPnqNbrvJ4UHGqy/v0mq22X14wFNvv8mVa6s8fH2b4dCjfuShldGyqxff7BbFTwMqAPh96IPPfrD9yOOdvxJ45TdXyWzt+DjnY7/9EpN5ieN5GFJw//59akENhEa71WEynfLwwUNM0yLNMiQalm0zGI6p8hLT1PA8j0pKkiSlyFIMQ0fXdC5fWWd5ZUU/68+/8YMf/OBv/8zP/MwDQF70OCjKF0MFAIqiKMpbji31U0v3wzLTGqZrEyeSl158naeefpLl1S6H+wc0A4EsdQ72R3S7yx
ztnlGUGqbtMO4PaQS9nu/LTWDnovuj/Ov+8l/883/g2qXgTqtmvO+1l3YZj1Me7h3x2v1dtrYewfcCJpMJZSGYzmbAjDiOEAKWe6tUEuZhSFFVDEcDNMBzPQLfw7QMirwkSwsQMJ8vSOIUL9DZ3Fony7LHo7F4m5TygRDioodCUb4oqhKwoiiK8paTaeJ6nBVBUiQcHB1yejYiSSV7e7volsSwTMbzGVkhsW2Pfn+KZjs83DlA1w2SKKFZk826W/ylv/bX/tN3X3R/lHPf+q3Pdf/Kj33oP3niVvujhpa/73OffpndvRFnw4j72wesrV8mLwt29rbZP9gjSSKSJMYwdB555Cbra1s0m01q9QDHtknjEMcysQyDwHcpyoIwjAjDiDgOEehYlofn+QR+m8Usoln3b4L2we/9ru99/KLHQ1G+WCoAUBRFUd5y0ip+7PbbH3X/8J/4I3zzN/8Jbt++SZKnnPUnZHGF63ns7RwRLkKG4z4bW0ucnfWZzWIe3DskWoTouhDLveYzg+PJ0z/4vT+4ddF9+nfdt3/7t3eeulX7iccfW/srw5Nx+2O/8Tr3H4wJU9g5OKLV7lJvNqiERtBocOXaVVbW1rh2/QY3HnmUNM0pK0mS5YxGY0bDIVVRIiSYpklZVRRFQVmW5HmGZZsIAdPZiP7ghJ2dPWaTBe2mb2XpfDVKFl//Pd/zPVcuelwU5YuhAgBFURTlLeUXfuGnWkvL1td2O7bZqnsEdZu3v/Nxvu7r38v1R65hOTquZ2IYBlIaRGFOvW5x5eomVSnZ2T4mSQRxmlBrmiuNhvcVSVHcuHPnjvqbeUG+8d3f2L651fxPLq25f+rh3R3tM5+6zyKuCGN4uHPAbLHAtC0836EsC0CQphllWXJycsbHP/EphuMhw/GA7e2HDAZ9TEPHcRwsy8Y0LaSUaJpGWZZomkZVVRR5QVVBFGekaclsmnJ4cEj/5EQr03y9SIubFz02ivLFUG8AFEVRlLeUMA+/LvC1dz2/dxrnYerOJ1PyLAWtpNttkqQJtYbP+uVLvP7KfcxQA83kxmOXGZ6OqYTg3oNjllcDuqu25jjaI5qhvWN4fOb/yId+5OP/1U/9VycX3cd/lzz3Hd/9vhs32j+0uep+0+svP9D2d0NMu8bwrM9J/4zZLAJNYhgmx0cnVEXJbDalLDJmswWaYeD4LqPJkCrP0XUN27IpZYGsBJqmYxg6cRKhifOFv2EYaLqJkEAFmmaQZjHNpscUgWEGO8I0fyVMwt+66PFRlC+GCgAURVGUt4wPf/jDdrKIN+2GU7N1oWHomBbYtkccJ4xHEbLQiKKEpdUu917T0DCIkpTeSpd3fuXbeOEzL6MJi4OdEavrm2yse4+fHC+Kxay8EWexAH75ovv574K3P/ec+X7X+JOPbNX/w17XfvL1lw945ZUjbLfBzoMdtnd2iMKQbm+ZdqvJ4OyMoihJkozFfIaUEj/wKIqCxXSCbVkYpoUmBEV+/sDXMBxqtTpQkiQhRVEAUJQ5RZJjWiZCaAhNsrqywtJKD8RY7hye/MNf/KVf/GcXO0KK8sVTAYCiKIrylqFpmptHpRwm4/Djv/Fbues49vrmJs9/5gUsS3DlynV0zWAxjbjxiIduVISLmMU0Y3A2oV2v0VtqUa8HdFZWODkZ0PBrdVmkTwtNNJMsn9y5c+cffuQjHykuuq9vZc9963Ndh/x7bj/a+2DN5fq914/45KcfIjHYubfNgwe72K7NM29/O6trS+xs73PW77O6uka7rVOWq0RRSP/sFImkHjTO83VWUFb5+e5/WVIUBUUeI5FUVUWe5xiGieW41GstpCxJkgRD0xmP5/T7E9IsR9cs9ftX3tRUAKAoiqK8ZfzET/zE5G/9xP/h4Oh4lue5MM9O9siykrc/8062H94jSTPiKGU2mzM4HfL0M7d5+cUdZtM5pZQcjPtsbq5wcnrKY29bwam5LC1vEcev18ajaNWrBe/cee21jR/6oR86DcOwsG1b+8mf/MkMlQ/+DfPss88GX/GVq3/DENXXj08HnZ1Rxv3dY+4+3KURNAjDlM3NTS5vXUZScnx8zHw+Z2P9ElmWEIYhmtDIsoxWp0er1cateWgaDPsDjo8OkXn1u3f9s7xECA0pwTQtNE1DliWjyRBD13AtD123CII67V6dOEoKbUeL7ty5o33kIx+pLnq8FOWLoR40KYqiKG8lopDFuu1onXqjWdvY3KKsJJUGz7z7PVx99CobV9dwPQ8hNFY3elx/5DJJlNFp1xlPppSVQVVKXnt+G8OUOIHJ0toStuV0HMd6XDO8PxaGoTUejytHiEt37txRyeDfIH/0/R9ovv325g9trrt/kjLt3Ls/4mgw5+HuCbPZgrP+GZ1el8tXtyirAllqdFqrPP74bRzXZjIZ0T89ZjwckMQJSMl8PmN/b5e7r7/GaDRCAAKQEoQQOI6LadrYlg1CUJZQlhWGEJiGDkJimgKBxmw2w3dsc7Xb+47P7e/7Fz1eivLFUicAiqIoylvGBz7wAbsouFVIUW5dedSaj6fMxkccH+7hOTb1tsfqepuToxM03aHW8PACneWVS9iOw0ltxIP7uzzz1A3iaMFiPGbk7LKx2aZ/ONQsQ6+VjvVsuIiNbrf7M16SHKpd4DfGs89+V++db+v+ny+vut96dDDRn39hl2kSczKYEIYRmtRYW7/M8sYqZV4yGY1ZzGecnZ1h2SbNZodms0er2aOqKvwgwDQ1Tk4OmY/GOI6PYehI3SQTOoZRgZCkaYaUFVKWgMC2PQTiC2lBKzQhWYQz5vMZeayBb5BX+e4/+Dv/9/lFj5mifLHUCYCiKIrylvGzP/uziS7Eb1WljMo8r9AqwrhgMYt47dVXiaMMw9JpdZo4js10HLG21sUPfBzf4D1f8wyaodM/m9LqtNjfOSWOZly9ukyRpcgq01eWG4/blvXVWZat9A1D/R19A9y6dcv6hq/d/K9vPtb6rvFo5rz+2jHTRYFu2UwnE9Ik5fLVaxSy5NOf/BjPf/pT7D68z3w+Y3VtjdWVdYLAZ2m5Q2+lR3elh2bqJGmG5/l4vkOSLpgvRqRZhOt6+H4dy3QwTRPDOH/sq2mCqirJi4SiyJFVQVWV1GotWq02sjLY2T7h6PjwAercR3kTUycAiqIoyltKTvlCmRHv7dxtvuOdX0HQ8EimKVESEkYJuqHTWaqjC40KSRiVeIFJGJf4fs71R7bYu3+MHzg06hqjQcTVR116Sy1GozH1ZqfWiv2N4+PRNxkYvw188qL7/Gb2R//oB5p/+Os3/2qvZXzb/s4J43HBSX+Bblrcfe11Tg9P6bQ7HB/ukaYplmnieR7NtS5Bs4HvB+gIoGA4HJKmCWkWM53OSKMYSUVZ5pRlhZQSKSVlWWHbLpblYOgmZVGSFylFmZHnBZIK07DQDR2Q5HlGkkSYlqSm+9KxvGsf+v4PBT/1Uz+1uOjxU5QvhgoAFEVRlLcUQ1pas92qbV6/ZMziCY/dusK9Vw6pt7o4gWAyDc8rvxYS23EZTEc0mi6GbRDNU9K4Yn1zj
Z2dPW7eukSeFzhuQa1hc//egqVJLEzN2NLQ/rhZCR0VAHzRvvc7v/PGO9+19V+vrzh/5FMfv0e/nzOdh7z6+j3iMGbYHwKS4aCPEBq1WpOl5RUuX93EsHXCKCWLEzTdpMoLFosZo+EQTUgMXcfwHSoJmtCRwGQ8pChzyiIhLjKEriOEjqGbCE2i6waaZmA7JpowEAjiJEXXNCxbZ32zzWKeC6Rm9XqhygSkvGmpo0tFURTlLUVYVEWZzx3Pm1x/5DKbV1pce3SZRtfCdnWEFGRJhUCnKkvW15e4+9o2raaL55kc7g9otup4QZO8rLBMA0OHRssnilNODyfIvGi1Gt4zaHLjueeeMy+6z29G/+GP/NAffMc7L/39Rl3+kRdf2OX5Fw54+eVtXnjxNU6Oz0iTHNf1aTTamKaN5/gs9ZZpd7qEcUY4T2g3fNqNgE6rRq3u8vjjt1hb28T3GwR+HcepUQvaeJ6PZVt4vo/jOGiaBkKiawLHdkAI0iShLHM0oRFHKdPpmEoWGLqkLFMWi4jJbMHp6Rl5ngBbFz2EivJFUycAiqIoypvWnTt3NIB/9SFulglfE0ZydjRMDne2ux/4nv9A3H5qnTwpOT4ckqUVVSmYhjMqzq+PpEnJ/deP2NraQLc0mr0GzsmQJMnQ9PNqsaZlYlsWo/EUy9OF65lBPM7f3z+Z3QJeuLBBeJO5detZ60//767+exur9l+v+calV1464nOf2WY2T0nTjCxOkGVFkiSYukmel0gJhcyZzcfopo5u2NiWwenBHgKDsqyIkxTTNGg0GownI0bDU0zToJQVlQRd08nzgqqSgI6mQV4UmKbENEzKUqeqKjTTwLVshPAo85woiuh22gS+j6EL1jdWOD2ddl555RW1hlLetNSHV1EURXnT8n2W9JAMGAHcuXPHiSbzcValWpZF/oNXH4p//Mv/nD/5p/4gmihY3ajTHyScnp7SbHgADAYL5tOCMDzFMjwarRpFmTObz1haXsKyLGQFfs2h3qgzOO2TLDJAoOv6o45j/slnn3321Y9+9KPZBQ7Fm8L3f//3tzqe8SMbq963+G516d6rfT77mQc83N6j3V5iMp7SH/QRmsA0TGpBcP5Y1zbodltkScZoNEFWJYauE0YL4iQhySLKssJ1PIKaz9UbN3jt8zFpHKFpAss8v9oTBA00oZPnOVmWUpY5iPN7/gCO42AaBlEc4vs+cVEiNI0kzbBdizTLMR0L29D7v/jRj0YXO5qK8sVTV4AURVGUN63JJN4qwPmdrz/ykY8kpV6URZrq08nIWur1ePGzr/PLv/RrTCYhlmuxeaXD6kYb3RB0uwEbW13aSzWuXt/g5Zfv49gehwdnLK12uHf/gDgsKKuMZtvFcTWEqEiiEikFq5tN79Jm99t90/+h7/zO5574nXY899xz5p07d9Qm27/iz/zxP7N6fc37L971jrUfyqPw+id/e4dXXznm5VfuslgsODjYYzA8pdFosrGxxY1HbrG1dYXllSWarRaj8ZjB6IRmq87G+iaaxhfSc47RhcWljS0a9RqHBzuMRwMuX7lKs9OjRJBnBWVREscRaZpSlRLH8bBtH01oGLqBEBpVVVIUGUWR/u6/mibY2Nyk3akT+D6TcSijJHkJUOlflTct9T8nRVEU5U0rSjMvEab+r37Ptt2rMq9qspD5fLHg8pUr9PtT/vmvPs+7vuptXL6xxCM31zjY7iPQCOoWT7/zEWxTIy9CXn5hF8u2eP/XPsn+wwO2Hwy5cnPB6lqX9Y0Oo7MJQgiSMCFLdC5f7lx2LOPP9geh8+yzz95vubV3FkVxf297713PPffcb/z0T//04KLG5/eL7/nA9zz15K2lH796I/gPRqdj61O//Tr7+1OOzg4YDAYYhonj2iwvr6DrJu1Wm6XlDroQFGVJkkZcvXEFz/OIZiH3Xr/PeDzGdjwsy8EybcoyJ4xC4mhBmWXkErq9JdbW1hESZrMZSZqSpiFISRRVCKGRpBGGYSCkpMxLKlGhCUGepYgvlA3rD/pcfeQKfmCxfW8vr8p8fNFjqihfChUAKIqiKG9aUtebNcP41wIAS7PKVCSuoZvS0HVMx+LWk49wejRicBpSyUOuPrpGJeDuq0cIXQNRoVFw8+YVDnb7zGcV81nE1uV1oigDAaZtIDSBppskSUhRGsRTj6kZYWv6ZQPe3rT9P1cV5XVK/qGoqq8pqtx+7rnn/v5P//RP5xc1Rhfp277tL/rr7fwbe0vGh7auBF87GcT6P/wfP8NLL99lMplg2x5ogqzMMCuNyXSI59SY6pKH269gmTZra5v0ltpURcF4MGH7wQ5xFKFpOhUVmqaRlynhNKJRb2HbAZ7rMBwNmQwG2I5NVZbYjk+93ubs7JgwmiKrCttyMHRBVRVomk5ZVlimTlUCZUWr3iQvcsqyJElzFlHE5uWlbL5I1PUf5U1NBQCKoijKm873fd/3LQ3/m+GwW/M7lmV2gZ3f+VmSxjeSJEMTulgsQvI8ZzJakGUFURSxpDc42Bng+z7tpQDTcvjsx19hdaXHqy8/pN1cYjTYJ5xUlHlFOC+YjkOESIiihCBwOD6YEQQBs3FCkpbUao6xttb8uv7Z5PpwNN/Jy+QPSyF7uq6/19L1HSH4uJQXN14X5dpG8WfXNrzvvXql/djZ4Yzf+J/u8qlPv8IinuO7AaUsCdOQIKhhGg71Wh1D0xDApY1L+L7LdDrjhc/tkiQx9XqLXm8V36uxWMzIsoiiqLCtgFrdwDJNtMBC1wyWTZckWZClCXmek6QTsjTHNEyoJCCRVEg43+eX8guFwKrz4EJKkBWOaaI7DtPZgjSrCHynEJpxcrEjqyhfGhUAKIqiKG86dqVd+Shnoz9QeyaIsqT7r/4sCMzHB/0kNDSj9viTt3nfN7wHzZDs3B9wcjyg2WmwmCVYVkSRF2R5iKzOC0Stri/x8d98lZPDQw7aFvVaF8ezSeMS13YxTYswTCiKCl03kUhkJUCTtJd8T2jVI7Kq6qOz7EqSR1GlyV5K/nV//ju+rZnLSuiOOLX8pVfTNNXfykWk3v/+9xtf8xXv+J5HbtR+eHnD2zw7nPGr/+QFXvr8Q0azAe1WG9fySNKc69duoiHpnx5TVTn1oE0Wz4ljnaPDFKHpCFmhC0EUzugjCGpNdNPCNU0M3QApGYzOSNOERqOOrpnomsFsOkPXdUzTwtI0sjSlkjkSMHQTTdPJsxwNqER1HgkgcFyHNE2J0xRdA01KBv0p81mIrRmWFDQudoQV5UujAgBFURTlTUegNwAcyzCEsP+1n5mGeDRLQn0+HZtlFRHFKV5gs3W1B5VgPk2QZcHewZh2u0EcJZSFRpIW5LnO8mqPcJGyfzjgySdWMUwDx3YoywRNlJiWA4ZGpYHQDGxLw7Z1sjyn1Qv0zlKw/vKnq2xvdzKzTK1ajCYii+artudMAt0fl5OjpzXJ+EMf+sBv/9RP/exbcif5m973jqdXVvz/KAi0zbP9iF/71Zf45GdeJE5z1lYuYZkOZVGwstolyzPOTs/odlcxTYN+/5hu
r0fgeURRRBbHxHGIJgS60PFsG9/1sb2ArMwY9I+hqLh69TEm0xHHRwe4tkNV5lRFiiZsCqmDqJAIPK9GlhdoAqQsgfMCYFUlMTSNsiyhqjB0E8/1iJMFjm1h6zq9dg1KTFHKpYseY0X5UqgAQFEURXnTEZqo3brV13RTM6ryf/lb9oEPfKA5GYVzUzeX8iypxuOI8XDCYKCzutwBwLJNLMNmMc/J0hLXDajXJIHvcu/1PZZXOjz19pu8/touhqXjBi66YSJlyfJqi8WiopQljYZLkWs0Oh7Ntkeelwg9Y6nXwA0cq9lodfIsytJ4cX0yPwt83z4WWesUSYJphJ6sX/pL3/ftr5qa+ZrR2zy+fft2+Ylf+ZUNaZRukjIpbHv4Znw78GM//Oe+aWOz8Z+ubQaXXvn8Ls9/Yo9XX39IVpasr1/CMFyqsiSM5hwc7BLFC27ffIpOu0uap9y6dRuB5OjwkFEyYjoZUskK36/jBTVmizmjyZRmp8vNx5/gkceuc3J4zGy+IPDrbF1+hCRLmU5PmY/PELpOEDQIggZ5XpBnOa7jksQhRXE+vLquI4T8wteSLEsxdJMsjcnSFMvMaLd8bt7c5LOfubs9D5N7FzvKivKlUQGAoiiK8qYjtMrq9W5bhm0g0vR30zGKrPrK+SJv+7W6WF5b1zVhoEmN1fUu03FErdFAaAUlUJYCL7AZDMYIoSGEgSxskiTn8tUesAVCghAsFgkIDc0UmJZgfX2VTs+hqio6Sx7D/gLbMTFNqErB2uUOlmWJg+0T24iy5Ul40k7n48uiyPdBhIZtzWSeLKUz85pum+8zZmfj33z4wqKSwi1SaWSFkCLVTn7gAx941bDtXb0o0v/y7/yd+Rs9jr/07LP6y7duiY985CPFG/Hf+/Ef+Qtf/453rv3tRtu4PB2nfPZTe3z6ky/iBQGrq5eQleToeJeizNA1g6BW5+mn30G9ViPLEqI45d69l+mfnaIJjVqtzY1HnqRWb1CWOcPBkKLSQGQkYcjnPvVJVtZXuXTlGt21VaJ5zOuvvEaz3Wb18iqf/URIEs0JoylpEmLbHkVRYhg64vziP8iKNE2wLBspK4QQVFVJnOcYho7j+jQbywTNBpN5iJRakWXZwRsxXopyUVQAoCiKorzpWJrWevJK80nXdIxQT3j22WfdWq22ZOf5e2RR3JJCw7YCdK1i98Ehvh8Q1B0WUUZZSmp1F02fU8mcesMnXCTEcUFVQbiomM1yOst1pJRYuuD0eMZoFOIFFkHDZTLOsByBZTvMpuf31C3n/C56XhRYjsbSRgPP99l7YOtFmeoindtkiZPkSSKybJqn2bomWFieMzZNqxS6sZAlMi81wjTTHdst8fO3y0g7Mm0z/v4/+2c+8bf+21/41J07d7R/tfLx7/jBH/xAU8S2W5J4cZzqNSsYFJ5X/PW//tfnQoj/1RPkO3fuGB8/OPhaMzn+JDD9Un4fzz33lxtpOl69eq35V1fX/Mv3Xjvis5885PMv3GVl4xL1VodXXn6e/tkJvd4yT9x+iqKUrK+ucnJyxMnJEZPJlNFogGVZ9Lpr2JYDQicvJKenQ+q1Jssr68wXY8oS4mjBeNzn1Zf7HO4dsLJ+mZX1dR67fZN79+7ieg43bz3D6y8/T57HQHWeAhSNSp7n/JdSggBZSdI0QQiJpplUFWiahmEY6JpGSYHp2EzmCfsHpw+3GlvDL2W8FOWiiYtugKIoiqL827hz545RTUd/YZHk8iu/8smr29uHuy9+9jVRalVQ89yv3riy+v7ZLOZkZ0S3UyfJS5pdj0Y3IMtK8qykqiTjfvSF+/sus2lEWRQYloUb2EhNEPgmtquhVQIqQdAwcBzByWnEfJaxtOTT7XqcHs9xHBfHK3Ecj/EgwnIkvaUWWVJQZhWnBxPOdk9YjEZMZiMZJWFeyTh1HWvhOE5clEUmhChNy2ER5TLOiqrVahHH0anjOTMJM92292zHuafrpo5mzoVWRUUlD6oyXxeVuCQqva5r8qoQRi+oNW7meb4TLsKRaYuPW7b58crVDtvtS3PYYTZre3I2uymr/NpP/Hc/93Nfyu/j277h2/yNJ7rf/9itpT9640b7q84OB/z3P//P2d4bsry2RlmVvHL3ZQ7399nauMzXfMMfoN5qMDgdkcxShmcD4niBlBqOYwMlYRiT5QW+X6PeatFsN4iinFrdp9UNyLOc3QcP2NnepeYH+G6TNEvIZEG92UAISaPRBHQOd/c5O9klL0LO83ueP/iuvrDbL6sKXZzXRZVINKEjhIZlGdi2SxwndJaW+dpvfC+PP3WNf/ZPP/3TP/vf/9L3fqmfY0W5SOoEQFEURXnTuHPn/ca8f/z+wHEv6zAoynJzMV88U8ZJb7YYG6lrt1c3urieh2EtEIaJberUGg7D/hzb9hDCYDyYkUQZVWGSJguE0EnSnJV2HdPWGI1jqrykXtkURclsmuNNdWzLYjhMqcoSw8jQNIM0Bt/XyXPIs5zRNOTG0iquX8N2UzRh4NZ9gpbHfLTM6HQi5qOJdXywa+mYbjhfFHEyLxqNdiFsq0qTuGjWm0YWF0mZlLV5NEorUSWu619JNP09CLHQTaNfykKAHgR244Zlu81SVujYnbUrPavZaQavvbx3u9tezhfR4g+N+5MHruvunwwf7OuGYRbZWc2yjbj0/B//Un4f/+Ff+AtX2x3/Rx99rPtHV9a91YO9Ac9/7pDJXPL42x5jNi14/sXPcnJ8wrWtR3nf+74Kw9TY295jOpoxG42pBQ3WNzbJioIkjdC0Cq9ew7Itmu0G9WaNRtsjaNQoy4w4LigLeOLpr+dzn7zHyy/dJS0r6u0Ohu2xmM0YnB0wH09Y3dhkqbdKlWeMJ6dkWUiZ5wiq8x3QSmIZJlVVnZ8GSKhkieOYIGA+n2FaNo4VoEmDe6/slfNpfPeN+TQrysVRAYCiKIryppEOHm+UWbopzXKpqIqpaRp+FEePyqpq2sIU2SKDUqDpkkKWOIFJuEg4PlgwnsQYRoKpS8qiIs8hTUNcxwWtwnZchIDJaEqRauhCMB+fp4wsy4osEximpF63CRcxnmfhuA6zSUSjbbNYFIxGM65cX8Kv2SAkRSVxPJtWvUGtU0NIwfhsQjKOeP3lNuPhxCiyxDDmk6qqhJT4VbPtiaqqisWibxV5VhmaKDTD1rJUVJahk5VJXIbRQheGYdl+0wyCRnd1xSjKkjTJqTeajIcJ7W5bW1lr29v3znq1Wr1XFNW7KymzJIpkkWRJEi0+zjz6Ux/6wAd++ad+9ovLRuQFxlc98baVb+90bX/7/ikf+817nA6mLG+sU+QJr7z8PHla8syT7+Hq1iUe7uzwysufx3d82u0O08mY8XhCnKasbKxx+9ZjLK82MEyBruvnBb90C900sGyJJkw8z6DINQ4Ohnz11z+N1/B58bP3SKuK1dUulqORhTOKPOHeKy9jmDZFlmFZDrqukesxeZagy4pKSoQQCCHQNA0p5XkWICRFXqHrAkMXGLZJreVjm2IuJb/1xn6qFeX3ngoAFEVRlDcHiUi
+N353WZZ1IZwAKdbKqvSyNPVs2xE6BrZuEiUFQpM4ls3gZMjyWpc4zmnUHIq8RBMazY7PaBzjBzVm0zlVKZBUHJ0kiEoQBD7tpRpZVJAVGVe2eucFogywTYss9ajVDKqqwrRtoqxgEcU4jk+apZTSYxHGhFFCU7OoquILj0w1Ous9qm6GdAw0AYd7Iw72zjRNk2iloQsExyeHmmEHtuVr0rFsWVVCaEIITUiKWKvXmo1lXbd003aE7tr4DR9NFwxHUzAlZ2cLrj22hOHprF9tMT4LWWp6jEahJSuNPCtsUYj3e577rsVi8b7nvv1P/TWzbm7/zb/58//Gd9v/i7/6419//Xr7LwU1wz84GPPxjz/g4HBGVUESjrl/9yG6brN1eQ3btnn55Rd5/cFr1GtNbNujLAuarTa+18B2DTRhsLM74OikT6/bQTcEi0VEEpUkSY6mw/H+IbPxlEdvXaG9ssa9+3vcemKTqqg4PBijG4LVS10Od3dJsowsS5CyQFaSLE/RNA2h6RimCVVFnudUlUTXdaSUaJrG71wR0oSGaViYlk93tU1vtU48TmPbdGpfxk+5ovyeUAGAoiiK8vveX/6BH7iU/9nFVydR8o4szw3qwSVdCDMrq26WSdvyAyxXUgK+67JYxDQbNWazEE0X1BsWrmNg2hZIncU8pFZzsGydorDIC8iyCsOwSNMIqVUYlsD3HLLMpMhysrSi35/TbNUoZEqWushSY2d3yP2Hp1iGjWWa9JbrTMdneIGNaWoUeQoVhLPze+2r68skRYntGyytNOlttNi6sczwdM72wwGeb9LeuKmZtkkYFiKaJ1iWSThLGJ716XWWjFqrSb3tkcYVo1HIaBZjWyYISV5Cc9kiTmOKWYFpamR5wXwesZjHJGmG5Xl4XsuxLcuZ78b//qS/uNop3OPv/c7v/NG//Xf/7v/PFJc/+t0/WjsaHwXv/qob33nlavfP1mrGlU/85mscHi4YDBPOBhM802W+mNBZ6fLUVzyBrdt84rc+zvb+fWp+jW6ri9D4QtXd8x33OBKU5RivZlNIwcH9bdI8JWi1qNda+H6DvCx57HabnbvbvP7KPsuTBFlsEgQujXaN0WjB/t4xjz95jVtve5JXPvcaaRKSZwlSlucnOVUJEqQU6JqGaZoA6LqgLMvzkwD9PAuQZZqYpoXnB4RhSjRPOTocDAZx/Prv0cdeUb5sVACgKIqi/L724z/8/ZtJGH1DGsXfUuRlj6qykyRta7pRk5XWzPPz6yLrW10QFa5rkmY5umaQFza2o6ObOqYhycsCw9bQYwO9yhGiYGmpwWyeMJ+ngEa7U6fedLFtnfHZgjQuabRrjEcLqqpE16HVbLL/cECr5XN6coomPIRIiOOE+bTDtUd66FqG360RLkLQTMJ5RLvV5OxsSBKlyEqyvztAInEsk4ODCWubLdYv1dEMnek8w49SZiObIs9pdl3WL7dAF6S5RDcMTndPSAvJ6uUm4SwGaTA8WdDqeaRpSRym6GZFUcDhwZgiL5GywrQs4jRDMwxWNtb88WDxFVVVycWwb33Hs9/8dz3HuydsTRPIpLd+5d5kMvGS8fiRWTX8I29/77U/eOvpy0+ZIjd/89df5nOfeojtu6CZVHmF3fDYurnFyqU6s2nK5196jZdfe5V6rYNjOpiWhl+vnz+61g16q8ucHg/Z235AEi+QssQ0HBzPxW1MqdWndJd61FsN6o06b/vK25wcNTnZP+Lll15l5VKPOIloNOtEccZgPMV0bbprS4SLEclkikRiWTZJGiOrCkPXkJqOruvoX6gkXJUSKM8zvwJCQBgusJyAbqeFlEIOx7PffO97337wz/7ZP7rgWaEoXxoVACiKoii/79y5c0fL89lqOY1upGH6jiyKb5e53NKF6KVprqdZbjqBY5eV1DqdBmfHY0pZomkSx9NoYFEVBhUFvdWA2Swiyys0dBBgugYVEk1AWZYUhcQPPKIwwbQdJpOUKKyY9EMs28SrF19IDamjaxV5WhFGCQd7p4yHCxpNjXqzheU6tDoeQeAwny/IkhmtJY+SlCLX2H94guc5eJ7FZBpyehKxtt7mNJ0SxiX0QwpZ4tc8ZmGIqRvESY4QIHWBMExm05T5JObuKw9Ikoz3vO9xOl2bIss5OxjTPxvgeg7tdpOgdn5KEc4T0qRgOhnj+T5pmuFJj7Iq6bQb1NoeVV6KSjS/aTQ8fTyW0bCSVWba1nByNvlEVVW6Yftf8Y6vvHzryWcubZpGxqd+6wGf+8weUjdY2egyHsZsbq6Tyxy3bvLg7j6vPL+NYxs8/dS7mC1Clpe6rK8vEy1CZos5cZQyHs/QhM71R24idI0srTB00EwNy7ap8oL+wQn9w1O6K0vohmDr2gqPPbrF4eGIo6MB125c4vOf3aUoBK5jMR5NkZVgZX2dskhYzIfngY9pkGXZ7+b6B0FZFuc1AZBUZYWUFYZhgDg/pfCDGpcurREtEnlyPP7Y/+3n/9cpWBXlzUYFAIqiKMrvOy6sWWa9OconV9IoeTyP0g2hW+2ykEGWFnqRV8J2DGE7Os22T5YU+IFFRU4lzne54zghDhOyuEJUBlmSUpQFpuaQJhlRXOD7LmleYFg6liNotpsIXWM8ipiOQ2azBUurPUzLJM0meJ5LnkiODgac9adUuYaUOrVmnVqjDmiAYDaNSZKK+Tzh+CzCD0zKtGRlxWc6CimynHrdYzKICecpBwcTikIwGs7Z39UJGhbNTsBsOsHQTExTQwjBZDwnywQPXntIEoasrq3R8F1kUXJ6OOTkcMRiMSMMM9rtNkmck4wSsqxgPp8yHvSZjEY4nsV4dF78LItzmu06pm1i2YHWbhmbs8loczGf4AutmI/n7/YDi2fefaX2nq++aUz6Y+7fPeVwd0KalWxeWWP74TF5Kjg9OuDK9auE05zD7T4N30QzDFY3VrnZqRPUPF5/5S7RYsrSaovVS3XSWHK4e8ZoPMG0LYqyYDIaUOYZhmWy3F0mqDVIkoLZZAFI7n7+Lu1eh9tP3cIyYXd7QLvdwHVtxv0p3W6H4937hLMJtu0jq5w4nqFpYFk2lBLE+TpeyuL8LYdhnBeD+8J1oaqSaLpFs93CcXV8z6PmNyYXNysU5Y2jAgBFURTl9x276Rg1u/zDk0PRlqV8KitKzxDCN2zPsCshDMcmqDnkeY7jufi1BMuW6IZJUVSATlGW5Dncf+2MvCipCkmjE7AYLahK2H5wyCOPXkY3NGzXIMty8iQhaFpYlkZZlcxmM2pNj+lYx/dtOi2f08MJnhtQqwf4NZtrj64wnaZUZYZpm5QSBoOCNMnprrR5uH1EGHqs9BxqNRvLMoiinJ3tIe2Oz2ySc3oyotFqAFCVFadHfaaTGbZjk2cJQWBzcjRkPkup1wOSZEqj1UQ3dXa3j1nNW0TzFNu2yTOHokrpD4YUWUm9UcdxHZhAXmQUeUaWGRR5gR+0GA9nGIZFrR6QpSmlFDhBnUroLC23jMd6V1obl9s8+sQ689mIB/dP2X8wZTpNaDSbnByNCRcVJyd7RIsZ7e
kSUZojKRnPJpSlRBoGRbXK8eEhly6tkCY9NE1gOoKV9QbLmz2qXDAdLwijiLJYpUxzsqxiNJqwt7+LbuhsbF3CtGr4QY37r7/C2a+e8ZVf8x7WLi3zL37t06yuNFle6TCb5bi+wWIskWVJlmbomk6Wp5imjeU4lHlGVRXouomsSoqiwrJMpBTkRUZVQVBvYFoGS8sNZuP4qNTK7QudGIryBlEBgKIoivL7xn/0Ax9c1irrfafbB0H71vo3Wq7XW8yilm7YXiGlE/i20C0DhMTzLYTQzheYUiPPJFUpSJISy3SZjCeU1Xmqx/kswXZNEBqWpRGFFZ4XMJuGLK20EUKSLGA8nFKPbKpKZzaKMS2HjUsdDEPH8w1GZwsWs4LeesGVqx3qDYckTrFMDU0zKauC7rLLaJgRNGw0I2dtvQOyotlyOdgbs4hyZvMUBLS6PsfHfeqNGo1mQJZkzKYL8nlGNkmxbYuyrIgWJmdnJ+R5xnzqIoA0z6hpGQILpEFQ85jNTimrClnBbLKgXqtTVCV5VWEYNoblMF+M8PWAoiiIogmaYaAbJhIoqoosL/Bch6BmceV6l2fecwmvZrG/02f77ilnJ3PiSLK/f0ZZZszDGMfxSPKIUgPL8SmrhFdfeYG8KDFNiziJefnzn4NKsra6iWXWMUzByuYSk3GO4zqYtsTxPfxak7IqSNMM3XLZkIIqT6mKgsUiwrA8DEOj0X4PpycDPv3Ju3ylb/E13/BO/vH/81+wFlW0Oz02Nlc52tnHMi0k5fliXwNZFeT5+R1/zdChqpBUCCHJsgSh6WhCQyLI8xLT1DGMivk8GWaxiC96jijKG0EFAIqiKMqFevbZZ/VHN5Zu9Qrt7q6Rlto8eVJU2jvn42S91vBXTg5ORbvTdAzHFJblIEtJkkdYpo4sUuLZnJO9M8q8hURyfHR+VSdNCyzbIq9KOt06hmkhS4lre2zfe0heSirpIGVJmUqKPGU4HlOUNdrtBp2lOlXpnb8rsAWyAr/u014CNzBYWqkRxwVJUtFd9ljMY2zbZG21AdWMk6MprXaNs8MjWp0aZ6cl9+8d4TjnC3jL1Rn2I5rNNnGcYlsCwzDRtDqu7XF8dMh4PKKsSjRhkiYRmtAoypzVtTUWi5AqK4mjmCJLabV8JhOXspDohoWh68ymI/TIxDAtsiTDNG0kBkKY6IaObtpomkme5USLhPk8xPddltc9bj2xhm7CaDjhwd0F+9tjPvvp1zB1D8s0mE7G9McDWp0Ws8WUw4MDnnnHu0GveP7TnyIKFxiGRSkzzk6O8D2PTqtHs97CdX3a3SZLK23yssCt6chKZzQcMzgbkmWCCgt0A9cysR0d0xSkcUo8nrG2sYZTr7Fi2vj1FjvbE9q9Hl/7772XFz+7zUvPv8K73/0knZUO4WhOUGsxmfYRQqLrGoIKWUkANCHQNKikQJbnlYA1TUcIQZEl5FmGpmtkWdZPRkb/YmeLorwxVACgKIqiXIgf+8EPPVUJa5YXhcyT/Om9PH9vOc+6aZ4sp/MscH1n7dZX3KiNRiGmadJecYjmJRoa6TwjikqieEYcx8znC0zHxfNdNM3EtEySJCOOI+pNh2arznA4p1hkHM77+L5HUVUsrzbQdYnv2eztnGHbDmlW4voGRVHSarcoq4yilNi6pN21WFtvsLs7IEnz89OERUZv2edSt02j7n5h9z1hPo2Jw4Kd+0dIuY5u2kRJzHg0oZIFT7/9NrphUFUpjqdh2xZpVlBkMXmWIsuM/tkJjUaXUqbUghphGFKv1VlabqMLQVVVpGkKlBQF2JaBrLlICaPBiCIP6fZ6TKYTDMPAti2ajS6GYWLJCtcL0HUDz3eJ4wRdF7Q7ATefWCZoarz84iFlKhkPIuK4oN8f4bo5rWaTNI8Zjvq0u1329u+zurJGu9Hm7uuvEM6meLZLKSVVmWObGlVZUlYlo+EpaRrz4H6FYRpIKag3WzSbPYKaS6sV0Ggtc3w0ZDSckuY6SVTh+i61wKcq4eG9PRqtBs12i1rNRk9Ntu/3Wb/c4j1fe5vf/NWM3YNDlldXeXH/CM9xaTZ6LGZDZFUCnBcAA3RNpygqBJKqqjAMgW17JGlKrd6gs9SiKAqEFCe0h+mFThpFeYOoAEBRFEW5EGUpn6SMrwL7ZZY9WhXlk1mU+WWRt0zbXh2MF35WZHgNhyTMMSwD04ZolpJnkvksZzoeM+qPkFSEUcjmVhfbBU1YDPoT/JrJ6lqH0ThE6ALXCVhEKbee2OT+g0N8z2I2S8mjhCvXVoiSggf3DxkNE2azOWGU49ccHNek3rCwbAOQDAcxCEG9ZrOYz2l3V9F1iabDaBwyHkekecH+/gmSCsOySdISKkn/+ITLVzbxPIeyKml1XTQ0jvanxFFOOF/guz4Cge94tBpNhNApypw8z2k0AjRR4TgWYZii6QLb8hj2+3iuS56XgEYUz1hMB8wXUxrNDoZhkMQh7U6XNM0Iaj62dV4FudOpM5+HtDo1bE/j6HBEsV3y0ud2kKXEdlxGkzkInf7ghLzMebj7ECErzo4PCVyPZrPD3t4OJ0fn124QgqLIcGyLqihwHIeyzDnr90mSmHZrCUPXydKE+XiBIRw0CVUFo9E+jufQbNSYDiNMQ8PRTZIoQwqNoNFkNJwxHc/oLfco8pKqqLj38h4bV7o8/sxjvPriNg3Hp9Pt0T85QNfOd/+LPAcBRVGeF/4SFUV1XiTMMM6LuwlNx3EbGLaH5VqcncwfToeL/+ZXfuVXVACgvCWoAEBRFEX5PXXnuee8hVH0sjhp2qZxu0zSy0kSN+t+87GoSjxN9+pB3bNrnRaLWUKvW2NQzkkXGa5jkCwMslSSxBmB53OcnGBbNrXAZW3TJwotHtwdnu8O1y2OjibYnolh6sxmIQiN8SQlnKdMximzWYhl61xZX2KxM6DZbBLFC2p1n7woqTUcpISigqWWz+dfOCKJCw73J0x9m+WVJqZxnjWmyCVHhwscz+T4OEKYOtduXvvCIjtgPhwgRMXW5XUMIXFrBo5TkScamhQUSUK7VUMTAs9z2Vi/xDxaUK81KMIE3/fpdFu4jsliFgMVaVYw7M8xTYOyEviey2w2Yzw+JUtjlmoN8iwjQaBpOovFFMf1sGwLz3dxfQPfNzGtgFrToShzDnZmTEcL0kQSRQucNOPevZcpco2NjQ1Oz06RSLqdFYQQ1Bt1iipH0zRs18V3fKSEpm2wmIzPH1rbNrPZhDha0O2t0Gh0iKKQokwp4xIxN4mygiArWV1bwzAEbsNGMwSLcchwMEUYGo1mG7fm0e02GPRPGU+nnB4eYpg2N27cYOe1A5aWu7RbLU5PBvj1JnE0ZzQ4JklCDHF+9aesSrRKIKUGVOdpXoWGrmtkeUZvaYWV9TWuXr/EeDC+/zd+9mOfutCJoyhvIBUAKIqiKF92zz77rPXRj340+9Hv/u7aQqZ/rAiLJ/NSLsosR9fMr07DjFkZ9RzH80pKbM/FdjXms5Ru1wVykrhCIkjTHM+zmM9ndHsNrj56lSRKaXUCqqpkNs3I0hLb1
hBCJ69KttaX2L434OSwT1WVnBzsUZUlWVrhBz5Sluzs9Bn058hKww8cag2HVscHJLu7Y8IwxnEcHjw4JU0rpJRImdNbWmI6jpFSMJtFzGcRKyttdrcPWV9doxAlaGDZIHRBb6VHs32emlLmGnmsM58tKPKcJInwA5ug7nPJ3GBn+xCq83vqVVES1F0810JWEllVaEJSlHB0csKVKxscn/SxLJPxcARSo91ZxnYCZJWfZ8IxLCxLR4iSLI3wPRsqncU8Rgid/umIdrtJHKYkSUEUpkhh0B+MKYqCdmeFg4N9HMflxtVbGJZO/+yM+SLk6tUb7O9tU/MDkBpCagxOj9E0jU6ry9mgTxwtaLc7+H6NWTQlzwvq9WV6vRUs16feamJbFkII/JqNbumsrHWZTuZMhhlHR6fs7x+i6xmmKbh8+TKd1S5PPL7FZz/7eQ4Ojul0W9x7fYd2ZxkQTEdTWs0utmlzdPiQNJkiRQWcBwKU1fm7AAmSEil18jynkJJazaMZ2EQz69Vnn71dfvSjr1zcJFKUN5AKABRFUZQvqw9/+MO2lk7f9QPf/W2juEzeZWTy64qsWC6KYiBNY7XWqXXTrG6NJmPn6Wduc3zSx3Ac3MDl/oNj2t0r2J5LluZkSUWRl1iWSYVBULeoNzzm8wWu53D/fp/ZqCRcxEwnE3rLy9x4fIkozFjMM9qtFq+98hLD4Rnd7hKmvaDZapCkGUKDeqPBbL6g1a3h1yzSLKeqJJZjkeUVn/rEfXRhYugFURxj23XGo5gsL0iTioODAdceWUJokmvXL5HEktP+BNu1yPMKIQTLSyvU6ha2ZXF0MEI3Stodn6mWoBs6YRRjOiZlJVmEU4LAo8hTHMdma2udLM/IkhJdF/i+w97+AX7gEUcplYQ4zdFNi5XVTVzXJssLDN1mNpshixRNNymijLIoSJMYoWnUGw1M08GwQAiNosgZj4dYtkWapQxGfTY3rzGfz0jzmI2NSxi2y8Od10izhMsrVxiNzjg53adZbxMuQjQBG+tXqDUa7O49RGqCS1du8MgjjyGlhl9zsByHOCqYjqbIqmQxndKPQ9IkJai3cDyfE9ug3vTYuNxmZb3GbJIyHk6YT2d8+uOfxvZcHnv8NldvPMposMC1HRzHIYxSiiInSeZE85SylHhei4qSPIuAii9UAAMpkRrwhcw/fr3F8voa3dUWSZSEJ/uDv//Rj360vMBppChvKBUAKIqiKF82f/F7v/dWmYZX4jhbI0se1aRYraT+qG15vTQOL+dZVpNdEVTCFEWlCdODdq9OmuSYhkG7WWM+LUAzsEydLC2QUiIErK7X2dxoMJnOWFlpMBqnRFHJ4eExo+GAbq/H2maLyTjmwd0+0/GcMk+Yzyc0mh28oEVeZEynCwzTpCwqLM+g0fQJ6hZZUjA4C3Fci2gR0W21efjaNsvLa8hMYmgGRwcneK7D+uYqo9EYqpKrV5fZfniKrkmCms5ZX6LJkvEgRpYlZZbz8O4eSys9DEOQpDlFJogWC6J5iDAEVVmQ5xlpGnH58gZVlaNpJnGckmUZhmlSq/mMhgNOjg+58chNBv0ZUoMsy7Adm7LMyLKcpZUeo+GI2WxMVZUEfoDrugRBjcV8TpqntNttHNfCcXTytCBLczQBQkqm4yk1P8AwdZIkod3ukqQJ4WTEYj7B9332dx8yXyxotFokSUpepliGQX94zHQxZevyVVwvwPcbmKaN6ejU69YXUrmanLo6Z/0xEoON9SuYJswXIUITFEXBYhKSZwnLK12WVgN6Kw3itODyjS32d3Y47Q+43mzh+ybT4ZgkjVnMFxi6SVXmjManmJZFVVQIYaJpJuK83Be6Bhry/JTGdLAtn/bSKlevbeDXTSzH2h/n+f2LnkuK8kZSAYCiKIryZfHDzz3XLcv48TiOH0vCRVMvi6c9x+9JXfiW5zZdNH02mdtRlOumZaFpNvNZxfrlHvfuHjMYjektNzkbTJGVYD4MWVnpoOsheRXz2O2bNGoWWZozGkTcu3vG6fGILMlJ04LOUpOSkvEoYbEIQRQMh2c4jocb+OiWSTSfE0cxZRVSq/vYXpullYBW2+aVF445Ox6wsrzE6cEpg6MjqqpA0wTj0ZTLl1c52JvTaQf4noEQDp1OF8sqsW1JlkhODvvE8zmBt8RksaDb6zAZnjEcnr8pACiLkvkkOb+SlKV4lo9h6NTrTYaDPq6v47g1qkqyv3fI5uYSfs1jPFgwmy2wrfMiXpPxDCHANB2EqFiEM9ZWN8jTgjzLkZWkFjTQNIGmaWR5ihRgmucpQRutGrIqiOLz7zdbTYqywDAtlpaXefDgdUzTQVY5+/sPzk9GdIssTsjzlHqthq4bJDJGIpFC4DoNGvUGg9M+jbYkyyocz8OIDY52pxgGtLptNrbWWL+8wsnRmCwpKfKKoF47f7RblRimhqHrjAYhjbLAsEzqdY96w8f2XPrHA6LFHMtyOTo+IxyPaDZqxFWM0Aw0XSeOpgjAMM5Tn2rid6oA5wgNdGGgoaMZzvkD8OmM5e4Sg9PRy7/wC7+g0n8qbykqAFAURVHecD/8fd/9tCyT9+ZJUivmi/cWyaJWyapr15stu9Zw5lFsW4FvNG1DhFHEpa0N8rJCtwT1doPNLcnBwSlNUVGru6QRjIuQLM1Zu9RkEcb0+2Oi0CRLS15/5YiXXnxAnufU6x38wMf1Avb3hxi64PRkhyzLmc9mGLpBksdcbTbITYPhoE+z02J5pU0YpVhmHVMzKIuKWi1gNDhjNh6QJgV+4HD3tXtcvrJJt+dyegxBYNNoaPRWltBEyWy0QFY683nMoD/D1HVmswVZkuMtN1gYgv7ZCZato2kGtVqNcBFTFucLW9dzaLYazGcLllaXaPXqDE7mZFnMeHTGxkaPPMuYzeYkac616zeo1wPiKCTPCsoi4/B4n0atDgj6/VPSJMF1XTw3IMtjgiAgSRJ0XaMsIU0Sjg8OQUBVCTY2lwkXKcPhhNW1NeaLCE0zWF5e4qUXP01ZFjQaHcIwJM8yyqpCUFHmKVIKtrZuYBsWk/GMwbCP4zp4no/jBudF16KIqoI8F1TV+VKk0fRYXWsxm8acnYzRNY0yl2SFpMhi4ignzyWD05zxcEAYD+murFOvL+FYJod7hzSaTa5ev8Lu3ZLFbIAb1EEKLNMhTyOqKqcscjRdRwodTTcoyoKqqvBcG00zqGSFEODYBnXXjPf2Br8GqOs/yluKCgAURVGUN8wPfuADzcrksSovHidNni6TpF0k+bpW6b5t2nXTcvx2p2G6gRTjWSzWNpd5+cUd8qJkabVF0HAwrYqV9Tp5cb6oNKTGPMzRhEYYpvTWbBzXJ0/hte0+w/6ch/cPiMIZ8/kMgU6t6aEbxvk1mf4pRVKyCBfkRYZEI5xMODk+JksLtq5e4rEnrmM7GkfHJ5ye6CSxS56n6EIwm00RQsf1LKQEqNjc7GAa5wvldseh3bGodINoUTEfpxzvjpnNYnw/IM1y4jjGcwyGZwOODg4YDM8oi5KVlVVSyyaOY+q1Go57nmp0sYiI
wozeyhLj4YLpZMaLL3wG36uzv9MnSaPzdwd5iuetkMQpVVni2Danp2doCEzLw3E8DMOgNAwc2ycvUnzfZzQaYds2WZah6zrDwRDbtinKknqjeZ6bv2Xi+j0mo5i8gGvXH2M07GNaDiaCMI6QCFy3gabpeH7AbDah1WxyenpCmeW4jk3g11hb3WI+mzIZTWg2Ovg1D8u0WF7pYTs2/ZMx917fRWiCp565Qa3u8InffpGzw2PyQqDJijyNKWVBViSYuo0sK3Yf7rKyusGVa4/QanU5ODxifdMmKSKyIkFGgirPSKIQTRfnxb4AWUlKWVAW8rwQmNDQNAPdsAmjOVsr11ldaVHmxUlRVK/wuy+GFeWtQQUAiqIoyhtBfPjD39Ut5sU3kFfXqyIzyYumif5o5dhtKqfZ7ratlcs9kZSSm2/b4MH9U+bzBb3VDv3BjJXVOitrTVzPIApLNi63GJwOQRMYhmTrRhPDFASBx+nxGMuWPLx/zMH+KWE0J80yrly7SpHrLK/2cDzBbCI5OxkQJSHd7jJ8YaG3t/s6Xs3i0cceZWWjS9A0KfKKVrtNVWhEUcbx8Rl5nBItznfW67U6flAjqBnIKqcqDdodh3rDJE0KGh2H+Tji9HjG6fGAoO4js5x63QcB4WxGFEZomkCg4dgeoHF2ekqz1cYwTSQl4/EMKcGyTF75/KtUVUVVFaRpyvJynUpWwHll3K2r65QFzCYLhCYYDAdomsCxHbLf2ZkXGrbtMZtOidMF8/kYx/EwTYsgCLBtG8MwKauKRqPGynqbzpKNJnTCRYxp+vg1mzQ1OTsdkOclne4yRVlQrzWYTkbMFhPmZzNM2yCOBZ32Eq7j4fs1TEswmgyxLIONtWWQBrohCII6B/v7zCYTylLSqNXJ85yPzxKuPLLFlWvXuLq1zunpGWcnE8q8wWw+QhgmUThHFgWUOYd791nM51y9cZN2s0schdx+4ja/8o/+HnmWUPcaSFGSFzlSSjRNoBs6VVWSFyWmAVIINN0gK1KEpnHp8gZLSx7Hu/3DSoidC55bivKGUwGAoiiK8iX78Ie/q0uYf7fM8nVTVGtlXjbzNPNdy2/7Na/V6rWtoqpwaw7dlkO/P2Z5pckLz3+OdnuZRsPn7HTEykaby1trCCaMRxMCz2YcJQR1nWfeucVsHjMdRchScHY8Y2/3CCl1HLuO73UxDBfbLXjksQ0G/TkH20cc7B2SJAuC2jUcx+fkdB/b8rh8ZYtHH71KkqWEi5wkTMnTAq0UnB33WequMBqcEocxlmXT7rboLnVZXqljWgLbMtFMaHcCxsOYJMzwAx3NEriBQ7tbp+yP6bR9hqMp4XzCbDxCMwzSLGMRhnhBQFkV+L5NkRdMp1PiOGVlZZnd3YccHuzSbHbI8hTbdjAtk8Ggj+s6rG0uEwQ+Z6cD4jhmMh3TbrdJ4pSTkwnNpsFiPjvf2S9y5uGUqsqo1Rq4vk+z3vhC5VsDTdOwbAvTMmg0aoTzhPksOb+iVLcJai6f+8wOWRZy6+aTSKDW8BmOhownQwpZEjQadNtLWJaLEDDonxDHCzY2t3j8qSdptuuURYZrG5ydzJmMpsRxguMEVEXC2ekhURhTb7XI8ozltRWGoxGalFy9tokbuBwfDxkOQ5JwzGh4cp5FKJwzHh3x+RembG49RoVGHrd517vex2/+xv+b+WKEaRgIDYRmIoSOlBVCSAxdwzA0TEOnLAsWsylXrz/B2voyuqZlVSU+s+Qzuuj5pShvNBUAKIqiKF+S5557zjTT/NE0zw1RpptVJa51251uUVaGEIaHifno2zaZTSMOD8Zc9j10Q+fkaIZlOoyGQxB1nnnHbZI04fhoxtJSwHgwwTQMXM9iMg7RdY2qhOEgYjrJef3VPcJFwtalawhdI8sylla6uAHIUrL38Jj9/R1M22Gzu4Rtu5yc7tPvH3Ll0k2We8vM5nOKsiKoBUz7C4okZzw94+h4n8uXbyCR1BtNkjhB0zUcp2Rp2cF2NEbDBFM3kFT0+zMMQ2Nts0WyCKEoiRYJQkIUzknjhNF4yHw6oqxAConnecwXIatrbVbWejy8f8jJySlXrlwhiRMmozGe52NaFpbl4DguRVYQRxFbVzfwPYfJeIGumeRZRq/Twa8FHB0cUgvO79q7nsWg32d3b5tup4eQklJCHEeMBn2klCz1llhaWaVWD9AEHB2cUeQV3eUG3SWP05MZD++fYWg6X/nep6nVPZK44OGDPU6OTllbu0qtUaNeryOlZNA/ZjA8Q1QQOA2SKOT+63e5/eRt4iTCX1+i2Q2wTIuV5SXCKMXQBEKXRIuEk+MheVYRzmZc2Vrn5Rdf5+TghM5ykyeeeRvW3hll2mK5t8zB4SGOW2M6OSFNInYevMyVK4/y+qsv0uksc/PRp7l/70WkLEEKhAQB5+8LihLHNrFMA9dxyYqS7so6zXabpaWA0WB2NhzGv/Z//bt/Z37Rc0xR3mgqAFAURVG+JA2Kt5V59meSxcJM5qOe79S6ZdlqePWaHceFvrLRo7fSprfewrB87r1+zPXHeozGE+I4p5Qlo9GcOE14+iuus/PwgIO9kKqUxFFCULOQ1fmDUllqPLh7jK45FKVOveGzubXEgwd79Hot1jZqlBIe3D9hNJiQpykbW1vYbsBkOiWOEhpBi62rq7iuwWga43gmw+EcXTM5PXlIHMUYusfR4QEbm+tYlsvJ0SlZlnH5apfNrRbzWUxxkrK03PjCY9aK+axAMmc6jpjPQhZhSpKG1BotoihjPp+SlTGz2YJGrUGchNi2z+raKp7vUJTguAFRFJOmKZphYOg2juNj2x6mZZ5fPZpKPC9gNg0piwrb9vC8GvWay87ePtdvXKHRaLIIE6qqRNN0rl65gaYLxqMxeZFRJgWT8YjLly/T6nQwLYOqKillRatTwwtsDMPg9HRGmpTcfHyTSmYgBEmYgSbRDIvHbj2JZhq0mjVOTw64f/8eo/4JtaCO7wVE8YKiSCmk5PjgCN8L+NwnPs+Nm9dYhDFaWeG3PExLI0oyastN/FaTNEoZ9PsswpDbTz7CyX6fk6NTPvEvP0mn1yMMY3zP5/LWVfZ392g2l5mMTsnyiMODB3Ta6+zv3KfdadNurzAaHoEU58XXNA2Jg2boxEmIZbYRQlBUJb3mMjceuUTNNzgb5mempS8uen4pypeDCgAURVGUL0lOWlFmG4Ft3ZycJi1bePUoCnW7ZojltSVOT8d0T30uX1/m2mPLeIHPvXtHFFKgaYI0SvmKdz+G6+loomR1rcP23QOSJMX3XOoNH9MSHB8Nse3aF64GtanKiitbl2k0z3Pir6y1sSyN2axA11zKqiDNYpAgq5RoPmFz/RKe57K82kPTNXzXQdNBdwQPd3cYj0bU683zSrXzIbWag2Fq2A7Ytk696WK7OlXlEdRjgrrJbBJTq7kMzwYsZinzRcTu3g7NVofReMiW5TKfjonDOXmZk5c5pmUShhGW5TKfpkwnJ/i1GnGaATp5HuO6HlJW+H5AFEUkacjSUpsgCMizivFoxqWtFdKkAip
m8xDTNGm3W4SLhCSKyfKcWq1GHEUM+gNM06LTCpiMxix1l6kFAc1WAwlMxjMqKdFNg0YrYDxcUJQFSysB3WWPSjrsPOxjGjZ5EbJ1ZQnDNigrjY/95sd49ZWX0HWDsiyI4wXz+QQ0wcrSKq7jMT4bs7ATNKGRxBVxmlP3PYbDKfPZjGZriaIIMXUdyzTQNY/7rx2iaZIiy8mThGSakcQp4+kQpKTRbNBsdclymzgNSacRSRIymw/QdcnZ6QGNWgvPrREnESDRDR0pS2Ql0XWTsiwAn2Y9IEtDHFfimFquGfIzpi/uXuTcUpQvFxUAKIqiKF+UOx/6UBAW8yeyefgV0Wy26rv+xpVL1yyhG6K51CApcqI0Yuv6EnfvHtNb7mB7OpqZsHm1i7A05pMZs+kEISTLK22m0zmGpp1n/Jln1Gsetm3g+Q0GZyHzJEETJnmWs3Vlhc2tDkKUNJs1oijG81ocHexj2QGartPu9rBMm9PDPdIkRWsvUW81kNIgXCR0l2o8fHBEq1Fn+/59bCugqs7vh9frPmtrDRzHIYkSGm2PTtfHskyyNGFp1afMK+aTFM9zSJKEIhfICtI85ax/hKYZzKZTBsM+QtMwMDB1kzRLaTeadLordFeaLOYVYRjT63XJk4IoXlBWJWVZYlomRqZxdHTCxtoyzUaT86Q0OuE8pd700ESHo6NjLEdiGDrT6RRdN3Edh+FgiOM6rKysICswDIOqrPBcl1anzXw6YzKd4dgOUsJ4MMNxLISQNBou03HK2ekBna5PqxnguhqtlouuaQwHIZ/62PM8vHuPutvBcly0pqCqCqSs8HwPDUjTlM2tLRzXI6jXaPXqBHWbxTRjubnC5aurVGXGZLjg7GRCUUp0w6aztMJiHtNqWeiiIo5DpNCpNZo8vH+X06N9BoNjTMvDDwIsQ2c+m5JmEYZmI4gJoymuXcPQTYqioMgLHNsB7Tz7j+f6lGVBEWdc39xgZaXNdDTdn0/jv/uf/+R/e3ixs0xRvjxUAKAoiqJ8UdJidruM029PF+Hbiri6JhzL1E1N9DZ7dFbbrG2uonsdjg532by0zEsvbXPz8TUcR6c/mNLr+mQbPdrtLnFcUJQZdd9nOoyYjEOCwMeyNRzX5uy0j5QVs2mCYdjU6j7dJR9JQZ5LanWHySTmYH/A/t4xSytLmKbN0lITAMO08TQL13Wx7fM/fZZtkiY5tuGyv3NAnhUUxYJOrwWyxPcNdK1Clgmea1Cra3i+hmmaBHWJYcDgLCFNcgBqQZ3hcMR0NkXTDKazEZubV8mLijiOsS2HJA0RGji2T7O9xCO3LrOy0eJTH7tPkZU06w2Gs/OqvbPZBN+vE0Uxw+H5jresBEIYGIaJroPrm7iezdnJCUWe02z3KKuKldVlykIyHI6oigLXdbBtl/l8RlmVrG2sU1Yl8/mCKIyI45jZeILjOAS1dY4Pz7BsDaoeaZKBWVHg014J8GsGw9OY11864fPPPyRJEt7+zLuZjKeMJ2OarRZZmpJmMbIC03ap1zogbEzbo0SSpjHNpkO77VJUBZZl0mp3kFWBYTgkScZ4HDKfZ5SZwXA4Zzyc4poBln1eydgPHD7/4mdJkxhZhsTzKbomEEIgJUgpMQwTWVUkaYxpmhiGga7rCMCyDIq8wPd8TodnrKwts761hueaDPdHdyez7JULmlqK8mWnAgBFURTl39qdO3eMxc6rX2XJ6lZjefmRMMpqeVlqzW6HvJAMRlPcZsD60hKPPXGLaD5mOlnw4N4RtuWgCxPX0ml1bLanY56+dQND05iPp1SlRhzlmJaO69eopCRNSkCcF+equ+iGxqA/w7JNHEfH822ms4xBf4hjB+TpeTEn33UpypJubw1d16g1AsIwxK9ZeKbPZByT5xWD/vliW2oS09SxTQvXKVnMMgyr4tLlDkHNBqkBJXEc02zWGZzGFGVFWaRMJzMW4YI4jpFSkBcFhm5TlBVVmVEKmzRL0TUdxwtY2Vih2fNJkgxN6MiyYD6fkecpx8cHBIGPbTmkWUqap9QbDZIsQwjYfrBNmifoZo7lbBLHMb1ej0WUIKUkTVPOzkZURcnly5s4gcMijCiKHN/3ybOceThH0zSCeg3DsggXC/xaneFwiGt7GLpHksT4vsPlKz0abYfJKOJou+DkZMjR4RlbV1doNhos5jH90wFL3SWEfv6oNopMwmiGX/Not5ZodFs0ewGaJmk2XVot6zwlqoA4KTAtjbLUqaqCZstmdbWOYRoYhk5RwtnpgoO9Pqcnc8ajhEdvP87Syhq72zvsPLxLVExJihSBRNdACIGh2whDQ8rz05TzjEfg2jaarqNrFpXUcL0Wa5uXuHxlmSIMZ6Ph7O+/N4rnP3HRE01RvkxUAKAoiqL8G7tz51lrNkMf7776tXm02JoP+tSCmnZp65pueDVyWWAYJrbrkKUF89EJue9T5gLHhq0rK3ziY9uUpUazbbGy0kDXdMJFxI1HrnBycsJ8FuL6Jq2Wh64LRv0pmjCpyhjLNbEdAylhPs9Jk5I4qnBch9PjPq++9nmWeuuMx338WgPHdQjDCMOw0AwIah6Gdf7AVcoCx7HZubePY7k0L3UYjU5BCsqqpN5sEGcl47Mxb+vUEcIgiXPqjYAszUmTnCRJ0HSBkALH1mk3WxzsH1MWJYZukeYFaRySZSmVFLhencViTruzxOpGl6rk/CF0njEaDliEUwzdQFYVG+tbLBYR8+kIQzfwPI+yLKlkQZrmuIGP43i4jke73Wbn4TatThPPt3n+s8/je01sxyRoeBRlBaWg1Wrgug5FUbKxsUJenOfCHw2neJ5PnqcY2Ni2hWFIKorzE4bTOdvbxwipYZgWzZrPrW94hqDhcu/VPicnpzzy2BbTccjO3j55nmEaOmmaMJQDTMOl1nbx/RZ5JjnYG/PqixOgpNlq4gU+g7MJeVagCYmuV+eLdENjbbNNu1fD9z0evbXO0+90mE1zXn35iDV/Bce3aXbb7G7vMhsOWSz6IAukBCFAaALH9EmzBCFAVqBpAsu2yOKUNM/YvHKVVq+OKwqi8eKzlSl/+Vs++lFV/Vd5y1IBgKIoivJv5Meee251fhy/r8qzdhnFX20U+tMNv9vJ89zd2TuivbJKvd2g3qyjuaBpFXEYoQmB73uMFiFlCVvXOrz0wgnhYUynU+PSVovtB6ccHZ5iWyZxVBDUTDQD0jQnWhQUecV0mhDUHBzHIlzERFFEt1unVq+xv9tnNptyafMS3W6PJM0oKg2haeR5wdJaG8PQqcoK363hBRq1usve9pAonFNV55l1TNMlCAKKMmExT7Edj9XVZRAGUXK+qC2KHNM0mc9DNB1sQ6eowDB0JpMU03SwrAIPQVGWDAbHyAo89/xdwrVr17j5xHU2tlo8vN9nNoqZTGdMpkPKIicqK7rdHoP+GbP5lF63i2e6SClI0pgoSvD9gNlsTK3mM5uFnPZPaXXrdLtd+mcj6vUGq6uraJpOt9dgNosYj6YsLbepqpyW51NWJWlWkqcpVVWQZQlVBZ7nk+YxzU4b17EZDi
cYpuD6I+u0Oh6UGv2zBf3RlMkiZTROaS/1ONg9YDSesry6jq6bRFFIu1Oj2+uAKEjTkFc+9zpVJXDsAE3oxEnBeNTHccbUA5/5OERoOtEiZjQ8T78vRYnn21y5egXHtbBdC8ezCDwbw65YWtlgdb1Nq13j7mv3OT4QJIsxyAIhJFQVyArT0ACJJs7faBiaidA0pCZY21zj1u1lNrpmtj2f/vP/49/4udH/6W/+/MVOOEX5MlIBgKIoivK/6T/+we+/GSXJU8UieiKO5q0izjdcp3mpt3nJ0y2doO5Ra7kIo2Lr2hKGrTEez0BKdCTzaUgWQZql1GoWz7zzEg/unaIbFbWmzepGk7Is0XWLsqwAiZQFaQxRlJBEBUVekucFQlSMhgtsy6Ld9UnikiRJuHz5EqZpkWY5tUaT07NT0ixBIqnVPDzP4fRkRP90wobbYTpOmI1CsjRmbX2T2WxOo97EsmxqjsdsNufy1TqP3FwmSVIm4/n5u4E4p9HwODoYES0ykkVBHFVEUYrQBKZlIQyTZtAgimOa7WWKLOPS1jXidMI3/aGvRhoak3GIqCqyKGN4dkYUhcRxiOt4RFFGGC3QNO28TkHgMRicsLq6wmg4wnNdBv1T6kGL6WRKVUk2N9aRUhBHGb7v4wc1hMhJ05SiKGk0AxaLOY1mjclkRlBzqTcckjjGMg3qjR5lUTGbLYjjnN2dE9qtJq1WnZWVNpquMR6HmKaBV4PuWpu8lESLGM/xuXr1KRzPoEKyWJTM5iGGJth/cMKD+w+Io4Q8LzEtHcuyqDUDOt0uXtBg//CE+XTO+qUlLFvHtpfQxAbhLKN/OuZgf5dP/tYncJ0AN/BJsow0iynKBMMy6LR72K7B299xi+1Wk72H2yxmA8oip6okZZkjKfFdBykr8jyjMEocv440LEwT2nWT+WQxWUTZx4QQ8qLnnKJ8OakAQFEURfn/60f+3He/J44WT2dx2JuNJk/KgkfjKO7mlWnZ8YwnH7tOq+MjK4kwNUqZUyVQFhVlKdkbDMmz843YsioZjmIqqeEHFp5nYxiCjc0lBv0xWZZhmBXjQUqz5TMczJmOU5DguQ5VUeG4FgiBaZmUhSCOMyzLIk0TVlYavH53gO10WFtbwrIdjo5P8Hzz/O55IyAKY/KsYPE/s/fnMZZleWLf9z3n7svb34s9MnKpzKy9urqrm7P2zEgzFFdLIKkBvOgfyfYYhCnLIAhD5B+N9kJAIAzZWghTsiWAC2SNCJqUaEGj4cxwerbunp5ea8vKrFxij3j7e3e/95zjP6JmTIj6kz0Fst4HCGQikRkRuBk/4PzO/S2rjKY2BL4PaILApdUOiFoeUeQx2goZDANcT+J5PlHosFot6XY7rBc5q2mBrsCyHJJVgjYWaZKCscjLHDeKQSs8J0bKCtsN+MKbt8myhoYGC4vFeMXF6TlpmpNlKUWVUZYlIAjCCM8JCf2YJFlRliWTyZSyKpgtJpR1ievaHB9f0O12sFyJauDq8pq9/R2yPKHfb/Px0zNacYfT05ObBWE4eL7D1laPsmyIWx7tTkxTK6aTBaqpCEMf27ZodyPiVsBynlMUFZZtaHXaKK04P58RhB472x0sx8J2LKpGk2eKq/MJ58cTTk9OybKbZM33A4QoaZqatFpTlDWz6ZIgiNjZOeT0xTNePH1Mrze4SaQ8hzAKaLVCvvRH3uH0eMzZ2QVX1+cUVYkwGikEZVly+vQFtSr59u/+Hvt7tyjyFKVuEkYEWI7EEjc/f5YQ1FWNcmu0gdFwm93tNk2WMT4Zf6s0xXc/5ZDb2Pih2yQAGxsbGxv/lF/8xa+4z55l3uR4+VDl2Y9Xebadp+s7ZV6/Hvjtg9H2ltXpdHFjn9VK0dQrut2QYl2QrCRhHFBVBsu2WS0LvvN7H1PkJe1eRG8Q0+t36Q08yrJGShvPd+l0Q+bTNbpxKIuMi9MVda04fbEiDF16fQhCl7KoiCKPoqhYLj5ZupXk9Hptwtij1fJYzhNq1XDnpX16/RZ7h220Mvzg25fkSYFl2cymKf1ByPPnJY5j8fY7LzObLcFohGjYPRjgBYaqLPADi7jts141Nw3IVxlZUlOVNQaLNMuQts06TSiLDKMUqqqJQoeybHjr859HyIbd/SGXZzfTdibTKR++/xGz+YRud4tKNyTpCs+18Hwfy3Z4+/PvUFcFX//mb3H//kNOT06oqoKz84Qf+/KPE/kxxy9OERIGgw7f+8775MWaXv8hRZmT5xWW5eL5NkEQkeUl3X7NaNRDCCir6mY1rtAY3RB4Hp1OjzRP8AOLVitkuUwIIx/dWGTrhsV8wnAnJohdBJpa1Tx/MWO1zgkCm/lsyXpVkK8LwqjN/uEhRiuW8wVVVRIGEXGrQxy3ELIGy+DFgnuv3yZLdtCNxrVC8rymrAvee+9j8vwHuJbDajXDCLCkfbNgTCkc26MVx5SloC4zjp99hGu7BKGHcQRVWaKEJGq1qIqSWjVgNBqF63oMhj22Ri1ElRQa+X3ie5NPO/42Nn7YNgnAxsbGxgZwM9nn+NGjL6TL2cP/93/6tVuNrjtKNUOKZtT243aZ53fCVnev1R7Jwc4AI0A4Nu++d8pkvGRrq83BrR0aXRNGOY4r8QKX6+sV63XB2ekZ+Qcpti34iZ98h3svvY42GVpp1usVrVYIwNnJnMk4JV03RLHFbDqmaXqEbQeV5jiOTbfXIssq/MDDdX1UA2la8f77xwwGPYQs6YU304GMMVxdLNnZ6zMeX7G/u4NtSVzP4sGrO1xejun0Wjh+TX8UIjFMr9ckSQ7SAqDV8fEDl7ppKPISIaDdDjhb5gShT1EVeEHMOluS5hXtuEu2XGHikL1bu+zd6rK900KVhpPja9aLBYv5gqvxBb1uD600trTpdAZgNGHQoTdssXc44Nd//Xd4/e3P04rbnJ2dEYct2v02w+EW77/7iK3tLQajDudnY1aLnLc+/wa9QYQluxR5CbpNkmS0Wm2ydEWr1UJrzWK2JssbWu0Yx7K5SK4I4puxolIqoiigLmpUo1C1IakL/MBme9RmsNVCaUWnEzGfJdy5O2I5z2kaiIKQsTOnaSmMESyWa5LlAoGg1x9gWTfPNMtXtLsthtsDRjsdelsRWV6QJxUXL6boecnxk1N6/SHMpsxnExrV0NQFCIGUEiEEdZNTNxYSG8uWCDTGKIriJrELXBuMoS5KLEvSKIPrhwRBj35/i1de3acdWUye5afSdn/5q1/9qv7UgnBj4w/JJgHY2NjY2ABgdnn8AFP9bF4Urxd5eStN15HAiqusGMz0KhgNttw006I7sFisCnAkRZ5QFpBXmvceX/LsdEGnG9Prt8AobBdMY6GahmQ9Y7aY0NQ1f//vXyGsmn/lT71DVZVYls3F2TVBELB/2Of42RzLcdEafD9ECoE0DhiB51lYEqqyoapqQCBti5bv0W57ICBu2QxGbaRQdNoOaV6wnBd4gUulC8KgyzDosloX3Dra4/B2GynBKIUf2Fwq8FyLdjumrhRCWCitCAOfLM2wbYluFFlekOWaRmlk0
zAYjZg/eU6yWjHoDxnt7PCzf+KPsLMXcfzxmO/97guePn5MliYYA1HUotsekuUFWzs7GFPz9OlTdrp9Xn39Hk+evCCKO9w+usuL5y/Y2t7FsyVKa54/PkGViv5BDyEMV5czRltD2nGL4bCN49o8/zinUTVhGDKdrgCLNCmoSoXSNbZjc305ocprmsZwdGfI2dkVEgn6pmcjDEOM1kStgChysSxxM6nJD5jP1ywWOZPLjKuLOU7g0Bt0uHW0DRgur2a44RC0Il0n+H5E3I0Yjrq4vsDzPK4u5/zKf/fbdIcBr7z+KnVV0ZDS3/K5Lw/47d/6Jov5AguJ49wcW4zRaG1ukgBpIQxoY0AYLGmjtUZrhTAC27KwpERrQ1M3WJaNbbtYbogbBBzstxG61mVdfdfvdL/3acbgxsYflk0CsLGxsbEBQNsSqR2HS1t3zqus2bOH2x1hZFvaVqxp7N3dA7QQ1EoRtiOWq5z33nsEQhC12rx094hGVaRpilES27GQApbrFRcXp8yXVyzSJUhIFin/+d/8u2Arfu6PfoFu10FgoRqNFwj2j7p88IMrBv0uju2RZzlF1uBHkqoCITT9Qcx0skZYYDuC3sBjMGxxfZlyeNTDcSR5WpBmDUmqaPSCuw/2qQqFQaONIs0kRkKn63F+ssbzLIq8xvNsPM/ClgJjQZ7mBH6EMhrXtrDdgn6/Q5IUnJ2mpGnKKi0R0sf3Qmwp+NEv/xGOHvTZ3vUwteLD7x/z0YcfkaYrtAKlavwgwHV9HC9gsDPkyeMPGA53ODw6ZDZNsW2P0SjgyeOPaLfb5IlLEEYURcZ0PKXb7mLZNlme0W61aMUxQeShdM31yYLpeMFg2OP6esFiPiYvVty6/TnG1wsevnqbZx9fcn25JAgcXnpwi+lsQd0ohsM2eVogpaSsa+L4Zltu1YCsHDzpsFyuUQiWi5IiL9ndb3N4e4Tr+5yfzxEIuv02q1VJrx/juRI/tJF2zXw+I0szep0ug1GHn/rZL3F5ec03fusHtNox9+8f8t3vfpumEfzMv/xTTCdTHn3whCIrgQatFUrd3PQbpW7GfYqbNwIGgxDiZmSqBml7KKXRukFrjZQucafL/uF99m8NEKZmvUjXQRSkxm6cTzsONzb+MGwSgI2NjY0NAIR0uq7dWC3PVXleV1Josbu7FbqRY2kpyXLNaKuHG3mUpUBYDm+9/RaL1RLHlkSRpN8f4blbN829LhhjcXpic3zsU1QVlTJoBYiGqmj4m3/z7+IHki9/+U2iKGS9znFcm+HwZm77eLxEiJvb3aopiO0WearxQ5umqen2ImxHMhq1sG2J40naHUWWVxTTmqq8ub0v8xpVW6TJFNe36LTbSCmIIo/ZeM16VbNaF8gUXBu2t+ObQ6OCdJ0jcREaMIaqqvEDl9l8Qbvb5vq6QmmNbVtgJAbD0b19fuSnX8J2DPky5/mjCR998JSqzDDaEIYtrq7OEdLC2bdZpSmN0Xiez+HhHtfX12By3v785/i1X/sdkvWCtz/3JTCCqqxZrdYYFLZ/M8pyNOjT7rh88N4Je4cPmc9yPvrgnKM7uyTrAgGcX5zw5S//KJPxAtd1mY6XJEnOwa0hd+7uYNkGpRp2toasFmtOT85xHJdur4trW7iuh3IMjaWxLMVou0WnG9HpxRhzc7uudU2aNjQ6ZDEvaXKNrktWq4TryzHX40swBimsm0VdtsAPfPrbfT73xZf4n7zyJb71zcd883fe4/U3XyVLcr72a7/JcHvIa2+8wXS85MXxM0RTInSNUiDQN30MUgMWCAlobFuCgEYpDAatNQhB3OrT6e6ghcILHfLUaNeSS+3YukjF5ly08Zmw+UHf2NjY2OArX/kpm7k6lCBtRwxc394SympX6dqxg54Y7g6xPJuoHWKkptGafXvAflJgOYI4cojjgDKvWa9Totgiin0m12tOniuyrKTRAo3BtQOiIEY1JXm65m//7f8vg/4273zhNrYjqKqG/jDi1tEW7333FN/zGQyHDEYhSt+M27QdyWKecfvuCGMUtgPj65vvxfEli2VJnijqMsf3XTzXRggIA4+6brBdgdaSumhAN1SlwnYktmPjSoGQUJSKNC3JU00UwuXlmjByKcsKW/rkqSJLUppKUeYFyyphMDrk3v27/Ml/7S129nocP7smXSjee/cZ5+fHhGELIV3A0Ov32T+8TV5WCCEYX13w1luvcXJ8wWIx5k/8iS/z0aNLPDdi96UdOq2Y9Tr9ZIxlSVkVdKoSrRqaWrJc1rS7PkVRoVTDy68dgjHkmcV8Nufhw1dxnAjfh539LsvFmtGwx9ZOiONJJldrOq2Ii7Mp0+mC3d0hVSE+2eMQkKUZ81nOzt6AILBxbJs8T6jrBK0lZdkQRhF1XVHlFXWlUBpanS5hGLGzu0OePSRJUmbTKdPxmHWy5HpyxZOnH/Pbv/k7HBxt8y//7E+zu3vAk4+e4wUOP/rjP85//9//Eo8evc/u7j7dXpvrqwvKKrvZJIxEaIUUEiHkzQIwDEKAFDdJgKpvdno5js9o+xaHd27R226hm5rlfK0HPausdL1OPT//dCNxY+MPxyYB2NjY2NjATu4f1qY+MqIMbKEi13c6YRzHrUHPmqYlaVHT64Zs3xrhBx6qqahKRSsHL3DxfI8sTWl3YkY7XZqmRJsGy3G5uBhzcvaMpEoJgi6HBy+jGiiLlGR9weX1nP/iv/iH9Lr/Okd3RjSqoNElu3sx3/itFUIKPF/Q7XtUlWK9KPB9D2kl5HmBUobewCMIJNK2qJRCYBO3LMLtiMU8xZKaNGlYLVIsadjaPWJ8Nefx+6cURc7WqEueFOzs9SiKgqIwFHmFrg260eS5JpsWtLoV2ztdsqRAa5uL8xmPPnzO9dU1WsD9lx/yxR97yJ07bYpVwsXzGfNJwsnJGWEYgxTo+ubfbm8fEIQ3t+fnF8f85Oe/RFUari+ueen+S6TZzYjRVhTjuh6L1YKdvSHXl1eEYYfRzhZvf+4B8+ma+XzJzs6Qdsdjd69L09wkL+mqoN32CB4eEkYhk/GK0XaHOHKJwiF1U2LbNicvZljS4ur8Aku4dDo9ZrMptuXiOh5Xl7Ob6U29Lp5vI6TFbL6m0/XwAx80OK5LWRqytGIwiGl3NZPxzcSlbi8iWSekSU5dD5lPhzx55PHe+wvAxrZAWjZnJxP+8//Xf8mto0O+8PaPsE4KlssZ73zhR/nmN36T85PnuP7NBuRSZNRNg+P4CEArjUCjqRFCYf9+b4A2GCSW5bC1t8ed+3fo9HwG/Ra/9Wvf5M4f/ZyyHXK3caJYWG1g+ulG48bGD98mAdjY2NjYoCrrV+06PxK6aVmi3o1brX7/cMtxeyFDP2S1LBlud7DcTybwpAXGJDTKpqkaXNfDj9rUSuPYgiCOqKuGs5NTvveDx0yWc4wR3HvpTcJoh+liiu/HSDfCjZZ8/HzO/+fvf4M/++d+jG7PwXUsOr2IKAooy5JuP0BKgWPbOK6kURXdfkxdg2UZWu0A33eoG8Vy2VDkMwaDNpYl6PYiricJdV2j
aoW2DForfN8hTSumkymtVoQQmjDwUFoBkixpmF2nDIctri+XdHstTCMo84qqapjPK07PpsxXa9rdIUWVsXPQod+XLCYrVvOKZx+dk6U1Vd3wxptv8NFHHxH6EYF/c2hVTU270+bw8Ij1MuPyYsyde4c8eO2QZ08u6HW6VHlD3LPY2uljGsPp84wf+fFX6G3FzMYJWVbi2i5hbLO712FylbBc5qxWCa++vodj2xR5A1IxGG6B0UyvE8rmZinX+GpMp91jNl0AsFrNSdOSwWDA3sGAjx6dEPgeWkGyTihVRVbW9PsxUQRZWiKlxCCxbYs4DjDcNNvqGh6//4zx9Zjz8yuSZEWn07lpwrU9+v1tLi9PKMsSITS2uPkcZy/Oubz4+zx8+BpC+kzGK8IwJlln6LpkXRYooz55AyBuvpYqMVqBkLRbbdANZVlggOFon05vxJ0HdwliH8cR/J2/+V/Sb/Vot72mKdKkLNU3ps3y6lMMw42NPzSbBGBjY2NjA6lRolF3fUvsS2FGqk69Ismw4xg3kuwf9vBCgyUUi/mSqizIszXzaUZVaHZveeweHVAWFcY0SEtSlUsevfeCy4sLGqOIoh5b27eZzjOqpsa2bMJ2j6gzpFhN+e1vfJs79/f5yZ94SBz5pFSEYYyUgt4wAkDpiuGoRZIUbO92WSwyHEeSJgUYibAEWmlWixzPcwlDhzDwCEMH32+hK81g1EbVMJssMSiCICZJawaDiDyrOX5xTLfbYbQ9ZLGY4fk2Wml6/RhbWMyvExA2V+cTFoslShl2dra5+3DEgwd9LEuQrCvG1ynrdUIYtbl79z6zxYqd3X3qUnN1ec5bX3iN2XJJEIWUZcPz5+d4ns3ewTZFWTMY9liME7b3usQDj8BzePbolL29HY7uDHACh8WsJI4i4pbD4dEAaQmMSPjwvY957a0jlNI8+uA5w1Gb+w/28EOL2Tjh/HzGOkmoa8VgMGAymbC3t8OjD58TRx637x7QNIqiLLj90pBW1GY2SRBSE3g+o2Gb7sDHoFnMCxzXxfM80nVJU2uU0ghZsc5KGiGoTYPtONS14OpyilKKRjcIYcAohCXRGkB/MrnHQWvNuz/4Pnv7h+zu7bFcTmm0wHNstKkxxiClhcAQ+AFKOUghGI128WyPyfQKIQ0IzfbuEftHh0jbpdVq87Vf+xXe/egRf+6P/1GENk1dqCVS/MZXv/qfZJ9qIG5s/CHZJAAbGxsbGwDfFbb11DT1vcBvb3tB6OZZBcsMtx3SKE2zLtHaxvNc6iIjXeVEXohrNxilKYqbxU9VmeOFHrPZnMl0RakywDAY7KCwybIFRlUgHRptCAObyfqS9XrCL/3SP+btt4/Y2+9xdrJGG82du3u4nsBxJQO/z+XFnK1WjBcI7ERgjGQ2zXAciyJrCEKPBw92WKxyhIR2x2c6W9NqBUytNVWjOD29QlUwnYyRUuIXHoKI5fpmpvxslhBEMeAwnSZYlk2aV2hlULWmbkrm8xRjBH4geOvztxntxFgWLBYZZa64uJiwuzdEaZvFeoUf2kRBl+Nnx2ituDgfc+f+HdI0I44j2m2HdtdH1RXnp2uyrMLzLe6/tUuWV1yeLXj80QtGwyGzccpor0uRpGilaPfaZEVNmpbMZgWHRyPq2nBxtuTHfvIhQWgzuUrIM8nZ+YyPn56yvT1kf3eXi8sr+v0uL55f0Wp1iGKX2XLB9k4f13GpqobFIuPs7JpuN8BoQ5ZkxF2Huq5xXAfXc3FdSRiEGGGznCf0h222diXvfOkeUlr87tcf8/Wvf4er8zF5kpGsxjSqQgqB7QagaxrVIBA3iQEWjmNxfnbCYjnj7p0HRHHE5cUZUli4to3AYEuXVtwjiloMhiN003B9dUWvv0VZ5niBw87eDnlZcGe/z/Mnj/nO976HjeBgd4Ar9WIt+F4q3c3t/8ZnxiYB2NjY2NiA0e3revz+d9yq+VmMFQs3xPFt1mmNXOR0pEEZwcnxHC9wCXwHz7aplcb1JY3KKbIUzw/ww4Cmrnn86JyPn55SqU8aMO2Auq7QOkfoGktKpBTURYpuCgSC09Mr0kwTxhFFqYnikP4gpN326fYjxtcJRd5g24JW20Nrw3pZ0mrF+JHL6fMZ0obDwxbddcjV5ZKo5bC332E2Kxhu9VgvUpq64dbtIR/6LmVRYEzDarUmbrdJkpw49hFIBoM2y2VCkVdMxwUHhwFZsaYsG5Q2dLoddg52Obo7JMsKikyzXBfY0iaKQqpSMr9eErVc7t6+x3vff44fBhg8Dm7vEAQuRVZR1zd9DeuloqlLsqxEG8X91/ZxPMHk6ZIyrXjjjZcZDCOMgW/8xgcsZhm7uwNWi5L1OqMVh7i2RX9vm9UyQwqLxx9eEgQBliWYTRd885sf4HkhlmVTqZSDWzvUtaKua7zg5i2LH8LuQY/J5ZrpJOHyYkG6zmi1Y6qmRhsHVUO7HWE5AjAEvk0UhziuRxw7rJcZZSlZVivmizWOZ/EjP/FFzk8mvHj2BPsc6rrBwqbMK9b54maMpzYompuxnggcyyJPE37w7u+xv3+Lg9t3uL64xHdcorBFrzdACMnW9j7GKC4mN9uSo2iI64fcf3iHWil2tvqkqxm//I9+ibRI6QVdfMenyeuuLuqtyDGv/+Iv/uJv//zP/7z6dINxY+OHb5MAbGxsbGxgzy7u1EXzcpqsPM/28GwL6cXUacNkVuJGAdKB+aKg1bicPr+m14todSLabky/PcANAtJiTSRjhIFOq0W730e+uDnIFfmCJlvQ5Gscx8V2JNKSrOZjsnKJtGx+5md+mvsP7lDXhsWy5NbdEXcfbBO1HCxHooUmiG2CwMOxbUbbPaQ1p9uL8AIfaTkspisENq3YYuFbWI6F6zsU+ZJ0VVHkOVlaUdeK+w9vM7lMEEIznS3Y2tqhFbdoVEGWlwwGEY5r4fkR82nCaDtESPB8j14/wgjBg1cOsWwIY4eiVAhp0TQNtuPx4vgSL7B4/eEDTl9MePW1e/S3fVpRwLPnl2RZhetZqFrS6/ncf2WPOI5YLpd4ns1gK+bF8wmjQZfwwGa03WKdlFxfzfA9mwcP96lrRZrmDLdaSNslbGmkFCxXa5aLhLsv7SJkRbIuOH5+xcHeLaStuH13yP6tPhena2zLxbYH1FWF42n2Dnb43neeMr5coZXEtiTDUR/LtojaIULYnDyfE8QWu/sdbNuiLgtmsxQhLTqdENuVrFclq1WNUh5GG5IkIytLvDDi1dfeoS4bXnz8hKqY3sz1RwBgDBjT3Mz01yCFwAAnx88JohavPHyd0G+RpSWDwRZB7BGENt12izRZk2cJcbuNF0coKRhs9QiCkL/7i/+QyWKOFIJep0dvtMPpWdpJZumf8VvOQVH+1t/9a/+nv/iPkiY+++pXv1p9mjG5sfHDtEkANjY2Njagyd+xm/oV14t21knF8sU53b1d4naX6bLgve9fs73fwvN9zs5mdDohGsnzZ2Pk8QI/HPOFH32V3cMtLOmgdc2D1454881XefHsmIvrNXm6Yjk7R9cVAkGezLC
kwFQJRkOr1eVf//l/icODLs+enSFkw5vvvMTu3gDbcUnSNbfvHLCzneI5Do7rUCkNEkajHtPZgm6/RZ6XaDRV02DbNlobMJoodEkXDVlWMZ8veP7UYjBsUzcLpCWwLY/5fMZwq0OaWriuxLIMQtYMR0NWixSQhO0Yx7FolCZq2WztBBS55upyjh9GtNoucWiTLJYYAztbI54/vSIIYvzYYp3W1E1Dtk6ZL3MOb28z6IQIYbAsRdySdLsdzk4WPHt0yTqp6XQdej2f8WRNbxTzYw9f4ep0xvnJkmdPL9ja7THc6nBxcclg2OPZ40sm4zU7u0PyXPH+e88wWtDr9VjM57z9zm1efW2P6bSgLDUX02suzq7Y3Rvw4OUDvvOtj5DS5uhwm0ePXuD5PlvbQxANeVowuSpptUMC4zIbF4SRR13XuL6NG0BZKlzXotXzMdJmPFlxdb3kyZMrvECwvbPDar5iOhsTxTGNamiMoKoLlK7BNNSquckEuGnstYSD7QgEBmMEjudzMBihEewd9dk7jGlyyXe+0xDHnZstwSj2dkZoKn7rN3+TF8cvcKXEkoIHrxwxvN3l+YeKSVJ13cL8ZE/VQatv3e/61df/xt/4G//NL/zCL9SfdmhubPwwbBKAjY2NjQ20zaVt0dJCOsPdXdJMMx+nhAR4vsv4as3zJzXttkfTaC6v17Rij1bkE8U+nueSrRYsZwLp2NiOQ6fb5Y/82EO+8VvfYbG8pshz1skUz40RGFSdgrTQVc3n3vwcf/xP/zRvvvUyTZNTlhXvfPFltraH+H6INjDyA1bzKU4U4gcOy3WKETajrS3KIgNcEA3bu220UURakqU1GIUtbZpKIS0BBsAwGU8x2kJakjAMWa1SZrOMl+532T/sfrI1VuB5PkIYwtgnjByU0Qih6A0i2j2PLC0xRlKVgnZb4vs2adJQViW7e0OKoqA/6LOcrygKyTotOLq9zStv3OHqaoWQNr/3e484Ox3z0oNDbt/dRdU1vu+yXhboxrD3cIfhdsyt2xaNgqdPznj2eExdNoShj6obHn9wgmosLJMjDRzd2caSLlop7r10i7JU1GXF7n4frS1ePF8xvl5ycjJnMZtx69Yu+wcD6qamP+izXie8eHHBYNhha2fI6cmY9TqjURWD/pDZbEmjcg5ubaGUJvA9/NhGWpJ2N6DT6+H7EWlacLuq2T+b0uu1WCwLHn/4EXlaY7SFEZqiKrEdC89rU9cVWZ4hMSANQto3s/2FhWcF7O7v0447rBdzmqrg5Tdus7Ub4TgO1+drPNelWC0YX55z+94R44srfvUf/yrn58fYDiAkvt3i9TdeRQgLZVwmqxopdDBbF2/v6Vbd7yFV9fQa+M1PNTA3Nn5INgnAxsbGxmfcV/7t//Ut06QvF5Wahp5nySDAtiWxETSW5Oo8IVlXaJOi6pjFMmW9KnF9SacfMxx2CCKHrFGsc8Xd+wfY0qOpa37iy69RZX+G//g/zHjy8WOKLMOxbTRQVhm+2ybw2/z0T7/D//R/9rNUZUOWF7zy8ivEcQeMxLIEVbGmyFakqyWrZcre/g6+18ILu9iuizIT2o5kPp3gBS5pUrKzPWI5W5Guc3q9LstuxmJek2XFTfOqHaMUuM7NlKB7L+3T6UaMtmN292McWzIZZ7iujR86SCkIYhsMZElF3PaxLEGaloShy2jUIklyzs+XGG3hBR7zVYpjS1aLJdeXc06Oz0EKTNXQ7rWQlsb3JYdHh/S3tuh2AmzLpso0ja05OOwSBA6GAqV8rs9nnJ7MaLUCHryyR1mWlHlDVSlePD9je7vLy6+NEGKL8VVCkTVcX66QlotSOcNhn9UqJ000J8fP+fDDx/heRLfbYjhqMRi2yMuSJE2xbY+t3RG2I3hxfEmeVbQ7IUE4oCwaqsqwXhk+eO+EwaBLFLtsyZi4HVKVNeskIStKmsbgOZK794YMBhEXlzM+94UDxhcZz55dk6xSsjxnNrtmmc6om5txnsZoLNtBChuEAdPQjgN6UYc6zZBNiYtDvxNjSagaBZYkTVfkyZput83x8XM+/rUPyYoM17Zu3gS1Bjx88Ba7O9tkyxJLWFRVyXg2oWmK+PTi6ovtWHR39gbyL/1v/s3xX/t//GePPu0Y3dj4Z22TAGxsbGx8hv21v/i/iMqqels19StC6QfrZengrNBhxM7dLcraMJmVKA3LxZoibwjjCNepoZEUaUPZ1mRFyeVlxuNH57z3/efcfWmHL/6RB4RBj5/7Uz/C7tGQv/uL/4jf/trvMp5eY4RCIOkM2/zxP/aT/Mk/+SXqMiPPFHGrTxiGCCGpipS6qtFNhTaKre0dfL9gtc65fe82jh+hlGG1WuK6Lke377JYTWi1bVSV0+2FzCYZUtRsb8csphUCheM4bO9t0+5GCEq2tzvUTcPD13Zod0OKPKPTjpCuTdh2cD2HMiswaBxXEkQB0+madtuj2wuoK4vJZEqyarAdn9V6TZY1rNYppyenXJ2dfHKLLQHBhx+8z9b2kJfu38YLIpbLlFY7pg48JukKg+HsLGM5iDi806fj+SwXOddXSyzLQRvNbLxEKU2aVJy8GLOz3+aV12+BqMnSEs/1efHsguOTK1xHcPelfYwGzw356IOPmc1vauH39ncYDjrUjeb50zFlVbG102WZVdRNg+3adLsxgV8zG0/59re+g+/H9PtDeoOYrZ0BQhqMsVCNhetaWBZIc/Osfc8BCVfXY8q8Iggsilxj2Yr+MKbTDXH8t7i8mDC5umA6PSdNl6R5BvpmpKwQAiklURhTlRmqLrAtSbu1x8X5nLu9HQb9mN/99R9wdfwMx3O4uL5gurhGqRrXkaAlluPTH+zwxR95m/miJFkuKSpB2IkI8pTVsiBZV62yUHeMkIUS7vjP//k//x/+9b/+15NPO1Y3Nv5Z2iQAGxsbG591UnSMboTRpht6gUMDZQbj65sSm7LMiVshloSLizEg8b2I+WJKkq9ZLea89rkHWG0P1/VJVzW/+1vPmVyvuXV7wP6tPV554z7/+7u3+bk/9i/xzd95j+vLKxbrMV9851X+9J/6McJQUDWS4c4OvhegdUVdNVjSplE1XtDCD1soBUeDA7KiImz1kJbFOkkZjXapKoVtS4wwSAl5InAdD9dOiQIPSc1oK+LNz93jg/fPiDs+XghHR/uMtlogYWu7S6fTJU0XYDSjLYdOt0IrxdqFsqww2lBVBY4l0UpwdZZzfj6nqiTJsuBy/Iznpy+YLxfM51OyLKXRCjBIJI51s0n3xeVz3v3wXcIgwPN8Dnb2efmVO7x0/xZaCKLY4+Jkiu0IVkuLvb0+g2GH5apC0xC3I1pxRJpljLZ6NE3D08dXfPToCVXZ0Gp1cT2f+/dvsbvfYTbNePb0AqM0/UEXKR0aXRJHAappuDxfEUQhUHJ+rGi1Yg5uDZhNE66vL1ktcvJszTtf+hxVBY5jcev2NleXU4pCkfslRZHTNF16A4/BqI3UkJQli1lGUdaEkU9e5Jy8WJKsaoRt8e1vfYfrqwtCPybLcnwvIgxa+OmCqi
xomgZtbpbNaa1I1gtA0Qpb5FlFRwh8z0EiMI1FlqVUSUZZNzfLxSwbrcFxPLZ37vLmW6/S6/g8eXrN+GLNKk0Yz84xTUmZZlgWRH4YlXmwi7I/50nvHvC9TzFCNzb+mdskABsbGxufYX/p//q3s6/82//Wd5XSbwgIjZTMFmvi3QApLM4u1izmBVJKXNthf38XpRq8wKIz2qMsa549ec75+ZQf/anP8f67x7z62i3WqxYnL64pMsP11Zq6gcO79/jxn3mHL//Mj1KVFXk+pywX9DodXCfE9TtoNFqVICzCIKCSEiEFcdRHWhaW7WKQeLFBSuem9jzQ1JVNGHoYU+O6NyUdVVGwtbVNFDgk6wV+4DLY8ji8fQvHFShtg9VwcGuL/aMRCAvLsrAsF7spcCwbx22wihStGqoqxxJQFDWqEWhl8ej9c66vMoqiplQ5Hz99yun5Gat0iUF/8pQFnwy3QaMpVQWfDJrMq4z5+mbKzeX1FcenL3j04S3u3LvDnbs7CN1QZjXb2z1mk5TFYs1rbz5ga7uDlDZCgKGgyCueP53w5MMpRWoBFr4f0GpHSKn58L1jBDa+e1NOo5SmMQXr1YqXXz5iNltS1TX3b3Xp9SOaSmE7FlXVAIbPff5l0uSmdCqMXfzAY73MuL5asFyU5HmGajT9QRuBJEt8Jtc5ipveia1Ri9t3t1isc9ZnM+4/3Kbb87m8SHDtL3B+fM7l+RV5lqG0YrGcUZQJUv7+VCCNFBZVVeFaEttxkNIiqzKCKKSubxq+DRb1J0vGpHXz7LWGVmtAq73Fg5fv84UvvML4Ysl8knB5fUGeJ1TFmtXqGlVXGCFIbN/2fccfJumg1Wn/6Fe+8pUPNlOBNv5FskkANjY2Nj7bjNJa6FpvO7bX7e9t0XZ8jOOSNwYpbLq9iCgKmFzPcT0fS3osk5RBK6bVafPaW/d478MTat3w5/6Nn+XicsrdV4+IOxGrxYyd3SHHL66xXZtbt28jbQ8/cLCdDi06OJaDEAKNwTQlRhXk2QK8CD9oIW0bIxR5niFtD8fxaFQNwsK2XFzHQgqBVgrQGKWI45tJPel6jh9I8rJGWoLDO12MNvzET3W5ur7Cdh38yCJu9zHGRmkFRmOMi+V4KF1iORVKKYSxybOMyTjl7GTOxfmCyfWK+WLJYr3k/PqYZbL85LGKTz7MJ0/5n3zkv//nn/wqBApIq4STq5zxfMYHHz/m7TdeZW+0z4cfPGW9znjniy/x0oM9kjTj+MWK3igmitocP72krhRZWuIHHreOdvEDh7ppePL4OYvZkk6nx/b2EGNgvcjIi5wsTekPuqxWKatlikFycTpnNlmhVMOto10QAstyGF/NEJZmMIzY2e0zm+WkSU5V1bTbEbbtML5esFisaXd8IEIpTXcrZmuri+sJqqbCcQS3j7YJXJvxJOXibIFRmsUqZbZMqWvFYjUhzeafJE0CISSBHwISy7ZxHBuMYJWmdLaHeLHNaKeN40Gn18d2Q/I0wbYFEknY6tEd7rK/f5vX37jN+HLK5Cpne2tAkieMPzqhyJa4jo+yJGVVo40SyXrtJ8na7xbdh+nMPQQ+/uGH48bGH45NArCxsbHxGfYX/sJf8Kx6+UUh5NHu/p7vd7voyMYNApA21+Nzqkpj2yVaSZSoGGz38EOH5bJAGI0fOfzcH3+HRpV4ocNPfPlnWK3WHB7dYbWYcHVxwdZeTK/XQwJKFSh1s+jJmIZKVxijqMqUYr3AsSVK5czH5wRhj7jVphYSpcHWCoFBmxqQCNsmTRMc26aqC7Sq0apCNw6uG9L4FVXacOvwDpUqUUajtUBpw8s7g5skwg0RMgCjsIQFRuEHLTQNVVVijMCSDnlecT3Oefp4zqP3PybPcs7Oz5gsZ6zzNdqom0VWmE/GV4p/4kmbf+rZ/34aYIxBfHLYbYwiKRKSYs2vfG3Knf27vP7q68xnCR99dMn1ZMVoK6LVDpiPU0xjsbXVZT5NmI8zwtAljh2ytKEqDUVekGUFcavh+PiUuqxJshWL1ZLAjXBdl7OzGUWRk2cpy8WCnd0hW9tDXjwfUxQVQRATBA7DQYRtS97/wQl1BXWtEAKqOkephrjl0x90sW2J68Lu4QjpCHSjEJ5DnubYjkUUeYzHc84uUnqjLqNtiR8HtLp9puMpz56BRpEXOY1q8FwbIQV1XeF5HnEUg7CIYodX3rpHfxjSjh2UdqiaGse1kZnGwsWyPaKoS6c1YG+vT6sV8oNHx0jbpS5LXC+iPzygyGOydImuwHcsJAKEsJI07Wf5utPuR//0f+DGxj/HNgnAxsbGxmdYv9831tViV0qvtZivTS+K8eKIi9M1fhgy2op5/nyMH8YMtwdcn0+ZzhK0UTx87RZXkzFRK+Tw9j7g8vzjM0ajO+zsHlI3NaOtQ27feY0sXYGp0KbBoLAth6bOMfrmJjoMAsp0Tbpa4PsOnu8S+AFVkTKvctrdEbbjsZiN6XX7JOkCaTk4loNtS4oixbYtbMejUArDzRsBzwtBq5vSHhXhej5NoyirklbnZoOs49yMNi2bHLQGI0BodFPjuRarZcJ0OqMsFE8fT/nedz7i7Pzk5vZ4fo3SzR88z5tG30+SAD75vfn9s6P45KB/0w1gjP6DvyOEuPm6Rt9MvEFQ65rHJ49YpSveePVzcAJK7zGbpBwcdhmNYk6fXRO1Anq9FnfueVyeT6gqg+MIvKCm1XY5OHjAeLwkSxVCWNh2gO839HoDoiiirgx1VeP7IQKb2XTNcpmwmC2wHZfhaIBjO5y9OKMoGw5u7WKEJghdDrdiVKOpSs3VxYo0SVktG8bjOY8fX+CGPsOtNoOBTxQ4IB2ep2vStELYLllRIAy8OH5BmiYYUzEcjoiCiDRdMluM8dyAqqwZjLa5//JDuq02nb7HrTs7TGYJR7cGBIHH+UWGVhWr1QIpLRzbx3FDwiDizTfucfelLV48m6CMjRSQpjll1mC0oGkaLNumF/apipzVai7yMreNbizfFuOu503/0IJyY+MPwSYB2NjY2PiM+mt/8S9Gyfz6nmV7d9zYGYZxLEttiH2HzqDNt3/vOcNhn4ODEe9/cMKDV2/x8I0jPnp0Qn844M7LA14OjqgV2F7A7s5ttrb3Wa9nRGEHL4gQgO/GBH7IZHZCowy+Z1HlGUJXpOs5F+dnbG3v4LkOruehtKKsFBjodHsI6d18LiHoDfrURUaRrWl1etR1RWMUgR+QpWuEbaFRVHWG68aUVYHSCo1G2i6W4xCEXXytsW2PmxrxCtu1MNKhzHPqssTokrrOuDi5ZDHPuL5KOP54wnd+7yMuLk+5nJyTVxna6P/BU/0nb/4Nxkgc20MKSaMrpDFYtkvYvUtdleTpBNsGR7oo1WBMRdMUNyVHQqAFXM4uWHxzwc5wn2Sd8PrLD5hcZbRin939AcbYTK4T8jyjLAyzac71eIrSOWA4fnGM67SJWyF5WmBbFq7jkuclVV0ShW0QEm0MRleUacVyucT3AvzQYzCIkZZNtjIMRj1aHR/daIzQzKZr5rM1vV6XTjckz
WuWs4JOJ2R7t81op0WRK54+mqKVJs0a1klNUdZkWcne0QhLSj589wXLxZTVcgoYorBNXuR021tsjXbxHI9ev8Xe3h62Da+8totBsx92AI024LgB8+kSjEFaDrbrEQQRt45ucfvOkKKsmM1THC/EqBpHSoosIUuWLBcTimqNZVnYUiClwXbQnU448zyHpqpiYMnGxr8gNgnAxsbGxmdUVdextOQ9pWShhNTacoUlbV48m9Lqtrn70gEfPTrl/sN9fuwn3+DRe8f02iF37mxRKUOru83tO/cJ2300Amk5DLeGbHNIWdef1OU3zJbnpMkM17WRGMp8zXoxpS5S1ssFri1ZL6Zktk1T13R7La7H1/iuTxS3aBqD40Voc5MUaG2Iohhp+ThuhKpTqqZBo7C0xpaCoqzxfUkQtklVRVWWuJZEG4ll+wgBQtzUkteqoqlytFYoVVAUK7J0wdmzC772ax9wfrHk4uqKdFWwXE4Zz8/Jy+wPbvn/SUKIm+p/IbGckFZnm3bc4friYxqTY2EDHvHWAxy/iyNLymyBaWrqck2VLSmLJapI0aahbAoMiqLKeXHxjOniitlsys7WkCQ5pNtvUdc1e7sjPN9jOpkxmy9YrRZUpWI2nTC5vsL3fdrdHp1Oh3a7xd7+LstlSrcXs1wkOI6F41jkeUlRpISBS3/Y4+VX7rB7MCRLK5aei7QMWVqiak2aFbiuR+B3uDhfMJutcF2fuBXi+TZC2qRpg+86HN4e0OtFrNclbtAiSdfUVYHnB3z7W8/YHm3jWS6O8FAqISty6rrkzTfe5mB/j9PTE97+wkus1iWtXoRwLI6fjtnajVkuFVE3okxzzl48RwqFbTtI6TDa3uXtd15Ba3jxdE5TC+JAUpaSqiqRUt68EapLalWBsBFaIKRF5Hply3Myx7VPRy+/fP2HHqAbGz9EmwRgY2Nj4zPk//mX/lKr6oZHf/6vfPXdv/If/AdXX/nKV/4bcX32wPd9z7Vd4cceg4MuZ5crqqrm5dcPub5ccLfb5U/+az+BMg1xp0WjJVEnxot7+GEH1/FuSlqMQCOwHSjznNXqgixd4bmSukpYLeY4tqTMVxit6PY6VGXBOlnjGZ8sy+gN+3T7W3heiDY2AgMojK7wHI9SA9LFmJteANf1ULVCOC5pukQIizCIqesC27ZRTUMYxmDZOI6PEBZaFUipQIOFplYlRbJGUrOej/m9bz3mH/69X+f99x+xzjOU1lhSonRNUWf/I0d/PplVL5BYuF5M2N4laA+pixVN02BLDyFdaulSCY3tC+Koy/aog+1arFcJi+mYMl2higxVZaxWY+oqoTEl2mjW2Zpvvft7BI6HIzz63T5HR7d4+cFDtKnwAw8pDaPRgMU8YzabIWyJsTRCaALfp6wKptOMnZ09+v0eWhtefeMOjm3z6NEJVdljf3+EkIIoDrEsm/VqwXvvPUbpmv5wSCsO2dru0zSGxTLl6mqBVhopKzxPYtkOk8kCZxEQxi57hy2kC3tHLVrtNq43YL0u+c63nhNEHuk6pyhLbMvBcTqUtebO7UNefe1lVJ3xzjv3GQxaDHfbBJFDux9yz93H9QR+ZONZPl/75V9hfH2KbdvY0iaKAh6+cgc/EDRlQ7rOkUIgJbTbwc2uCWlRNxWGiixdoJsCC0Enjpvtfm8Wx+FJ5Lu/9gu/8Av1H3Kobmz8UG0SgI2NjY3PkJMwTPfRvb/17/3lz/8b/4e/+m1AV7r5cDabnvS13smaiJ2ez/7tNvN5w/bOkC/+yCt88N4TVumSB6+9SdRtEbUGuG6IbkArhREN0rKpmpJVMsXoirrMaKoCYRrWyzV1XVAWCWndYNsSy7IpywppS2zXJYxjhG0hLJ/BaAfXCzCqoSyyT8pbVpS2jx+EKKNw7IA8S24qblRNWaZEYQvVGLJkSVWVhGGAFAKlGnw/QkpJVWY0usCYBqE1WhfU5ZrLFydcX874+u885tf+8e/y5NkzyjKhMg0gkH9Q2//7E37+/+U/v1/HL4SF78eErW38Vh+EZrW8wrIENj5Oq0sQ7+F5bdq+x3p8wZPnP2C0s82dh2/Rat2mzHJWixVltsTyQrJkQra+plYVyigapdCuR9zrE8ZtFsmax8+eEAUxcdxhvV5RVQVlWbJ3sMPR7UNOjk+YL+YIXKQlMKZhsVwQxj4vPdhHSE1VKNrtEMsOyYuCsqyoS8HTR1fUpuCnf+5LjLYikBaXZ3OSteLi9IrlYklZFty5e0CrFVJXGssSKF3j+ZLRVgthCepGkOUNYWQwRhDFET/x5bdIkpTnz6/54N0u5ydjZpMlrVabfr9Pnt00dh8cDmh0ReD67B70AY2QFq1WzHpZ8A/+wW/wrd/5HlBi2zaWdInbEa++dsT1eM3V6YqyqOl0A3zPvlmglmVYlqTf7ZPnK0ydYxyJRDPs95K9ve2ncb//gYs7/hRCdWPjh2qTAGxsbGx8hnz1q1/VX/nKV37nnqX/V3/n3/+q9+1HL9a+0l+0pNNzvADp+6TrhtBSrOcL8iQhaj3k1bce4votxuMxBkHodxD2TVOrkIK8KnBcj7xckWdzpL6Z559nKVm6Jl0vP6mttsjTgih0uBxPsByH7d0B3d4IKR1qZajqikgIqjLDtlxsx8FzPXzfI1ktydI1VV0Qxi1s26YuGzzHpi5r1tWcVruDFCCMIs8SbNvBDSRCWghhUdYrmibDMg3ZesF8Nub8eMpvfe09ppOcr3/je5yPTyiaHPMH0zwN2ph/oqnXABIJgMGxHSzbwZibUighJKrJMarCkQbb92mqhrKBwAlZzy/wtE/k21iyYTk55dIPObj3kHDQwQiBH3pYjovtBDi2z2x6jO3Y3L//Kj/+41/i6GiEYwuSdUG6zCjSAqMERhvmdY2QEmMERV7i+wEvvTTCdW0Obm3TbofMZiuaCibjJb1eCEJw7+E2vu8SBA5VrZheremNQqKWR9zyUabGlQ37hy2a2nDvpe5Nec2zCUJaNEqxmKeEccRg1GHvoIcfeJyfzzk7niGE4Oz5nCIvWS4rnn18yWQ6JY4thsMud+9v8/DV2yznKbPrJUo3dNoR3b5Pq+8Qt0KEarg4nyOkS7Jo+NX/7lv86i99naZegFAgwHZdXn3lVepKM75YslysiVsxW1tDfN8hSQrMqiRZLzg/O6XIFuimAWq6nXazt9077w27H3qB/3iYF7NPLWA3Nn5INgnAxsbGxmfMV7/61eY/+spX/tZe2/o/Ptjfvrx4fvmTceTdEY6N37K5nmQsVjWjQZf1MuH46SnvfPnHGG7fZkfD5dkxk/ElewcBCBuMjWU7KFUhTI0tYJ0uGI9nPHl8TOgb6kpxsD/CthXCkiAdRjtb2JYH3NRsV1WN57qk64TAv9mOq4UEA/PlFa24j+24rJYTjNKUWcFisWR7bxu73SGIOzRNTZqnGNUQxRF5Wd3UdyNxwxpMSl2lLGYTzl+ckK/XPP/4mnffu+Ljj094+uIxq/WSBo2wLAQCoS30zZaCP5jcA2BJG4HEsg1x3EYIi6apEULg2AZTZ9Rl
bDtKCo0KXB4cNDsjzh8SceRZfLLfpxnNBUDaDTa7uUVYEQIHVJWZf4Xot5XSPE8u6w2+0zX0xJ0hgaMA1rmSGRNJimTZVnqCrn2y+8yoUrl/nIJz7E6emUs8MxnVaLuqqYTMbUdY3jOEgpEUIiDQtp6EipgRJYhk5RQKMaiqJA1wVB4CBlSVm19aNDW0/iqDfaE48DL7+f535l5YNi9QZgZWVl5T3gWu1XPNcadX0PwzQRmiIKF9hdi498/Gn+xT/7eSbjMybjYxaLGVmeUxYNum6xttbFNHTqUmM+y5hMln+v6mXIVV4W9NdarG977Oz2aZqSs5Njjo+OMQ0bQzexTIfNjS0uXLjAs88+zYWLG7TbzrLXXZXkeYFh6BR5ieM41E1NELTxW208L8D3fQaDDrouKPJl2JjvOjRVQeB7VGlOFkZIoVGXiuFJzM13TkiTAssyME0TgaCuQDUalmXQ6Tq0Ojatrs3Tz50nClOqQrG5tYFtm+j6sp3FD1p87JOPErRNjveGxIuEditAVTVoUKuGnXMX2Vhb4/UXX6JpCqAhjkMW8xlZllAWGYv5sp1E1wFVU+Y5URhSlRVVVVMVJWmavzsroIFq0DUN2zQ5PjrEMQ3qskQ0kCU53U6Lqi7YXNtk//4hRV5x+52HBN02buBgWRZnxyNcx8bQFf1uB63ROD4ZMtjo0WgF89mYOAwp85rx2ZRwHiINm3MXLlJTc/HSRQKvhW2b3H37HnEYcu7KJmVVYpg2i3hOWWWUZckrL7/CO++8w/b2FoZpcHJ6xnA4RhMaUTynURWPXH0c227x2huvcDo8QUqJJiVN0zAeTnjxm99kc2eN8+d3sE0TN3CBirouAUUUzYmTBUWe0TQ17VYXISRCwGIxJfDauLaP63oUZUoUzlBKLW/qLRvXbdPrDAinEd/4vW/juw5PPnuBrEyYz+YEQQvHsUmTGNU0tHs+jm8ghKIoa9Kkoa4BGlzXIPAd6qqmrHLmi4g0qYSpW6Yu5Ulpmm+8n2d+ZeWDZPUGYGVlZeU9EBtlLRukp1tI3SCbLZCW4nN/8nv52u98k5MHR/h+gGkYOE4L3/dpasV8NuXO7YoL5y8hdIEQApQCBbo00ITJYpbQ6brolmR8EhPOY5IkoygKRpMhZVEidYkuTfI8YzyZ0O/1abUCpKFTljXRIsH1XKJFuAxnKioM38ZzbTSpMZ2MkbrG9u42um7SNA22K5GmjVLLzyRRNFnFfDHD3Q1QFXiuy+F+jGkk9Hom7baPtJe36EmTIqRifTsgaBvcvRXS6baoy5J5lFLXNYZps3Oxz87uOgcPhkSTkDROkRuwmE8xLIc6zviu7/40D27f5uHdO2g60NTkaYyhW2iiJs1i6ndXgVadgCzOsSyToiiJihgqRZKk5HWJlHK5MlQYhIsYXeocHR5wYWeXIivQDZO6atA0ied75GmO57mMRmPG0zlH+wdsXdrkwZ0TRF2zmIyBhjicUZU5cSxJ4pSNrTUmowlNXXN2csx58zyzSYzluvhBm8kkJM0yHnnsMnfu3IGp4ttff5VPf88n+dpXXkTTBY5nEi4mOLaHAN5++y2Oj465cuVRyrLidHjCdDZCN3XiWYxt23S6fWzbIVwsSJMM32vhBz6LxZxsVvPC117h8uVLdHt94iTG63tMJxOyd98klVWKlCZ1o0jTFMfxiOI5aRpTVQ2dTo+yTBBCvBv2laHrBlI3MSwT3/GosoIqz3jj5TfYvbTL409dJJxlHD0cUufgugGGadDUik7H4tz5Dkop4iTHsiW2LYnCDE2TgIZtG7QCl2SeiHan08nzrDOdTldpwCsr/55WBcDKysrKe6Go1nRTZ5GMSeMEVM2n/tCnOHiwz7e++nVUXS9DjczlQ3peLB/MGtWQpRlxErG+sY4QGlVZU2QlnudhWZK61onCDLfxyPMa3bDwWwG2Y1FWFUkcYVkG2zvbzKZzwjDDtkvmUUI4D6nr5dDrdDKhyHLqsqIqCkDhejZKKcLFcnWm1EuKuqCuakQl8fwOtVJIqZEmGUo1eIGP0EravR6HBzMWs4StLY+NjTUs2yTNEjRdYDkWlqVYX29zcjTHcW1qo2E6LlCAbljYvsmlRzZ447WHdAY+D+6d4Xnu8n21As9pY0ibj376Y/z0T/wUZVXgmBZ1tdzpLyTUeYXKE5RS6LpkdDqhKEvqSlEUBQgwpKSqKySSs9MRlmmia5IsLdANQZaW3Lr9ABDYjk3f7lIUFZZlU2Q5nU6HOIpQdcHL33yLD3/meaQtCZRDNJth2jqGIYmiKVkdIzRBp9NjMBgwm4xp+x6LWUia5oRpSqfdwXFcHh4+oLUW0O33UEXDG6/c4A989qM8/8nn+ObvfoMgWBaKSjXUTY3rOSzCGa+8+hKDwQDHNQkXc8qwwrIswiSkzHNarQ6maWMYkrxIsW0Px/GYTE8ZjyOGo0PW13do+QOiMMJxPIJ2mzCcE08SNC2jFfRx222SNGTQ3yQvcsJwznw+xNAtHMfH1B3qKiUJZximjem4VMpESA2UhhAahw/OGJ/O6PcDds/3ybOSNKmQusBxlpkRZV7it0ykrpHEKVJzUMB0EmGaEr/tYpgaDTmz6UxD8UzbMteBg/fv0K+sfHCsCoCVlZWV94CBQSWFNAwDoQTXn3+Mbifg5774b2gHAyq3xLZs8jxH0wSGZeH7AYN+H8936fZa1I1gPg2ZjKfUdU1XCuI4wTAleVKRRjOKslpuTkkiFosZpmHgBwFQkyYLgsCiURZCV9i6yXxaURQ1QghM06QuKhA1uuOQJjGWbWGYJk0Dhm5QFDlZkiKUQmmwCOe0O10MXcexLYKgg2ZIds6vMZlEZFlDf+DSXWthWh4np+Nl8BMamgC3ZYIQSB2Cls1svHygsz2bsqi5fG2TOMo4ORpy/sI6k7Mplm6RpQ3SNMmTkGvPPAEI3n7zLayWh6wVQtewHIcGRQPoAjShqMqC6WSC7TqEYYguDZAC09QRurbc/1+UZGlKXZUgxDK9VxqMhmdUTUVeZVyurqALnf5an7JqsE2BYVjoUudkf0oaZniWQ1bkSCmpVc7O+S2OHh5QFwVFmBAjieI54+mIoiq4du0J4jTh6OFDhmfHbG7u4jo+N9+8g99uEc7HqLrhK7/1At/16U9x5/UHLOZnIJatSq7jUjcVWFDVDcenJ5iWgZQSXTfIigJDN3E9n3k4pakbDGnS7a0t16yKhq3t80TRnNPhKcenx6RpgZQa+TxBlwZlnWEakkU0o6orLpy7jGV1GE2mGJaJZdokaUVeZJiGRd2UGLpGVTYIpWjKhjIrKfIMTVPoEmxb0lQFD+4dcnR4xtb2Ohuby6RkpSpc10QItfxe2AbzWUpZ1uiGQKBhOwZlUVPXDaZhMZ8tkrJJf9ew29P3+divrHxgrAqAlZWVlfeAoardMK18pMfF622uf/gRfu1nv4xWC5SoKcuUPCvwPA/TtNA1A4FAGjqtjk+3HzAZR5imQbvbJglThqMh/X6PTq9HVVWMx8v03tHwlMV8hNAUfitAzzRczyNJC
tI0Y3tnCyV0NN1ke/ccZVaQ5jl5llM3zXLAV9MwpfbuGtGSusyp8pyGCsPUicIIwzSoqpyD/XsEQYfz567guCZlpWhqsdzfbwrabQvfNfn2iw8IAoOtbZ8iLwgCl36vR5LEaFJRNxWGpRNIQaME/W2DoGXx2ov32L2wydnZjLoCzTYoqxqhGeRVzCe++9O89vJr6LVA1x00S6AasGyfmgYhNCQaAkFRFqAEhmlQNw11XWNaNkIIdH35O9elTqUUaRGhacsB6ZbfYjIZEiURk+kUKXSqouRqcw2JzqIKUY1Cmjq29Dh7MGZ9s8ObD2+zeXmb2WRBUVfs7u5weHgETU0Wh8xnU+I4Zu/hHmVds7l1Add2mC+mPHhwB9t0ly1XVcPp6RFVWSBeEXz448+ztrWJ1DRmsxOmszGe28K1AsJ6hmlomJbFbDEjrwo03aDleGgI8qKk012nqSvmsynj8ZAojuitrVFUFb7fRilJWRTUdUpViWWh1EBdlFiOj+0EpOmCvf3btNtr0DTMpxNaQbAMeNMNtHd/55a93PWvSwPXcdGkxGm3ltuDGkW4mKNJg7Is0YRiOgpJwoLNnS5XH9tmsOaAaohmKVle0NSCLK1wpaTIKxzXROqComg4PZojlVYJTR7+9M/8TPx+n/uVlQ+K1RDwysrKyntACG2tVpqMiozHPnaFt16+SRoW+IFPUVRUpcI0DYqiYDo9YzY/Yb6YcnY2YjqJmI4T8nR5I20YkqDlE7RaVHXDeDynqqFpGspyuVMdAVlesphHxEnK4eExD/bvMVuEDIcLkrhgfDbi4cOHnJweE4UhhtTRpSQvCpRQy2087Ta1qomTGMPW8dsB61sbdPsDWu0+m5u77OxeoN3pE8cphpQIpTg+OKUqa9otGwn89m98h4N7p8hGZ3gSoRpJu+dSUzGbp8RJCRJs30QzJN0NmwtX+0zHEVUluHh1l8kkYW1tA9txqRUYhs3F69e4/vh1Xvz6S9hugN/q0uqu4QUdPL+FbXu4ToDQdMq6oSiLZSJwVVHXNQqFaeg0VYOuG7iuh23bGIaBZdnYlk1dgyYNDMPE910sy2S2WJAWOffv3yFNY07PTiiqDKkLVF2QhDmbF9dIiojZZEhdNuzd38d2PIKgS5ylJFlMkqcoISgbxctvvMKde7dxAx/btjB0narICOcTNKFz6fKjSNPgzs0b3Lv7gIvXz+P4AesbF7DsgCSLKOsYXTewTAsN2FzfoR10oKmIkog0z9A0wcHRIXWj8FotiqpgPp+wd/8O0FBVNXG0IE0jNKGT5ilhtkBaOp7XocxTUBWakGRpxmw6pFYFrmuQZjGO4+B5LUzLxXZ9dNum3etj2BZlVVJXFdEipqoUnW4Hy7YoqwzbttBNE00Kumsdyrph7+4x80mEY0vWttoMNtps7gZ4LQshludgsUhJkwLVNMRxwtl4OBGm9IVYjQCsrPz7WhUAKysrK++BpqmkY+vOUx+5wsGdQ9761g2gIcsydCkxTZ2qzmmaCsOwaLUGWJaz7O2ua+IoJYpSonCZjmrZNlIKPN9Z7pePUkzTxLYtLNvCcR1sxyavCrI8o93psrW5Q6/XRQhIk5RwEdJUNaZp4boepmmg6ZKqqRmNTjg4uMvR4T3icAqiIQ5DTk9OEcDm5gZROGM6GaPrJrbtYVoWVV3juMsVoXVR0RQVr710j8W0ZGNjnTjOME0d2zWQ0uT0aMHh/pQkrDBMA91scFsG5y4N0NCIo5zNcz0836HMwXJdlNDQNBNh2Hzqs5/h9OiM+SSmt75Jq9sDoSN0E2mY6IZJ2SgQ2rKdh+W6U6UaDGP58zZCkFclQkqkaWBYFpblYNkujusjNIkCXNenKko83wFVg6gYTc84GR0TJRGnw1OmszFRHHF6dIKmSy49fo6zyZgwXjCfz7h95xaaBMMylw/jQoASrA02QMH9e7eZjs+o8gJV1wgpsGydJFowPDvl/LlLdNsdXn7xZc5d2mQ4PML1XLa2dmi3B2RpjqEvB7uF0AgXEwLHY727AUpRFDlxEqFLGA1PmEzGaLqGbhlIqbG3d4/RdER3sIZhW4wmZzR1SZKl7B/tsUjnKKFQVY0UGo7rUjc1aRohlY5tupR5iS4ElmmgCQ1LtymLisBr0W63qOuCuqnI85zRaIpA4toBhmHR6XRY2xggDYlpOaAMzo5jHj6IeOfNQ85O5lRVTa/vg4RG1LQ6LrohKPMS07aRuunXVeX/3b/7F1c5ACsr/55WLUArKysr74EyywYbW51eaHg8/MotLp47z/HwlDiNmUcjDGmiCZ1ut4dlG5RFhSYFuqGRpSlFVqJJyfb2AMfRSLMGqTtoEppaoWka8/mcKI7J0oQsz/7d/v+6yjk9PqTdbqOamp2dNoZhYZkm88WcplkOxyIbpC6xbIuikETRnDRN0JD4gU/TKNI8ZT4fc/HiVVrtDmmS4Tge7XYLP3DJ8gKFhu861HXDvdsnCCEZbK1T6QrDMymqEsfVieOMG2+dkoQlG1vLB3whIeh4SENneDSjrhXb51pMp3NQDZqUSMNAlRlW2+G5j3yIX/7ZX8SxHRqlKMsChIZuLnMWkjRdrqnUNKBESh2lFFIaqAayLMM0bcqqwjUc6gY03cDS9He30FTYdoamaUjbxtAtTNNCI6KoclqtgCyLqOqGOIkpq4pOq8NoOmLr8i6djU3SWnL48IjR2ZAiTwmTkMHaGkopqrJCCg2p6Qy6A6JozvD0ENfxUQj8VoeqrvE9l9PjPXRN0Wn3GB6dYLkm7W7Ards3uHTxEt3OAF2zSLKIwaDPfD5G1RWz6RjXa+P7LfI4ARqqusI2lz9PmqXopoHUNAxD5/TsmPliTrfbodvtUmYF7XaPRTgnzRJM3cKQBo0q/t18QVPVmJaL53jEcUSeZ5iWTVPXWIaFahRVWWLbFnVdYVkmdV1RFhmoZXGlGwaGoeP6Np2eQ1M1NHWDEIrpNEShGJ0mxPMSx4/Y2GmjmxqaajBMA9uBpq5xHEsXmigmk5Z6v8/9ysoHxaoAWFlZWXkPeC3Dbm207F/9+ZdIkhqhF2R5iqDCsT1Qy4RaTYM8KzBNk6LIqeuKpm6Qmon97n7+drtFUSwosoqirCjLAsM00DSNpm7QNA3HcWm1uwwGPeqqZD4LKcqSyXRKVTX4fkBdQ5pnWIYBKIKWD0pQZDm+GwACz29R1zVNXVFVOZ7v43kBeV7heD5e0MexTFpdm04/YDaNScKCMivIi5KyqvACG9c3MKRBlmTsnutjuyZvvXXKIirp93ySuGIezrl4uYcQGtNxRJbkWJZFb9Dj/p2HmKZGUSikoWF5Fh/9gx9jNplw9+aD5Z561dDUClBoms7yT8viqK4qpDShzhGapKpqUALP8SiynKKqcByXum6wTIOyqjBNm6Ys6XQ6FEVGXdesrW9S5QWWYRNlEYauk8QhRZHSNLAIQxrVIDXJG6/e4MpTz9Lf3OXseEw4X6BEjTQsjo+O6PZ6CCkpsoy6yQiCFkI1FEVOURXouiSJIjTDwbJcNrZ2CGcTKmqKpmQ0
GfL4h58jjV7h4OEBrVaA6wU0qiZJIh577Elu373NYjElDKeYtoPve2RFQVXl5HmGRNIKfOZRRNUo+r0+vttiNh+RLGb4bhfbNkiTmMANcGyPNA1JsxShCbIso99fx3cDiqKgrEta7TaabqIbBpomqFQNmkZV5MRxhfbuvxOaoMwzmqbCcSyktCjLkiRMCHwL37cwDI2irNANHcNQWIZOVTbkac3wNKLVtRGqwXUkRZFi2hIhlKFpxnpd1waQv68Hf2XlA2JVAKysrKz8R/aFL3zBXdvQd996fV8/PZmRN4qiKjAMjTDMKYplqFJVF0RRRLe7BqWirita7Q6eF9BuBezsrqNLQRLl+N6y3SZNSwQOYRRRFjmqqaibkurdwKrFbI5umpiWhaZJ2mttNGlgWgaablBPljmzWZISLcJlp4wGCA3bdEjSBENaNJokL3OkNAjDCKUagpbHI9e2mM8WgEDXodW2SKIUoSmapsbzlv30qhEkYULvQgtd19nfH5ElBefOdZicLDjYn3Dh0Q1OzzLMeYYUDbohcTo20oQ8LbAsC92scdBppM2HP/kcv/KlX4VG4AcBZZ5TVtVy139T06hlMYRuYFo2WZpSJQ0oAIESUFQlRZETtNoUZYkpBEJoyzWghkRJiUAgBGgoCtMgEjGOptFqt5hNZ7iOjyYlRbEcpk7iBMs0uXf7Hp3NCzz5sUe4/Z3hclONXqGiCMd2uHP/3rJXXylAkMQxne6A6WSI1CS6bqGURlmmxIs5Quj0eutMZ2c0WcPDmwc8+eHneeObtzENg7PTh3h+SqfdJ4oiTk5OuX79WY6Ojzg6uENVFlRVhaHr6FJDQ6NRDVVZ0fEDkjghTRNM02Ktt05ZlqimIs1zLMcmyzPyIsd3fFJRIESNY1tURYHTd+j3B0wmY4qyou0GFEWG0mCepsuBdqmhGoVtO+iGie8HROGULCtwfR/L8WgaRdU0xFGO5cDadg/DNCmKirqsUE1FK3BI05yyrkmTDNuyiJOcJFHLAjSPTwxPfHttbS17Xw/+ysoHyGoGYGVlZeU/sqtXe/rwYFY/fOewcgMH25SgarIspagK8jL/d7fVjuNQlSWtoMX62ua7QUdQlg1ZkpNnKSdHY44OpxR5hQCKssQwDNbX1ml3eui6TZyETGZnCKDT6tHt9vACB8sxqOqck9MzsqJgbWOD3sYa0jSX23I0fXkzW1fESYxSFWkaUpU5qhG4ns+FixdwvRatoItpCPrdFhLB0f6IOCwxDRMpa8o8YjqZMZ+F5GlOu2NhGIIozMljhWtY3Hv7gAe3jwl8l9FZxnSaQSOoUo29+zM0aRBFGULouL6BaUkaGq4+8QjhLGbv7kNMx1iGkUmJ5/ssc8mWPf7tTodur4dlWTRKYLkOluMiDROERlGW79781+RFSVFU5EW5bCOSEqkLpBTYto357mpWy7ZodzsEfgvdWK6+bHkdXNvDdz00oFE1ZV6wf+8eQdsjTTLqRtE0UNU1YRIhEJyMzpjFCSCIk5g0L1hf3wElQRi02j00AaiCokipUVi2gy7g7ZffAFXSqJSySNk9d56yrlmEIaZlEscx4/GY8xeucOWRp+n11hFooBS26WJbHp7rU+YFRZ7hOhaOZSGUoiwrhNBQKLrd/nJI3TAwdI00SzGkjuf4dFt9XNtjMh6jSY3rj1+nO2hTVSVZlhIuZqAqTFNSVSVpmpKlGVVZsJhPcRwPw7TRpYPnBjieg6Eb6LpECJOz05jT4wlVmWNZOk0tWMxypCZpBQ6ea+E4BklUEsclKPADOzMsa+/HfuzHqvfrzK+sfNCs3gCsrKys/Ef0pS99Sb71wgv6gCxaW2uXudEYYz0jL0uKMkfTJLbt0PJbCDTKssJ2bCbTCZom6Pb72LaF67gkSYVlC6oGiipHr81lkFOWI3WdLC+xLJt2p4NSFYv5hOl0Qlk2OJ5DlqfLXvpGw3YcVFkTh+EyNEs15FmBbVvYlosuJbBMetVpqKsax/FQjWBrexvHsQjDjOHZnCxZBnfVFUzHI5QGjqnhei6zeY5pSlotF9PRMGxQQjEdl5wdTYjCFNP2WSQ17Z5Dv+uRRiW333qIMCRPuw4HD8douo6tK4QuqTXJcx95mt/+1a9gSI1CVJRJCZqgKEuyLMO2baSUgKAochSCTreLEII8L6mrAqFJLLEMOovjBNOyKUVNYwuyosSydKSuo5sNQkCepbiuQ14W+L5PnqZ0Om2SJAHVUKsGaZiYhk1WpJimRhxP0FHUStEotWzvatmopkYpheu6TMKQpm5wHYu9/Qdsb53HcwOyIiNO57ieR101NE1D09RYdsDGpsciiZhNI9a3utx6/W2KMmNzc5dwPuPs7AhN08irnI3tDbygTZmtEwQBo/ExWboM//L9Foau0zQVTaNoajBti6aukdKgaZZviLqdNsPhKUHQwrBNkiRCKIGuLwtH0zSIwmXv/7nzF3FMh/v37nJ2dkwchzRNvRwWdyzSJCZJI4KgDUh63T67F9bQdMF8pui0PGxXkiYFlmug0FhEJUVeIxQ0SrBYZOixpNU2cH0HIQWua+B5Nod7RU8J/Pf35K+sfLCsCoCVlZWV/4gevvCCGYfzHyzK6BHf9kVezinLGqUElungOiWT6Zj5fI5ju5imBSy3qnieTxpnxGaMgHc3qJhIXdIUCXleowmb/qCDEoI8KzgbzkiSDNdrAxq2peN6HkIz8IM2hm4wHg1J04TppAB0LNPCNA1cz6OpaxaLkKpMKfKEMl8OEntBF8t26fUGuI5Dp2ejS0Gr45HEKWWhSNOKokjICwXGclWnGzi0uy08V8d1TOKoYTYOOT1c0FQNUZxQxzFrGxskacL+g5holiCwaPVMxqMF9+6csb3VodN1yacJTz73NEUYMTw8xHYsZCHJTNCkRrQIkdLAMm2EplPVFZqu41kOTV3TNA1S18lzQV03qEZRFiVe0EbK5YOsoqGqlm9dpK7RoCFUg9A0DNNc5hsYxrINqt2iaRoMXS5nMFRDkRcYpYHUdBzLokxzdM3AsW0oK+qiWLatpCmB55PnFXGRAjWNqHh4vMfuxi6mpRPFIVIY9HprSE0nikJc16PbGdDqbVBVgrWtDns3TYSqmQyP8NwW7VaH47N9XOVy8+3X+cgnPkU4PWGxCOm0u8yUoGkgikIcx8E0PcJFiKIhTVKUarAshalbNHWJabpsrG8Txws0IfC9FtAgpcQ0baqmwjEMqlJx7/YDLlzc5pnnH+PuHZfDg0PyNEN6BgqB1E0c18K2HVzPZef8Gheu9BA6nBwuKJKCNAVdM4hnKZ2+iy4MLNNAGg2WZVFVNRqKbtejUg2WYXP33l1uvHmrFAZvu657+n6e+5WVD5pVAbCysrLyH9GP/v2/n37hb/9IlCXyyv5o6qQlWK4Fi4a6qlAoDEOjrivyIqVRNXFSY9kOQdCl3x3gtzzKsmA2m+G6LlGUMJ/OaJSgrAu6/R5Bq4Vp2DiuR1VWCA1a7RaqhjyrQBRoAnzXpdtr42Q2SgFINCGpqhxdl+i2hW7qhLMS1Sg
MQ6dB4AUB29tbOI6FUiWnJynRImM8GRNHKUXRUFWCOF4wHI3wPB+v1WawPkDXDU6Oxzy4m7K1s0Ucl1R5wmw8Is0z1na2iZMYy3bJswrftrm/d8TGlauMRgl1JbFcE8PWafc6PPXs43z1t7+KLgSNIdDQsJ2AJFne0EspqcqaIs3QpFi2zEhJXdfoUhKGIZblUFc1RVHieiaWZaHUMhdgPp0hNIEuJUITCBSGBF03QAh0Q0eI/z97f/KrW5bm52HP6na/v/Z0t40+IjMyszoWS7RlmjYFS4AHhgfmfyAY8EADcyJ4YBRzrIltwQ0naiDJgFiAObBlyCYlShQlURZZRWZlRlZ2EXHb05+v2/3aay0P9lVpSgIBBAL4nvHFveec7yzc913rfZ+fQApNOZtP2kslQQAIKg4kWUISp8SzOftNg1YJp2dnNK2k7xwISZ5l9H1HmcTsW0ecZLjO0w4Db2/esJytyJKMqj6QlzOcHVFSsNvcYkxEHGXUh5rHT5+Spj9naA9EUULd7JiVCx6fv0dV7dje3/Lzn/6E73/6ff7kTyo2m3v6vkUpSJKS7XbDarFmNptRVXviJMXaATeO6GRqQrq+Iy9K8rxAK421Iz44un7Sz8ZRzP39Lc4NhCCo6h1vL684OV3z7Okzbm9vGW2gyGeTcSnPWaxmLFdzkizl9nbPk+enfPL986kx3Fl2mwOHbcf1qz1RJDFJhEkkq/VICKAjhbVT1ldbD0SRoWvakM3NZXF+/tW3eOyPHPnOcWwAjhw5cuQbZr1Iry3hvrMyCOfEaC0COc2h2xo3erSOkVK+8/6PzMsljx89RklIYkOSJGwfNtzdPhDHCVGS0A89Mkju7u7I8pLeD9R1TQie5nCg7SuKbEZZzpEawLPb7ojTmL4fscM4zbfHCUpIgh+x1iGFpCgXpEmGtT1N3wMCpZmSfgdLXfXYsUdHCVlWkGaSq6tbnPeTzUhpzs/PMUZw+eYaN3oeXSxYLzP2D2/ZPmwYnSeKDPd3D2TzOW13S7Pbsb29Z34y5+LpKT/54y8pi5zFMsH5wGc/+gF1XXF7+5Y4M0incJHHe4isQQpF3/f0Q4dAYUxEW9fT168km6bB++nmf78/sFiusHbA2h4p4fLtK5q6JstK7DASpylxFKGyiCAlQgmixDAMPV3bk2U5aT4nBEuZxIx9D3iSOEUqw/rRIw6bA7HRzGYL8C2LMuP2/pYsS/He0VtLGkUsFyeYQwRiS9f3BALOO6Io+vPPqmtbpDBstw/MMsfm6o5nzz7FRBF2kCiTkEqDsxYBzGZzdrstd9dveVvM+eCTT7l6+4b99o7rm1dTbkNSorUmS0uSJGe33zGfrdBKYe3A2ekFdV0R3EhvB0ScURQFVVMRxzFVfWBWLMmznHGM0DpCKslhu2H7cE9kIkxkyIuCJE1ZFzlJlpKkmsU6Y77MqKqWl1/dEOeak5MFjpHlecnp4yXVrqHe9TRVT9+CEpLFKqGuO1ohwEvqw8DPv3hJcDipdPQtHvcjR76THBuAI0eOHPmGCdI5a2tvlEBLhQ2Ox4+fcn13xaEKWDeQ5QXBQ1XvWC3PiKOUqqp4/OgMpTS7fUUUa6KopK4brLUIIUnihNC22K6bXPh+nMZQbMtud8tuc8/Z2eMpCXfoESrgXSCKUozSdI3Fdj1pPHnXpdFE0eRq11GEiWKQDQjB7e0dQnhs79DacH5xzu3NHmstbWsJITAMlqIoKecLxtFzd7shy1Pe++ic1TrjfrOlrlucG6jqAwHIi4Lr119x+fZroijik4++zw//4ue0XYuShnIW07UDi9WaJ8/f5x//V/8VRZ7QyRFfdYQRBIIkiXl42DP0Dm1SEIKm6wmjn+b8qz3W9nRdx+GwY7Ve8fb1V9hhQEioqh1Ns0crjceTFTlqnCxAkdEoqWjbflq8Hgacd3ghUFFE8BAnMZ2SSK3QUqGjhPXZkpu7DZGKefb8Gb/pW5arFaOfloRnRcm+rpDOkcYpkUkxUUzT7pHCk2YJm812+jxUjJAGJdW0ZGwHqu2BNE+Ii4KH27ekaUZezHi4v6MoFjTNAa0lRivuH64QkWaxWvP06XP6P+5p6gdG29APMUJAOZuzXq6pq4okLyZhEpInT97jcNhjbUfXdYTgyZLinTqkpap3f65Rdb6nyAtOTk65u7sljbPJQiU0WZGSzHI+/OyM2SzGaEHbdbz38QllkbK5a/mzn77h6bMF+01NU41oFaZl9lWC95KmHYhqiQ8BaSTVfuT6zZ6rt29CnPAzE83+46urq/rbO/FHjnz3ODYAR44cOfINEkIQ/6f/3f/mo8RERopBOD/gcNT7A7e3l9NMtRR0XTMFVgmmfAARWC5nZEXGw/2Woe8xRuH9pIwMLuCBMs8w2lAfKh7u7hjdiNYarQ1ZVuJGT11XCAHOTarRR4+eE0UxfdezWC6QUqFQpGkKStJ1HUPf45wjTVKev/eEJDNERqK15PZmz35XMzpHXe2omg47gJSSp0+fcn11zW53YBhGTk4XRFHMOFqECCgVMVpH0+wYB4cPgTe7L9nXO/J8xiff/xFPP7jg2dMVTevIUs1imRGC4cnTD9ju7un6HXmeEPw01jTYka4ZqQ7Ta4pSkig2CCHQSuKtY7d9oO9qDocDdX0gzzOqw479fkcIDikFwzAFWyVJQZlP+xJuHFBSMI6K1mvsKFFSYEcJ0hB8wLsRrQVZFjOOA94JIBBnCciRoWnICsOzDx6z2T4wn80Z+p76cMCOA3GcUjU1hPFdmJciTwsQgcF2zGZzlFJ0bYNQCiEESih0pGmrhgCcPT7n7Ze/5OHhnrNHOUIExtETxwldv0dJzWG749mzpzR9i7WKTz78nK+//iVJoghOYMeBw35HkmTEaYQPARMleDey2+2Ik4i+b2jaPc4PZOmcJEmZz+b0vUVrgzaGqq7Z7vYU5YKnzz5g87AlyWasTlbEaczqvGC2iHj2/hoRJFobqkPL/tCyrw5ESvOrn17yB3/5I3a7luvXFaP1VLseHUE5jxlHz9A5tk3P7e2Odl8T6yikmfjp7//gB/+vf/3f+Dfct3vyjxz5bnFsAI4cOXLkG+Rv/I2/IZYgoyjq4ggvpZUBuL27pmsrILyz//ScnjxhVi7R2vD4yWMePT3BGMPmASQSNzgGaxFKURQZ4t3SaVmWNE0HeGzdMVqQMuLJkw/o2pa6qrG2QypIo+nPKhNhtMb2A0mWE0UxPni8dfhxqp2KPENHhv3+gBApQwdtZ6mqPU3dMtic4D1v31wSmZgiz2kaQZxE1F3DydljEIHdZscHi6fEcczOHRhGSwga61viJCLyCR+eX/Dx9z5jsV5z/mwBMrC93/L8vTMi45Em4vn7z/jiZ/8tWRYxiIAdBAGFHT0AWZ4hpZmc/VJgB89h3zD2A1JptJnyEJRSBAJd12CMATRSKoxJSdOUJJ5SafuhJ00zEAKpJxuNtZ6gBQFJ27YkSUYIvEsaBu88sTYEGTBpzGAdSgnavidf5jx97xmHhwNZltH3A857tFLEUcZgB4yeGrHIGB
brFS9efkUcJRgT0etpt2Eq0BOkVERJRlNZZvMUYyKEhO3DHUYp+m5HnGTk2YK2bUjTguvLWz77/Pv85le/IYlTFoslbVeRZBlJkrLdbdnt7lms1qRpTte1hCCQUrDZ3mN0RJZmdF2LTwaGIXCopgajbVuWyyXPnj3j9vaefnCsz845OT/Be0FelESR5HQ9o28HmtqyWmUoafjo4zPquuFrrthevWa7afn7f+dn/Oj33idJNPu+AwFZFtMcAnkxjaI9PNR89ZuXvHn5S0bfudMnJ6t//Ouff++v/bW/9sd/9Ed/dGwCjhz5Z+TYABw5cuTIN8iPf/xj/3/+w7/+KzfWGztYH3ovvRspijlpEnOoduwP0yuAVIpx9CSJIS9S+mFks6mw1oEE7wJ5kSGlpGoqxnHEec/eDoAEIaZF1uAYR8d20+O9o24O9ENDbBLKcoGJYuzgKN4FMnnvGUdLXmYIoajrir4fiOMI7Yd340aeNIkQwjNf5dNY0qYBIfnoo8/Y7/YEH3AuoIzhBx99zv3dlru7PU+fXRCCY3tfc3+3JQjJcrVEHBy3D9eMFtbqjHEcaOuGy1cdjx6dMj8pSFKPEoof/YXf5VBvcUNLFGlsPyLlVOj74FBa4/uGrFBEJqJtBtrGkqQGkUSMg0NJRRwnjONIVe+RShPCtOgs392sJ0mCQEEA9W7ZN4kTIpPi/IhUgmGwdH1PZCIQim4YsQ6QHc7B6EaSLKGcp1gbIEj2u4rFyYyiLLm/2TKfr6iqFmNigg8MduBkNqfr7WQGGjvmixnPeR87eGbzBXW1Y/uwpSiXxGlMXpY4EdBSU5YFaTaj7Q/ERjFajzGaopjxsLlFK0VZLugGy9Xbez54/yO+/PI3QKA57JECZvMlSZLhx5ZXr76ckqTXFxRJMRmWqgMA89kpdrimOhxI0oQin9G2HVJCddhTljmffvoh292AiTWr0wVFESMlHLY9d7db5usZD3cHghup9g7n3vD4yZKzs5JIPyF4x0/+0Sv+9E9e8+jJinIWMzpL0wx4B/vdiPMK5zSIiM5Zmn4fqjf9j2Sa/qvvffDB3wT++Fs8+keOfKc4NgBHjhw58g3yN//m/9rYr4cfVIfuUd12Ko4NHYHFfMV+t6Pv7qYbZKFQUuH8VLw3dY+UmuAhigxaaaI4ZrSWtq3JkoRyXmJH/+6Wv6Fte4bBQgiT8nIYcc4jBSRRglKGutqyXp8SJxld2zGLE2azGcYYmrZmt9tMgU1dRdsdWC5PyPMZtg/8+pdf0NmKNM1J4wVpmoCA+/srhn5EioiA5uLJOa9evGV3v+f9j5+S5hFV15GLmCzLOewqNpsHHu7viaOE1XwFMqI9BPCWT7/3jLJMaNoeoxVJNufx03P+9E/+G7RU+NEhARMZum5Kth26ATdCmmriWDPawGwGSmdT0WgDaWao9oZDdaAsZxCm1F/xLotBaYVWmqaqCQQQAiFhvphz2NckaQReTgpKpRFCsdvtsXbEOYsdPVpr+m5ExiCFIkkkQcT0w4GhdyRFhFTTq8VqtZ4+LwT7ww5lInIzLR37NiBRPHr8hNGPeK9YrOZYO7JYzdBRQT6LUbEmjI4nTx/z5PlHDF3D3c0VJ4+WbDYbimKGVILLN6/QRnE6X9APPYMdWa/O2e/uWCwX7PY70qxmPp+zfbimKEoO+w37/Z6PP/qMIpmxXp/w5u0LQgicPrrgxde/QUrJ2flTnr+34PWb1+AFXTtyODQ8e/4IpzTFMuXR0xnzMqXvBva7nofbmqapGO1AWZS8+vKemzc74kgy9J5HT1cYbXj54oGb25qz05IkTmibBoIABG/f3LLZ7Hl4uKFqa1Ri9sv18kWUJK+MNTff8tE/cuQ7xTEJ+MiRI0e+IUJA7H7F79dV9S9orR7N56UYbE9dNUip6PqeJMmYzeZEUUzT1CRxTBRFUwHvPCIINg9b7u7uqKo9Vb1n9I7BOi4vb9hsHuj6bhoNCpJZOSNOUuaLNfPFCVKpdyMcGqk0Smn6tqWpa5wPdF3P4XDg7u6Wru2oq5pxdJRlSV6UKKUYbM3u8EBR5KyX56xXZ6xPV7R9y939FXW1pywLothQzOY83O1o2obT8zXVfsvubkukIpSRtG2NtSNDPxKZmO3Dhuur14x2YLd74OJR/s5/36GkZr8f+e3f/z2u37xm6Bo622HHEaUVRutpNrzq8AR0JEjTiKH3SOVJ0ikobX2SMl/F6FgTpRGL5YL1+oyiyJBSEccRs3nOajXHGInSgmG0jONIbGIO+3rSfirN6DxKT7P/1lr6bsCOFg+0XY8PgsGOCKk5VBYTa5SSmLhgGD06FuRlRtt2nJ6fkuYZUkmyvEAnKWkxI4jJGHSoDgz9yGI158l7S8pVzvMPP0RHMYt1RlrEnD+aI5Vgdb4gneV8/qMfcPH0Gc5Jnj57HyElFxdPOXv0hKZpePzolCfPntD0PXVT4wVYF5BKUVVblJScXzyjb3viyKBF4ObyNePQcXp+xnvvfYwdHXle8vnnP8Ij2O92ZHnKb/3eb5PP15w9eYaJC3o3cnI+YzZLyLKY00enLM9OePz8lB/+7nMePzth6CS7XcNHn12w29XTK5J1/PxPp4yH1emMLE9pG8vmoSFKFXFqyPIIpeHNmxfcbd4w+IZddVggVfXee+/9P/+Dv/0fvP62z/+RI98lji8AR44cOfIN8Tf+xh+qXN7+cOjq533Tp73TIY4jERnH4XBAKUmaZWy3d9ONcxDcP9wyn48IGaibBv8uAVYKgR0cAcFgO4J3VFWFVJI8LxhHx+im239rLUVe0g0D6/UpAijKEik1fT+ACwQCkYkxJmIYLM4HmmaPUkwBWRjKMse5kaIoydICN3qcdxij2e637xaXA2lSsNsdmC/naAPBGc7WJdvdw3RDfLKgbXps8KxPC968fMXV9Qu64cBiseZ73/shbT8glWS/b0EpYq057Cp+9Pt/gSiKeHnzFilAKI1EEpwniBEkZEXG6Cx5HtNWPXYYMZGm7TqWq5zqMC3yFuU0597VHW0zNU2eQDkrmc0Ktpstfd/TdhVCmHday5G26zg5PQEp8AHatkVJiR176mqPD4EoilFa03YtAY/3AesD/eCIs5hx01HVHeVpSj4vubl+oO4GVBShA0hjWJ6cMPQjbpxeFAZr8d6z3zU8evaE0TmE0OjakBSSKErouoH5sqRYxshYcr/d86Pf+yG//rOvKIoMO4zUVcvFxXP2+3vevH3L5z/6Ef1gGYcBuklB61yFHRr2hy1FseD07GJa1JYWN1pevPyK75czzs4eoVTE/f0tTx494bNPf8Rms+H6+oHns4w/+B/9kPv7PUVZEoJAK0gzRRRN+QvrkxVvX19T79tp+b3vaR467OD5/IdPefvqlounC7pu4IufvmJ1MuPxkwX9u+C6fJbQDwO7XcdyvWC+mPGwfY3SCjmGoKTqlFL9t332jxz5rnFsAI4cOXLkG+LHP/7x+H/43//1L0fTVFkhm/3NLjStEKOfrDNCgh89WTYjjmOk1Bhj8A4OhxpQFFlOIOC9Q
xlBnheU8/N3qbrNdPvt4e72AR88VXVgHEc221suLp5ih4GHhzv6fuSd05GsyCdNpTZYa3FuJIRJlSmEpixT8jwlTTM2uz193/+5s/7+5paq3dM2FbPZkrKY0Q3dtATrBYdtTZ5n3N7dUJQzkjhhs93w3senvP/pBT/5R7+maSuev/eMR48vWC5O2e0q6mYgKROefXJO1wz84mdv+MFvf8gHHzznl1/8hGFocUBm3r2OANLAcjmFhw1WUx8mN39RZvTdQFGmNHVPXQ1kWQzBYQeH8yNZntAPOaXWpGnMZrNjt9ljtEHrBBMZotiweTiglMIk079rh4EQAkjoupbRucnBby12HEEIIq3RxmCtp7eO3o7TfkPj6VuBjiLSLKXrLHGSYIIgEEiSGEQgz0r2uwNV1ZFlGfk8Y7dt+N4PHtE2b0mLBd5ZZouI1y82XON5/tk5n/3gfb764pK6a/jgsycoFYFz/Nmf/gYVGc7OLxi6kTevr7i4OOXX2w1SKvJiTp7lk9rUgRCgdYwxGVE05Zs1bcOLr77kd37v97l4ekFaFNxe33GyXvPRp5+y2VXc3zdcPDnhD/4H36OqGrwP027Lu3wLicRZz3w+56tfXSKDIjaG9ZOCh/ua2nQ8+eCUtul59N6a5jCyua0oy5j5KqWpRrwTlEXK/VXD7fWW/WGDcwO4kUipLsuSK+/97bd36o8c+W5ybACOHDly5BtksINIU/PWDeoTpRPR24ZAmFJ3jcZ7jUAw9ANxIrAD9MN06661om4OeBeIk4T1rKBtO37+s5cYo2nbqRiNoxgTxSRpipCKrm3p+4br61uSOCLLcrquYxxHvA/kZcZivUCrmP1uB94z2gGlI4QQCDUVfLe3V2iTcFe3775mGIae4B0mMtR1TVUdEIopXCsELs4fcTjsWS6XOAd11VAuIj789DFf/uoN2/uOslxQZBn4mJ/+5FfMZzMW8wWzs5Jf//IVMijee/+Mf+Ff/ItsN3e0TcXoLLPFDCUEh7YiMgpjDCGeFo/b1tL3PcvVHMdIJDXBCaqDRRmwo8V2U/Lv6fmMatcSmQhpNMM4cnd/Cy4wm5XY/fQ93t1eE0Lg/OwxdrDYYWAcB7IsmTIVpKKYLRAh0A8dxsQkUU5epnhgHB0hCOqqQxlFQE67ElIwX8/Z3u7/PFPAGIM2ksPDAaMNcZ6iTU3X9kSpQY+w3becP17wcNtyfV9zelpwel5wd1/hBs96XRB9/xGHuic2mt2h5YP3z+nagdvbHdokJLniUFVsNjs++vR9vvrykgD0fUWaFQQk2sREsaPra4xOWK9O8cFyefWW+4d7fvh73yfNc8r5DE+gKAsW53OCF3Sdx4WRs0clxkTc322ZzTKUlNRVRZzEnJ6dcXp2x1e/fENRpNzf7zm/WHN7vWWzaXn+4RngQQXSOOXq9QGp5KQh7SyLVcFqWbC5qVBSMfQdkhDyIrvP0/T+d37nd5p/79/7977dg3/kyHeM4w7AkSNHjnyDyDi81Gn6K+fFfrB+ss3ANHYzuimUy0QoJRmGHjsOzOcznLPc3l6z2dxjxwHnHA/3O5pqCvyKTMpyuaYsSnRkGN2IGx2RNkSxRmuFHTo223sOVU1AUJQlJ6dnSGE47Btub2+4v7/h9u6SwfY0TUVVVygpOD1bcXJyxqzM0QrcOAVfVdWBru+ITEScxMznC2b5HO8DIQwcqh2zckFdVXhvcb7j/OyEl1/d0fcO70eKvEDpmOvbK56+d8bqtMRozfWrPcJLHj0958kHz4gSzS9+/jPqpmG1WqGVZH+oEFIglAIhCUz5Bj44illKnCmyPCYr0mksKFPIIBl7h/ee84sZSaJAQFFmxHH8biwK5vMFTTcQEFSHA3VdsVwuphvwpsPakSzPMEYz9BYTJeR5zjD0xHGCiRJkZAhSYf1IkkR0jaOqOgICbRTOjhgjyGcZzo/0XcdgB/q+Z7ADs9ls0oPaESlgd9ixr2q0inj51Q19Y4kjSX1ouHr7QJHHZJmm2jaEAG0zsFxmuGEkjSNubne8/8mK1ckcL0DHmuXJknZwtEPg/NE5WmuSOEMIQZoWlOWcWblivXpMUc4YvWB1+ojf+Qu/Tzs4hIbT8xnP3jvn/NGa5WnOxdOSj79/ipSOEGA2LyjLjCfPHtO0HUJ4lqsZh0NFXR343ucfEILksG+JjeHm6gGjDd4G/tv/8ucoEXj6/poxeOI4ZnvXTjpW6WiaATc60khT5jNO1+fkcUKRxYcsSfq3b98ea5kjR/45Ob4AHDly5Mg3yRi31rbxvmqS7aEVo1OM4zgtlQqNlA5lDDqE6UY7wNBbpITlYkGeF9OYjnc0TY1zjqY+YEzMarUiKzKauqXpapzzGB3RDx1CwGw+AwJCSEBitEHJSW3pxpGubYiMwvbQHmrSrKTrK9q2AjdnGHruN3fstluk0tPtrh/J0hwhDMYIQvCYKOP99z4lzQ0Xj065vnxAKfPn4zi31zseRYr9viEvMoIfcd7x4cdP6LoRpKJvHUkKZ2cz6nrHs2d/kX/4D/4R49Dx2fce0/UtQz8QvCcvS0IIBOexUpCkBoSg7yxaT3P6rnNkWcLQWnwYiRKNVAITR7R1y2yest91+N4y9B2n5+fTz973aBOj7cj5+YI0zdhXFVJPszBpnnN7c/Mui6FkGBoGO1AuFkgTI9XUlAig6wZ2+w4hpnErCHRdy3y5oGksQUqEVswXS+ww0A+WKI7pDnuUzJDS40eLG3r2dw3e97xpBpancxaLOdtNQ5JmpEnK/c2G+SpDKEGaGrbjSBCaqqoxyvHo6Yqm6qmqPVFSsN8lVPXA6emc08dn1PuWODJYO6KimNwYBjeyWhTkZc52e+D9T55y/vyEICRnFwsO+4p1PO1JSA3zRUyeP6G3PVIopAqkxnB+ccZ+/4B1nuViwasXb1msZnzw8RP+7n/037JcxHz86WNub3esT2d0Tcbf/7tf8Du/9zHrRym7u47gNHVjKRea+9uGy9d31PuKx4/OkaHDdQfB4FItgvhWz/uRI99Rjg3AkSNHjnyDCG8/HNrug0PdzQ9dR+enQlArzegcSivatmEcBmyvCAjiOCWEwGAdounJ83Sy93RTAee9p24O1E2Fepf6q5R8N8MPeT4FNUVRQt/1OD9ix5EmBLQ2JHFCCA4lA97DrDzBO0ucRBgN15dvuXzzCo9kGAaEnJSbUZRwsX5Mns/ZbO64vn6NMTHlfEWcxZTlKW/fPiDQCD2Spwl2GDk9Tdnvaup9R1kYBucoZwVvr65Is5TD4Y7FYsbp+Sm/+vnX/MG/+Fu8fvWG2+tb/uIffEY/tHR9jyAwm5cYY2ibhraddg+0MdBZlAIpAkpHjKMH7/BBEeWaoR8ZR8f+0JFEEUlucMGjG4mJzxBKsdseKOcZfdOjtCTLMpyf0nStc+RFhrUDCEUxzwHo+gFlNHlZYp1HK4G106x61x/wCMoixzk37VIUCUPvsKPj9GyNEHJaug4BO44keY4dRqJoahIRoKRk6Dv6rqWtp5C3oespyoTLt/csljOq/YiJJOXM
cPVmRznP2B8q1iczXr284dPPC7LCoPSSuhnw3uOs5/p6w6PHK5IsZRhGnpzOefHyitOzE7J5hBslJ+clq7M11lseP11SzgusHcjzFG2m16KHhz0hwGKVEFzGaC1JWiClRGaCwIJxsMhcMJvPuLq8xjt474M1X/3qFUmc0vWW7eaaTz59yt1txa9+cUM5i1icJIxW8rCpiNIZ64s5D/c1r1+8oev32G5Kdvb45dgNq2/3xB858t3k+Gx25MiRI98gWvJaG31QUoyJMTTVjr5vCQLyIicAVbWlqrd0XQ3BY4eerm0RIWAizf4wFfvWDrRthRCS1XpNOSuJzJR8a0zMcrFCaTmZVZqarm2wwzROoqWizEtiY2ibCikgilJMlOKAdmi4u7/Eezs58uXUWFw8esxsviDPS5aLNVpHvH7zFW/evkSrBO8FbV2hheD+dkeiSrqq4fzRCVGssdayP9RoJfj0s2ecnZ+xXM65frvBdpbddsvz5xckacwXP/uS9z96zu//D3+Xumv47PMndG1D01iMMazXC5SSNE3DfrdF4EFMBbdSgjgxJElCcGHam4gNUSTxLiCCJNjAYhEzW8UoLfBCEGWa2TwmiRV5kZBlhsW8oJyVSK1BSpRRSCWIIoP3Aa01UknqtmUYLGVREkWGMI4IBH3XUdcHiiJluShwbsT7EZB0bU/TWZarhMfPFiSZRGoJImAHOy0DpwldPxCALE+JkohymdO0LaP1jIObTEcqJjExh11DZBL22wGpIobecXe7p1zMOFQVkUl4+fUdowMTa4oi4nQ9Y+havB2oDy2n5wvSXJLPEj757IK+b/j448d8+skJy3lCkQkePZoRGcWjixnnFwuiWLJc5ixWCWcXa5LYQAjUdYOQkiTJsNYhpaKcFVNuBJKT8xWPH13QNT0vfvOGi0endLbHxIbdzvL28oEPPzln6HuGPrDZNIzOkiYRh82AHx2fff8Zn33/Q/KkwPYtTbVl6BsvpfBF2x5fAY4c+efk2AAcOXLkyDdIkqpGaGm10s67EWd7xqFDCkHwjqY5IBAUxYz16SkmioiimDTLkEphtKEsZ5T5jMVqyXy+ZBwHLt++YrO9Z7ff0DQVECYfvfcoKTEmpq5qmqbGunEai3EjUkqKYkZRLMiyEoEERnQkETJw83BF3dVorYhiQ1Uf0Erz+OIJQz9wfX1J1/bMijlxbAjeI4XksK/IsoztfkMUK5p9y/Z+R10dyLKIxWrBOHrevLrmy1+/5mFzx+b+ATuM/OLPvuLLr17z7MNT/hf/q/8ZL75+y/11Rb1v6bqO+bxktZzTdj11VTP0Pd55gg9IEdBGYSJDksToKGJ0nsUiI44kWkqUnMZy4tSQJBo/eobeEseGJE0wkSaKFXlhmM/z6eY9CEIISCVQWrGY5xRFQhxH7xKIwShFlhfMFyvu7x8YR0fb9hz2e9YnC+IkpreWfrRESfSuGfOUeUSaTF9LEkf40ROCJy8yYqMQZsockFIjlGK2LJES8rJARxHWOZbrObdXtwgHfnAY7VjMM169uCNJY+7v96RpSj947GjpG8fmbvp9iCJBOY8xkSbPU4oy5nCoePr8lL7tePJ4xfvvnzJax/njBU/fX3FynlMWCRJB13UYo0nTiK5pkSJifbIkz1PyLCUvM7bbAwKJlJL9viJ4S9O2CKGwQ08xy3j/w2e8995T7m4OVPseN3iiSLA7DAQpyfKYt6+uiKKYm9sHhPQYI9neNUgx8tEnzyjKYgqBC5Y0jTdRZLZVmoZv+dgfOfKd4zgCdOTIkSPfINaK3DufDUMXCzxKCbphwDlLV1u8c+9ueksICu9H7NCitUArhbXD5N83I7PFnHK2xNoBsQ84bxlDS93UaKMgKEIIZFlOmuYIGpq6wTlPFIEdB0LQZFmMNpokiRmGlrbtcd4hhCSLUxDTHL33nmGYbt9fvPqazcMDduwxJkYICATyIieOM4qiYLO5QwqNGwVXl1f0Q0+aZgRv2G47iizC2pG6bai7jg8++ID1yYqyiDl7b833fvcTXry84r/4T/8pi3nOvFDksxLEyOHQ0zQNcRzTtgO9DYx+JDcJ2ohJxTlYHu72FHmM0YJhCFPj4z1BOOI0ZugDIBFIIj01Pd4LeutI45SH+x1dN9K2LafnS6q6JVIR80VEUkTUTUucRCglMFohpWSwjjhKUUrh/MijJ+es1ksuL++xw8BsUdC1A6NzzOYFzoXpVUMH+sEzjpbV6QoIVPuGQ1WR5yUMgSzNSNKYw75BCknbNMRpjEAQPFxf3zBbzAhegoe26dhvWgSC3WaP94KqbtFKgBL4EHNxbiBIlNYkWcz6tODq+oGhH1itMzbbA/NlSdd1IANCSaJUI0QgeI8QkqJIqPZ7XFBs7iuevjen60dMVHJ2uuDr+iUvXrzh+XsXDENH18Jh1xGpLUIEvJDs2waTaj76+Alff3XNfrNnfT7jbtNxf3dgPs+5ud6z2e7Jizk2eJYLQxRpokhz6C3lYsHy5IL72w43youhG06Lojg2AEeO/HNybACOHDly5BvEeZ4IyLMsC2fnMdu+p3MNzjuKogDhaNuWcRxBS6TwDK5n6C2D7WnbltlsidGK+/ubSb152HE43OHD+O6WVVNXFXk+I44nlec4Wpz35EUBTG73NM/xbnoh6PsBrRQheKIoQZuSsW8BzziODNbS2R4pA4fDAeccZTnHhykhd7ffsFguiZOYyES8ffuak7ML8qxgu92TpQVJkiGV4HDYMl+c07UV+92OJIl5/8P36LqeN1+/oK16Ht2ds1yV/Od/5+d4r7HpiHOOcQjstw193zFfzGibjqrqGEfHYpESxTHBecbR0TQ9aWJIE0NVjRwOkz1JCEWeR3g3KUOttdjBkaTRO7XpSBhhu99zf/uAMjHLVYHRilhHCDGSFwUexWgdaZIhBahM0A4WwnTT7f2IkFDO5lxf3rJ/OJDECV01MLiB9ckCkByqnsUixroRh2N5OiMyhv2m4v72HpNEaKOIIs1iMccOA4ddRVt33Fxf44NHSRBC8fCwQUea2yvJ5mHHfFby6sU1jHD5aosyivk8p61bbN9R5jGH/UCaxVw8XqKMIskMZ+cLhtaSncTgA26wpImmKFL2+woRNHEsp+9bS6SQeGcY6dnudqyaNUII9ocDUgVOz9a8fHHDbrfFmJjIpFT7Hj9YHj874X57QGk4HCpef7lhvZ7R9QNJHIM/YHuHGx1FGXFztSMyMfksY3QSOTreXm6IopjZrGCxWNIebhHSo7Xs+AHjt3nmjxz5LnJsAI4cOXLkm8SHWEqZmsiE9qEK1nqhdYRSGq01Shqcq3B+JE5Tum4y5GijkVJyenKGiSKccwQvUNJgtCHLCkKAOM6IkylETAhJ17X0/YAx+l14l8c5h1QKZ6dFVCUgTnK6vp+SZ51HKU8UJwgCWjsO9R11UzPYFucCeV4ihMAOgcViRZ6XdF1D27Z4J7h4/IiiWHA4dMRxghSSvrcE2fGDHz5jt+l4/fqWR48fIaTlF7/4gtubO5z1PHp2yl/8S3+Vv/+
f/oTDduTjz57w+PkKYwL7bQchMJtn3N/tCB5G65nNS9IsxnsP+GkPQCqUFmw2Nff3LXFq0FoTPATnQEiQHh88Wisg4INkaB37TcPl5RXr0xPyIiNJDc56+r4jK2JMnHB9uUFJRfIuHUuqGL+rEQjqemripJHsNnvu7+9RQhK8x/YjaR4jpKSuW8p5QpLEVE03jR+ZiNvrB+ptjbOe1VlJnEYoIdhvdygt2N5vGQdL1zb0Xctvfv0bLs4fIwUEF7h8c42j5/TiCVma0B46mronTjRRrMnzmO2mIY4m4xJiYLZMqXYt65MZeluxD5Ku7zg5ndHUA0oJ7GCZFQV3tzvSJKPvLXXdkiQxq3XJduu5utzx5uVblquCzfaBzUNPmuacrAse7racXVwg5EBRxLz8+oY4z/FecnvzwMXFmu1Nz83tHWfn5+z2DcUsxnaOat9ycjLtizRVTZRKHp3P0DKgljGbTcv24YHrqzc0zYEoU3uTRNXtf/T1Crj6lk/+kSPfKY4NwJEjR458Q/zhH/6hlOEwt84n3jltTEyelfh2QOsI7z3GRJTlAudGlDQslifsdw80TUWsDEiB9yCkIkkUbVvR9DV2GCc7zWjRPqIoctI0Y7vdcjhsCWEqXgEikxBHKSF4ZmVMOcvRxnA4VIyjZXQD3aHCOUcSx7hxZFdt6brpRSAEQV1XZHnG8+fPMTrhzZuvEUJi+4HV4hxEzGADUWRo6z2jlQxjy1/6y7/Fr/7sDaODZx+cU+8rXn39goe7B5bLkkfPnvA//lf+EiZNObs455OPY+5utvyTf/QlWRERRZq8jFiuGpTWVIeO07OCNI0Y+o44Teg7i1SSKJJsHhr2244k1SRJhHPTmBVh0qNmeYw2I33riWNDU3ds7ytevbgkSmPyMqEoIkbnqBsLUrA+W1DVLUM3sFoVhDBiYkPbDQgRiOOYqqoYBkssU7qhx42WLJsRRTFj8GgTEVxAR4o4Mly/3VDVHUkW05uR5tAjPBCmMbEQAnXV8PrlK4QAN3qEFGijub25IQCRjrF2MkMN/Qgy4KzncKh4/OSUQ90htEJqQwiB2XyGlIEoFrRNS1bMuKk3EDyn5yVRLKl2A8oIdBQwcUzbj6R5QlFGBBHQJqJpLXJTEUJgdbLAWs9+vyfLprGy4MEOIwGHUgLvBGlSUJQpeW749c9f8Mln71HvauoDfPy9p3zxpz1IwehHjImwrqfbdUgjWZ3MOexbhs6ited0neGZXnPAI4VHCIcx0aCkJAhhvsVjf+TId5JjA3DkyJEj3xA/AH2JO0EIkSSxS7UdjQhGIRntSJImKKWQCIQyeOcZ+o5h6P5cAenclAqcZCl929E0EUpKvNREOsb7Ea0NxkRorZnN5u/2CAbsME6aUGUwWtP1HUIYhmFk6C3OjsRJjGtGIh1hCTRtj7XvbvGloqn3aK0p8jmr1ZKhb/n66y9RSpFmGUIoxtGiRwcm0FQVzgVUBH/hL/yIn//sJWVeMp/PuLvZst9tUVHCX/xLf8D67JQnH57ywcfP+If/xZ9yf3XHT69uSOKM05M1wTkCAqkU+/1Addhw8WjBk6fnNG2N95Lq0GKMxGhF3zgEklmZkqSG0fl34VER+23LfJlhIkFzsIy959BVWBe4f9iS5inL5YL1KqfvRsbOIWEK0PKW7UNFPk+IU402kw2oaSGO4ymnYRzx3iGFoG5rhFDv0ms1RgqiKMKLQBRL7q7vuXp7zWy5YLFYcH+3wY+Bar8nThOGzlHdbrm7vqap//vGbPQOAIGg61vqpiIEiOIErR1tN7zbAUnpu4Gz8xnD6CGAAqqqY3mSY60jJsY5S6QNr7+849MfnLKYpYTRY3uHlBI3TsrTpu7wIRBpTZpJ+n7kq19fsVpmJLFmscpo247N5oDSgpOTNXEc8/CwIYoi4jgCYRBS4EcwRlMdKsoyZ7PZTHsS85y2azm/WHJ3vyeKNbdXWxSC9ekKE0ckiaKuHbbfo6Wm6R1xUrJanbG5e8NoRyGVePN/+bf/7df/13/n3/k2j/6RI985jg3AkSNHjnxD/AxYC1mb2DwMTb93Y+e08MbZliBhsZrTNDU6MlPB5Ua6dsQYjdFT2qxSksVyNjn+iSnLGX3X07YNnoCJE2w/cHtzjVaKYRgYx4EkTViv17gxYKIYO07udx8cBE0IoHVE13UoZfBeoNUIaISM8EOgOWypmgNaS4zRvHi5o+9roigCqUHAcnnGfLFEqWhaWh4H8tmMT3/wAVfXN0Rxxvr0hLcvrpBKo5OITz/+kDjO2NZ7fv/JGX/v7/433F4eePJozdP3zonjDC96louCrrO8fXOHUhFPn655+t6Su7sHggsI5VmfrJDScdhVuDGQpgnBWwIOKQNCarYPLVkeEUeKl19vGHrHapkSvODu1QNZkZFlKeU8RinDaDuiOAJlSTPD0Ftms4I0ixAykBeauuohCIoy4bBvwAtEEIxjjw8jeV6go5SkSEhzTd8FIqPQBr749VeUsynMq2072rpDK43UZiqcb+9pqgo79CRpQtu2BAICgTHT+JGJIxCSKIlQJmY2j/H3B5TRLGcZ1lriRDFPc0brcMNI34/EscIHz/5gWSxzAp6b65Z8UWGMIMtinPPYMeCdIy80XW3RkaCuO4pZ+m6vo2e5KnHO0/Y9q2XB4dCRpjFN27JYLriIU64urxhci9AFUTT9zNMsoeta9vuWx4+W3NxumS1K9i+u3jV0GmFBetg87NBJRJpNtqb724rVskBlkq5pKRY5UqesTh4TXDvHuid//a//q0vg4Vs+/keOfKc4akCPHDly5Bvixz/+sfVe/jx4f+1svzGSIYkj0jgiUoaHhw3OT2MVznuGYUAqiRsdu+0WZQxCKA6Hmq7rGMeREAJKaeIkZTab4f006iOFYBgGImNIooTROm5ubjhUe7quYRwn8xDv/ChK6clkYyYTjjEaHWmcHxlsz+3tJU1zQEmJVJr9YU/Xd8zmCzwQgidJExaLEm0kzrlJGWoiPv38faQUNHvPcr7g7ctpcVWawMc/+BjnA3/8j/4p3//8fS7fXHH1akuWZdStZ7fr8cIRxTFffPElL19ckRcpSZqSz2Jev97y4usbPI48T6iriqaqaauecfBoPXn1lVboKKLtHdoItIbL1w8YrXj63gykpG4cUWR4+vSUJDXMFimHqiWKIuxoieIIpSDLNUn67uelBVpJvA9IJVEKksigtCJNEvBgZMRyPefkYsbZxZyu7ckLw3KV8vbFJVrHnD+6oG176kNNnhd4H8iygmGwDMNA8IE0y4nimCROiKOENM3QJiLLi3e3/oYsK8nLhPlqSV6m5GXEbJWQz1JWpwWr04Qkkew2NVkWEaRhtswQYgqOUxqqpubutuFh03J1fWAYLftDSz84sjQBPCbS1HWPFILZrOTsYs5+15LlKUZGpHGKFAohNN47rq8fyPKc84tzql01qWNnOTqamsm+9Tzc1fzZF69RaGbLgvl8zs3bB9p6slKNbiQ4aHYNYXT40ZGkBiEGlHQ8fjLn+ftrfvBbH7Nany
OhrPf736k3+yff5rk/cuS7yPEF4MiRI0e+OcJGFn8yF+0HxSx70jX2t+MO0jTj0I6MdprLbtuWECxVtSdNM3wQpHnGYrGgaztm5RyjDV07kKc5TV2z3T0gWvnu5WAy98znCwgQfKAbeqIooJSi71usHafC1lqsHdFa03U1w9ASguNQTcV71zccqu30ihDFzOZrur4ljjKWyzVNU9G/U4NW1YEvv/wNaVyyWp+jdMRv/c5nzGYx//Af/FNO1ufUhwNBBh49f8x8nXN388Av//SX/N7vf4+m2vL/+A//Y06Wj8jSjGqomc0zdrsD3nuePbsAL7m7b5kvBW9fb9k+dLz/4SlDH3j59R3LdQZecX9Xc3paIpWf7sqFABmII4mXCUPfsVyVZHnEbtfQNANSB84ezWm7aRG22lVEcUq1q1FGkySSOFFY67CDR8pAGhuG1uMdIEZC0ICY9g1GT9c51qsVZ+czilnMm5cPKKkRUnN9fWC/bTk5O2VwI4N1ZElC3w9EsUEEGMeeWTmnEhVZntA0NUmcTi82xjBai4kilDQEIC9zykWOMXB2sWC2SskKA8KTZYYkjaj3A5tNxfN5jDHQ9Y7ZLKbadfStQClBUzfMFkvcOEKAcbQoKbBuoJynGGNI4w6JAA/Pn674zS/fcvV2w4efPOP1i0tOT1fc3Nzz5Pmar766YrlckWYJ9a7jcrwkzTXLVcHV2w2EQBYnvPrNHX6AkwuHTjw60vTdyECPVIK6ahEB/GhRkeHZe2tms5K2Hnjx9T1KKrb3e7I8IXv2zAgR/ordtf/Tv/bX/toXf/RHf+S+3eN/5Mh3h2MDcOTIkSPfIP8T4KeR6WzbeTt60fcDTdvQdY5yvkQw3aZ7D1k6hX8VRUldVRyqmtN1Tj/0IDx+DO/0kBFZluHeGX60kQglaLoWNzjiJCLPM/q+xw4DPgSUkmhtUFKT5wVKScqyYL97YPNwSxynxGnKoTrgRoiiaQdgv99ioojZrCAET91UKC1QUiGEIksLZrM1UZLx0SdPyArN3/s7/zVZktLUB5q+4dmHz1idL9nc3vHlT79CSYXUCX/7P/w7xCrh9GTNy1cvODk7YbE6IU1TlqsMPzpurg+cXeRIqfj61ztC6PnVn71AqZhn763Zbwcu3+55/t6CcpEyWguAiQzODeRZghvdNFbkPHYcaKqBONaMo2O0HoXB24C1HoSj6UYulgXlXBC8oq5GtIYkMfgQGJ3HeU+Wxdzd1BipJzWnkWQ65eLpnMUqZ/vQovS0VH11eccwTKm4JooQSLRWCAlKSYwx75SvM4SUSKVI8hikIksTuq4nerd7cDjsiaIYEBRFynyeIYKnKCLyZUKRR8SJRknJbtsRgiBOI9IsJommGf5xDAQPgx0pZhlZPi2UF3lMlqV//j1qrfAOhjBwdr5itFNSdRwr8iyhb0eyLKOc5wRGVus5AsGjx2d8+eULnj97hFSSw0PNdjuQZQlpqnCDo5xlFGWK0YbDtqOYx1TNNbO8ZLvZIKVGSsEwjqQhJoljum5EKsFnP3zCYr3nP/s7f8L27prt/Vtury9Znyy2xXpxHP85cuSfk2MDcOTIkSPfIH9id6e49vPm0DwbbIjafkAgJpe61PT9gJKaSEcYpRlsTxB6cuhLQdM25GVJHBs6P3Bze4uUkjQrqKsDIgSGoSeEwGhH0iQjiiP2uy2EyVDjJo0Q1lqyLMf5kabt8d7R9RapY5LY0A1TAWqMoqoPdF2L8w4fAvv9gaZp3o37gJIgxfT3lqs1H336mMUs5u/93X/IrJyUoVdXV6zP1qRxyvb+gV//2dc0TcNv/cEntLbh8aPHnF+ccn//wPe//xEff+99ilk8ueQNjCMs1wl9Zfnpz19SVx1aerzXnJwl3N003Nze8+EnF6xOCkY3IoUgyhJcAD9CmimaOrDdVhgVsdu2xMm0CG2tJ9ISpSX7Q4WQhjevbymKhNnCkKeSzUOLCI7lqmQYRrreobUiSQxD5xgGRzpPSbKEKI7QRpCVCX3XE7zE2gEQdG1LGAVBSoIUSCmJpJjSiJPpNr/voVzmGKNp6ghlQEdqWhQ3GqPfLeQKQZGn2MGxWGWkqWG0I9pIilmM0Yq0UOzvB37x07e899E5F4+XzBcFw+jI8pjXLx5oa8v6pCSbGdIiJksM4BEI0iRmu2uI4phxHOh7x2Ku6dqaKJ12G9anC25vd3RdR1kWtHVLnAIByizhZdtz9faWOEt48eXltL9w5snymIf6AEaQlAmhDxy2PSdnp8SxZrfd0ncDUnryWYmONEEqhJI8ebqinCc0bcf7H5zx0cdP+c+++jV9U5MYQ1DiQRfxP/mjf/94+3/kyD8Pxx2AI0eOHPkGiYwOYQxJECIHjNby3SJuQGlNOZsRxykBqJuGcRzp+w7nR6LITGYZKdEmwvuAVhprLYfDnrZradsWoyOyd/PiQ9+z225IopSiWCB1hA8C7ydlYl3v2e+39H1D2zYE7yiKqWCXQgKBruve7RrIP/flj25kuVwRmZjIJBAmDePgRhw1Qg78//7LnzJ20FQNN3cPzOZzYpVy9fKaL3/+FePgefLhOZ9+7z3uLq/pu5G72wfWJ3NOTpZIKTlUA6t1iZRQ73tefbnhJ3/8NQwOIwaqpiKJFfW+4pc//4oiN5xfzKjqGolEyanYtn2HQNI2I23TI4l4/fJhukmPI6RSrE9T4kwxWEtWxDRVT/Dw9PmC9TIhMoZxCKxOMkwk6bqBEDzBAwhGOxL8CMKxPMko5oaTs5wijxDod3agacTGWUfTdGitpuZPTbf35SyjmMWUZQRAmiqSRJDnEecXc9brgtnMsFzlOOfpusnBb4yhnGcUi4QoE5jEIFRAELCDZWg9L7964ObyARECq3VMnGqqesC7ERECRhmKUrFYJMwKxWqVoKRnsBZBYBxHxnFq8pwDFQmUkgQXSJKc1dkJCBh6iwgBwrsXFa3ph455mfL2zc27FxjD2xf3PNx2CC25unpAC4HRmhAUeMfN1ZbHTx4z2vHdwvrUKJ2en3B6sSJJNdeXN2zvau7e7nnx5RV5VvD+B8+YzUucd/Rtvz48NJ9/S8f9yJHvLMcXgCNHjhz5BrG2uXBuzPrOxsM46jhOkPU0UrJczvEhEAIkScTQt+z2W9I0J4ojhr7HGMPQDzzcb9FKsVqt2O231NUOoyWCqWjf7jYYFZHEMT44hAhYOxCnCcZEU+EWwNoBKSVd1+HcpK7c7u6nPIBxpLcdo7c4Ny1dSgWRMUjx7hbbe5qm4eTkjLPzxwgduDg54e5yj2C6re66jtVqQfCw3+/o2gYpBbP5gu99/yP+87/7D/n5n37BcrWmaxOcdRTFCoenKFJumo6XX97RN443r16w3d6y399Szkp+63f/gHrfs909sDxZslzO+OpXN3z82Tm3NweMMWQZJElK21pCkPS94+F+z/o0Z7HIadue80czuq7jsHfkRcIwOPq+5dPvPWJ9kuN8oOtHVKSIc8Nh32AiQ/AjWkuqTQMC7OjQRpFmmiiSZEmMHydvv/eBJJYMdlrqR
QiKsiCOosmwU2rKmcZEKW9fbUhiQ5bHAOjIkWUxzaHn7HTBYdfw+ssdTV1zcnqKD57l6QITS+LEoESgqUeCl9SHmiiK+epXr/EOQnDMFwVX1zsWyxltbd+9IpmpkShiklSxWJS07UjTdqyWM0ob2O8bPvjwgqq+JklT2sQSvKdre8rZnLRI3yVKg3MeFwJCaIoiojn03FztSdMCZRRlmVBtauJE4QiMYyDLNftdjUkz2sExSzVRmhCCw3mPlJKhH1gsS548WzErI2wzQJDc3hxoOwdCUzU1GMbdvjp/9fr1//Jf+9f+tb/3b/6b/+btt3v6jxz57nB8AThy5MiRb5DBuloiWh1FTmlD14/4IBByMueMoyUvMrSRtG1LHMUopWmbmnFoGfqWu7trquoAcnLiSynfLeFW7HYbqromiWKSOCVJcpI4o+5qrO2ww/guDdgTRTHL+ZIiy0mShMViTlnmpGlKlmVk2TQ+5P1UDAsBxqRkeUmSFiyXZ5TlivlshUDg3cCiLLFtjxwlfdtQVRtmiyXD4NlutlT1jtFZkjzn8XuPqbYNL756QxQpbm/f0PcNy+UCO1iuX91z/XrDq9/cIpzn1YvfcHt7xTj2/Oi3f5e//Ff+Jepdy93tjiRJGa3n5dc3zBc5b19tuLvtaRrLaBXbTcPQO+7vDtjBcf54uk2vm55ykTCM9l1qsSaKFMNgef+jE1YnMX3X0/WOQ9UjjcDaEZhsSVme0NQtQ++mBkFKTCxIUs1qmZIkkm6YFK390CNUwI2BECTayKlY9pN9Z7XOiRON1mBHz+pkhhunhe68jHDeUcwipILr63se7u+QgBSCJIsJYkqMHrqpYdNaUVcDfTc1mHVVUR8a7m523FxX7Hc9JlLUVc84BtJMIZRgHB0g8UhMbDhZz5FSYCJJU7dEWrNaFTgPaZ4hjaLreuIk5umzJwx2REcGpCTSiu12jzYGZSBNDDeXW9I8xsQaRthc1yRpzpurDcvTOdnM4AR4AspIPJMJabQ91XbH9m7Dy6/ecn25p+8si3XB4iQnKzIe7jZ0jaVrB4QQtQ34ly9e/ZWv/+yL//nf+lt/S33Lx//Ike8MxwbgyJEjR75B/vV/42/+WifRl+D2wY0W53DjiFaaqqqmWX8h2O22RElEUZaTlaWtqOo9XdvgRksIjvHd6E/TNggBSRSTpQVlXhJHMVFsQASsnQo6pTSR0pMRhymAKY4TQJClOUmSkaY5WZYzOs9m90Db1oBDKoknIKVECsk4WKqqBiQBT2978nxGve9o6pbLq7ccDgcWyxNCgL6rITi0iijnC04fn5PPc778zdcE52j6hqAkWTFn9PBnP/8l+4eaw74hTgwPmx1JWvL5D3/EX/1X/mWevf8JX331hoftnvlyRpzGhBAoy5zL1w+8ebmlPvTs9x0P25qq7vnqy2uCd2SFxg6eh82BYhYRPFTbAeGnW/CmtcSxoZgljM5jXcCHQF31GKNw1pMmEUoKQLK5bzHG0LeW2SxDAEZPutAokpSzBCEEUayITYoQktEOeDvixyk9OC8ilFYorWkay2KZYhKF84EApKkhiiWLdcrD5sDmoQUJJjYMdiDLEyRysvIExWHXsFhk3FxvuLupuL2qEMHRtTuu3my4v2nRUiOCJ800UgjiJCIyCu+mhmm/q5FSgPSUZYKzA0oKdvuGs/MzqkONUYY0iYnjiN12y/nFGXEyBdoN/YBz0Pc9zgfWJ2d87wcfcXuzQQqIYkhiQ1sNdK1nu+nZbWrOzuZEUWA1L2n2Bz746ClKa8bBUR324D1GR1y92XJ7XfPiq1tev7mfzExBc3t5xWp+xsnyoj07P/v56Ef9cPfwr/ztP/q//8G3de6PHPmucWwAjhw5cuSbJSDUW0loInyfaslqViCCR8tAFAmyPCGJY4L3tF1L17cYE5EkOWlREsUZShp22w23d9fc39/wsLmjaStG22H7luqwp2lq2qbBuREpFVGSYpLJH6+URAjJMFqarqPrW8ZxREpNEiUoKYkjjdYCOwy40SGEREqBHSxKKtI0Zhh7tElYr8+5v7vj5vaG29stSkYsFiuiKCbPUiIToaRCSRidI8ljDvs9r1+/Yhgadvs965MnlPMFL17+htPzNbNVwen5jGFsaLqOYl5w8mhNlCVcvb3E256TkxVCCkIYiSLN3fWGV19e4oYeArS1Y7cZuLqqmM1yBBEPtz3DYEmzlHEM1NVAWw+EIOjbyRqUpAqtNVJOxffmocUODi0VUWT+vImq9h3j6KbiPoqIYk1wYrIzJQYTGwgCqTzn5zO8s7RVS3iXEiwEJKkmLxPGccS5EYDZLCUyGiFAazGFwRmJ9x4tJQrJbDYjBEFeRMSpRApIYokfA95P2Qf1viZ4zea+412UNH1jGW3AWYcInvk8JYoEtutREoJ32GGkPjTMygwTxwThMdqQpQnjaJFKoZVhc79FCoVSgr5vp9edNOWwP1AUBdtNzXxRMvQOhGK1muM6y+G+I3iJThTew9CPNFXL1eWGvps+AyH9tHtiFMUyJSsLkjwjSRPOLhacPZrhCMR5SttJvvjpS+5vb0giQVVvcaGvHj959E/P1utfSSlcGP2TP/zDPzzWNUeO/DNwPChHjhw58o3jLmfL/GWWibsiVWSxxnYDUoAxkvu7W9q2Z7QDh92GtjkgREDIqWBsu4amaxFKoZTGKMU4DPRdjRsHBjsglSJNCmazOVIqtDYIIVFGo5QijqebaCEEPniarsaOIwJF3w5oIYl0zNBbgpAEOf13MLoRO1qcH7F2YDZfsj45o94feHi4ZTbLWa9PaNuOpu3QWtP3PV3XEMeTxjLNM5Ik4je//pLgBZvdA0+fvk+Rl/zyN7/m/PE5zvdY1/Piq0t++cUb+m6gmMV4Z/mn//if8HB7S1FkbLZbrB3p2pa+ban3B5r6QJZnDIPlsJ9CrdI4pWsdV1c72nbEj5rDrmVzV7PbDDgPzgVCEERGEUWaODI4K+hqy2HXTT9/KTBGYQdL8ILtpmIxzxEeskxjraOuW5yDEBTOeUCQZ4bBjrz8+gpnIYpixrGnKHN0rJkvUwKBfpjGfay1JOmkLk1Tg5IS7zxSCPI0YhynkDdtBCfnS5QWLFYJ5SzDuRGtPU3dY60jyyOECPjgSdKcvMyAgBCCtp0CtooyQWtBHCvsOL34jHYkTTVJpN8tOnsGNxKnMdW+oijm7PYH+n7Ee0jSmBA8UioO+wofAnYc0SZlsVpxOFRAoCgKXv7mDucDr14/0HSWoXekWYS1gc2mZbSOzV2FUp626Xj/o+dEWYyJIrp+YBxHsiJifZJhYsGTJwUXFwveXr7l61e/oGpuGcbWihBkmqXbOIk3cVa+/vGPf+y/vXN/5Mh3h2MDcOTIkSPfMFFYvJHa3CSxPiRKhqHv0NqwXp9Os87DtHDbDwPg8O+KbWt79rsNITgWyzlRPC30RiYmigxZWmKijCTOiEz0zvCzQWtJ37fs9xv2uy11XdN3A8O7m/3IGIp8QVnMGYaBYbTY0RKAJM2w40jA40MgilKytCCK
MlarU5bzJQ93lxwO95ydXqBUxKHao4xmtV69yyXQFEVJVR8AzeOnj6n3DSoommZHlpVolfLll1/y/MP3uX/YcHd9T7tvuXz9mmIWc36x4Bc//Rlvv36DbXqsHXj56jV5OWlMpRRUhwMPD3esT0+QMuL+tmF73+Os4/pyy5vXG0YbkEJwd7fn/rqhOji61jJah/fuz00+Uhju7/bsdx33ty33dxviWCEldF0PQlAdRrp6RClFwCOVwlrPaD14QddYmnogiqfF6Tdf39M2HYEerSRFWVDOcoIPREbTtZbIRIDARBE+OMp5SlpESCmp6gEhBYOdXgmGwbJYFsSxRirBcp1jx2nH4+R8hkCwWs0pZ9OYl1KaxXLJ6qQk+u8SeDtL2/QU84SsTIliwzAMxLEhzxOkFJRFhpKaNEvw3lFX7dTYCMF8vqBpWpQ2JGmBMdGUbRAnhBDI8hQlI7KsACQu9Hz+2+9zc/OAHyX7fYf3TC9KScZqOSOJNUoFDpuGN18+YGtPGid8/PH7lOWCNMk5bGv8APu7jr4ZqXY1FxdLPv/8e3z22W+hdETXDZkdbR5FUdd1fT42zetv7dAfOfId49gAHDly5Mg3TDMcTutdda5QsVY+aDl50vv2gG0tcZQg1KSGnGQxkqIoMMoggmA+m5NlBYvFgiIr0SYhz5ecnD5muTojTqYGIDYReBjf3Zimaf5u/EehtMIYTTd0dH3HYHsOhx2Haks3NNgwMtieuqnRUjEr5swWi3e7BRHPnz9nNivZbm+pqi1FWTBYy+gcSinSNGW/302aSymnIlEaFosF4yh4++qK2+u3NE3DxcUTRmd5//33uLu65OWLl7gRvvjipzgss1nJn/7pT3E4inmBdYGgLJ/94CO8cyzmBYftAy9f/Yb3PnnCJ599wHZ3YBw9SgWkCKSp4fx8QRxLqroDNM4HmsPAYd9QVS394NkfeoSQdG1P1wXaznJztyeOUuI4ou8sbhD0TeDm7YbdtmGwI8Zo2sbSd35KWB4s1bah2Q04K7h6s8FbgdGS/W6Ps5ZZWZKkmkePFgz9iFKaEKYGQhvFMDiSbBo3UpGk6y0uKDbbHm1i3OiZFQXaCJarjLxMCEjSPCbLppvy+XJ6CWmaHqMTynnJcl2Q5YY0UygFShvyWUyca07eJRZLJUhzQ9OMJFmJVGZ6QUgy9ruWJE6xtiXNImbzGWU5QyDxAdq25eTkDOc8s/n8nS510shu7yvm64zZqmC0jtksxTnBMPTU+4qzixlBSFbrBUp7lDBU+56u63n2wZqzxwtOHy349PPHnF9kPHo6B6FRRvPzL36Jsw377Yb3n7zPvFjYxXL5J2me/8YO/XIMIf62z/6RI98Vjg3AkSNHjnzDeD+etnVzsjtUcd21QWuNHTqSSLNaLpBSEAgYE6GNIU4S+q6j6RqiKIUwhXhppQgEtDFIrXDeIwR/rvD03iGkQUUx2kzjKCFMCcNTpsCkphzsiHeWfqgZXUc31NzeX7Pdb/BuZHQj95t7bm+vOBw2DH3L5eUrbu+vUVJQZCXBT3PvUkqknGQrRVHQti3X19eEACae6q+ubumairapee+9j2nqHmM0l9evubp8zawouL5+jdaCWbnkJ3/8T9jc3fLZp9+n7QZ04vnRb3/O/f0Go+BnP/ljbm4v+cv/0r/A9374OVfX92gF5xcFj58uePp8xnsfLEiy6ec/aU97gg8EN7LbdoBke1+jlUb+d3Ym52hqC0jSIsX7QNc5+mEaU3n75g6kRGoxjUmJaV8gNpqmsmy3ln3V09SeurbsDzucc8RxQtPW+GBBBAAOu47qML1sxPE06x/HEVGkSZIYO3riNKLrHIOdPjc7WoIUaCNJMz2pXYWnmCXYIWDtSJYZxsFjIoM0GmVgsUwoF4Y4gTSPKRc5XniK+WQSevRkRVYkRLHGRNMLU5ZF5FmC0qA01HWNkgrrLFKB1gYAa3uGvmOxWiK1QSG4urxkGFryvGSzqfEBPv38Oa/f3vC97z9HyoASCiEkD3cVi1WBJ2C0QhmJiRRXrzdcX97x9PmaPFfYwRGC4PS85NmzBWWRcnK64O2bK9puy8vXX9tiMfvl6vz8/33x+Pn/rZzNvmrq/V8+7gAcOfLPxvGgHDly5Mg3TDC6LRazh7zMuiTJaeqG0Y1T5qqErp+c/EVeEEUxVX3g5u4GpRVJmjCODhGgazvGcRoHiaIpoKtte7QyeB9wPhDFmtEN1HXF0LcIEZBKorSm7TqEEKRJOi19VhX39/fUVcW8LMmyDM9UuKdJQpFmFHmB94GmqbGDxdqpUFZa4caBrusQUpEkk/lGCEESJ8yKEucc88WCcejYbu54/t5HSCk4Oz/l4e6Gw2bDcnGCc566rjE65urtG3a7e37rd37A0/eWSGP5rd/+Pv/0T/6MoQ588bOfIJXhr/7L/zJxesJvfvGacRg4O1uyXKc8flby9PlyWq4NgqzQlGXGcpETJYrNriFONH07mZKkEuy2PVU1MPTTOE2WxXjn8SPUlWW/bbi5vENIyejGd8Yb9y6sbcpXqBvL9e2Ouh7Y7RpGH94lMIMbLWmW0TQ9zgnu7mruriu6qic4Td+/K361R2lYrjOcHYmUpu8a4lix321IkoiiTFB6GtNxzqMF2M7ycLsH51FS48eRPEvI8owsi0iziDQ1pGkM75aI60M/BcvphGJekmYRSkryLGJ0A24cEEJijCTPE9quxnk3LQz3HX3fk2U5Pni8szR1xdn5Y5x3CBzbzYY4UfS95+rNjtNHc4p5znbf8uH3HhEX6dTgtAN90/Hiy2vKRc4wDHgXmJcl1292fPWbK5QSEDxvXh741S+uaFvLz37yGiVT1utTsmxGmuebKE9+msxO3v77f/RHXy3Xq3+gpDr94osvjvlGR478M3BsAI4cOXLkG2bU3Zso0hul5OiDQ2mBiQz7Q8u+quj6gbIssaMjiXOatnpng4nouqmI79qWvu/R2hDpiOAV1g6kaYyJDVJJpFbU9YHDYYcUkiRNEUoxOoe1FmMMWincODJah5SKLEtI03ha4m0atFZ458mihEgndN2AUrBcLpiVxTuLTcYwDGgTkRcz0iSl6zr6rsePjuA9l1dvycuUfGb4+sWvOT97zEhgfb7m9u6KcbQ8evQEpeHy6g1ZlnJ59Yaqqnjvgw/54e/8Fpc3D5yerflP/r9/Hxx8+dUvsC7wyfc+5auX17x+dYlW8PjpnGKpefrhktNHCw71QJJFZLmZFl0jSFJJ8JJynpFkEXEaMfqR/aEjIHAW7DAtZVs7TtkAo6dtPG9f3XN/s0GKwDhahsFTNwNaaeJU01tL3wWcDVT1yO1thR38tAtRFvgQiOOEKEq5vdmx3TTc3+8ZrOfutsb2nv22wpgIYySj9Qz99DVIIbB9T9c3lIsSZCDNUqJIo4WkrR3jCNZ6tFY475ASytKwXJYsVgXGSKSURHFEnGiiSBCbiMO2AQ/1oX0XaBYRgqeu9tjRcaga1LvciThWNE1FmqY0dTP9PkVm2qVwnr5ryPIZaZEyjp67mwf2uxq
C5PWrW9p25Ae//TFvL285u5jjvCXPUoJXGKHp6ophGFmuSrzz7LZ7zk4XZGmKD5rFOmN5YtgdOnrrWa7n/ON/9BPq6oASgkhrpYI0SdsGgGx58vdmi9lPy7I8jgEdOfLPwLEBOHLkyJFvmKFXTiDGJIo6KaS3/UA/DJTzhMWyYDYrKIsZaZLhXA8+kMYpJoomH78bqaoKOwxE76w+UxiYZuhb6uZA3dW0bcVge+IoYT4/IUlyRJi87G07ueu1MeRFzsXjR5ycnmFM+i5ALCaKYoZ+mG55xwHvPVJqnIObmxuGYSAvZqzWp5ydPZ4Si6OIuq7Ybu7Z73eMo6Wqd+hY8ennn/LTn/4EP1i0Tjh7dM79/Zbr60vOHz0BpXj75pL5YoYPjouLJ6xPVuRZyVe/vqNv4T/+j/4Os2xGFEc4Efjk0x9Q7VtmheHTj5/wwUdnrC/mfPDZmtVJzm53IDCSJjF2HMiyjDjRWDuSZor1SU6aKmCyzZRlCsEx2AYpJQ+3FXawRHFE143s9xVXV9eAmF5gBsd2U1PXPUmqMUZT1T0uBPre8nC7YbetGAaL0dM8f1YU+OBI0pj60FLvakbn2O8OWGu5ud6x21okgvow8vb1A0kyLdXaIbDf1wQpETrG+4CJYBwd3gceHmpCCCADHqaF3lQj1MjZ45KsSEiSCK0lygSiWDD2ltkiputGmrqnrgakUByqqRmyo0UoRRDg/EgInqKY4QPoKII/t1O1ZFnBflujpEDIQJQkeA911XI41Hz6+XtTdsEA1aFnuVpyezsV93Z0NE3DqxeXrJYlr15cUsxSVCSIEkPXe5SW+DDiQ2C5npFlKW9fb1ifZDx7ds7tzTV9WzGOgxi9W+6a7TOAf/ff/Xe3H3zvh/+ff+vf+rcO3+rhP3LkO8KxAThy5MiRb5hCSOnxXghq7+wQvEeIwMXjBctFxnxW0NYN+92WzfaOyESkaUbb1Ay2x7lpvn9K9B0Z7DCFbQ0DzjlCmEw3xuj/fuRHqXeJvh0CQZIkOOcm69A47Qb4IMmLGUJqnBsnM0uWT0WekHg/4lzPOFpm5YLROnbbe7bbe0LwBA+H7Y66mhoP8PT9QNM2/OC3Puf65oG2Gjg7OyPIkaHpuL++5YPnH1LkOW3bcXZ6QXWoEEIBgtl8xdcvXrDf3vOf/yd/lyJO+cEPP+duc8f7H7xPkhi0CXzw4TOqpiGbJTx5PkcbQXXoKWcJjy7mjKNltZ6DcOz3LdqYd+5+yTh4uqanLDKMFngfKMuCh4cN+33FbJ7RVj1vv77h/voBJacwtWEYEMB+19APAybS9L3lUDWE4HHO0dQ1fdvh7IhWCmsdRZ6/04g6Ii2o9jvyOKY5VAyt5er1PSI4Hu4P9K2nbaZ/5/LNjr6x3F7dkKU54+BIUoPWAakEm80BHSnafrIMEQRj74mNxmjN6UWJ1AJUQBtJZCKG3tF3I4KA1pLNww4/BrIsZbetptE0L4jjiDjWU+BZPwCa+XxKbE7zHOc81aEijjNGL2jqjnGc7EBFURAZTdt2LFYzLh6vqQ8DxmiklPStpZjHnD9aoI3i9nY3aWpRHA4tOorIiwwhFXXVUpQxSk762vOLEhk8v/7Fa1arBfOyoG0aoiS6Nkb33TA+/+/O3VEBeuTIPzvHBuDIkSNHvmF0EQ/ei9Z5PxgRxiKLWJY5MqjJmy5hdzjgw0gUxURxzGZ7T13v6PuWtq2REkII7Pd7QnC0bT2lAM9OWC5OSOOEvu8Y3y0LSynRerLwrNYr8jwjiiLSNJ0Wgp0nTVKcm26S4zhFGQnBI4NAK4NSmtFZvPfs91v2hy0gMSahaSp2uy3z5Yr5fIHWMYRAO1T87h/8HnEyo94PLOYr6rahq1s2d1tOT05Jsxw/epbzJX3fcrI6wTmHiTSHalKZvnjxK6QQfPq9z/jyxa/J85LT5SlVdcv3f/gRP/vZrwhy5OLJgt72ECSzecJiXtC1I3keQYDd5kBeGNJUIaWYbu/3DRDIinhqKJTmsG/w3nPx6ITBei4v72i6gcGOxHHMMHQYM1mBmqpBS0XfO/reYgeHtQ5jNEpLrB0QQlC3Lc47PJ44SWjajqxMaOqaJDHMZiWbhwYEBAltN7LbVuAFX395z9WbLfWhpetGijydshWSiCiO8M5THTrS2DB2lqJIQAiQgjF4ApL5PEYbGIdhSvZ1UxjZMIzcXlUwgjaG2+sHvBvwfqTa7YkihQ8jSaSItWbop9+B+XyJtYEsK9DG4OyIGy3z5Yy6qsA5nB04OV8itEBJxa9/9YYPPnnMoT7w7L1HBBzzZYIPnhA8JjF0g+X2ZkeWRVy+uSSJInSiKcsYLaGpW7qhJ04U54/nPHm2otr3/OrPXtB1PUpLr7TcGZPuJPrlt3zcjxz5TnJsAI4cOXLkG+Z/++P/41ZIcSOlbIVwg9GSRbni/mY7hXrZKdVWaU1kIoITxCahLBZ49y6RVwmqasvoRtq+I81y5vMVwLScKUBKjTYxUimUkoBnt9tzffX/Z+/PYnXNzvtO7LfGd/6mPZ6pTlWxqjiTkmjLQzsdxEDnwkBiJIDUCJBAuTKC3BgJ4HT3RSDwohO4lZuGETQgwEEctBO3BDfa7YYTOHHbbkmUrZEUyWKxhjOfs8+ev+md37VWLt6vysllOqLKor8fcECwwDq19/7OKj7PWv/hjBAGZvMcpSV1VdN1LddXl1Tllq5rKMstTdMQAGkUUgAEjI6QUoKAOEoIIeBcT1lumc0m2N3Q7oaGrmt48623UTrl8vUlBk8IA23XYdOYxdEBcZZxu1yyXK3YLFcUeQFhlDwJoXj05BOEFpTllnffeQ+lU66u1hzOj6nKjsXihD/8w/d5+PYp733xwWcFWZMiQyvDyxe3eOfpu8Dl5ZqD4xlHxwVxrBgGj40SDk8Kju9MsVZRbhtWyw1RpJnOCpwLrJcV3jO+imiFDwHvIfhA17bjoiQl19clwwBJYmmbFhEksbHUVYkU0DUNSgiatiNOMtq2RyhDWmRUdYNUBjf0aG2oy4HlTc36tmV92/HsyQVtPXB1vqSYFATvUJrxFUNbqrIb+wi8QyiQWmAjSZJEpHmM1GDsWHDWdwJrNN47bKx3PgPBMHiiSOPwmDgiiRO2ZT1+v24MLJIiYLUC4QDIsgxrI6IkQUcJZVWRTVKSLOHm5oaurUkzQ5aNRupyW7K83vD2O8e8fHnNw7ce0neO0+OMs1cXHBxMOb07pW4abBRDAO8GIgNRLDg6mY7eCK24vtpQ1R2nbx5x9+ERs3nOprzF4cokm15Lra8O8vzp53XO9+z508x+AdizZ8+enwBC87Qb2sb7YSjynKatUAL6dkAqjY0Mk8mMPJ8wnc64e/cNpFRENiK2muvLC6yNMMYSxwlxErPaLKnrLW1b433A2gilNE1Tc3t7xWazQWvNfL4gTwuur29ZLVdoo9Fak2UZIQSGvqftaoahx7kBN7jPGoCjKCaOY6yN0HqMKK2qkkkxwzm4vrygqbej5GZxhBsEt9e3DF1PXW4RgNGGMKZfYrQmjhMEAiklF2
dnVFUJQvDo8UesVisG5zg+vUOcTvjgxz9GCHh19gITa1arii9/5R0WB1PSzHJyMmM+m2JNwrMnY1twmkWUVc39BwsWBxnOBbbbBo/j4CTi6OSQvvfYSANiTNZRBikUygjiyGC0xlqLNpphcGg9lnMpKdDa4D009ZiUE0cRBI9QgihN0EojCKRJwtAN+MHjXUAGyeZmTRqnLJdrQKCMwvWOq/NbNrdbyk3Hxesl29UWPwyEICiKnG25YTIpsJHCuUDfBuLYoIwiK1KCHOM6s8IQJ5I0G2NCiyJmcB7n/dgFYSVZnpAXloCjLBukhHJdMZlkY/pR8HS9wwcBAtI8wTs/Gr+VZhgGrLVEcURdVURak6Y5dd2xXm9QQpOlOUJ6pPR89OMXzCZTXj5/RZYZVqsOrS14wfe++yF3HxzT9hXXN5csDg7IspQiT4kiUFYRZ4r5IqOuen7jn77Ps8cXvPHmKb733L/7JlGUbW2SXiVp/MNf+dt/e/s5HvM9e/7Usl8A9uzZs+cnQPBqnefFZjrJ+q7vuLq9ZHAdN8st+TRBa0UxKZBKMZsvUMpgtMEow3q9JI4jRPC4YcAaS9NU9F1PFCUYY3G7pB/vR9mzkII4Hhtdu7bjZnmLCOxu2sfmWSEkSZwQEAQvGPpx+NdCIIRkMlnsmmo9IQQmk9kYrZkWFJMFBIlznrZtRgNx39M2NUWegYTBD9R1hY1iZAicHB5glAYvWCwO6bqWJE1J44yrm1eUzZrF4oA3Hr7N6/PXfO/7v0dVbambFhMJvvzVh/zct742fj3ec+/+AX3nIRg+/ugZs3nGbB7jfODwaEqaRbTNQD84pvOch28dMpvnrNYrssKSTyKEciSJoZga4kyhlBoNs1qhtCYQiLIYHRmMMYRd1GkIjnTXlJukY2nYMAxEiSVKIkCQTSc4PF3dEILDRJrV7ZKmriEI6qYhjgwheOqypu9a1usVy+XYxyCkJ51m9G6U9Cg1Lh/bsiIIQATiRLGYp+RJzGSakRcxUWwopgmD80ymGVLCEDye8RVDaQE4ktTQVh1GSaptzXxWEDxkSUEUW+I0wjmQQiC1wHlHP7TUVQk+kCYxkdFoKZFKMZ8vxmbl4DHWYmxEklqkDvz4g+fcvbvgh9//mMODgmePX3P3zTnL5TXPHp3x1lsPkVITkDRdh2fARpq7d6dEieH6puLhW3eZ5Ck/+t4jnj9+weA6zs+uMSZp0ix/keUH30Xsihb27Nnz/xP7BWDPnj17fgJoaZ1QqpRSDdtyi7EJq3VDlETYRDCZzlBKc3B4QJpn9G68ZSUMQKDrG9bra5zrkAjSqCBLC6Qcb9eFlGglESEAgjyfEkcJ2iiyyViaVEwy+qEfYxONGeVGjM2wcZyOQ5ux2CimyOckcUYIDucceZYz9ANaG7KioOtbetfS9R1ZPqUfem6XVxwfHyK8QgVJ37VU1Za0SHjw1hvc3N7Sdj2Lg0PquiOKM/LJlKvrMwKgVcR8vuDDjz8gimOU1LR9RwD+zM//LMMw8MEHH1KWDQ/ffMDzZ7e0bcf15TUHhxMWBwVN09FUPU3b4YPDGMVkEpFPLFk+Lk5pbimmEd57Do8mnN4dl4UQHHFsx+F/NF0ggDSNyWcZUkukNiRxgto1NwsRsJEc/RLDwHSekeYF27KhHzxKa5qmYnAdJtL0Q0/btUTW0DajWbjcbACPdwNVuaXvx8/YOUdVtXgv6DuH1gE3DDRNzzC0RJEejbpWoK0kijRKCYxVpGlEcGKXEARSSbQRCGVo6oF+CEipMZFEW0Hb9Qx+oO8Dbd0hhSfLErre4RxE1iKlAAb6fpSMIcEmFqU1Qkkm0wOUiii3Fd4LEAalJHmW8OijMc//7OUliVU8fXRBXTb83Dff5ebymu2mYjE/wIcBYyVJGtP3juvLJcenU04ezGmGli9//SEHizmPPrpEGY2NVVBGX2pr1ynsE3/27PlvyX4B2LNnz56fAHXbqaZqs7ZpVWwsdicjOTiaMAxhN7RZ5vMF200FQeC8p+1bhqHldnVF1zcoJYniGGPM6PmUY19AGqcoZRBqLOUyxuKDRypFcIHlzYbl7RZrRwlRABCf/gpobRAouq6nLCv6rmO7WXNze7UrgILIJrtb3p715gbn3RjPuUsnssZwc7Pi9mZJVVZsyy1aa6b5nO2mGbPtzfhSIBAUWU61XRMEtE3DnaO7LG+XZGlGlmZ0TYPwgQf373J0OOE7v/H7gOQv/dvf4uzsYkxDQoIQGAmr5Yabyy1l1TIpctzgUUoglWI6nSCFAi8ZBo8EDhYZk0nCtixpu9FImyQ72Yx3eODkdM5kmkAQoCWzg4Ikt0SRIQjIi4Qk1gy9B8KYz280QULfNIgANhoLuMLgx74GOS5H+NE0KwX44Oj7jmHoxwSn4KnKCtc7mqpGhIBWAj84usbhOo9gvNGXWuMGR2QVcWSIrKXvHEob6qal7wOB0SMsJDgXqKuWqmqxsSFNIyDg/fj3XJ7f0NQtSkfEcUxTl/jej7ItpUczdF3TtS1aW7wXDM4TcCwOFtR1SxxZisn4giBwaOF59uico6NDfuf3P+Dhu6f8wW//gM2q4cGDU/ww4Al43E6S1qGNoWkcH77/Etd6Xj695up8jTWaoXM8f/KKsloHlLtFCr/y7buf0/Hes+dPPfsFYM+ePXt+Agg4FIQ4iazMI0NiNWlm6buB1e2Go8MJR0czbq5XeB9QSqKVIgBt1+CGjqF3WDs27m7LDdvtesyF1xHWZmgVIRilPMMwjFIV72nqjuAZNexK0TQNfd8D4MPoHUAIvPdopUmiCG0kve+YzQ6YFHPiJCXgub4+5+r6NZNJht79XsH1lOUGH2C73VKWt3RdSQieo+M79K1nu9pgjCLNEqwxTKcTqmpDVW8QwOnJA7SNaNqa+XTBxeVrrq7P2GxuOTgq+Gf/9LcIPnD33h0+/vglddOTZxnrVUNV9ay3DX7wBC84OipAeIbej6ZmKamrhu22pe8Hjo8KDg8npGlMXTdorciyT2MvLReXt0gpuffggDg1tO2AVIGT0wWLwynaeJJEoyVMpgZwBOHJigwbGeLEjvIgN8phikmOAJqmJU0yCKMEy8YRUmsQAq00QcgxrtREBC8w2iCDp29bfHCkeUI3BLabFtAMg6csW6RQtM2Ac6CtgSCpqjERyijD0Pb4fhz8pQxoa1Bq9D6UVU+S5WRFjACS2LDZ1vS9pK5bZosCZRRt26G1QSmLVmMSUdeOL0AQSCJLVa8ppgVRlFLVNZFVZGnE5cWGJE/46MNnBB9wfkxj+ua3vsLl6xVPHr3g+vqS7eqWtqpo2oG+FfjgODieYFTE5dmKIon5J//oX/L73/mQtupo6g1tVzXeIxFqnQT96HM63nv2/KlnvwDs2bNnz08Ab/3LoNS1FpI4NuRJjNaB89fXFJOCLNdsyw4XWtIsI88L+n5AKYWNYqyJ0HrU85dlSdPUhDAuCsMwAKPuX0iBiSKElLtb5Z68KLBRxOAcXd/jgme1WnJ9c0PwnsE5lBr17gKITERVlhR5zmQyR
QpJFMV0fUPXtWRphrUJ1kQUWYEbeggBay1Ns2W7vWG5uub45JQoSnh9/gzXVxweT5nOUqLIjNIZa0mThDwvsNby/OUTTk/vcX55zouzl2STCe995Ws8evSKjz96RpYWXF1fcXOzRe6+7+XNBu8ABG3tsZFEKk9d16RZjFER3gUCHmMlB4cTksQSxWaMukwMs3lOmhqcc6zXJbN5wb035kSRpGsgyxK0tmR5RFZolAKlwBhBkkb4AJFV5EVEYMBGljiK0VoDkul8ho4MAokQEqMNaZZikghl1PjZKTkWvwmF0RqlFSFAud0SgsNajdSG1bqmbAf6EGiHQNM6/ODp+wHE2GSslCRNE5zvUFoihKCuakQYpUpRHBEnMdrIsReg7Tg6PqB3jjSPaLqOthlYrzdoI4nTmKZrd68XmqbpEAgia3B9Q91sGPqWoesJDOgojLG1XpIXMW+9e8D9BwXVaskf/cv30QNsbkpubzYkacLdO/eIbUS53dA0Lbe3S8qyIU4inO+xVvPi2Q1GS9770kMuLs756KM/YrO9wjOs0yJ/lefpP/mP/87fWX5Ox3vPnj/17BeAPXv27PkJ0LV+6Np2UratNNYQvMNIhUYymyW7ZldH1/VIIfHOE9kELS1xnJGnExCwWt/SNDWRjcahvOswZtRn611yTd/1NE1J2zUMznG7vGW92aDU2DtQl6PESCsFn5laAxBQWlG1NRDo24bV7TXL20tWt1fUVT0mEEUx11fXlNWWrqtxbsDYiK5t8H3Dze0VB0cHNG3F5eVrIqt594tvkSTxaHANPfce3B1ThUxGnk25uDznzp17tE3D64szFosjjg7vI1Asb1ecHt1ludrQtI433rzDg4dHtE1PksR0Q89mM+AIHBwvCMKQpNnYQOvqcfg3BoKgrFvSPEZHCqkEaZoghOR2WTEMgcOjjMVi7EpQSrI4immaBrvT2BOgbXuMVhgjkFox+IE0i8hSjZSA8ERRhADiJCbNYuLEYKIxtSfKEpIioZiMTcpKa6RSxElMFBmsNUQ2AjzDMKCUJo5SqnVL2zjarsMHzzCMvzbrBu/HJt627dBG4N2AGzzO9WgrkRKkEjSNo64rfOgRSBSStm1R2jIMjN9P5yi3NcEH+m4gSTIQnq5tSaKUtm1Yrzfj61RTU27XLJdXdE3N9fklQzfQNg3BOdLUkGeGB28e8ld/4S+TTiQ//O4HlMuKo5MjotxyfnnBbDFnNp9BCOAE65uSm6sNm1VHP7QYY9hsOk5Op5zeORpjZ9uabVnZpu22X/3WX3zxOR7vPXv+1LNfAPbs2bPnJ0DfuMOybO8FaYohCHrvcT1EdozkrMqONJG4PkBwLA5mY0JOPiFLJigTjZp7I2nbCikFaZoRRaMkyJiIOE5wg8f7Ae96qmpLVVU450nTbPzf7wbONM1IkgwlFUPf0e8iHt3uRUAbxTD01PUW7wbAk6UpfggEL8jzDIGnLNdIKTFmzLRfr5fM5zOatqWua+JY8dWvfZG+81y8vsV7z5e++C43V1dEkeX4+Jiu78izguACL55/QhHHHM2OOJgdcnN5jdUSaSR1d8tXv/H2mN6DpOvGG/w0s9hI84V3H5JPCtpuNK5eXtxioxSj9Vj2tdlyeDzDpqPGX6jAMDiePb0A4Oi4IIoNUSSJY5gfRNjIUBQpp/cmKC1ZrUoE4ytOnFiE9ESxJk4lNlZorUkSg7ESaRRREtP2A1keU0xTlDFk0wxpJUkWEacxSZ6irSFOY6aLGcYapFYoq0nzHKXHheb87Jq+HTBKE1mN6x1uCJSbDgLgA34YU3vatieEUZuvtCTJYoxRuCHgBsiKnK5zhODJ84TNdkPbe7rOk8YRw1Cz3dZsNi1xnGKNpusalB4Tf66vb+g7RwiC5c2Sm6sb+q6mrrZcnJ3z4sVzXl+cEXBkWcJqteXgTsov/s//Ml/71hd4+uwlL5+/4OhoynQ+49Hjx5jIcnxyhHOOsqxJkoTb24bXrze03cCLl+cYGyPwFFmBNbFX2obtevvW7//jf3z4uR7wPXv+lLNfAPbs2bPnJ0A7dP266vRy26arsqF3nqZtsYlhtarRRoEI9H3PdDZjcI754oAkTXHBgRxnPIKjqTZkebq7tQchxkx95zzGjPGebduSxAlHh4fkWQYE+r4jBLDx2Das9KgjN3pMj0ni9LMCsRAE3eCxcU5WTInimLLc4JwjSwvSpEApTZZPSJIMgcK5gSAVIUi6uiXPE9577z0++vAxn3z8lKYuefjGPW6ub8Y0oXRM7ZlkU4zWXF+9Jo4TTJQwnc94/uop17cXTGZTVtWWn/35v8ByuYEgefb0NdpotmWFtZa33r6HG+Dp4zOaumV1u2E2yVBilDfd3q65c/eQ+SzBDwOSUWN/c7vBxobFQY5QYszyTyKiRJHlMd71FFPLZBqzKaudzl6BDESRBe93nglFsltE0sxiIoGxhrxIQAiUNru+AUGSmrGFOFHEqSHJE9I8I4oikizdZd9HREmKNIo4iVDG0A+OumxGMy8BoQSDg7JpaVtH3XQoIwki0LXQNB19H/AhkKQZQhhc37PdVgyDw/WBpumQSmAijQ+O68s1UgiSOGG9qnny6DV9WyGkoqkr6mZcKLXWrJZL8mICQtOUHVdXt/TDwHSacrCYsLqt+PjDMwiKPE+pty1pYvkf/7t/ib/yP/xLDI2n3jTcOTlksTjgyZOnXF5dE8WWrh/4+KMz4sTSd4xpSq3nd77zA66vrrhdXoWub67jNH6NkMWqrt/+HI/3nj1/6tkvAHv27Nnzx8wv//IvxW07/PnByXc2rdA3m4bL65uxcKntWC83xLEBDFGU4QaPMZq8SKmqLf0w0DQtsY3GNlkh2G7WNE2NlIIQerbbNSGAlAatDAcHp8znRwQgTmPSLBnVFS4gdvE/Qow391JqhNC0XUfbdrsFoiOyEX3XsdksaZot+aRgOp/Ru56mbmmbjrqqkFKRpik2isjznEmxIEkyTg7v8P7336faVrz99gPeefcBL19c0nYtxycHFEXC/bsPxn8+4J0DqXjw4CHPnj/m6vac07sn3N6ueOedL3N1dUteZFxdLkmilB+//xxlIg6OFlxfLXnx/IxyU5NnMcUsxsaC4OH12ZLJNGE6LQDo2orgBuqyJ/jA0eGE4D1SwGyaEkWS+WyCFJLpLEPKMRKzLXuUgDSJiNMxVlSKMYJVaU2aRUSxQmtBHBuSdPdCEI2RoXFimEwyhBDkWUKWGZLUMJtnZFlMkmqiWIESxJmlmKRE0dgrEAQUkynltsM7T9M5vPfUm4pq23yW6uMdY0Mwge16/LPSdT3DAHGSIsy4MPreg1A4J+gqR2QsfdPy6tk1Uiua2pEXCVcXt5RlS1PVOOe5vb5GEDBasdmskEIwmc1AStqq4eb1mucfX5PYlC+8c4e27Pnhd19Qr2v6xnHxcsVqVfIX//I3+Iv/vZ/hZnXL+eUVb731gDfffEiW5UgtSFJLEltePr3EyDHJSgTJelVSVSWDd62Ko+c2SZ+ryJ5v++bP//Iv/7L+/E75
nj1/utkvAHv27Nnzx8zNzcRu6/6tTd1n29bTDgFlRp33dl0Sa0OWxpRlzWIxxXlHFEes1kuEEKNGW0ic5zOD72pXDtb3Lbe3VzjX431ASjneHEtDWVZIOf5rve16QvAIAZvtmtXqZmfENSg5SlekVph4bBtWUtI2NQRPlk6ZFMekScYwtCgFk2mOkIKua9hu158VkSkh8d6TpxNuLlcEpzg9vce2qvj4oxco5fjGz7xNMYmZzxfEccL4OtHjvOONu/cpVys2qyXvvP0uOE2RHfDs6VPWqxvWy4qrizU/+sEnZFnM4cGci7M1N9drklSRFYZ+6HDeI0TMo08udxIpSQiBuvQ458fGW+c5PJyiFESRIc/GF5D5fIK1kiSxCBGYLyZsNzWbVYMxmmKaMJnGTKYWrSVaCbSWJIkmzy3WaLSRFLMUHwJpFmEjBcIzP8yRQuKGnqIYXwFm84hialBakGQRSWrIckuaW5I0Ip9kOzOuJcvjnQ9hgCDYrioIYSxka8a23mFwxIlhta1G/bwy1FWJEANSCIKDumqxMZhI0fY9201JnhV0rcN5hwue2FqkhOvLNdW2RITAarkkeIcIAe88q/WSNI2YzSdoqzk8mdD1A7/zLz5iaCVf/cabmBhePF+xuh2XlcuLW/7wez/m7sMD/p2/8nMUs5inz58xmU3xwGw24e79Y6SRnN495Ob2hrzQ5FmMsZppsSDPp/V0MT9fHB5+MD88+GEUJ8P777+/n2H27Plvyf7w7NmzZ88fM3EcB5Surm42om47pJIUacb19Q3bVUkUy/H2PggmE4sbHH3viKKY+WKBNZq6XBPYRTga/Vn2fl2XxGmCMpogPB6PEIK+b1FK0bYtN7e3eO+Y5NmY6e7c2BqbZ1gbjSkxdcm2XBOcG9OHtCHNCubzI5IkY7NZcnFxRtc15HmOD44QHGW5xfuBqtrSte14K11vubh8zeB7Tu/dxQvYbjvm8wnf+Jkv8fL5DavbGuck23LDcnmLVJLT03skUcLt9SUnhydIqajqmvOLZ5yfPWO7qnj+5DU3VyvqduD0zjGffPiM73/3EX3v2WxrlLZcXZVIqXn16oreeeZHOVGcc/bqkrJaE8UWbRRpblFGAIxm4r4jikb9/mh6HUiSCIRjdVuRFymLwwnTWUSeRySJIc9ztIY4UlijSZOYOIkwVpJldkzhkeOCEQhoEyiKlBDAe8hyi7GKYhKhtEQpSBJLkhnS3BLno5xoMstxviPJNFFkxhz+ukdIiY0Mzo2xp3XV0DQNcWoIeNrWI/AMXcfgekSQ+H5gdV2htaQfHP3Q4wZPnGqywu6iaHuSWDGf51RVA0g22y3aSNzQIaQnjgzlZkXwHUlqxy4JFbjzYIHrHN/73Y+J44Q/9299A2U1Zy83vHh+hXOBclvzz//pH7Lalrz1zn2m8wXL1ZYkjTk/v8L5nsUiJ4oFxSTjk49e41xPtS0ZupY0jrvJbPJofrB4UeSzi/n84Dd/7dd+rf98T/qePX962S8Ae/bs2fPHzK/8yq9s3rj/xj9bHBw+KZvK123Ptqqpyga8J00N5bZmMklxw0AIoJRgMZ9zsDjatfTG2DgeC6V22n/nHFGcYG2M955h6CGAGxxKSfq+o65rsiwjiWNulku0tuRZgZKW7bYc5SRtQ12X5EmGFpI0jrHGYq1lcKMRuOtbht4RRQkhjOkwaZownU4BxsSXJMEHT9u2hCDwQeCcRxDo25p+CHz80RkX52vcEFitbrldLpnN5izmB7z58C1enZ1xeHTKnXv3qKuati+5uDxnNpnTtw3Xl+c8efoxX/rKQ549OWdzO3YczOdT1suBy9cNaZJhleH2uuT4dEGUJLx4eQlKcXg8G/0S3qOUpto24/fZO7RW2Eji/cDQB5QWIAJN7RBSMpklJCkkmSWOIow2aKMwVpOmliSJMUajtaSYJGR5jFSCKFJEVpEkETbSxInEGMUwONLMjr6AxLJYZGjF2EegFZNJQhIr0lSTF4YoipBSoJTE94H1uiJKIoQArQR11dC2LW3Xo7QkTSP6xuGcp2nGP1dlPcZ5+gASPcrBgmS7LamqLUluQEi6rsf5gcVBho3Gz9G5QBRHRMnodTBWYoymrlqytGA2n1JteybTnK984yE2cfzohx/Q9zXf+rPvcnovp2sHnnx4wVAF0sjy3d97zKPHl0wWc4IIrFcV1mZcXiwhwPq6pis9fdXw/h/9iLffvsO6ufSrzW0vg1RZln3PRtFv/d1f+7XfFUKEz/GY79nzp5q9fm7Pnj17fgIcHR39nv/S8LeFGKaXFzdfqJpOhgH6YYo2mrb3BDeghCaOLU09GiZ9gDjNyPqOPC3o+x4pJSE46qYkzSZ4PyAERNZSldUo55GaNE0pilH3vllvx3ZgD3VdsVqtMdrghgFjLEUxwfmBzWaL84E8HZuKfXBMiinGGJTWCDn+Na0Uq6albhuE8LRNRUAynS0okpS6aqnrDctbTZpGCCGJbERVdWR5RPBwfn5FFGVkWUGcKD788H1CgHt379G5UeN+dXnNl7/0dZbrWx4/fURkDf+TX/pF2ralaxsuVxu+8bPvcnZ2ydCBjTxHJye8/4OnYxpR3VOVNcUk5uTkgKos2W4qsiyl2nYkcQpA13fMFxOE9LhB0vfj4Nw1A2kS73LpFVFkmU4y4jhiGAa6viFNI5QeZUZtFwhiTAYydjfIz1IEnq4JBK1JtKcsHdpIQGBjg/eOLLJ4HFJofPBoLZgfFFR1QxxrssxSVT1CQAgCgUJpSQieKDaEMOC9wLmAc47FPKWueqRU1E07xr0ODiEENh5v9K1RYzOylKyWW9LUIKUkjhUhOCaTFGsVUo6ldMYoghZoYwDQeoxRbZqGPEvwi5TlcsXx8R3QgavzW67Orwho3vnSPdI85jf+6+9xdbFlW1YsjhZcX28QYsvp6RFlWbFabUFNePniljBaFUjTmBdPN3z04x/x83/uW+6HP/rRxzZN/9C/m/zmf/rtv9N8Tsd6z56fGvYvAHv27NnzE+B/+R/8B7fpNPvhu++9/dt37x6+MhovFGhraZueuqypqwGjFFGkEHjiRI057HHOdHZAlk3I0gLX9/R9y+B6QnAM/RgN6Z1DSUWe52MOvRBsNhvatiWOIkIINE1FXVVjkowE7z1G6TEjv6ow1pIkCXbXMaCUpm1bvA+4wVMU4xKyLStCEGglKbdrmqZlMp1zsDikLEvabszfR0DTtnRdy/J2iRRjsdbNzQ0nJ0csFhMePLjD67PXeCf4wttv0XUd6+WStq744rtf4uLijB9/8j7res0Xv/IVimLB5fkNVVkTRE/vNC9frhiGnqOTCWevbgFJ13coI5hMY46OJnRtze31EmsN3nnErpSt63om04woGjsU2qbHOQ/hU4NvIE0i8iJiMkuYTPMxW19LlJREkSVJIpTS9N1AFBnSzJDEmijSWKPIspg4jfBuICssxmgIAakFUim0VZhYEaURykKSGZCBYhaNhWZWkeQGG0vYfXbaSKQUDMNAFGuiRDMMQJBIYFrERFbifUB
rhR8cQngQApto2u5fvRQN/YAQiqpq0AryNEZIkAom04zJNB8TjtIUsyuli+MUbSKm08XOUDxQFBPyPOb89RlxHKGtYrNs2a4qPvzgBZuy4Ws/9x4qEjR14NXTSwwK4QMvnr1kfjChmMa0VcPqtuLZsxd8/4/+COccSZxw9uIFv/c7vyO+8IW3v5ymuYvOIvc5H+09e34q2C8Ae/bs2fMTIjjXGs0Hbzw8+a037h6+jK2i3GzZLhu8Ezvzrx1vPcWYEFTvhvLZ7AClLMPg6doerTTWGPquJQSP9x4AExmarqWsSgDiOP5M7qKVggBdV9O2o5b/8PAQKRQhBNK4YJJNMMpSbpf0fY0xlsgmJHFGHCW8eP6Cpm6YzaYYo8eceWmZzY6ZFAuqqkdJS5oWTIoFTdvSdi1xFBNFMdNpxpPHzxHCcP/hEV/9+tucvbpEa82f+fmfZT5fcHV9xcFixtHRCavVildnTxAI3n7zSzx84z1evjhju9pyeX3BW194k8efvKSpB7Q1NK3ng/fPUFKzvN0wn+X0fUtZ1lye34KQZHmB94o0s/RDi9KKw6NDttsarTVCQJrFBOExeny5CD6QJprptEAbi5IWgQQkUkq0NoCi7z1Ka6yNRlO1VRgrEFqSFpbghnGQLgyD99hIYeNd7r8WzBcZNlLEiSVNY6QULA7SsexNKyazBBtLotSOaUOJwkYGISHLY+JkXASlBB2NUiRrFXlmURKkFAgVmC3GgV5pxsWl97ghkKb/SoYWR9HoX0gS4rSgmMzRJmYyOWAYBFpHZPmMtvfcvfsmbR+QOsLajKJI2W62nJ4esl631JVnOs949fKGtu148PCIh19YMAw9F68vCc4zm8958vg5i/mMvu1Y32zGorK65kff/wF9W+Fc65bLm49evHz1I6vF/+z2xYuDz+9E79nz08N+AdizZ8+enxD/3v/uP/pdodSHVonbxbS4nuQRiRb0dY8UMJ0mJKlEaUiShPWqIQTHbJqh1FgAtl7fjJIMBEppnBtQEvq+R0iFkBIpFUkS4/xA33ck8WhwLcuSYehRWmPsaDbebDeUTQVSoLSmdz1ls8V5TxynNHU16tyNGV8d+h6tDX3f4b0niVKyrEBpye3yirbZ0jQVwTu6viGfFGRZvstxr3n0+DEguPtgzle+/ibn59dU9Zavfu1LgOD5y+f8/M//HEmS0TQ155dn+AAnJ6d88xtf5+rqJevlNY8fPcaamMuLLQJYL68IfuC3f/P7DENP53refuc+5XZLcLBdlwghmc8LpFBYM95YDy5w5/4pZdkghEBrizZjJr5Uary1F462H5jOMtIsRUgNQtEPHVKDNmBjRRBgjMHsXgac60kSi1QK1PiakKYx3sFklqG0pK0H8tTQVB1GWyaFZTotEEBexERGk8aG2SzGWok2MD/ISFJFnOrRXxAphIAsi0kzjbHqs4bhfJIQJxJrJXEcEycJQgYWi5wi1xRFRBRpkkTj/DDGki5yAj1CKaI4w9qUOMnIihlCGorJfJST7boI4iyhqkvuP3iHq+slNoqRUpPnGTeXG95+9w3W6xIRFLGVfPj+S7abnvlBwb/93/8Z3v7iA3rXYWNBkeWcvbwmSQ3dUHN1eY3RhthCWd6gI3GzOD76YVRk/+dh6P4zbcV/9O/+1b/65ud6sPfs+SlgvwDs2bNnz0+IX/yFXxhbtqTyUob2eJauHpzOQmQEaaLJJxplNHGsmRYJbnA7eYhFCkkIA+DwYUApje8dVhv6rieEUfctBChACgUEZrMp3nvW6w1SKdI0GfP9+5bNZk0IgiiyBKD3HWVdjoViNmIYOrQxSKlou5rVasnhwTHGxgTCbmA2GGu5vR3z4ctyNTbCNjVI0ErRtB11U31mGH3jzTu89+X7fO8PP+Hy4op3v/iAqq65vFzyrT/7Laq64+b6mpvLC/A9RT7hG1/7OlfXl0gpefboOcvVFffuHeG7nlcvnnL3zgG/+9vfo2s6bKSx1rDdlqSJRe2G0dk8w5qYrm3ZliVd65jNJyRxwmq1ZjLN2FUk0HU9gvHGvNxWxLGmmKYkaYKUin4Yf+Zaa2xkxwIzo4isQYqAFhItJVFsgYDVCq0ENtJ07YAWMJ/lEAJaSQ4PpiyXW0Awn2ZoqVASolgjhCTPIqazAu8DxmpmsxytJXEWEWcWZdS4tEWWIo93XQMZWisWB3OM1XjnOTo6QkiBtYaT0wVpYckLg9WKJNYEP7A4mlFMMwgerSOUsmhjidMME6c0Xcf84BikZHA9WkqkDLRtw4M33uL8/HyUU7WONEvYbLbcvX/Ejz94weLoiNVyxeXrJe//0SuePn7Nw7cP+Zk/8y7VpqTalKggKDctb3/xHouTCa53bDa3NN26S5L0dVFMrmbT4g/+0//8H/4nBPEPlHD/i1/4hV+Yfm4He8+enwL2C8CePXv2/IT49V//dWcy/RtxnD1aLOaf3Llz8DxPVZlEDi2BIBBi4M7dnLZpSJIYbQVVWRKCp+8H2qbBO5BSo6TEO4cbevzQQwgENyAFDF1PGucEB03dMJ/PyYuCtu0oyw1d1xAnEVFk6LqOvm/pmho3DCTJmF9P8EBP3Ww5e/2ctqvZVCsQo+wnjnLSLKfrWpQev8662e6MrBKrDPW2REvF0eERzvd882fe5Y2HR/xX/8U/5+Wz17z7xfvMZgXr1ZbDoymb1Ypnj5/x+vUrJpMpR4tTvvn1b7G63dDVDRevz3nx/CV37p7iBnjx7AV4xatXl4RgaKqeOE5QxpIkmjQzpElMmqSEILm5XbJZl+MrgFVMJjnXV1e7gi6FlJrtpkYgxmhWDVU1cHwy/2z4J4ylZQKJMRZjLOP/fQrCLohGCoHSCqFGvb4SiijSeAAhEWIsC4sTS1XVHCwmZFnC1dWaIQxM5yltM5DEEUIEQggUWUJeFLjek2eayTTGh0AUj/n4AoHznjTPxj6IKEbs/rMoCpquIZ+kFHlBWdbkRc7h8SHGGpQdk4q0kggBi8WcKBoXRcRYHqa0IU1zXAigNWk2GxfNEMiLgrJakyQxSZTTlg3VdoOxgvOzG+LIEsWGDz54zrtffoNHnzyh2qz5+P1HvHxxjVCK07t3WBzOaNsOJWPaXvDNb32Nk7sHu69BvY7z4sM0z84CYgLwf/sH/9Xf19b+o1zzM5/Pqd6z56eD/QKwZ8+ePT9BNhQ3XvvfNdY+0lJeJpG+PZglg3A9Ugiy3NK1wy7yMTCdJBA8TV3Sdx3aaIyJxoIvAV3XIqVEEJBiLHEaQiBKErSxrDdb4ngcBLebDX0/4JwnjhOk1Nze3tB3LSI4lJAcHx/j3IAbBqTU1HVDCIE8L8Yh2sNmuWZ5e0sIDgnEcUqeTpnPjkiTAoIgzybjkqL17tXBcOfOEdZa/p//j+8wySfcv3+CG+CjD5/QNBXWSF6+eEGSGO7fe4A2hgdvvEldN2zLDc45PvzoQ+LEkiUHfPf3f8TF60vOz2+5vtmQTxNW65IQYLMpidOUbDplcXyMF5LBB+qqxlozZvanCVXV0A8t1mq8H6VUUggia4
mTGIEijizHJzMCmgC0XUMIHmv1uABYi1CSAGMaUxyBUtjIEsUKT8CHgI0TEKNcyDkHjMlBURSz3ZacHM+QQF23xFmMjSJC8KS7jH039CxmOZHVY0HZPCdN7Ti4y0AcW5ASoRT5ZEoQgjTLCUAUxURxQtM0vPHwDYZ+YLnaUEwmHBwejr6HIiXNE5q6wUYxk9kcKdXYHOw9BInVCVk+pSpLYhOT2JiuaxmGgSSJWa5uOD094dXZLdoaXjy74Ktff4vf/s53OT6Z471DG8PiKGe5WbFZ1/zwux9RlhVOBlSccvLgAB9atIp48fyCr37jixTTSRBS9VEUVUmcvpgI8/1Pz9T/5df+8//GFPPvfD4nes+enw72C8CePXv2/AT59re/7f/Gf/gf/zcd4Ts2No+T2FwoMazbah2882zWHUpZpB4HvNlsjh8GXD9gjCZJc2yUEMcp3nukHG+tlVJ45zHKIKXGe09VlbsBTuGGcfBXWpPnE6yJ6Id+11SbYW1Cnmc0TUPfN8RxhPeSSbEgSTLSZCwFOzg4ASGp6jWr5RXDMKblpEmExOG9QyhFnGVEcTLq4q3BB8f52SW//3vf5/TOPU5P7tA1Pe9//wnX17e8+97bDJ1gPpvwxht3qZua6TJ0vroAAPoYSURBVHRKZC19W3F6esQPf/RHWGs5OLrD5eU1q9Wai6tz2q7i7v1DHn/8BGtitpuaNLGIEMjSCXXd4H3YdS7ECBmIk5jNtsb7gDUxfe8wRgKONNUQwGhJXTfcf3iCFwEpNX3vCD4AAZBEcQpy1ODjA0oKoljvknoEUWyItCGEMXFHKYmUfDZUhwBSQiDQdQ2zeTGmQlUd88WUgMDEEdoahBRI6ZhMx5eIKDJMJ5bIilF6pOQuDSqMRmVjiJMMIQ1SaYpiSt87kiTh8PiQuq5p6pr5bE6SZGitySfj68F6vSXNJmT55P+rdyIAkY2RCMpygzYGpTTr1QqJYrvZMISKYlHw4cfnLJctr16c8a0/82X+63/8OxSJ5cfvf8y777xFlqd4RrP7b//GHyAY062Ojhfcv3/CxdkZ/bblkx8/5t/67/68ny4mvu3agDG0mbf/n+fqV3/1V/clYHv2/P/BfgHYs2fPnj8BfEh+s/d82PX+si6bK6NUH9kxqccatXsNiNist3jv0VoxKaZEUUIUpwx+lAGFIIminMFBCGFseh0cIYRx2FQCqeQYo5gkRFHE4Hq6vqOqSibFjDjKmM8Occ7Ttg3eO/q+I01SbBSxXt2yXN6Mun8CeZ6TpDnT6RypAhJom5rNdoUPgaPjuxT5lKquUFqRpJazV2c8+vgps8kBAsXV1SVPnj7DB8dXv/Yu83nO4iDnrbfus16vefjG28RRws31LXdOjvnx939AHkfMpuNA+uLlU65X5wijuP/GCVW5pVw1CAICwe11zd17d3DOUa4rcB4lx8z8JE15fX6NNpa+H5BCYK2m6zu88wzDaNbtu4HJJKeYpjtz8Di8K6Xo2m6UDGmFd4Gh8/R9jzYKIXd5+R6slhAEWim0kERGY6RGfVro5Ud5j7WKoR+wZowI7bseHxxpkeMRSC2ReiwPi+MIYwyDH1+KtNZEcUTwHqUUUoBzA1pHu18GIRTWxkSRYXl7y50797Empm06vAvji43QpHHGdDpls9nSth15PkMITQiBwLBrgIb57GA0gjswOsVGCU1TU2Qzri5uOTkumE0L+sbz2//8R2w3JV/60tu8eHqBVobf+q3f4ctffYukGL+XxMScv1hiJLx4fkk/eBJrefzhB3z4w+/zW7/5W/z8X/jzh4vDY9F3w+vb2+H28z7De/b8NLFfAPbs2bPnT4B/71d+ZePj5B+Spn83aP3dvCjKrqkxSnB9udql5nREkcVE41BotCSJE4wxaK3oBkdWFCitkTLQdw1VvUFKiUSi1Bjv2bZj+2vbNGw2a/qhp65KBII4jjk8PKTvewbnqNuGpm+RUgCezXZNVVfMZ3MEgrOzFyyXV1ij8MHTtS1SKTySOC04Ob6HUZaq2hJCQAp4+vgJy9sbjFYEJ3n9+oKXL8+I4pjDoxl3752yWtaUm4aL60ve/MJbAGy2G7Is4/nzlwx9j0JQlyvK1S11taLrKt56+w1OTo/56McfgfCgPC9evKIqlwyh5+bqmuA6xgRUT2yTMXHHjIVrUo4Z+VVT03Xt2KJsLDqOSNKYPE8QPmBNRHCO4EGbiCDEqLv/VIrV1uOCJGEYIPiANoo4Seg7hxRiN+jL0UOhNMYohJAIodB6NAt0w0CaxQg1fm7WGKRQ+BDGG34bUdc1URoTkAghsZEmSWO8h2EIGK0RAZTUGBuR5cXu6zVoOyYcOe84ODyi63p615NmOd5LQlBMJlPSNGF5e4tUaiddEiAEgTC+PKmIJMto+w6hNWkxoe4GZGTJJzPOXq5474unlG3J5XXD//2//BccHM3ofSCfTEizhB//6Clf/8ZXWBxNAU3XOZI0IU40z56/YL1aM58vSKzl+uxi873f/+76/uHJXzZCHP36r//6Pv9/z54/RvYLwJ49e/b8CfE3/vd/6/1rO/8vglQfOedetGXbNWWDFAJpNc5Lbm/X9F1PPi1AwuHhwU5zPQ6xaZrRNRUhBHrXIoQiyVKiOKJpGqqqGn8/IcbbfTfgdjp3pRRVVVOVFZGN6LoWgscIQ3ABISWTfEqRTyiriuvrS9brJUPfcX19yfL2kqpcY4yiKKZ4H2ibjq5raLsGrTVt2+FCQNmIJC+omorb22uK6YzJdMHp/QXXV7fcXK3oGfjqN79O07Q0dUmkDdYo4ighTRMEgjydopREhMDd0xOKNOXF0xe4LrBtNnSd48mTRxweTbm9XNG1PV4EvNB4L3beicBsMSWEgTiOGdyAEhLvoGsHDo8WEAIIj3MO5wKb9Yq2qXHDpyVsDoTAu56uqWibBsHom3aDAzkWfElhkHJ8YXDOoZTCGLXrCrBorfE+IMTYJ9B33bjkKUPbtqzXa6wxo7RLyF28q8c5TxRF+OCx1mKtIU0TmroGRonRzo6M3hV3eQd5tiBJU+qmYjIpcA7qpkQqmM5ndEOHMTGTyZRhaGmbijRJUFLh+46hHT0hgx9Q2uL8gPcDQ+dI04KXL19ycLhgudxwc73l7Xfv0fQl5+dX/Jd//5+QRJZPPv4xic24vnjN00dPuffgHofHxzgfuFmuyYopi8MjEFCWa1xwQ1EU2+22+uCjR4+eCu//1//T/9H/4N/5HI/unj0/dewXgD179uz5E+USnSavejc80lKtRBDYyCKEo21rzl5ekGcZymgWh0d0fY+UCmsj7ty5w2azpB/Gv/ZpE2tsI5a3l2y3a5wbmE4Luq7DGEuajoVe2tixQTayKKnAB0LwRCYiiRKMiUAI6qam7SpW61u00Rwf3SPL50RxStd3dH3Dan1LWa6pqi0IR9Ns2ZZjWpA1FqMjFgdHzOYHBGA+nzKbZ0xmMdJErNctSRzz1rtv8PjHL3n5+CU3t9ccHR2TpimxjZBCMimmHC5O0EqTpTlxlPDk0WPWN0u6rkM4a
DYrRJAkWUZdNrQNtG0gjmOkVITA+KqiQWuz07cHJIJqW3F4cozAMwzd2KysND4E6qpCabmL8Wx3EaiKrq4YulE6ZIwdpUEhELwbE3hMQpJEu89UjLGh1mIjg1IGY8zYxtsNaDkm6pSbLXmaYLWiqSrC4Ijt2OzsAyRpTt/3RJHZFZcJIBDFBiECXTt+1oTx1l4gUMpQNy1GR6TZZCyFE5qT0zuUm7EdOk3G9l/noCgOSPOUpinxziOFRCEZuhbvB6RWwFiS5vqWoW+xWnPzuuKDHzzhrbfucXF+jY009x4csC23rJdbnj95zunRAY8/+YjIWp588pgnjx4TWU1fN+Aky5s1201JbA1DVwcf+rW29nwyn752Wv7LbdN8z/fhb/zSL/zCtz7fs7tnz08P+wVgz549e/4ESatMdEMQINdai6UUeKUEeRpTlxUCByJgtKHvh3G404bpZDZKbJQiSROGoSOKYpIk4uzlE5a3l7iuJ4li+p2BOMsz8mJCnKa4EDDW0Pc9XdfjnCdLU5x3u1vvgdX6hpvbC4ZecOf0LebzY0DQdQ1dW7Mpl5TNBhd6pByNwIIwpgoJEASGruf09C5JnBHCaAg+OT3k3v1jpILNdc319Q0mCyyv1pw/O+fVszMWiwOUtVR1S11tMcpyeHBM2zVcXl5xcHjE+flrHj58wOB7tuWaSEsuXr3g3Xfe4cWza66XFc+fn5OlOVqPptu27ZB6d5PuPDaKUFqx3Y5Rq8ZqNpsVIXikUgit0ZFlMp3StT1t/enrxli65r1ncH5M3xESHzyCQN20aK1QWu1MvBHD4JBSYiKLjiw+BIZh1P0LIcc4Tq1p2h4hBcZopJBUVTVm/CcZ3kOejx0AIYxfv5AKKceytjRL2ZbluBSIgBRi1w8hEATariFJMoxJ8N5R5AVxHPHixRl935MmOXVTo60lS6eAomsblFLj96gVbTu+hMRxipAWY0b/QblZ8eUvvcujj15hTYw2McvbCqNjjNYEWpq2Zbtueeud+6xWGxaLI16/esX561fkaUK5XhNJwdCWPHvyiKGrB6H8JVosQyCaLRYfxEXxf3BC/B+VUuLzPr979vy0sF8A9uzZs+dPkLhpuhDYKquW2spzG6lWy4DrPGGQTCYFQowZ+wSPNZokjkjTmBDC7jZf0/Y9SZKzWi5Zb26RWqCtRmpNPwx03UDAjznvSJTQSKGQUqK0RJvRcwAQxDgodl2HlIr5wQHs7LVKj1nxwXuMTphOjvBOjIViNhlvsrXFmgilDJPpjHK7ZXV7S1NVRLElihMeffKMZtvx5NETnG9ZHB7w/NFLnr34hHyWc3J8Qt9UdDsj8WJxQJFP6IeBo+NDlqsb3nvvIcENnL18RZHGlNWG2eGC25trPvnxM549HqMo40jTtwPeQ5RGTKdTbm/XCDRKaoY+MDjHZJrTtWP/ghRybEw2BmvHMq++G9huK5TUwNi14P2oForjZDfgK0xkEUFAAEePshIfxhQdBONSYMzYEBwCxmicCxDARuOi0HefvvSMYTfeDVgbY2yEUhprIrq2Q2uNVGbnA5GkaYqSks1mO0aHurFMTggFeLwfRuN4nNMPA0JJDo+OccPA00fPKLIpbdvQthXFdDG2TfuBtm1RNkKq0cDctRV914GXrLcV+bTgZnUFquNr3/wy/+J3f4g0MZ98dM50WrA4OEZIixCBly8vmM4WHN2ZooTi9OQ+zsOmHuVozx495mCe0DY3rFZXTmm18QiPUNs0yb7z9/7+3/+j/+wf/sN/8H/6e3/v9z6fU7tnz08f+wVgz549e/4E+et/62+1Oor+OVZ930b6PAS/CsHTtQPOBSaTCdtNSVPVEALeOaJoHEBDEBirkGo0eBoT0bY1RmtiGxF9JhvxxIlhNsu4uDxntVnihhbnHF3f0TQNTVuTxGMUZNt3OOewWqOkYr1e0g0lSo9G2r7vqaqSyMZoZYhsgnOObbWkG1qiKGUxO8LalBCgrmq883R9jzaWx09fEILl9dk123LFV7/5ZZ58/Iqr15cUecE7771H3TYYpYiiiJPTezx88+2dpl7gvWNoa44Wh3z40aPRHxAkk0mOCIFXL5+jZUTfD9y5s6BrG+qqRinJZDqjadox5tMq1qsbNrcr0iTGxBalNEPnGIaxxRYEQ98xDN2Y/OlBBOjqHoJHKYEQYK3ddTKoz27kpVQI3FgIJkBJsYsXNQjEGE0qPB6HGwLOe6QYk5aquoEw+jQCDu/HGFKCwJixeVgiPlsUAgIhJdpYkiShqrcEPyb2aDW2IQ+Dw7keFxxRmoPUdF2LMTEP7j9gvVxxc3XFYr5gdXtD3w9ESQaA9w7XdwTn0Sai73ratqTvGrqqo617Tu++xdPnL7j7xozFQcEH7z+mqcfl4cEbJyRJQluViOB5/eqan/25n0FZDUKS5inBe6w1xHHE08fPKNIMqUXjEZ3SusyK/Emh9cvP77Tu2fPTy34B2LNnz54/Yf5X3/6bT7ww32uH/qXw/bNI0EkRMLHF+YAfPMEHXD+w3ZbkWUYIDoTHmPGWN7YxAsZb4KzARgn+04Uh1tx/45QQJEpZ0mTM/e922nUA58M4tAqJknIstgqSNM2J4xRjNGevniHwCCnJ8oI4jun6hm15TVWvqJsSrRVFkdM0Ld45wq4Ya6DH5jG9d0wmc9J5wXq75Of+7Nd5/vQlz548xyE4PLrL0PcgOkxiOTm9w4M37mOtZrVakiUZCs2Xv/Qlri42WD16FoSQ5FnB1c0N88M5k1nMnTsTqm1JVVVjZGYaEycp5bakmEzpuh7vxuhMbSOsjfAuUNctUo839OVmQ9dWKCVp24EQoKmr0bA7DOOiZM3oGfCOKLK7W3eJ2hmtpZBopRncGFEqhaDrOrRWWGvGFxU8IFBSkaYJBHZJTECAvu8IIeB3bWOjj8Ai8GilcG78M2JtgokTjFZU1QYhRx+A0mOa0GazhuAIBNKsoHcdbdeQFwV37p5wcf4Sm8RYm3D+6gXWWrwLJElCXW0RAqQYy+hc37JaXmGM5Mc/+gBrLKcnJzT1lsPDKZGJefn8Ba9fXzE7nFPMZ9x78ACpJWXZ8vLlFW+99wbKqF0J3eHYV+EdRhvK7ZYsS3DeS22iMi+mP/5P/q9/d/k5HdM9e36q2S8Ae/bs2fM5oEk+8IinJhKv81TXi2mG1nJMm/ESKQwuBOIkwXvP5dUF1hgiGyMQmJ0EJN5l/UspEEKQpAlZltC2HZcX1zvJyFg8FcUxkbUIBGmckSTpzlA65sh7PHXTUFUbbm6uUbuhNwRBmub0fUfXVYBASsNifoK1Ea9fv2C7uUaIga5taLuebDphMZ9x8/qcrl2zvHrGO1+4y+uXZ3zy/lOEC8TWQoCmadAywdiMg9MpJgpcXa0oiglHh8fcvXOfaTGj3Da8cf8+IoA1Fq0tp3cO+cJbD7FW0LcdXdfT9QPOSZrac3NzS57n9F3P9fX1qK1PE6Q2WBuxvF3R9QNpltD3wzh09wHfK1arLUorvB+H/qYeX0o+TfLRRqHN2PIbRZa+71BSoqRA
SoEbPIFRDiQlgP+s0TlJIpwbb/rjeGx6FlIQ8Citdpn7A0oL+r5DKIlU8jOtv1ZyfBkZBqIoxmhDU5f4oRt9CUoRxTFtXdO3Dd4PRHGKVmZMP+oGju/coZhNOXv+gul0Sr1ZU682u5cMgQC6riF4RxTH9K5HatiUK4zRPHv6MVlmaLuOw6NjqnLDNCv44Xd/xJNHn3Awm1AUU2bzBX5w3F6tiVKFtmMC09APxFHM+flr+q7E+YHlauuMjWolZevCMEY47dmz54+d/QKwZ8+ePZ8Df/3b394EqX9krCltJJdxHEJexNjYMnhHP/QkaQIEnr94gXMeISTB+TF1JoTRYGpGzbgAhAhoLdFS8+r5BWL3r/hhGIfW2NqxMMpYiukMPGhtR1+AHBcK8DjX0rYl682K5fIGGxnquqJtS7z3RFHKfH4EQtI0DXEcQ/Dc3FzStS1KKx68cY/15orG3fLOV95klh+wWte8//0PkX5AK4PzgavrGzarFmMt73zpHtZqbi5qpDTM5gf0ruH+gyNevbxkvig4OT4iSxLe++IXmS2Oee/dd1neLrlzeoLSUBQZfQsEwZNPXtJ3HYGxvyCOIpI0BSFRylBuW7bbLdoakjQjeA/BE0Lg6vIW7x3BjwvQeOvugIDWhmFwRNZ+tngZM8Z2Sj36MEIIGKtI0wgIO2lW+PSTwkaGYejHsjElkGr8faQUKCmBgHMDcRIT/Lg4aGsJQtD1PcaOiUZVWSGVHuVC3tO3DcENhOCwUUwcJ1RlSXA9hIBW45+BEAYCMJ0vGJqWvqlI84Szl68QKtC0DXk2oe8a2nbLp03IQmjWqy3OO9q64fEnz2laR+e2TA8Tlusl0yzh+uya7XpLU7ccHh4xnU85OFjQV4H33ntIlEi6pmZ1c0NmIy7OX3J8fIwyUXS7XM+lkgOI/K/9tb+W/gkfzT17/o1gvwDs2bNnz+fAL//yLwslhRuGcCl1WGa5DodHKad3DhAy4MLAMDjqumK9XjMpZrRtS9e3SCHHAdOMmfNKKZz3CARaauqyRvidtEeMZt0xzacdlwGjaduWKE4wuwVAK4Ug4L2j7RqatkIqQZqmNHVNVW7wwSOlQStD19d0Xc0w9Gy3JVIbjNZsyzVpZujaLdeXV/zsz32T7//BD3nx7Ixnj55A6Al4mqGl957ZYs7h8QGzWUFb19xcrlivl0ymMdtqzb0Hd6i2HdPZnC9+6R2Wt0sW8yO0jnnrnTfpe8eDO/eRYuD4zpzZbBzkX7+6pChitAyU5ZZ+6JhOJ7Td8Nnt/8XZBUIIprMJUlmENDg3AOPPIIktPrixO6AfCN7TDQ5tok/n+DF6E8bbeS1xPqCUQWmD1gKlBUrInWTHj7fru0jRKBo9G0ruisEIo3dAyc+kXniPEB4fPFEco7UZ5UJCIoXalZEZtDEIRvMwwdO3LQE/NjRXW/q2huBG2ZK19EPD4FqSNMPGluvrC7J8xrbesl2vkQKMMcRRzGZ1Q1tumWRTltdL2rqn3JRcna/58QfP+Z3ffp8nj8/J8inKxrgg6HvPZr3FeQ/e8+abdxn6lvXNlq7teOsLd7hzb4EKjnK1ZugGPnr8CScnpzJL06xpmkxoVQH953RE9+z5qWa/AOzZs2fP50SH+mDwfEwQS2tpT05TTk4zTk4OGPqBrnUQBEVR7AbC8RWg7RqMVTRNg3OOEALOB7SJqOqaum4wRjEMHsJY6mq0JniBNpoQPHE8lkUV+YTYJgyuY1st2Za3ONcjhcLoBGsjttsVQoztuHGUorUmOE/X9tgoIs8zjNGU2y1FkXDn3hHnZ5e8+4W3+cPf+QFt2aCVZ7u9pWsahJAooTg+OuT49JgHbx5yeJTz+MOXXF7cMpnOQAi+8Y0vgrfcrlb83J97j5ubGyaTGZfLM45PJ0yKCCUsdbMlSRRFXlA3gQ9+9JjpNOXoaEpddayWFcba3cAOUawJYUDKwGwxwUTRZ0lHVkdUVbO7wbdj4k8S03QtNjZoE6G0HY3ZQiC0QRoLcvzv3nsECinH3H8YlzWFQCIIwe/agAVejFvEp7GtQgq8B4TEWIsPo18jBEFwIMRo/JYE2L02jPGrAmtj+t4xthwE3NAz9N3O62BYr5YE7wmMX6PWmrbp8CEQZxn90BOCJy9ybq+ukRKarsJGY3HaenVD13fkk5zLixWrZcPLl9cYFfH0kxc8+eicsxe3pGnGdDpDAuHTf07viSLN0XHBZr1mebNhs90gxMB7X3yDulrTtxVVs/Uvz56vZvlkVRTTq0ia9a/+6q/uF4A9e34C7BeAPXv27Pkc+Pa3v+3//W//zWedE/9l0/kfOu/boog5PM7IpxHaWpTROCCKU5quBaBuaoQURNZQlttRs+7HuFCCo65q2q7D7wZ/rTR5NsFGMVES4f0oH2qaCjf0NHWN82P3wNhWG2NtjFKG4APb7Zquq9HakqYToihmuy1xLjCdzXBDwJiIpqkRCu49eMhHHz4mOMeTR0+x1iJ84PzsNbFJWCzu7hYZiOOI2TRl6Cr+xXf+gPOLS45PF6QTwxe//CbltuX1qzO+9s13SLMMqwsuLq75ylff4b0v3eXsxSvqtiSdJtgoolw3/OC7P6ZvB+IoY7suWd5U1NuOLIto24YkiTBa0NRbkiwhSlLiOKGuNxglqKoGgty9rMjdK8vY2hvZiNgauq4ezdpe0A+OaBfVGbxHeI93gYAYpUAIyqbF7XoGhmEYF4Wdxl4KSd87vBO7FWHsF4DdvhJ2H6T4VJUvQIydAEJKRHAMXU1kLd4L2rbf/XzHJugA5PmUvuupy4o0SSnLCqXGpCIRYDpdYHeLnbGWpm3ZbErqumJ5e0OWTnj+/DUffvQRk+mcpulIY0vfDpy9vMDX8PEPHnP+/ILQ93g/AJIkSZAStBHc3tYcHh9wcFRwfbGiqQaqynN5teG9r76DzVNAea3VUlv1/fl8+lhK6Xc/hj179vwxoz/vL2DPnj17/k1GLvxFvZY/bPvworB6lk8D9x4uqNpAuW2I4oSubTBG07YdMKC1oa5ahr5HCkAo/ODp2gqjU2QSoY0mL3KqqsSamKppaJpyjJkMnuAd3g9jik9WUNbQdu0uKQiS2JKmMWVZYm1EINB1LXVbUuQZWls26zVSKvq+Z7le8o1vfpXV6pq23lIcHlBX48ArgmKxOGYYOgbniLKUBw8fMgyOVy/P+OjDH9B2nq9+7RtoZbCp4Xaz5kcffMQX3n7AfDHj9aslL1+8opjEfONnvszN9YquHTg5mROnKa9fXxKZmNVti7WSjz98RhwrLi/XfPWrD6irkrLsSNKYYRg9AUkakxUT/BBomxaJYFuWZNmYEAQBG9mdzyEZb/QDOO/QSo/Sq8GNnQC9o+sGtJSkxjJ4TxSnODdeYEul6F2PUBqlLSEMCAHeB4beU4eWySRFKY8ABhdIjOVfaY0+9XlIQhjbe8XOfzD0NY1w2MgyDD1NWxLZiLquIUiKrEArRV1uMNYQJwl1syGEQACU1mTFDKU0k+mUvu/YbErmZs7zp6/4+te/wXK5pakHFrM
pp3emrK9LXOe4PFuzvr3m+vKGLM8ICNI4xxdj7Onh4WJMnLKw3lbEWULeCmKt2WwrLs6vubr6hLe+8DY2S9XyZlls6yY9CDgRhx8ymg/27Nnzx8z+BWDPnj17Pkf6C2uNFnnow01Xt0OeWQ6OE+7fn5FnEWmc7Ia+0ZwKkqZu6Np+bGtlTNHxzhG8Q8hxMEzSGKkVaZYhlNoZP8cWKyU0So1ykvn8kDTJIUDbVPR9jQ9jOovzDmMMfddR1RV123BychelLH03YExMmua0Xc+d07vc3mx4+ugRJ4cLlrcrmqYjjmKElDRtD0ikUiwOjlkuVzx78gk/+N73CN7w7rtfZD474PpyxXbZ8uyTc06OTzk6nqGU5PWrK45O5rz7pTfxHrZVx4M3j0hMTBwZju/MEcrT1B1l2WMixaOPL+g7QRCe6+uauu6xxrBdl9goHl9FbMx6vWHoBtbLNXFkSZIxVQeh8B6E1pjYEoCm/bRPYfRoCDG+Aqy3FUgL0mDjaBcRmo5lZFYTWUvX9FhtUErSdR3GaEIYi8W6rkPInRlYCgIBJccEodF8u3sFEAKlR7nRMDiU0QQhqOuxD0II6NqKrqmItKFtK7blBkRgW62pmw3FpMD7UW7UdS3SGPJiRtU0FPmEg6MjQgBJwvXVhs1mw+nJPX7wvcd88MNHRLHk5nZDnhfji1BZU1Ul6/Ua7wM2jZjMc6bzKeW25OBwyuJwyhAEs4Oc9WZN8DC4Ae88m3XJ7/3u76Klcg8evjGrqvrrN1c3D45ktvzcDuaePT/l7BeAPXv27Pkc+ff/5t9ceWnqqhrO61V71m27kCYxk4nlcFGQJBFplo0mX+kZ+p66aej6sbW3H9xozlUS7z0I0FohpEdrQZzYcfDfiU6sNQgJSlrSZELXOdq2G+Mk4xitLVEU0fY1q9WSuq6RynKwOObw4IS6aqmrFmtjFosFSimsjRBobq5umc8WvHh2xtANWKPHl4muJTCm0hSTOduyJC9yuq4my3Lu3BmbYT/65GN8GFjdrsmyjAcPD3n73RPq7RYt4fBgAUHQNDVpYpgUGY6BN9484MH9Y5bXa6yRvP3eIa9frbm9qYhiwc11RV154jil6zxV3Y3fqxlfLzarNUJA3/XESYyUCucHlPz0ln9ASknTdRgbjzGgzuOCx4cx4UcKQRRZtDG4oUOpscAr+IBREqHE7nMSBOdpqwajzdjWbBTG6F3JWgyI8Zbfj5n/WmuEULtyMIndpQ9JoZDKAoq2GT/Dru0JTtA1HUkcowjU9RohPFpL8A43DKRpRvADUoy9CMoY+q6lbioWB4dkeUGSZmgT8erVC9586x6JyfnR917y6vkt/dDT9RXd0JBkOcV8wXR+gLaW2UFGnBtmhwU2jXnx8pKqbDHKELzi7XdPefHynNP7R5gkkOQR2oj28vzV1Wq5vJ3NZkUc2beumuXR53w89+z5qWW/AOzZs2fP58ym9v9s1fiPlrebHw5t52xkODgpuPNgynSWU0wmRHGMEAoXxsHz01vivu+RSqGNYegHECCVxgdI8xStx4HfOccwjI2+VbVFiMBmu6KqNlirmUwmaGUwxtC2NVW5xvvR5Ku1ASHYbNa76EpDFMXUdU3X9uRZznazIo4s27IEKRn6AWMtzo9Li7EWZSwYzb37d1mvl9RtS5xmoB1X1xccHx+SZjGTScbhUcH9hzPOX99y9uqG+w/uoCPB3QcH3L13xGya0w0Db713SpIr6rrm6mpLZBVGKW6uV+R5hAzQd4FyW5OkmtvbW7I8GQuohp6b6wuaqsQPjsE74iQev2bnaNoGpTRKauIoG+VJkcV1Y+Ou0aNPIi8K8iJBijGxx/kerRU+OLwflwQ/DFirQIJQcmwFRiAlSDEWug1uQOixpExrvesJcMCnEaEaHwKjLUBirEGIXR9APzAMPcE7+ran68aFII5jXD9QbbcYrem7ga7rxuXMj2Vifd8hpSCKYy6vLtBRzGJxRBCB+/fv0fc9g2v4Mz//JV69uGZ923J8csDzZxdoE6ONJstz0jzGRhpj4Qvv3iWfZLz9xVOEhBdPzrk6u6YuW4bBc3p3ws3NNSenR9RNFQ7mBxdHJyefxIn9vlDi/5Vk6e+FoL/6S7/0S/HnfT737PlpZO8B2LNnz57Pmbe/8ej9T7739j/pE/3Fvh+aWJHPDnIGF3jw5gzxXOEdXHbnYytvktI2DVqPsZVGG/q+pxsGtLYIqWibHinGYrExXcbhfY9SkjgxdF1L31d439M0gZubawKOyCYIIXZyFE1TN0RRTNe2RPGoh4+imKZtkdKQpgltVROcox16tDWYOEZIRfCOoeuI4ogojpFGcXp6RLm+4emTj8mzHKTk5nrDnTt3CB5c8GjVcu/uXZZXNY8+fsXxyYLpPGZxnDKb5txcb3A4Th8umE5SvPe8urxmcD3HdybcXmzRwuEHh3MZbdvSNGMakBCOokhom5rgBTfX12RZShBgI4O1iroeqKuGaB7TO0+cT8cmLxEQSlE3NVFs0UqPMaAhYI2hDbuiryAI8FnUJ2HM4Dd6bB4WQjIMHUE4kAJl5C71qSOWCmX0rlFY4FxAhPCZabdre5QUaDuak733hODHduLdIB+Co6pqpBTkeYZSiqZtEVVJnEBAoLVESk3T9ti6xmhDGqdcX9/Q1j355JDb2wsOjxY4P5qBH7w54/TujGfPrumdw0TxaAjOC+LII3XPZDbj/PxqXOIWOfceHnBzueHZtqRcbRnalu2mJk0V83lOOwjefOuL4eXLR/W946MPJ9PFx2k++a0GfvOvDIEfvvfm3gOwZ89PgP0LwJ49e/Z8zvziL/66S2P9tHPiR3XDq6HtwzB4sjxlfpQxX6Tkk5gkiZkvFgilP8uT77qewQ0458Z8+yhiu1pTbta4vmM2KRDB4YaOtm1pu5bgPd551C6qMgSIogSjI5Q0CKkxJkbriChJCGLUqG8327FJ1nukkFir6fqa3nW7hltBP/SUdU2eFWP2PR5jFbNFQZJG9F3Nj97/Id57usFT1hWz2Yyy2rLaLHHOcffuXT764CVXlyVHJwuOT+ccHU+IjGZ5s2EYBt5485g3HhzgfcAPksvzJXkRcXQ8Z3ADVdmSFRmrdc/FxYosT9hsSkLQ1HXDalXSNg5jYpQ2DMGTZCkIaJoOay3FJKPrO+LY7r4PS1uPP78kTXBuwJqx0GsYeqQYS7yCD3jn6HaSIu89ysR4r9BRRnBj8dqY66PQShFHEd45mqqi70bj8KfeDR8GBjcWmvVd/5n3wDm3exmQSAmDG3sGYIwPdc7R9z1qtyxUZYlkDBXq+540Tce/V8B6dYtSkiSO6ZoGZROECKSZxdqxN6HrGv7if+fLPHhjQd940jgniWO0EhyfTDg6OsBozVtv3mO77qmqgQ9/9IJ79484PplTNxXbTcPQdVycXfIH//J3eH32lNO7x0ymUymlXM1PDn/j7a985Td+/dd/3f3ir/+6+/a3v+0/v5O5Z89PL/sFYM+ePXv+NeCv/2//w098L/7R1W33ncur7VOBAOkJwT
E9iDg8SnjrzQckaYLWBhvFSCXHZt/dABvFEU3TstluCUCW51RlRdd1o5Z/dzvtPQihSbMJcRwhpEcQdibSMLbe+mH3ajBQ19td94ChaWu00mijWa5u6fsW5x3SSNIiw0YJhwfHDDvd/GQyRSnNcjmaRr//ve/hHMRJTmRjpJDc3F4hFdy/f5fpfM6rlzdYk+B6R9+15EXEdlPjvSRKJQ+/cIeDowWuh5vLNcvbFYuDKad3D2jqjqurJdkkR+mIy8slSRKR5YY4NhgjaFuHkgapxsH5U0Outoam6WjrjjxPGYYe17fU2xVu6GCXr58kMSF42q5FSBi6BgYHbpT89P2w+1QDAU/bdiilGLwnz4vdUpEihKKte4bBI8WY8tO13VhGJkApcK6n79vx5+zGEjUf3M4Qzs5EPEqEggcp9WgY3oXn1E1FFEWjHKt3BAJRZHfSIo+SAucattslt8sbiknOMLQgAhJF19coPb5mSKM4ujvny19/wGyW0TYVx8cZDx8ekaaaN988Is/HBeTgcMbh0ZTl9Ybf+c53mR3MePjOCdvtkuB7CB3WKC4vX/LD97/XPXjj4SzJMqUE1/uhf8+enzz7BWDPnj17/jWh9/Jx27TfCS4MTTkO7FFkMHqUhQxDT5rGGKPxwdH13Xib7x2u7wnOsV4tkXJs8K3LDjcInAOkpCzL3UAYkyQpUki6rsV7TxxnIMKYAuQdWmq871gurwkB8jwfb5OVIssyLq8uGPpR3nN8eozUY/HYdHowSpC6hqapuF3e0vUwW5zwwfvvU9c1i4NjFvND+q6jqUu0tnz9G19nNj/g7OU5bd9yfbPi6uqWr33jzVEjLzSDc0wOZsSZpWsbXjy7pql7jo5zDo5yfHBcnC8RUpOlCTe3K+JEszhMiRNNFEUMQ0/b9tjYIgjUdY1QiiKfsbpZs15tx4QjK2mbCkkgBEfbNBil6JoaBLRtwzD0dH2PVIbeefq+xw0DbTuWnWkd4RyYKEYphbEaKQXOD9jYoIRk6B195wlyjATt+4HgxjIxJRXD0O9u+z348Nny4f0wSrWEgCAYeodAjlGk2hDHEUoJhBg9BtpYtmU7msad28l/up2ECLTW1NX4Z05rSd82GBtRVSVaK5zvsZEhSWNO7x5y940Zb7x5SFP3vP3ePWwkqdYlf/7PvUcUG9qmxvuWb/zsewz9wKPHTzi5c8zd+yc0bUNkJevNkvl8jutc9/Lly3Ml5NtN288/73O4Z8+/CewXgD179uz514T/zbe/fW6MWetA3TdNsEYzKVKMVQxuQCpF1za0bUXXj3GUdjfU4h3lZk3fjTe2TdsilKLr+zGzXgh8GAjBsVrd0jQN/TDshno9DpRIlLJYE6N1jBCSJE6ZTKY0TYNSiulkxvnFGQKYTmeEAM+fv8APHmMs3nuqbUlVVdRNizKGYj7j4tUZTbnl/oP7LA4WrFdL2q4iiizf+Nmv0nvH88cvcMMAQdO5kj/3l75E0znOzlZUdYdSkskkYxg8y+uScrvlzXfukBY5TdtSlR3eSebz6S5ZxvDGm0dorcas/qGj3LbESQI4ym1F23qmsyl11bJdV6yWNW3bI6XAGos1Y+Sph10MZ0AbjdbjMK+1RZvos9IvrQ1Ky1GihaDvBmwU4dyo4x+GdlxotMJG0WjolePf+2mkqJASrTTBid2wLndJQJIA2N0N/jB0uGFAG8vgBkw0Go/DrjogiPEivet6kjil7zw3V0v8Lk42uDC+KnhI4gwfPGW5BTxVeYuNLFZblJKjTGoIIKCYFNy5d8Abbx4zn+ZcX73m3S/dp24a1uuGd790l6GDi9cbvBCc3ruDawPPPjnjjbfuMZlPWa03tH3JoyefdJNJMUyK/PeFlH8w9P3x53T89uz5N4r9ArBnz549//oQtvB716v2j6ra3XoPNonIJpZJESMlnJ9fMgxjdGOcJMidTrwsV5TVGvCslkuEgKapSLOEtm2om5K+b2jbBqkEgx8YXM90ukBKgRCSxfwEpQ3Oe5RWICRRFFFutwAYY9lut2htmE5ntG3D9fUlcRxTNy1d19K1DcPQIYGimGDjlGq1ZLO65s6DO0xnU16+eMZqdUMUJ9x54yHDAM8enXF+dsbQOdzQ82f/wlepmo6PP7wmjiKkcmRFTFk2bDZblssN88MJR0czVrdrktggg0IrwdGdhGHoOTnJKSaa2XxsLx46j9EKowTbdcnZq2smkwwlBRevL3ADrJYVJjIUkwJtI7wXuMGRxAldPyCNQipJ23Y4F7AmwjuPjSKSPEdr+5lhV6kx4kcKQT+0aD2+Ygx9N5a4mQhlDGmeEEUpUgusHc2/EOiHlmEY5T5i91lIqVHKMgxjElTbdRhrCUhsHJMVOUobpDEobem6AefGpSVJYla3a7S0KAFKCKQI9F1HCJ4ostRly3azpdpesV5eY2xMQKCUIs8L6qZlW1ZkRYqO4OAoJYlirq5umB0seP7ihig2BFlzeXHNxx8+YnqQMQwd/2/2/jRm0yy978N+Z7n3Z33XWrp6m5nWcJVEyTBtQUls2B8dIx9CBAYCCHHAQPaXyEaAhDJgKAgcQIkjJ0IQaSKRsilKghgkUSIHUiJZsUJTMkVyOBzO3mvt7/as93bus+XD/XIkA0aESJ6pSeb+AY3qRlVXv/VUH9R1nfNfjvsjX/3tr5FngrreMQydS9JkU7fNszTLsmqx/NV8of/TN3kAJyZ+WJgWgImJiYkfIJzTz7Z1/N8fDsOHh0OHl5DmCdU8Iy0U5+cPWC5PWa5OEfexkD4ErLOE4DgeD6RJRt/2uMHh7WgQDt4TQiQvCnpjGGyPUgJrHd5Fzk4fYO0AwaFUBDyCcRBNEo2SCmsHhJJolVA3Nb0xLJentG2DDwOISAzufjmpsNayvXnNYbfhyfsPePK5d9ntj3R1S1HNWK3PKIs5Lz57zdWLlwitUVrz+NGK/jjwna+/JE81u33No8eP0OmYdy+QFFXG5z54l2NtSPOCPE8QyvP47RWzWUaSSi4fzjk/X5BnKX3bk6Yp5SwnBM9h19MbR1Fm1IeGvusxvWE2y3j46BylJL0ZAJBqHL7bpifLK5TSWOtJkhQfPdb2SK3I8goQOOdG9X8cB2+lE/reUBTFvXGX0SgcPGmuSVJFmhYkaUaWpwg1xoUOgyHR6l4CNMZ6CiHGFxsfUEKNy0eakd9/XXlWolWGYPQEeOdx3hFDoKwynHf0fYdOFFFA8JG2bYiAD9DUDfvdjq5taZo9UgsGYzF9T5aNHpLd4cB+d8DZyHI94+zshBgUxvRoBUPvef8Lb7HdbHn99BUvP3nKg8cnZFXCyaLi29/4OlIFlxX5bV4U2yzPX8tEfyqE/ujP//lf3rzB4zcx8UPDtABMTExM/ADxJ/7Enxh2ff9rIYiPtE6QOkWnGeW8YHk6p1zknF2eU1VLhBSkeY5QCUqNZVxlOUclYx7/4BzGDvSmJwrJcnWCdQ5n7egZCOPwenryYNSiu/67rbRDbyjzGVk6pt2EEO6lQqOsRIpR5tK2B9I05/z8ksVigQuWKEBIN
XoJkoSH77zF5774e7h5fUu9q6lmc568/TaPHz9gd3vD9fVL/L0WPctSotD8+q99g8V8lKWcPjhB5xIzWGbzEmcj6/UclUrKWYGUOc4Lzi6WvPXuA5qm5913L7m4WHB+ccJ+XzOfF8yXKUWVYYbx13PxYM1uV7O5PeBtACIPH52gtcCFSJIkRDGaq511EAVlMUcpjUSQJAnOG6zrkWKUT+kkxdnhu4N3URT35WBj6pL3gSzPyYtibP+NHm8NUirUfYSr1ClCjlGs6r7dV2tN33fjC44cG5WVkGipkCjKohzLwaQgxjj+3CJ+N060Ny1VlaG1om7qMfY0zQkB2q4lImmbHhiXghfPX7PdbBlMR55leOeojwdO1iuc8bSNRQTJs6e3PHjrgstHJwxm4Pb1ke984xnBSX7/T/0YXWexBl4/2+D6SDWbsTqdYe0wzGbLq/l8+Wy5Wj4tZ9Un6wcPnr/Bozcx8UPFtABMTExM/IBRSvkgzdW7SaHJi5KsSMkKjdSR2SqnmOcEIciynCQtUFKhkxSZJuRVxfF4xDp3LwdRxAh5nnPc7wjOItXYCxBjJE0zfAijabdrydKcvKgoqzkxjsbWRGfjsOssUkKIY7dAjJ6ziwsuHzxESs1+t6XvOogeazusG3j7vXd4+533+ezDV9y8uEFpxRd+4gtcPjzn+bOXPHv2lGHoqOs9gzGszua8vrohzXJciGRlwsXDNUKBIGFz13C33VEuFmTZbEy40ZLBBt597wnNsWaxmnNyMePkYk3T9QzWcf7wjPXpGpVokkxTzBLOz2d0dU+SSBCQJJo0TXHuvr8gVd9t/w3BkeWSyGi2JToEgaaux5v4yL0pF6wxtO2RYRiz+GO4lwo5j2TsVxDqPutfSQZnUYm4LwnToxdDarRORsOvdyiV3Kc4BUKwBO8RSiATCMKT5CmJThESdDrKlmIM6EQzDD2uN2NvgI/Uhz1usCidEhhjQYeuJ0s1SiXs9zXOBT7+8Bk3VzdY19H3DfvdAR885xdLrl7e4L3j7HTOV3/7mzx4dMJ7Hzyibjpub/b82q9+E5Eo3nr3c+RVxVtPHnL14jV//1d/lfXJKfPlSZwtlpuzy/Ovr9Zn387y4ma/30/dRBMT3yemBWBiYmLiBwwNT6pZWV08uCTLCpTSZHlKmWfMqwKtNEmakOUz7OAQSmG9RydjUZcxo8Rjt9vTG8tqtRpTIeNYKhW8IwZL2xzwziKJSCJlXpAmGYlUtG2NMT1Jko5LgvckSUKRV8xmc8qy4sHlY2bVmrZp2G83eBcQUtL3AzHCe+9/jpP1mmeffMrN62vyvORHfuIDTk9O+OZXP+Sw2ZIlKYfDln7oePTkAUoJXr9+zWy5xLjAwyenRO9oa8PdbcN2W3N+ec7q9BytMzabHU1z4PFbZwQ8bdtwfrYeYy99ZLvZ8eTtRyzXSwJQFDlCRvIyxVpLlicIESiqhPMH69E8LUaNvxARrVPSLIco7gfwMREo+ECqE1Qcm4eDtQgRCc6PMaswav8HP5qpi4zONBRlQYigVEKWFyQ6JbqAs8NYFpakJOnY6qykwg+e4ALeufuvq4cQSJSCMEa3KqnuU4c0xLGDQERGQzWBvu1w1iKUJM1ymn3LYAxRBGIU5FnJYbfDWcNgDW3XkWcVfet59eKK/a7h+bM7lNS8enUNMnB5ecpXv/xtBIGqLPidL39IUWgePFrjbaBvWr7ym98kzxTeBbabLfOloqk3fP1rX/NZXu6SNH+9WJ09my/Xf61ozN/4+Z//+eMbPXgTEz9ETAvAxMTExA8YSaaq08vTdbVYo7Qmy1JMPxCiJy80IQZ0kozttFKMt/9K4Zyn7zuEANP1eBchjkZRH8YiK+ssZuhp2gPD0JMmKVk2GlfHKFBB33dIAbP5nKIoCASSNCPPS4piRpYXnJ1fkqY5tzevGfqBPK/Ii4q8mHP58C3e+9zvIYTIs6fPub26oSoLzh6conTG17/yDYiePK8wZtSfv/XkbfIi47e//DuUxZK27/nRn3iHED3eRm5e7dFKU1YJFw/O8c7x+tVzDrsaqSVPnrzNq9c3rE9XzBc5eZZg2oHFYsHFgwtMP6B1irMOIRSr1YJIoJqPGfmLZUmaKsDfS24UIQqWyxUIifVhbFkWCV3fU82XCKHGiEzrAI8dLHawKKlIkgQhJN4FQohE3Nj+yzjkK60BcZ/2AzHc/+UcWkqEgOAcdjB458dyLgHNscX7wGB62qYh+DjKsiLE6CBG/ODxzo0yJOsQUdJ1HYPpOTs/ZegHTHuk745keU6aZGOJ22DvfSIpTd2QJpoXzzYoqWmbgaefXXF2sqJrLWZoefudh/zaf/5Vbl/X1DvLR994ydnpnLeenOCs5bhtePn8BVoKjvsDg4GHD99GSfkyyeTLYjZ7mmXFX/sLf/kvf+dLf/2vt2/00E1M/JAxLQATExMTP2AUZVYuV+t5klVoneB9RCrJbF6gEk2UkXJR4oVAJenY+OrsP2wIxv/DNB4hcdbdD50K68bSJyElSV6S5DMGF8emWiQ+eoSSJNloGN0f9igh0ErQ9y1SwGK+4Pr1K/b7DcvFCSfrM3SSkiQlp6fnrNYLBtfTNi2pTlifnJCXM9rGsLndY7pxED42O0KInJ894q3Hb/OV3/oKWV6hc8U7718iJMQgefb0jqTIKBeKy4cXzKqKut7TNT1N0/P48UNuN1eURUJZZHR9zTAMNL3h4sHlfcSmxNlARLGYr8iLAqVHCc5iOSdJUqz1yHvvQhRjyk5ejGVlfW9I0/EziUjKaklrelSq8YwyGu8sSPCMTwDeOWLwY0uzSghR4NxYFhZ9ZDCW4AN5keNDvB/iue/wEvcyqzGBx7oxqSc4h3UWnSSjKVhKfBgLvrq2xdoe7svCkjRhsGNzcNv2dG3Paj2nnOf0psdbh9IRpGC2KKiPHXe3B7I0RarIfJYTbeTVsxvee+8hr19c0zU9q/mcZ0+vkVpyenHO1776HZ5/esU3fucp3/7GC3ozcPF4TQyOq5db7q5v6ZuWzd0NEhXeefudY57n35nNZ8/f++IXP3ujh21i4oeUaQGYmJiY+AHiZ3/2Z5NqVf70bLVeyKjwv5vVnyjyMsNay8nZEkHEdKN5tG6PKKXQOgG4lwG1GFPTNAekVCQqxZgBM3T44AkxUmQFInKfNS/ul4dImebY3mDNQFVW98OzZVZVKC149eoZ88Wc9foMEBjTM5tVzKoZXddgTUe937NeLQkxYIYBYwyzaoZpGop0vInPkoR5teKD3/MB169f4gbHbFaxXC9wLtA2llcvt8wWBQ/fXrFYF+S55vrmJW1dc3u74cnbY2y8MS3LVUWMEW+hrQfKsmSxWnGsjygt6PqGWTVjNpvjXUDqlKyoxmXHWawdyLIcqROcj5RVifdjitLvGmFjtJRVRjWfk2YlWV6RZjnG9EQCaZ7d5/Q7uq4lSTSRgJSaGCVEsMYymGH0CpgenafEODYwq2Q0Ag9muO8UGFOARmNvj07GRUynCTJR98lACgBrB4zpcNFTVAUxRpwNSK3o
e4uUYw/B6eUJIAjWIYAQHWmakqQpbdPftxI70gxOTmd855uv6JueWVXw7W98RIzwxR/7HF/72oe89c4lb719ycvnV1xfbfjs0+fcXB1YLOf35mnD7fVrZPAUWU7fd3shElvkC5WXZf3s2bPizZy0iYkfbqYFYGJiYuIHiHVV/ehitfj9RVlKez+UCiFI0xQfAvNFSaIku/0BqSVJmqJVRlWtiDFircEYg9CCpjnQ9z1ZmjAMAyF4RtlJMt5qD4b9cUPT7AlhvP1Ok/y+B0BTlhXeO4hwcXpJ9IHr6ysuLy9J05S7uxusM5xenhEj7A8bqirH+YG3334LZy2mG1ACrGmIwSIiuBBYLJakWc7J5RnGtDx7+ilSSBbrOa+urnj96prDvqWc5ySZJFiPQrLZbOnqjv3+yHxWMZsXNE1NWcxJkoz9roGoaeqGLMswpiMGT9+PMaAhjjfezgW8i8wXC6SQJDoFKcjzFCUFWVaQ5yXODfRtTZFn+OhBSGazBQBZXpCmOTFC9J4QLGmaURQVAkGw4++fv/dnxOAZbDcWsA0WHwJCyzElKHh0mpFmGVKN8asxBNI0ITI2+UYYvz4BSoIgEonI+9eHEMZoUhECeZ6PyU1aorUizVNCiPR9Q6IV1gZ601HvjyAEg/V0jWG9nlMfOoL31O2Y6OSd52tf+YxHj0746Nsv+fbXXyKV5sHlOb/6//wNzi7POLtYUh8O7DY7dpsjd3cHzh+c4GyLMy277TWJkrYsSwPc5UX+KtfZ/ng8TtKfiYk3wLQATExMTPwAEWN0s8XsTKUKH4ZxKE/T++8NROBw7EmzjPPLc3x0pEWJEJIQPE1do1WCiOONd9MecM7hvRtvmXWKUpqmrTns7zCmwXmDMR3Bj3pxpVLyNBslRBLKsqLpenyMPLh4QN8ZNne3lOWMspxT1zVNW7NcV5xfLnn48JJXL1/y/PlzrHXUzZHVeomSgiIvqKoKqQWzxZz1esXzp89BKM4uLths7jjcbcjvC7sO+44sSxmMZ3PX0daOrhlQMmG2KOj7nrzIybKM+h+ZJdNM33shRgOsFKOUJknGoBkfAkmiyTJ9n2wUqKqKCJjekOU5WqW0bYuQEWstIYBWGu88bVej5PhHqFIKfR+R6pwjy8vx5l2K8fcsRsZq3kBb15i+I3JvEjaONC1I05TBDuOwrzRCKrwPVPexrmlWolQCiPv4zggBRIy07QHvB4ZhACLqd5uKlaIsc7y35LlGKhh6g1RgeosQmt1ui+0th12HEAm2tyQ6MJulJCrh+mrH2cWK3/i1r3F7d0CKhP/4//CfcHPVYXo4bg1f+fVv8MEX30GnCqk0u7stH3/jQ5pjjRCe/f4m9ra9jtr92uJ0+auL9fo7SZLdxKhu1uv1NIdMTLwBpoM3MTEx8QPE5eX6R5fr1UOExHtHosd4RyklRVFSHy2RyOpkjvcRIVOSLMf50WyaZtnYGKsUw2Dw3jLYAevtGCuJpOsNEcFgDV3fEqMnRIdQgYC/H5QLEjkOgeBBRk5O1rRtw253YLW6oCgrpI4sl3M+9/m3+cIH73Hc1/zWb36F7d2Wqipo2gPL1ZLzi0uCA2cNfVcTiZydnXPY1QihyYsZs8WSum5IlCYMKS+f3fLo0TlX1zsG47HWkaajIVUpQRQBqcYb9P1hj7UD1SxH63HYT7Ua/50kRwDOja8cdrD0TctsNmPU2o8tx4vl+Iqik4Q8L2jbnrbtyPOcw+FIluV4FxlMT/DjsF3XDUJKohj1+jGO/QE+jO3AzjmyNLsvVYPo3bg4iFGyM/QGrTKKsgIixoyLFkhiEMj7UrWymKN0Ql5U9zIhjfeBVGvq4x64jxBVKbP5kjQrxoK0ckaSZ6RFRpZn9L1j6Ad8iHStRSARoWe/2bBYzthtarpmIEsLgjecns5RWvDO+w/46//Hv8352Yq26/ibf+1v4m3g1atrnn78lLu7DT/2e3+U1ekK5wxD1/Dxt76BwDG4PiS5/nBxcfJ31g8u/8bibPV3ikX5f/7SL/7Cr3zpS1+yb+akTUz8cDMtABMTExM/QLjQb5JkvOlVerwJ7kyPVIrDocN6y9n5Yiz0cp48L5BKobMUlWQUswVJntP1Hd5HZrMFZ5cXzOcLlNY4b4l4vLdj020c02e8DwzW4sO4dCipxxQbH3F+vB3fbu6om5rFcjVGWaaK+WrOk3ceIwR85ctf41vf+Igyn5NmGTdXV1w+uODs7Jyb61v2+y3GGNI858njJ9R1S6IzhJDMFxVpIun6lrIqqZsD73/ugk8+/oz9rhn19Sk09ZG8UAgZKIsSb6HvekQM2KEny1KEivf9B5HBDKRpikAymB7Tt3RNTZoopBR0XUcUEqkVxICSkjTLAIHzjmo2p+/HW32lJKZrcMNAojO6rkNrTaI1wzD+dxACKcZxXycJAUjS9L5cbYAgAUGSSgZrscPo8SBKTGdItCZNM5JklCtZdy/bShNihDQZ+wDGHzP2M+j7BJ/lao3WGWk6pjHl2YyqmrNYrEiSlKoscUNkc9MSrKdvLfVxoOsHqipnu9mRFyX1YeDm5sAweAQRazo+9/knXF484P/1d36VH/3xd/nwG9/h08++w8O3TmjbI9/59jMynbCYl5yczYlxwPRH9octIkl8WhZ3s2r2HZllf/ut99//5T/7C7/w9Td5ziYmftiZSjcmJiYmfoC4uLw8X8xXCyJIleJCT5ZlbDc7emOZzXNMH/FekJcJ3hs4CnSaI4TEWsNgerz3VNWc5P4mOM1yYowIxhvqECJaQ4iRpusQQpBnOXmeI5MEQiTGQJLmJGnKfrtDxMhyvqQoC9IsYzavyLKUr3z5K9SHBu/h9OQBXX/gcDzwuXffJ81y6rrGmI4sT/Ahsl6vESJyul6w2x7QWvH4rYd89NFHpEoSfeDktOLq6sD1ZsdP/+Ef5XCoSbMlSaZZr6txMWpaBBqlJNfXt+R5itaKrm3IihLvA3mRIVSk248FZYfdhlRntE1LOSvRaQZR3mvsJdZ6fvduTMjAYrGi6wx5kdN1LSE4pFBEH4k+kBQa7z1CKJwfZUJVVY7Lm5IMbUeSpLRth/ej7yBNPM6Py5dQCmsHQgwIFAh1LzuSaJVyOBxI8gypJN4HIpE0GV+EpB4lN0VZ0fY9VTmnN44QIQRBlhYICSFE2rYnTXMEkuubI4kGJSPWR5KkY7Vc8tFHVygp2NwdONYdy9WcTz98xelJxcff+YSzizV91/PhNz/m8194j29949v81B/4SYr0ffa7hv12R9c1eNshlKNuj6RaU5TVIGR6t1is/+6f/YVf+PSNHrCJiQlgegGYmJiY+IHhZ3/2Z5NHTx7/9Gy9zp1zBO/RiWQYPKa3FNl4C7yvW3yENEtx3t/HhI6xkKatsaa/v40GJNSH45iB7z0IRZnP0Wos9/LB4bwlSVOEUPgQGYaBzvQIrdCJxhiD0ookzZjN1xTFDO7Lwz769kd0tWG/2xKCRyUCay2np+d0/cCLl8/Z7+7I85S8LNFJSte1lPMKT6Q1htOLM+xgEWjStKCaLSi
KOcfa8MUvvs3mriEvUoKPzBYzhJS0dY9pDSJabl5fgQ/jINz1WOvGr3voSbSiPbYc9jURiXejIVhqhRmG+/IsyNIcbz3eh1FKI8a/lFRE74jB4Z1F6fEGfjAGaweIo7+iKEra+gAEAJI0R8oUrRIG0yOJzOYFECF4cAEIlFU2LgMEZKKJMdAcD8T7pmalNMEHlBg/f6k01kLXDuPfO09WzMmyAoEaTcBSkKYZSo/GYe8hSXKcFyxOVgx+YHCe/a4jWPjoW6+4ut6QFwnHfYuIklfP73j94o67mzu+/tVPeP1iy8sXrxHaEWNAKk2qJM8/e0GaJBSpZr/d0B1rdtcbsjQjy3Lqw55MS7Ncrz+O//w//+JNna2JiYn/ItMCMDExMfEDwo9/7vKnl2fr/5YXo0xkNIkOmN4Q472ZNIJEIgHTWY7HDoHAuwGixzuL9/4+HlLQN91Y1JUVLOYnFPk4QDtnUUqipKYsZwghQIxNvsYYAIQQo7Qlz0jSjPl6hfeRzWaLdQO77ZaubXn9+jlJpnjy5CGbzTUxBozpODYHjvWB07MTZtWC+tjjg+DkbImxFhfABUeeFVxfbzk9PaXIK7KyQucZl4/O6DqHiIFHj1f4EFksZhx2LcdDjfOWwfaEEFmdVFhr2B8bylk53sYTiSGy3x9IdIKxljTP6AeD1ooiz4kx4MM41PZ9Q4ye4/EwJvsgCGFcCvq+RQiB6QasdQQC3ntC5D6LP+Lc6J9wzpOkKUolGGM4Hrf0fYMQiixPMP0wln5FyPIc5wZCcKRZhpKKtmtom5phMFRVhXMRIcaEpiQpRpmRUkipUTpD3ntErHUU+ZiqKXVKFAKtU7rWAIIQHdWsoizHHy+k4nhsGCz81m98TPBgrcO0niLJ+c1f+x3WJzO+8c1vcjgcubneo3VBNavQWvK5z32Ow/5AphWH/S27uxuGtiHNFLfXr8mzLC7XJ43W+turRfVs+7f+Vnhjh2tiYuK/wLQATExMTPyAsDxflmVRrGUYr+6t63F2wFlzb+yNSCUIHmazHO8dSmoQY5a/s/fDv3cgx0bf27sbVCLRaUJRzO5bXWuUkkihybIMrTXeO3Sq8N4wDP39QiDHV4EhkCQp3jm2u2vAkiUFu82B7eaGrMh4773P8fz5ZxwOew6HA847OtNx+eASJROePX1Jmqa89eSc3lnmi4Ku6zg5O8V7z3w2Zz5fkuSat98/Z7PbUtdHDoeGH//Jt9luW9ZnC26vdmw3NScnJ0QC1kakVug0pWl7yrxAADF68qJgcANZphAqkqQapRTG9FRVSfAOYzq8H5clISRd12KHAa0kdrB0bcfhsMd7zzAYnPPEGHDeUVYzlEpJkpSuG6U+RBgGQ5Lo8VudoNWY16+EGk23UhMixChIdMowjGVhMQRCjARvcUM/djb4sU8gAAiQSiLlGP8q7g3QY7Lp6Hnw3uHcP0x8kkKTJCnN4Ui8lyj1jSVNElanBQFLWw88/2zLr/+9r1HmBU+fXjEYi5Ip3/rWJ/zYT/wId3e3SAH7zYHdbs/6ZEE1H7sgPn36CUWlef3qU47HW15dfVYvlrNrKcNvXz56+GtPPvf+316tz/4fv/zLv+zf6AGbmJj4LtMCMDExMfGDgTy7eOtfrRbLedvX9EPLMJj7QTYjhMBgI7tdg9SMA2SARKvvZr4jJIEw3u5LiTE9UoFMJcWsIM3zMcoySUmSnLJcoJRmGFqSVDEYR9t26GQ0xHZdh5CSoirJ04z6WKOUIksy9psDXdshBZydXXLYHzgej9ihY7Fck+clWgm61nB3tyXLU05OKj788Fss1wuO+xocrOYLhqHj/GJN3bR8/vPv8tlHz8lTSV5kPHxygnWO3a4nTRI+++Q1FxenJGmCkpquG0hTjTGWGCJVmeCdI00ztJYc6yNZniIkZFmCMQNlUYxDvB3/nTTLcc7TtQY7jIZnY8bW46ZpCSGOpmgUzg0kWUqapmitx9cX75EC8iz/R15fJMfjntm8IgRGKZEU3G0bVJKSZAlZkY5Ga+eRSpNkGVLIMcJTRGzfYYeecL90KJ2S5iUqGWNhvXNjf4MQYypQiLTNWMTmhw47dIRgKfKM2+sNu21DkuQcd4GXn27ZvD7QN4Hm2NMejnz87U/48JvPMb3n44+f0nc929savObdd98HJLvdFjc4vvX1DxERlvMFfdsjAKUlx+N2s1gvv5GVs195//d88H86f/z4T61OLn7hz/5H/9Ek/5mY+AFiWgAmJiYmfgD4n/47/6PPLU/mfzgqTSQCY8SlB/reglCEMKbB9INjt28RUlGWJT44fPBjgZQYZT0xRLI0GwddZ0nShERryqKiqhZkWY5SAjP0ICIhBLSWVOU4HO/rHVKKUZaiEw71qKHPsgrrAnVzIEbP2cVDTtYntG0HKE5OH7JYLHDO0nXdmKyTKHQi+PCjj5gv1lRFxetXt5ydLxlsg9KQ5jlvPblguzkigZPTE26vDjx8tOaTT19z+eCcZ8+uefvdB5ydzbHWIJXEeUtR5vRdx8l6gTE9Ok1Ikoyu7YCI0JokH+U1BMZSNTfKfpIkQSBpuw6lBDpJqGYznPXE4KnrmrzIEUoipCRNMoKPpGlB23bfHeCzPMOHwGD7MfHHGJQch/m6qcdEoCjwLjBbzIlEtJLjS4wEYiTRCqUleVGAFCgtCd4T8WidIGT6XbmP9xYIBB8I3qG0IoQwvooMY9Rq04ym3L5rSFLFqxd3JGlgta7G3H8k201DBPq+pT4eubp+gUo8gUjX9RzrPa9ev6SqUqSQlOVsjBEdHN/8+u8QvcMaw/b2jkcPH0eZps8WJ+e/fvH4rf/LyVuXX/qrf+2v/19/4S/9pY/f3MmamJj4L2NaACYmJiZ+AJgv1yflbPmWRJEoiZKC4B2EQJIlzGYzkkShtaJvPVJKqnkJ9zf9ZjCjjEeMA6TWKVpp7DDQ7I9YY0mzDKE0Uqj74qkWKUEIhTUW0/ejkXjoSNOUPK+wg8VZS56XpGlGWVbsd3doLVmfnjNfrO+z8gvOTi9YLlYcDntub26Yz5cERl35bnegnM25vDxjc7vn9PSUJNVs7jacnl3S9Y66tXSm4+zBKZ98cs3Z5YrBDSTJGBWaporz8wUuOISISCk4PV1hTM98Pn4+PgqEStA6oa4b5vMFOtEoKema+v7Tllg3tvOGEOi7FtN1hBAoqhKdJrRdi7UWIWC5XBC8RwgYbKDpDDFKQgxjWZpSCDmWgXk/+gDqw/F+AYtoKUmSdFwIlMDZUWLlvaPvW5J0LCOTckwBStKcgEQqDSKiE4l3DuLY/Ku0xg6WVCf33oUW27eAQycSpRXWdbj7RWB/2JJlmuefbnjx9AVvv3fC9c01d7c7hGCUHDlDohWHw4GiyJBSUFYFVVmy2+55+fI1s3lBlmZjw3NTs9/e8vGH3yRVkeNh77u+s6fnF2J1cvKrj94/+2tf+tIvvXqDR2piYuL/A9MCMDExMfEDwAc/+vk/tFgulyEGQpQ4F3GDg+DQSmCGgabtMGaUmCwWOdYOo2zFeuaLJUVR3j
f9JnjvqJsDg+k4HmuCj+RFwcnJGavVCTFEpFAEH/F+lGYnSYZ1cSzmyiqatiVJFHme0rYNs9mc7fYOrRWL+YKu6TCdIc2ye/Np5NXrp5i+42S9pq5r2rYFBD4E1ssVh21P13QgHMb2PHz0CGssMQSa45EPfuxdDnvDyUnJxcMF+32LlJK225PnCUkisYNBCIFWCiUkaaK/m8GfpQlKjDf6xoxfW1d3uGFM6wHJ8diRJAlKZyAkxnSYtsdZT5bndG1HcA7nDLN5iXeOru0QwH6/Z76YoVRClqVYP5DohLbpEELRHFsSnbDd7lBKYp0dtftSEAkgPGboCN5iuo6+PZJoNSY56QQp9BglOpujdUpAIqTgsNtgTUNwgSyfkWUzhNQIKTBdQ9ccwXtM16GlQgpBezzSHI9EF6gPPaYJfO03P6MsEqqq4PZmR9u0tE3Ng4cXtG3Lcd/gjBtfhNKM88uHEAP1vubu5oblYsl6vcYHg5KR7fYqNt3R61zdGm+/qhE5zs/+g//gL+zf4HGamJj4xzAtABMTExNvmD/+b//b75ycr/57aZ6KEAaCHwjOEmzA+UjXGfa7I84KiIGTkxlaaobeIaVgNluT6Pw+az5izEDb1jg3lqzawZBkiqzIcN5jzACMN/9lOSPPCrIsu/ccBIpigfOeNE2YzeZcXV0xm88xg8GYnvOzc25vbnHOkSQa7zxt01HXR1aLOctFRV0fcYPl9HRN3zfM5xVSRp4/+wyIaKUwfcB0jr4zHLY7Hj1acdx0DG3H5dkJ1y8P3L7asZxlEB2n53Oss0glCcGRpCmRSJIm6EQjlRpbdmPgeDiwWq1GA651OGtRKmMYBmL06FThgiUER9+1ODewWM7p+47meCCEQF4UlEXBYb9nNpvR95Y0TSjLkmFoMX2PlGNS0vZuh79vYxZCjIVdRLxzJFmKtaNMJxIZekfwkaY2mM7hrUcKiZIJWZajdYJSo+TL+4AUkro+0jYNg+mI3pHn+fh98ndfcyzWWg67PUPfE1wky5Mx0ShAdJLBOD798I6Xz3dcPlyxud3jDfSdY7lakxUFpm/55JNPmc3G15/VYs2sqrBdy/72mm9/5+vj5KAUxgw+SZNDb82HRVHdrE/W387ms//7YIef/u//zM+s39yJmpiY+McxLQATExMTb5jP//g7/8J8dfKBsWCHnmGoMV2N8wMhxjFBRo0SmO/KeuoaKfSY/HI/4BrToRKJcwPOjbfkgxlABFYnBctVRX4fEzlYQ1nO718LxhbgEBx5plACRJRkScazZ09ZrZfMFnMQkcvLSzabLQjBbDGnqEqGYcC5wOnJOWW1oG5aYoQHDx+SJClZlrJeLfnNL/8mRZUzGEtTDxwPhq7vMWYgKyWz+Zznn77i9GzF85evGUzPO+9cjAk5cWzujV7StQNFURJFBBEpioIYx2jMYbA0TYO3jtmsomlamqZDaMXtXcPgLfNFhbdjYk700NYD1XyGzhNs3xGcRUhQKqXvBwQRJQWDMSwWFVF49ts7vLP0fYtz/ejaCBYpBUoJ1qdrDsc93luCc2w2W7RWZGmKsxHvBM6PUaPWGczQ471FSIEQY0RokmaIe29ABBBj3OvxsMV7iw+RiEDpFJWkOG8RInK87yPo+n70bxwP+AA+ePbHmhev7khzzfp0zdXr10iVUs0XPHz4CJ3k1PWB7faOqszY7w+cnl2M0arBcf36M771jd8MQhKKouzLefnx6cX5by9Pz//6fLl8fnr54G8UZfUVI9yTN3ikJiYm/jFMC8DExMTEG2axWvxEVmSJcwMxAkKT5CVZUaKlHAdCAtZZmtYAkjzPMaZDxICSgnv/LzpJ8X4YDaHBY91AWZZU85yiStFpinOeIq8A6HuDlGPZlEDjbMCYnkQnOGuZzSuevP2E7eaWEAKHY81gHVpr0izjWLcgBMvV4n65kCyWaz7/hQ84Pz9nt9mgpeD586c8fPAI01kG19GblsP+ligc5Szlgy++z4fffs6Tdy7ouo7dZs9ylRGD4pMPb8nTEtM7bu8OdL1DJmNqkdYJaZqgpKTvDMfjuHysT5Ykieb1yw2mtwy9oz62FEVJkmpAMAyBwQRCCMyXC0xnsNaR5AlZkaH12NIr76U8WabIioS+bem7DiUkXTO2AycqYocBKSHgaZqGKARJmkKIWGOQQrJcL5FC4KwjzzTeeoL39E1L1zbj60/w9y8Ao6bfOU+iE7yzSATBW3wYE4BEjPgQyPMCnYypRH1r6HpD2xiC8/S9wQwDZZGhFHz20TVNbQBP2zW8fP4CawbefvsJj996m7woefH8M4IfqPd37O5uQUCRZeRaO2/728G0v7NYLb+1Wp+088XienV6+rfWJ+f/m0SprywePPr5pNIv41hFNzEx8QOIftNfwMTExMQPM3/0X/uj64vLB38wyTJcZ1AqJeqAGTqcHQjBkiSKNNPkRU5Reawfdf1SCKy3QKTvO2JkjKu8H7siASU1i8WS+uhQWpIWGfOTNUNbs99vmFUzNtsbYvQIpcaSKanGnHmpePzWW3z00cc4C0VRkef5eDutJYf9jvliyXy9oGlqlJRkRYKQgvX6hE8//Zj5YkkUcH55zmF/YL+veevtR3z00YesVkukSHj4+IzrV1tOVmu6tubZ09c8fvIO3imef3ZFoiSmbQjzhGef3fK5Dx7RNg7wlLMFg/U4a/B2IEnGboM0zbi73tEcj5yerdjcNlycr8kygUTR2x6kZnDDqOkXguPhSJHnZFlKbwxZmuGdRSeawQ5kSYqQkr7rybKU4b5QzA0DVVViB0ea5jgbaZuO9emSPCs4bnajn0NI5H3EpxCBokzZ9YYYwAaHMS2Qj4VvgHeBLM+xg2U2m9H1DT44lE5IkgQzGITgflHp0VIR7mVBXTtQljMO+y0hjMvDMFiqakZzbMizQHdfMFfvD7x4+pz1yZrPf+E9vv47Ld71PH/2GYkS7LY78iLHmc7leblNy/Q769Oz3ynmi6eEoCIsE6UOf+6Xfun5P/r/9i98f4/SxMTE/xdMC8DExMTEG+Sf+W/8yI+v1+ufiGGMmQyDw/T9aEK149AWA2NWvBDYfsD1/r6oS+Csp26OuOhRSUrbd4Q4RojaYWA+WyMRXL+4Jiky5vOK5ljQHWvmsyW3t6+I0eODR7iAFJq8LFFaU1YlNze3OBcpiooYIyF4rLO0nWFelpyfrbi9u8V7T1nOERJm8xm77Z6qnCEQCBmRBI6HHadn5zR1y/6w44Mvfh5iindQHw1aS54/e0E1m1GVOYddQwyWdDGDGHj9Yoc1nvZg0Flkfbagv/czSAQxBvI8QWtFdJ7jfs9sViKUopqlrNcVw9BjgwMgS1P2hyOLeTWaleP4udreYroeozVKjIZsqQIgcDZgB8tiVnI81mid0bcdaV4yOItOUu5utkghyNIc6zwBiDAWjGmNShQykYj7zgZrPT7CMHQopcduBmvQaYazDqU0UkqyPB9/rXJ8mUi0xjqLUgrvPUmSgBR4ZwmDIy+S+6biwGyZgvR0TYvWkhdPb9AqIU0Kl
D5w2O1pjjWPn1zw9rtv8/f+3lOyJEXqkpvjjlko+1mWbYMSN4uTs68uz86+VuT5Uevs73Yx3qUhpG/mBE1MTPyTMEmAJiYmJt4g5WrxLyZlsbbOEZzF9A3BDRACgxnjJg91S9sZggss5jMW85L5rMD7gbZtcK5HEsmyMbFGRDmaYREoqXCm4/b1c0xbo1RECsFysaJpaswwoHVKokvSLKcoZ6R5hhk6ILC5uyNRY1PwYlFRVjlSglaa9ckJt3c3bDZ3ZGmGEII8z/DOoaRkVlYkWjOvKvqmJU8L3n7nHa6urnnn/bfRaUZT1zz99BkgeP3qFXZwLBdzdrstRQ5FUaKUpipzPvnoJVWZ8/LZDbNZRVv3xAAhBIwZy7eUThBK0PYNZZkihCLGwGpVYkxP21qUHiVAWmuUEuRFTtsaQggkiaauW7qux1k7+gtCIIZIjB7nBrx3hBABgVQaoTTOBwTQdwbbGdIsxXpHcKPESCrFYAwCQZIlAPStJU01WknwEeKYyJRl+VgmFsefM3iPkHy3rThJEvq+Gz0RwY9xsQiGYVxsjscjUgr6ziOFwtuICJ71akaMnt1uhxSS46EjS0uKLMVbQ3Pc8dtf/i2KQvHk7fe4ut1w6FpOzh+Eq90uOCUPJNmNDYEoZJA6+40/9xf/4ie/9Eu/dPjSX/7Lt2/i/ExMTPyTMS0AExMTE2+IP/qv/dH1Yrn6fTrLCcFiTIP3HeAwpsd5j7MWIqRZgpSB4C27bctHHz6lrRtE8PjBMA77GilHCU+UCpSgt4ZXVy9p24Y8T1mfrpktZlzfXtF1DWmaUFVzTk7OWSxWCBk4HDZA4Pr6CikEIO8HfE3fDZTFjLKcsTscubm5pchKYogIIolKOB4O5FmOSjSL5QwpQKc5733+R7BeIGTgR3/si9THlpfPn1NkCc8++wSB4PRkTZIkzGZjys1gLI/eXvDixTVVkbG7rREImkPNbnNARDju9veyJwkonA9oremNp21blosZzgeM8aMqXUS8j/RdT16kSCVGozAK58L9UjRKmYhxvDVPFImWaBVItcb5SFZW+BBI85y+t+RZhbejJGsxn+GsHeM01djtEHyP7S0gKPKCrjVkSU7wbmz1dREp5NjuCwQ/EEPEBk9nxp4CIQDB+OvrOkSA4Nyo9e86siy7jxwFlWjSRJEpjQTyPGe1mnN3d8vlgzWDbTgetqS64HC4xQ4du+0Vf/8/+xXeevwWq9N1vL659Uon/WK18je7XR6k7Bw4a70f4JM3cW4mJib+6ZkWgImJiYk3xMl75eLs/OzHtcy+W/QklMQ6h5RQljmzxZz5vCBLND6M0ZF1bfCe+8bagbo+4GNgvpiRJjk6TYGAEmCGjrrrAEEIHm89g7Gcn59zenpBnpUoleCcpWlrhsFCFCQqwTr3XXOp0gl1faSqSrIsA0ZJS1GUWG+5295RVmM7rg+eLEuQQlCUBTpLOX/wkOV6Sdu2vP32I9bLksNuj5KKp59+gvOG1WrNYrUixlH/vtt0zBZjYdbVqz1aaw6HmhgjVy92BKdomx4pJVKNN/J2sORJwWHf0rRHHj68YLc9IiUoCVoohr4jz1IG15MkCoAQIz4GrHcM1iP16IeIQTAMfmxcjhHTW6x1SCURwiPkaLZOEolOJJFw/+1oSj7sxhhOoRQxQgievhvIioz5skDIiLUR0w9IQAjo+x47OJzxY5xnonF9B96PWn4zkOiU4BwQ7n/PIl3bYXpDVZbUh4Y0HY3agzMMg2O3q6mqDCUku92Ox29dsNtf03V7hAhsd9cIJXj2/Dm/9ZV/wOffez+u5vN9tP13zk/ONlkxE/umWUYpbZKmL40x/Rs6OhMTE/+UTAvAxMTExJtB/tRP/bP/8snp5bvOdsToGIaO5lgTxnxIYoS+GxN95FgBAEIQwkBe5Ji+p+0aqvn8Pt3HIhWjiTREhBhjRbVilLI4OGwPrJZLZtUcYwa6vuN43LPd3Y2mVpUwmy/pzUBVztFphpRjDn2ajvn0QkCRp/RdyzAYdrstbz95gneBY33g7SePGIYBIQWzWc5qPWMxrwjOU+UpP/rj77O5OSJ9oD3eErzn/OyUNBubeQkBN0SqKufiwQnXL/doEtxgSbRCS0lztHjvGezYWtzUY1GXkoLD/sBh33NycoJUYkxH0preGCKBJMnQiSRNEsqipO8HpByjPqUANzjKPCVLE8wwSoYiEWsdpjMoIXDe4b1HAMYYlAhY0xKcRWtJfTziTGC/aSAqrAkkSU5WppjBEpwkL8t7Db+mH4b7QrbR6+GdJwZo6walBG1zRIqxc2AwHd4aiGO52XjfP6YBHQ8NeZ7TtZ7uaHA2ImTCsfYcjjX7XcODywd89ukrTk/XxNhzPG7GkjQRaNoanSo++/SjuLl9Hc5PF7vlsvqwKvK/99ajBy8Wi8U+TfNDVOrZL//yL/s3eoImJib+iZkWgImJiYk3wL/1b/3Rxyfn838jK6V2zuC9RWtFosds/77v2Gy2tF373Yz7GMGHwPJkSVmMGfcISZKV+DBGTw6DgRCIPt6nyUQSLRiTgizOR5QWDNbRm56AH/PwY0AI8d3b9zQvyIuKGANd3yClYLVa4b1nuVhR1y15XmCM4Ud+9McAwWZz4PLyHB+hbjpmVUKWK9IkA+FIU8nDxyvyvKBvLUN3JFGS8/MLqtmcGAN2MAymp8gTTi8W7HeGrh1z+ce9xqMSRd22JEnC5u7AqxdbtErpWkOiJVev7ri4OCVJUpq2Jk3VqIUPgaJKUUmC9x4lFRJB33QkSiBFpGs6YgikWYoQKW3XsT5ZEgEpJSEIdDK+gCyWK6zz2MGOvoDfHcajoms7nLPE6LE2UOQ5Qih0miABKSDLivFlABiGseHZDQZnx3ZhNzj6rkMrRdt2DM6hlMKYjhAdIUZscGRFjtKavMjoup4YLUpqNrcNzdHinEfEwPWrO+r9kVlVkIqMw+2e9997m75vR9/AvZk4huCrstht7m4+zbL0MJtV9btPHv+tB+dn/9H5ydmuyqub9Xr97Td2eCYmJv6pmRaAiYmJiTfAj3/xCz918ej889aNkpLgxzx4cR+dLqUgLxKKIiPGyDA4un7AOs/x0FA3NW3fMVssSNOMWTUDGKMh742p3nucHYg+jEM+EakkzkfSLEUnmizL6QczTqRCEImkeYYQYypNXR9pmoaz0xNMZ0iSDGMs88WK+WLF48dvM1jP7tAQCWRZyeZ2T54nY+GWd2w3R8oyR2rPYrlgc3OgazqyomB9fobOU6rZjGB77DCQlxXLs5LAOLzWdYeQkrpumM1zXry85uzylP2uZXM7EAI477CDYXt3xBhPkkX6rh2H+0TR9f0Yb5qkSAHee0JwdF1HUzckaUaeZVjjqaoZeVkw2EDbdVSzgmAD0QeKKkdKifcR58ekpnBv3PUhkGYJcTQaEHEUlQICZZUgBIggSLTCuQGlUrzzCCmxLiCExNkB03dUVcngDYMf8DFQViV925JIRd+Oi164f5UAiVaKokhJkpTbm5osS3j22Q3HumVzV9O3A6Y9
srm7IUsVq2XF089ecfHgLaKSmMHQ9z1SSJcqfVjMF68Wy+WHQvBJmqYLKeW3/8BP//Sfe3B5/qdny9mnf/ZLX+re2OGZmJj4p2ZaACYmJia+z/y7/+N/84PP/54v/PHZfD7v24YQHDF6QojjTbNSZFlOkZdIqQgB+t6hdU4MEtN5xgQaRVGUOOfo2pbBGNKsIM0rdDJGRg7DmDNvWgMh4swAPo6m0KzAu0ieFSiVEGMkSVO893RtS304IKXgrcePsday2d5hrSMKWKzmPHh0SdcbuqZjv98yX8wRRLpm4HS9Go2zUiJwJImmyGeY1jJ0A0oI8rIiy0uKPKVINFrAoqqYLzLyLMcZi2kGlIwkSuJ9QIqI7wOzQtPVDWWRkGSC477j6vme66sDq/WC2+sDzbElSUYJU9d1QERKxWCH+8K0SN8bhIyj4VdAXfesztYIKTkeR79Bmkj6tkMKcS/DMURvaQ8N+MhgHDEqBusQShKiQapA0zQIIdEJRAHWDfRdw2K5QmfFWPglR4N18B5ChBCo63GZ0qlCSIGUkuV6hZQKESFRGmssmUoRLmL7DqXGRCKi4ObqiHeedt9x2NQcDi1RCB48uuRYN7RdRwgGZw2HQ8v7n/vC+HWHYNM83xbz+VVWFS9Ozk+/dXJ29veLav4bQuu3AP7yL//yr/2Vv/JX/mMxJptOTEz8/yjTAjAxMTHx/UX9+O/7sX/j4vGDnzoc9hhzYBharLM4bxkGQ9d11HVH31sEYjTdCnDBEWLA+zFdRqAYzJgS5J0jyVLmyzVZOUcnGYKxRVgkCToraDrDdjPGeiIVeVFRVTOEgLY9UJYVy8WarmuwbmA2X3J29hDvBE3dk2UFRVkwX1c8eOuEzgwEIvvdhuAtaZJyOLSkmWZfHzk7nXP1ekNZ5STpaJAdhgGlJEEGkiwl0QmrxQytFNWsYrbIWa0qjseW4DRda3nw8IRjW1NWGV3TUxQZbV0jBUgRkAief3zD/q4bDcxa8uLTW2B87dht93g7MK8KBmuRWuH8AATs4BFI8jylPhqsDyxWS6yx9E3NfDbDO4f3YXxJMA7rPNF76kONHSxtMzCYAEKPaZ44yipFoEdpVyIRUjGYHoTAi4izDjtYVKIBz3I5vpY0TUf0AWJAa4lQEucCIgqEEATGdB+tFAKBUpK+b7HDgBKaalFireDu5oDWks31EWEVV6/uqBYzFqsZd7cb3DBguh2HzQ2JTLg4O4tFUXRS6lpn6X69Xn2al+X1YrF6dbJa/cU0SX7761//evJmj87ExMR/VUxFYBMTExPfR/53f/p/8Yff+8Ln/zuDbVTTjLGNQoqxjMuPOnDvPV1jODYtznryMkdIRZIqpJJIqdnvrzGmo6n3LJejrt55h5QaxNgQe+8Fputq+rJFJTlJntIPbhxUQ6TvW+r6yIOHjzg/e8jmbk+ic4pCUZULYhA0fUuW5VTVnMWqRKaCGCN5lnJ5ccYn+x2nZ2ccDke0Ujx99hF/6A//QdrOcn174McfXeDt2GuQJQmD8wz9QJlrnBLMFgsOu4G0TFmdzLm92ZMWCWbwzOZj/OgwWJarBfvWslgLfBSk2Zih39eeq1d7FvOK+arg6Wd3RBcJNnD16pYHj5cEL0AyZvlbDzEQQmSwA/K+M+Hmekc1r8bb/bojhkhVZYQgQHicCxy2LeeXJYMZ04lW6xX7vUGIiDMBJUZ5UJbNGewW6zxSCbQUdPfNwYJA39ZEIM8ygnMsZvkoz3IR5xxKSbSSKBEJgwUlKMsMpEBJib43JiMi1g4oNUqOpBRoJbl73eH9mNSUpjmH/YE8S3n86Ixvfv1DFrOCpt1hXcfJ6QVVOSOG2EctPk3zYqtVWs/KaqdS8bWf/6W/9NmbPjcTExP/1TK9AExMTEx8n/gzf+ZP/nP/7B/6qZ9fnJxe1s0RZw1CjoOkdw5nDIf9kaurLa9e7bl6dWR3bOgHgxlGI+x8XjBfVFSzOd4FpNJEFFEkrE/PWawWYzOsc8QISimkBDs0tO2eLEkpi4IiL8iLkrJc8N57X2A+P+NY95yeru/1/wPWGer6iBBiLP9KBWme0LcGpRTnlyv6vuX84gJjDMb0/NZv/yZCwvn5GR9/dEWaFRRFzm7bYs2YiX88HLHWQlSAIM1TQhxYLEqIkqbpKPICACHHjH4/eKKLCCRZmqIU9P1AmibcvN6RpgmzRYaWgu1dQ16l3G2OCBQCNcZ3Do5Ej/GcANY6kkSRZRnXrzf0faCaF1jb0TY1QkCSakw/Fpvtdw19a9BKczz0dO0Yk1oWGc4N9J1D6wQ3eKqiomt6lJAoFH3bMphx2VBSQnC4vqeqSkLwpFqSJAprPYj72/4QydKUrulo23YsA7tPNEKAUoIQAt5F+t7Rdy1d3RG8Z7c7MphAkgrM0CAj7O82KBKktByPO+bzFTe3VxwPdzhrwmI+88vl4tXF5cXXZuvFK6XTj//CL/3VL7/BIzMxMfE9YloAJiYmJr4P/K/+5//u+z/5kz/xvzx7/Oi9Y33k9vp2HNSDp+87hs7QdS1d27G53XGse4YhIITCWthtem5f1+y2DVWV8rkPHvO5D94j0SlpkvHg4QPOH1yyWp+QJAURcZ/uMxqIox/wQ4e3Dq1GI6qxnsdvvUeiS7rGUOQZV9evaLsGKeF43BNFwAVP37WslnOuX15xerqgKFNiiJjecfnkFGcDm80dvel557332W4bbm52SAmbbc319Y4YBcdjy263I0lTnBMgJXmhyYoUOwS2t0fKvMDZMfFGa0n0niLP8d6TZinRw3Hbcjz0hCAQ2jNf5lw+WHB3s0OrSFJIolas1jP61mKGgA8R5wNda5BC422EIDD9wH57ZD7PyPMEMwwMg6WalZTVgq7vGMzA3c2exXIGQnHY1oioR3+F6YnRk2aaNFUopQlhbG52g8dZcM5jBotzgTQtMH2PswMIiVACnWiSLCFN1Oij6BqsG9uN265jMA5rLfG+9dc5dy+nUgghubs9YDqLH6Dd96xXJU3dIJXEuwBOcPP6FbvNhscPHnA87sbPVylu714botsF3CFPk2I2W7w8W5z+J7fH499h0vpPTPz/JZMEaGJiYuL7wMPHj/9rZ48e/HO9cWxurtBSgZD09wVPxIAUkiTRzOYFXX9kvsxRSvH80w2vnu847Ft2uy3RO04vLtEqY7U64Xjc49wpu50lBsahWEgQEec9Xd+xi1DNJUgFUiC15MGjB+zutoQYqWYFTXMgRHcfQ2rIsxyhBEop1ssl11e36EQRoycvcm5f7Xj3vUe44NFas9ttWCwWJDpnszny/MWn/NQf+AN88uFLEAKlFbe3G7p+QCU5rTuwXlcIJcYBtw8IImVVkOc5oW2IfozYPL9cUdeWolRkaconH15z9nBG23WUZY6zgSTTtLeB2TKhqkpMvydiv/sCEHzgsK8ZjON4bDHdQJ6VmH5Aa836tEQAzliG3pKcpvTG0fc9IowpQHmu6VvDcd9xfnnG7dUWOwxIUZGmEqUhTROsdbhg6cyADwHvQUmN95HgA+6+Q6A3HQJ
BZIxh9d7hhgHnBqSQRCFQCuwwtv26GJFSY+2AlAo3+PHloDP0MWJaR/CgE8FymWIHh06grrcE3/Ptb3+N9955n6osaOpdmM+Xbd0fak+8zZTeCeTce7f/c3/pL/3Gmz4zExMT3zumF4CJiYmJ7z3iwVsP/+WiXIi2PhCDY7GYE7wnUZo0SVAqGYu2tKI3FiEFeaFpjobXz/fUx5YsTamqCqkUx+2BF8+e0bXHUePfNZRFQQSyMiGKCMj7aM/AsW1AJEgtmC0L1qczXHAgYFaVHOs9h3qP6Q1SSZJk/FqEFORlTl0fuLm5oVrMEFISPeRFxunZgq42aJ0yX6xJdEJVpvzWb36Ztmm5vTnw8vVLnB/Y7xtevHhF2zTs7u64u9lwdrbguDniejsWaYWA1hHnLEWZ0bUD83mJD46iTMmThKefvMIHx2I5I3jL0I+Z/tttO/64KmVoDe2hRykJKnLcdwQXMN1A8JL9vgYJPniOx3GJKIuMwViGfhyuo49YY3DOctjX5EWC947dpifNM6SG25sdkogS8bs+DqUkprdINCJKpIg4N5CXKc729F2N947B3g/1NhIiKPW7rwHJvRE4MgyGNE0IPjKYgfpY453lsDvSHFqO+2ZcnobA/m4g1RrvPPXREJFjB4AIdN2BED29OfLy9TNMb0IIoSuqqs+Kaid18irJ88/SLH2RKGXe9IGZmJj43jItABMTExPfY/7Mn/6TP33x4PJfMkOLNTVCQtN1zBdztB7bdZUU9z/ao7VgeVKyPJ1h3YAQkbzIWZ5UvPvuW5yfX/Dw0SWzWYlAYpqaj7/zda5fXtHsjjSHA1onSCHHu2Xr0FrjouPxuyesTrOxFMxFpNYM3uOtJU9Slos189mcEDxaK7yzRAKdGSjKkt4Yqqpg6C1pKu5Td8bhvypnLJdz+r7l7u6KCPS9oW53ZJnieDxwe3dD2xnutrdopcjTjOef3eJdoDfDd1uG7WCRQhMiCD3enhep5MXTK7Z3Pacna4beIRAcjx0RT9/1pIlECsnN7R6dZiRpQn3o8CGik7FJ93hoMcaSZSlNMyCVJC81UqlRp680Skti8AymIVhwLpKmenyxEZ75KuN4ONL2PTpLUcn4x+l4mx851jX1sR+H9+gJzhGcxVsHcTT5Oj8uDNY6jLH3un5JnudYF1BaYb3F2jimKInxZcYYg7WB+tDhBstx1yOi5umn11hr8TZw2HXcbQ5IJYkxYkxH1zXkWcLxuCXJ9NAN/V5KzLychSzNdnlRPssW828Xed6+scMyMTHxfWFaACYmJia+t+jHTx7+6+ksuTgc7uiaI13XU81mEEcTZ0TgvMWYI03Tc9wbFsuKw75hv+1x1pGmmrJMiXFgvV7hvB2XB6FRRA6bG3Z3V+w2rzlub/DWgBzbgH0IpGXOT/7Bn+T84QWHg0UoTZKmJFmBTgsWqzPyvAIJm80GKQVN05ImmueffcL19Su6rkeJlHo/sN82JDph6B3r9YyySHHO8uDBAz777BnWD3TmSNcf8N4SETx9+pT9fkPT97SmJy9zdruG27s9N3d3dJ0liMhgA03d0HU9IUaKqsC7wHHXcXu1Y3VSIXUkuMBhb5gvSqwd+xO8i7SNIXqBSgSDtey2LbNZfp8mFDCdHWUzboziDMF9t3+h7wdiiPcpSpEYAkpqEp1hncf70RAdg6NtDVJIQgSpRqmO9wFjxjhXpSTOOaQa05iyJMc0FmccMQhiiKPmX4Rx2egHEp2OC0E34KzDDYHtZodU4N3Y/1Afj2glaeoxMvR4aCnKhK4Z+Ozja4TwbO9u6OoDh82GWV6iVYrpO6yz0bnBIXBZnu57O7xIEh3SNI1Jlh3yvPhoSPJ/8KYPzcTExPeWaQGYmJiY+B7yzjvv6MV69UWpNLZtubu9wXlHWZb4GPDBM5iepmkwxnN329A2PUUiOW5btnctXefomo6hdxgD3jtCHJeGw3GDCwNCeOr9LaY7YkxL3x1xzo2pOVnFkydfYLE4pTsYUqXoe0NRZMzmOUVRkOicY33g5uY1MBqHrR14/vRT+r6hLAuKokAEye3rA8FBDIEk0VSzhCSBi4sH5PmScG8bLfOCpt4yny2xNrLZ3uC8oW72GOvwRF68uqUzhpu7W+q6QStFczAcdy3eBmZVSVcbrl/vqXcdiZLMqgpvxwz/trYkSow360FgjSPYiOst0Y2lZArJal3RtZa+9ehEIITiuO8wxmEHj/cw2Ijzo2wmRJBaEIgEAgFB0/Ykqca58fMfjCUvUuww4H3AOYcZPCJoTGtJEslu1+K8pR8sOsmIURJ8uG8WjmityBKJlpLjoSNGKIqMJFE0h4Y8TQnD2BTtfURGiWkHbO847FoGF9jc1bjgyPOE73zzOWmactjd8PrZx7x+/oLd5sjJ+hKt0zgMndGpPvZ9N+RlHlSi9kHwqU60zbL0mCblb/ziL/7i3Rs9NBMTE99zJhPwxMTExPeQP/JH/sjby9XJ+6bvaZojSZpSzud4IsFZpBiz6a0duLqqORx6PvjiW9SHjutXR7rWkucpXWt4/WrDYjHDmI6h91gTWMxPqI8bhFAEoCgq3NDj3YAUASEkRTUjy2c09YBpDyyqnNksBwnHncPbgbrekejRj+CDpWtbhBT0XUOeFtTNkSQ/Mpt3PLg4RUmB6QOrBwpjIqdnC6SUFEXOarYmT0suzi/Y7TacL8/ZbY9Ya4gx0JsjGRUhBDbbHU3dsN3cURYVddOAi3THjvxtjfOB559ckecp+8OevMhompq8yhmMZb6qOBx6pJDstw0yUTgfCVGMS0IXiCGAjHS9Y7drefBwRdsOoxymNmSpJnpojy1KQtt2KKXvk3fGW3chJTGMpWzOeYSMRAJKw2Asznns4NHaEoOgbwfmVcZm02KHMEqq0tGDgRBjc69QaJWQpAl9Z2n3LXIFAkmaa5yPFFoiVUQgKNKcQ1vjHePN/95SFAPWeJ4/vWO+zHh99QIpA2We8urVHXe7O8pyju0d5xcP3bHZdkLKo/U+Vuio06xOi+IqK8tDmulv/Pm/+Bc/fMNHZmJi4vvA9AIwMTEx8T3kxz54/M+kWXLZtUeKsiAvZywXS6LzyBgJ3tI1LddXe7abmstHC7JMsts1SKnJ85IQ7mUlxmKtxfRjJKRSEqUUxIiSmixLMcOAGXpiHCNE83LOcn2Gc55Xr15yqGtkKkkzhekNzjmOhwYhwQ4WISTWDxhv2Gxv6E1HP3QY01AWOfOqwAwG5/0oj1FjIddsnnF2seTBwxlpkvDowTuUxZxqvqIoK5JEIaJE6wStNKlO8M6x3W4QYizaOrZ76rrlUNcM3lE3A598/BmHfUuiNXXbgVIM1hKiZ3AOqRVNbelay2DA20hTDwgpkRJCiMxXJd4H+ranKMfbdQ2IGHBDIEkV6l7/DwEpRu+DdxFvI9FHorekqaI+1sQYUUqQKI2IAiIc9i0hRLwbP5dRWuTxziOchBBp246iSun7gSRL8AFCAKXTe/mSpW97dpsDhPtiOOuxQ+CwaxiGgcEYghMc6zEG9bjv8Tbw8b
eeYo3F9Ec++vDrCAFaK4a+4fbmebjbvDTBD+b84mGvEl0rpfYRGoSw3gcVEE1Z5N9+0+dlYmLi+8P0AjAxMTHxPWS1Wv1emQbZ7vZE71msT8Z4TjsgRSR4hx3GwXO+zLi4mHH9eoszIGVkvko57C1ZniARDGbADZ7eDOR5Tr3fIKUkr2Zk2aiVd36MnpQiIctKetMTt6/H+Egz8LkPHjM0A0VWYJqBLE3ZblqSTLHfHjHDQG8MWZIRosNYw4MH7zCbLbm+u+V0vWYYLO9//gydaopU0BwdeSGJQDmb8SDJiARkosmzFG8NWapIdIJQGiVTsiylro8sH67I0oT9foNz73A8tkQX+PSzF/TDwHqxZrtrabuB+RKyRGN6SzUvR528DTgT8MGR2jGdZznP8Q5U5qkWBab19J1hVhU0h44sG6M0y0KP5WBti5TjUH88dmgt7wfuQIwBxPh5WufQiUTECB5sZ1FK09Y9ZSWwWiEkFEXCdlNjB4dSmugjXd2RFWr0FggIwdO0HUqCc+PPE6Oga3sEK7SUGDNQHzu6JuLtFq0F222LNQPXL29JlccNgtuXt5RaM69Knn72CYSAkiIkWgy77Y2ZLRZ91w3bLEt7IZfROtckWbpJUm1a48rURil9Opl/JyZ+SJgWgImJiYnvITLTYhhauqYGqVikBW27x/mBGAJaj0VOIBj6wH7T0h7vb/mHSJ4LsmRJ13Q0tUFLwfFQY4MnBAcxglDMFiuEkBjTgHcQPFJretuye7VFIKmqJf/S7/u9EOJYrBV6Qgy8unrBerXi5u4Vg/XMZgsS3SKRBATn5+cURcWzF085XZ/Q954iVyxWGS4YNDnW9qxOKo57x3I5YyUl290BpSHPMvbbGxazOXmeYV1EJxlt05GohESlZElOxNK1PVmScn33gt70lPMFQsF2u6Npa8ywRmhJVVXo+1v76AKCgHcBEw2zeUYMgSgVZZWOZVebmizX4CNtM/YlqDQhz8fUnuDBu8AQHLu7mvXJAtNb+t6TpmJ8DVB+/L2KAq01g22RfURnCcRILCK2t2MpWJ6yXC65uTowDJboA94ODAjSLCOEQJanY3JRmhBjQGqBUGCdxwwBnWb0XcNgAl3bst83XFwucHZgvzG4YWBzfUeeVURvefb0U8qiIISBY7N1Suk2zYtWaXWHEpssy1qVqDrVZee9z3Sa3uZ5uVVmyNM8ff5f/1f+ld3/9hd/8U0fmYmJie8D0wIwMTEx8T3ij/3rf+ykb/ZPZJjjrCVJM7RMiN6NOfBmIE003kVMF9nfdSxmCctlhTE1wQfKKuXqVU2aalwe2N5tGfoOpRXOgrUD8+Wa5fqUvj4iYyB4TyTS9C11349yGKF5/4Mfx3nJfJaiRML13Q7nDU/eecz27pZj3XJ6coH3ls4dSdKEqpxjbWC/e8aDR2+zWp1xONQ8evI2TWdYL2fs7mqSNCFJNWbouHiwZLdtyNOEVEvMYOj7gZPTBwgih6ZhNluw3WyoyopEK5TMKIsUM/TkWUrfdwglKYtLnHOjB0EnxBhxNuCdp2s6dJIAAfDYYaCcl+RFxm7bcHK6INUJphklQrNFSd8O4MAOEXRABkHwkqrKuLvdo6Wgay0nZ+PQ79yA1hl2CJS5RGlB1zqKQmEHR1EkNI0lTRkLwRIxxqo6T0SQ5GMuf4wQoiMEjfAOLxQSR3PsiVVx7x9IyIsUN3h2myN5ntEeHd4Irp7foZPI7m7D5eUJr168Is00m7stWdrR9TX7/ZYnT95CJ6oPwe+yLO2kVn2SFa+zsvw0L7Iuz/KtzvLbGKNQWl1JnXx1BnRe7n7mZ37Gv+EjMzEx8X1iWgAmJiYmvkc8fGfx37y9vv1cNdPcXd3x8MkKZwfcMGbCO2NpDzX7/cD1ywOvn99xsspRuWdz01OWGYmSVEVK347yleOhpmn2eCuR1uKcY75ekSQpd/WBYegIAayLdIPFRxAhcro+Ic/mpJkmRnjx6obTswrbWz6+vuLlq9esl2fEYNnvj1TljDTNSZIcBLz79vsU5Zznn33KfLmgqjIUmrub0T+wXGV4H8lzhZISsQ+sTxbs9w3b7Z6iqFiuV2zvNqRZRppoEp2QpRV5UVJWM7y3aKXpe0OSaISAPE/ZbrYoqUZpk/eoENlsdpyfL2mbAa3GKE4zDCijMWZgu21Zrmd0R8swDBAc3lqGwWN6yyJTxCEwOMFsmQKC4MARyNIU0w6oBJwNqFKgpcB5P97W+4AbwLuIiBLbG/IsRUqJHSzOBXSiMM4wm+cM1hGjhxhHn0DwyBAgwGAcxFHSExPIcoUSmmbfoYXm7rrmeOy5evkCP/TMljPE/TKx3WyYVyVdZ/DBcWw20bkHlOV8b+NwleRZm6bZISurV3lVfZZn+aGs8q+oKv2drJHDH/5Xh+ZnfuaXp6F/YuKHkGkBmJiYmPge4Z1rtne1sv2zsN328tETiQs91lmCj9R1Q1MPfPzJHdcv9/z9X/3POTn9F8jKgq98+av8vt//EwwmstnW7A8tMQS87SFEnB+ww9hSe6JPcdbgvcWHgI+BKMAGjxCRZVWRpgmmP5JpzcvP7nj8zinD4NjcbWmOO87Wpwymp2sblsvlvUSlQiCQUrC52yJ2DbPFmifvvoVA8vLZjtPLCiWhmOXs7mpOThZ8/OEVaZYQo7w3zCpOTkfvg9KKIi+J3iGlQCmFUAqdJbjWffeWP0kKyrKk63vsYFgt11y93t8bonucHVDqjM3tjtVqho8C7yPWOHabmhgig/F0jcH7gJAaa8M4cCNw1hMt6FSSZQltM5Blmq4x5LnCDgbiWEgmiFjrSbMxYWiM/BxLv4wZEASSROFdIPhxaRBCMAyWLEvou7GIzPmAwhECCBdQSjL0huBSBJJuMPgYsC7y8sUd551nt++4u7plt7+j3u9Ydkvq9sj6ZE1T75HRkiYlEuEEsdntNjIvip0mfZYV+b7Iy30xn32aZrMdiWxWIvsH//6X/mID8Kd/6Q0fkImJiTfGtABMTExMfI8osvLv3lxv/tsv2v1PdL2T735wy/xEY/oB17f0reXupqPeOfrOcre94df/wVfIdMpXf+u3SHXC6uyMzbYl+oA1LX3b3BuHLcPQcjjuyIuMspzjvcO6MclncJYQA4lSDMbQqRYVBZ9++ILHTy64uz7w+vWW4Czr5Ql3dxuM6YlE2rYhywqE0CAlbXNkPl9SlHOqxYq67vn4o1ecrNa0ref0QU7T9Sgt2Gxq7m4PPH7rlK4d8C6Q5wVSaYzpgEhZlhx2e7TSYzSmHJOEuiagtERYSNMMpTRNfURphZSSSMQYg0pSijznsG84HhrKqsA6S993aKXp2o6yyjnsa2IsGGygrQ0hQt8ZqrKgax0hWGarYjRWW4tS4KyjKHP6biBGyWAsMTJGe3Z2TAUKgbYdGAYQ0pBl4wtCU3fEoAg+Yq0juEhZFjR1h1Zq7AgOYXy10OK7S0N7NGitCMEipGS/a9huO7rGj7f73rK5u2K73xDiwN32FoXAWcOhtjFNrPPemWpWXbnohiIrb6py8TTJ8
uskK/rZYvE1FeSXfZ6bf//P//nmTZ+LiYmJN8+0AExMTEx8j/gf/vE/fvU/+7mf+5Ovn7967+OPn//+R0/W6uwsw9Q7+sazeXXguLFEY7HGIqTgP/t7v8KqLJFKYI1h6C1aCIw1mLaha9oxOzI4vDPs9ncgBOerc6wdRpOqFBg3ttvGCAhFnpVE57FDg+kthATvA+v1CR/d3aB1SjUraRpYrU9YLlZImaCTlK4Yl4vDbkvfd/iTJQ8fPqJ3LbHTvJ2fsNvUvPXWKb/5ax8xX85QiWKwASEVs3nFbnfA9B1JqhEiIqVEJ2MbMVKQZznX7SvEqaAoMtr6iHcBGwyzckbXtUgladuWk7OKPCvYbvcgJUmaUjcNfd9T5CXHumY2L2nbnjRL2W8bIqCage12T57ndE2PD571+Yz2MBCCJ8axXdjbceAf/xkSpWiOHUmqEGKgKBPqw7i0eQ95OS4H9dEgUCglEEKMRWnK03eOyEBeKrTW7Lc9aaIJ3pCkCdu70eOBAK0Dt9cb6kPDdd2TFRn1YQ9EmrYmVYpI5OnTQFmUoa0bTxm9j64tZ9V1UhSvirxoq/Xp7+gkqRXio8XZgy//qT/1p7o3exomJiZ+kJgWgImJiYnvIf/Ov/fv/fq/+d/9I3+qysT/5G//3/7TL15eVHq+Kri9bdjtW/rOs9vuOGz3Yyuw62n6wHy+5vLhQ/rBYbqGvm1w1qGTBG97gnMYYxi8pTc9XdcSg8d5R2d6nHcIKfAhIlFUecVyMefsYkGWFWS54t2zC559ekWa5iwXc9qu4+LBW2Rpys3VNUJYvBsvjEMMpHlGUqQ8fvttbm9umC/nSBu5fXXg0Tsn3N41qCSlnOWYYbw5z4uUutkilaU3RxbLNSEEpIKiKkjTFCUVQz+WlllryIoEH8cc/HBvls5zBQisHYgRmrbFe49OUnrT07QtUiuOzZ5IoGkNaZDM3Jzd9kA1L7i967HW0fU9x6ZDSYGIgvY44MO9UTdImmOPUJ4QBCGMMaDWBtI0oe8GlJYcj4bgI8FrrAs4Gxi6iPM9RZ4SI6Sppj529P39opAbihJM53EW9tues/MZx7pFSEUIEWsDu23N9vYGiNzeDRRZirUWnabsmwNlnsSb29f+wfmDQWtVd6apZ/NKBCVeVfPF17Mkk3lZ/GpQxbf+wn/4H/Zv9ABMTEz8QDItABMTExPfYx7/yAd/s++Pv+/jb2/LX/k7v3H+oz/2+Wp3NLJrDfttzW5zYL/bcKyPSCVIdEpVLum7gcF5DocDWgl8sAjAO4ezA3V7ZHCW43FPiiQQaExPbRoiEREFIkJZzSiLks3dhv3+yOVbcHp2wuaup206zs5PMV2PFAOm67m9enWfJCTIsgIQrNYrRKKpZguur3aYrufm6ooPvvhFDsbw3jzlow+vyZOEGBzHvWE2y7m92dB3/b38RaDVmL+vkEitkUpAjJi+ZbaY0fc9i/VyvJEnUJYLmuZImuZkaYF1A/XxCGIcztdlQd8a7OBRUnE47MmLgqZtUUnFbrunro9EAm3XkaYlL19co1VKmir2u462Hg2/EYVA0bUdi0VBPxiSVI9RniYghcMYj5ADwYHpBnpjkSpSVik+RLrG4i0oJQDoe4sxA0TNdtugEoExHvpAUw8kSUeaJbx4fosUihgixjiurq94cHHO6xefcXFxTj8Mo4TJeS+s6IssbQdvurxatUHxoU5SobOkL+er54mSX0/K9jtf+tJfnYb/iYmJ/1KmBWBiYmLie8zP/dzP3f3cH/sf/K/r3Sa5ef3sp3/l+upHzk4fz3vjZNc27A9HXl09pxvG23YXPHW945OPvsPlxQNUcCghEc5i+p62b9gcNhz6IxEwfuD1/hoYb+ojIMQoQ9E6wdqBpy8+IZEZjx4/wpoDV696tEwJ3tMPo/Y8z3Our18j0KRZTrUoOT09BZHQNjXWOfa3G5ruiDeW2XJJOc94+N45L18cOO571itF33mKoiTNJbvDgUxXvH71iqIoyPN8bDHWyVgUpgR9N6CTBBE9x7bFu4hSmq7rmC2WhLrG2IEszzjc7AFJAKSWrFZrNtsd3tkxZtM5BmMAQV1D33VjO/KdpShzXr16yf+bvT+P2TXND/rO77Xd+/0s73q2OrV1dbfdtoNZIgZNJMgwE4SUUTQRPRmZ2AQCIXgyCCxmRjN/tCzNhiJPGFmgpAXG7W4WuzPARIqYCQTwEGAwdINxr1VdXcvZ3+1Z7/1a5o/n2BiC7W47+LSp6yOdP85bp97zqH636r1/1/Vb8ixnPl8wDJKnT7b0Q898VhL8hBCCzWaLFAKQhODZbye6ZmDsLW03MPYGOwXafcfkHEI4lFJ0bc84OLabltm8AiHZbQbcZPHiMNv/1p1jxnHDZtUxTZ6mGajrAjs49vsV282asqgIQbBeXWFUYHXzDOcmZ631eZHtEm32OjU7mSSNNNKWVf00r+Zf8VP/keD98KP/5V/+uy/mSY+i6NcK8aI/QBRF0QfF/+V///3HX/ziV//9xw+e/q+uL9dv9DYsunEUnXVsux2jmyAIUqORQqKk4WRxxLyeMY6HcZbWTax3a3Z9gw/hn/n+Qhzq1v/p7yHRGhUESZJRlQtmZcnYD9y+8xJ37txHSknfD+jE0LUdV5cXZFnK8mTJ4viIq6trmm2DUZq+61DysCG3WsyZnyy4c/8Y7zyXT/fge+7drVmvBm7dO6bbjWxWLQLJl77wZc7Pl9x/9S7vvP0+CoM2CiEl+12DnToIE8NkmS+O2dys2Ww33Lp9m7bdY61nvphz8fQJUkCSZkiTMJ8dMXQ9zlu8t0zTYQfCfDGnbzuMSdhstyxmRwxjx7OLp+RFzenpGWmSYq3FGMXRcoFSir7vmUZLVVUEJGkuqeuS1fUOay2T5XmzsGPoB7q+5/RsyXI54+ZmjTEJQ2/RRnFytuD6ck0/THgfKIuc1z9ywm7T8fW3Lum6nuXxAik9V083dO31YfdDVmHtjvfe/xomMWw3V04nyb4dbJ/l+U4nqkmM2S7m86dFUQ5VVb03Oz7/76wfLl3X/TZK82Of+cxfvvjVfr6jKPq1I94ARFEU/Sr5P/zf/uT1H/kjf+S/SLJ8l5ZP/6dvvvXOb1k3m9P9OGUB0FKhjPbDOEpEgDDQPm0wl5oAh/GeIeCD/xd+/599+RcIhBReCZwUwmZp6oqs0F2/z4ySvHT7JWbzOf3YUxUl83nFdrfHOctsNmNxNMOkmkcPHmGUoUgzmqbBO0+WJCgtGZ1jmGBzPTF0PavrLa++cYubbUfbdiRJwsObFbOq4vrygnqWkxU50+RomoaTozP6YWCa7GF+/jSRaEEIkn4c2bUN2mg2uy2pNiAC290ek+VsVldk3rPMSzbrFWmS0Lft4UhLSJTSrG6uCc6hpcFNE8PYcXX9jLbdYZ2lrkoIjkePH3J6ek4IgrJMubrcUBQJ/TiitcYMghAE+6an73ukUHR9zzB2GJ2wWq1IUoOzgt2uoZ6BnQJXVxeUVc5kHZPzrC6vUOfnPHxwzWxW0fcNzbYhLyRpnjP0LcJb
Lp+8T1FUZEWKCw4VdFBKDVKIfZYmfZ6nV8WsepqYZFeUxXWRl5s0y7++HYa/9tnPftZ97/f+rqeZS9Sv2kMdRdGvSTEBiKIo+lX0n/1n/1n3Ez/xEz/6d/7Wf/25NC//3aurq990vVofd8NUj/1QF1U1rna7et80c2OSoe3acvT2nznZ//mklB4IIQQpECBCSJKkLfPsJkuzTZYkXVHkK2w4PjXFdwpkut5scEIzW0hGNzDZEYHAO0c1q/FO8OzxFep5vf44DgzDSJKmz7f6dlSLJUZrht7x7OkFEthctVxcPuPOnSWPH16zXbfsVjdIqcjLnKPjYy4vrmmbnuJeSts1TNOIMRrrLNqkOP98FwCBcbIkJsVNFmstq+2as7NTRhtQDsZxZOp7umaLSVKm4VBapLViu7lBS0ldLRiGnn2zxbqBfmjwwdH1HW3b0jQ7hJRolTGOA+M0sL9YURQF1k7MZjO0TFmvDlOM6lnN5dUzpqlHycMtTd+P9N0N1h5uaBKTsVqvWF2vWa821NWM7eoGCeyairt3FUoplID9pkEJg1KBrhsIwnJ18wC1MQEl7NA3oSgrO4z9kGX59WJ59HY9nz3WRnVKmS7Pik1a5V/5zI/9BQfwYz/22Ue/ek9zFEW/VsUEIIqi6FfZxz/+cQf84//dH/gD72zOz74NqU82281vVFJtri5vvssJMXvn3fc+VOTldLG6OW+a3Vwnpttt9yfGmN56mwbvZWqSbZKaDkQA0EqMCOkWs/n7x8ujd4qqeFgW1dtJkuS73e47V5c3brPefthaO68Wc1mWGafnZ6xuNmx3a45PTzFJynaz5uTohGGcaJo9AD44ttsV3k2kecHy9JhpGtit9/jJkmYJz548ph1bsvw+773ziCI/jOc8mh+z223ZbnY8fPCENMuRSrDdbZFS47xlshZGyWq9op7NSLOMm8srFrMl1k6HWv6+o20a8qwg0YbLi0uyTDOOA4UE5z3BCXxvny9FAzMNrDc3CBnQRjHZCSR0/Zahs4xjR9sEtpsMITTGSNbrG7IkZXV1hTEJzq3YbbcoKRmGjs36gjwruFqtqMoZc3vMfrdFKUnTOhbzGdM48t7775CYFCECzo+sV8/Ixx4RDrsRTKZRQrLfbMFPJCbBGMNq3SHVaKuq2jrvQ5olHiH29XLx3vHJ8deyIr9QSTIEL9okM2/de/XD//gFPs5RFP0aFHsAoiiKvgX8wB/83WcuK9T28fq4n6Z/UxqTPX38+CN5Wfc31zdnWkl/vV6djEO/XC6Xn3/27OIjR/PZSiVq5YOQWZbuvQu6rKqbsqouqkX1OSmTn/n0pz99/Yf+0B+qLi8f/4Yn7zz+/XYMvz1LZ2fL5ZKzuyfeWpSUh4qRcZhomobEGJqmxTtP33aMY0/XdTg30XUdp2fnLE5OkEIxDZb9do2UkrZvOTpZkmYpTx49oygTklSBg8mOdF3H+mbHvTt3STPF219/m5PjU9Ik5fLqGUYbnl084403PgJ43nvvfc5Oz5BCcHnxjMH2SCkpy9nzJWFbnBspioJpsmRZSpoUTOOEnTombwkEnHUEZxFSsmt2hBCo6yVCaPb7NfPZDO8Fi/kJIXjcNFIWFav1NcujE5IkpR8GjNYYpXjy7NFhmtHzxWt377xKPwx45zBao5RinAb6rqOua3wIaCnp+4Ysy6nqOVleoJWi23dMU0fXbtAS9vs116tLK2RoZ/PFdUBskzRt0zRf5WX1tdnx0XtZUV5qlfxDlefvffKTn2xf7JMbRdGvRTEBiKIo+hbzAz/wA+U43hTDanoNpapJyifdavWbTZ6/fXV5+eGT5ekX+t5eZrl4vc7zn/a5894XXratBFBCuD/56U9f//zv+ft+3+87unn09P+8We3+x1Kl9+vZItdKqdliJodx5OZ6w9B2LJdLBnuY4Y8PeOdQSuKc4+r6knEaOTu/w6yuefjwEVmS0nctOk1I8wwlD1N0hmFgs73iO7/rY3zlK1/h5Zdf5Z2vv02iDIvFMT5Y3nv/HV66d5/JOna7DVmSst1sODk5pShynj59SpKklGXBNPRcry8ZxpHFbInWhnHsCd4hBHjvyLKcPC+Z+gmwtF2Det5QPY0D3jtGZ3HThDEpSV6w222ZVSXjOFHXRxhl0ErhvcX7Q69Fmuaon9vkG+iHjnGckFrivKeuFozjiLee5dExm/WaPM/YNztmsxkhBKQ4lP1IKUiThLqeIZVhGAa26wua3RXeWhCepmu3QolVUZWrLM+fpHnxMMnytqxmD0yWvQP6C5/+8R9/81f9wYyi6F8ZsQQoiqLoW8wP/dAPNUADXP7s1z7xiU985Qd/8Ac98JM/749+7Rv9nvfu3VsP+/2nlZLv79v+X7u5efbr+q4/mb5uyyKbZ8vlktu3z9jtOorssKDLucN23JvViu12zbNnDzk9vUWe5Tx4+Ig0SdBG4ztLXtRkRc5+u6dttlyvLsjygnfefZftbstmu2OaHDLAerNCJYZdu6HtO1brDUJ4nHcgBf04kuYZwzgilWa93TIrS5AKj6frOtLU4/yEFJIQAkoprD18XiE8bdcyTSNJmiHkz45DBaMF+IAgILxDS4m3jiQxDF1DUh5uA0IQCKXwzuLciFIZ1lkIAYEkzzKarmE+XxC8g+BwziKFQGuF956iKEmTFKUOS76qqkYphbMTcPgM3juyNGF3M9INLR4/JFmyUyq5TNLsJs2Kx/Vi+Y4pynWSpF9Oy/nf+eQnPzn9D/awRVH0gRRvAKIoij5A/vD3fd/iuht/69WzZ79rvWp+qzHFaZ5nKk0zud83JGlCnmZstxusnRDS0Pcdl5ePMYngY9/+3ey2Hf3QM44jbddytJxxfusWF1dX9G3PenXJzfaSW7fv8+DBO2RZTpJk5FlO3zYQQKeGZ1dPWS6OGfoRCBiTQPBobbhz5w5fe/Or1OWMfd8yryo2uw3dsCfTCRKFSRTTNKKVoigqvPMYrRHB0w8twXtm8yMQh6FKbbvD47DT8/dnH5DqcA6mjEEEqKo5RmdYO+KCRytNCB5jDFrrw9fdYcRqCP75ojSAQN+NHB0dM9kBISRaGcrqUJKU5TlpmhFcYBh60iQ5bGsWgs31E/abS9q+cQ6/FYZ1UdTvV/Ojd+eLxdeT2fwrKtGf+7Ef+/GvA//ibvAoiqJvgnzRHyCKoij61fP/+NSn1kkl/lGSplOeZUbgps1m5R88fC/keQrA5c01XdehlMaOEyA4OT7jox/5Dp4+fcI7777Jk8fv0DQbZnVNmhU8u7jg6tkznB1xIVBXS6TUjFNH4DC+VGvNMA0MdqRtW7K8ZrPbEnCM00hiDN5bdvsN1jlGN7Fpts//nQkfPFmaMVpLb0es80zW0Q8DXd+ilGAau+dL0CRSKpRUGJWgtUbpw++1Nggh8ECSZAihSHSKlOrnfo1TjxSglAQCzk0oJRFCUeQViUmpqhlZWpCYjMX8mNlsTlHkFEXOyfEJRVGwWMypqgopFEWWkyYGpSTWOYwx7PcbnB3xzhJC6NIsXaVpfm2ydJNX1eOiWrx
Z6/xv/diP/fjbxJf/KIr+BxJLgKIoij5g/N5+V57q1M/MX76+2vy6ROffvVieCutg6AZEkFTzBZvVmqIoEd6SmIIHDx5wdXWBNgllWVLXCyZrefrsMdPYY56P0rR2ZL44YbfboKQkOI/JNNY6xulQtuPxBCHphx6Jx4eADxbnPNtmDyHghWdylvA8KUhMQtcNyOfLyLTWlGVF1+4ZhgElJKnSgEAIEEI+34b8fM+ASXDOI7A458iyDOcOE3mkMkh5+PPDcGh81lozDANZliEEKJUAE0lmSEyBtY4szQlCUBbFoQdCa7RV5EVOXpVorVgslwzjgNSe+azEYVndbJg6QZZotlcNnuCUwSKCLev5++V8+WY9m72VLo7+6n/+yU9uXvQzE0XRv1riDUAURdEHjCjD/+/o3vH/tq6qr1Z5mRZF3thpaPfbjc/SBAHsdw31bIbUhxPwq8sLVqtrhr5hGjrwnma3pd1v2W3WDH1Pnudoow8vzCi22xXOO/KswAdPmqZ47xBSMI3D81p4cM7jrKXvR9I0xwXHMHSEAPJ5Q+40DYfRns6h9eEUve0bpFJkaY6WmnE83D5M04hSigAoZZ6X7xiSJEerBCEEUkq00kBAmwQpBFIqhqFnnDqEhGkakAIgoJQmzwuUNgQEZT0nBHEomSpy0jylqmtMklDPF0zeUhQZQimUEuRZihSKYANaaoo0J3jLs6cP6foGcRjhOkIYkyzfFNXsUpr0i5+ML/9RFP1LEBOAKIqiD5g/+2c/e1kU9kYGkS2O6r9aLs0PmWT8KyE0l5vNFdPUI4Bxmhj6nuA8zk4IIcjziuXyhGnytN2eptuhlaSua/qxo+16lNSM45a22aGVwSQZKIkyCUJKBGDtiJYCrRSJ1BAC49Aj5WGj8TiMGKkxWhG8PzTuOofAg7PMqhk+hMNegDQjNSnGJHjvCQS892hlSJIUKRKMycnSEm0MHo8yhgCHU38pnzfqOkLwWDsikAQgSIE0Gp0k+AB4njdIh0OpkYQ8z0gS8zzxAWMS5rMabQR5nlGUCcZIJuvYNwNSarSWbJsV/bCnG1rfT9NNUS/eL6rFU+dcaodulnl5/YvFMYqi6JcrJgBRFEUfQD/0Q59pxu/Rf/z+cv5/Oj85+dTxyeK/TTL9lX7onRAwjT1j11OXFXa0eA9GZ5ye3WKyE+vtNS4c6tjPzs8xyeF0X0uJCIK+7cizkuXiBO89R8sl69UVw9Bj3YQ2CcE7lADxc3X2I3YckMDQDxA8dhpBCkY34UIg0QpnLRLBrKgOs/dNilSGJDEobVDG4LzHJNnhxF8qTJIhlSY8f8kP3h9Gdwpx+CwcypO8CMDhNgCpDs3BQqGUwXmLUhJjDFIp8qIkMYfNw1prhPSkqcGOIyZRKKPpx4FpssxmNUWR0Y8DTdvgA3jr2GxWoOmDFDukuMpns68naXplErNOtb78JcIYRVH0yxJ7AKIoij6gPvvxzzqg+cT3fZ+7HnxQITu7dX6iAoK2bSmKAhB4bxESXrr3EpvNir5vuXP7Ds4Fjo5OaJsG5wLb3aGh1RiDc47l/IiqXtINLSIIvHNkSUYIHikUznryNCMIQaI1IoAxmmVZMk0DuUkY3XToI5ACO1mKLEUqQQiBeTVnu7khhMByuWS3XWOMxovn033EYfynJSCUPjTbBo8IHMZxEkjSBEFgHAcQYEyK8IeSnyQrSNP8cIthDqVDVV1j1GFjb5ok2KnHJBofLHme48aGpmvJipQyS1Cl4ebqhq4d0DIhMznNtGdsG2SALM3CNNmmnqV7bXSTJmZXzo/e00n5t48+9KH9i35Goij6V1NMAKIoij7gHvTTvxcm/7uTxGxD8NskLWaT80zeQ3BY5zg5OWG32yGF5OWXX6dteoo8o+snnBN4exjBiRRYG5gvjgg4dvsNZZWz3ezQUpAmBuc8JjO0+z2pSfDeYUyGD4EweY4XR4Sg8FpB5xE6AW2QSIq8JEk0SqcoaZAChqGhLiuKrGQYu0PDLpLUpCijsZP9uclAPhySAWM0HkmW1TTtFmcnsrQkUQYvJ5QyFEVNkiQkSco0TWRZilaKaXJkpSR4z3bbUM9r0lQBEmUUCIEbJ/JU03U9Z2fHbFZ79ruW0Xqkh8tnT9jtrujGySpjdklWPZwtj79Uz4/fVYn6mWacfub53ocoiqL/wcUEIIqi6INO+P+mms8eqlz+5sfvX1TDOH4soAghkOU5J+e3GIcek0i6pmG/7zk+OSFLEgY70XU9Y69wTPi+49VXXub999/l6uaSO3deJgRFURY0uwlvJ8p8hkkMuTmcpA/D8HxBlsOYQ318Wcxomi0EjxxH0tSQpxXee8qqJM8rmqYBciYOjb9FUeHxhw3BHGrxnQ+HlVvhUNojpEInhslOJEmOs45xHEjTFGVShNBInZAX5WHb8DQxXyyh7cjSHAR4bzFao5SkzzOUVJRlgfeQFSlSCiSese8QIbDdblHaoBNN223Zra7phpbd0GKFxAbhpsAgtRml1O/96J/78X/wgp+IKIr+FRcTgCiKog+4H/mLf/HxJz7xias3/8k/+deroiyHQaKTQ8NuUVVM04QUAuss89mcrMjpx4HtbsswjKR5gQtAEHzoldfouhYlBC+/9DKvv/5hurZntbrBJhnVrXucnt5ivdkgwuHW4Ga1Is8zjDJM4/i8sbZGa03St3jvGMee2XyGQAEBow3z2YK+b9kFz2RHCglJepg4NLkJF8AHkELg/WHSUGJSxq5HYMjSnN1+jTEpRbEgy3KmYcKYlDTPSdOEYXcYO1oUFUIcphKlqcQHx2JegbBIKQgB0lTR9z1ZnoCHaQAXPPv9gJ06pnFg7Fr2+w2bZsckdBjHYcx14sfJpe1g87T09kU/D1EU/asvJgBRFEURl+++eySlVkGINi2SUM8WYhwnpAxUVUHfKwKeMstZb7YMdkQIxWJ+xGQtZVnwxhuvgRc8ePiQ1179MHfvHaOk4tk0cOfsBCHPCAjsFDg+MqxvLtE6wRhNXdUooelkQ14UzOdHdF1DkiQcHx/x7OIJVT1H65S22T2fSJSRJJr5fMF6fc0wjSRJhnUJaurx3qOUYpoOjbgBgdYJ3nmqcob3jsm21OUJWmcURU3j9yTSAAIQZFl2WAYmJYiAUodFYsMwoI3k6GjOMAwMw0BdLyhKT9f0jN3Ift/iPEiV4IVCp5Kriydc3zxjch3WhlEqtUfKKU2zbZFn1yoV6xf8KERR9AEQE4AoiqIPqD/yR/7I0Xa7Pf8zf+bPfLl17tvx/LqynJ2UdY31ghAEZZEQhMQHSBKDnywBSIxGJhkmMdxaLqmrkr4fWa323Lt7h+XRYQnWZr3jzp1brG62eG+xLoCYqMuKrmmQ8jCKsypLpNTMljNC8BydzBmGnMtnA6dnxyhlGMaBLC+Y1xXX1zeHptyqput67t15mfVmRdt1GJOidUpelkh5qNm31mG0YpomlErIspzdbkuaVkipSdPDCFGlFcYo+q4lTXOUMkgl6bsRpTRCeNJUoZOc/a5luawBxzh6VqstZVnQOs
duv2foHS4EQnCHLcduIs1S+mlk2zTkZdEkeXqTl/kjkySrIJXVXse6/yiK/qWLY0CjKIo+gL7/+7//zjAMo+vcd/ye/+X3/BvO+7uTc2813f69cRjE8aJmMS+o6+Jwyr6oDmU6JmG5mJNlOfU859atY+azkqHvkcJzelpxfn7MODj2+4bz20v2zY7ZIkPqQ/PtcrkgCMfp2Qnb3YbF/Ii260gyTV2X1LMUpQLHx3OOT44QMnDn7ilZmpIoQ5FlHC0WJElCmmbM53MAzk/PuXXrNlJIkqTAqBRnA0JIpJQEBNNkyZKS6XljcFUsn4/5lLTtnjQ5TPsZpokkTVBKobRCKnEYM6oUzluOj+eIcNhpIOUhaVivNwghmbyjms2ZrKcsK7RWDH3DZnPDOFmqeobSxg0u2KqqHty+ffunZ/P5Yy31NFmbvdgnI4qiD4KYAERRFH0A7ff7mz/1p/5U00zNXxoJr1gbaofVWrtpfXO9v748nKZvNg1KSvIsQSlJliekScrJyYIPfegl6llJ2044FwBN3zn2+z3lLOH2SwvyUvPRb3+ZxWKJQFJWBVmecu+lUxbLHKUkd+/dYrc7nJ5neUZqDG5yCALL+ZLVzZbFMuf09AglBd4dxn5WZYm1lqqqyMsCaTR1XbM4Wh7GeQqJ84cafSklIQS0TlBaY60DFFIqsrxAqsOPw7wsCUGSJjlFXpDlBVmecHyyJMsT5osSITRKQlEkOOdJTEqSaoSQrFYbZrM50+QoypJxGsmzhCRNWDdbHj95SLvfh7qqNkVerIOQo0nTNi+Ky6IofupDIfz0C30woij6QIglQFEURR9An/rUp3qA5XJ51zf+bzbj5t/d79q63W2VEqmfH1UcndTsNwNSa5SRWAtNM2C05+7dM/ZtQ995fIDNZo/WGoKkKA+z9bOiJM80dnIkiaMsMhZHFfUsQxvNu+++y3d+50dZrTbcvn2LJEnI85R2P2GMYrttSYzGmJLttmdxlFGWKU8e35ArODqq2W4anBsoy4SusyRJ8nwnQIeQCuk9QSqc84BAacN+tybLcrz3TJNlvjimbXcYo7HOkaQ52qSHfgLbo/VhxKcRkqrO6LqBzaZhNs+xw0iRGYIVFHlJ105U1eGzXbstQw/TYBm6kW6/Z9esSJOk90G0VV09nYJPr65Xr1X1/Kuf+vN//nMv9qmIouiDIiYAURRFH2C+72un1Hdbz5MQwm8RmLPF8SLTWoXri53I8wKce36SnZMVNYtZwtBZrp82tF3LNHkkkn4aMMagVEZZZgTn6PaOoZ9Y3dxwdFRTLzL6vqdpd+R5iUkKtGl5/fWX2TcDdZ0AJWkqCdseIQSLZU2z71kuc7RW3L5zRNP0BCQnZ0s22y1JavAB7GRJTMZsPmcaHVJqnLdYO2GMOczwz3OU0oQQEFIhhGC3bzg+WtK1LUVRY4zGeUcQDqUk0xTw3qJkwdFxyZPHVxRFihAwDCPaaPIixXtB0zQ4J5BKs9lcgQ+0zQ6CRwjhXfC7gOxtsBzPT98t6tkDh10L8bPjSqMoiv7liiVAURRFH2B/+sd+7IsO96Ybp980DuMsLwvZd637wj/5MsPYk88UMpFkeUaeC4pM0Q+ed957ws1qizGKJFEURcZyOePeSycsj0qa/cB+1dE1LdrAy6/dIq80zX5gu+m5uWoo8pphmnj9jdcBx2xekuYZSaZYHi0oSkNZpWS5oGt79s2IMZKyyCiyFCUUwzhwenpCanKqIkEqSdsN1PM5JlFIGbD2kAB459AmpShrhNDkRcl8vsB5R1mUOOcZ+h6lBAhJ34+EAIGAFAGB52a14vR8RponWBdI0pTJj3gcUkq0kVjrWa/3rDd7kjRhu1mx3lwx2hEbsELp3XyxeCCkmpbzxfuv3nn9z2qd/bX48h9F0a+WmABEURR9wGVZ9lUl1bYo8vLB++/uvvSFN/3R8Uzcv38HIRQekBqcC7hJsF13KGXIyxyBQUpNNSs4PTsGIRiGnrxIUeYwNrNterabHm0Uz55ds1k3zOcLTCK5c+cUrQPjaDEJBKCcpZhUkBeG2TxFSzg+WXBz2aGkJzCSpilFkaKUpu07ispQlPmhFl8G2r4nLyt8gGHogIDSCWmSo6Qmy/JDP4DSBA/GpEipUVrj/aHPoNm3TOOIsxYpgSCwLnBxseH07IimadjtRoI39J0geIkximHw5HnNMI6AoJpVXK6ueXT1jB7Gxnt0mmwWy+N307L82v9s3Dz57Gc/617oQxBF0QdKTACiKIo+4IZhWAYR7Hq7eUubZP+hb3vDL0/P2G47wgSLeUaaagQC7x34gJGa5bxivshZLEpCGBmngb6bcFYyjRMhgEeRJgWp0Vw8WZNow/n5kiyVvPLqKfv9CjuNSKXICwPBUlc53jmWyxl5YQgEFssCrQO7bc/x8RznDwvDlvMCIwRd12PShMW8QkmFmzx2POzU8v5wOi+lRAqJEJIkSQCB1oY0TYHD5uDE5ECg61qapmGz2WKdx46B/a5n6AKXT/esbzZM48R6vWW1bkB4drsdBEgzw+rmGiUETy+e8PjiIV6DF9jBWjs5pzf7Zjmr66dVnv/0x+PLfxRFv8piD0AURdEHnDHGFmX+laIuZVnWH60XZ+bpVYdbaupZhh3BTh7vA84GhrFjeVThPYzjhPUTJycVXTORJhohBcEFnBRMU0+aCup6xhsffonLix3aBI6OS955+yHHJ0cQJF07EUJAaY/wYK0lKVIOP6YCEKjnCXbydJ3j7HzO5dUOETSzKqPpLW3TIYSmrmbsdj3OToBHKf28kVeglGScJhACrQ99AMYc/rn3h5sIrS3b7RrvPFIpVtcbvNOsblaE4Dg9PePpky15mjFOA4vlHKMlzgbef/eCxdEM7wN921CWGY8e37DersmLogmCjUqSVidJb5Lk84uzs6+/0OBHUfSBFG8AoiiKPuD+9J/+0w+DlP/fup7PQ2BYXa2um3Y96tSHEAQQ0IlhcoF2HKnqEudgs20JUnD7zgnjENisena7lq7tMYlhPi84Pq25d/8cLxy7fYvJBCFo3vzKI46O55hE44OnqnKmwaGUoml6xsGCDEjtqWclk7Oc3znGWo9zlq4bOT6u6YYenSTUVY5E0LUTSinSNCUEzzhOpOmh3EdrhfMTSoF6PhY0yzKSJMEkhybeoW8Yx4F+6OmHHmstX/va+9zcrLi8uuDxkwc8fvSEYehYrW8IXtJ1E00zsVrv2e5bnjy9BuTh7x8GFvMjsiyfummcdJo282r2ZDmfv5/k+Rd++Id/eHjR8Y+i6IMnJgBRFEURxpuTVBqnpd727WadGe2nMRHb3Yh3jq7pEAFSrdltep48ukQAU+94+nDD9U2LE4J2GBidZ7tvaPc9+13LxbMNSgpMAkkiefL4mrqqIGj2256+7ymKhKZpSHSCQOJtQARJXWUslhlaC46OCubLDCnBTo7gA/NFQdf1ODehzWHc5zhYsjxnGh3BC5Ln9f3GJHh/KPWREoQQJGnKOI0kJqHIC+p6jpCKsqgIweHcwHp1TdPs8SEwj
iP7Zk8/DEyTpchy/DjSNT1TNzJ2DeubS/p2x77Z0jUNzrlw6/hsdXp0fBHAJ3m6Sct8JYSYv+i4R1H0wRRLgKIoiiKcdu+ZPP0H+ZSZJElfsRb39tff8ucnR+LbP/ySKAvoe0BqpICT0yMC4P3EZr0DeZjIU+UpVVkymyd0+w5lAke3K7SStE3P46dbklSDlJRVynBjCfYwcef0fEGaKOxosRM0uxGTCKQSNLuWm6s9i6OSrhnQRrHddCyPCvJC0XcBgUBrxTi1CARJkjBNI1JKfPCHjb1JglCCJMtROmMYJtqu4/R4SZ4VjPMJH0BrTV3WDEPHNI503Y48zfA2R0qoq5qyqkjTlNpUbHcNJklBBNr9jtQoxqHl2eUT0rywWsrp/PTs0a5vxizLbqqielAUxVdfdNyjKPpgijcAURRFEZ/61Kf6JE/+zmJRvZkl6qea7dXXcO3w2uu3RFGnlFXJbHZokD2/fYRUh1P41bohySSLo4IPfeSMu/dPmC8KhsHj0SyWM7bbHV/50vs8ePcCow1XFzu22xYQXDxdMwyem5st57dOsNbStSPWBepFQtcOCGEwyvDmF9+nKBOG0ZEmBiUFbTOR5CnKSHwQSCVRUtG2LUlqMInGOsuhF0AwjT1SBLI0/bnpPgIYpxGTKm7dOuHoaMZyMaesSo5PTrh79yXGcWA+n7NcHlOWJUJAmiTkZUaQ4HEgA6dnZ5ycnNC1LV23x/pxGqb+yhTZJfjd/Tv3vnDn1q0vFkXxYBzH5sVGPYqiD6p4AxBFURQB8Gf+zJ959gd/7+/9ySHtj5fnx6/ev//Ktz98uAqPH6zF/XtnWDuSZSnTFEjTFDt11LOaECzlPKWsS54+2jKOh/n/RWFo2x2EwPmtI2azggfvX5LlgpdfucVXv/J1kjSlrAxCGMbBMQ6B3W4grwyJMYy9QxtxaKZ9v2W7aihKQ9cNFGVKP9hDyU+pGadDOX3gMPt/Gnu893RdQ13Xh10A3jGf1XRdQwiWPKvpWsnQjwhAGyhNAgRmsxypJPN5zXa3YpomTk7P8G7COcvTp084PjomSwqmYWC/WR++h4TJTkhtgtZmU83Lt7O6eFDk+bOsyFZVtfiiMOLvfOYzn4nTf6IoeiFiAhBFURT9HK/1AymSZZHnH/3pz/3jVd+O8+Pz28l8VomT45JhcOybkd1ux3q9Z7vf8Mprd6nqmtX1nt16T9M07PcDt28fsTitmSbLNMDF0x1FkXF0XOCtZb8d+c3/xmv07UTfj/R9h1QCISTWjmzWI+2+4+S0Aim4+9ItLp/tuf/qCc72oCV+8IyjIJGgE4l1IzYEnHeMw8gwdoQQcC6QJOL5+E9DP+wwJsUHidQKpTTOB5Z1jjGaabAsFzNcmHjWdtx/6TWurq5w1tH3HUpJuq5jtVpzcqyx1tL3HcGObLdXbPb7cHRyOpydnr9XzKs387p8P8+KmyzLviYT+blPf/rT8fQ/iqIXJiYAURRF0c8RQiyNkdPl1dVfF97+hqJI/Di2s92+zaoql5P3XG92XF1sGMaeo9MjVJLzU3/3S8ggGMeOspC8/Mopi3lFVaVIkWGtwySKtu3YbjumyZHlCfN5ztBN5IVGa7AaZouC+ZGhayzOQd87TGaY6znrdcN2N1HUCV0zUM9ytpueafQofdgZ0DR7grf/dImXgHHoMUahdYJ1DikPP/6ccwQCR0cLQnCEcBh3mqQJJhFkJkOqgJSSIq9wbmK/2+PchFSSYWjZbG4oi5q+3bLe3dB22+CxbTc2j07Pzt5fHB8/zsrsWarTf/KZH//xv/uCQxxFURQTgCiKouifEk0zpVny+cW8ukqkKreb/X7oupebtjvd7nqpUoM2msW8YhoNi1nN177yFmmSUs4WnJ4fsZjlzBcVBMd6tT0s4EJxs1qxWu1om4b7L9/h/itH9EOH1gqpFWWZstv21HPD+a05Tx419K2laQbSVLJrJ2ZFiXeOEATTCCE4slxhJwj+sC9gGAbsOOCcQwgBAryzjGNPPSuQUpMkBu8l0zRQ5AXzxRxvJxKTEACtDwvDhNAURck07g83A+NAlqY07YQIAUXAjiM+mVBS4oMjEMYkS9ezef1ulmVPpFCd0em1yrLPv+j4RlEUQWwCjqIoin6e//wzn7nwzn3RaN0rzReqqtpUeba9uny6bdomWHcoW1cSjo4WXF3eUJUFQkj63tI0Iw8fXfH40SUXFyua1jOMHp0o7ty9xSuv3WG+mHF0vOD81iljD957hsGhTUZRJpyeVeRFwsP3npIk5rBYLMA0WWTCYYlXokhyxTBZlFEIKZBCwfP6+812A3h8cDhvkVpirQUEQ9+jlCFJEpRSnJ+fHpqKtcKHQAiQJAapBV03kGU5SgmUBiFAaYUg4MYBLcEoSbNb0TUbpBQ2z/NVnmXXRV48LWfV0zRL1ij19U996lP9i4xtFEXRz4oJQBRFUfTP+OSnPvUVIeWjLC+NSeXXJtd+wU3D/uHDR1PXdBitKWcll1c3XF+tePT4GeM4kmWCy8vHPHrylC99+R0ePLhktAP90HF5ecnDB+/zD37q87gwcnJaIKRlu2nxQTB2lt12x2yRMV8meOcZhpGsMAQHzgoQElRg13SApJprpBAoqdGJIBAQQaGVxE4D1o+H2n7rGMeByU4IAc66w9bfoSdJDGWZ0vc9Qkh22xbnJsbRYpKAlB5rLUVRspjPSFJNliSUeUGapoflYlPH5eUjmmblJjdcl3Xx/tHR4s2iLh4nJt1nef7lP//n/3w8/Y+i6FtGLAGKoiiK/vvS9KelDafDOH44UakQGV01W7giz2maPVdXG7abHV5MnJweM6/nfO2tt/HeMlsumNU1SZbSdQNDb7HDgNKOoip47bWXubrcU8+PMCbl5qphtjA0uxEhJGPpafeB0/NjVALtzuJtYL8bODk/otlNXD7Zcvf+kizzdE1PURrsFPDhULrjvWWz3nDn7msQAvv9Gqk01k7Y0ZMQmCZLXqSHbb79yHJZc/nwmqI4BQ67AMpK4H0geI/WGcvlgrHrqauKse8Y+5Z26AnBYf2wLmez9+ZHJ18o6uphmmSrJFGXQbmnQHixAY2iKPqn4g1AFEVR9N/zIz/yI+8VefLf1NV8m2a5SLM8KClD1/QYnaClZDarOD8+RSJ4++23adod4zTR7RvafcP15Yq+m+j7Fu8tt+6c8Vv+jX+dyXnySjNNgstnW0DgneTmukUpzdALvvSFByyWNV1r8QGGyeFwjKOlmqVMk6DZT8zmGeNo0VohpCeIgLcefMBOA1or0jxj3zR0fcduv2W730AIpGmCD4Lrqy3jMJEXEmsd1nrAYW3AWYGz7nkj8UhVlyijyNKEuioRQtC1HdbZPsmzi6yq3itn9Xv1rH5UFNXnToX+S5/+9I9/+UXHM4qi6OeLNwBRFEXRv9BV0zw+qYv/Nwq2q114+vRhNnSnLy2Pljr4wH67YbV6yna3YbKePC/w3iJkQdN2ZFnK06cPmYaBD73xCkfLY7xzHB0XLGYlD967ZrvtuP/yGbtdy+pmy52X
ZkipeP/BNcuTBQ/ee0aaFlR1RlkbdtuBk7MK5zybdYtWOXWdYZ2nKBKG/ho3HZqEpTIIqRHB4/2hIbhp93gXCCIgtQYE221DWSUkqaEoU6x1hOe9AMM0UVY5u801UgratiF4z37foKRESoG1AxD2UumboigutVITQl433v/dH/3sZ+Os/yiKvuXEG4AoiqLoX+izn/2sC0rt8zz9alllnyszfTENo+2HgSTPCD7QDwNZXnByco4QAmM0q9UNF5dPuby+ZJosH/3oG3z4jTfY71qGZkKh+NLPPODrX39G0wx0/UA/jCRpwdAHvv61J1xcruh7S/CB7bplv2uY1zVKwjiOzOc53nm61qKNRKlAVabM65Su32P9hJCHcqAgBADDMDKNI0pL5PNf0zSS54a27ehaR1ZkhACgEIBzloAnzQwhQGoMSgiMMggkbdfS9K1PimyTl9VKCjUgpSOo5rPx5T+Kom9R8QYgiqIo+gWN8FgJ8R1pljRShQd9375SVrezrMjo2z3PLgzjNJIXmtPTO1g30ux33D075/T8nDzPqIqEZxdrun5kso6/9/c+x3xxRNM2zGYljx8r1pstZVVweVHwhS98jV3b8s671zx58IjXX3+dzbojuBtu35lhtERpyWxREIJDaoFJFAHPbFZgEoV1njQrGcYBpTVZUSOFIs0KtDFYFxB2QglFmhcMg2Wzbjk+XvD08QaCYxwD0wC77Z6qqNhurjFaIpXGywGjEwQwjB1yRBxlx7u0yDdJkjVCqasXHbsoiqJfSEwAoiiKol/Qj/zIj+x/z+/5Pf9ESGmF4tUkk/vjk/rceYd1E3du30VnGWmWE4Jju92Q5SVZWbNa79iu97w/tBA8dTXjwYMtUuVInbBaPaJpWjabDmMM0+R58M5Ps95eszg55tmzGx48fEpZ1hRlwXrX4B50fNvH7qMUVLMEO1nSTEMQyCCRUlGUFUJKsqyk6zsWyyPKesY0OpQ2IDXDOBJwnJ2e0XQtUmusDYAnzxV2ErRNh1IGYwJdNxCCZxwdSmmSLKPZ7Rknh9Ta+wBCpfvZ7OipSJIrlLIvOnZRFEW/kJgARFEURb+Y8KM/+qNv/Uf/wX9g6tn83xQhS/a73iGEuv/yfZrtnourG9brNZvNCucmqqrG+0DbNCznR9RljTFQFSWJSWi7nqGfmNVHZFlO2+65unpGCApCoG021Is5dhrJ05LLqzUnQnF6OmO9bhjGibRQjN2E95AmipvLPXlaIIVCBEGVl4dRn+OIEAIpNUEGggA3Wdzk6JzFeU/TNkihuLnx5IVkPs/oGsc4DEzTiDEJWmq0Mkgl8c6SaUme5tzcXIHU3Wxx/M7i6ORtWRSft9Z+6TOf+lTzogMXRVH0C4k9AFEURdEvKYHLIimbtu8uLp9d9VoqyiLHOU9wDtu3YC2JUkgCN1cXdO2OzeaK3W5NUWQcnRyRFRkmVUghWC4X7Jsdj5++z/XNM4axYZh6ENC1h8lBQki0SfFekJqU2bxGCEmSaooyRSdg3UiapQQRUFqQFwU6yZA6wVrLOE5Mk4Mg2ay37LZ7xnFitVqx2Wx59uyKvh/YNz1Pn94wjJ5+GFEywXtJP0wM44C1lt1uizYJ1jn2Tcs0WUBO+Wz+MCvL93/n7/ydn//MZz4TX/6jKPqWFm8AoiiKol9SWRS+m/aPskx+tdnuji+eqmxxtFTOW5LEMJ/PSdOcQOD65opxGtFKcvvObc7Pz9Bas97sQQjSNCOYwPvvv8d6vYYgmM+XDMPI0dEpSmqsHciyhCFPWCzmnBzNmM0ykiwlSQzOWbQWVCbH2RGpHVJIdKIo6hqpU6QAuxtpmh3WWrIsZ7u5Yhon0kwzjD3vvfcuWiXstnukFPRDitY5Yz+QJgbnwbkJITzOjljnWK3WKK1JsxwpJXivpskxeX/5xS9+UX3f932fiVt/oyj6VhYTgCiKouiXNGVZz37fGCXfE3Kc7/fb8vT06PTOnVO2m5bNeo9RIzfrG7IspywqTk6OuHXrNt5b8I5ESabBIqVkt9+R5TnzANPYMw57ZsWc4+NzELDfbEmTlDc+8ipIRVkb0kJwelaS5wYtA2mq2e16sjzFaMd+P+G9I8kMRhuGvmUaR0AghMDaCectfd+wXV3hAuzYs5wfoZUhz1Ls5FjdbOjahuOjY/qhw1pHlhqEAK1yxrGn7QbqquTV119h8t2QGTXm6O7JkyehbVv/ouMVRVH0i4kJQBRFUfRL+qEf+qHm937v9z4Omle9DM9kCIOzlnpWIRF459gGx2RHvJ+oqpI0Sw99Ad6RmQwpFM55+nEEKRASTKJJszmr64ksL+n7nqPTGbfOXmGxKLhzt0YoAcGTpYqT0wolBcEDAoo8YxwnpISyzNhtG5QQlHXNdr8FIbDWHm4ngqMq5zgHJskpdEKSZxhlSJOEerbATiOTm7DW0/UjznuGcSQxCWmaMgwDaVIQwkjX93TNGiGkKMtyFKk6+e2//bf7j3/843H8ZxRF39JiAhBFURR9Q3SW/XdpCH5RL/4X6+v95vL62cnte8fZNDq01oQASZJRJgVaay4urkgSxXxWExJJP45YN7HZbCmrnM1mzenJOUopNqs1nsN4z5PjIzKTUdeGo6Mck2oEgqGbkEi0lnStRUqB1IHCZGw2e7SW5FmKdyMeQVFWDONA33ecnt0GQAiJSTNSk6KV5uzWLYJzeDyL5RxjFJvVBuccPjiyvKCezXHWIqQiTTRPnz6jqGru3D2m2yVcPH2o3DgmwnuzWq0kEBOAKIq+pcUEIIqiKPqGfPKTn2x/4id+4m/+tf/qvz7pmu54MZu9Mo49JhXoVJPmGbdv30FKWK1WFFnKfDFnNqsZxwnrLH3fUdcljx8/wPuAtZ5hmMiKnLPzU55dPUMJhbeQmMOPKG8nTk5rrqfpeS1/QZpC1/b44KnrhKoqaZuRgKcsE4ZhIC9Khr7DB8/x2SnDMCCURDV7loslSZpS1RXBe6RSzOYFRZaRJJp76R3afcd+16G1IXjP6mZNPSs4v31CEIoQQGtDXpRBCDGJEPK///f//inw+MVGKoqi6BcXpwBFURRF37CPf/zj7pOf/tSPn58ff6Htu13fDRyflVSzjLxI0Vrig6Oscm7fvsViXuM9CKHQSiNQXDy74OryGe1+D0AIHqGg63vmiyWT9eS5xiQaKSVpZg5Tf+qMJMlomp4sM2RFytB7um4gSRQIh0kki0VJWWR451kujtEmQWmN1IpqVpFk2WE7sArkZYbJDDo5lCet1mukFIzjgEkTggist1uatsVaR9MOlGXF6fnR84Vilr7r0nEc54MdU5z7tj/wB/5A8YLDFEVR9IuKCUAURVH0Tflf/77fe3uylmls//Z2u22cFSjlWSwqslxztJwzn82ZrGe7a8jyFG0U2+2O9WbDMIxU9YKXXnoZgqcoM+pqgVCKNEnZbVvywmDtgNIKIQQgqGc5CI9zjnG0pKkhSRRSKqSANEnQOlAUCbO6wgdPlmcIKem7EecCQUiyomDfNngXUM9/CqaJYRh70jTl8uKGrhn
ZrrZY6xAKAoHl8Zx7985QSpJoydFxyWw+Q5tU4/1CB/EEpX4KmF5kfKIoin4psQQoiqIo+qZMWjd5Fv6hPCpfDj70dhzL87MFF08bjJI0u45hHHHWMl/McNYemmfTHOct2+2KqpzRtg3GGIQUXF7ecHZ+QrVckiiJzqCsE6o6Y7vtaNuJsjJMO/dzG3mDEBRlBsA4TUipSRKL1pIkTUFKnAgIKVlvtiilyDJDnmV0bctqtWGxOCLLUhCQmEPpUN+PJEnC0dEMqQTbbYdWirJMyDNNXRe0TU+WKBbLGbPZSTL2m5d2+/2H/p8/8RN/VQgRpwBFUfQtLd4ARFEURd+UBG4rpe5pnXRN276z33eE4BDCoaRhGEbafYMAgg+0+5bgAkmi2W032MkhpWS7XZHnBU8eP6Ke5dy7e4tX7h+TZYLz8yV5kaKMwCSa1bpDaUOa5kyjQEnDOFikVDhnSZKUYZpQMiHLNVIFfIB+tGiT4exE22wJwTH0PUZrrHVcX62Z3GHhmBSaPMtZHs0xaYo2CbNZzt17C+pZxmxR0I+W1arFeU8Qgtkyo+lb2Q/2tp2mj/z+7/3eD73o+ERRFP1SYgIQRVEUfVMSuAom/H0r5f8rzYqf2e37p/1gycsUbTRgqesKrRSbzYa+H9htNqxursmLkldfe5Xr6wuKomb7fCLQ/VdeZnlSoXVgMS+ZzzR5phA4ijxltx3omhEhA0MvmKxDCk3T9DgrmCZLcIHdrqMoE5JEkCjF5uYGN420+x0311esrm/o+4Gu65DA+mbNdrVju96xXW8Zh4k0M4Tg2bctLoDWmsVRhjaHGwcXAjo1CCPJyoRXX7vF9epZ0ux3CzeOH/6J3/W71IuOURRF0S8mJgBRFEXRN+X//slPXj252n4xkyZXmgtj5Ho2LzG5pKg15+dn5EVON/RM44SdJqxzlGVJWRY8evSAxGSkSYE2ku/+9d+FtxNFZkAIjs9KgnQcnRV4BNMEfWvZbQ9Lubq+x+Ox3tJ3FpMYCIIsSwkh4Kzj7HhBCJ6ub7i+ekLb7piGgdXNimG0OBdIM42dRlJj0FrRdyPr1YrjoyNOTuekqWK3aQ67C3qL0po81yyXOWmuOblVUtQJt+/c5yNvfCzt993d7X7zsb9eFG+86BhFURT9YmIPQBRFUfRN+/aLC7FanDzNs7xJ09SNkyPNDFkxQYBd0+LsYT9A0zTMF3NcEFw+u2JWz5nPl0xTx7d9+2sYLXn5pROEdCyOa7IcjMlodhNppmn7lmpW0PUjZVkQCGijafcd1gq8CwgpMIlAKYEQgfk84/h0xnq7w2mDEJKqnKGThLOzc5rtFucGFicLdCpJ84S+HzE6cHl5xfmtJcv5nN22OzT9phohBG1ncVMgrw0ISZoXNF1L27RpopNMKykIfWwCjqLoW1pMAKIoiqJv2g/+5E9afvIn/8F//Hv+46d9P76Kn14t6rJotpZ9u8f5wGw5x44TCIHWhm6347VX7nF8suDy8op6dkRiFHmeM5uljEPPyXGOUoHHDy5JM8Odu3OUEJgUnIdxtDT7ESU0m3WPQNA0ijQLZFlOmh5q+00CL798yrNnO6ytSBKNlAqtJWmW0OwCRVmTFyXOWSAcGnrnGaubLX0f0NqRZjnrVctsYWiakaaxECRBTUyTQ2tJWdQYk4lpGk+a7f61rK5OgbdfdIyiKIp+IbEEKIqiKPplO3v57BHBfzlP05CYQJ4rZlXB2fmCW7dP0UlCPasYx57jkxn37p8fbgTmS/KsBGC+SPBOcHw6BxF48O6WyyctxghubnY45wnW46fAzVXLzUXDbtPRtYFxklxdtIxjwNoRk2i01mSppkgzyrx4vvBrRlmVCCnompb1ekdVl1hn2a4Hxt6x2ewZh4mqqtk1LQFJ1w84Dw/e2/P+uze03URWKu69suTsfEFA8PTJFe3QM/Tujh3dS2EMcQpQFEXf0mICEEVRFP2yPXnyJBNSZ23reh8k5Tzl/O6M+TJlsh3nt084PltwdueY07MTHj+6IEkSlkcF9dxQ1znaSNq2BRFYrTo224HlcUkICikU1kLfjdSLhMlZnl2seOedG64udzx5tObtr13QtQ4pBVIFgg+YRFJkCcfLEiUlbdtgrWUcJy4untK3e8ZxwgfBODmePblkaEfe+dpjnj5e8fDhEy6ebnjw7jUXl2uywuC9wxjBy6/NuXNvTl3lpJnh9NYRWV7Sj6No9k092fG7P/GJT8Qb9iiKvmXF/0FFURRFv2y1yj42ON/umu6tpSiOA4EgQGnI85y+n3Bu5ORkxvpmT1nkHB/XZLlkGCzBa9rdRN87trsJkxiy0tCPI9kk0Drn+mLL0UlOOU959qxntAHrAs4F7AR5kTONFiEF0guEkmxu9hRFwfFRzeMna9qhYxh6hq7l6uox3nvefPMrnBzfYr/b0O53nJ/fpu8daXpIRt786nsoqfF4xsny8stnvPTaES+9cgtjBHb0vP/OBYiUs/N7+NGpQHOn3e6/c/XkyR3g/RcdnyiKon+RmABEURRFv3zBvDeJ/V90k/92O7rfnCSCqRdImTCODVIJTk6XNJsRrQUnxwusDXinkAqadsJ5iQvQNAOLVFMtCnbbgSTLCBKur3fcfmlO8BKB4O7dE7JMUxQVm5uBu/driuJwSyCQhCAAzeXljv2uZ7/fIQ1sVhua3Ybdds18ueTRw3fRQuP8BBrW+zXLoxOyLOXkZElZ5gDU85Lj04KqNmR5gnMTBIEIgqOTOV/5maesV2uGcY/WQQzd8G3vvv32/+bj//a//V9+26//9T/1gz/4g7EkKIqibykxAYiiKIp+2X7oP/+hiz/4u//gmc38zBgVTCLFoB0IOL+zJC8Stus9KhEsZzO8hXHqmJznZrXh5KSgqheMduTW3QW7fQ9ScnpWcH6+YLeZkImkqErWq8P3OX+pxkhNmknGwbJYFtipZxzAewtBEDzcrDqeXO3QmUFJUFJQ1zOSTJMXM85u3aXvWs5O7mOSlMVywfHpDCECi0VNPU8RAoQIVDNDnifgPe2+RUpxSDQE3H5pxpf/8TX73YoklZPQtfMifDQE8fu/9uUvj8DnX3ScoiiKfr6YAERRFEW/IlsvpnvLPKnnhej7nsmOFIVBG8HX37pCCkU1M2ipQDrKekbbWe7PzqlnmqtnW2bHFR7Pfjdw+86SspRM48h61XB6eoS1gfW6o55VSCWxg0VrwzhZvPdkWcI0ANIx9CNKCax3SK2pqoq6rjBGI5VgPp/T9QNVXWOnkbqeIwSkWUJR5ICn60b6fkSKQJJqunYkzyfSVKG0IAQPSMbRUc8y7ty7w5urS9V1+6KalWut1DoQGNv2e//9f+ffST/9V/7K33vBYYqiKPo5MQGIoiiKfkXyfBRtM17vti1JJimKnHEIPHt8jdGKss6ZLxKchbJUOO8RWuAmz8MHG/IiRSeK6+uOqiq5vt7iXE6729MPE8po2nbCTgKtEqZ+ZLftSdKMthO0rSNZaHywBA+TFQQpmC0rmiHw6OEKKHjp5Vdo9g3lrESnDucm5k
dLpsGDd3Tdln6cEBKkEBitMFowjg7dSDahIy8T5suMNFW0XYdUBp0pPvwdL/P00VOxWU9hGMeiXMwfVmX1NYLIvFK/7T/8D7/nwZ/+03/u4YuOVRRFEcQEIIqiKPoV2m63m2VZvydl8Eli5DQ4ggvUdc2QtGSFZrW2VLXBBUHbBDYbS9dOECQ600gEeabYrvYEATvleP/dS9746MtcXa0wpudrbz5ltLDbdAxdzzAErm/WPHys6YaC5TxBCAshEFDMFilBJGybhqZvQSpubtY0+w6pNUJL2ouWRKcUZY4PsN81FHlKWhiKQmO0xlmHcx6lDiVHq6sGkxx6GEwKIXiOzmZ85GNv8Ll/cH3cD+7VEMIXi2X91r/1O4e/8Tf+q/RYJrP2RccpiqLoZ8UEIIqiKPoV+exnP+v+rz/4fzTVrJBaS9ZT4Oam58nDDUopdLKjnyS7/cjNsy3OOXSasFyUXF2u0Lng7u0FwzSw2ux5/Y07XDzbkuYVV1drxt7z5uP3eOvNh/igaPYtU99zfLrk5nrDclmSJ5qqkBSpQkiPnTzGaIS0zGYLEAFCoB4dQnrqugIEXTdgdEKWGBazEuccRguSRGCMQklFcB6Lw3uBkIJp8ggkRiv6acJkimA8H/2uV5j8lH/5p790Z73evoxQv+Wv//XzL3zyM5958qJjFEVR9PPFBCCKoij6Ffmjf/SPnnSDfWOcfJhcEOM4keQJ1bzAjmCdY7ft2O8Hhn5EBMH8SDKMA1mRc3y8YLKez/2D9/jIR+9zdblnvw2UNQgvkQK+/MV3sdZzc9HSNnsuLp4xWYkShjLNmdcZiZEoI9BGsrlsmbzA+8NugL4fmS/mCCRVlaC15smjC7p2ZNIjUjjcNBGER5U5zkmE9UjjyXLDMAiGfiKEw44B5xxaSBCHcaDGSHQBb3zH67RNN3/y6NG/1XXTyW7V/C0gJgBRFH1LiQlAFEVR9CvSb/rFLM+SvDCi70eOT2Y8fbYlyRTDYOkHR9d0bHdbiqLATwJjUpxVLI8LVjcDb37lMcHlvPP2NSFYyqokS3Ka/cjQeSarUEoyuQGPZTarmYaRs1tHID1FFcgygRACgcA72GwnJufQxuDaHu8d7b5HAk+fPmCzXpNnGVVdst3s8N6TFyneOYwxVFWGsxNaKaRUCCEwRuOcxQFKKRIDaWHw3tN1gdm85PU37ov15W5h3f6+HfyHgZ98wSGKoij6Z8QEIIqiKPoVGRh2y+Naaynx1jMNjvk8w/tA10+06x1BHMpyNqsNVV3TtnukLNntPBJNmuTkC816s+Olu6esV3ve+vKGalaw3uw4PT1FKUmWa9LkcJKvRWC+LJjPE/I8IdESpECqCe8F223HZrsDKVksKnbbHVeXl6xvJFdXl0gRMFqx3W4x2qCUoNlPDP1ACIFdnnJ+6xSZSvp+RGtF2/Z0/YRSCoQgzVMgEELATzB0HcvjmjSXPHv3QpZ18sp/8p/8J7Mf/uEf3r7oOEVRFP2smABEURRFvyJKqfT27dnHpFQMgyPPU8ZxwmjBfJFQ1rd59mTLYlHQd46yVty7f0S7n0AoVtc7PvThM7ydeO1Dx+x3HZuNRyUpaa44SSrKArQKLJcVPkwED7MqIUkkd+7VeGcRWqGfJwnjODFNjpOTJW07QhDUszk35pqLp89QSqAU1HXFdrMmUQIlU6z35HmJ9yPDZLm62nD79glCKsbJkmcJ82WFkB4IZJlCG4W1Dms9/fPk4PU37rO6vDib+vHW1DSvf8/3fM/bf+7P/bmYBERR9C0hJgBRFEXRr8jHPvzKx49OF69eX9+gUmi6jv1uwCSK46Lk8mLHrfMCZ2G16rj/6jEmgfm84vGjG15745iiUOx3A82+I0kT5vOK+UKyXbccHdf4mT/U3csEpIcgWBxl2LFHKUWeG5yfEFKhpKSeG/Zv7nGTZ3IjWpeEEEizhLwo8N7ivSWvSooqY7/bI5Sirkrabsc4DKRZTnCBaRxACpw9NACnwVGkmrIwJEYiRCBPEwSK9aZBiMD9+7d48LXTrB322nmfCyGOgZgARFH0LUG+6A8QRVEU/dr1/d///XdObs/+PY9HqMMOAKk0LgjGyTGNgVmdcf/+klmdcXq7xIWBQGCYWorSkCQpFxc70jyjmmUI6clyjXcT02TZ71qyXFJXOWM3kCSSaZroOstkBetVg9YSCFhrGTpHWRxq+K11SGmw1rLb7jk+PQUBdhzJ0oxpnNBJxuL4iJOzE2bzObP5DOscQ9MTnMVayzR5EIGyPpT89O1E1w/0vaXrJpyzSBUoyxQpPFJBNSt113VjtVx+Bbh4waGKoij6OfEGIIqiKPplu31+8juKKnvDEzAmoe87pnGkrjOMUXRtx727x+y2PcM0UVc5JgmURcHlsz37bcf65pKsyNhtD/X3eWF4+GDN1996wEv37rDfd8xmNX3T8+ZbD/mOf+0+aZZzcbknNQaTJOz2HVmqkELinSd4Tz0rCKGHACZRJIlBSs3prWMev9dx+9YtnAhIITi9dcrl5Q3KC4IPKKFJ0oQgwJiMotY8e7pmtxUsjiokHmsdSiq0FDgHfT/ivUcbQ3AgtRRaquZP/Ik/cRNCeNGhiqIo+jnxBiCKoij6ZZuc25okkdZZPBNpmnB6PGdW5TgXWMwXrNct+7ajrBKMhrosCH5CKUgSTT1LWRwlXDxdo6Th+mJPXWe8/Mo5s2WO84KLZzv2zchqveG9dy+5vm6xFqYxsN3u2W0HJhsQSLJMYcwhEdCJ4fT2nNksZ1anSBV46eW7jG5i02x56ZXbnJzNETiM1NhhYhws292WzW6HB8ZxQErJbFbRtY6hm0gSibMgCBgjEHDYcCwk4zQxuYmT01NMmhW///f//vmLjlMURdHPF28AoiiKol+2o0X1hjGmDEJDAqvVjr6xTJMjzTIuL28oioKz0wUXT25IjERrQVFVrDcdH/22W/TDyM3NjvPbC/zkKCvDSy8fsbrOMCZlGif225Zbd+bMZhXbdQ9o8lxRHkmmIdC1I0pJqANCBrIsJUk1ow14H9BKo7VkMVPkeUJdV89n+mdkOVw+vWG33VHmFTerFq0VBIGzFp1qtpuWNEvoh45x0EgUszrDGIEMEDgkASBIkgQlBInJEU4f3749DC82SlEURf+smABEURRFvyx/7I/9sbqeV785TTPh8dhxoiwLpOgJPtB1PYvljOAFNzcteVHg/Ih3gaEbefn+GZv1huuLhqwsyPLA+mrk9Q+fIoJkNk+5c/cEJeDJoxtOz0rgHtNkyXKDEoH79+fstj3GKPrWYowG55FCsFwWOD8wjRDUSJ5lFIUmeCjLktl8zjCMFGWBEIqj4yOuLq7Y7XeHF3oRGIYJZx1KayCwPCpJE0WaGoRwCDTaKLwPKGkOZUFCIiR0Q8/N5uaVt79Q/3rg777gcEVRFP2cmABEURRF37T/9Af+0/JabX6bg0KlAiaH0gLXOggCrSRGGbp2ou8n2nZCK0mSQppJloua6+trJis5Oq4wiWKyjuxOTd8NhOBJ04Ltds98kdA2CfNlw
un5OUPfUc8KJmspkhSTSLbrDq0V42BJE4OdGopMURQJjx5dc+t8ibUBBOSV5tatY5zTSAkP3lvhpkPy0PUjSaKpqgUSMCZht+2o6xKVG2bH5eE/gBQYYwBHkiYMwwhCAeC8JThw3rFt9+XN+ubf/P7v//6v/sk/+SevX1jAoiiKfp6YAERRFEXftCt9pa8fXH/+Y9/1mtImp+8G7OQYx4nNpmfoR7RO2W5HLi82gKEoDfO5Ic1L1uuG3c5irSDNQXko8gTv4OmjLVmuCc6y2YwUhWJxXIIAgSfPUgSCsjAE6ymKBGs9RarY7UZ2+xaQBO9Yr3bYySOlxDtHXZckCQih2O0G8npCSWi6Ae8tR0cn9FODFBqJJssMWh8SB6FAaRAIxnGiqnKkONwUaK3oh0MPgHMOPwb8GDhaLFfD5I5XTy6/G/jrLzhsURRFQGwCjqIoin4Z/vgf/+Ob8mRZzub5R7U2SKURSmF0Al4xdIZ/9I/e56d+6mvsWwcSxnHCeckXf+Z93nrrhsePt1xebdnvAjfXDW4MXD3dEIJEa8XV1R5rwduAnSB4CQG63tKPE207MPQWpQSzWUKaKpJMAB7cYWa/c4KzWwuUgYDEeYsQh9P7zWbDZr2naXtAYkeP9575fMZsXpOmmizXHJ/OWRzlVFVKCIHEaOzkUEqjtUJrTZomBOeYRosSijB5us3Oz4rqC3fu3v0rIkxfedExi6Io+lnxBiCKoij6pv3ET/yE+uJbX/kN+bw6HoNApwWZDzS7jr6feOfdJ4yD49atOVmRgRTUdcHF0x37fYeQgo997DZKS5482nJ2Zuj7nrYLCOnYrC3eBnbrgVQlCClwztJ3jmeXe155/TZd26B8IEkFQ+/IUoUxiizVNGNgmiwmMXjvGQZQCoSQSK1puo7NZs3Z2RlBgtbmcKpfpygZyDKNKAxpekgasjxB6cPtg5Ga4Ceafc/ZWY0QghAgyxOmwTFMFpDkWdbIJPzVT/+Fv/CTLzpeURRFP19MAKIoiqJv2uf/5udny4+c/Ka8rJRWGcJoptFSzituvyq5dX/JNFrGfuLpRcN2PfHO158yjYGyzpDKcXm1odlZiiJlnODRwy277cRimXPn1oy333qX01tztluN0h4hJYt5QZ4FusZSlQWryy2zecZ6tePsvEQIjxAepSVFkbF79zHOBpRMOb89I00MY+948vgCrQ1d15GToZRCSEdZ5RSFQkiPJ5Aag50mxmEieEldp+SpRJsCKUAqRQgB7zxSgtaCgMY6j3Ny43Cfe9GxiqIo+ufFBCCKoij6pqVn6fkbH37lt+ZFroauxzqLTlKW+Tnzpcc7y+rqipura2aVZrvp2e/6w6KsxKNNwuVlwztvPWVW52y2W7rW8sqr9+mGjqvLhmdPdxTzkvV2Rbffc/feEe1+RtOObLcD3/Xdtwh4bq73dK2lb0dMIkiThL4dkQJsP/Deew84OT7lpfsz7GhpG0vXWdIsxVnHbtccGn9nBSFYQHByWjO6ESMlWmm8l/SdpWsnEinwHrLCELxHSsW22ZPoBKktaaLZPt1wvbp+tzw5Wb/oWEVRFP3zYgIQRVEUfVN+4Ac+cZZk+j9KUvVRby3eO4I/NMdOk4Xg0UqTlyW3jEHIp5zblKmvWa333Ll3wqP3LvmHn/8iiTFsN5ph7NHK8PjRE6ZpIDDS7BuKuWG72rDfbNmuG9K8xCQpXdOwmKfUlUEphdIDeIkIh+Y2rQ37zYp23/Ds4bskWtCPL+GDYL9raHZ7jo6PkErivSfgCUHQDRYpBWkiYJD03YgqE7p24ORsjh0GxPPG3922o6oL+n7Ae8Fu15Imh/0Bu91AO+yeLYtz8aLjFUVR9M+LCUAURVH0TdntnqzmJ6++b5IsSGUwOoAfmCZL33dM48SsrjBZipSSs1t3eellxce+c2DsJ978yiPeeavjO/+1D5HnCeM0kKQZX/qZd2ibPW2zYrV6wvzolNXVBV/4J/+IulzQ7TqUUeRlSVHM+cm/8UV+47/+Oou54eysxlvHOEKz75hGQZYoHr3/DsGNGJUwtJ6ry4bPf+4LpEmCMQnTNOFdYD6fk2cJRZaQJoahnzg5ybm+Dgy9AxGYuh4lA0omtG1LCIHdrsNOFoSgHyzGKLrtxM/84y9ZJ2xzpHUcthFF0becmABEURRF37BPfOITSddNv/7W3aPfMV9UefAgpGCcRvquI88LyrJkmkaChaKsyTJH3zcgJV3fsjzO+K3/k4/hHCTGIJTg8tmaupqxXu+4enbFS9Ntyqrk4uKCxeIYGQQwMvSWfmiQBJ48XJOkmpfuLbn/8jFSeIbWIoJEaYHRgskOlLMl57dvIxF8/auPePTgkjt375HmOQJB1+6ZzSqcDxSFJs9Smmbk6CilLBMm25EmGq0kAU/TDjgnEEIwDhPj6MF70jSBoHj3a+/z6NG7T+69fv7ekNrkRccsiqLonxcTgCiKougbNo7JvF7q//nxafHdUnqsHfHeAhxKcYxGAGmaMU0T290G7y3jOBJ8ICtLyrpmHAb6bmK7aejaCWMShumGJEv4db/p20iN5t13HqJ1wuuvf4hUgxtHlBJMPrC6WePGkWbf0/eC68uGNJV0m4G0SJjNc9p24mhxTjVfUFY1XdPy9a+9RZakZFlBYg6Th7I8ZxgnkiQ5vNSPA7N5Stc7htGSFgnNtiURkiw3WGuZxgmpFEZrNjc7pFKkKdhJ8+TBhSvr7B8VVfGu3fmYAERR9C0nJgBRFEXRN2wcb9rz8py6qpZ28nhvEQKkgCRN0FozDgNSCJSSpGmCc5IQAvt9S3CBvEgYrWO9abCTJcsSemdR0jCOgcubPZePVzT7NUfHNaP1LJcls7xGK4lH8Or9U7brDhccCInSGmc92miUFEgZ6NoWKQ87Cq4vL7m8eML65pLT83t4Z6nKBUIElNJM44TWkmn01FXG6qbB2pQsFyxmGbiAnRzeg3fgPRit2W1alD7U/RthePTeJTerizdvvXTrHyZZ8Q8/+elPv/+iYxZFUfTPiwlAFEVR9A0TInn9aLn8LVVVahEESkqC9yRJilAKAuA8q82Ko6MjqnLGbrslMTCfK4J3GKMRQmKtp+t6/GS5c2+B85ZnFy2XN3vasaNte1770Ms8frrh2eUGc2cB40hiFLduVSg1cXp2wvVlQ1mlbNctyEA1zzk+L5hd7njy5Ovku5LgXbi5uSLPKjH0HdMw0Oz3SC1J8wTvA+Pg6FvLOOz40hff4jf+pg9zujxF2EBZGvpO0GwHrPWHZWDWYacAImCk5uai4+2vvnOTlPovHd1b/kgI2cWLjlcURdG/SEwAoiiKom9Ynut8flx9JHiPD/b5VwOBgJIChCTLU7q94unDJ9y5f5+sKCB4vHOHaT9GU0qFEIE0NfjRMY6WepZQVJo7Ly14dl7yxX/8dcbBsbpZczFaTKr59m+/gwgBC9x5+YhUa5JE0DQWrSTTYBHSYv1EWRW0w45Hl19HSolWiQ8BFu4oYK1q93uRFAWIkSQ1uCmwXm25udmgpKEscqbJ0Y2ONFdM
vaXrHG70iFwxjh6BPPQjPHvKO2+9M4xT+9O3Xj77//wX/8WPPXqRcYqiKPrFxAQgiqIo+oZVp9VrJtMnCIVE4r1lGDuE8LRdIDEZWgpC8CS5oWnWZEVBCAGBwCiDc55psuR5QWpSdpuW3b5jNq/xwTNZy9WV4UPf/hpDP/LK67chCLyDm/XInTs5xkBWpQgnUEwUwiCDYhotRZ6TZSnWbVBSYv2EQnkhcevmUqvL0GvlfZbW5ZwzKVwgM4a23XL57JKyrnn99ZdQSjDYCecOycvYBPqdY7NuSBODCw43eZr1mq+//WbrRfvf3v/Iq5/+7t9y9lM/+uMvOlJRFEW/sJgARFEURd8osahm/6M0y3QIIPCMU8d2fUNW5hiTsrq8pGla7r50j2WW471HKUXXt4fNuQSmcWTsJ5yyJEZydDwjBEvbNnSdRUpNligePrrBJJqyypnNEqbBUy8M9azk+KjGaImWirbZ0rUjznYsTnJMITk6XqLEJakS/rieXVRVebPZb892bbe8WD/N+qlti3TmjvdrMV+eiGeXjrZpMDrj9t3baCnodj1hShiGEZxAeM3N5ZaLZ1dICbvVNc1+7cZx/zirzT+699pLf2tRzv72H/7DPzy86EBFURT9YmICEEVRFH1Dfsfv+J66rqvvMEph7YQIkJiU2WJJ3/c4P5ElhrI6QWmN946h7xjHHu8DAonWknHs2G33JMagZwW7ZgDhOT6pSJPy+Vz+HJMKQoB5XRL8SFlmLJYFfnIoAcNkScqUol4gdYcEAo68TFFas9s0HB8tL87M/O8mWdotmtnpg8dPvmvftkebZl2v92uerZ6QJCkSxYde+Sgnx8dgoVn39JuWLDPYyTP0Aw8fvc/N1TPGqWezu6Hr9+HkaP7F1994/W+hxGhM8aXTV16Jdf9RFH3LiwlAFEVR9A35ju945bQsk1elkrjRMjlLKlKUNPTNmr5tkSpw5+497DTQ9hPjMOC8Zb/r2Kx7yjzl+KxGCYX30LYD7d7SNHuOj2ds19dUVc7yqOQ3HL9OcJBlCbvtjsePrmn2I3WdoYxBIRnGEZMkBKXIyhyPJy0y6tmSafLM8tL//9u7txjLsrM+4P+11r5fzq1OVXVXX+Zi2vaMIU4MRBYKD45iFEhkRUIzE6EIQSQPQjxFPOQlklXP5M3CSC0UaQiMYk8uOEEREQQrCQaMid3E0z3TM9Pd1V3XrnNOnbPP2dd1zcMZ8hLZ3XbGqTL6fi+lUpWq9v72Q9V/rW+vrzeO70W9/JZzLuz3B+8eHB3/6KIorkmlkk7JuGnLKPIjY6Q0TVWAGy3q5VzIToHBwlqgk6299/C2KsqZl8fRKgyDST5Iu8HG4L0wTSb5YPDQAj7Wg4jteT8rQgj5TigAEEIIeSqXrgx/LIjFNSkljDEIwxDOWVijUa0a1HWDa89so5MSTdNgPi3xcG8CLjjieD1dVyuJebEAAMRJCDiGatUifv98fcYYFosVUm0gBEeepTDGIowCxPF6srDSFr1eDGctpDSw1iBNUmjPhx/4sA4QYYjR5gaqqu0Ntvpu60eu/94Io3rnmePf335w6ScODvb/wfzs7AWpdbJcrTaatu0dnrznjK4LKdWIe9x1TQuPMx2GqYJwEkx6WS81ERdyY7Sx//yNG78Xp/FEOevgR//V47XZ3d015/uUCCHkySgAEEIIeSpCuBt+EAgwDjCACx9wQLlcomoqbGxuQvgejHWI4hTjbYGN7T6atgVzDIvZCkVRIwwFBoMeisUSTaMxGCRYLGq0rcJsUsEYg6vXPYSRj1W5QuCHiJMIV69fwunjKbRWqMoaxkhYC+R5Aq0ktNEQnKNedVgUBT76sWfgxSLuGpmPPjZqd1/e1QCOAfy7V175R7c2F5t/r67rG1LL8XJebBbL4mrVzpOmaWtjVMDB9M7lS3eCgDfLstgZ9LNqtDE6YNzjztmgLAv74o//+L/f29uzr732Wnvez4cQQp4WBQBCCCFP9NJLL4n+ML0huIBUHQI/hNUGXVujWpXgTEAIB0/4WJU1rAN6WQbHHBwYdKfhnEVTKURBjtWywWLeQnAPxgLTSY3paQUGByUdrAb6gxhNK7G5nWCEAXzPx2g0xLJYoViUGG/24HkejLFYrSp4noBlHMYYzOcF0jzC5taQr5bVxuorp9sA9v/qfr74xd+9xxi799nPfna8nBz/jeeefT6aPn783Nls9ndEIDQD74zS0XA8eidN4mJ+NrvhCV+HUVQ454xSyqVJFp6+/fbGa1/84v53KB0hhFw4FAAIIYQ80Yevf/h63k9+QioJZy0YHOpqieVyASkdnLWIAh+rZQljgd6gDz/wwAVHrzdEuSwQhh4AD4tihbbWeLh3jDxLUBQlOPcgW4XAFxCew/7eFPeVRt7L0TYazPnY2MoQhB76wx60nkFrjbbtYK2DYGI9WKxtwTmgWoVKWWxvb7blqpqEnvd/rdA753Dz5s0pgD96/3P+T//Jy18xxnr9bHhmrBUuDJde23qD0eag6zpYp3/M88Jv/f3PfOat3//yl39IrUefEULIDxQKAIQQQp4oGSUx5yJ31iGMYihjYQFEaYzT6RRaOUijwITAeDyC5/vwgwBGWSxXBZbFAk3VIIqBoZfjRC7gXIA0i6GVQ13WYAxgDChXDWS3bqXvZAPZBni0dwLHNjEY5rDGYVlU4IwhCNbvDiRhCGEZpHNQygEw6NoOw+GY9fvL4r2fPDnD57/zPTLGLIA73+bLEwB49dVX927evKn+1W//NgDc/YDKSwgh/1/x874AQgghF5+1dmytXQ/WMhrCE/B9D86uV9K10bDOYjAcwjkL2dVYFnMs53Oc7B+iWlboWokkjTEcpGhrjWJRo646MLfeQVgtaxijYGGwvZPj8pU+PF9gOlvBOQY4h6Zp4fvrtas0SxGEAcIoRJxnEJ6HNI3heRxKGTjL0DTK87xo9OLtF9kHUYebN2+qD+LnEELIeaIdAEIIIU/EOZ8yxsDeXzYyUkIpCWstfM+Hn3lI0wx1VcHzODxfYH5WoK5b5L0UaZri5OQUxmgweHh8dIYkCpH3YnS1weWrfYANcOXqBoQA4thDUyscPFrAOQdjNKwBOGNQSv2f6+CcIQgDaCPhBR6Yc/B7GYqihJQKkR8zZ50+Pj7+QAIAIYT8dUA7AIQQQp7INI0RjDFr1/+My269Ep+lPVhrkCQ+pOrQtDWqqkXTtkiyCP1BitHGAMYpJHGILItw7+4h7t65j+EoxvRxidWqwaKo4Qc+pFTwPIGm6hB4HDs7A/T6ERjjOJuWqKoWd9/ah9aA1gbOMQRBCM8L4RyDsQbWWsRxCMGAclUJY0x69/Jl6tUnhJD3UQAghBDyRHVZKqWN1EYDzsJYAyF8dJ1CtVwhz3M0lQJjDNoYCO4jz1LkWQptFDzuo60lqpXDnduPABiMNzOMN/tgjCPP+uAM2Ls/RddaDIYDdFJDBEB/kODy1Q1YZ2C1hmwcwiBYtwtZA9lJKKnAGQMX61OFwBh834ezRhsl848+eLB93jUkhJCLggIAIYS
QJ1JSGiOtYcyh6xQY47DWYTlfYnM8Rtd2WC1LWKORZwk84UGr9WAv1Wns3TsCA0fT1FitKrz4wz+E8VYOxi2CgENJCThgMEhxNltCa4fNrU30+znC0MPmVg/Xnt1CEPrgwkHJDlmWwvc9hGEAMIe2beEYYB2DtQ5KGmgltbWGced6511DQgi5KCgAEEIIeSqy6VRbN1C6g7EOXdcBsPBjgST20e/FcMahbdZTgRljaOoGMA7j8RBxEmC5qrFzZQtXrm5gNilRLBqkeQxjgfm8QZKFCGKBs8UC09kCbdsiyzMwMORZAiF8JFmMMAnWL/qCgXGGIAjABIdSGoxbSCXR1BJHJyfKi8RDZObkvOtHCCEXBQUAQgghT9S2TBWLatk0DZSSULIGHMOyqNd/SDgDe/+jdYDwOLRU6CoJByAIAGMlulbiIx+7iqvPDBFGYv19SsHzAGsd5mcVpicVPC4g2w6yW6/oZ1mGKIoheICyrDHeGINzBs4FtDawxsITAs4xMMsAa9A1Cm3VNM4Zp1SSnmsBCSHkAqEAQAgh5ImUpybzs+JbsrHoGgvVGjw+nmE2L5EkETgDuOBQnYTgHNYYKKmRZhGMsZBKwWi7btnxPWitMd7sY7yV4kMf3kSahbhydYDhIIYfCJxNS3DBwQXAuIFUEtZpOHCUSwkpJaIoBucc1lgYrcEABD6HEAIcHs4mFRh4yQXrfGOC864hIYRcFBQACCGEPNHNmzdVVXQPilmty6LD4qzF9KSA7wXrc/kdoJUGB7AqljBKAwCSLMFgmCOOQtSlhOcJeB5HFIToWoOuVXDOoWkaSKmR90KkqUDdSMznNc6mJepqvevQNjWK+RKcR3AOUEoCAJSUUFLC6PXvN0qiWio8fO/ICY9PkjSeK2Pi86seIYRcLDQHgBBCyFOZTeYnUrlpkmeXhAeUVYvxdg/GWFijYbQFZx6clei6dn0KD9YtPgwRHj44xZVrI8SxB60ZurYCA0fbWGyMU/h+gDgOoXoGYWSQ5TGaSmJ6usRwxBDHPrQ26LoWcA6eJ6CNQZzEUEpBKw2lFJzjWJ21eLT3oGaB2gdnVdzv7593/Qgh5KKgAEAIIeSpVPXi4Ojw5KjXG/fDKI21tdjZ2YQxClXVwGqgqSskqQ/nLMI4hGAeFqs5ymWLy1dGGAxTCE/ANgpccAyHKYyx2NzuoViUmEwL9Po5otggTSIEvkBTK1SrEs4lODxcIM1CtHULN8jgnEUQxrDWQjAPUmloY3F8OEXb1mdRxBur9fTzn//86rzrRwghFwW1ABFCCHkqnoe7Fs2b3/zGf1/e+p9/au7evoN77+5DKwaj8f5pPD58zwdjHIwxODD0ez10bYvNrR7SNEG16tatPXWL8XaOtpMABNI0Ri/LAWcBrF8wdjBgwsLzfTDGsCoqcC6QZgkYE7CWQXAPzjC0rYJsJYySOD48Qae7QvhcSSmvfO5zL/nnXT9CCLkoKAAQQgh5Kh968cX742H+n5LUuz+dPGjfeffruHf/Lpp6PQCMMQdjDKqqgzMcxrj3e/dbVGWHplZomhbCd/ADhvFWBmM18l6AYlGBMQ5jJAbDDFobCE/A93yEgY8sj6EU0NQGzlgordY7DEJAdhJtK1HXNTgHVKvQtdIq3TWcBzIIw9M7d2DOu36EEHJRUAAghBDyVHZ3d+3Ozs6fPPvcc18JPFb2876L/D6O9mdo6vVxn1wIKKXRNh26pkFdrbD34AhaG/T7GYx2OJvWaFsFzgSscRhtZGAMaJq/mi+gEPg+uq5F20o4Y+AJQAhAqgZpxtF2LZqmQxAIdG0DxgCtJWSjMH1coyjKWd5LD5yzjgl364033qAAQAgh76MAQAgh5Kn92q//+tF4a+v1q9evfXVn59Lj04NH7ut/fAeTkxqLWYvjowJl2YJzjvlsgZOjCVZLjThJ0HQNmGBwjmE4ytHUEkZbOA3AOeR5hiCMkCQJ8n6CMAigNWABdI0EB4MxQF1rwAHlaoVVUWPv/jFmkzPIRmJ51uLhu8dutVrsjzZHd0eb23/e87Pj864bIYRcJBQACCGEfFd+87d+6/YPf/zjv9bvhW8+PninffPW1/HO7Uc4OSoxnVVoaovDwymcZWhahVZKFEWJrtVwxiFNQ/T7KTzfw2LeYDZdoesUjHEQ3rqNKI4i5L0MXDBYCzjmAAD9PIeU69BglMTJ0RTv3T7F6cEKi0mLx/tLvPXm27UIzH6c59/qbW7+2994/fX5OZeMEEIuFAoAhBBCvmuv/MIv/EWahn88HMaTsjjBW395C0cPZ9ASKBYdmAtQ1x2sE+hajSwLwWChtEGShmhbibZVsEYgiCP4YQBjNXzfQ1XVqGsFP/DBPUB4PrwwhIGF1gyytlCdgbWA1Rr1SmI50zh6WODPvvoNnM0nR0kv+Uo2Hv/uF77whfK8a0UIIRcNHQNKCCHku/apT31K//LP/dy/rlbV39SGi/nkaOfW1/6MPf/RFzDcGMMZh6b14QU+rLGIIx9ceKhWHYKQo2k04AQYtxgO4/XMAOfgeR44t6jKGsLjyLMcq1UJrSyC0IdWLWRlMZuUcM7BKWD6+Azv3b3nymKql9V8cu25nT8aX9v8Lzdv3qzPu06EEHIR0Q4AIYSQ78lvvP76/esfeu5fXn/22lfDGIeHB2/bW3/xpzg9OsHZ6Qqz0waHD2eoK4nlskO5bFGVNaxxUJ1BnARIUg5rHTzPR7EoUSxKWOsghIe6Xr8UzBjDclEhED6iwMPZdIGmcjjcW6ApFSaTffvNW/9DHxy/VThWPw7jYD/1En3e9SGEkIuKdgAIIYR8z67eePw1pS69YazTbdf83dns4aVvfqPF9es3sH3pKiw4wsTHeJxBa4a2sQgjha410LpGfxDAWYem7rBaSSzOzrB9uQff8wHnMJ8tYZ2DswyrokQcBfj6u+8hTCP00hS6A9IsrcOYl0kaT+I0OZVS9qSUtMBFCCHfBgUAQggh37Pd3f+mv/SlL/2H//jGv8m06hLhH7w4nZx86K3bczGfnWIw3EYY93A06mHn+gCzWY3eIEVZ1ahLCc4yOOvgXIv5tMF0UiPLE9RVjX4vAOMepFLwmcDkqMTx4Rke7b+HVTm113aeqTbGg6qp62o4Gk56WXac9npH40sbX44nk4fnXRtCCLmoKAAQQgj5f/Lyyy+bf/ZLv/QHDJBhEHza44ydzhZX7j+6ncSTh+jl2xCeQxi9ACUdJo8LFIsGqjWolxF8T0Abg7Lo4DGO+aRBXXaAtrAOsNrBKI67b99zd27fLvxAV1KeLWYLd8S98X4QCnb1mWvTMAiqJM2/9jOf+dmvvfzyy3TuPyGEfBvsvC+AEELIXw+fe+mlYF/g00cHB/9wNp9/4vjxZGe5Wo58L2FJsuF95CMf967sXELWT1ndGHAORAFDf5AhiDkevDvFoJfCOANPcAjm0EqHalW5k+Pjav/gwUqa1UEvDee9XrY/HPYf5b3Rbd/z7nmc687a+vqNG3u7u7v2vGtBCCEXGQUAQgghH6iff+mlK0o3P11X9Yf3Huz9ZF
XLUd00fYYoGQ22ok/86N/2Oq1ZmiUwUsIPAviBhztv3sfWeAudrBFHMXzfubJt5aOHj4q2m76zsTF6yLjTURic5Xn+IEmS//yctfu7b7whz/ueCSHkBwkFAEIIIR88xvArv/iLO8fH+/94uSz/1sHB4QuL1XJrVanRR559oUqzXjAeb8aC+77R2hpr5P+686ZJ08wYa6zHRcu4bYpyzja3xt9MY3E8GA5PwjDsoiB4nEfRH3zhtddOzvs2CSHkBxEFAEIIId83//zVV/uT1eJThyeHn3609+iTq7rrp1FaeF5QJ3HG0ySPqrZ1Z4upX3Yl4jieJEl8FobhMk/zZjKdXd+5vPXmaDg8Zpy3eZL8eZhl92nAFyGEfO8oABBCCPm+cs6xn3/llZ9aLpY/07Ztbpl7FIZhaYyzjDGure6XVTWwDCwMk6LXy96Kougdz3hLLfTQKrUVZ9le5NzsN3/ndw7O+34IIeQHHQUAQggh33fOOfYvfvVXrxog/sQnP7l3+w//0CvznNnFIlxI+SNd2xaeEMo53z7/0efv7e7uUl8/IYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCvjv/G4hj+Ujq//mNAAAAAElFTkSuQmCC\n" - }, - "metadata": {} - } - ] + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAwAAAAIACAYAAAA19gs6AAEAAElEQVR4nOz9eZjv2V3Yd77Pd99+++9Xe91bd+vue293q7slIQmBBGZzbCdecDPGBgQINyBbYDDYCSRzrXGSmUnizGBsHJPBMTAwCbKHiVdmMME2i3apF/V2t9rX37589+XkjxLEfjIzsaUWRbfP65/7VNXz9HPOqd/pOp+zfD6gKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIqiKIry1iEuugGKoiiK8kb79V//deOle9vvMQrj9Q996DvOLro9iqIov59oF90ARVEURXmj3d3evlmUkw9nev9b/h+/8subar9LURTlf2FcdAMURVEU5Y2WFJMnHX3+TKvm32R+70/8v/7J3/wXWez8xrd883f/TxfdNkVRlIumtkQURVGUtxjBX/3PfvRvh/39D4gy06oyNZc3m+mjt28cV+bS3xqcrPz0d33XH59cdCsVRVEuigoAFEVRlLeUn/u5n1t66TO/8T/uvvLiu6sypdFokKZzbj2+xNve9VSie2v/aFZ0/stv+ff/zCcvuq2KoigXQb0BUBRFUd5SHuw92Do9OrrZbDYlGqDndJeW+eTHXudf/tNfd4ge/smue/aLf/tn/sa7LrqtiqIoF0EFAIqiKMpbShlmW1kU+45vi95Sj/liQa3lYfktXnl1xD/7Rx/HKgfXNtbS/+znf/6f1C+6vYqiKL/XVACgKIqivKXE8aKazeZ6npesrK5y+fIV4jBha2uN9Y1L7O7OeemzDwmM+dc12w//0kW3V1EU5feaCgAURVGUtxQ38KP5Iip++2OfYu9wl3avSc2vc3w8oNGus3llncEgZnxwjKtPPvQP/ukvvOei26woivJ7SQUAiqIoyltKu7dkPv2u95xtbF7l05/a5tOf3ibLDDyvxSc+8QK249Bottl+cMLZg9datjj6q7/0q7/auOh2K4qi/F5RdQAURVGUtxTbsO+2l7uLxx77QxzsnvHaK3f5/Kv3uHX7Gtp9nXt3j3BthyzLCecDhLf9B9qb7f8YxI+CvOjmK4qifNmpEwBFURTlLeX9H/rQg6PD/sm9u/eptzTe9weeZnPrMkI3yQvJyemUh7t9JpOC1+6dcveFB6KYHHzol37pp/74RbddURTl94IKABRFUZS3lMc1LROYDwfHEwZnfQwr4cojLZotn8ubl0kzyb3tYwzTo6wk80nF7LTv+V7xfc8++0v6RbdfURTly00FAIqiKMpbi5Rowvr03buH1T/+B7/OL/+9X2PYn2FbFU89c5ONjTXyRHJ43MfzG0znIaPxHPL4Hc9+a/T2i26+8nvv1q1ng3/9O8/qzz77Q+7vfvXssyowVN5S1BsARVEU5S2n2Wy81O4szzStah7vD/hE+Rpf/f5bPP62q+wdddh+uM9gOCVwl7Esg9F4jne43V65VvvgnTu//tmPfORri4vug/LG+57v+fNXWp51dXU9WDMtHovm+aC3UnumKIrNLL19Lw6LiUCbhPPiyeksPfqub//Q33cC+XJZ1itgetHtV5Q3igoAFEVRlLecoFkfN5rBXNBoanpJVWn81r98DaFLvvJ9T/DyS3u8/vpdGjWPS2tdHKdOGqdU+fDZW2/b+fSdO/JnPvIRUV10P5Q3hPZXfvwHP9hueM84pnxHvWZsuoG2JEDEdZcwHuG7Buur9fdr+MhCMBwsOOkzcafi6+Zh+tuaXPzzO3fu/L2PfOQjKjBU3hJUAKAoiqK85ZRlWZ6cnjiTwR5+zefGozeI51N27x9z+/FH+JY//bX8nZ8Oubt9yBNve4QKSbPZJJ3OWqZv/8XNzZ/5/wC7F90P5Yv1rP7+93u1p2+b6zeuNH7Yd7VvT6KpGU8zTvcSzs5GzBYhaVKRpSVe4BAEHr7v0+zV6Cw3WF5tNC1bNLPd9PYile3+fv9jqM+E8hahAgBFURTlLafdDsaNZmcez6e9fn/Apz75adqdGvWazf72CYGn803f+B5+67efZ2fngNuPrZCnMZrm0mrnjxad4t2oxd6b0oc//H1Pry81v6/TMd5Wc4xr08Gwe/flU/pnE6IkJ0kKBqMZaZ5SlhJdCNyFhxBzDN3AsAw0Q+PRm9cRQBpLTdfct49m8x/44Hd88H/4mZ/7mU9edB8V5UulAgBFURTlLSeHFctxvaDVpdapI0tJu9nl+PgeYZSgSQeZJdx+9Bpn/T7HJ3NcR6fXs0iiFL/hvB34Hy66H8q/nR/7y9//h69dafxko2FfmY9CPvkbryIrl6PTMXGSogmTLJckaUqcphRFQbfTIysqxuMBpmFSVAW+57NYRNy4fh3XdfB8cVnTat+VFWL5T//p7//wL/7i3xpfdF8V5UuhAgBFURTlLcf
x7LU/9q3f6JtawWI+4t6rD0jmAtPQOD4OKToFszBG6Aa67vPqq8fMJhMeu15y2TJoNLOn/i//3S83f+i7/vjkovui/G/74Ac/eHO9F3zHlcved0/O+kv3X4jY2z9BYjJdnHBy2seyHJIoQlLh+3UsBEHdIspiZrMZRVHS7rSZTmdkeY6IIk5Ojlla6mLqUq+5tdZxP/4aoRWPA79x0X1WlC+FCgAURVGUt5w0De1X7u/Y80mf5aUaV652mAwTFqHOfBpy/cYSR2cjeu0azWbA4cEOwz70W1OaI4etW/LJtebiL/3nP/mTf+3HPvzh4UX3R/n/7t3vftb96nct/5GtLe9/71ry1vBoqH3qUzvMFjG2bVOJgoOjE2pendlihmVZBPUaxydHUMHiJEJoAr/m41gW27u7WJZFvVYjSRP29vfJ8gxTu4zEwHXFymKcXkEFAMqbnAoAFEVRlLecxTA5/ORvPR+P+0eWrkvazQaPPnqNbrvJ4UHGqy/v0mq22X14wFNvv8mVa6s8fH2b4dCjfuShldGyqxff7BbFTwMqAPh96IPPfrD9yOOdvxJ45TdXyWzt+DjnY7/9EpN5ieN5GFJw//59akENhEa71WEynfLwwUNM0yLNMiQalm0zGI6p8hLT1PA8j0pKkiSlyFIMQ0fXdC5fWWd5ZUU/68+/8YMf/OBv/8zP/MwDQF70OCjKF0MFAIqiKMpbji31U0v3wzLTGqZrEyeSl158naeefpLl1S6H+wc0A4EsdQ72R3S7yxztnlGUGqbtMO4PaQS9nu/LTWDnovuj/Ov+8l/883/g2qXgTqtmvO+1l3YZj1Me7h3x2v1dtrYewfcCJpMJZSGYzmbAjDiOEAKWe6tUEuZhSFFVDEcDNMBzPQLfw7QMirwkSwsQMJ8vSOIUL9DZ3Fony7LHo7F4m5TygRDioodCUb4oqhKwoiiK8paTaeJ6nBVBUiQcHB1yejYiSSV7e7volsSwTMbzGVkhsW2Pfn+KZjs83DlA1w2SKKFZk826W/ylv/bX/tN3X3R/lHPf+q3Pdf/Kj33oP3niVvujhpa/73OffpndvRFnw4j72wesrV8mLwt29rbZP9gjSSKSJMYwdB555Cbra1s0m01q9QDHtknjEMcysQyDwHcpyoIwjAjDiDgOEehYlofn+QR+m8Usoln3b4L2we/9ru99/KLHQ1G+WCoAUBRFUd5y0ip+7PbbH3X/8J/4I3zzN/8Jbt++SZKnnPUnZHGF63ns7RwRLkKG4z4bW0ucnfWZzWIe3DskWoTouhDLveYzg+PJ0z/4vT+4ddF9+nfdt3/7t3eeulX7iccfW/srw5Nx+2O/8Tr3H4wJU9g5OKLV7lJvNqiERtBocOXaVVbW1rh2/QY3HnmUNM0pK0mS5YxGY0bDIVVRIiSYpklZVRRFQVmW5HmGZZsIAdPZiP7ghJ2dPWaTBe2mb2XpfDVKFl//Pd/zPVcuelwU5YuhAgBFURTlLeUXfuGnWkvL1td2O7bZqnsEdZu3v/Nxvu7r38v1R65hOTquZ2IYBlIaRGFOvW5x5eomVSnZ2T4mSQRxmlBrmiuNhvcVSVHcuHPnjvqbeUG+8d3f2L651fxPLq25f+rh3R3tM5+6zyKuCGN4uHPAbLHAtC0836EsC0CQphllWXJycsbHP/EphuMhw/GA7e2HDAZ9TEPHcRwsy8Y0LaSUaJpGWZZomkZVVRR5QVVBFGekaclsmnJ4cEj/5EQr03y9SIubFz02ivLFUG8AFEVRlLeUMA+/LvC1dz2/dxrnYerOJ1PyLAWtpNttkqQJtYbP+uVLvP7KfcxQA83kxmOXGZ6OqYTg3oNjllcDuqu25jjaI5qhvWN4fOb/yId+5OP/1U/9VycX3cd/lzz3Hd/9vhs32j+0uep+0+svP9D2d0NMu8bwrM9J/4zZLAJNYhgmx0cnVEXJbDalLDJmswWaYeD4LqPJkCrP0XUN27IpZYGsBJqmYxg6cRKhifOFv2EYaLqJkEAFmmaQZjHNpscUgWEGO8I0fyVMwt+66PFRlC+GCgAURVGUt4wPf/jDdrKIN+2GU7N1oWHomBbYtkccJ4xHEbLQiKKEpdUu917T0DCIkpTeSpd3fuXbeOEzL6MJi4OdEavrm2yse4+fHC+Kxay8EWexAH75ovv574K3P/ec+X7X+JOPbNX/w17XfvL1lw945ZUjbLfBzoMdtnd2iMKQbm+ZdqvJ4OyMoihJkozFfIaUEj/wKIqCxXSCbVkYpoUmBEV+/sDXMBxqtTpQkiQhRVEAUJQ5RZJjWiZCaAhNsrqywtJKD8RY7hye/MNf/KVf/GcXO0KK8sVTAYCiKIrylqFpmptHpRwm4/Djv/Fbues49vrmJs9/5gUsS3DlynV0zWAxjbjxiIduVISLmMU0Y3A2oV2v0VtqUa8HdFZWODkZ0PBrdVmkTwtNNJMsn9y5c+cffuQjHykuuq9vZc9963Ndh/x7bj/a+2DN5fq914/45KcfIjHYubfNgwe72K7NM29/O6trS+xs73PW77O6uka7rVOWq0RRSP/sFImkHjTO83VWUFb5+e5/WVIUBUUeI5FUVUWe5xiGieW41GstpCxJkgRD0xmP5/T7E9IsR9cs9ftX3tRUAKAoiqK8ZfzET/zE5G/9xP/h4Oh4lue5MM9O9siykrc/8062H94jSTPiKGU2mzM4HfL0M7d5+cUdZtM5pZQcjPtsbq5wcnrKY29bwam5LC1vEcev18ajaNWrBe/cee21jR/6oR86DcOwsG1b+8mf/MkMlQ/+DfPss88GX/GVq3/DENXXj08HnZ1Rxv3dY+4+3KURNAjDlM3NTS5vXUZScnx8zHw+Z2P9ElmWEIYhmtDIsoxWp0er1cateWgaDPsDjo8OkXn1u3f9s7xECA0pwTQtNE1DliWjyRBD13AtD123CII67V6dOEoKbUeL7ty5o33kIx+pLnq8FOWLoR40KYqiKG8lopDFuu1onXqjWdvY3KKsJJUGz7z7PVx99CobV9dwPQ8hNFY3elx/5DJJlNFp1xlPppSVQVVKXnt+G8OUOIHJ0toStuV0HMd6XDO8PxaGoTUejytHiEt37txRyeDfIH/0/R9ovv325g9trrt/kjLt3Ls/4mgw5+HuCbPZgrP+GZ1el8tXtyirAllqdFqrPP74bRzXZjIZ0T89ZjwckMQJSMl8PmN/b5e7r7/GaDRCAAKQEoQQOI6LadrYlg1CUJZQlhWGEJiGDkJimgKBxmw2w3dsc7Xb+47P7e/7Fz1eivLFUicAiqIoylvGBz7wAbsouFVIUW5dedSaj6fMxkccH+7hOTb1tsfqepuToxM03aHW8PACneWVS9iOw0ltxIP7uzzz1A3iaMFiPGbk7LKx2aZ/ONQsQ6+VjvVsuIiNbrf7M16SHKpd4DfGs89+V++db+v+ny+vut96dDDRn39hl2kSczKYEIYRmtRYW7/M8sYqZV4yGY1ZzGecnZ1h2SbNZodms0er2aOqKvwgwDQ1Tk4OmY/GOI6PYehI3SQTOoZRgZCkaYaUFVKWgM
C2PQTiC2lBKzQhWYQz5vMZeayBb5BX+e4/+Dv/9/lFj5mifLHUCYCiKIrylvGzP/uziS7Eb1WljMo8r9AqwrhgMYt47dVXiaMMw9JpdZo4js10HLG21sUPfBzf4D1f8wyaodM/m9LqtNjfOSWOZly9ukyRpcgq01eWG4/blvXVWZat9A1D/R19A9y6dcv6hq/d/K9vPtb6rvFo5rz+2jHTRYFu2UwnE9Ik5fLVaxSy5NOf/BjPf/pT7D68z3w+Y3VtjdWVdYLAZ2m5Q2+lR3elh2bqJGmG5/l4vkOSLpgvRqRZhOt6+H4dy3QwTRPDOH/sq2mCqirJi4SiyJFVQVWV1GotWq02sjLY2T7h6PjwAercR3kTUycAiqIoyltKTvlCmRHv7dxtvuOdX0HQ8EimKVESEkYJuqHTWaqjC40KSRiVeIFJGJf4fs71R7bYu3+MHzg06hqjQcTVR116Sy1GozH1ZqfWiv2N4+PRNxkYvw188qL7/Gb2R//oB5p/+Os3/2qvZXzb/s4J43HBSX+Bblrcfe11Tg9P6bQ7HB/ukaYplmnieR7NtS5Bs4HvB+gIoGA4HJKmCWkWM53OSKMYSUVZ5pRlhZQSKSVlWWHbLpblYOgmZVGSFylFmZHnBZIK07DQDR2Q5HlGkkSYlqSm+9KxvGsf+v4PBT/1Uz+1uOjxU5QvhgoAFEVRlLcUQ1pas92qbV6/ZMziCY/dusK9Vw6pt7o4gWAyDc8rvxYS23EZTEc0mi6GbRDNU9K4Yn1zjZ2dPW7eukSeFzhuQa1hc//egqVJLEzN2NLQ/rhZCR0VAHzRvvc7v/PGO9+19V+vrzh/5FMfv0e/nzOdh7z6+j3iMGbYHwKS4aCPEBq1WpOl5RUuX93EsHXCKCWLEzTdpMoLFosZo+EQTUgMXcfwHSoJmtCRwGQ8pChzyiIhLjKEriOEjqGbCE2i6waaZmA7JpowEAjiJEXXNCxbZ32zzWKeC6Rm9XqhygSkvGmpo0tFURTlLUVYVEWZzx3Pm1x/5DKbV1pce3SZRtfCdnWEFGRJhUCnKkvW15e4+9o2raaL55kc7g9otup4QZO8rLBMA0OHRssnilNODyfIvGi1Gt4zaHLjueeeMy+6z29G/+GP/NAffMc7L/39Rl3+kRdf2OX5Fw54+eVtXnjxNU6Oz0iTHNf1aTTamKaN5/gs9ZZpd7qEcUY4T2g3fNqNgE6rRq3u8vjjt1hb28T3GwR+HcepUQvaeJ6PZVt4vo/jOGiaBkKiawLHdkAI0iShLHM0oRFHKdPpmEoWGLqkLFMWi4jJbMHp6Rl5ngBbFz2EivJFUycAiqIoypvWnTt3NIB/9SFulglfE0ZydjRMDne2ux/4nv9A3H5qnTwpOT4ckqUVVSmYhjMqzq+PpEnJ/deP2NraQLc0mr0GzsmQJMnQ9PNqsaZlYlsWo/EUy9OF65lBPM7f3z+Z3QJeuLBBeJO5detZ60//767+exur9l+v+calV1464nOf2WY2T0nTjCxOkGVFkiSYukmel0gJhcyZzcfopo5u2NiWwenBHgKDsqyIkxTTNGg0GownI0bDU0zToJQVlQRd08nzgqqSgI6mQV4UmKbENEzKUqeqKjTTwLVshPAo85woiuh22gS+j6EL1jdWOD2ddl555RW1hlLetNSHV1EURXnT8n2W9JAMGAHcuXPHiSbzcValWpZF/oNXH4p//Mv/nD/5p/4gmihY3ajTHyScnp7SbHgADAYL5tOCMDzFMjwarRpFmTObz1haXsKyLGQFfs2h3qgzOO2TLDJAoOv6o45j/slnn3321Y9+9KPZBQ7Fm8L3f//3tzqe8SMbq963+G516d6rfT77mQc83N6j3V5iMp7SH/QRmsA0TGpBcP5Y1zbodltkScZoNEFWJYauE0YL4iQhySLKssJ1PIKaz9UbN3jt8zFpHKFpAss8v9oTBA00oZPnOVmWUpY5iPN7/gCO42AaBlEc4vs+cVEiNI0kzbBdizTLMR0L29D7v/jRj0YXO5qK8sVTV4AURVGUN63JJN4qwPmdrz/ykY8kpV6URZrq08nIWur1ePGzr/PLv/RrTCYhlmuxeaXD6kYb3RB0uwEbW13aSzWuXt/g5Zfv49gehwdnLK12uHf/gDgsKKuMZtvFcTWEqEiiEikFq5tN79Jm99t90/+h7/zO5574nXY899xz5p07d9Qm27/iz/zxP7N6fc37L971jrUfyqPw+id/e4dXXznm5VfuslgsODjYYzA8pdFosrGxxY1HbrG1dYXllSWarRaj8ZjB6IRmq87G+iaaxhfSc47RhcWljS0a9RqHBzuMRwMuX7lKs9OjRJBnBWVREscRaZpSlRLH8bBtH01oGLqBEBpVVVIUGUWR/u6/mibY2Nyk3akT+D6TcSijJHkJUOlflTct9T8nRVEU5U0rSjMvEab+r37Ptt2rMq9qspD5fLHg8pUr9PtT/vmvPs+7vuptXL6xxCM31zjY7iPQCOoWT7/zEWxTIy9CXn5hF8u2eP/XPsn+wwO2Hwy5cnPB6lqX9Y0Oo7MJQgiSMCFLdC5f7lx2LOPP9geh8+yzz95vubV3FkVxf297713PPffcb/z0T//04KLG5/eL7/nA9zz15K2lH796I/gPRqdj61O//Tr7+1OOzg4YDAYYhonj2iwvr6DrJu1Wm6XlDroQFGVJkkZcvXEFz/OIZiH3Xr/PeDzGdjwsy8EybcoyJ4xC4mhBmWXkErq9JdbW1hESZrMZSZqSpiFISRRVCKGRpBGGYSCkpMxLKlGhCUGepYgvlA3rD/pcfeQKfmCxfW8vr8p8fNFjqihfChUAKIqiKG9aUtebNcP41wIAS7PKVCSuoZvS0HVMx+LWk49wejRicBpSyUOuPrpGJeDuq0cIXQNRoVFw8+YVDnb7zGcV81nE1uV1oigDAaZtIDSBppskSUhRGsRTj6kZYWv6ZQPe3rT9P1cV5XVK/qGoqq8pqtx+7rnn/v5P//RP5xc1Rhfp277tL/rr7fwbe0vGh7auBF87GcT6P/wfP8NLL99lMplg2x5ogqzMMCuNyXSI59SY6pKH269gmTZra5v0ltpURcF4MGH7wQ5xFKFpOhUVmqaRlynhNKJRb2HbAZ7rMBwNmQwG2I5NVZbYjk+93ubs7JgwmiKrCttyMHRBVRVomk5ZVlimTlUCZUWr3iQvcsqyJElzFlHE5uWlbL5I1PUf5U1NBQCKoijKm873fd/3LQ3/m+GwW/M7lmV2gZ3f+VmSxjeSJEMTulgsQvI8ZzJakGUFURSxpDc42Bng+z7tpQDTcvjsx19hdaXHqy8/pN1cYjTYJ5xUlHlFOC+YjkOESIiihCBwOD6YEQQBs3FCkpbUao6xttb8uv7Z5PpwNN/Jy+QPSyF7uq6/19L1HSH4uJQXN14X5dpG8WfXNrzvvXql/djZ4Yzf+J/u8qlPv8IinuO7AaUsCdOQIKhhGg71Wh1D0xDApY1L+L7LdDrjhc/tkiQx9XqLXm8V36uxWMzIsoiiqLCtgFrdwDJNtMBC1wyWTZckWZClC
Xmek6QTsjTHNEyoJCCRVEg43+eX8guFwKrz4EJKkBWOaaI7DtPZgjSrCHynEJpxcrEjqyhfGhUAKIqiKG86dqVd+Shnoz9QeyaIsqT7r/4sCMzHB/0kNDSj9viTt3nfN7wHzZDs3B9wcjyg2WmwmCVYVkSRF2R5iKzOC0Stri/x8d98lZPDQw7aFvVaF8ezSeMS13YxTYswTCiKCl03kUhkJUCTtJd8T2jVI7Kq6qOz7EqSR1GlyV5K/nV//ju+rZnLSuiOOLX8pVfTNNXfykWk3v/+9xtf8xXv+J5HbtR+eHnD2zw7nPGr/+QFXvr8Q0azAe1WG9fySNKc69duoiHpnx5TVTn1oE0Wz4ljnaPDFKHpCFmhC0EUzugjCGpNdNPCNU0M3QApGYzOSNOERqOOrpnomsFsOkPXdUzTwtI0sjSlkjkSMHQTTdPJsxwNqER1HgkgcFyHNE2J0xRdA01KBv0p81mIrRmWFDQudoQV5UujAgBFURTlTUegNwAcyzCEsP+1n5mGeDRLQn0+HZtlFRHFKV5gs3W1B5VgPk2QZcHewZh2u0EcJZSFRpIW5LnO8mqPcJGyfzjgySdWMUwDx3YoywRNlJiWA4ZGpYHQDGxLw7Z1sjyn1Qv0zlKw/vKnq2xvdzKzTK1ajCYii+artudMAt0fl5OjpzXJ+EMf+sBv/9RP/exbcif5m973jqdXVvz/KAi0zbP9iF/71Zf45GdeJE5z1lYuYZkOZVGwstolyzPOTs/odlcxTYN+/5hur0fgeURRRBbHxHGIJgS60PFsG9/1sb2ArMwY9I+hqLh69TEm0xHHRwe4tkNV5lRFiiZsCqmDqJAIPK9GlhdoAqQsgfMCYFUlMTSNsiyhqjB0E8/1iJMFjm1h6zq9dg1KTFHKpYseY0X5UqgAQFEURXnTEZqo3brV13RTM6ryf/lb9oEPfKA5GYVzUzeX8iypxuOI8XDCYKCzutwBwLJNLMNmMc/J0hLXDajXJIHvcu/1PZZXOjz19pu8/touhqXjBi66YSJlyfJqi8WiopQljYZLkWs0Oh7Ntkeelwg9Y6nXwA0cq9lodfIsytJ4cX0yPwt83z4WWesUSYJphJ6sX/pL3/ftr5qa+ZrR2zy+fft2+Ylf+ZUNaZRukjIpbHv4Znw78GM//Oe+aWOz8Z+ubQaXXvn8Ls9/Yo9XX39IVpasr1/CMFyqsiSM5hwc7BLFC27ffIpOu0uap9y6dRuB5OjwkFEyYjoZUskK36/jBTVmizmjyZRmp8vNx5/gkceuc3J4zGy+IPDrbF1+hCRLmU5PmY/PELpOEDQIggZ5XpBnOa7jksQhRXE+vLquI4T8wteSLEsxdJMsjcnSFMvMaLd8bt7c5LOfubs9D5N7FzvKivKlUQGAoiiK8qYjtMrq9W5bhm0g0vR30zGKrPrK+SJv+7W6WF5b1zVhoEmN1fUu03FErdFAaAUlUJYCL7AZDMYIoSGEgSxskiTn8tUesAVCghAsFgkIDc0UmJZgfX2VTs+hqio6Sx7D/gLbMTFNqErB2uUOlmWJg+0T24iy5Ul40k7n48uiyPdBhIZtzWSeLKUz85pum+8zZmfj33z4wqKSwi1SaWSFkCLVTn7gAx941bDtXb0o0v/y7/yd+Rs9jr/07LP6y7duiY985CPFG/Hf+/Ef+Qtf/453rv3tRtu4PB2nfPZTe3z6ky/iBQGrq5eQleToeJeizNA1g6BW5+mn30G9ViPLEqI45d69l+mfnaIJjVqtzY1HnqRWb1CWOcPBkKLSQGQkYcjnPvVJVtZXuXTlGt21VaJ5zOuvvEaz3Wb18iqf/URIEs0JoylpEmLbHkVRYhg64vziP8iKNE2wLBspK4QQVFVJnOcYho7j+jQbywTNBpN5iJRakWXZwRsxXopyUVQAoCiKorzpWJrWevJK80nXdIxQT3j22WfdWq22ZOf5e2RR3JJCw7YCdK1i98Ehvh8Q1B0WUUZZSmp1F02fU8mcesMnXCTEcUFVQbiomM1yOst1pJRYuuD0eMZoFOIFFkHDZTLOsByBZTvMpuf31C3n/C56XhRYjsbSRgPP99l7YOtFmeoindtkiZPkSSKybJqn2bomWFieMzZNqxS6sZAlMi81wjTTHdst8fO3y0g7Mm0z/v4/+2c+8bf+21/41J07d7R/tfLx7/jBH/xAU8S2W5J4cZzqNSsYFJ5X/PW//tfnQoj/1RPkO3fuGB8/OPhaMzn+JDD9Un4fzz33lxtpOl69eq35V1fX/Mv3Xjvis5885PMv3GVl4xL1VodXXn6e/tkJvd4yT9x+iqKUrK+ucnJyxMnJEZPJlNFogGVZ9Lpr2JYDQicvJKenQ+q1Jssr68wXY8oS4mjBeNzn1Zf7HO4dsLJ+mZX1dR67fZN79+7ieg43bz3D6y8/T57HQHWeAhSNSp7n/JdSggBZSdI0QQiJpplUFWiahmEY6JpGSYHp2EzmCfsHpw+3GlvDL2W8FOWiiYtugKIoiqL827hz545RTUd/YZHk8iu/8smr29uHuy9+9jVRalVQ89yv3riy+v7ZLOZkZ0S3UyfJS5pdj0Y3IMtK8qykqiTjfvSF+/sus2lEWRQYloUb2EhNEPgmtquhVQIqQdAwcBzByWnEfJaxtOTT7XqcHs9xHBfHK3Ecj/EgwnIkvaUWWVJQZhWnBxPOdk9YjEZMZiMZJWFeyTh1HWvhOE5clEUmhChNy2ER5TLOiqrVahHH0anjOTMJM92292zHuafrpo5mzoVWRUUlD6oyXxeVuCQqva5r8qoQRi+oNW7meb4TLsKRaYuPW7b58crVDtvtS3PYYTZre3I2uymr/NpP/Hc/93Nfyu/j277h2/yNJ7rf/9itpT9640b7q84OB/z3P//P2d4bsry2RlmVvHL3ZQ7399nauMzXfMMfoN5qMDgdkcxShmcD4niBlBqOYwMlYRiT5QW+X6PeatFsN4iinFrdp9UNyLOc3QcP2NnepeYH+G6TNEvIZEG92UAISaPRBHQOd/c5O9klL0LO83ueP/iuvrDbL6sKXZzXRZVINKEjhIZlGdi2SxwndJaW+dpvfC+PP3WNf/ZPP/3TP/vf/9L3fqmfY0W5SOoEQFEURXnTuHPn/ca8f/z+wHEv6zAoynJzMV88U8ZJb7YYG6lrt1c3urieh2EtEIaJberUGg7D/hzb9hDCYDyYkUQZVWGSJguE0EnSnJV2HdPWGI1jqrykXtkURclsmuNNdWzLYjhMqcoSw8jQNIM0Bt/XyXPIs5zRNOTG0iquX8N2UzRh4NZ9gpbHfLTM6HQi5qOJdXywa+mYbjhfFHEyLxqNdiFsq0qTuGjWm0YWF0mZlLV5NEorUSWu619JNP09CLHQTaNfykKAHgR244Zlu81SVujYnbUrPavZaQavvbx3u9tezhfR4g+N+5MHruvunwwf7OuGYRbZWc2yjbj0/B//Un4f/+Ff+AtX2x3/Rx99rPtHV9a91YO9Ac9/7pDJXPL42x5jNi14/sXPcnJ8wrWtR3nf+74Kw9TY295jOpoxG42pBQ3WNzbJ
ioIkjdC0Cq9ew7Itmu0G9WaNRtsjaNQoy4w4LigLeOLpr+dzn7zHyy/dJS0r6u0Ohu2xmM0YnB0wH09Y3dhkqbdKlWeMJ6dkWUiZ5wiq8x3QSmIZJlVVnZ8GSKhkieOYIGA+n2FaNo4VoEmDe6/slfNpfPeN+TQrysVRAYCiKIryppEOHm+UWbopzXKpqIqpaRp+FEePyqpq2sIU2SKDUqDpkkKWOIFJuEg4PlgwnsQYRoKpS8qiIs8hTUNcxwWtwnZchIDJaEqRauhCMB+fp4wsy4osEximpF63CRcxnmfhuA6zSUSjbbNYFIxGM65cX8Kv2SAkRSVxPJtWvUGtU0NIwfhsQjKOeP3lNuPhxCiyxDDmk6qqhJT4VbPtiaqqisWibxV5VhmaKDTD1rJUVJahk5VJXIbRQheGYdl+0wyCRnd1xSjKkjTJqTeajIcJ7W5bW1lr29v3znq1Wr1XFNW7KymzJIpkkWRJEi0+zjz6Ux/6wAd++ad+9ovLRuQFxlc98baVb+90bX/7/ikf+817nA6mLG+sU+QJr7z8PHla8syT7+Hq1iUe7uzwysufx3d82u0O08mY8XhCnKasbKxx+9ZjLK82MEyBruvnBb90C900sGyJJkw8z6DINQ4Ohnz11z+N1/B58bP3SKuK1dUulqORhTOKPOHeKy9jmDZFlmFZDrqukesxeZagy4pKSoQQCCHQNA0p5XkWICRFXqHrAkMXGLZJreVjm2IuJb/1xn6qFeX3ngoAFEVRlDcHiUi+N353WZZ1IZwAKdbKqvSyNPVs2xE6BrZuEiUFQpM4ls3gZMjyWpc4zmnUHIq8RBMazY7PaBzjBzVm0zlVKZBUHJ0kiEoQBD7tpRpZVJAVGVe2eucFogywTYss9ajVDKqqwrRtoqxgEcU4jk+apZTSYxHGhFFCU7OoquILj0w1Ous9qm6GdAw0AYd7Iw72zjRNk2iloQsExyeHmmEHtuVr0rFsWVVCaEIITUiKWKvXmo1lXbd003aE7tr4DR9NFwxHUzAlZ2cLrj22hOHprF9tMT4LWWp6jEahJSuNPCtsUYj3e577rsVi8b7nvv1P/TWzbm7/zb/58//Gd9v/i7/6419//Xr7LwU1wz84GPPxjz/g4HBGVUESjrl/9yG6brN1eQ3btnn55Rd5/cFr1GtNbNujLAuarTa+18B2DTRhsLM74OikT6/bQTcEi0VEEpUkSY6mw/H+IbPxlEdvXaG9ssa9+3vcemKTqqg4PBijG4LVS10Od3dJsowsS5CyQFaSLE/RNA2h6RimCVVFnudUlUTXdaSUaJrG71wR0oSGaViYlk93tU1vtU48TmPbdGpfxk+5ovyeUAGAoiiK8vveX/6BH7iU/9nFVydR8o4szw3qwSVdCDMrq26WSdvyAyxXUgK+67JYxDQbNWazEE0X1BsWrmNg2hZIncU8pFZzsGydorDIC8iyCsOwSNMIqVUYlsD3HLLMpMhysrSi35/TbNUoZEqWushSY2d3yP2Hp1iGjWWa9JbrTMdneIGNaWoUeQoVhLPze+2r68skRYntGyytNOlttNi6sczwdM72wwGeb9LeuKmZtkkYFiKaJ1iWSThLGJ716XWWjFqrSb3tkcYVo1HIaBZjWyYISV5Cc9kiTmOKWYFpamR5wXwesZjHJGmG5Xl4XsuxLcuZ78b//qS/uNop3OPv/c7v/NG//Xf/7v/PFJc/+t0/WjsaHwXv/qob33nlavfP1mrGlU/85mscHi4YDBPOBhM802W+mNBZ6fLUVzyBrdt84rc+zvb+fWp+jW6ri9D4QtXd8x33OBKU5RivZlNIwcH9bdI8JWi1qNda+H6DvCx57HabnbvbvP7KPsuTBFlsEgQujXaN0WjB/t4xjz95jVtve5JXPvcaaRKSZwlSlucnOVUJEqQU6JqGaZoA6LqgLMvzkwD9PAuQZZqYpoXnB4RhSjRPOTocDAZx/Prv0cdeUb5sVACgKIqi/L724z/8/ZtJGH1DGsXfUuRlj6qykyRta7pRk5XWzPPz6yLrW10QFa5rkmY5umaQFza2o6ObOqYhycsCw9bQYwO9yhGiYGmpwWyeMJ+ngEa7U6fedLFtnfHZgjQuabRrjEcLqqpE16HVbLL/cECr5XN6coomPIRIiOOE+bTDtUd66FqG360RLkLQTMJ5RLvV5OxsSBKlyEqyvztAInEsk4ODCWubLdYv1dEMnek8w49SZiObIs9pdl3WL7dAF6S5RDcMTndPSAvJ6uUm4SwGaTA8WdDqeaRpSRym6GZFUcDhwZgiL5GywrQs4jRDMwxWNtb88WDxFVVVycWwb33Hs9/8dz3HuydsTRPIpLd+5d5kMvGS8fiRWTX8I29/77U/eOvpy0+ZIjd/89df5nOfeojtu6CZVHmF3fDYurnFyqU6s2nK5196jZdfe5V6rYNjOpiWhl+vnz+61g16q8ucHg/Z235AEi+QssQ0HBzPxW1MqdWndJd61FsN6o06b/vK25wcNTnZP+Lll15l5VKPOIloNOtEccZgPMV0bbprS4SLEclkikRiWTZJGiOrCkPXkJqOruvoX6gkXJUSKM8zvwJCQBgusJyAbqeFlEIOx7PffO97337wz/7ZP7rgWaEoXxoVACiKoii/79y5c0fL89lqOY1upGH6jiyKb5e53NKF6KVprqdZbjqBY5eV1DqdBmfHY0pZomkSx9NoYFEVBhUFvdWA2Swiyys0dBBgugYVEk1AWZYUhcQPPKIwwbQdJpOUKKyY9EMs28SrF19IDamjaxV5WhFGCQd7p4yHCxpNjXqzheU6tDoeQeAwny/IkhmtJY+SlCLX2H94guc5eJ7FZBpyehKxtt7mNJ0SxiX0QwpZ4tc8ZmGIqRvESY4QIHWBMExm05T5JObuKw9Ikoz3vO9xOl2bIss5OxjTPxvgeg7tdpOgdn5KEc4T0qRgOhnj+T5pmuFJj7Iq6bQb1NoeVV6KSjS/aTQ8fTyW0bCSVWba1nByNvlEVVW6Yftf8Y6vvHzryWcubZpGxqd+6wGf+8weUjdY2egyHsZsbq6Tyxy3bvLg7j6vPL+NYxs8/dS7mC1Clpe6rK8vEy1CZos5cZQyHs/QhM71R24idI0srTB00EwNy7ap8oL+wQn9w1O6K0vohmDr2gqPPbrF4eGIo6MB125c4vOf3aUoBK5jMR5NkZVgZX2dskhYzIfngY9pkGXZ7+b6B0FZFuc1AZBUZYWUFYZhgDg/pfCDGpcurREtEnlyPP7Y/+3n/9cpWBXlzUYFAIqiKMrvOy6sWWa9OconV9IoeTyP0g2hW+2ykEGWFnqRV8J2DGE7Os22T5YU+IFFRU4lzne54zghDhOyuEJUBlmSUpQFpuaQJhlRXOD7LmleYFg6liNotpsIXWM8ipiOQ2azBUurPUzLJM0meJ5LnkiODgac9adUuYaUOrVmnVqjDmiAYDaNSZKK+Tzh+CzCD0zKtGRlxWc6CimynHrdYzKICecpBwcTikIwGs7Z39UJGhbNTsBsOsHQTExTQwjBZDwnywQPXntIEoasrq3
R8F1kUXJ6OOTkcMRiMSMMM9rtNkmck4wSsqxgPp8yHvSZjEY4nsV4dF78LItzmu06pm1i2YHWbhmbs8loczGf4AutmI/n7/YDi2fefaX2nq++aUz6Y+7fPeVwd0KalWxeWWP74TF5Kjg9OuDK9auE05zD7T4N30QzDFY3VrnZqRPUPF5/5S7RYsrSaovVS3XSWHK4e8ZoPMG0LYqyYDIaUOYZhmWy3F0mqDVIkoLZZAFI7n7+Lu1eh9tP3cIyYXd7QLvdwHVtxv0p3W6H4937hLMJtu0jq5w4nqFpYFk2lBLE+TpeyuL8LYdhnBeD+8J1oaqSaLpFs93CcXV8z6PmNyYXNysU5Y2jAgBFURTl9x276Rg1u/zDk0PRlqV8KitKzxDCN2zPsCshDMcmqDnkeY7jufi1BMuW6IZJUVSATlGW5Dncf+2MvCipCkmjE7AYLahK2H5wyCOPXkY3NGzXIMty8iQhaFpYlkZZlcxmM2pNj+lYx/dtOi2f08MJnhtQqwf4NZtrj64wnaZUZYZpm5QSBoOCNMnprrR5uH1EGHqs9BxqNRvLMoiinJ3tIe2Oz2ySc3oyotFqAFCVFadHfaaTGbZjk2cJQWBzcjRkPkup1wOSZEqj1UQ3dXa3j1nNW0TzFNu2yTOHokrpD4YUWUm9UcdxHZhAXmQUeUaWGRR5gR+0GA9nGIZFrR6QpSmlFDhBnUroLC23jMd6V1obl9s8+sQ689mIB/dP2X8wZTpNaDSbnByNCRcVJyd7RIsZ7ekSUZojKRnPJpSlRBoGRbXK8eEhly6tkCY9NE1gOoKV9QbLmz2qXDAdLwijiLJYpUxzsqxiNJqwt7+LbuhsbF3CtGr4QY37r7/C2a+e8ZVf8x7WLi3zL37t06yuNFle6TCb5bi+wWIskWVJlmbomk6Wp5imjeU4lHlGVRXouomsSoqiwrJMpBTkRUZVQVBvYFoGS8sNZuP4qNTK7QudGIryBlEBgKIoivL7xn/0Ax9c1irrfafbB0H71vo3Wq7XW8yilm7YXiGlE/i20C0DhMTzLYTQzheYUiPPJFUpSJISy3SZjCeU1Xmqx/kswXZNEBqWpRGFFZ4XMJuGLK20EUKSLGA8nFKPbKpKZzaKMS2HjUsdDEPH8w1GZwsWs4LeesGVqx3qDYckTrFMDU0zKauC7rLLaJgRNGw0I2dtvQOyotlyOdgbs4hyZvMUBLS6PsfHfeqNGo1mQJZkzKYL8nlGNkmxbYuyrIgWJmdnJ+R5xnzqIoA0z6hpGQILpEFQ85jNTimrClnBbLKgXqtTVCV5VWEYNoblMF+M8PWAoiiIogmaYaAbJhIoqoosL/Bch6BmceV6l2fecwmvZrG/02f77ilnJ3PiSLK/f0ZZZszDGMfxSPKIUgPL8SmrhFdfeYG8KDFNiziJefnzn4NKsra6iWXWMUzByuYSk3GO4zqYtsTxPfxak7IqSNMM3XLZkIIqT6mKgsUiwrA8DEOj0X4PpycDPv3Ju3ylb/E13/BO/vH/81+wFlW0Oz02Nlc52tnHMi0k5fliXwNZFeT5+R1/zdChqpBUCCHJsgSh6WhCQyLI8xLT1DGMivk8GWaxiC96jijKG0EFAIqiKMqFevbZZ/VHN5Zu9Qrt7q6Rlto8eVJU2jvn42S91vBXTg5ORbvTdAzHFJblIEtJkkdYpo4sUuLZnJO9M8q8hURyfHR+VSdNCyzbIq9KOt06hmkhS4lre2zfe0heSirpIGVJmUqKPGU4HlOUNdrtBp2lOlXpnb8rsAWyAr/u014CNzBYWqkRxwVJUtFd9ljMY2zbZG21AdWMk6MprXaNs8MjWp0aZ6cl9+8d4TjnC3jL1Rn2I5rNNnGcYlsCwzDRtDqu7XF8dMh4PKKsSjRhkiYRmtAoypzVtTUWi5AqK4mjmCJLabV8JhOXspDohoWh68ymI/TIxDAtsiTDNG0kBkKY6IaObtpomkme5USLhPk8xPddltc9bj2xhm7CaDjhwd0F+9tjPvvp1zB1D8s0mE7G9McDWp0Ws8WUw4MDnnnHu0GveP7TnyIKFxiGRSkzzk6O8D2PTqtHs97CdX3a3SZLK23yssCt6chKZzQcMzgbkmWCCgt0A9cysR0d0xSkcUo8nrG2sYZTr7Fi2vj1FjvbE9q9Hl/7772XFz+7zUvPv8K73/0knZUO4WhOUGsxmfYRQqLrGoIKWUkANCHQNKikQJbnlYA1TUcIQZEl5FmGpmtkWdZPRkb/YmeLorwxVACgKIqiXIgf+8EPPVUJa5YXhcyT/Om9PH9vOc+6aZ4sp/MscH1n7dZX3KiNRiGmadJecYjmJRoa6TwjikqieEYcx8znC0zHxfNdNM3EtEySJCOOI+pNh2arznA4p1hkHM77+L5HUVUsrzbQdYnv2eztnGHbDmlW4voGRVHSarcoq4yilNi6pN21WFtvsLs7IEnz89OERUZv2edSt02j7n5h9z1hPo2Jw4Kd+0dIuY5u2kRJzHg0oZIFT7/9NrphUFUpjqdh2xZpVlBkMXmWIsuM/tkJjUaXUqbUghphGFKv1VlabqMLQVVVpGkKlBQF2JaBrLlICaPBiCIP6fZ6TKYTDMPAti2ajS6GYWLJCtcL0HUDz3eJ4wRdF7Q7ATefWCZoarz84iFlKhkPIuK4oN8f4bo5rWaTNI8Zjvq0u1329u+zurJGu9Hm7uuvEM6meLZLKSVVmWObGlVZUlYlo+EpaRrz4H6FYRpIKag3WzSbPYKaS6sV0Ggtc3w0ZDSckuY6SVTh+i61wKcq4eG9PRqtBs12i1rNRk9Ntu/3Wb/c4j1fe5vf/NWM3YNDlldXeXH/CM9xaTZ6LGZDZFUCnBcAA3RNpygqBJKqqjAMgW17JGlKrd6gs9SiKAqEFCe0h+mFThpFeYOoAEBRFEW5EGUpn6SMrwL7ZZY9WhXlk1mU+WWRt0zbXh2MF35WZHgNhyTMMSwD04ZolpJnkvksZzoeM+qPkFSEUcjmVhfbBU1YDPoT/JrJ6lqH0ThE6ALXCVhEKbee2OT+g0N8z2I2S8mjhCvXVoiSggf3DxkNE2azOWGU49ccHNek3rCwbAOQDAcxCEG9ZrOYz2l3V9F1iabDaBwyHkekecH+/gmSCsOySdISKkn/+ITLVzbxPIeyKml1XTQ0jvanxFFOOF/guz4Cge94tBpNhNApypw8z2k0AjRR4TgWYZii6QLb8hj2+3iuS56XgEYUz1hMB8wXUxrNDoZhkMQh7U6XNM0Iaj62dV4FudOpM5+HtDo1bE/j6HBEsV3y0ud2kKXEdlxGkzkInf7ghLzMebj7ECErzo4PCVyPZrPD3t4OJ0fn124QgqLIcGyLqihwHIeyzDnr90mSmHZrCUPXydKE+XiBIRw0CVUFo9E+jufQbNSYDiNMQ8PRTZIoQwqNoNFkNJwxHc/oLfco8pKqqLj38h4bV7o8/sxjvPriNg3Hp9Pt0T85QNfOd/+LPAcBRVGeF/4SFUV1XiTMMM6LuwlNx3EbGLaH5VqcncwfToeL/+ZXfuVXVACgvCWoAEBRFEX5PX
J8yHsaMhkOuXnuVg6MbaEJhmS4KMHQdKSWSinkYcemex3n4DQ+xteNz/swKZV5SSUmtEdBo1hmPpvh1D9M2MITGaDjDr9UQukH/ZFGWs7rZxg18NrdOLZqTK6jVmkgF6BplWWLqBlJWKFUw7nfxvQae3+aVl55lbaOFFzSIoxlFLtk+dQ+m7VAUOXmRYAgNTUFVxpzsX+fo1jWytGAepyhdYuoBQbDK6voWgR8wHY/odgecnAw4PDgiS3MaTZdH33gJpSSf+J3nicOM0WjAcf+YSlU4js98PmUwOCIOp6R5juPUqNUadAeH2LbH+bMXOTk6JM4iWo0OrVabZqPNwcFtfK9Op9VhdWMVKQSz+YzHHrvE9StHaJqOaViLGf2VQkmFLgSmZTAcjVEKTNugvdLi5OiY0XBMrRZgWjbDfp/5fM7pM2dotzvcuHWVZ194iqLM0BAYukGntcb25mm29s5iWoJG02M6Dun3RxRZhmPaNJsNDg73ydKCVqfDeDRic6vN2XOncRyLgzsnuIHL3tk1ojDDFALXtalkQeDXuHx5nzjN2N1dJ40SVAVRVmBbgtNnO5iL4U44rkESZ9iWQ1WVlGlJOovwHAMdDaoKipx4MpllafqS9Oxf18z6v966L0u+7/t+sXg943hp6fW0TACWlpaW/pD59m//dr1ZFX/j6Na1HyjTcsfz2/QGEW6rwbu+6et59ze/m+3tDQ4PjvnUE5/mzrWbrK0F7JxaZTyboiudJEqptTzQDNqr62zvbeG6FlGccHzY4/y5c4xGJyitwDQ0VCWJo4jxaApKMexFjHoRg17MdJJydHzM8ck+RZEiNAPH87EsmzgMSZI5YTRFN008v4nj1zlz9gKPPHqKTtul0bDxPIc8L8jyitNndyiRBG5Avz/Cr7nousnVl6+TxzntToO0yNnY2qSz0iDLc1ZXt8jygqDeJIxC2u01iqKkLDNMXSMvStrtTeL5jMPDV7Ftk7LKObzT4+XnD3jzVz7C1m4H0/bJ0oQsm6MLQe/4kCKZgarQbYeyVIyGE2zL5/bNQ46PBrTX1tneO82Zs7vUgoDb+z1eefEad27c5u77diCXfOR3n6IqK4oqZxpNMCwDVSlkVZAkMcN+nyieoRsmCo0omtFurVKrNxiPRyRpQr1W49Kle0mTlH53iKkLvJrLhUv3oRAEroFl2Qz6E6J5TBjPkGUBSoCm4wce9aBOnmdM5jOEttiuPBz26A2OabdWaDVWyKsCQ9fptFdJkpwXLn+ek8E+oGjUGiRJhiEM9k6d48Jd91IUFWgVlmVyfOcQlEDTFk/py7LCC0xa7TqD/gQ0wV0Xd9ncbLF/Z0BQ89g53SaapwgElqOTxDllaXByMiWOEgLfBaHwfIs8Xbw209RxXJN6wyAIbKoKsjRDV4oiztCVhioyRJ6RxVGVpsmdQui/rwL/J/7Jz/2bZc3/0pe95RSgpaWlpT9k7tnb64xv37i/5tbqJ7MBdsPla7/rPbzn29/D2s4ar750lf/wS7/O1Zevkc0z2u0mhiboHd/gzIUVSk0SxpLds1ucu3gO09IRQufosMfxcY+HH3uEj/7eJyjiAY+84S400yZJU+azGUpW6LqBLCuqEqSEOEkopaRea6BpTWwnwHY9KlktSoXikEatSRTHjAdHWOEUx3T4dFZw/vw6jz92ivk8QlYS3RBEYYSwDEq7wnJtHN/DsmzWt1bpnwwoqxTfdegdHRDPpkg0dE0jjhPSOMQNArI8QSEWU4lUiek4RFFEs71KUsQk8y7RJGZje51wVPIb/+53ufv+Hc5f2mZje4WyKNFMiyyLuXOti2fZpEWflfUWK50W168fYmom99x9hjArmPRu89nDfTAszpw5xVf+sccR734r49EQzzWZJwWfffI5apZHEPjMwjmGLYjSBMt2CYImo2Gf8XiILCWB28AwHapSYegWll4ShzHPPfcMZ89ewrAMijzmzp0eSVpy/q6LaMphOp2jKtA1gSxKomiO6bgEfoOyrOj2etTrAZsbG1RSMp3NOHv2Ausbm5wcd0nSlHqjznQ64aWXnyVJMsoixxQmUkmKosSxLZCSyajPycEdTp29QJREFFmJ7/scHN0hqDdp11dI44QozKk1TC7efYHROGQwSPGDCsd16femZGnO2mYLw9RJ4oKVNZ9hP8b3LTRNcXI8wA980qREU+C6oORiIpVje1BTWL6JJgTRNEITAtswKYuMaD5Dk1VsGiIsNTG2jKL3esfv0tKXgmUCsLS0tPSHiFJKe99f/L5vzePk7Qir/vZv+Ca+6bu/i7P33M2Vy1f4F//oX/Hh3/pd2o0NVlebmIHC9U2ytGI2TdDUmL0L27z7PW9DN3UUCk0Jnv3Ci6AJ3vmud/Bbv/Ehnv7ki9x118Zi5rpSFGWJbpoITafIClAgdIECNDQCzydwvcXG306LeZTS7fZxHA/HcVCyxLFtpCxxLYd+9xZRNCeLR2ysr2AYkp3dJlJm7N85or3aBqUQmkEWp9iWTWe1iULiuRZCwGSkc7w/YjQICXwb2zUYDY7QxgY7e+dI0gTDNPHrdYq8wHUESR5Tb20gZUlLA6ngK77mjTRW2/zBx/6AW1dvc/6udc7fcwav3mZ94xT9gzGf/sTzKAXNVp1a06Pe8EmSFFUp0jQiSXJMyyGNR3zh9iGfFxZSCVa3Vjh/7wW+7lu+lvseuZdnPvs8x7dOsE2beTxnvVFnNp8yGo1w/TqT6YQ4m6FpgnxaIJGgNKgkhmGQpDEvXXmGdnMFx/LZWN9hPpvQOzpkc3uXsiipXiuB0jRBrdFe/BoEtFebjCdjpuM5RVnSXu1w4dIZVFVgDBWOvYfQBVUhcWyPdqPD0eEdbk9HCF1HIBCattirgMA2bYo8xbYFQb3DdDJDEzXW1DZKGriOy+7uBnGSkqUVSVoQhSFVBdeu7uMFJhfOn+L4aMid/SGNho+uazRKl63dDlHSZTaraDXrTMZzTMNaNKBnJbW6xLJMblzrkWVt2hsermvh12yySFGpCt3U0XUhsywfV7p2qFvmBMOxXu8YXlr6UrBMAJaWlpb+EPmR7/+z56P+5Fvb61ub3/HXvpO3fNW7OLizzz/78Z/i8594muFgBFIjLyOyIqBebzLsz+j1pzz0xvv56q96G65nMhoO2D19isFwwuc/80nuue8Sj7zpUT7wXz/EJz/2Odr1BvWGTZoVlKpAN0wcF5IwIk0zNKEhdNAF6LqOIXSEbtJZbbC502HYny22+kYxWZKgC+j1jjF0A01TFGlKVA6YWYInn3iaTqeOaZ1la7uOXcFsFKJrGkJoZHmO53lUssRxHbzAo6wKdCuh2fFJ04zRaExQ96nXfQ4OukxGIe21BpZj47ouWZJClVNVkmZ7E7++SprO0KuSwfCEBx+7xNpah6c/+Slmown7N28xm13h5pUxskqhFCgUCugdTzg5GGMYGuEsZWunTaMVkOYpXmCQxBVlWRKHCTdeHvH8s89x7uIZ1jZWefAN9/FVX/8OBsMRLz7zKs987jJxJJG5ZDafgALd0InTOaay
yYqCQpYITUAGEokGpGmEa7msrWxiWRZpnNI/6WG7Dlmace7cKbZ2VwGFbuqvLduyWU8CuoczBoMZWZJxeLCY9b+xukZmVMzCiHkYUuYlSZxQKcXm5g7H3QOKPAU0KikxdRuFjus1ybMS0xIURYTrBGxuXeDg9hHd4xPm8wln7zqF7VQcHQ5RVUFRFrheG8MwefWV22xudWiu1Tk6HJPGJWUlscwpnufhWBmj6QxZSuZxhG4I4kQRxQa+59BpN5hPCgytQO9oCF1hWRZUFZopcet1JTWSqlAnmmldfd//9q+WNwBLSywTgKWlpaU/TDRdF3/2jV/3rje965u+WVNWwL/9pV/hA7/2m7x6+SVAsbK6jue7FEWGrRv0ugOCTo1v/hNfx6nTu1x+9gpb2y1OnzvL0089w2Aw4q1f+VZ2z6zz8Y8+yRc+9Szt1mK2+v6dEcKG9c0a7XaLrCxRaAjdQBgFlqVTa3ikucJxbdzAob3mYTiSZsdDaoI4jKgFAaPxgFqjhR4vpu00ah3KskCWkuM7t7h1IwNdo7P+OEovCGcJQhcUZY7rBWjaYmOwpgl0oZPECbphsLbZpJAFpmvi+g5ZlZOlGvs39nEcQRJHNOotiiwnnI+pNTKUrGitbDM3fIpqiiU0bly5zPnzD1L/mq/mEx/9II2Wz+65PYr4Fr/33z6GjobuKCbzOo7tEU4TLN0kimbMxhPOXDxFs+PiONZi6dY0RwiBaYBbOiSzCXOj4hd/7YM0V2s89MaHeOwtb+BNb32YF595mSc+8iSXn59RlSWyUghhoGk6liEoC4nSFKDQ0BCahoZGJQtmszGGsHAdjziek+UJtmnx8ksvcPrULo1WE8/3Wd+so5tgmDAeGWRFSiV1bNsgz0o+/7lncB3vv/cgFGVJlIbM4xm25bDW2aY7uP1a4mdQyhyhaYDk8PiEXXuDU2fPcPWVm4wGQ9rtOvXmNkdHPW7fOqbTWSEvUkxDkGUFcRRjWj5BLeDwsIvvm/iuiUBnNIipBR7dkx7xPMa1XDzfoVIlx8djhG6gCZ00LYnimI21BvE8JU8THEfH1MFQEi3PpFDVTJjWbcMUL0jPfPJ1jt+lpS8ZyybgpaWlpT8Efux7v9d797d/y185feH0D/tra+0nP/EZfvmf/xLPPvV5PMej1VphMp0yHB2zsbGFIXzOnz/Lu77hHdx9/yU+9+QzdHtjvunbv5Yin/OpJz5HY6XG29/1DqQhuXb5Kl/4g2c5ORiTpSW1us/Kmk9r1WV9y6fR9ChLSZ7n6LpOGCaEs4zZNCVO1aIp0zHwahaGqZPFiitXuvSPpty+cQ1kgWnZ5HlGlqXYpsNk2scyXfIiJ8lSLt3/EGsbbc6e38IPbGqBhWloRGFJ0DAJai6gsbLWRlFRlhKBQtddSpmhGxqO7TEbF7z6wku012p4vkN7tYOGYjYLqTXqCF3n7PnHsEyd8egQTVPE0yknh7d54NF30DvpcfnZT6EMyZlzF7jy3C0+9dHP0O4EuIEDwqLXnZDMFyMnbc+j3vZod3xMU6coJNNRyHQS4XoeigrDgp2dNV54+hr93oj5fI5bCzhz4TTnLp3m0r0X6Q3GfPz3P8Mnn/gsJ0cnFNkcqUpKUaGkBDQkCqEJTMOgyktqfoOa65MXBa1WkyIvcR0HTRP4QYNGc4U0yxZP/zfabG2voRD0un0mwwgdgaYriqxgNJqS5YtDdZyE5HmMVJBnGWgSdEWZ54BGWZTYtsvZM3exsraNbpgITcMydaqq4OjomPZqi1Nn9hY/36QgCVPmk5AkjvCCANux2T21QjTLGA3ndDoB7dWA8TgDNJQGRVow7I0Ioylb25sIzSZNMxzXRqDQ9JJW3aPTDtCUpExz6r6JSS7j+ey4qOSrBfp/sHTnt/7Oz/5s9/WN4qWlLx3LBGBpaWnpS9ijYP7iEx/9hp27zv2N5tr6Gy+/eIUP/Pp/5cknnuJkf58knpIXOZrQsW2bOE4pgW/81m/mT3zXn+DO7QP+4OOf5y1f8QgPPngvz7/4HK9cvsqpvW2+8qvfRr9/zGQw4TNPPoPnGtiWSRQVTEYR/f4I24V3v+cRDEOjkhXNdg2lJFlSkKYZSVKQ5grTNDBMHdvWsG2H/Tsj9m9N6Z1MiWYhruUynYzJi5jJZIJpGEwnAwzTBASBXyMuMuZhwqV77mFlrcO586vcd/8W+3eGBDULy9TRLYGmCdorDVAa0TxkdWMD0zLJs4JGewOk5Orl5zBtC8NQyEox7M+JwpyNzSZe3cJ2N7nn4TcQhz0oCwyhc/v6C5R5xb0PvZNXr7zCEx/8MLZn89hXPkI0zbj63EvYjiCJc467A+aTBNuy8Go2umGR5QmWreM4Lp7n0DsZIqX1WmKiY1oaV67cBCmYTufICkzT4s7REV7g8OCj9/Hw448QtBo88/kX+diHPs6rL71CGs+RlCgUQmgUssQ2LXRNpyorPM9ffD7Lafg1hDCxbRvPqxMEdVqtVebRjKxI2N7ZxnIcdAMsYZAnJfMoZjwcUpYlEo12u81sFrJ/cHOxzVkXzMMxWZbRbLaRFRRFgmU51Lw2a2ubbGxuEUURhm1w9vwOxwddDvdPuHD3WSzH5vatfQzNQhaKXu8YPwiw3YALd2+wd3qVl148pCo01tY9kkQjSXIqWeHYBgKNk8M+k8mcdqtJrV7DsgxMQ8MLFomHYyp8zySfZ8g8wxD5rCjTZ3IlPlWa2r/4yZ/75aPXO5aXlr6ULBOApaWlpS9BSintd379V+6/68GH37e5e/a98/lcfOD9H+TDH/g9ukd9dCGYRzOmsylpFqOUIowTHnrkYf78D3wvzVaL//Tv/wuW6/Jdf/ZPsn/7Dr/zwY+zu9vi4t3nue/Bh3n+vMQ3agABAABJREFU2efZ2mrzyU98Cse08AObQT9k//YJ+3dOyMqKP/mn30Wr6ZElEUHDRtNNHEunyEvKsgQUZbkoT0Fo2I5FkUvGo5jucczNa11MzSSJY4q8ZDTqYdsWcRQxn0/wgxpZGjMeD3C9BkpVSGWiWw5v+cr7efzN9xKHIbaj02h5JIlkMprSbNVYW+9wfNTjrrvPYlomcVywtXeRKBxxfOdlLNvB0AVRmNI/njLuTemdDLn73jNgOTzwxjeTJSPWVpsooTEfD+jevI4V1Lj34Xfw0Q/+AR/9zd/Gcg2+5lu+ljNntvjI73yE7p0jmvUaCoVUEt0S1Js+jmsymySkScV0GtJs1LEck9k0Ik1zRv0Z1159FceyiOcJ8zTE8erUanXmsylJlFKhWNle47GvfIxL99/NZDDhid/7FJ/+9Oc46d7E0Q0MXaeSCt/z0XWdsihwbJs8y1CvTWPyHJ8sK9ANjVZrlfbKKlmeU1UVjWYb2zIZjYa02itIqUAplKzIq4IgCLBMixdfvEyvd0ReJJSyJEnmlFLRaLSp15rkWUbTq7O7exrL8ciLgjQraXVaNFs+49GQ4WDMSmcdz/N56fJL1IKATqfJzVvX8L0WnfUWj7zxPEHN4+hgSFFIskxSpIphf4JSknarCQomkyl
oYOgmrmvjexaKCs8z2NzwsS2deBwzG0VlmiZ/YLniC5pgpLn6r/yjf/oLh69vRC8tfWlZ9gAsLS0tfQn6tz/3c+sPvPWhvxXUO1//8d/+CE997BMcH4/RKoFEEYUzkixkOhuTphFBs8Ff+Mvfx3ve824++N9+l89/+lm+87u/k4v3XuT/+KV/x+3rXU6d2eSxNz/C6fMX+e3f/B0ef+wenn3qZbRKw64L4jhhPk3IQvC9Ou/9+odptgKe/fxVdnfapGnO5nbztc29FYahoesC01zMAjIsA8MUFJZGGOZYps7e7irXXj3ANEwm4zFVUeA2G6AE9UaDNMmZTWfohqAsUkzTIs9idEth2T7PPHUHPzBZ365z6d4dLj9/HcexSZMEXRcUhWQyntJo1gAd1/aYT/po6KCgrMD1HLa2daosZ9J3uPbSIUHTQdOew6vrvPz0i9x132lMXQOhMxudcOWFz/C17/0aegeH/OZ//HWuv3yFP/Fd38q7//jX8eHf/gjHN+6wudui0XbQdZ16w0VoAssSjIcR4wE8/4VXabR82it12u0aqtTJ0pI7t2+gJNi2QxrF5GlCo7GCECZxEjE+mvDffvUDfND+be5/9G7e9TVv5lu+42v5+Mc/xe/85u/QO+piGoskTDgGAh1ZVCglKVRFnEaUZY6GIA0TkjQhr0oct4au64z7fXb2Njl77hSWYyCEzmQUUxYSW6tAk2xs12ivPMYnnvgsh4e3KPMcIcAUBmE0Zx7NWGmvkVc5w+GArZ1dNtbXufbqDSgr4jhm7/RpppOU3kmfeiNn79QpRsMR4/GY7e0dJqM5VVbxhU9f4Q2P38Xudoej4xlFlpPFEb5nUxUVg96ANMtZW1uj2z2hKCS1modGHdd1oJJkqURTCr9moyo1lTrXpaF9tEiSz9aTjfj1juelpS81ywRgaWlp6UuQv7YWDg6Oy0++/3cmV56/2S50g0mccXR8xGQ2QJYV8zAkyVIef/vb+P6//BfpHR7zd/76j3LvA/fyvp/6O7z68jX+wd/6RwhNZ2tvm2/4lq9mdXOD//Tvfp3HHr/I9Wu3GA2GrKx6tFoBvZMpOgIl4bE3X2Brp8kH/ssX8GwN0+qwd7qDG3jIqsI0DaSUKKWoXhsVKoSOVApY7AfwazpKQWejgaELDAFTSyOOYjzfpywLdKHTaa2SZh55XmBbJrqRcvbuu3j6qcvIHDa2O7z1Hd9C/2SMISTCFIBACDBMg/1bJ6jditbKJrZhMur3MQwTIWA+j+msrFAkMzqbdYSAgxtDPNfihWde5PGvfJj/8u9+m69575vYO7eJY7rMRxndo+dJEos/+T1/ioPb+1x77lme+OATTOOc93zrN/DpjzzBrRvX0IQiCBxG/QjL0rFdC0PPqQcue3sbTMYzbl07wK/XSeMC3/PwPJf+oEecRrTbHbI0ZiqH+PUaKytr5HlJIEuSJONzTzzDR377o2yf2+Ybv+Ub+Mmf+cd86g8+x2/859/g6GgfM4vwHR9dWJimhVZWKAlREuLaHs1mG6UUk/GIi5tb7O3u0Dvpc3v/mEYj5szZHVY36+iG4s6tIWWxWLh15eVr7Oxu8/hjj/IFXXBn/wZ5IaiqCsOwqGRJr3+C77rYtsNkPMWreVy47xxXX7qNGGpUhWRja4XLz15BVpKNnW3anRXC2ZQ0LTEMk/k8olYLuPHqALQhuqmDphPGCUIDyop2u05vMCRNQjzXZl7GGIZBkRcEvs3qqk+rYeB7BrpQcy8wPuaHzvuniXzlR/7Fvxi9roG8tPQlapkALC0tLX2J+Zmf+Rm799LT3zyYzR4gKZrbWyvi1mBOGIbMZiOSLCFPUtyazw/8je/nvgfu45d/8Ve5fW2fH/zrf5EzZ3b45X/1Kxzc6NPprNDaavOdf/YbqeKCf/9Lv8ajb70LTUluvHqLlZZPGhd0szkohReYPPTmU1y6f5PPfvI6RSrZvbBFvRngBh6VVBiGsTj4VxUKDYXAtgWV1BbbX4uUZt0iqFnM5wPqjRrTSYjruUxnBVvbq8hKp9froQmFYZlk0wIlFdiCM2fPMemNuHPjKpbl8+ib7kEIwbNPX+PcuVXyMsOwdDShI5TOsDvDsk02ds8zHU945fI1Hnn0LipSLEPg2i5TFWM4Aq/lsLpTR1USw6hoBB6r7SaXP38NpZXcc98ZJqMIx7b5whMfxzQ9vvsv/Tn+9U/+M2xTo4z7PPu5T/DOr3sXL7+4xo2rL1OVUMmS/Tsjykziew5FWRKnCZ31OucvbTEZx3zhqZdJkgRN0wmCgOz/LNlpNUnSnCxbjNn0gzqVlLQ6qziuh92z6N3s85M//k/ZPbPDN3zzH+fH/smP8Aef/BQffP8HmAzG6FoD27LQhUQBtuMtxnVaNq5bxwtqICWua7F7ehOv5jEcTDg+HBLFKbW6w/buCoP+hOFgjKY0nn36VXzfYWtzl1q9zo1b14jCKYZuIoFao4OmFMfdA2bzCXGZcv9DD7J9ap3DO0cM+xOarRatdouiWHx9nutQq9UZDgc4joVh2uRFRZoW+IHNdDZjdW2F85c2UFLj4GaP6zfuLMqeCsnqygqGYeE4Fq5tEAQmvrcoS9M1iayquULvlVTP/sg//EfHr3MoLy19yRKv9wtYWlpaWvq//NiP/ViQ7V/782I6/fOkxcVSOMYkK9Ftm6oqybKMcB7z0Jse4n/9F/+YLE75S9/9g+RZyo//zN9nPov4uz/0D7n6/G0s06S12eE7vuebuXOnx3/4t7/FSqvO1toKn/z9F/AdF9sUi0bQWUqSSEzf4Pw927z4/AHPfvYV8jim34s4Pp7Q7U2YzxPiJCEvCxSLufSWbSIMA8uyMQwT27II6jZlVlDzDWxTo0or0iThgQcexHZshK5jWzb1WkC9HuD5AY1Ok7P33MV0PqV7eIwpdNyaxVf+sTfx/v/4EY7uzOj15pQFZFmF7TgIoVGVi5p7w3QZDSYc3Ogyn8SUZclsPF9MnglzZCkQQtBecdEMSaNlU6Y55y9ewBQ64SSlKHMc32X/1oh8nvNf/t1/ptLgm/70t9PZbrKyEVBlE578+Ee49OBDPPLmt6DbJprQ2N3rsLnVIgxDsqwAITg+HpFnBaYpULIkTuaUZfVaHbuHZTk4jkuj0cDzPDQN8jwjnM+ZDcfYhsHuqR3Onr7AvWfuYXoy4qf/0U/z43/3H1ALAv7m3/t/8zXvfQ8pBb1Jn7QsCdOINM+xTBs0ge83iKKENMkYDacMR2N0A1qtOtPJlGF3yuGtAbNRhGM51IIavu+zvra+SNT6XSaTGbbl4Xt1HNcBTcNxHDy3jmE4pFnCsN9j/8YBnZUOm3tbKKGRFRnrG2usr2/gBwFlUVAVGb5rkSQhSkls00QXgjTL8Byf2zePyLKKPM/Y3u1wz33nsW0blEYYR2i6hh/YtFoe7bYPmkTXIEmK7mxWXonDrKtJUX+9Y3lp6UvZ8gZgaWlp6UuIEYYdAx40hXV+WuHsTzKk6zIc9+mPx/iNOn/ur30Pe2dO8/M/80u8+MwL/IW/9Of4in
e+nV/9N7/G5598Ctf18AOX7Qtr/Nkf+BY+95kX+cgHnsI0DL7uG8/zwrMvY+iK9Q0X19HonURESY5Xc3jowYtMJyGf+4NXKEuF4/ooAZohSJIcTZqLUY+yQEqJrgs0BRrGf78ZsHyfNEnwXJPUthiWKVmasLvdJkszhsMZ7XYDoYNhWtiGTlCPOX32FN3+kCwt2dre5PDohG/7jvfywovP8eTH/oB2q4Vu3M/6RpPdsxtIaVKUGZoOjuvSbrV58frzOLrNrRv7nL93h/07IzQR0O9N0GSG6YJtGJiWTa1mMI+mdDZbXHn2GqY3JU4UYVgwnyTkecZkMub/86//d/7K3/qrDPpHuHpGY8VhNJnzwjNPct/Db0bTK65ffpGjGydommBzu8OdG31kKhESDm6PGE2G9IdHdHtHSFnRqq+hqNB1nTiKsG2XJEup1ZuUlUTKgv54gG3bdFZWqDc8xlXJ9sYeGorJ0YBf+ul/xdl77uJdX/c1PPLGR/no736ML3z6M6iywjBNiqogL3KEruG7HrPZlN6JjWlZmKaJ7dqsrLVJ0oRas4ljOwxHQ9I0pdPpUBQFrucy6AvGkzGu45NlKbP5BCE0otmcqsrxvAY1f4OqrJhNZswmMVub61imzosvvIiQivXNHbQiQ6BwHZNCpdx970UO9o8XjcNmDU3TmU5CijynzBRlJZmnc1zPZvvUBqNBhFQSUNi2jucLhF6i6xZomrQ863JSySfKSv37/9fffd+N1zuWl5a+lC1vAJaWlpa+RCilNFOW7yavHrfq3qrmexi+hZKK2TTk4ccf5B/89N9jNkv4q3/+bxJOE37+V36e0+cu8KM/9GO88JlnsW2L2WzOqYs7fM8P/s986Hc/w8d+5ylsy+BNb79EmIV4vs6Zcy2yPEdDp7PaoNnxePRN57FtwbVXjrjr4hnuf/giugH9Xo/RIKN7nNLtjhiNxkynIUUhURI0tRhnqWkCoS/GdC4aYjXSJEfXKs6eW6WzVifLU06fWQetoqwKNA0MXVCrBUTziHgyx3N8TC/gDW99mEv3nua33v8h8mTM4fEN9veHPPmxZ6jX21x/9ZAslwjD5Nz5s9iWw0n3GN2ErJTU66tMJwnzMMQ0Ne7c6DEZRcwmKf3ulKqUmAZs7K7hNlxMwyKJSvbObVGWBasbbc6c20OGCU996rM8+uav4PLlA8b9hHa7SZHMeOWFp9jYPMvF+x/i1IU1PM9mNo2pNz1s2yAOMybjCdPJhLLIF8u9LJuyKvA8H8+ro+smSZqglFr0QdgW9Wad9c0NlIIoigFFUFv0X9SDNu3mCq16k8Nrt/n5n/pZ3v9r/5mveMdb+As/+P1s7e2SxAlpmjIPJ/R7Rxg6NOoNFCBQRPMZg26fLE/YO7WJXzfQzIx63UcIQa/XZzwes39nn3k4o6wydF0ghEZZFqRpjCYEnlujyFPSNMF2HHRdcHhwxHgypdVocGbvNLNwyosvPMPtW1cJoynX79ziqNtlMgsJgiZKKTQBrXaTjY0Oa6strl15FdMwsF2HwWDOcBii6zqmYSCEIEsz6g0fzzexHB0pVS/L1MtSclTqLJt+l5b+byxvAJaWlpa+RPz03/ubb9Rk8TWG49w1TgozyivW1lqM5gnf9t1/nI3dLX7xn/0ST37s43z9t72XP/O9f4YPfeDD/P7vPslk0KXutUlzyeNvfyN/8a/9BT76oU/xyjO3sAyTBx8/zZlzGwy6B9iWIEtKdHSyRCFVjl/zKcqK25cPWV2toxs6l5+7ydGdYxzfZr6SYZiCZrNGs9XEMA3KIscwdISho5REEwLdMCjznCIvyLKCZtPBMQVJliF0ietYrK036HXH+F7A2nqLIoPhcEKaZgR1HyU1dMfkO77rvXzgv36YIioBDcswOTq4Q62uoZTi9z74GTY3N6i3PPbOnKffHTHojbBdg87qGkJ48FpSYpkCJTRM3cQ0BXFYEM+nbG6usrXXZuvcCrqCUX/MG9/2Zs7d3yeezPA8g7KqePm5Z7h43z08/pXv4sO/+X7OX9qi1ba59uJ1JqOYt779XVRlRZ4+Q15Ijo7HKEMnLebEUUhZFjQbLRzHo1ZvUOaLRWauG2BYBrZjE0YRSZQgpc3G5jpFXmAaFkWRk6Qxpmlz6vQZrl17FS2RmIaJbStMw+bOC1f5+Ss/w5vf8Q7+9J/7bl56/iWe+L2PMB710ZQAqfCDOnpREPirrDdX6Xa7xGHCyy++yt6ZDU6d3WQ8iMjzOvN5SJ7lrK6u8sqrl4niKZblkeUJhi6QQmc0HbDa3sC2bITQCMMZ66vrIEzGgwl1z8Ov1zh96iLdkyPSbMZkKtBNG1VKbt+6xcbqDhqCbneE0E0mgxGmZeLaHndu7rO6sU6aFuRJitAEvmfRbvvUaiZFkVE3PDQlidK8V6A/7dzY+j/+yi9+X/F6x/LS0pe6ZQKwtLS09DpTSmk/9bd+6G3VPP/ueVi8LbJ0VzdNCqnj+y7f/M1fx+UXXuLv//CPMx5M+Zs/+jd48PFH+Cfv+wluX7tDu96h3egQpQWPvu1xvv+v/nk+9rtP0r11RM0CUbN4+A3nuXHlOu2mjVKCcb9PrRZwfDihsepx6sIm11894eRwRhQm5KkiClOEIdCFTp7lzKY5Z861sSyBUhLTMtF1HalpFGWJY9toaIuVADq4vo0qS/JkUbLhBy7hJCSOUwLfZeVUHdC5daPL1uYqk+kMqRSWZ/POr3sjh4d9Xnn2Ouurq9TrNbI0Yzw+4au++b18/gvPcOvVW8xHOe/4+sexXZeXnnmZcXeCH1j4QZMkKxFC4fsW8zDG8U0kEs1QNFc8ylRSSoUwHVqdBkKVCFPj5GTA3Y/exyc/8gkCw1kszFEVv/+h3+Xb/qfv4s7tK1x9/hXuvmeDZJ7y1BPPMu6GvPM976a1eY4ku4EYRPS7A2rNNYpKopcFtZpJWSlcN6AoKuIoopAVOia+H+A6HrEbM49CxuMRa6trhGFEkiQ4rk0Sx/ROTljpdIjjiCwJKYscITRc10UqyZO/93s8//SzvOe97+F7/tL38pu/9n6O7+zj2AFoAiMr2M9ygsDDcRxM08YwDG5cPWY0nPLwI5ewbRt7YNHrDimrkvW1Da5fn4Cs0LXFpKeyLLEMnel0yNrKBnme0mytcHR8RKu9gaY5jMdTojhibaOD7ztkaUZv0CcJI7IsIksjlBKsdDao1QOmoxlVBbPBFCklfq3OaDhb9LLU6iTxDF1As+7TXvMIAhPL0tEUZZxWx3klL/8vy8P/0tL/I8sSoKWlpaXX2Y//0A/uZXHyffN5+e4oNdbiVGc4K9m4cJq3fd27+MiHPskv/8tfx6+1+Ymf+wc0Oh3++g/8EFeef4Xa/7kMCsGjb3uUv/AD382HP/D79O8cIyqoNx2+5o+/EVlEBC7IvGQ2mWNaBkZgsLbbZPvUOgf7PV554TZPf+Zl8ljhuRaaVuG4Nn7Nw7J0Tp/dxAssKgm6roNSZFlGluUITVuMBZUSwzKxXRddCKqqRDeh3QkYDqb4vk2zabGzVwdR4
PkaZRVTa9js7W2g6yY7Z9bYOtXhEx97ika9Qb3VpN5o4Fg2G9ubPPrww3z8w58gnk3I8ikX7tlDSnj18jUMoXF8NMPza4RhiKYWryFLSqpSI4krorBEaaCZiigpMMwAzRDU2j6OYzHsd1nf6rCxu85kGmEYJoYu6B8d8elPfJK3vf2ryUuD7mDO5ukWlaq48syr/If//T+ytbNLo72DLASe5VKpnHZ7jVZ7Hdup4Tg+Qtfxax6aLmi16/iBRxzFpGmK7Th4rotSitFohJQVhi4os5LVlVV0Q2M47LO2tkG71aZZb+I4Hpqho6FRc+tMhn1+5Zd+id//8O/xx7/tvbzlq99JGEdEYfRaGY9ASihLiWEYaIbO3feepaxyPvPJF8mzkqLMSJKY+WxGHKU0GysYholpmNT8OrowkBWUZcE8nJIXKWWR02i1sW2bqoLJOKTeaJBkCV5Qo15vsrmxg+/XMSwXTTdQmkZeFmhImo36YoOxX6PZ7qDpGqahoyqJbhg0GnVW1joIy0QJsD1BkhfysB9+bhqp/4xZf/H1juWlpT8slgnA0tLS0utEKbSf/bG/fcnT7e9ME+1Ns7jansQpkyjirV/3FTz4+EN85Lee4ODaEY99xaP82E/8KE8//RL/8O/8fSa9PlWZcXxyQH9wwhve+ijf8/1/ho9/5Eny6ZDtjRqdTZ/Td23RaFgcXjsACZrQF09yDYv5KMVxDF545gafeuJlbl07QEnFeDCmSBWO69JqN1nbXMUNHIK6i+faaBpo2mKRvAYgK4qioCxLlFLo+qLO3TBN/MDF802qKiWLSyzTIEnSRblMUlGrWZw5u7UoiakyLEfyhjfey6c+8QWaNYf2agPXdqnVajiey1e9551MTiZkkwo0g7Pnt6jV6kwGEU9/7lksV2cyC/GDGif7A473p4xGITevHTPshgy6MfNJjiEEtuPTH4wxLY0sl6SlRqtTI57N6B73uXj/BYRhcvvmkDQpaTU8Xn7+GZKi4G1f9cd4+XKXybxi79wGUZYwGYz4lX/zHzl76QK7Z88jCw3HqJFlBZ4f4NcCbHuxMMy2XZQGtbrH6lqber3OZDJmMhnhOi5IiMKINErI05T5bMpoNGR1dZ2g3kAXgma9SSNo0q53aPpNdMMkLVIsy8YydF74/NP821/6Vda2N3nv//RezLrDSfeAPFtMO6rVPUxbx7Z1bt28zdb2Hhs7m9y+c4QsNbIspT/oYpqCSirSNMM0TTQEpmFiGDpllZHlCbVamyRZ3EbohsIwBJPpjOF4QnulQyELptGUJEtx3Dqe16RRW6EZNPFtF1MXhOGcSircwMN0LHR98V5Z3DhkCH3xnjMNjU5rMcHKMY3Kc92eVGoQhuHy6f/S0v9DywRgaWlp6XXyU3/7r7xhPJr+7W7v/8ven8danuZ3nef7+T2/fTn73W/skZGRe2Zl7eWCapcNxmVsKDA0+9IMbqCZ1ox7Zhi1RMndPeqhRyMkmoa2MVMGe7BdtjHglSkvZdeSVZlVuS+xR9wbdz/7+e3rM3+cbP4GiSSVrfP6K6RQSOfeOI/0+z6/7/f7yf7aaFpeWEQppu/yx/7iD+G4AT/3xV/j1lsPee4Tz/D5P/05/tlP/hQ////5OSQNgopG1dBInnj+Kf7Kj/x5Xv3mS/h2xua2g9MWXL6+xpPPnOP+nbtUZc1klHF2HBMvGo4O5qRxyf1bxzy4PSRLS9xWwNbuFu12gGbAYLPD+s46eZHT6boELZcwStENE6FpVFVDVdUURUmRZTT1Mo1WEwJdN3FcB2kY6IZEouPaEssCy9JotS3aHYs0zbEdidSh1XX5/s9/hpdefJnZ0ZS1QUArsAkCB9e2WdvZ5JOf/hAvvfAiW5sbSK3h2Y89TZpXPHxwyNnh0fJ2vR3Q629w58Y+J0dT5tOM2XjG3Zt7VFnFq9+5QZkr8rzCtm1OTw6wbZ9bbx2hhKTTa/HW63eANpZnU6ual7/9gMk0Iw5jvvzrv84zH32KzXPb3Hj1iKbRaA3aCCAPY37qx3+aT3/vR3jsw08QpwW24xLHKfN5tLx9V4q6rpYrLx/skUQRYRRimSZlkZPGMY5h0u10aPe6bO9eoN3p0jQ1WVbgex2EJjAMg0F/DV3XkZok8Fq4josmQAodS7eIx0N+6Z//LPfv7fNDf/aH+NAnP8JiMSWaTxienuG7Ns8/f41nnr9OXtS0ux2ClseDvftYtsv6xgZVU9HptkjSmKau0HWJEALV1PheQFkUpEmCbds0dc14OCRJQzzPpK5qRsMZtmOxub1BrXKKLMF3PDzbR5cSzzMpy5q6EjSNoiwrirwEBYZpoukamhS0AhvPM9EtgWXpOLaN60q50TMHG32n9/bbb9fv95leWfmgWM0ArKysrLwP/l8/+qNeXRV/MFxkT5yNi8vjWczFRy7yw3/hB3nz9Vu89I03qJqGz/3J76HbC/gf/+7/xN1bd6nrhAZFUeVUteBDH3+O/+OP/i1++9/+Nl1f4PoWTaXIyortnQGLyYzFLEVKg4aaaB5TFRqOb9Ae+Lz6xh5NBUoJtnYHGOjMxxF1o2h3fOI0Zmu3gy41Xn7hBls769BI6rrA963l8K/Q0HQDBSgBjarRECB0DMtE0DBLYvy2iWrAtk2iqERKyNIcyzDwAo0PfeIJptMF99/ep9/rkKYZlqnTODVlCc89d400SpiOE0zHYOfiDtvnz2GbDm/deJ2qrhCGoDtoowvF/t4edV2ShDlCKdIkYjqZES9SXnvpFjsXL3LuSoeXX3obU+rUheL+nQm61nByOOP6Ezlx3IBmokm4c+uMwVqHNDnm2996ne/5I9/DP/gf/yFSUximgWEamNSMR3N++sd/lj/3I3+OSsFr33gZ17PJi4TJfMH6Rp+mhqAVoFRNXtTLUDWlyJKE+XREq9XC89p0+gPKquLSlcsURc5iEZLEOW0/oFaKoNWl3RswGp0wn03REKzZGzQNzOYTtGJOU5S8/LsvMD4847N/+FNcu3aJr/3ON4ijlNPjKZomePy5i5y/sMlbbzyg2/UInnmMO7cfoGod0/QwLcmjjz7O8PiULA8pqwpdmliGg6l7ZHkEKJZBZm3KpsEUijxJmIxGnJ4Idna26fUGDKtT4niO57VocqincwbrfUxNJ00y8jTD912UUlRNQ1lWJHFOaEi6Aw8v0BGywpAGTSmomzLQdNQv/MIvrAqAlZV/T6s3ACsrKyvvA3+gr01H88+cHs0eaxqNx597hM//+R/gy7/5Vf71v/gNhK7x5/76DxNFIf/Df/v3uHPjHcosJA5nJHlMU2s885Hn+b/+d/8N/+oXfoOz/VPabYdonpOHFVK3qWvBnXeOoBYkSYplaWhCEsYpvV7A17/yCrNpSJblIGpmoxDTkFy4usa5SwGDdYte32M8XHDn5gFCGeRpzdnxhEG/h+042LaN49goWLYBZRlZnFDmGZom0A2NWjUIKWh3PbI8x/Z02l0XoQkQAk0InvvI45i65PDOQ1ptn/FwBkqhaGi1Xby2zzPPPcqLX38Z3/MIWg4f
/fRzFEVB2/V487W3sQMHpcGVRy4SLebMJ1Ns1yBLc/KsRBOwmM3p9/rcu73HzbdvoZsNZycTbr29h9B1RsMFk1HK8CTi1W+/xfUnrnDn7gMMw+bk6Iy7d/eIFwm/9StfZvv8Dk899yzRvKAqFU2j3r2pDlhM5nzpZ36R7/uBP8i1Jy8zGk545NFLDNZaVEVBkqWYlont2GiajpQ6RZbRaQU4rkP57qrNxXRGOJtz453XWcwWdNtd2h0PpViu+QzneK7D4088wfbOLltbWwSeh21Kdrd3uHrxSXa3r+K4ksVwwq/+wm/wcO8Bf/xP/WE++dlnEabi6HDCC7/7FovRgqefOU+tFIeHQ7Y2Nwg8F01oHB4cggLPD/D9ZWZAVafkZYptOQRBB0O3qKoCITSSOCaJM9IkRdckru0wm82ghvXBBpqAosioVU2UZBwfn2A7Jk5go1RFkaWYtoHrLr9frbaHYRkIXWDbJr7rIkWDRlMpZKRqNfh7f+//ErzPx3pl5QNjVQCsrKys/Cf0pS99Sf73//Xf+NPxOP0L1PLaZB65567v8rk/+Uf45Z/9db7xlVfYvLrFX/hbf5KXX/42/+wf/XPmZ0NMqVMWy35oic3Vp5/l//bf/7d87Xde5vD+GVtbfZpakS4Ux6cLLlxe56UX3mY+y0jTCqEE09OUIqu4fHWbd97c4+ThAs+20YVBnTZoaORFielIfN9jf2/K4f4M17fY3F6jagRIxaXLW0CNrhuYlrW8qc1L8jgjnkeUWUZTV9RlTl03CCFxPZM0yUFpuL6F6SgMU5LGFd2NLt21DrfeuIFlamxsdpaDzXmBlIKmqXn6o1fRNI3jvVNavofr2Dz17OM4lkM0mbIYR7iOjWUaXHnkMscHJzR5TbvtIaVGlmcYlkSTEmlKWu0OUbSgqlPiecJ0nBFlKQcHE/KyYT5b8J1vvczmzoBW1yHLMxCK6WTKYhZydnjCV3/363z/n/h+hC5RtUJVDZ7n4gYOjm8xOzrlX/3iL/JD//n3c/HaRW7fvMfO1hpJkqEpRVmmnDu/jaYpmqZClxpRFNHrrNFprREEbaIoxHMc1vsD5tMJ49Ep7U6LrXNbrG+sMZuO2Htwj3bX4NHrF6ibirwosWyLyWRIEoa0Wl36/U2krmNoOje/8w6//Ztfp73W4bs/9zy7lwekecUrrzzg4GDERz52lSef2WU2mzAeDynLHE0I9vYeoOkalmXiuQGBN6CuGuo6pyxzqqYABVKAqhRJGGFInSRJaJoGTQmqsmQ+nZHGGeF0SpkleJZBmVWMRyPabZet3XWKqiYME/K8XO79z3Ns18T3TTQFcZhj6DqmJQ+aRn27qfQvJ4mbv9/ne2Xlg2JVAKysrKz8J/KFL3zB3H/lm3+0KZrP7d87+0tpml/87h/4NJ/9gU/zUz/xc9x8Z58Pf9eT/B/+1p/jl3/23/DL/99fZTI/pRY10rDZ2DzPzu51Lj/+JH/nf/g7fP33v829tx7w9FMXaPWWt8ZJlHH+0U3CMMLSJaYjkSaoWidNKmzX4XQ45eHBCN2QlGVDVVV4noPvmXiezWSY8M2v3mI4nPHIY1v0ei2mkwhUhWtJJpMZhmkidUGWp2RpRjyPiRcxWZq92wqkqBuFagSO4yJ1SZ6XOK6BaiRKgdcy2NxpcfXaBR7ef4BqStDUco1nYAIKaUiUBtefvMw7r97AsgykLljb7tPqttnc6PHKS69iSw3XsnBMnfMXt7l94zaWrqNrFVJCp90CKrzAxfFMLMdgfaNPXVYc7Z2RRhFSChaTOePTCVLC6eExt2/d5+Pf9SxRtCBo+fh+QNU0WKbBN37nBVr9gCc//CTxIiKLM4qqpK4KyqxASp3JwZAv/cwv8lf+5l9isL7F+DRCaA1nJ8cUWcX+/iGmaSJ1A7/VwnF9xuMRRZnitwO2drfJq4JWp8PVa+eJ4jlpEjOdTJBScvnyRRQNX//9b7OYR3zmD36Kc+e2qBtw/QChVcympzimz+baBtvbO6zvnCOe57zwW69z//YpTzx/hY/+Z4+yfXGNo6M5t985pdPp8NxHnuCpDz3OoN/F9wJc1yFOQjShEXgtTNPE93x03SLPM6azIUKTZEWB5wc4jk1ZVbieT101jMdj4iRkY3OTdqdDWeXE0ZyiTOl1OoTzmP37h5RlyflL2+i6tpw1UA261BGaWn5HHQNdNNR12TR1c5hX6mv/5f/577z+Yz/2Y8X7fMRXVj4wVgXAysrKyn8ifhVtzcfTzx0dTj7x8DS8dOXDTxnPfuoj/PRP/BIH98/4ns99mh/4/B/mJ//RT/Od33uVluWhNwZpmi0fOi2Pzto6/9X//b/km199ka/8m9/HsXW2dz1aXZPZJEezNHq9Nu+88hDRNLi+gS4NqroGKblz74jf/52XOHi4h9AaijQmSyPKogCliJMMocGV69tcu36es5MpRbaM+7UdEyEFQctf3tAXJU1RkUYJdbVsv9Z0jbwoKPOSKi8xpIFpGNSqwXIMvJZDmiRomqTdDXj6+ce4c/se4Syk1wuWGQKaxvZ2H8s0SBcRl6+eR1MNi+kUzzOxTZ1HH78CQqEbJnffeQfbkriGxHU9AtflcG+fTruNITXqrMSQJoZu0jQF3Z6H69lceeoSeVaiKkUchyTTGNOQzGchhm7gWi4v/N63eOLZxwjabQxdx/UcbEfi2gaqLvjmCy/x2T/83SAEeZRRZiVlUaKaZUCXqRu88+03+NV/+Wv89f/TX8MMWujSwjJtFvMQQ9eIohghNbKyxHVdLl66gG6aZGmGaZmcu7hNWqQgJZeuXMB2DK5eO0+aRQRBh+vXH+P8uQvs3z/k7u277O7scP78RWzDoSorhCbI85RFOCfPK/ygy7mLF+kELd5++S5f+603GQ9Ttnb7PP2hSxie5Gg05/7BhBs37xNFIVcuX6bdDogXIapWCM0gCHpIoVOXJVLTKauCyfwEJSBJQrI8+3erYTU0WkGHuoE4Teh0ujiOiyYFujSp6hqpSwQao9Mpx4enmFLHcw26HQdNgm5oSF2jaWo831wOH2dVLCpuCSHU+3y8V1Y+UFYFwMrKysp/Av/vL3yhV+f598WL7MrRcH7xY9/3Se2RZx/nZ/7Jr3B8MOcH//Pv5cnnrvJP/pefZ3gS4/oebqfD9s5lzm/u4po2ZZPzV/7Wn+bhvYf87q9+lUG3x+Z6C9uTVEVFFGZcfGSbB7dPSBeKPK6pC0WelmhCY76IyfOSsoiRQpEWGUVZ0eu26HQDojgji2pcy8DWDQ7vntIUiqIo6HZdLEdS1oLFPGY2n1MrhWZILM/C8k0qpZjPMxbzhPl0TpokSKlRliWqaWh3PExTIg0NqWns7p4nSiKKPIFmuRnHcSxUDY5nsL4RoBsal67scOv1m/i+RVPVmJbk0tULSKlztL+PFA2WJdEEtNvrxLOEcDIHAbZtYToCtJpur0NZ5limhm2ZtNoOs1FEvx/Q67UJFyGGLqmqiqqu6PY6TE7npHHO088vA7J0qaNLSa1K2oHJ2y+9jN/ucPmxR8jSGKE
pTFOnzHOqqqBqajRN8fJXX+KFr32dP/s3Ps/G9jaO42DbMJ+NCTwPU9dot9vMwgmNaPADjyLPkVJQVBnrGx10XSLf3a7keQ6PPXGJ2WJKkmRsbAy4cuUyqhYcHp6gmpput0e3s05TC+JkQZIk5HmOEArDMumt9bh4eZvZeMr9W0e8+I0bfO13X2c+igg8i0cf3ebS5V00wyIpE3Z2L+C4AYtoQZIm5HmBEoJGNHS6A3Z3rqBrJov5mDSZk2Yh08WIpi4pipw8z2maGiGhVhWa1FFKLGdQUAghiONlgVEXy7Axz7WwbQPbMQkXEUVegVoGz+maTKqSYSHoK6XE+33GV1Y+SFYFwMrKysp/Ao5tK8q6Nx6H1z79uT+gP/rUFb74j77EcDjlh//a59g8v87//Pe/yOxsgkFDXTUoNDTborW2zsa5Hf7mf/NfUJc5P/dPfont9TUef3abS490KOua4Wm4DJeyBLdvHlNUJWnWcHy4YG9vyGyaMp/MmAyHJFGKYVhsrq/TbntEUcR4PEFqEtuFyXjOG6/coy4aVA2WrmNIndk4Z+/OsnWl2+5gWya2beH7HlXTkKQpk2HMw3sThsdzNAF5kZIkKYZugQCpa0hd0u71iNOU05MjOl0Xw9JZPgRW+L5OXlYUdcGjzz1CHGfs3zmirhpsy8L1baSp0W63uPX2DdpdF9PQ8fot1ta3uXv7HlLTcHyduinQDR3HsyirHF3XaVSDYWqYQjA8mVM1FYHvITWBaUqgQZcSP3BouQEvf+sNPvrpp+mvdSjzCE2rlzv0TYN4OufV73yHT373pzBsjXgRURUVVVGSRnNc1yYrEjodn69++Wu8/eZb/PG//IOs7W4jhGTQG7CYz+i0fHTN4OLlR8iKijRNOTk5ZP/BfXRNZz5LsSwT29ZZzKeMRxPSuGB3dx3XMcjzjCzPaRT4joNjGWxtr9PpBfR6XUzLoaoLjk/2SZIply71cFsGlRI89tRlbAuyMKNKYTrMOL434fYbd4nnMYahAxrr22s8+aGn6A0GOI6J7dr0Buv4Xo+yrBBomNIkT0PCcIFCw/NbFFWOUjVSU6i6YjoZ0moHrK2tEwQBShVMJickyYIwnJHlMVWVo+tgmgLT0iiKlE43wLYkuiEQAiqlSpSai6Kyf+InfmK11XBl5T/AqgBYWVlZeY/9wy/8TX9x8vAvnD4c/dGPf/enNq4/cY0v/oN/yWQY82f+2g/iBQE/9Y//JSpVaKomi5cPXXm4YH5yyPHBQ777+z6B43j8g//nTxI4AesbA649ukOr71EWNWGYs3NhnXs3TggXOVGSMppOGI/mmLrD7Tv3SfOC05MhpuGSpiV379whTVN0Xccw5bvDnpKmAt/zKMuSOMqYjGIe3DlhMcvZ3ukzWPM5OztlNp2RxClVWaMpjSxumJzF5EmN47jMFglpklAVOU3T0DQKNA3H8/BbLQ72D6AR6LpGoxoUAqUUpq1jWQZlo7hy7So33rmDoRvMxnNsS7B7+QJVA6qBxeQUGuit+bT7Hls7m7z9xts0Wo3v2wQtnzROsQyTwdqAJE4xTcn27hpnD8fEi5iqKNE0hWVLTNOgejeZFmCw0eHo4QmGJbl0bRNL17Ftnf5Gn6JsqMqKF3/vG5y/uM3m7g7ZouD48ID5fMLZ2QlxuMA0TIqqwNZ0/u3P/f9YTBf8oR/6Q5i2h2WabGwMKIsaXdaMzk4JvGWbUFWWnJ0cc/f2A1zPYTiaoekGg7U1FouMs6MZhw9OqcuSZJGgmuVWpTAMmU1naFLw5JPXuXT5PL1eH103KIqc1197na/8ztfodm1sF+7eOaIddLh8aRPHlYTRlDzPsA2feJ5x9PCQu7du8s4bN8jzFE0Hx7ExTROBoNPuoVRDGE6xXBvL9vE8n35vQFnWNI0iTfN33wRBEqacHY9RjQ5quZlK15cFIECWppRlQZk3jEcxs3FCmTfQNEipMEyBaRlI3cyQ5jup5n/lR37kR1YhYCsr/wFWBcDKysrKe0uo0rw+Oj753ic+/vRzO4+e17/4j3+BKM74K3/7TxNnKT/747+ML20s00QaFm4roGpKptMhw7MTPvHZ57n22CP80//lZ7l08SrPfuwxdnZb5HlGtEixdIt2N6BUNeEsxbYMLMdEKcloOObuvTs4rkeeF9iWTqfr0+112d7aYXdnjf6gTb/fw/FMHtzbYzgcEYUhUhMELZu8KrA8m2tPrtNbbzGaRNhOm1a7R7c3wLQsdEPDdgw6gwDbs7lz64TRaUS0yKiqmqapUEBVN/QHa5wcn0KjkFKjapYtKULT0A0D9e4mmd3z58mThGQR4ng6RVqi6ZLdR87j+1327z7AkBqW7WDZko3dDSzH4vjwFKUL0jSjP2iDqkmzCNe16bR71HXNxnabB/f2KLIcIQSu62LoGqZp4LoWeRqTpQmtroVtabz16k12r2zjdVp4XkBZFsxnC9Ik597NexweHPChTzxPVTdMp1Om0wlN3fDg3n00pTE6HpFGMUVS8vP/5Ofp9Tp88jOfYrbIcX0Tx5ds7QyIowk3brxDWZWsb2wAiul4SBRGDAY9oijBsA3aPRfbM3Bci3ARLVewljl5WaCEJM1Kbt64zd7eIf3+gGefe5qNjU1c10MIxenxhC//xou4js3169sMh0ecng7p9nq0Wy3KPAPZ0Ov36HV6qKognIzJwoJet8/p2TFJHFJVGZPpGXW1TIIuypxudwDAfDFla3MT03Lw/BZV3ZCmCbPZhOn0lNlsyHw+IwwjRCPQNJ1Op0+n08PzAlzPpq4rqqqk3fFxHQtVC8oC8lxRN6RF0xz/6I/+aPq+nvCVlQ+gVQGwsrKy8h5SSnFyMn7s2U88/ez2tQv2T/2vv8hiFvKX//YfI01j/vXP/AYmGuLdMCnXd0izmCxOaRrFZ//oZ/ijf+L7+Md//yfJw4QLlze4cHFAt+fgejbTUcytWyfsXNng6HCINMAJdEajBVIXbGytMRqP2du7x5uvvcrx6SHD0TH9fgvL0gnDkqJcDgjbro8X+KxvrjFY76MbJlII+r2ATs8hTXMUDecubDJY7xB0WghNo1E1hqHT1IpwlnNyMCdPa1Aas3FKntXkWUESp8sd+dMF88kM1TToUqMoGkxTR7cktm+CANOzOH9pi1defBOBREiBJk38Xh/NELiOzf7du9DoGLagLGF75wJZnqO0mnbgEMUJSVpgOAZ1tZxBsGwT03bJygKERasdUFYVSRITBAFVVaFLnflsxGh4BELgey533rlPp9dm88IaRVUzOpmTRhlZklJXBS99/UWe+PB1agosy2EwWKfX7VFVDQ0aRVkQJzFFlbKYzvjp//WneP5Tz3Ht6WscHc7QpYmQGk8+8wxNXXJ6vL9cCdpfw3N9Dh4cEi7muI5FGmd0uy0Gmx2koeO1WxjvtuRURQmiYX19gO/7jM5GvPjNlxiPh2ztbNLudNnYOI9lWxR5xte/8gr3bx9w9coFqqrkrTdvEM4STNMhSzN836Td8el01wmjkCSc47su21vbCFWzu7NDEPigFF
LTGI1POBvto5uSNI45OtpHt3SkqTNY20BoksBvEccRSRYyWOtj2yaNqpcDw6pGGgLLWRax7ZaD75ioqmE8XBDOC/K0Js8U4SJpG7rYfL/P+MrKB9GqAFhZWVl5Dwkh1Ic/9eSz565d2v5nP/7LjM4W/NX/6s+xmIX8i5/4RTzLRKmKNI5Jkxjb0jl/fhtdN7hw7RI/+MM/xBf/4ZdIpiXPPfcUFy72sR1JVTUc3B8yHeZohsH4LOLezRGaZlDXEPguZVnz8OERuzsXaWpYW99gY2Obfm+DNMpYWxvQ6bp4rolt6ZiGxLFtZrM5jm+iWzrTaUg4S6HUaPkO3b5Dq+2i65KyKCnKElgOctquhRdYSKlR5DXDowVFUZNXFXFcIjWLLMm5c2OfMquRukZRVsynKXXNcgOQ0BA6XL1+mdHphHC+XM9ZVwrTkly4eg7VwPDslHg+RwN0KUDTuHjxIvPZEJoa1zER6GR5Rn+tjUAu99e3TExTsH93iKoF/X6LS5e2aOqawHcRWo3nmlRFRjRbMDmdowtBPE64e2OP609dYzELKcsSz3cpixTb0XjnjXfQbR3DMYkXEY5rods2/cEaVZVjOzZhFFJVBbZl8PDefX7pZ36Bz33++9BNiwe391nMFtiWw87mOVASqZlEYYSmgWFIbrx5k5PDIaOTOQ8fDKFZzkyUeU5ZLDtgDEdwNjxlPp+zsb7J1vYurXabt9+8wYN7D9ja2qbf3yDwu6RJiAAW84Ib7xyxu73DM09fZjw5JUkSTN1A6DXnr2zxxNOPcfHSZbI8ZXh6iqmb+F6b2WzBYLDO9u4F1jfPsdbfJApDRqMTiiIhXsw4eviA2eSMxXxKUWYsFnN8vw0Izs6GAGhSYpommibRpYHUNMoip65LDNMgSytUs1wvW1U1KIVANDTi6j/+B//dM+/T8V5Z+cBaFQArKysr76Hf/c1/fnX73ObnfuVnfk2Lxgl/8W/+CWbhlJ/74r+mKUrKqkQagjzPGI9Oef3Vb/Odl76D1dL5L/7Wn+DLv/EVju6P6W2so9sGjmvgt22kKUmihr29Mzp9j8OHZ9QFDE8WCDQG/TZlWQI6s9mcOI7wPB/H9rAtF02zODkdYts6GoLpcEqZZQgFAoGUkrquSaIc39VpeQaG1mAZOmWRM51MicKQIi9o6gbbNtAN0E3w2y6O4yClQZlrjIcpcZShasXd2wckaQFCkKQFi1nJYpYxm0akaUWalDiBy9rGGrffeYDQNAxLpyxrDNeit9Gj3erw1ivvUKQFmglCatgtj7XNNYanh+RJgoaiqhrKssZ2TDQpKcuKwbqP1DUmZ3OqIiPPCgZrHWzHJo4TBCVJHCJ1Sbc3II4yHu4dk6cZb718i3MXzuF4zjLFGIjjiFYnIIpnvPP2XZ742HOcDYecnY3IixqhGei6SV3X2LZNkiSMJ2Na7RbffuE7vPiNb/FH/9T3UjYNhw9HNCVkeUxeptRNTuAF7O8/IEkXtFo+h/uHHOw/4PTohMODMYZukGYJh0eHRHHIud1zXLp0kdl8xPHxIVIKtnd2CII281nIjRvvMJsNCYIWg8EmUbSgKHKEgOOzMV7X5+Offo40LxiN56hGo9N1OHepw6OPn8PzA5I0oyhqZuGM8eSMg8M9FospnXaL7c1zbK5voQlBmiUAWKaJ0ASKZjlbUhVE0QLLsOh1e3iev9ywpMvl90aTSE3Ddx1oNOazGE2D9S2f3mDZLuUHBq22dVajvSSVOQZWW4BWVv4DrAqAlZWVlffQYhJ+/Mu/9HvrBw9n/Im/+oOA4F/8k39JYPv4QUCW5URRSpbnVKphNp0xX4z5U3/++3jztZvcffsBVx49x/UntukOTObThHiR0VSKxSKm03exTYPx8QLHNGlqsGzJeDRFQ9LttJgtRqRFzGQ6pm5K/JaLE9h4rYAoTpjNYk5PhywWc4QulkmsRUayyNCQqIbl/IBtMTyacf/2EdEiIc9SdAm2ZSA1HU3TMB0dIRuSLCaJC+7fHTM8TKhyyfHRjCQB3TDI85LZKCOalyRhyXycs5jn5EXNzrkL7N07JA4z4rBEAHleIkwb27PRtYrD+/fJsoKiguksZ/vCefIiJ5xPEEJRVDXUFU1RU+QNtmdRFiV+y0PTdLI0xXMdatVgWFBVGVEcU+QVcVyAZhJ0O2RZwvBsRN0UnB2PWIznPPr0FcIopswKDNMGNPrdDi9/61We/eiH6a2vk6UlaZIRpxmW7dLp9+itD3D8gDCKiaKEC+cv8Pu/9TXcls3H/rOP0SCYzuaYpsnR2Qn7hw9QCPr9Nc7OTtGkpNvroukaZVUwn4bsPzxhEYYITXDz5g1u3bhPv9/nqWeeJMsTHjy4S6Mqzl3YpdvroGmCk9MDHh7cRYmaK49eQRiSOMtB07l3/wxp2lx+5AKGLnnztZvcv3XCbJwilM3zH36K85d3EFKQJSlnwxNOTh/yYO8mx8cPGY5PGY5PsSwbx/Zw3y06pdQJo4gkjvE8DyEEi3DBZDrENE2CoIWUEiEEjmPjOA40Csd1cFyb3sCn03NwXZtW28e0BJZtn9a1uPnX/+u/c8D/NkG8srLy72VVAKysrKy8R378x/+6cef1t3eO9075nj/+WeyWw8//1K/SlDUKge062LaBRC3TY/MCTRr82b/yeUzT5Mu/8g02N/s8+dQu5zYDdrYCLAOSacn0LCUMS7Z3exzsjUnDhtFwjGEqet0OaVoxnyeUVUld10hNw/NcyrJksYhwHItHrp/DDzxsx8DzfObzAk3TMHSd6WjGw719xsMRp8czNCE5PZ5zcH/KfJywmM3QYLmBJo7J04K6UmhC4LcMLl3boNFqojiiLmuODxfcvXlGnuZURUk8z4nmOfNxRJZUTMcZeVrj+Q6OZfHmq3dJooqyqInCjKwo6K75DNba3Llxj/loSl0r4ihnfBpz6dJVHu4/JE0SLNtBaOC6BuFswXS8QBOgFBRlSVVWVHmDJjWCwMV2JKBh6gbRIseQJlLotFsBnuMiNY3pZMJiMuLN197kkeuXMQyJahQXz+8yHc1QlcbJg326nTabu9tIqTObjLBME03XMV2PdreHH7QwDIPFYkGW5fR66/zmv/kan/ne7yLoBsynCzyvzfbmRdI0ZTIfs719Ec8JGA6HIDV0w0ZIHa9lkxU5ujTQdYt2a8DZ2SGnZycoTM5fuEyel9y7cwupNVx/7Dr93jqGYXA2PGRv/z77D/fY2u7T8h0O9x5QxRl7dw8ZjybUdUFTV5wcnHJ6NOLg4TF7eyd0W322Nra49shjrA02SZKIOIu4s3+Tw5N7qKZCNQ1Cg8ViQV1XrK31WV9bIy9SpBQ4tsugP8BxXKJ4QZYtMyMsyyTLUibjKVGc0qicdtvEcSSoBiGWeRF5UVCWWceUOO/3OV9Z+SBaFQArKysr75FitrVGTe+zn/8ea+vKZX7yf/45qqzEdhyyNGE0HDKdzjgbnTEej5iMhjzzqSd57EMf4Z/++C9jmw5NqYgXOV7HxmnbmLZBUTaEYc7azjLA6/D+hEZVBEGAY
9mcHJ+B0KjqiihM8Jxlwm4UheimgaYJmqoinM8JFylB22f34iYXLm+hCbh7a5/JcA6qYhHOMQzJ6HTB8HiKoQsCz14GTu2NOT4YksYFi0VKXVWggevraLqG13K5dn0HIWtuvnOf+TQhWpRMxylnp3Omw5AkzIjCmDiKKcqS8xfPcePNu4STlDhMaMqCyThCkxaPPHaNPKl58zvvIDAoy4r5LKYRks2tTW69cxNVq2UAmG/R32hT14o8rgjnyfJzzlKKPKOuSlRTk2U5SVxTFTWqrrFMG9d1yPOcLElpt1vYroOuSc6OD7l38wGmqbG20Ue3zGXBJHVGZ0OKuGExXfDhT34I3ZQopRidjrBNC1UrlIJWu8Pm9jZK1ITRAhrFYrzg5Zff5M/81R9Gtx2aRmHrkpYXkCQL0jSh3emxCCeUZYHvB9RVhWoUrVZAksToUsP1PDzf5+DggOlkgmmaPPLINcpSsb9/QJrmbG/v0O2u4bo+RZ4xOjvh977yO5yNTtCkxt07d5mPZmxtDsiLjMHaOrP5guOjM0zDQgqD2XiB67lsbK1zbucS3c4GpmGDgjSNyYsUlKKuC2zHplEN+w8eIJRgfW0H0/QAyPIcx3YoipL5YkFRLPv9+/0OmqZQAjRhohoN09RoGoWiQTclaBZZVm5A/n3/9J/+P9be56O+svKBsyoAVlZWVt4ji9m0fvzDT+9sXr7s/Isv/muyuCBOM+q6oqGhaWriKCLLE9Ik4/Jjl/lTf+GH+ekv/iJkGi03oNtzMSyN6Sjm8OGEe7dHy4fxyQzXs1hMcjQNWoGDQDGbJoyGM6azMXmRUjclioZ2u0e328exHTRtObR7//YRmpRUlKAtN/I0qqGqCpI0AiFptdrwv/Vvt2xs1+DhgzF3bow5O4nJ85pFGCKlRprWaEJD143lJpotn1bHZDHP0IRGHIXcuX2Pu7cOGJ4tlp9zHjEdzairGrflohC89uJtyqImiWPiKCGcR1iuy8WrV3jz9ZtEsxhNSKbTGUVWs3Vhm6Io2b99sEyJLSqqXKGbJqZtUdcVhqEznUw4OxqiaTqWbZCECfNJxOg0Jsty6qbBtJfDzzs72xwfnhBFcxzPot0d0FQl+3f2uPn2LZ7+yBPLlqGieHcoVaMuSx4+2OeRx64TtDusb2xguw7T6RTXcZZrW6OIbr/Lhz76IZIkIgkXbG51ef3ldzBdi0//oe+i3R2wvr2ObftUecFoeIIftHFtl+l4iGUadLpdprMQ1wu4dPUqcZygVMO1R69T1w37e/cpyxypmfT6A07PTnnzjdeI4jlrG1v0BusopWiqZRH0xtuvkhQxrU6Pvb0jXvzma1iGT16U6LZFHGWMx3PKoqARahn0VVc88dhVnnn6ozz1+EfY6G/h2B5lXRCm03f3/4c0dU0raDGbTanrEk0TIBSaplGWFZpYJkM7jotAEM5D2q0WrcAFpVCqwfVN2h0HIXUqJcirhryklcXp4+ms+IhazQCsrPwHWRUAKysrK++RC488sj7Y2X38d/7tN8X0ZIJl6XgtH920sGwby7SQmqSuFd31Nn/tb/55fuXnfo2TW/fRZYXj2ssd9bbEdTSaRDAbFZycLnBtG0vTefP1+ximiW2brG+1QKsYjSacnBxycvqQ0fiUuq4Bjbpulpt2NA3Xs3Fdl3ARs5inJGFBWdRUVU0YLiiKFKGB5dicnJySZgnTScb9u2fkWcV8mqDJBtPSME2TOM4p8pp2x8N2dCzPxPF0ppOMNK1paJjNJ0wnY8ajMadHJyRpynA4Ikly0jRn61yP3/7NbzE6i5hOZ0RxwmQ25+h4zObuLkJKXv72a6hGMY8WKAV1rXjksavcuXWXydmI8WhGuIgZD0OE0JCmiRCQpyV10zAZT5lNIvxWQJoXFFnJ8cEQqemEYUJWJJiWyebOOoO1HnEcs7bWA2Bze4c6L/nqb3+bc5fW2Tw3ACnpdFpoCAyp8fD+Pv21HoZto2mSVrtNv9fn7OQEgaAqC9I0ww8CnvnQs9x78JBXvv0WgWXyr37hV/nEH3iG/tYa6xvn2d6+gOcEFEVGmqa0Wj2KPCZLEuq6oj/oMJmMMU2DVrtFnmUkScaHPvQhHNvmcP+UMJrTbrXY3tyhKDJu3HyT27ffxnZd+r0BAg3bkhRlwq07bzKejrBsk/FozI2bt5jPF0ip01CT5zFZllHkJWlScPOdO7zxxltURU633eOpJ55mrbdBLxjQ8fp0vC4tr7NsQZMGrVabPC8oy3L5fyMlruug6zq27aFLMKSg12vjeiaOLTENgWVqNFWJEIpW18O0DfKsJJzk1uws68Xj5In/6e/9Pf/9Pe0rKx8sqwJgZWVl5T3SG3Tst156dbB3+wEYcjl8mkTkVUldViRJQlHmOL7LX/4bf4b7d/Z45Rsv47vuu6suz0jTmrRokIbObLpcCel6DpanU9YNRV6RxAWz2YI4TinrHE3XkLqOUjVK1e8+YDmsra3jeT6dTpuqLMmLhP7Ap9dr4TgWdVMt04BRJPEC27aWCb0K5rOYsq7Iy4Jbt+5i2xq6aTOf10RhjSZ1rl3fotf3sD2Tbs9DkwJkzdp2i8H6gP7aGpZhUZcFdZ0wmZ+RJHPCaEowsDl4cMwLv/cd8ixifHZKGi4Ynp6SFCnXn3mMb/3+dxgeDqmKgqquMEwDdMGVqxe4+cYtFDVJnFBkBXlaUJQ5tmUg0CiLEsMwCfwOh/snIDQM26BpSrI4RJcSTWioRpGkMUHbpdNv43o+lmXSGbQYbOzgeAGn+yfcv32ba09dRWgag60+RZWhW5BFEUHbodvrLMdShULXBYO1ProuCVyXZJHy+iu3mExC1jb6PNy/x9tvvMPejT1e+P0X+QN/6OPkhaLX7bGxcx7DtCjzkl5vDddt0SgYDU8QFASBwa2bb9PpdOj0uoSLkNPTIc9/7Dm6ax5SaoRhyNbWFmuDNVp+myLLuPn260TRAt0wKcsK13JBwf29GxydPSArYqSEJA6hadjcWMMwJePZmPlsSpoktDod4iQniVNAYAc23/v938Pa2ga6dJHSwLYd1td2lu07qmF39zytoItt20gpKcuaXq+PZZhoaMRxQl4UVKqmqBoQCi8wcBydpoG6KhGqwTANhKbpVcH66Czk4OCgeH9P+8rKB8uqAFhZWVl5D3zhC1/Q9l5++XtUON707OUAY9U06KaBqkryPCUvc7Iq57v/yCdxnRa/8xtfY2Nji/76gMGgT7fXIS9Lwjhnb29EkldMpyFFmXHxkR1GoxhTt/EDCz9wOTmZMR7FWKbNWn8d12lRV4pGKVrtgPF4SJ6lZFlCXddMxguiKMY0FUm+YD4LGZ5OiOMIQzeomxpN05C6hu1Y6IbiYP8h/UEfzdQ4OhohWPZnX7jUR5NQFCW6oeF7Jqahsb7Rod22sGwNKTUMQ0ephqquMXST+XxGVEzYvrTJr//K7yE1jXCxIFxMWCzGNGXJxlaPoO3z27/+e/imy3weoguJhqTV8mm3A+7fvI2UAg1BVRSopkFKbZlXkC9DqhzXZjDoUGQZ
4+EZNBWgaFRJGM7RhCQIAjqdgDiOiZOEJIq4feMW/Z5PoxTdtR5t3+elb7zKYDMACQ2K7fPbTKZTqrpG6jo727t0+x38wKGmwnYNbEvQbjvUdQ5lxeHeHlKTdDs9hsMzJsen/Nov/ia2a/Dkc4+ghIkXtNjZPY9SYrmydLABAjrdHvfvPWDQH6AJjb29BziOjeu5FEXF3sMDnnz2Gu2+g+cH5EXBlauP0Aq6tIMOnaBNUeRYpkHb79Lyu7iWi2tbhOGM0eSE2WxImoacnu5zfHSIoVu4ts3R4UNmiwlSCkzDIM9TGlURLXIWYcTnPv99OG2XOA+pqhJYpkBHUYSgxLIkmiZxHRcpJaBwXBNNCqqqIo4zppOILC8Io4LFoqRSNVVZkUYFVBq6FDQoFMoSTX3VCKv19/fEr6x8sKwKgJWVlZX3gF0vnmIef1olmeHYDq7rotHgWjaD/hpCGCymcx576iLPfvxZfvonf440zqmb5UOsbVv019v013xM06CpddKkxNANdFPn9HjMyf4JeZ5SlSn9tTatjkujCuaLGWEUEgQter01LNOi3TG5cHEHTWrESUyWl5iWwYN7x9y9dcR0FDEejcmzFNt26A3WsSybqiqX+/G1htHpiI21bU5Phuw/OGJtvUNe5NiWRRwlVHWBYWh0Wi2EAl1KTEPiezbtwMV1PHTDx/HauK5PFE2Is5APf+KjvPPWbc6OzlB1zmw2QihBoxqyKuGjn3iWvVv3GB6cUmY1xydnVGXFbDTlwsUdJqMJ88kYiY5AQ2i8O7egKOuKMIpQCkzLpNVx6Pd71GXKYj7C83w03UApRVHmVHVJq+1gmgbxIqYpS/KkYDKaUBUZLd+l2+8wPQ1RdYUX2KRxjlAabb/H7TdvkSwKgm4AUlGXKb1+B4SgKGpqpbG1s4Htmui6QZFV9Hp9NE0wD6ecHhzzm7/y23zyM8/jtVt4XoDUTc5f3EI3FIZukmcJgddia+Mcd+4+ZPfcBZqm4eDgIXESE8ULqAV79w+4cHGLnfMDkjQjTTOuPfoI7U6LVquL4wTUdYNSy3kUz3GxDY/N3jaO6ZJmKYhlQtvJ6TGHRwfo0uTChUtIXRLFEY7jIITg6OE+45MTXvv227z88pt8/x/7bta2N0nShGgRopoKBIynMxrVoEsdTVuGfy2LABBCW76tskzWBz1816LTdggCHdPQ0YQknKVMJgvKvMBve0K39DzO8nkp9Oz9PO8rKx80qwJgZWVl5T1gy2be1AXTYcTp6Zg8ydE1naqqmM2nzGdTOusen/uT38cv/PN/zWIaIiVITWEYBn7g0e47tPsmSlTklaJulv37vZ5PuijRdYuNjQGO4xEuYizLptvtYFuSLF1wenpKU9fYlsn21gZbW2vsntvk0UevsrE+YG1tjcuXL2AZFoPeGrZpkaYJmi4J2m0a1eB5Lq3A4/TkdNn7nWWUlaLKGl7/zm0ms5S8yoCGpgHTtIgWGZNxhEJguRpCa9A0Dds2CVoBtm2RZyWq0nju+afZ2Oxz4/Vb9NsdUMuf3zDksmXk4jp/4Hs+xVf/7ddosozJ5IzFYkye5YRhxFMfforXvvMGWZxTNiVFXWO7HpomaRrQdZ26qanqCk1qIEA3DfqDPkIILNei1e4gdY26LrFNgzzPqeqUza0utVB0B32Oj4YkUbx8c6A1GI1BOAy5eG2Tk5NDdN2i1WqjSji4f4jjmxRpRlXUjEYj2m0Px7WRusCybdbW1+n1BxRljuf79PvrFE1FUoS89LVXOTs548nnroHQsV2XTq/NuQu7GLZJVcNwNKQ76LG+vk4UJ6yvb1DkBVWZ4/su77zzBuEs5Y1X7hAENttba0zGU05Pzrh27RF2zp2j11+jrmuiZEEUTYniCKnpSN3kyuVH0XWD6WxIlITohk6SLDg82qNuSjY3trBdm0o1dHpdHM8hzxOoa47vT/jOC6/xsU8+x86lHaJ4QVUVGIaJado0ajkAbJomZVkwmUyYTCZEUYyu63ieQxJHlEWFokKhsB0bw5TY9rJYU40gS3JR5MWuVOKKaNLz7/eZX1n5IFkVACsrKyvvAdfwcpQKJlFCGKfEWbRsvWlqqiKn0Sr+9F/9PF/9yjd55+W3cGydOFpwdPiA46NjyioHAXFYA7C+6eG1BFWTYhomaZajaJiHIbN5SBTlnJ2OicIE03AwDBdNgqKi1/c4OjplNp+zttFCaA1ZlhJHCYZhUNcNN95+izic4lg2DaBJiee5pEmCQGLqDrPpnMODIzrtFkmScXQ4psgajo9SHuxFxGnN2emMxSLCso13H9bAtE38tsNgvUWrZWNbBq12mytXH+FTn/gIh/dPGARddra2kYaFbupIS+A4Dj/0wz/AfBLy+itvgCzJigzH0gjDGVbLYfvcDi9+4wWiNCJNMtKkoMwz6rKizCvyIkfXdYRQCBSqgboG22nhB22k1JBag6Eb2LbFbLpAopOGKZ2uT7fr4/k2rZaHbenkaY7j2NRNxevfeZPHHr+IISsm4xMsR6e/vsYsnOMHNqfHUwzHZjGLmQxnaAg0Tcf1LVpdj1Y3YG2jw+HDfdqtDrbjkeYZNA0//zO/xDMffYxG1CxmC85O5qSpQug6juuSZhlHR0fvFlY2Ukh2d8+/27aU0O8OeOftN8mynNdevsl8FuH5HlEccef2XTa3N9nZ2WZ7+zztVg9N0ymKjLopmcxOGU9Oabc7GLqD1HXSPCEvEqJ4wsHhA27ffIcsjYCS4fAM27TZ2TlPr9ND12E2Tnj1pRs8/fTjfPS7PkRRFeRpQjgdU+UFTVOTZSm2bWOaJp7roWmCcLEgiRNsx8ayJIM1j6Bl4Xk6tq1h2ZJOx6auS4pcUdXS1aV1VdXl9ff1wK+sfMCsCoCVlZWV90AyH39mkaTbi6JBGQaN0KgbULVCGCbf/8N/mMUi4xtf/g6+51E3NY1qMC0Lz2/h+T6m1DClzugoYnK8YNDrsLnZpakKsiRFNeB6Nu2OT5blxHFIuJhRVRVJGlE3DVle8vDhEXFUcnYy5+RoQpbkxItsmca6WKAahWM5pEmGbuiouqLIU5qqIUtyJqMRSRwxm07I84S9B3sMJyOuXDtPNE+4feOU2SRnMorRDQ3XNTEMQdNUy8HXrkl3zcZ0dLyWx/b5XTZ3trn+zCUarSGcxGysr+N4Lr1Bn06vi2G5PPXhJ/nQx5/mW199Eaqc7qBFlkTomiRJpjz3yec4PTjh4Z27VE1CkeQki4wsSSiyjCTKqEuWbSa6gWoqmqpC1wRN3SxbYKoCz7NwXAvbMhEojo9P6HR6727xWe7c39wc0Oq08DyP8XjMuctbDI+GzCcj/uBnPwpVSbyYY9iSLMvw/A5CSOI4YX2wxnwaUaQwG4cs5u8OCvdd1tb6WIbGYjLm/NYFdOlQlDn3bu1zd+8eT37kKRZhwt7eAfsP9ohmCzRNZ3t7FxQMT0fkSU6WZSgU21s7VKWiKpdbMd+58SZSk8RJSLiYoxoIw4i9ew8YrA/wWy6B38Z1WvheB01otIM2URyR5ymtVh/f7tB
p9wGNPM+oqwJFzf17d7h1402iaMzB4QMePtwjzhJM2yJNY6JZyJd//XdRdcVnPvNxBmtdpG7iOT6e46LqGl1K2u02pmXh2A6WaZOlOUIpOh2b9U2XVscgTyvKvMGyLAzDQjUSTWoIHTWaTN0oip/6whf+kv3+nvqVlQ8O/f3+ACsrKyv/eyQtIy1qLR8uYibTlKrW0A2DOM149LmrrG30+cm//0Vs0yPLI+q6wffa2JaDYUqgYTGPEEjKoubG2w9pVMXu7jZ6pXAdE+FIyjLHth0sy6bTbqOqmjTLkFIjTkI0O6CuauqqwjAcDvZPEYh3d7ELWi2P6XyKqes4tkOeFti+jet6pEnGbDalyNPlLTqCeLEATbJ76TKj0YyizGi1WiSJgyZaHB1H9PomQWCjGyaGWZOlijQqljfkuksclphOiyvXtnntO3cxbYtW4FPk1XKIWOh4gz7Pf9fTzKZTHtx8wPrGBroQ1EXOMtlA8LFPfYpvfO1F8jSmEQ15njIajQjDEIW2DIwSAtO2aJqKulakWYVuashcYBkGhi7x2z5Jvkxidl2L07NT/MBH6hpKaVRVBZoCXaEZGmdnJ3Q6Hp12wPH+GRce2WXn3AYHD4/oeRskYchg7UmkqTEfL9h+vEelHBaLBWmWEEUhAo3N3R6JKLh6/So33r5F23JZX9ukQSDQ+a1f+30+9/nvRchlQXY2irAMG8v1OT09ZXt3nXa7xd0794mjBM9z6Xa6+H5AlpV0u5vs7d/ihW99hScefxbDtKCo0KXB4cNDsjzh8SceRZfLLfpxnNBUDaDTa7uUVYEQIHVJWZf4Xot5XSPE8u6w2+0zX0xJ0hgaMA1rmSGRNJimTZVnqCrn2y+8yoUrl/nIJz7E6emUs8MxnVaLuqqYTMbUdY3jOEgpEUIiDQtp6EipgRJYhk5RQKMaiqJA1wVB4CBlSVm19aNDW0/iqDfaE48DL7+f535l5YNi9QZgZWVl5T3gWu1XPNcadX0PwzQRmiIKF9hdi498/Gn+xT/7eSbjMybjYxaLGVmeUxYNum6xttbFNHTqUmM+y5hMln+v6mXIVV4W9NdarG977Oz2aZqSs5Njjo+OMQ0bQzexTIfNjS0uXLjAs88+zYWLG7TbzrLXXZXkeYFh6BR5ieM41E1NELTxW208L8D3fQaDDrouKPJl2JjvOjRVQeB7VGlOFkZIoVGXiuFJzM13TkiTAssyME0TgaCuQDUalmXQ6Tq0Ojatrs3Tz50nClOqQrG5tYFtm+j6sp3FD1p87JOPErRNjveGxIuEditAVTVoUKuGnXMX2Vhb4/UXX6JpCqAhjkMW8xlZllAWGYv5sp1E1wFVU+Y5URhSlRVVVVMVJWmavzsroIFq0DUN2zQ5PjrEMQ3qskQ0kCU53U6Lqi7YXNtk//4hRV5x+52HBN02buBgWRZnxyNcx8bQFf1uB63ROD4ZMtjo0WgF89mYOAwp85rx2ZRwHiINm3MXLlJTc/HSRQKvhW2b3H37HnEYcu7KJmVVYpg2i3hOWWWUZckrL7/CO++8w/b2FoZpcHJ6xnA4RhMaUTynURWPXH0c227x2huvcDo8QUqJJiVN0zAeTnjxm99kc2eN8+d3sE0TN3CBirouAUUUzYmTBUWe0TQ17VYXISRCwGIxJfDauLaP63oUZUoUzlBKLW/qLRvXbdPrDAinEd/4vW/juw5PPnuBrEyYz+YEQQvHsUmTGNU0tHs+jm8ghKIoa9Kkoa4BGlzXIPAd6qqmrHLmi4g0qYSpW6Yu5Ulpmm+8n2d+ZeWDZPUGYGVlZeU9EBtlLRukp1tI3SCbLZCW4nN/8nv52u98k5MHR/h+gGkYOE4L3/dpasV8NuXO7YoL5y8hdIEQApQCBbo00ITJYpbQ6brolmR8EhPOY5IkoygKRpMhZVEidYkuTfI8YzyZ0O/1abUCpKFTljXRIsH1XKJFuAxnKioM38ZzbTSpMZ2MkbrG9u42um7SNA22K5GmjVLLzyRRNFnFfDHD3Q1QFXiuy+F+jGkk9Hom7baPtJe36EmTIqRifTsgaBvcvRXS6baoy5J5lFLXNYZps3Oxz87uOgcPhkSTkDROkRuwmE8xLIc6zviu7/40D27f5uHdO2g60NTkaYyhW2iiJs1i6ndXgVadgCzOsSyToiiJihgqRZKk5HWJlHK5MlQYhIsYXeocHR5wYWeXIivQDZO6atA0ied75GmO57mMRmPG0zlH+wdsXdrkwZ0TRF2zmIyBhjicUZU5cSxJ4pSNrTUmowlNXXN2csx58zyzSYzluvhBm8kkJM0yHnnsMnfu3IGp4ttff5VPf88n+dpXXkTTBY5nEi4mOLaHAN5++y2Oj465cuVRyrLidHjCdDZCN3XiWYxt23S6fWzbIVwsSJMM32vhBz6LxZxsVvPC117h8uVLdHt94iTG63tMJxOyd98klVWKlCZ1o0jTFMfxiOI5aRpTVQ2dTo+yTBBCvBv2laHrBlI3MSwT3/GosoIqz3jj5TfYvbTL409dJJxlHD0cUufgugGGadDUik7H4tz5Dkop4iTHsiW2LYnCDE2TgIZtG7QCl2SeiHan08nzrDOdTldpwCsr/55WBcDKysrKe6Go1nRTZ5GMSeMEVM2n/tCnOHiwz7e++nVUXS9DjczlQ3peLB/MGtWQpRlxErG+sY4QGlVZU2QlnudhWZK61onCDLfxyPMa3bDwWwG2Y1FWFUkcYVkG2zvbzKZzwjDDtkvmUUI4D6nr5dDrdDKhyHLqsqIqCkDhejZKKcLFcnWm1EuKuqCuakQl8fwOtVJIqZEmGUo1eIGP0EravR6HBzMWs4StLY+NjTUs2yTNEjRdYDkWlqVYX29zcjTHcW1qo2E6LlCAbljYvsmlRzZ447WHdAY+D+6d4Xnu8n21As9pY0ibj376Y/z0T/wUZVXgmBZ1tdzpLyTUeYXKE5RS6LpkdDqhKEvqSlEUBQgwpKSqKySSs9MRlmmia5IsLdANQZaW3Lr9ABDYjk3f7lIUFZZlU2Q5nU6HOIpQdcHL33yLD3/meaQtCZRDNJth2jqGIYmiKVkdIzRBp9NjMBgwm4xp+x6LWUia5oRpSqfdwXFcHh4+oLUW0O33UEXDG6/c4A989qM8/8nn+ObvfoMgWBaKSjXUTY3rOSzCGa+8+hKDwQDHNQkXc8qwwrIswiSkzHNarQ6maWMYkrxIsW0Px/GYTE8ZjyOGo0PW13do+QOiMMJxPIJ2mzCcE08SNC2jFfRx222SNGTQ3yQvcsJwznw+xNAtHMfH1B3qKiUJZximjem4VMpESA2UhhAahw/OGJ/O6PcDds/3ybOSNKmQusBxlpkRZV7it0ykrpHEKVJzUMB0EmGaEr/tYpgaDTmz6UxD8UzbMteBg/fv0K+sfHCsCoCVlZWV94CBQSWFNAwDoQTXn3+Mbifg5774b2gHAyq3xLZs8jxH0wSGZeH7AYN+H8936fZa1I1gPg2ZjKfUdU1XCuI4wTAleVKRRjOKslpuTkkiFosZpmHgBwFQkyYLgsCiURZCV9i6yXxaUR
Q1QghM06QuKhA1uuOQJjGWbWGYJk0Dhm5QFDlZkiKUQmmwCOe0O10MXcexLYKgg2ZIds6vMZlEZFlDf+DSXWthWh4np+Nl8BMamgC3ZYIQSB2Cls1svHygsz2bsqi5fG2TOMo4ORpy/sI6k7Mplm6RpQ3SNMmTkGvPPAEI3n7zLayWh6wVQtewHIcGRQPoAjShqMqC6WSC7TqEYYguDZAC09QRurbc/1+UZGlKXZUgxDK9VxqMhmdUTUVeZVyurqALnf5an7JqsE2BYVjoUudkf0oaZniWQ1bkSCmpVc7O+S2OHh5QFwVFmBAjieI54+mIoiq4du0J4jTh6OFDhmfHbG7u4jo+N9+8g99uEc7HqLrhK7/1At/16U9x5/UHLOZnIJatSq7jUjcVWFDVDcenJ5iWgZQSXTfIigJDN3E9n3k4pakbDGnS7a0t16yKhq3t80TRnNPhKcenx6RpgZQa+TxBlwZlnWEakkU0o6orLpy7jGV1GE2mGJaJZdokaUVeZJiGRd2UGLpGVTYIpWjKhjIrKfIMTVPoEmxb0lQFD+4dcnR4xtb2Ohuby6RkpSpc10QItfxe2AbzWUpZ1uiGQKBhOwZlUVPXDaZhMZ8tkrJJf9ew29P3+divrHxgrAqAlZWVlfeAoardMK18pMfF622uf/gRfu1nv4xWC5SoKcuUPCvwPA/TtNA1A4FAGjqtjk+3HzAZR5imQbvbJglThqMh/X6PTq9HVVWMx8v03tHwlMV8hNAUfitAzzRczyNJCtI0Y3tnCyV0NN1ke/ccZVaQ5jl5llM3zXLAV9MwpfbuGtGSusyp8pyGCsPUicIIwzSoqpyD/XsEQYfz567guCZlpWhqsdzfbwrabQvfNfn2iw8IAoOtbZ8iLwgCl36vR5LEaFJRNxWGpRNIQaME/W2DoGXx2ov32L2wydnZjLoCzTYoqxqhGeRVzCe++9O89vJr6LVA1x00S6AasGyfmgYhNCQaAkFRFqAEhmlQNw11XWNaNkIIdH35O9elTqUUaRGhacsB6ZbfYjIZEiURk+kUKXSqouRqcw2JzqIKUY1Cmjq29Dh7MGZ9s8ObD2+zeXmb2WRBUVfs7u5weHgETU0Wh8xnU+I4Zu/hHmVds7l1Add2mC+mPHhwB9t0ly1XVcPp6RFVWSBeEXz448+ztrWJ1DRmsxOmszGe28K1AsJ6hmlomJbFbDEjrwo03aDleGgI8qKk012nqSvmsynj8ZAojuitrVFUFb7fRilJWRTUdUpViWWh1EBdlFiOj+0EpOmCvf3btNtr0DTMpxNaQbAMeNMNtHd/55a93PWvSwPXcdGkxGm3ltuDGkW4mKNJg7Is0YRiOgpJwoLNnS5XH9tmsOaAaohmKVle0NSCLK1wpaTIKxzXROqComg4PZojlVYJTR7+9M/8TPx+n/uVlQ+K1RDwysrKyntACG2tVpqMiozHPnaFt16+SRoW+IFPUVRUpcI0DYqiYDo9YzY/Yb6YcnY2YjqJmI4T8nR5I20YkqDlE7RaVHXDeDynqqFpGspyuVMdAVlesphHxEnK4eExD/bvMVuEDIcLkrhgfDbi4cOHnJweE4UhhtTRpSQvCpRQy2087Ta1qomTGMPW8dsB61sbdPsDWu0+m5u77OxeoN3pE8cphpQIpTg+OKUqa9otGwn89m98h4N7p8hGZ3gSoRpJu+dSUzGbp8RJCRJs30QzJN0NmwtX+0zHEVUluHh1l8kkYW1tA9txqRUYhs3F69e4/vh1Xvz6S9hugN/q0uqu4QUdPL+FbXu4ToDQdMq6oSiLZSJwVVHXNQqFaeg0VYOuG7iuh23bGIaBZdnYlk1dgyYNDMPE910sy2S2WJAWOffv3yFNY07PTiiqDKkLVF2QhDmbF9dIiojZZEhdNuzd38d2PIKgS5ylJFlMkqcoISgbxctvvMKde7dxAx/btjB0narICOcTNKFz6fKjSNPgzs0b3Lv7gIvXz+P4AesbF7DsgCSLKOsYXTewTAsN2FzfoR10oKmIkog0z9A0wcHRIXWj8FotiqpgPp+wd/8O0FBVNXG0IE0jNKGT5ilhtkBaOp7XocxTUBWakGRpxmw6pFYFrmuQZjGO4+B5LUzLxXZ9dNum3etj2BZlVVJXFdEipqoUnW4Hy7YoqwzbttBNE00Kumsdyrph7+4x80mEY0vWttoMNtps7gZ4LQshludgsUhJkwLVNMRxwtl4OBGm9IVYjQCsrPz7WhUAKysrK++BpqmkY+vOUx+5wsGdQ9761g2gIcsydCkxTZ2qzmmaCsOwaLUGWJaz7O2ua+IoJYpSonCZjmrZNlIKPN9Z7pePUkzTxLYtLNvCcR1sxyavCrI8o93psrW5Q6/XRQhIk5RwEdJUNaZp4boepmmg6ZKqqRmNTjg4uMvR4T3icAqiIQ5DTk9OEcDm5gZROGM6GaPrJrbtYVoWVV3juMsVoXVR0RQVr710j8W0ZGNjnTjOME0d2zWQ0uT0aMHh/pQkrDBMA91scFsG5y4N0NCIo5zNcz0836HMwXJdlNDQNBNh2Hzqs5/h9OiM+SSmt75Jq9sDoSN0E2mY6IZJ2SgQ2rKdh+W6U6UaDGP58zZCkFclQkqkaWBYFpblYNkujusjNIkCXNenKko83wFVg6gYTc84GR0TJRGnw1OmszFRHHF6dIKmSy49fo6zyZgwXjCfz7h95xaaBMMylw/jQoASrA02QMH9e7eZjs+o8gJV1wgpsGydJFowPDvl/LlLdNsdXn7xZc5d2mQ4PML1XLa2dmi3B2RpjqEvB7uF0AgXEwLHY727AUpRFDlxEqFLGA1PmEzGaLqGbhlIqbG3d4/RdER3sIZhW4wmZzR1SZKl7B/tsUjnKKFQVY0UGo7rUjc1aRohlY5tupR5iS4ElmmgCQ1LtymLisBr0W63qOuCuqnI85zRaIpA4toBhmHR6XRY2xggDYlpOaAMzo5jHj6IeOfNQ85O5lRVTa/vg4RG1LQ6LrohKPMS07aRuunXVeX/3b/7F1c5ACsr/55WLUArKysr74EyywYbW51eaHg8/MotLp47z/HwlDiNmUcjDGmiCZ1ut4dlG5RFhSYFuqGRpSlFVqJJyfb2AMfRSLMGqTtoEppaoWka8/mcKI7J0oQsz/7d/v+6yjk9PqTdbqOamp2dNoZhYZkm88WcplkOxyIbpC6xbIuikETRnDRN0JD4gU/TKNI8ZT4fc/HiVVrtDmmS4Tge7XYLP3DJ8gKFhu861HXDvdsnCCEZbK1T6QrDMymqEsfVieOMG2+dkoQlG1vLB3whIeh4SENneDSjrhXb51pMp3NQDZqUSMNAlRlW2+G5j3yIX/7ZX8SxHRqlKMsChIZuLnMWkjRdrqnUNKBESh2lFFIaqAayLMM0bcqqwjUc6gY03cDS9He30FTYdoamaUjbxtAtTNNCI6KoclqtgCyLqOqGOIkpq4pOq8NoOmLr8i6djU3SWnL48IjR2ZAiTwmTkMHaGkopqrJCCg2p6Qy6A6JozvD0ENfxUQj8VoeqrvE9l9PjPXRN0Wn3GB6dYLkm7
W7Ards3uHTxEt3OAF2zSLKIwaDPfD5G1RWz6RjXa+P7LfI4ARqqusI2lz9PmqXopoHUNAxD5/TsmPliTrfbodvtUmYF7XaPRTgnzRJM3cKQBo0q/t18QVPVmJaL53jEcUSeZ5iWTVPXWIaFahRVWWLbFnVdYVkmdV1RFhmoZXGlGwaGoeP6Np2eQ1M1NHWDEIrpNEShGJ0mxPMSx4/Y2GmjmxqaajBMA9uBpq5xHEsXmigmk5Z6v8/9ysoHxaoAWFlZWXkPeC3Dbm207F/9+ZdIkhqhF2R5iqDCsT1Qy4RaTYM8KzBNk6LIqeuKpm6Qmon97n7+drtFUSwosoqirCjLAsM00DSNpm7QNA3HcWm1uwwGPeqqZD4LKcqSyXRKVTX4fkBdQ5pnWIYBKIKWD0pQZDm+GwACz29R1zVNXVFVOZ7v43kBeV7heD5e0MexTFpdm04/YDaNScKCMivIi5KyqvACG9c3MKRBlmTsnutjuyZvvXXKIirp93ySuGIezrl4uYcQGtNxRJbkWJZFb9Dj/p2HmKZGUSikoWF5Fh/9gx9jNplw9+aD5Z561dDUClBoms7yT8viqK4qpDShzhGapKpqUALP8SiynKKqcByXum6wTIOyqjBNm6Ys6XQ6FEVGXdesrW9S5QWWYRNlEYauk8QhRZHSNLAIQxrVIDXJG6/e4MpTz9Lf3OXseEw4X6BEjTQsjo+O6PZ6CCkpsoy6yQiCFkI1FEVOURXouiSJIjTDwbJcNrZ2CGcTKmqKpmQ0GfL4h58jjV7h4OEBrVaA6wU0qiZJIh577Elu373NYjElDKeYtoPve2RFQVXl5HmGRNIKfOZRRNUo+r0+vttiNh+RLGb4bhfbNkiTmMANcGyPNA1JsxShCbIso99fx3cDiqKgrEta7TaabqIbBpomqFQNmkZV5MRxhfbuvxOaoMwzmqbCcSyktCjLkiRMCHwL37cwDI2irNANHcNQWIZOVTbkac3wNKLVtRGqwXUkRZFi2hIhlKFpxnpd1waQv68Hf2XlA2JVAKysrKz8R/aFL3zBXdvQd996fV8/PZmRN4qiKjAMjTDMKYplqFJVF0RRRLe7BqWirita7Q6eF9BuBezsrqNLQRLl+N6y3SZNSwQOYRRRFjmqqaibkurdwKrFbI5umpiWhaZJ2mttNGlgWgaablBPljmzWZISLcJlp4wGCA3bdEjSBENaNJokL3OkNAjDCKUagpbHI9e2mM8WgEDXodW2SKIUoSmapsbzlv30qhEkYULvQgtd19nfH5ElBefOdZicLDjYn3Dh0Q1OzzLMeYYUDbohcTo20oQ8LbAsC92scdBppM2HP/kcv/KlX4VG4AcBZZ5TVtVy139T06hlMYRuYFo2WZpSJQ0oAIESUFQlRZETtNoUZYkpBEJoyzWghkRJiUAgBGgoCtMgEjGOptFqt5hNZ7iOjyYlRbEcpk7iBMs0uXf7Hp3NCzz5sUe4/Z3hclONXqGiCMd2uHP/3rJXXylAkMQxne6A6WSI1CS6bqGURlmmxIs5Quj0eutMZ2c0WcPDmwc8+eHneeObtzENg7PTh3h+SqfdJ4oiTk5OuX79WY6Ojzg6uENVFlRVhaHr6FJDQ6NRDVVZ0fEDkjghTRNM02Ktt05ZlqimIs1zLMcmyzPyIsd3fFJRIESNY1tURYHTd+j3B0wmY4qyou0GFEWG0mCepsuBdqmhGoVtO+iGie8HROGULCtwfR/L8WgaRdU0xFGO5cDadg/DNCmKirqsUE1FK3BI05yyrkmTDNuyiJOcJFHLAjSPTwxPfHttbS17Xw/+ysoHyGoGYGVlZeU/sqtXe/rwYFY/fOewcgMH25SgarIspagK8jL/d7fVjuNQlSWtoMX62ua7QUdQlg1ZkpNnKSdHY44OpxR5hQCKssQwDNbX1ml3eui6TZyETGZnCKDT6tHt9vACB8sxqOqck9MzsqJgbWOD3sYa0jSX23I0fXkzW1fESYxSFWkaUpU5qhG4ns+FixdwvRatoItpCPrdFhLB0f6IOCwxDRMpa8o8YjqZMZ+F5GlOu2NhGIIozMljhWtY3Hv7gAe3jwl8l9FZxnSaQSOoUo29+zM0aRBFGULouL6BaUkaGq4+8QjhLGbv7kNMx1iGkUmJ5/ssc8mWPf7tTodur4dlWTRKYLkOluMiDROERlGW79781+RFSVFU5EW5bCOSEqkLpBTYto357mpWy7ZodzsEfgvdWK6+bHkdXNvDdz00oFE1ZV6wf+8eQdsjTTLqRtE0UNU1YRIhEJyMzpjFCSCIk5g0L1hf3wElQRi02j00AaiCokipUVi2gy7g7ZffAFXSqJSySNk9d56yrlmEIaZlEscx4/GY8xeucOWRp+n11hFooBS26WJbHp7rU+YFRZ7hOhaOZSGUoiwrhNBQKLrd/nJI3TAwdI00SzGkjuf4dFt9XNtjMh6jSY3rj1+nO2hTVSVZlhIuZqAqTFNSVSVpmpKlGVVZsJhPcRwPw7TRpYPnBjieg6Eb6LpECJOz05jT4wlVmWNZOk0tWMxypCZpBQ6ea+E4BklUEsclKPADOzMsa+/HfuzHqvfrzK+sfNCs3gCsrKys/Ef0pS99Sb71wgv6gCxaW2uXudEYYz0jL0uKMkfTJLbt0PJbCDTKssJ2bCbTCZom6Pb72LaF67gkSYVlC6oGiipHr81lkFOWI3WdLC+xLJt2p4NSFYv5hOl0Qlk2OJ5DlqfLXvpGw3YcVFkTh+EyNEs15FmBbVvYlosuJbBMetVpqKsax/FQjWBrexvHsQjDjOHZnCxZBnfVFUzHI5QGjqnhei6zeY5pSlotF9PRMGxQQjEdl5wdTYjCFNP2WSQ17Z5Dv+uRRiW333qIMCRPuw4HD8douo6tK4QuqTXJcx95mt/+1a9gSI1CVJRJCZqgKEuyLMO2baSUgKAochSCTreLEII8L6mrAqFJLLEMOovjBNOyKUVNYwuyosSydKSuo5sNQkCepbiuQ14W+L5PnqZ0Om2SJAHVUKsGaZiYhk1WpJimRhxP0FHUStEotWzvatmopkYpheu6TMKQpm5wHYu9/Qdsb53HcwOyIiNO57ieR101NE1D09RYdsDGpsciiZhNI9a3utx6/W2KMmNzc5dwPuPs7AhN08irnI3tDbygTZmtEwQBo/ExWboM//L9Foau0zQVTaNoajBti6aukdKgaZZviLqdNsPhKUHQwrBNkiRCKIGuLwtH0zSIwmXv/7nzF3FMh/v37nJ2dkwchzRNvRwWdyzSJCZJI4KgDUh63T67F9bQdMF8pui0PGxXkiYFlmug0FhEJUVeIxQ0SrBYZOixpNU2cH0HIQWua+B5Nod7RU8J/Pf35K+sfLCsCoCVlZWV/4gevvCCGYfzHyzK6BHf9kVezinLGqUElungOiWT6Zj5fI5ju5imBSy3qnieTxpnxGaMgHc3qJhIXdIUCXleowmb/qCDEoI8KzgbzkiSDNdrAxq2peN6HkIz8IM2hm4wHg1J04TppAB0LNPCNA1cz6OpaxaLkKpMKfKEMl8OEntBF8t26fUG
uI5Dp2ejS0Gr45HEKWWhSNOKokjICwXGclWnGzi0uy08V8d1TOKoYTYOOT1c0FQNUZxQxzFrGxskacL+g5holiCwaPVMxqMF9+6csb3VodN1yacJTz73NEUYMTw8xHYsZCHJTNCkRrQIkdLAMm2EplPVFZqu41kOTV3TNA1S18lzQV03qEZRFiVe0EbK5YOsoqGqlm9dpK7RoCFUg9A0DNNc5hsYxrINqt2iaRoMXS5nMFRDkRcYpYHUdBzLokxzdM3AsW0oK+qiWLatpCmB55PnFXGRAjWNqHh4vMfuxi6mpRPFIVIY9HprSE0nikJc16PbGdDqbVBVgrWtDns3TYSqmQyP8NwW7VaH47N9XOVy8+3X+cgnPkU4PWGxCOm0u8yUoGkgikIcx8E0PcJFiKIhTVKUarAshalbNHWJabpsrG8Txws0IfC9FtAgpcQ0baqmwjEMqlJx7/YDLlzc5pnnH+PuHZfDg0PyNEN6BgqB1E0c18K2HVzPZef8Gheu9BA6nBwuKJKCNAVdM4hnKZ2+iy4MLNNAGg2WZVFVNRqKbtejUg2WYXP33l1uvHmrFAZvu657+n6e+5WVD5pVAbCysrLyH9GP/v2/n37hb/9IlCXyyv5o6qQlWK4Fi4a6qlAoDEOjrivyIqVRNXFSY9kOQdCl3x3gtzzKsmA2m+G6LlGUMJ/OaJSgrAu6/R5Bq4Vp2DiuR1VWCA1a7RaqhjyrQBRoAnzXpdtr42Q2SgFINCGpqhxdl+i2hW7qhLMS1SgMQ6dB4AUB29tbOI6FUiWnJynRImM8GRNHKUXRUFWCOF4wHI3wPB+v1WawPkDXDU6Oxzy4m7K1s0Ucl1R5wmw8Is0z1na2iZMYy3bJswrftrm/d8TGlauMRgl1JbFcE8PWafc6PPXs43z1t7+KLgSNIdDQsJ2AJFne0EspqcqaIs3QpFi2zEhJXdfoUhKGIZblUFc1RVHieiaWZaHUMhdgPp0hNIEuJUITCBSGBF03QAh0Q0eI/z97f/KrW5bm52HP6na/v/Z0t40+IjMyszoWS7RlmjYFS4AHhgfmfyAY8EADcyJ4YBRzrIltwQ0naiDJgFiAObBlyCYlShQlURZZRWZlRlZ2EXHb05+v2/3aay0P9lVpSgIBBAL4nvHFveec7yzc913rfZ+fQApNOZtP2kslQQAIKg4kWUISp8SzOftNg1YJp2dnNK2k7xwISZ5l9H1HmcTsW0ecZLjO0w4Db2/esJytyJKMqj6QlzOcHVFSsNvcYkxEHGXUh5rHT5+Spj9naA9EUULd7JiVCx6fv0dV7dje3/Lzn/6E73/6ff7kTyo2m3v6vkUpSJKS7XbDarFmNptRVXviJMXaATeO6GRqQrq+Iy9K8rxAK421Iz44un7Sz8ZRzP39Lc4NhCCo6h1vL684OV3z7Okzbm9vGW2gyGeTcSnPWaxmLFdzkizl9nbPk+enfPL986kx3Fl2mwOHbcf1qz1RJDFJhEkkq/VICKAjhbVT1ldbD0SRoWvakM3NZXF+/tW3eOyPHPnOcWwAjhw5cuQbZr1Iry3hvrMyCOfEaC0COc2h2xo3erSOkVK+8/6PzMsljx89RklIYkOSJGwfNtzdPhDHCVGS0A89Mkju7u7I8pLeD9R1TQie5nCg7SuKbEZZzpEawLPb7ojTmL4fscM4zbfHCUpIgh+x1iGFpCgXpEmGtT1N3wMCpZmSfgdLXfXYsUdHCVlWkGaSq6tbnPeTzUhpzs/PMUZw+eYaN3oeXSxYLzP2D2/ZPmwYnSeKDPd3D2TzOW13S7Pbsb29Z34y5+LpKT/54y8pi5zFMsH5wGc/+gF1XXF7+5Y4M0incJHHe4isQQpF3/f0Q4dAYUxEW9fT168km6bB++nmf78/sFiusHbA2h4p4fLtK5q6JstK7DASpylxFKGyiCAlQgmixDAMPV3bk2U5aT4nBEuZxIx9D3iSOEUqw/rRIw6bA7HRzGYL8C2LMuP2/pYsS/He0VtLGkUsFyeYQwRiS9f3BALOO6Io+vPPqmtbpDBstw/MMsfm6o5nzz7FRBF2kCiTkEqDsxYBzGZzdrstd9dveVvM+eCTT7l6+4b99o7rm1dTbkNSorUmS0uSJGe33zGfrdBKYe3A2ekFdV0R3EhvB0ScURQFVVMRxzFVfWBWLMmznHGM0DpCKslhu2H7cE9kIkxkyIuCJE1ZFzlJlpKkmsU6Y77MqKqWl1/dEOeak5MFjpHlecnp4yXVrqHe9TRVT9+CEpLFKqGuO1ohwEvqw8DPv3hJcDipdPQtHvcjR76THBuAI0eOHPmGCdI5a2tvlEBLhQ2Ox4+fcn13xaEKWDeQ5QXBQ1XvWC3PiKOUqqp4/OgMpTS7fUUUa6KopK4brLUIIUnihNC22K6bXPh+nMZQbMtud8tuc8/Z2eMpCXfoESrgXSCKUozSdI3Fdj1pPHnXpdFE0eRq11GEiWKQDQjB7e0dQnhs79DacH5xzu3NHmstbWsJITAMlqIoKecLxtFzd7shy1Pe++ic1TrjfrOlrlucG6jqAwHIi4Lr119x+fZroijik4++zw//4ue0XYuShnIW07UDi9WaJ8/f5x//V/8VRZ7QyRFfdYQRBIIkiXl42DP0Dm1SEIKm6wmjn+b8qz3W9nRdx+GwY7Ve8fb1V9hhQEioqh1Ns0crjceTFTlqnCxAkdEoqWjbflq8Hgacd3ghUFFE8BAnMZ2SSK3QUqGjhPXZkpu7DZGKefb8Gb/pW5arFaOfloRnRcm+rpDOkcYpkUkxUUzT7pHCk2YJm812+jxUjJAGJdW0ZGwHqu2BNE+Ii4KH27ekaUZezHi4v6MoFjTNAa0lRivuH64QkWaxWvP06XP6P+5p6gdG29APMUJAOZuzXq6pq4okLyZhEpInT97jcNhjbUfXdYTgyZLinTqkpap3f65Rdb6nyAtOTk65u7sljbPJQiU0WZGSzHI+/OyM2SzGaEHbdbz38QllkbK5a/mzn77h6bMF+01NU41oFaZl9lWC95KmHYhqiQ8BaSTVfuT6zZ6rt29CnPAzE83+46urq/rbO/FHjnz3ODYAR44cOfINEkIQ/6f/3f/mo8RERopBOD/gcNT7A7e3l9NMtRR0XTMFVgmmfAARWC5nZEXGw/2Woe8xRuH9pIwMLuCBMs8w2lAfKh7u7hjdiNYarQ1ZVuJGT11XCAHOTarRR4+eE0UxfdezWC6QUqFQpGkKStJ1HUPf45wjTVKev/eEJDNERqK15PZmz35XMzpHXe2omg47gJSSp0+fcn11zW53YBhGTk4XRFHMOFqECCgVMVpH0+wYB4cPgTe7L9nXO/J8xiff/xFPP7jg2dMVTevIUs1imRGC4cnTD9ju7un6HXmeEPw01jTYka4ZqQ7Ta4pSkig2CCHQSuKtY7d9oO9qDocDdX0gzzOqw479fkcIDikFwzAFWyVJQZlP+xJuHFBSMI6K1mvsKFFSYEcJ0hB8wLsRrQVZFjOOA94JIBBnCciRoWnICsOzDx6z2T4wn80Z+p76cMCOA3GcUjU1hPFdmJciTws
QgcF2zGZzlFJ0bYNQCiEESih0pGmrhgCcPT7n7Ze/5OHhnrNHOUIExtETxwldv0dJzWG749mzpzR9i7WKTz78nK+//iVJoghOYMeBw35HkmTEaYQPARMleDey2+2Ik4i+b2jaPc4PZOmcJEmZz+b0vUVrgzaGqq7Z7vYU5YKnzz5g87AlyWasTlbEaczqvGC2iHj2/hoRJFobqkPL/tCyrw5ESvOrn17yB3/5I3a7luvXFaP1VLseHUE5jxlHz9A5tk3P7e2Odl8T6yikmfjp7//gB/+vf/3f+Dfct3vyjxz5bnFsAI4cOXLkG+Rv/I2/IZYgoyjq4ggvpZUBuL27pmsrILyz//ScnjxhVi7R2vD4yWMePT3BGMPmASQSNzgGaxFKURQZ4t3SaVmWNE0HeGzdMVqQMuLJkw/o2pa6qrG2QypIo+nPKhNhtMb2A0mWE0UxPni8dfhxqp2KPENHhv3+gBApQwdtZ6mqPU3dMtic4D1v31wSmZgiz2kaQZxE1F3DydljEIHdZscHi6fEcczOHRhGSwga61viJCLyCR+eX/Dx9z5jsV5z/mwBMrC93/L8vTMi45Em4vn7z/jiZ/8tWRYxiIAdBAGFHT0AWZ4hpZmc/VJgB89h3zD2A1JptJnyEJRSBAJd12CMATRSKoxJSdOUJJ5SafuhJ00zEAKpJxuNtZ6gBQFJ27YkSUYIvEsaBu88sTYEGTBpzGAdSgnavidf5jx97xmHhwNZltH3A857tFLEUcZgB4yeGrHIGBbrFS9efkUcJRgT0etpt2Eq0BOkVERJRlNZZvMUYyKEhO3DHUYp+m5HnGTk2YK2bUjTguvLWz77/Pv85le/IYlTFoslbVeRZBlJkrLdbdnt7lms1qRpTte1hCCQUrDZ3mN0RJZmdF2LTwaGIXCopgajbVuWyyXPnj3j9vaefnCsz845OT/Be0FelESR5HQ9o28HmtqyWmUoafjo4zPquuFrrthevWa7afn7f+dn/Oj33idJNPu+AwFZFtMcAnkxjaI9PNR89ZuXvHn5S0bfudMnJ6t//Ouff++v/bW/9sd/9Ed/dGwCjhz5Z+TYABw5cuTIN8iPf/xj/3/+w7/+KzfWGztYH3ovvRspijlpEnOoduwP0yuAVIpx9CSJIS9S+mFks6mw1oEE7wJ5kSGlpGoqxnHEec/eDoAEIaZF1uAYR8d20+O9o24O9ENDbBLKcoGJYuzgKN4FMnnvGUdLXmYIoajrir4fiOMI7Yd340aeNIkQwjNf5dNY0qYBIfnoo8/Y7/YEH3AuoIzhBx99zv3dlru7PU+fXRCCY3tfc3+3JQjJcrVEHBy3D9eMFtbqjHEcaOuGy1cdjx6dMj8pSFKPEoof/YXf5VBvcUNLFGlsPyLlVOj74FBa4/uGrFBEJqJtBtrGkqQGkUSMg0NJRRwnjONIVe+RShPCtOgs392sJ0mCQEEA9W7ZN4kTIpPi/IhUgmGwdH1PZCIQim4YsQ6QHc7B6EaSLKGcp1gbIEj2u4rFyYyiLLm/2TKfr6iqFmNigg8MduBkNqfr7WQGGjvmixnPeR87eGbzBXW1Y/uwpSiXxGlMXpY4EdBSU5YFaTaj7Q/ERjFajzGaopjxsLlFK0VZLugGy9Xbez54/yO+/PI3QKA57JECZvMlSZLhx5ZXr76ckqTXFxRJMRmWqgMA89kpdrimOhxI0oQin9G2HVJCddhTljmffvoh292AiTWr0wVFESMlHLY9d7db5usZD3cHghup9g7n3vD4yZKzs5JIPyF4x0/+0Sv+9E9e8+jJinIWMzpL0wx4B/vdiPMK5zSIiM5Zmn4fqjf9j2Sa/qvvffDB3wT++Fs8+keOfKc4NgBHjhw58g3yN//m/9rYr4cfVIfuUd12Ko4NHYHFfMV+t6Pv7qYbZKFQUuH8VLw3dY+UmuAhigxaaaI4ZrSWtq3JkoRyXmJH/+6Wv6Fte4bBQgiT8nIYcc4jBSRRglKGutqyXp8SJxld2zGLE2azGcYYmrZmt9tMgU1dRdsdWC5PyPMZtg/8+pdf0NmKNM1J4wVpmoCA+/srhn5EioiA5uLJOa9evGV3v+f9j5+S5hFV15GLmCzLOewqNpsHHu7viaOE1XwFMqI9BPCWT7/3jLJMaNoeoxVJNufx03P+9E/+G7RU+NEhARMZum5Kth26ATdCmmriWDPawGwGSmdT0WgDaWao9oZDdaAsZxCm1F/xLotBaYVWmqaqCQQQAiFhvphz2NckaQReTgpKpRFCsdvtsXbEOYsdPVpr+m5ExiCFIkkkQcT0w4GhdyRFhFTTq8VqtZ4+LwT7ww5lInIzLR37NiBRPHr8hNGPeK9YrOZYO7JYzdBRQT6LUbEmjI4nTx/z5PlHDF3D3c0VJ4+WbDYbimKGVILLN6/QRnE6X9APPYMdWa/O2e/uWCwX7PY70qxmPp+zfbimKEoO+w37/Z6PP/qMIpmxXp/w5u0LQgicPrrgxde/QUrJ2flTnr+34PWb1+AFXTtyODQ8e/4IpzTFMuXR0xnzMqXvBva7nofbmqapGO1AWZS8+vKemzc74kgy9J5HT1cYbXj54oGb25qz05IkTmibBoIABG/f3LLZ7Hl4uKFqa1Ri9sv18kWUJK+MNTff8tE/cuQ7xTEJ+MiRI0e+IUJA7H7F79dV9S9orR7N56UYbE9dNUip6PqeJMmYzeZEUUzT1CRxTBRFUwHvPCIINg9b7u7uqKo9Vb1n9I7BOi4vb9hsHuj6bhoNCpJZOSNOUuaLNfPFCVKpdyMcGqk0Smn6tqWpa5wPdF3P4XDg7u6Wru2oq5pxdJRlSV6UKKUYbM3u8EBR5KyX56xXZ6xPV7R9y939FXW1pywLothQzOY83O1o2obT8zXVfsvubkukIpSRtG2NtSNDPxKZmO3Dhuur14x2YLd74OJR/s5/36GkZr8f+e3f/z2u37xm6Bo622HHEaUVRutpNrzq8AR0JEjTiKH3SOVJ0ikobX2SMl/F6FgTpRGL5YL1+oyiyJBSEccRs3nOajXHGInSgmG0jONIbGIO+3rSfirN6DxKT7P/1lr6bsCOFg+0XY8PgsGOCKk5VBYTa5SSmLhgGD06FuRlRtt2nJ6fkuYZUkmyvEAnKWkxI4jJGHSoDgz9yGI158l7S8pVzvMPP0RHMYt1RlrEnD+aI5Vgdb4gneV8/qMfcPH0Gc5Jnj57HyElFxdPOXv0hKZpePzolCfPntD0PXVT4wVYF5BKUVVblJScXzyjb3viyKBF4ObyNePQcXp+xnvvfYwdHXle8vnnP8Ij2O92ZHnKb/3eb5PP15w9eYaJC3o3cnI+YzZLyLKY00enLM9OePz8lB/+7nMePzth6CS7XcNHn12w29XTK5J1/PxPp4yH1emMLE9pG8vmoSFKFXFqyPIIpeHNmxfcbd4w+IZddVggVfXee+/9P/+Dv/0fvP62z/+RI98lji8AR44cOfIN8Tf+xh+qXN7+cOjq533Tp73TIY4jERnH4XBAKUmaZWy3d9ONcxDcP9wyn48IGaibBv8uAVYKgR0cAcFgO4
J3VFWFVJI8LxhHx+im239rLUVe0g0D6/UpAijKEik1fT+ACwQCkYkxJmIYLM4HmmaPUkwBWRjKMse5kaIoydICN3qcdxij2e637xaXA2lSsNsdmC/naAPBGc7WJdvdw3RDfLKgbXps8KxPC968fMXV9Qu64cBiseZ73/shbT8glWS/b0EpYq057Cp+9Pt/gSiKeHnzFilAKI1EEpwniBEkZEXG6Cx5HtNWPXYYMZGm7TqWq5zqMC3yFuU0597VHW0zNU2eQDkrmc0Ktpstfd/TdhVCmHday5G26zg5PQEp8AHatkVJiR176mqPD4EoilFa03YtAY/3AesD/eCIs5hx01HVHeVpSj4vubl+oO4GVBShA0hjWJ6cMPQjbpxeFAZr8d6z3zU8evaE0TmE0OjakBSSKErouoH5sqRYxshYcr/d86Pf+yG//rOvKIoMO4zUVcvFxXP2+3vevH3L5z/6Ef1gGYcBuklB61yFHRr2hy1FseD07GJa1JYWN1pevPyK75czzs4eoVTE/f0tTx494bNPf8Rms+H6+oHns4w/+B/9kPv7PUVZEoJAK0gzRRRN+QvrkxVvX19T79tp+b3vaR467OD5/IdPefvqlounC7pu4IufvmJ1MuPxkwX9u+C6fJbQDwO7XcdyvWC+mPGwfY3SCjmGoKTqlFL9t332jxz5rnFsAI4cOXLkG+LHP/7x+H/43//1L0fTVFkhm/3NLjStEKOfrDNCgh89WTYjjmOk1Bhj8A4OhxpQFFlOIOC9QxlBnheU8/N3qbrNdPvt4e72AR88VXVgHEc221suLp5ih4GHhzv6fuSd05GsyCdNpTZYa3FuJIRJlSmEpixT8jwlTTM2uz193/+5s/7+5paq3dM2FbPZkrKY0Q3dtATrBYdtTZ5n3N7dUJQzkjhhs93w3senvP/pBT/5R7+maSuev/eMR48vWC5O2e0q6mYgKROefXJO1wz84mdv+MFvf8gHHzznl1/8hGFocUBm3r2OANLAcjmFhw1WUx8mN39RZvTdQFGmNHVPXQ1kWQzBYQeH8yNZntAPOaXWpGnMZrNjt9ljtEHrBBMZotiweTiglMIk079rh4EQAkjoupbRucnBby12HEEIIq3RxmCtp7eO3o7TfkPj6VuBjiLSLKXrLHGSYIIgEEiSGEQgz0r2uwNV1ZFlGfk8Y7dt+N4PHtE2b0mLBd5ZZouI1y82XON5/tk5n/3gfb764pK6a/jgsycoFYFz/Nmf/gYVGc7OLxi6kTevr7i4OOXX2w1SKvJiTp7lk9rUgRCgdYwxGVE05Zs1bcOLr77kd37v97l4ekFaFNxe33GyXvPRp5+y2VXc3zdcPDnhD/4H36OqGrwP027Lu3wLicRZz3w+56tfXSKDIjaG9ZOCh/ua2nQ8+eCUtul59N6a5jCyua0oy5j5KqWpRrwTlEXK/VXD7fWW/WGDcwO4kUipLsuSK+/97bd36o8c+W5ybACOHDly5BtksINIU/PWDeoTpRPR24ZAmFJ3jcZ7jUAw9ANxIrAD9MN06661om4OeBeIk4T1rKBtO37+s5cYo2nbqRiNoxgTxSRpipCKrm3p+4br61uSOCLLcrquYxxHvA/kZcZivUCrmP1uB94z2gGlI4QQCDUVfLe3V2iTcFe3775mGIae4B0mMtR1TVUdEIopXCsELs4fcTjsWS6XOAd11VAuIj789DFf/uoN2/uOslxQZBn4mJ/+5FfMZzMW8wWzs5Jf//IVMijee/+Mf+Ff/ItsN3e0TcXoLLPFDCUEh7YiMgpjDCGeFo/b1tL3PcvVHMdIJDXBCaqDRRmwo8V2U/Lv6fmMatcSmQhpNMM4cnd/Cy4wm5XY/fQ93t1eE0Lg/OwxdrDYYWAcB7IsmTIVpKKYLRAh0A8dxsQkUU5epnhgHB0hCOqqQxlFQE67ElIwX8/Z3u7/PFPAGIM2ksPDAaMNcZ6iTU3X9kSpQY+w3becP17wcNtyfV9zelpwel5wd1/hBs96XRB9/xGHuic2mt2h5YP3z+nagdvbHdokJLniUFVsNjs++vR9vvrykgD0fUWaFQQk2sREsaPra4xOWK9O8cFyefWW+4d7fvh73yfNc8r5DE+gKAsW53OCF3Sdx4WRs0clxkTc322ZzTKUlNRVRZzEnJ6dcXp2x1e/fENRpNzf7zm/WHN7vWWzaXn+4RngQQXSOOXq9QGp5KQh7SyLVcFqWbC5qVBSMfQdkhDyIrvP0/T+d37nd5p/79/7977dg3/kyHeM4w7AkSNHjnyDyDi81Gn6K+fFfrB+ss3ANHYzuimUy0QoJRmGHjsOzOcznLPc3l6z2dxjxwHnHA/3O5pqCvyKTMpyuaYsSnRkGN2IGx2RNkSxRmuFHTo223sOVU1AUJQlJ6dnSGE47Btub2+4v7/h9u6SwfY0TUVVVygpOD1bcXJyxqzM0QrcOAVfVdWBru+ITEScxMznC2b5HO8DIQwcqh2zckFdVXhvcb7j/OyEl1/d0fcO70eKvEDpmOvbK56+d8bqtMRozfWrPcJLHj0958kHz4gSzS9+/jPqpmG1WqGVZH+oEFIglAIhCUz5Bj44illKnCmyPCYr0mksKFPIIBl7h/ee84sZSaJAQFFmxHH8biwK5vMFTTcQEFSHA3VdsVwuphvwpsPakSzPMEYz9BYTJeR5zjD0xHGCiRJkZAhSYf1IkkR0jaOqOgICbRTOjhgjyGcZzo/0XcdgB/q+Z7ADs9ls0oPaESlgd9ixr2q0inj51Q19Y4kjSX1ouHr7QJHHZJmm2jaEAG0zsFxmuGEkjSNubne8/8mK1ckcL0DHmuXJknZwtEPg/NE5WmuSOEMIQZoWlOWcWblivXpMUc4YvWB1+ojf+Qu/Tzs4hIbT8xnP3jvn/NGa5WnOxdOSj79/ipSOEGA2LyjLjCfPHtO0HUJ4lqsZh0NFXR343ucfEILksG+JjeHm6gGjDd4G/tv/8ucoEXj6/poxeOI4ZnvXTjpW6WiaATc60khT5jNO1+fkcUKRxYcsSfq3b98ea5kjR/45Ob4AHDly5Mg3yRi31rbxvmqS7aEVo1OM4zgtlQqNlA5lDDqE6UY7wNBbpITlYkGeF9OYjnc0TY1zjqY+YEzMarUiKzKauqXpapzzGB3RDx1CwGw+AwJCSEBitEHJSW3pxpGubYiMwvbQHmrSrKTrK9q2AjdnGHruN3fstluk0tPtrh/J0hwhDMYIQvCYKOP99z4lzQ0Xj065vnxAKfPn4zi31zseRYr9viEvMoIfcd7x4cdP6LoRpKJvHUkKZ2cz6nrHs2d/kX/4D/4R49Dx2fce0/UtQz8QvCcvS0IIBOexUpCkBoSg7yxaT3P6rnNkWcLQWnwYiRKNVAITR7R1y2yest91+N4y9B2n5+fTz973aBOj7cj5+YI0zdhXFVJPszBpnnN7c/Mui6FkGBoGO1AuFkgTI9XUlAig6wZ2+w4hpnErCHRdy3y5oGksQUqEVswXS+ww0A+WKI7pDnuUzJDS40eLG3r2dw3e97xpBpancxaLOdtNQ5JmpEnK/c2G+
SpDKEGaGrbjSBCaqqoxyvHo6Yqm6qmqPVFSsN8lVPXA6emc08dn1PuWODJYO6KimNwYBjeyWhTkZc52e+D9T55y/vyEICRnFwsO+4p1PO1JSA3zRUyeP6G3PVIopAqkxnB+ccZ+/4B1nuViwasXb1msZnzw8RP+7n/037JcxHz86WNub3esT2d0Tcbf/7tf8Du/9zHrRym7u47gNHVjKRea+9uGy9d31PuKx4/OkaHDdQfB4FItgvhWz/uRI99Rjg3AkSNHjnyDCG8/HNrug0PdzQ9dR+enQlArzegcSivatmEcBmyvCAjiOCWEwGAdounJ83Sy93RTAee9p24O1E2Fepf6q5R8N8MPeT4FNUVRQt/1OD9ix5EmBLQ2JHFCCA4lA97DrDzBO0ucRBgN15dvuXzzCo9kGAaEnJSbUZRwsX5Mns/ZbO64vn6NMTHlfEWcxZTlKW/fPiDQCD2Spwl2GDk9Tdnvaup9R1kYBucoZwVvr65Is5TD4Y7FYsbp+Sm/+vnX/MG/+Fu8fvWG2+tb/uIffEY/tHR9jyAwm5cYY2ibhraddg+0MdBZlAIpAkpHjKMH7/BBEeWaoR8ZR8f+0JFEEUlucMGjG4mJzxBKsdseKOcZfdOjtCTLMpyf0nStc+RFhrUDCEUxzwHo+gFlNHlZYp1HK4G106x61x/wCMoixzk37VIUCUPvsKPj9GyNEHJaug4BO44keY4dRqJoahIRoKRk6Dv6rqWtp5C3oespyoTLt/csljOq/YiJJOXMcPVmRznP2B8q1iczXr284dPPC7LCoPSSuhnw3uOs5/p6w6PHK5IsZRhGnpzOefHyitOzE7J5hBslJ+clq7M11lseP11SzgusHcjzFG2m16KHhz0hwGKVEFzGaC1JWiClRGaCwIJxsMhcMJvPuLq8xjt474M1X/3qFUmc0vWW7eaaTz59yt1txa9+cUM5i1icJIxW8rCpiNIZ64s5D/c1r1+8oev32G5Kdvb45dgNq2/3xB858t3k+Gx25MiRI98gWvJaG31QUoyJMTTVjr5vCQLyIicAVbWlqrd0XQ3BY4eerm0RIWAizf4wFfvWDrRthRCS1XpNOSuJzJR8a0zMcrFCaTmZVZqarm2wwzROoqWizEtiY2ibCikgilJMlOKAdmi4u7/Eezs58uXUWFw8esxsviDPS5aLNVpHvH7zFW/evkSrBO8FbV2hheD+dkeiSrqq4fzRCVGssdayP9RoJfj0s2ecnZ+xXM65frvBdpbddsvz5xckacwXP/uS9z96zu//D3+Xumv47PMndG1D01iMMazXC5SSNE3DfrdF4EFMBbdSgjgxJElCcGHam4gNUSTxLiCCJNjAYhEzW8UoLfBCEGWa2TwmiRV5kZBlhsW8oJyVSK1BSpRRSCWIIoP3Aa01UknqtmUYLGVREkWGMI4IBH3XUdcHiiJluShwbsT7EZB0bU/TWZarhMfPFiSZRGoJImAHOy0DpwldPxCALE+JkohymdO0LaP1jIObTEcqJjExh11DZBL22wGpIobecXe7p1zMOFQVkUl4+fUdowMTa4oi4nQ9Y+havB2oDy2n5wvSXJLPEj757IK+b/j448d8+skJy3lCkQkePZoRGcWjixnnFwuiWLJc5ixWCWcXa5LYQAjUdYOQkiTJsNYhpaKcFVNuBJKT8xWPH13QNT0vfvOGi0endLbHxIbdzvL28oEPPzln6HuGPrDZNIzOkiYRh82AHx2fff8Zn33/Q/KkwPYtTbVl6BsvpfBF2x5fAY4c+efk2AAcOXLkyDdIkqpGaGm10s67EWd7xqFDCkHwjqY5IBAUxYz16SkmioiimDTLkEphtKEsZ5T5jMVqyXy+ZBwHLt++YrO9Z7ff0DQVECYfvfcoKTEmpq5qmqbGunEai3EjUkqKYkZRLMiyEoEERnQkETJw83BF3dVorYhiQ1Uf0Erz+OIJQz9wfX1J1/bMijlxbAjeI4XksK/IsoztfkMUK5p9y/Z+R10dyLKIxWrBOHrevLrmy1+/5mFzx+b+ATuM/OLPvuLLr17z7MNT/hf/q/8ZL75+y/11Rb1v6bqO+bxktZzTdj11VTP0Pd55gg9IEdBGYSJDksToKGJ0nsUiI44kWkqUnMZy4tSQJBo/eobeEseGJE0wkSaKFXlhmM/z6eY9CEIISCVQWrGY5xRFQhxH7xKIwShFlhfMFyvu7x8YR0fb9hz2e9YnC+IkpreWfrRESfSuGfOUeUSaTF9LEkf40ROCJy8yYqMQZsockFIjlGK2LJES8rJARxHWOZbrObdXtwgHfnAY7VjMM169uCNJY+7v96RpSj947GjpG8fmbvp9iCJBOY8xkSbPU4oy5nCoePr8lL7tePJ4xfvvnzJax/njBU/fX3FynlMWCRJB13UYo0nTiK5pkSJifbIkz1PyLCUvM7bbAwKJlJL9viJ4S9O2CKGwQ08xy3j/w2e8995T7m4OVPseN3iiSLA7DAQpyfKYt6+uiKKYm9sHhPQYI9neNUgx8tEnzyjKYgqBC5Y0jTdRZLZVmoZv+dgfOfKd4zgCdOTIkSPfINaK3DufDUMXCzxKCbphwDlLV1u8c+9ueksICu9H7NCitUArhbXD5N83I7PFnHK2xNoBsQ84bxlDS93UaKMgKEIIZFlOmuYIGpq6wTlPFIEdB0LQZFmMNpokiRmGlrbtcd4hhCSLUxDTHL33nmGYbt9fvPqazcMDduwxJkYICATyIieOM4qiYLO5QwqNGwVXl1f0Q0+aZgRv2G47iizC2pG6bai7jg8++ID1yYqyiDl7b833fvcTXry84r/4T/8pi3nOvFDksxLEyOHQ0zQNcRzTtgO9DYx+JDcJ2ohJxTlYHu72FHmM0YJhCFPj4z1BOOI0ZugDIBFIIj01Pd4LeutI45SH+x1dN9K2LafnS6q6JVIR80VEUkTUTUucRCglMFohpWSwjjhKUUrh/MijJ+es1ksuL++xw8BsUdC1A6NzzOYFzoXpVUMH+sEzjpbV6QoIVPuGQ1WR5yUMgSzNSNKYw75BCknbNMRpjEAQPFxf3zBbzAhegoe26dhvWgSC3WaP94KqbtFKgBL4EHNxbiBIlNYkWcz6tODq+oGhH1itMzbbA/NlSdd1IANCSaJUI0QgeI8QkqJIqPZ7XFBs7iuevjen60dMVHJ2uuDr+iUvXrzh+XsXDENH18Jh1xGpLUIEvJDs2waTaj76+Alff3XNfrNnfT7jbtNxf3dgPs+5ud6z2e7Jizk2eJYLQxRpokhz6C3lYsHy5IL72w43youhG06Lojg2AEeO/HNybACOHDly5BvEeZ4IyLMsC2fnMdu+p3MNzjuKogDhaNuWcRxBS6TwDK5n6C2D7WnbltlsidGK+/ubSb152HE43OHD+O6WVVNXFXk+I44nlec4Wpz35EUBTG73NM/xbnoh6PsBrRQheKIoQZuSsW8BzziODNbS2R4pA4fDAeccZTnHhykhd7ffsFguiZOYyES8ffuak7ML8qxgu92TpQVJkiGV4HDYMl+c07UV+92OJIl5/8P36Lqe
N1+/oK16Ht2ds1yV/Od/5+d4r7HpiHOOcQjstw193zFfzGibjqrqGEfHYpESxTHBecbR0TQ9aWJIE0NVjRwOkz1JCEWeR3g3KUOttdjBkaTRO7XpSBhhu99zf/uAMjHLVYHRilhHCDGSFwUexWgdaZIhBahM0A4WwnTT7f2IkFDO5lxf3rJ/OJDECV01MLiB9ckCkByqnsUixroRh2N5OiMyhv2m4v72HpNEaKOIIs1iMccOA4ddRVt33Fxf44NHSRBC8fCwQUea2yvJ5mHHfFby6sU1jHD5aosyivk8p61bbN9R5jGH/UCaxVw8XqKMIskMZ+cLhtaSncTgA26wpImmKFL2+woRNHEsp+9bS6SQeGcY6dnudqyaNUII9ocDUgVOz9a8fHHDbrfFmJjIpFT7Hj9YHj874X57QGk4HCpef7lhvZ7R9QNJHIM/YHuHGx1FGXFztSMyMfksY3QSOTreXm6IopjZrGCxWNIebhHSo7Xs+AHjt3nmjxz5LnJsAI4cOXLkm8SHWEqZmsiE9qEK1nqhdYRSGq01Shqcq3B+JE5Tum4y5GijkVJyenKGiSKccwQvUNJgtCHLCkKAOM6IkylETAhJ17X0/YAx+l14l8c5h1QKZ6dFVCUgTnK6vp+SZ51HKU8UJwgCWjsO9R11UzPYFucCeV4ihMAOgcViRZ6XdF1D27Z4J7h4/IiiWHA4dMRxghSSvrcE2fGDHz5jt+l4/fqWR48fIaTlF7/4gtubO5z1PHp2yl/8S3+Vv/+f/oTDduTjz57w+PkKYwL7bQchMJtn3N/tCB5G65nNS9IsxnsP+GkPQCqUFmw2Nff3LXFq0FoTPATnQEiQHh88Wisg4INkaB37TcPl5RXr0xPyIiNJDc56+r4jK2JMnHB9uUFJRfIuHUuqGL+rEQjqemripJHsNnvu7+9RQhK8x/YjaR4jpKSuW8p5QpLEVE03jR+ZiNvrB+ptjbOe1VlJnEYoIdhvdygt2N5vGQdL1zb0Xctvfv0bLs4fIwUEF7h8c42j5/TiCVma0B46mronTjRRrMnzmO2mIY4m4xJiYLZMqXYt65MZeluxD5Ku7zg5ndHUA0oJ7GCZFQV3tzvSJKPvLXXdkiQxq3XJduu5utzx5uVblquCzfaBzUNPmuacrAse7racXVwg5EBRxLz8+oY4z/FecnvzwMXFmu1Nz83tHWfn5+z2DcUsxnaOat9ycjLtizRVTZRKHp3P0DKgljGbTcv24YHrqzc0zYEoU3uTRNXtf/T1Crj6lk/+kSPfKY4NwJEjR458Q/zhH/6hlOEwt84n3jltTEyelfh2QOsI7z3GRJTlAudGlDQslifsdw80TUWsDEiB9yCkIkkUbVvR9DV2GCc7zWjRPqIoctI0Y7vdcjhsCWEqXgEikxBHKSF4ZmVMOcvRxnA4VIyjZXQD3aHCOUcSx7hxZFdt6brpRSAEQV1XZHnG8+fPMTrhzZuvEUJi+4HV4hxEzGADUWRo6z2jlQxjy1/6y7/Fr/7sDaODZx+cU+8rXn39goe7B5bLkkfPnvA//lf+EiZNObs455OPY+5utvyTf/QlWRERRZq8jFiuGpTWVIeO07OCNI0Y+o44Teg7i1SSKJJsHhr2244k1SRJhHPTmBVh0qNmeYw2I33riWNDU3ds7ytevbgkSmPyMqEoIkbnqBsLUrA+W1DVLUM3sFoVhDBiYkPbDQgRiOOYqqoYBkssU7qhx42WLJsRRTFj8GgTEVxAR4o4Mly/3VDVHUkW05uR5tAjPBCmMbEQAnXV8PrlK4QAN3qEFGijub25IQCRjrF2MkMN/Qgy4KzncKh4/OSUQ90htEJqQwiB2XyGlIEoFrRNS1bMuKk3EDyn5yVRLKl2A8oIdBQwcUzbj6R5QlFGBBHQJqJpLXJTEUJgdbLAWs9+vyfLprGy4MEOIwGHUgLvBGlSUJQpeW749c9f8Mln71HvauoDfPy9p3zxpz1IwehHjImwrqfbdUgjWZ3MOexbhs6ited0neGZXnPAI4VHCIcx0aCkJAhhvsVjf+TId5JjA3DkyJEj3xA/AH2JO0EIkSSxS7UdjQhGIRntSJImKKWQCIQyeOcZ+o5h6P5cAenclAqcZCl929E0EUpKvNREOsb7Ea0NxkRorZnN5u/2CAbsME6aUGUwWtP1HUIYhmFk6C3OjsRJjGtGIh1hCTRtj7XvbvGloqn3aK0p8jmr1ZKhb/n66y9RSpFmGUIoxtGiRwcm0FQVzgVUBH/hL/yIn//sJWVeMp/PuLvZst9tUVHCX/xLf8D67JQnH57ywcfP+If/xZ9yf3XHT69uSOKM05M1wTkCAqkU+/1Addhw8WjBk6fnNG2N95Lq0GKMxGhF3zgEklmZkqSG0fl34VER+23LfJlhIkFzsIy959BVWBe4f9iS5inL5YL1KqfvRsbOIWEK0PKW7UNFPk+IU402kw2oaSGO4ymnYRzx3iGFoG5rhFDv0ms1RgqiKMKLQBRL7q7vuXp7zWy5YLFYcH+3wY+Bar8nThOGzlHdbrm7vqap//vGbPQOAIGg61vqpiIEiOIErR1tN7zbAUnpu4Gz8xnD6CGAAqqqY3mSY60jJsY5S6QNr7+849MfnLKYpYTRY3uHlBI3TsrTpu7wIRBpTZpJ+n7kq19fsVpmJLFmscpo247N5oDSgpOTNXEc8/CwIYoi4jgCYRBS4EcwRlMdKsoyZ7PZTHsS85y2azm/WHJ3vyeKNbdXWxSC9ekKE0ckiaKuHbbfo6Wm6R1xUrJanbG5e8NoRyGVePN/+bf/7df/13/n3/k2j/6RI985jg3AkSNHjnxD/AxYC1mb2DwMTb93Y+e08MbZliBhsZrTNDU6MlPB5Ua6dsQYjdFT2qxSksVyNjn+iSnLGX3X07YNnoCJE2w/cHtzjVaKYRgYx4EkTViv17gxYKIYO07udx8cBE0IoHVE13UoZfBeoNUIaISM8EOgOWypmgNaS4zRvHi5o+9roigCqUHAcnnGfLFEqWhaWh4H8tmMT3/wAVfXN0Rxxvr0hLcvrpBKo5OITz/+kDjO2NZ7fv/JGX/v7/433F4eePJozdP3zonjDC96louCrrO8fXOHUhFPn655+t6Su7sHggsI5VmfrJDScdhVuDGQpgnBWwIOKQNCarYPLVkeEUeKl19vGHrHapkSvODu1QNZkZFlKeU8RinDaDuiOAJlSTPD0Ftms4I0ixAykBeauuohCIoy4bBvwAtEEIxjjw8jeV6go5SkSEhzTd8FIqPQBr749VeUsynMq2072rpDK43UZiqcb+9pqgo79CRpQtu2BAICgTHT+JGJIxCSKIlQJmY2j/H3B5TRLGcZ1lriRDFPc0brcMNI34/EscIHz/5gWSxzAp6b65Z8UWGMIMtinPPYMeCdIy80XW3RkaCuO4pZ+m6vo2e5KnHO0/Y9q2XB4dCRpjFN27JYLriIU64urxhci9AFUTT9zNMsoeta9vuWx4+W3NxumS1K9i+u3jV0GmFBetg87NBJRJpNtqb724rVskBlkq5pKRY5UqesTh4TXDv
Huid//a//q0vg4Vs+/keOfKc4akCPHDly5Bvixz/+sfVe/jx4f+1svzGSIYkj0jgiUoaHhw3OT2MVznuGYUAqiRsdu+0WZQxCKA6Hmq7rGMeREAJKaeIkZTab4f006iOFYBgGImNIooTROm5ubjhUe7quYRwn8xDv/ChK6clkYyYTjjEaHWmcHxlsz+3tJU1zQEmJVJr9YU/Xd8zmCzwQgidJExaLEm0kzrlJGWoiPv38faQUNHvPcr7g7ctpcVWawMc/+BjnA3/8j/4p3//8fS7fXHH1akuWZdStZ7fr8cIRxTFffPElL19ckRcpSZqSz2Jev97y4usbPI48T6iriqaqaauecfBoPXn1lVboKKLtHdoItIbL1w8YrXj63gykpG4cUWR4+vSUJDXMFimHqiWKIuxoieIIpSDLNUn67uelBVpJvA9IJVEKksigtCJNEvBgZMRyPefkYsbZxZyu7ckLw3KV8vbFJVrHnD+6oG176kNNnhd4H8iygmGwDMNA8IE0y4nimCROiKOENM3QJiLLi3e3/oYsK8nLhPlqSV6m5GXEbJWQz1JWpwWr04Qkkew2NVkWEaRhtswQYgqOUxqqpubutuFh03J1fWAYLftDSz84sjQBPCbS1HWPFILZrOTsYs5+15LlKUZGpHGKFAohNN47rq8fyPKc84tzql01qWNnOTqamsm+9Tzc1fzZF69RaGbLgvl8zs3bB9p6slKNbiQ4aHYNYXT40ZGkBiEGlHQ8fjLn+ftrfvBbH7NanyOhrPf736k3+yff5rk/cuS7yPEF4MiRI0e+OcJGFn8yF+0HxSx70jX2t+MO0jTj0I6MdprLbtuWECxVtSdNM3wQpHnGYrGgaztm5RyjDV07kKc5TV2z3T0gWvnu5WAy98znCwgQfKAbeqIooJSi71usHafC1lqsHdFa03U1w9ASguNQTcV71zccqu30ihDFzOZrur4ljjKWyzVNU9G/U4NW1YEvv/wNaVyyWp+jdMRv/c5nzGYx//Af/FNO1ufUhwNBBh49f8x8nXN388Av//SX/N7vf4+m2vL/+A//Y06Wj8jSjGqomc0zdrsD3nuePbsAL7m7b5kvBW9fb9k+dLz/4SlDH3j59R3LdQZecX9Xc3paIpWf7sqFABmII4mXCUPfsVyVZHnEbtfQNANSB84ezWm7aRG22lVEcUq1q1FGkySSOFFY67CDR8pAGhuG1uMdIEZC0ICY9g1GT9c51qsVZ+czilnMm5cPKKkRUnN9fWC/bTk5O2VwI4N1ZElC3w9EsUEEGMeeWTmnEhVZntA0NUmcTi82xjBai4kilDQEIC9zykWOMXB2sWC2SskKA8KTZYYkjaj3A5tNxfN5jDHQ9Y7ZLKbadfStQClBUzfMFkvcOEKAcbQoKbBuoJynGGNI4w6JAA/Pn674zS/fcvV2w4efPOP1i0tOT1fc3Nzz5Pmar766YrlckWYJ9a7jcrwkzTXLVcHV2w2EQBYnvPrNHX6AkwuHTjw60vTdyECPVIK6ahEB/GhRkeHZe2tms5K2Hnjx9T1KKrb3e7I8IXv2zAgR/ordtf/Tv/bX/toXf/RHf+S+3eN/5Mh3h2MDcOTIkSPfIP8T4KeR6WzbeTt60fcDTdvQdY5yvkQw3aZ7D1k6hX8VRUldVRyqmtN1Tj/0IDx+DO/0kBFZluHeGX60kQglaLoWNzjiJCLPM/q+xw4DPgSUkmhtUFKT5wVKScqyYL97YPNwSxynxGnKoTrgRoiiaQdgv99ioojZrCAET91UKC1QUiGEIksLZrM1UZLx0SdPyArN3/s7/zVZktLUB5q+4dmHz1idL9nc3vHlT79CSYXUCX/7P/w7xCrh9GTNy1cvODk7YbE6IU1TlqsMPzpurg+cXeRIqfj61ztC6PnVn71AqZhn763Zbwcu3+55/t6CcpEyWguAiQzODeRZghvdNFbkPHYcaKqBONaMo2O0HoXB24C1HoSj6UYulgXlXBC8oq5GtIYkMfgQGJ3HeU+Wxdzd1BipJzWnkWQ65eLpnMUqZ/vQovS0VH11eccwTKm4JooQSLRWCAlKSYwx75SvM4SUSKVI8hikIksTuq4nerd7cDjsiaIYEBRFynyeIYKnKCLyZUKRR8SJRknJbtsRgiBOI9IsJommGf5xDAQPgx0pZhlZPi2UF3lMlqV//j1qrfAOhjBwdr5itFNSdRwr8iyhb0eyLKOc5wRGVus5AsGjx2d8+eULnj97hFSSw0PNdjuQZQlpqnCDo5xlFGWK0YbDtqOYx1TNNbO8ZLvZIKVGSsEwjqQhJoljum5EKsFnP3zCYr3nP/s7f8L27prt/Vtury9Znyy2xXpxHP85cuSfk2MDcOTIkSPfIH9id6e49vPm0DwbbIjafkAgJpe61PT9gJKaSEcYpRlsTxB6cuhLQdM25GVJHBs6P3Bze4uUkjQrqKsDIgSGoSeEwGhH0iQjiiP2uy2EyVDjJo0Q1lqyLMf5kabt8d7R9RapY5LY0A1TAWqMoqoPdF2L8w4fAvv9gaZp3o37gJIgxfT3lqs1H336mMUs5u/93X/IrJyUoVdXV6zP1qRxyvb+gV//2dc0TcNv/cEntLbh8aPHnF+ccn//wPe//xEff+99ilk8ueQNjCMs1wl9Zfnpz19SVx1aerzXnJwl3N003Nze8+EnF6xOCkY3IoUgyhJcAD9CmimaOrDdVhgVsdu2xMm0CG2tJ9ISpSX7Q4WQhjevbymKhNnCkKeSzUOLCI7lqmQYRrreobUiSQxD5xgGRzpPSbKEKI7QRpCVCX3XE7zE2gEQdG1LGAVBSoIUSCmJpJjSiJPpNr/voVzmGKNp6ghlQEdqWhQ3GqPfLeQKQZGn2MGxWGWkqWG0I9pIilmM0Yq0UOzvB37x07e899E5F4+XzBcFw+jI8pjXLx5oa8v6pCSbGdIiJksM4BEI0iRmu2uI4phxHOh7x2Ku6dqaKJ12G9anC25vd3RdR1kWtHVLnAIByizhZdtz9faWOEt48eXltL9w5snymIf6AEaQlAmhDxy2PSdnp8SxZrfd0ncDUnryWYmONEEqhJI8ebqinCc0bcf7H5zx0cdP+c+++jV9U5MYQ1DiQRfxP/mjf/94+3/kyD8Pxx2AI0eOHPkGiYwOYQxJECIHjNby3SJuQGlNOZsRxykBqJuGcRzp+w7nR6LITGYZKdEmwvuAVhprLYfDnrZradsWoyOyd/PiQ9+z225IopSiWCB1hA8C7ydlYl3v2e+39H1D2zYE7yiKqWCXQgKBruve7RrIP/flj25kuVwRmZjIJBAmDePgRhw1Qg78//7LnzJ20FQNN3cPzOZzYpVy9fKaL3/+FePgefLhOZ9+7z3uLq/pu5G72wfWJ3NOTpZIKTlUA6t1iZRQ73tefbnhJ3/8NQwOIwaqpiKJFfW+4pc//4oiN5xfzKjqGolEyanYtn2HQNI2I23TI4l4/fJhukmPI6RSrE9T4kwxWEtWxDRVT/Dw9PmC9TIhMoZxCKxOMkwk6bqBEDzBAwhGOxL8CMKxPMko5oaTs5wijxDod3
agacTGWUfTdGitpuZPTbf35SyjmMWUZQRAmiqSRJDnEecXc9brgtnMsFzlOOfpusnBb4yhnGcUi4QoE5jEIFRAELCDZWg9L7964ObyARECq3VMnGqqesC7ERECRhmKUrFYJMwKxWqVoKRnsBZBYBxHxnFq8pwDFQmUkgQXSJKc1dkJCBh6iwgBwrsXFa3ph455mfL2zc27FxjD2xf3PNx2CC25unpAC4HRmhAUeMfN1ZbHTx4z2vHdwvrUKJ2en3B6sSJJNdeXN2zvau7e7nnx5RV5VvD+B8+YzUucd/Rtvz48NJ9/S8f9yJHvLMcXgCNHjhz5BrG2uXBuzPrOxsM46jhOkPU0UrJczvEhEAIkScTQt+z2W9I0J4ojhr7HGMPQDzzcb9FKsVqt2O231NUOoyWCqWjf7jYYFZHEMT44hAhYOxCnCcZEU+EWwNoBKSVd1+HcpK7c7u6nPIBxpLcdo7c4Ny1dSgWRMUjx7hbbe5qm4eTkjLPzxwgduDg54e5yj2C6re66jtVqQfCw3+/o2gYpBbP5gu99/yP+87/7D/n5n37BcrWmaxOcdRTFCoenKFJumo6XX97RN443r16w3d6y399Szkp+63f/gHrfs909sDxZslzO+OpXN3z82Tm3NweMMWQZJElK21pCkPS94+F+z/o0Z7HIadue80czuq7jsHfkRcIwOPq+5dPvPWJ9kuN8oOtHVKSIc8Nh32AiQ/AjWkuqTQMC7OjQRpFmmiiSZEmMHydvv/eBJJYMdlrqRQiKsiCOosmwU2rKmcZEKW9fbUhiQ5bHAOjIkWUxzaHn7HTBYdfw+ssdTV1zcnqKD57l6QITS+LEoESgqUeCl9SHmiiK+epXr/EOQnDMFwVX1zsWyxltbd+9IpmpkShiklSxWJS07UjTdqyWM0ob2O8bPvjwgqq+JklT2sQSvKdre8rZnLRI3yVKg3MeFwJCaIoiojn03FztSdMCZRRlmVBtauJE4QiMYyDLNftdjUkz2sExSzVRmhCCw3mPlJKhH1gsS548WzErI2wzQJDc3hxoOwdCUzU1GMbdvjp/9fr1//Jf+9f+tb/3b/6b/+btt3v6jxz57nB8AThy5MiRb5DBuloiWh1FTmlD14/4IBByMueMoyUvMrSRtG1LHMUopWmbmnFoGfqWu7trquoAcnLiSynfLeFW7HYbqromiWKSOCVJcpI4o+5qrO2ww/guDdgTRTHL+ZIiy0mShMViTlnmpGlKlmVk2TQ+5P1UDAsBxqRkeUmSFiyXZ5TlivlshUDg3cCiLLFtjxwlfdtQVRtmiyXD4NlutlT1jtFZkjzn8XuPqbYNL756QxQpbm/f0PcNy+UCO1iuX91z/XrDq9/cIpzn1YvfcHt7xTj2/Oi3f5e//Ff+Jepdy93tjiRJGa3n5dc3zBc5b19tuLvtaRrLaBXbTcPQO+7vDtjBcf54uk2vm55ykTCM9l1qsSaKFMNgef+jE1YnMX3X0/WOQ9UjjcDaEZhsSVme0NQtQ++mBkFKTCxIUs1qmZIkkm6YFK390CNUwI2BECTayKlY9pN9Z7XOiRON1mBHz+pkhhunhe68jHDeUcwipILr63se7u+QgBSCJIsJYkqMHrqpYdNaUVcDfTc1mHVVUR8a7m523FxX7Hc9JlLUVc84BtJMIZRgHB0g8UhMbDhZz5FSYCJJU7dEWrNaFTgPaZ4hjaLreuIk5umzJwx2REcGpCTSiu12jzYGZSBNDDeXW9I8xsQaRthc1yRpzpurDcvTOdnM4AR4AspIPJMJabQ91XbH9m7Dy6/ecn25p+8si3XB4iQnKzIe7jZ0jaVrB4QQtQ34ly9e/ZWv/+yL//nf+lt/S33Lx//Ike8MxwbgyJEjR75B/vV/42/+WifRl+D2wY0W53DjiFaaqqqmWX8h2O22RElEUZaTlaWtqOo9XdvgRksIjvHd6E/TNggBSRSTpQVlXhJHMVFsQASsnQo6pTSR0pMRhymAKY4TQJClOUmSkaY5WZYzOs9m90Db1oBDKoknIKVECsk4WKqqBiQBT2978nxGve9o6pbLq7ccDgcWyxNCgL6rITi0iijnC04fn5PPc778zdcE52j6hqAkWTFn9PBnP/8l+4eaw74hTgwPmx1JWvL5D3/EX/1X/mWevf8JX331hoftnvlyRpzGhBAoy5zL1w+8ebmlPvTs9x0P25qq7vnqy2uCd2SFxg6eh82BYhYRPFTbAeGnW/CmtcSxoZgljM5jXcCHQF31GKNw1pMmEUoKQLK5bzHG0LeW2SxDAEZPutAokpSzBCEEUayITYoQktEOeDvixyk9OC8ilFYorWkay2KZYhKF84EApKkhiiWLdcrD5sDmoQUJJjYMdiDLEyRysvIExWHXsFhk3FxvuLupuL2qEMHRtTuu3my4v2nRUiOCJ800UgjiJCIyCu+mhmm/q5FSgPSUZYKzA0oKdvuGs/MzqkONUYY0iYnjiN12y/nFGXEyBdoN/YBz0Pc9zgfWJ2d87wcfcXuzQQqIYkhiQ1sNdK1nu+nZbWrOzuZEUWA1L2n2Bz746ClKa8bBUR324D1GR1y92XJ7XfPiq1tev7mfzExBc3t5xWp+xsnyoj07P/v56Ef9cPfwr/ztP/q//8G3de6PHPmucWwAjhw5cuSbJSDUW0loInyfaslqViCCR8tAFAmyPCGJY4L3tF1L17cYE5EkOWlREsUZShp22w23d9fc39/wsLmjaStG22H7luqwp2lq2qbBuREpFVGSYpLJH6+URAjJMFqarqPrW8ZxREpNEiUoKYkjjdYCOwy40SGEREqBHSxKKtI0Zhh7tElYr8+5v7vj5vaG29stSkYsFiuiKCbPUiIToaRCSRidI8ljDvs9r1+/Yhgadvs965MnlPMFL17+htPzNbNVwen5jGFsaLqOYl5w8mhNlCVcvb3E256TkxVCCkIYiSLN3fWGV19e4oYeArS1Y7cZuLqqmM1yBBEPtz3DYEmzlHEM1NVAWw+EIOjbyRqUpAqtNVJOxffmocUODi0VUWT+vImq9h3j6KbiPoqIYk1wYrIzJQYTGwgCqTzn5zO8s7RVS3iXEiwEJKkmLxPGccS5EYDZLCUyGiFAazGFwRmJ9x4tJQrJbDYjBEFeRMSpRApIYokfA95P2Qf1viZ4zea+412UNH1jGW3AWYcInvk8JYoEtutREoJ32GGkPjTMygwTxwThMdqQpQnjaJFKoZVhc79FCoVSgr5vp9edNOWwP1AUBdtNzXxRMvQOhGK1muM6y+G+I3iJThTew9CPNFXL1eWGvps+AyH9tHtiFMUyJSsLkjwjSRPOLhacPZrhCMR5SttJvvjpS+5vb0giQVVvcaGvHj959E/P1utfSSlcGP2TP/zDPzzWNUeO/DNwPChHjhw58o3jLmfL/GWWibsiVWSxxnYDUoAxkvu7W9q2Z7QDh92GtjkgREDIqWBsu4amaxFKoZTGKMU4DPRdjRsHBjsglSJNCmazOVIqtDYIIVFGo5QijqebaCEEPniarsaOIwJF3w5oIYl0zNBbgpAEOf13M
LoRO1qcH7F2YDZfsj45o94feHi4ZTbLWa9PaNuOpu3QWtP3PV3XEMeTxjLNM5Ik4je//pLgBZvdA0+fvk+Rl/zyN7/m/PE5zvdY1/Piq0t++cUb+m6gmMV4Z/mn//if8HB7S1FkbLZbrB3p2pa+ban3B5r6QJZnDIPlsJ9CrdI4pWsdV1c72nbEj5rDrmVzV7PbDDgPzgVCEERGEUWaODI4K+hqy2HXTT9/KTBGYQdL8ILtpmIxzxEeskxjraOuW5yDEBTOeUCQZ4bBjrz8+gpnIYpixrGnKHN0rJkvUwKBfpjGfay1JOmkLk1Tg5IS7zxSCPI0YhynkDdtBCfnS5QWLFYJ5SzDuRGtPU3dY60jyyOECPjgSdKcvMyAgBCCtp0CtooyQWtBHCvsOL34jHYkTTVJpN8tOnsGNxKnMdW+oijm7PYH+n7Ee0jSmBA8UioO+wofAnYc0SZlsVpxOFRAoCgKXv7mDucDr14/0HSWoXekWYS1gc2mZbSOzV2FUp626Xj/o+dEWYyJIrp+YBxHsiJifZJhYsGTJwUXFwveXr7l61e/oGpuGcbWihBkmqXbOIk3cVa+/vGPf+y/vXN/5Mh3h2MDcOTIkSPfMFFYvJHa3CSxPiRKhqHv0NqwXp9Os87DtHDbDwPg8O+KbWt79rsNITgWyzlRPC30RiYmigxZWmKijCTOiEz0zvCzQWtJ37fs9xv2uy11XdN3A8O7m/3IGIp8QVnMGYaBYbTY0RKAJM2w40jA40MgilKytCCKMlarU5bzJQ93lxwO95ydXqBUxKHao4xmtV69yyXQFEVJVR8AzeOnj6n3DSoommZHlpVolfLll1/y/MP3uX/YcHd9T7tvuXz9mmIWc36x4Bc//Rlvv36DbXqsHXj56jV5OWlMpRRUhwMPD3esT0+QMuL+tmF73+Os4/pyy5vXG0YbkEJwd7fn/rqhOji61jJah/fuz00+Uhju7/bsdx33ty33dxviWCEldF0PQlAdRrp6RClFwCOVwlrPaD14QddYmnogiqfF6Tdf39M2HYEerSRFWVDOcoIPREbTtZbIRIDARBE+OMp5SlpESCmp6gEhBYOdXgmGwbJYFsSxRirBcp1jx2nH4+R8hkCwWs0pZ9OYl1KaxXLJ6qQk+u8SeDtL2/QU84SsTIliwzAMxLEhzxOkFJRFhpKaNEvw3lFX7dTYCMF8vqBpWpQ2JGmBMdGUbRAnhBDI8hQlI7KsACQu9Hz+2+9zc/OAHyX7fYf3TC9KScZqOSOJNUoFDpuGN18+YGtPGid8/PH7lOWCNMk5bGv8APu7jr4ZqXY1FxdLPv/8e3z22W+hdETXDZkdbR5FUdd1fT42zetv7dAfOfId49gAHDly5Mg3TDMcTutdda5QsVY+aDl50vv2gG0tcZQg1KSGnGQxkqIoMMoggmA+m5NlBYvFgiIr0SYhz5ecnD5muTojTqYGIDYReBjf3Zimaf5u/EehtMIYTTd0dH3HYHsOhx2Haks3NNgwMtieuqnRUjEr5swWi3e7BRHPnz9nNivZbm+pqi1FWTBYy+gcSinSNGW/302aSymnIlEaFosF4yh4++qK2+u3NE3DxcUTRmd5//33uLu65OWLl7gRvvjipzgss1nJn/7pT3E4inmBdYGgLJ/94CO8cyzmBYftAy9f/Yb3PnnCJ599wHZ3YBw9SgWkCKSp4fx8QRxLqroDNM4HmsPAYd9QVS394NkfeoSQdG1P1wXaznJztyeOUuI4ou8sbhD0TeDm7YbdtmGwI8Zo2sbSd35KWB4s1bah2Q04K7h6s8FbgdGS/W6Ps5ZZWZKkmkePFgz9iFKaEKYGQhvFMDiSbBo3UpGk6y0uKDbbHm1i3OiZFQXaCJarjLxMCEjSPCbLppvy+XJ6CWmaHqMTynnJcl2Q5YY0UygFShvyWUyca07eJRZLJUhzQ9OMJFmJVGZ6QUgy9ruWJE6xtiXNImbzGWU5QyDxAdq25eTkDOc8s/n8nS510shu7yvm64zZqmC0jtksxTnBMPTU+4qzixlBSFbrBUp7lDBU+56u63n2wZqzxwtOHy349PPHnF9kPHo6B6FRRvPzL36Jsw377Yb3n7zPvFjYxXL5J2me/8YO/XIMIf62z/6RI98Vjg3AkSNHjnzDeD+etnVzsjtUcd21QWuNHTqSSLNaLpBSEAgYE6GNIU4S+q6j6RqiKIUwhXhppQgEtDFIrXDeIwR/rvD03iGkQUUx2kzjKCFMCcNTpsCkphzsiHeWfqgZXUc31NzeX7Pdb/BuZHQj95t7bm+vOBw2DH3L5eUrbu+vUVJQZCXBT3PvUkqknGQrRVHQti3X19eEACae6q+ubumairapee+9j2nqHmM0l9evubp8zawouL5+jdaCWbnkJ3/8T9jc3fLZp9+n7QZ04vnRb3/O/f0Go+BnP/ljbm4v+cv/0r/A9374OVfX92gF5xcFj58uePp8xnsfLEiy6ec/aU97gg8EN7LbdoBke1+jlUb+d3Ym52hqC0jSIsX7QNc5+mEaU3n75g6kRGoxjUmJaV8gNpqmsmy3ln3V09SeurbsDzucc8RxQtPW+GBBBAAOu47qML1sxPE06x/HEVGkSZIYO3riNKLrHIOdPjc7WoIUaCNJMz2pXYWnmCXYIWDtSJYZxsFjIoM0GmVgsUwoF4Y4gTSPKRc5XniK+WQSevRkRVYkRLHGRNMLU5ZF5FmC0qA01HWNkgrrLFKB1gYAa3uGvmOxWiK1QSG4urxkGFryvGSzqfEBPv38Oa/f3vC97z9HyoASCiEkD3cVi1WBJ2C0QhmJiRRXrzdcX97x9PmaPFfYwRGC4PS85NmzBWWRcnK64O2bK9puy8vXX9tiMfvl6vz8/33x+Pn/rZzNvmrq/V8+7gAcOfLPxvGgHDly5Mg3TDC6LRazh7zMuiTJaeqG0Y1T5qqErp+c/EVeEEUxVX3g5u4GpRVJmjCODhGgazvGcRoHiaIpoKtte7QyeB9wPhDFmtEN1HXF0LcIEZBKorSm7TqEEKRJOi19VhX39/fUVcW8LMmyDM9UuKdJQpFmFHmB94GmqbGDxdqpUFZa4caBrusQUpEkk/lGCEESJ8yKEucc88WCcejYbu54/t5HSCk4Oz/l4e6Gw2bDcnGCc566rjE65urtG3a7e37rd37A0/eWSGP5rd/+Pv/0T/6MoQ588bOfIJXhr/7L/zJxesJvfvGacRg4O1uyXKc8flby9PlyWq4NgqzQlGXGcpETJYrNriFONH07mZKkEuy2PVU1MPTTOE2WxXjn8SPUlWW/bbi5vENIyejGd8Yb9y6sbcpXqBvL9e2Ouh7Y7RpGH94lMIMbLWmW0TQ9zgnu7mruriu6qic4Td+/K361R2lYrjOcHYmUpu8a4lix321IkoiiTFB6GtNxzqMF2M7ycLsH51FS48eRPEvI8owsi0iziDQ1pGkM75aI60M/BcvphGJekmYRSkryLGJ0A24cEEJijCTPE9quxnk3LQz3HX3fk2U5Pni8szR1xdn5Y5x3CBzbzYY4UfS95+rNjtNHc4p5
znbf8uH3HhEX6dTgtAN90/Hiy2vKRc4wDHgXmJcl1292fPWbK5QSEDxvXh741S+uaFvLz37yGiVT1utTsmxGmuebKE9+msxO3v77f/RHXy3Xq3+gpDr94osvjvlGR478M3BsAI4cOXLkG2bU3Zso0hul5OiDQ2mBiQz7Q8u+quj6gbIssaMjiXOatnpng4nouqmI79qWvu/R2hDpiOAV1g6kaYyJDVJJpFbU9YHDYYcUkiRNEUoxOoe1FmMMWincODJah5SKLEtI03ha4m0atFZ458mihEgndN2AUrBcLpiVxTuLTcYwDGgTkRcz0iSl6zr6rsePjuA9l1dvycuUfGb4+sWvOT97zEhgfb7m9u6KcbQ8evQEpeHy6g1ZlnJ59Yaqqnjvgw/54e/8Fpc3D5yerflP/r9/Hxx8+dUvsC7wyfc+5auX17x+dYlW8PjpnGKpefrhktNHCw71QJJFZLmZFl0jSFJJ8JJynpFkEXEaMfqR/aEjIHAW7DAtZVs7TtkAo6dtPG9f3XN/s0GKwDhahsFTNwNaaeJU01tL3wWcDVT1yO1thR38tAtRFvgQiOOEKEq5vdmx3TTc3+8ZrOfutsb2nv22wpgIYySj9Qz99DVIIbB9T9c3lIsSZCDNUqJIo4WkrR3jCNZ6tFY475ASytKwXJYsVgXGSKSURHFEnGiiSBCbiMO2AQ/1oX0XaBYRgqeu9tjRcaga1LvciThWNE1FmqY0dTP9PkVm2qVwnr5ryPIZaZEyjp67mwf2uxqC5PWrW9p25Ae//TFvL285u5jjvCXPUoJXGKHp6ophGFmuSrzz7LZ7zk4XZGmKD5rFOmN5YtgdOnrrWa7n/ON/9BPq6oASgkhrpYI0SdsGgGx58vdmi9lPy7I8jgEdOfLPwLEBOHLkyJFvmKFXTiDGJIo6KaS3/UA/DJTzhMWyYDYrKIsZaZLhXA8+kMYpJoomH78bqaoKOwxE76w+UxiYZuhb6uZA3dW0bcVge+IoYT4/IUlyRJi87G07ueu1MeRFzsXjR5ycnmFM+i5ALCaKYoZ+mG55xwHvPVJqnIObmxuGYSAvZqzWp5ydPZ4Si6OIuq7Ybu7Z73eMo6Wqd+hY8ennn/LTn/4EP1i0Tjh7dM79/Zbr60vOHz0BpXj75pL5YoYPjouLJ6xPVuRZyVe/vqNv4T/+j/4Os2xGFEc4Efjk0x9Q7VtmheHTj5/wwUdnrC/mfPDZmtVJzm53IDCSJjF2HMiyjDjRWDuSZor1SU6aKmCyzZRlCsEx2AYpJQ+3FXawRHFE143s9xVXV9eAmF5gBsd2U1PXPUmqMUZT1T0uBPre8nC7YbetGAaL0dM8f1YU+OBI0pj60FLvakbn2O8OWGu5ud6x21okgvow8vb1A0kyLdXaIbDf1wQpETrG+4CJYBwd3gceHmpCCCADHqaF3lQj1MjZ45KsSEiSCK0lygSiWDD2ltkiputGmrqnrgakUByqqRmyo0UoRRDg/EgInqKY4QPoKII/t1O1ZFnBflujpEDIQJQkeA911XI41Hz6+XtTdsEA1aFnuVpyezsV93Z0NE3DqxeXrJYlr15cUsxSVCSIEkPXe5SW+DDiQ2C5npFlKW9fb1ifZDx7ds7tzTV9WzGOgxi9W+6a7TOAf/ff/Xe3H3zvh/+ff+vf+rcO3+rhP3LkO8KxAThy5MiRb5hCSOnxXghq7+wQvEeIwMXjBctFxnxW0NYN+92WzfaOyESkaUbb1Ay2x7lpvn9K9B0Z7DCFbQ0DzjlCmEw3xuj/fuRHqXeJvh0CQZIkOOcm69A47Qb4IMmLGUJqnBsnM0uWT0WekHg/4lzPOFpm5YLROnbbe7bbe0LwBA+H7Y66mhoP8PT9QNM2/OC3Puf65oG2Gjg7OyPIkaHpuL++5YPnH1LkOW3bcXZ6QXWoEEIBgtl8xdcvXrDf3vOf/yd/lyJO+cEPP+duc8f7H7xPkhi0CXzw4TOqpiGbJTx5PkcbQXXoKWcJjy7mjKNltZ6DcOz3LdqYd+5+yTh4uqanLDKMFngfKMuCh4cN+33FbJ7RVj1vv77h/voBJacwtWEYEMB+19APAybS9L3lUDWE4HHO0dQ1fdvh7IhWCmsdRZ6/04g6Ii2o9jvyOKY5VAyt5er1PSI4Hu4P9K2nbaZ/5/LNjr6x3F7dkKU54+BIUoPWAakEm80BHSnafrIMEQRj74mNxmjN6UWJ1AJUQBtJZCKG3tF3I4KA1pLNww4/BrIsZbetptE0L4jjiDjWU+BZPwCa+XxKbE7zHOc81aEijjNGL2jqjnGc7EBFURAZTdt2LFYzLh6vqQ8DxmiklPStpZjHnD9aoI3i9nY3aWpRHA4tOorIiwwhFXXVUpQxSk762vOLEhk8v/7Fa1arBfOyoG0aoiS6Nkb33TA+/+/O3VEBeuTIPzvHBuDIkSNHvmF0EQ/ei9Z5PxgRxiKLWJY5MqjJmy5hdzjgw0gUxURxzGZ7T13v6PuWtq2REkII7Pd7QnC0bT2lAM9OWC5OSOOEvu8Y3y0LSynRerLwrNYr8jwjiiLSNJ0Wgp0nTVKcm26S4zhFGQnBI4NAK4NSmtFZvPfs91v2hy0gMSahaSp2uy3z5Yr5fIHWMYRAO1T87h/8HnEyo94PLOYr6rahq1s2d1tOT05Jsxw/epbzJX3fcrI6wTmHiTSHalKZvnjxK6QQfPq9z/jyxa/J85LT5SlVdcv3f/gRP/vZrwhy5OLJgt72ECSzecJiXtC1I3keQYDd5kBeGNJUIaWYbu/3DRDIinhqKJTmsG/w3nPx6ITBei4v72i6gcGOxHHMMHQYM1mBmqpBS0XfO/reYgeHtQ5jNEpLrB0QQlC3Lc47PJ44SWjajqxMaOqaJDHMZiWbhwYEBAltN7LbVuAFX395z9WbLfWhpetGijydshWSiCiO8M5THTrS2DB2lqJIQAiQgjF4ApL5PEYbGIdhSvZ1UxjZMIzcXlUwgjaG2+sHvBvwfqTa7YkihQ8jSaSItWbop9+B+XyJtYEsK9DG4OyIGy3z5Yy6qsA5nB04OV8itEBJxa9/9YYPPnnMoT7w7L1HBBzzZYIPnhA8JjF0g+X2ZkeWRVy+uSSJInSiKcsYLaGpW7qhJ04U54/nPHm2otr3/OrPXtB1PUpLr7TcGZPuJPrlt3zcjxz5TnJsAI4cOXLkG+Z/++P/41ZIcSOlbIVwg9GSRbni/mY7hXrZKdVWaU1kIoITxCahLBZ49y6RVwmqasvoRtq+I81y5vMVwLScKUBKjTYxUimUkoBnt9tzffX/Z+/PYnXNzvtO7LfGd/6mPZ6pTlWxqjiTkmjLQzsdxEDnwkBiJIDUCJBAuTKC3BgJ4HT3RSDwohO4lZuGETQgwEEctBO3BDfa7YYTOHHbbkmUrZEUyWKxhjOfs8+ev+md37VWLt6vysllOqLKor8fcECwwDq19/7OKj7PWv/hjBAGZvMcpSV1VdN1LddXl1Tllq5rKMstTdMQAGkUUgAEjI6QUoKAOEoIIeBcT1lumc0m2N3Q7oaGrmt48623UTrl8vUlBk8
IA23XYdOYxdEBcZZxu1yyXK3YLFcUeQFhlDwJoXj05BOEFpTllnffeQ+lU66u1hzOj6nKjsXihD/8w/d5+PYp733xwWcFWZMiQyvDyxe3eOfpu8Dl5ZqD4xlHxwVxrBgGj40SDk8Kju9MsVZRbhtWyw1RpJnOCpwLrJcV3jO+imiFDwHvIfhA17bjoiQl19clwwBJYmmbFhEksbHUVYkU0DUNSgiatiNOMtq2RyhDWmRUdYNUBjf0aG2oy4HlTc36tmV92/HsyQVtPXB1vqSYFATvUJrxFUNbqrIb+wi8QyiQWmAjSZJEpHmM1GDsWHDWdwJrNN47bKx3PgPBMHiiSOPwmDgiiRO2ZT1+v24MLJIiYLUC4QDIsgxrI6IkQUcJZVWRTVKSLOHm5oaurUkzQ5aNRupyW7K83vD2O8e8fHnNw7ce0neO0+OMs1cXHBxMOb07pW4abBRDAO8GIgNRLDg6mY7eCK24vtpQ1R2nbx5x9+ERs3nOprzF4cokm15Lra8O8vzp53XO9+z508x+AdizZ8+enwBC87Qb2sb7YSjynKatUAL6dkAqjY0Mk8mMPJ8wnc64e/cNpFRENiK2muvLC6yNMMYSxwlxErPaLKnrLW1b433A2gilNE1Tc3t7xWazQWvNfL4gTwuur29ZLVdoo9Fak2UZIQSGvqftaoahx7kBN7jPGoCjKCaOY6yN0HqMKK2qkkkxwzm4vrygqbej5GZxhBsEt9e3DF1PXW4RgNGGMKZfYrQmjhMEAiklF2dnVFUJQvDo8UesVisG5zg+vUOcTvjgxz9GCHh19gITa1arii9/5R0WB1PSzHJyMmM+m2JNwrMnY1twmkWUVc39BwsWBxnOBbbbBo/j4CTi6OSQvvfYSANiTNZRBikUygjiyGC0xlqLNpphcGg9lnMpKdDa4D009ZiUE0cRBI9QgihN0EojCKRJwtAN+MHjXUAGyeZmTRqnLJdrQKCMwvWOq/NbNrdbyk3Hxesl29UWPwyEICiKnG25YTIpsJHCuUDfBuLYoIwiK1KCHOM6s8IQJ5I0G2NCiyJmcB7n/dgFYSVZnpAXloCjLBukhHJdMZlkY/pR8HS9wwcBAtI8wTs/Gr+VZhgGrLVEcURdVURak6Y5dd2xXm9QQpOlOUJ6pPR89OMXzCZTXj5/RZYZVqsOrS14wfe++yF3HxzT9hXXN5csDg7IspQiT4kiUFYRZ4r5IqOuen7jn77Ps8cXvPHmKb733L/7JlGUbW2SXiVp/MNf+dt/e/s5HvM9e/7Usl8A9uzZs+cnQPBqnefFZjrJ+q7vuLq9ZHAdN8st+TRBa0UxKZBKMZsvUMpgtMEow3q9JI4jRPC4YcAaS9NU9F1PFCUYY3G7pB/vR9mzkII4Hhtdu7bjZnmLCOxu2sfmWSEkSZwQEAQvGPpx+NdCIIRkMlnsmmo9IQQmk9kYrZkWFJMFBIlznrZtRgNx39M2NUWegYTBD9R1hY1iZAicHB5glAYvWCwO6bqWJE1J44yrm1eUzZrF4oA3Hr7N6/PXfO/7v0dVbambFhMJvvzVh/zct742fj3ec+/+AX3nIRg+/ugZs3nGbB7jfODwaEqaRbTNQD84pvOch28dMpvnrNYrssKSTyKEciSJoZga4kyhlBoNs1qhtCYQiLIYHRmMMYRd1GkIjnTXlJukY2nYMAxEiSVKIkCQTSc4PF3dEILDRJrV7ZKmriEI6qYhjgwheOqypu9a1usVy+XYxyCkJ51m9G6U9Cg1Lh/bsiIIQATiRLGYp+RJzGSakRcxUWwopgmD80ymGVLCEDye8RVDaQE4ktTQVh1GSaptzXxWEDxkSUEUW+I0wjmQQiC1wHlHP7TUVQk+kCYxkdFoKZFKMZ8vxmbl4DHWYmxEklqkDvz4g+fcvbvgh9//mMODgmePX3P3zTnL5TXPHp3x1lsPkVITkDRdh2fARpq7d6dEieH6puLhW3eZ5Ck/+t4jnj9+weA6zs+uMSZp0ix/keUH30Xsihb27Nnz/xP7BWDPnj17fgJoaZ1QqpRSDdtyi7EJq3VDlETYRDCZzlBKc3B4QJpn9G68ZSUMQKDrG9bra5zrkAjSqCBLC6Qcb9eFlGglESEAgjyfEkcJ2iiyyViaVEwy+qEfYxONGeVGjM2wcZyOQ5ux2CimyOckcUYIDucceZYz9ANaG7KioOtbetfS9R1ZPqUfem6XVxwfHyK8QgVJ37VU1Za0SHjw1hvc3N7Sdj2Lg0PquiOKM/LJlKvrMwKgVcR8vuDDjz8gimOU1LR9RwD+zM//LMMw8MEHH1KWDQ/ffMDzZ7e0bcf15TUHhxMWBwVN09FUPU3b4YPDGMVkEpFPLFk+Lk5pbimmEd57Do8mnN4dl4UQHHFsx+F/NF0ggDSNyWcZUkukNiRxgto1NwsRsJEc/RLDwHSekeYF27KhHzxKa5qmYnAdJtL0Q0/btUTW0DajWbjcbACPdwNVuaXvx8/YOUdVtXgv6DuH1gE3DDRNzzC0RJEejbpWoK0kijRKCYxVpGlEcGKXEARSSbQRCGVo6oF+CEipMZFEW0Hb9Qx+oO8Dbd0hhSfLErre4RxE1iKlAAb6fpSMIcEmFqU1Qkkm0wOUiii3Fd4LEAalJHmW8OijMc//7OUliVU8fXRBXTb83Dff5ebymu2mYjE/wIcBYyVJGtP3juvLJcenU04ezGmGli9//SEHizmPPrpEGY2NVVBGX2pr1ynsE3/27PlvyX4B2LNnz56fAHXbqaZqs7ZpVWwsdicjOTiaMAxhN7RZ5vMF200FQeC8p+1bhqHldnVF1zcoJYniGGPM6PmUY19AGqcoZRBqLOUyxuKDRypFcIHlzYbl7RZrRwlRABCf/gpobRAouq6nLCv6rmO7WXNze7UrgILIJrtb3p715gbn3RjPuUsnssZwc7Pi9mZJVVZsyy1aa6b5nO2mGbPtzfhSIBAUWU61XRMEtE3DnaO7LG+XZGlGlmZ0TYPwgQf373J0OOE7v/H7gOQv/dvf4uzsYkxDQoIQGAmr5Yabyy1l1TIpctzgUUoglWI6nSCFAi8ZBo8EDhYZk0nCtixpu9FImyQ72Yx3eODkdM5kmkAQoCWzg4Ikt0SRIQjIi4Qk1gy9B8KYz280QULfNIgANhoLuMLgx74GOS5H+NE0KwX44Oj7jmHoxwSn4KnKCtc7mqpGhIBWAj84usbhOo9gvNGXWuMGR2QVcWSIrKXvHEob6qal7wOB0SMsJDgXqKuWqmqxsSFNIyDg/fj3XJ7f0NQtSkfEcUxTl/jej7ItpUczdF3TtS1aW7wXDM4TcCwOFtR1SxxZisn4giBwaOF59uico6NDfuf3P+Dhu6f8wW//gM2q4cGDU/ww4Al43E6S1qGNoWkcH77/Etd6Xj695up8jTWaoXM8f/KKsloHlLtFCr/y7buf0/Hes+dPPfsFYM+ePXt+Agg4FIQ4iazMI0NiNWlm6buB1e2Go8MJR0czbq5XeB9QSqKVIgBt1+CGjqF3WDs27m7LDdvtesyF1xHWZmgVIRilPMMwjFIV72nqju
AZNexK0TQNfd8D4MPoHUAIvPdopUmiCG0kve+YzQ6YFHPiJCXgub4+5+r6NZNJht79XsH1lOUGH2C73VKWt3RdSQieo+M79K1nu9pgjCLNEqwxTKcTqmpDVW8QwOnJA7SNaNqa+XTBxeVrrq7P2GxuOTgq+Gf/9LcIPnD33h0+/vglddOTZxnrVUNV9ay3DX7wBC84OipAeIbej6ZmKamrhu22pe8Hjo8KDg8npGlMXTdorciyT2MvLReXt0gpuffggDg1tO2AVIGT0wWLwynaeJJEoyVMpgZwBOHJigwbGeLEjvIgN8phikmOAJqmJU0yCKMEy8YRUmsQAq00QcgxrtREBC8w2iCDp29bfHCkeUI3BLabFtAMg6csW6RQtM2Ac6CtgSCpqjERyijD0Pb4fhz8pQxoa1Bq9D6UVU+S5WRFjACS2LDZ1vS9pK5bZosCZRRt26G1QSmLVmMSUdeOL0AQSCJLVa8ppgVRlFLVNZFVZGnE5cWGJE/46MNnBB9wfkxj+ua3vsLl6xVPHr3g+vqS7eqWtqpo2oG+FfjgODieYFTE5dmKIon5J//oX/L73/mQtupo6g1tVzXeIxFqnQT96HM63nv2/KlnvwDs2bNnz08Ab/3LoNS1FpI4NuRJjNaB89fXFJOCLNdsyw4XWtIsI88L+n5AKYWNYqyJ0HrU85dlSdPUhDAuCsMwAKPuX0iBiSKElLtb5Z68KLBRxOAcXd/jgme1WnJ9c0PwnsE5lBr17gKITERVlhR5zmQyRQpJFMV0fUPXtWRphrUJ1kQUWYEbeggBay1Ns2W7vWG5uub45JQoSnh9/gzXVxweT5nOUqLIjNIZa0mThDwvsNby/OUTTk/vcX55zouzl2STCe995Ws8evSKjz96RpYWXF1fcXOzRe6+7+XNBu8ABG3tsZFEKk9d16RZjFER3gUCHmMlB4cTksQSxWaMukwMs3lOmhqcc6zXJbN5wb035kSRpGsgyxK0tmR5RFZolAKlwBhBkkb4AJFV5EVEYMBGljiK0VoDkul8ho4MAokQEqMNaZZikghl1PjZKTkWvwmF0RqlFSFAud0SgsNajdSG1bqmbAf6EGiHQNM6/ODp+wHE2GSslCRNE5zvUFoihKCuakQYpUpRHBEnMdrIsReg7Tg6PqB3jjSPaLqOthlYrzdoI4nTmKZrd68XmqbpEAgia3B9Q91sGPqWoesJDOgojLG1XpIXMW+9e8D9BwXVaskf/cv30QNsbkpubzYkacLdO/eIbUS53dA0Lbe3S8qyIU4inO+xVvPi2Q1GS9770kMuLs756KM/YrO9wjOs0yJ/lefpP/mP/87fWX5Ox3vPnj/17BeAPXv27PkJ0LV+6Np2UratNNYQvMNIhUYymyW7ZldH1/VIIfHOE9kELS1xnJGnExCwWt/SNDWRjcahvOswZtRn611yTd/1NE1J2zUMznG7vGW92aDU2DtQl6PESCsFn5laAxBQWlG1NRDo24bV7TXL20tWt1fUVT0mEEUx11fXlNWWrqtxbsDYiK5t8H3Dze0VB0cHNG3F5eVrIqt594tvkSTxaHANPfce3B1ThUxGnk25uDznzp17tE3D64szFosjjg7vI1Asb1ecHt1ludrQtI433rzDg4dHtE1PksR0Q89mM+AIHBwvCMKQpNnYQOvqcfg3BoKgrFvSPEZHCqkEaZoghOR2WTEMgcOjjMVi7EpQSrI4immaBrvT2BOgbXuMVhgjkFox+IE0i8hSjZSA8ERRhADiJCbNYuLEYKIxtSfKEpIioZiMTcpKa6RSxElMFBmsNUQ2AjzDMKCUJo5SqnVL2zjarsMHzzCMvzbrBu/HJt627dBG4N2AGzzO9WgrkRKkEjSNo64rfOgRSBSStm1R2jIMjN9P5yi3NcEH+m4gSTIQnq5tSaKUtm1Yrzfj61RTU27XLJdXdE3N9fklQzfQNg3BOdLUkGeGB28e8ld/4S+TTiQ//O4HlMuKo5MjotxyfnnBbDFnNp9BCOAE65uSm6sNm1VHP7QYY9hsOk5Op5zeORpjZ9uabVnZpu22X/3WX3zxOR7vPXv+1LNfAPbs2bPnJ0DfuMOybO8FaYohCHrvcT1EdozkrMqONJG4PkBwLA5mY0JOPiFLJigTjZp7I2nbCikFaZoRRaMkyJiIOE5wg8f7Ae96qmpLVVU450nTbPzf7wbONM1IkgwlFUPf0e8iHt3uRUAbxTD01PUW7wbAk6UpfggEL8jzDIGnLNdIKTFmzLRfr5fM5zOatqWua+JY8dWvfZG+81y8vsV7z5e++C43V1dEkeX4+Jiu78izguACL55/QhHHHM2OOJgdcnN5jdUSaSR1d8tXv/H2mN6DpOvGG/w0s9hI84V3H5JPCtpuNK5eXtxioxSj9Vj2tdlyeDzDpqPGX6jAMDiePb0A4Oi4IIoNUSSJY5gfRNjIUBQpp/cmKC1ZrUoE4ytOnFiE9ESxJk4lNlZorUkSg7ESaRRREtP2A1keU0xTlDFk0wxpJUkWEacxSZ6irSFOY6aLGcYapFYoq0nzHKXHheb87Jq+HTBKE1mN6x1uCJSbDgLgA34YU3vatieEUZuvtCTJYoxRuCHgBsiKnK5zhODJ84TNdkPbe7rOk8YRw1Cz3dZsNi1xnGKNpusalB4Tf66vb+g7RwiC5c2Sm6sb+q6mrrZcnJ3z4sVzXl+cEXBkWcJqteXgTsov/s//Ml/71hd4+uwlL5+/4OhoynQ+49Hjx5jIcnxyhHOOsqxJkoTb24bXrze03cCLl+cYGyPwFFmBNbFX2obtevvW7//jf3z4uR7wPXv+lLNfAPbs2bPnJ0A7dP266vRy26arsqF3nqZtsYlhtarRRoEI9H3PdDZjcI754oAkTXHBgRxnPIKjqTZkebq7tQchxkx95zzGjPGebduSxAlHh4fkWQYE+r4jBLDx2Das9KgjN3pMj0ni9LMCsRAE3eCxcU5WTInimLLc4JwjSwvSpEApTZZPSJIMgcK5gSAVIUi6uiXPE9577z0++vAxn3z8lKYuefjGPW6ub8Y0oXRM7ZlkU4zWXF+9Jo4TTJQwnc94/uop17cXTGZTVtWWn/35v8ByuYEgefb0NdpotmWFtZa33r6HG+Dp4zOaumV1u2E2yVBilDfd3q65c/eQ+SzBDwOSUWN/c7vBxobFQY5QYszyTyKiRJHlMd71FFPLZBqzKaudzl6BDESRBe93nglFsltE0sxiIoGxhrxIQAiUNru+AUGSmrGFOFHEqSHJE9I8I4oikizdZd9HREmKNIo4iVDG0A+OumxGMy8BoQSDg7JpaVtH3XQoIwki0LXQNB19H/AhkKQZQhhc37PdVgyDw/WBpumQSmAijQ+O68s1UgiSOGG9qnny6DV9WyGkoqkr6mZcKLXWrJZL8mICQtOUHVdXt/TDwHSacrCYsLqt+PjDMwiKPE+pty1pYvkf/7t/ib/yP/xLDI2n3jTcOTlksTjgyZOnXF5dE8WWrh/4+KMz4sTSd4xpSq3nd77zA66vrrhdXoWub67jNH6NkMWqrt/+HI/3n
j1/6tkvAHv27Nnzx8wv//IvxW07/PnByXc2rdA3m4bL65uxcKntWC83xLEBDFGU4QaPMZq8SKmqLf0w0DQtsY3GNlkh2G7WNE2NlIIQerbbNSGAlAatDAcHp8znRwQgTmPSLBnVFS4gdvE/Qow391JqhNC0XUfbdrsFoiOyEX3XsdksaZot+aRgOp/Ru56mbmmbjrqqkFKRpik2isjznEmxIEkyTg7v8P7336faVrz99gPeefcBL19c0nYtxycHFEXC/bsPxn8+4J0DqXjw4CHPnj/m6vac07sn3N6ueOedL3N1dUteZFxdLkmilB+//xxlIg6OFlxfLXnx/IxyU5NnMcUsxsaC4OH12ZLJNGE6LQDo2orgBuqyJ/jA0eGE4D1SwGyaEkWS+WyCFJLpLEPKMRKzLXuUgDSJiNMxVlSKMYJVaU2aRUSxQmtBHBuSdPdCEI2RoXFimEwyhBDkWUKWGZLUMJtnZFlMkmqiWIESxJmlmKRE0dgrEAQUkynltsM7T9M5vPfUm4pq23yW6uMdY0Mwge16/LPSdT3DAHGSIsy4MPreg1A4J+gqR2QsfdPy6tk1Uiua2pEXCVcXt5RlS1PVOOe5vb5GEDBasdmskEIwmc1AStqq4eb1mucfX5PYlC+8c4e27Pnhd19Qr2v6xnHxcsVqVfIX//I3+Iv/vZ/hZnXL+eUVb731gDfffEiW5UgtSFJLEltePr3EyDHJSgTJelVSVSWDd62Ko+c2SZ+ryJ5v++bP//Iv/7L+/E75nj1/utkvAHv27Nnzx8zNzcRu6/6tTd1n29bTDgFlRp33dl0Sa0OWxpRlzWIxxXlHFEes1kuEEKNGW0ic5zOD72pXDtb3Lbe3VzjX431ASjneHEtDWVZIOf5rve16QvAIAZvtmtXqZmfENSg5SlekVph4bBtWUtI2NQRPlk6ZFMekScYwtCgFk2mOkIKua9hu158VkSkh8d6TpxNuLlcEpzg9vce2qvj4oxco5fjGz7xNMYmZzxfEccL4OtHjvOONu/cpVys2qyXvvP0uOE2RHfDs6VPWqxvWy4qrizU/+sEnZFnM4cGci7M1N9drklSRFYZ+6HDeI0TMo08udxIpSQiBuvQ458fGW+c5PJyiFESRIc/GF5D5fIK1kiSxCBGYLyZsNzWbVYMxmmKaMJnGTKYWrSVaCbSWJIkmzy3WaLSRFLMUHwJpFmEjBcIzP8yRQuKGnqIYXwFm84hialBakGQRSWrIckuaW5I0Ip9kOzOuJcvjnQ9hgCDYrioIYSxka8a23mFwxIlhta1G/bwy1FWJEANSCIKDumqxMZhI0fY9201JnhV0rcN5hwue2FqkhOvLNdW2RITAarkkeIcIAe88q/WSNI2YzSdoqzk8mdD1A7/zLz5iaCVf/cabmBhePF+xuh2XlcuLW/7wez/m7sMD/p2/8nMUs5inz58xmU3xwGw24e79Y6SRnN495Ob2hrzQ5FmMsZppsSDPp/V0MT9fHB5+MD88+GEUJ8P777+/n2H27Plvyf7w7NmzZ88fM3EcB5Surm42om47pJIUacb19Q3bVUkUy/H2PggmE4sbHH3viKKY+WKBNZq6XBPYRTga/Vn2fl2XxGmCMpogPB6PEIK+b1FK0bYtN7e3eO+Y5NmY6e7c2BqbZ1gbjSkxdcm2XBOcG9OHtCHNCubzI5IkY7NZcnFxRtc15HmOD44QHGW5xfuBqtrSte14K11vubh8zeB7Tu/dxQvYbjvm8wnf+Jkv8fL5DavbGuck23LDcnmLVJLT03skUcLt9SUnhydIqajqmvOLZ5yfPWO7qnj+5DU3VyvqduD0zjGffPiM73/3EX3v2WxrlLZcXZVIqXn16oreeeZHOVGcc/bqkrJaE8UWbRRpblFGAIxm4r4jikb9/mh6HUiSCIRjdVuRFymLwwnTWUSeRySJIc9ztIY4UlijSZOYOIkwVpJldkzhkeOCEQhoEyiKlBDAe8hyi7GKYhKhtEQpSBJLkhnS3BLno5xoMstxviPJNFFkxhz+ukdIiY0Mzo2xp3XV0DQNcWoIeNrWI/AMXcfgekSQ+H5gdV2htaQfHP3Q4wZPnGqywu6iaHuSWDGf51RVA0g22y3aSNzQIaQnjgzlZkXwHUlqxy4JFbjzYIHrHN/73Y+J44Q/9299A2U1Zy83vHh+hXOBclvzz//pH7Lalrz1zn2m8wXL1ZYkjTk/v8L5nsUiJ4oFxSTjk49e41xPtS0ZupY0jrvJbPJofrB4UeSzi/n84Dd/7dd+rf98T/qePX962S8Ae/bs2fPHzK/8yq9s3rj/xj9bHBw+KZvK123Ptqqpyga8J00N5bZmMklxw0AIoJRgMZ9zsDjatfTG2DgeC6V22n/nHFGcYG2M955h6CGAGxxKSfq+o65rsiwjiWNulku0tuRZgZKW7bYc5SRtQ12X5EmGFpI0jrHGYq1lcKMRuOtbht4RRQkhjOkwaZownU4BxsSXJMEHT9u2hCDwQeCcRxDo25p+CHz80RkX52vcEFitbrldLpnN5izmB7z58C1enZ1xeHTKnXv3qKuati+5uDxnNpnTtw3Xl+c8efoxX/rKQ549OWdzO3YczOdT1suBy9cNaZJhleH2uuT4dEGUJLx4eQlKcXg8G/0S3qOUpto24/fZO7RW2Eji/cDQB5QWIAJN7RBSMpklJCkkmSWOIow2aKMwVpOmliSJMUajtaSYJGR5jFSCKFJEVpEkETbSxInEGMUwONLMjr6AxLJYZGjF2EegFZNJQhIr0lSTF4YoipBSoJTE94H1uiJKIoQArQR11dC2LW3Xo7QkTSP6xuGcp2nGP1dlPcZ5+gASPcrBgmS7LamqLUluQEi6rsf5gcVBho3Gz9G5QBRHRMnodTBWYoymrlqytGA2n1JteybTnK984yE2cfzohx/Q9zXf+rPvcnovp2sHnnx4wVAF0sjy3d97zKPHl0wWc4IIrFcV1mZcXiwhwPq6pis9fdXw/h/9iLffvsO6ufSrzW0vg1RZln3PRtFv/d1f+7XfFUKEz/GY79nzp5q9fm7Pnj17fgIcHR39nv/S8LeFGKaXFzdfqJpOhgH6YYo2mrb3BDeghCaOLU09GiZ9gDjNyPqOPC3o+x4pJSE46qYkzSZ4PyAERNZSldUo55GaNE0pilH3vllvx3ZgD3VdsVqtMdrghgFjLEUxwfmBzWaL84E8HZuKfXBMiinGGJTWCDn+Na0Uq6albhuE8LRNRUAynS0okpS6aqnrDctbTZpGCCGJbERVdWR5RPBwfn5FFGVkWUGcKD788H1CgHt379G5UeN+dXnNl7/0dZbrWx4/fURkDf+TX/pF2ralaxsuVxu+8bPvcnZ2ydCBjTxHJye8/4OnYxpR3VOVNcUk5uTkgKos2W4qsiyl2nYkcQpA13fMFxOE9LhB0vfj4Nw1A2kS73LpFVFkmU4y4jhiGAa6viFNI5QeZUZtFwhiTAYydjfIz1IEnq4JBK1JtKcsHdpIQGBjg/eOLLJ4HFJofPBoLZgfFFR1QxxrssxSVT1CQAgCgUJpSQie
KDaEMOC9wLmAc47FPKWueqRU1E07xr0ODiEENh5v9K1RYzOylKyWW9LUIKUkjhUhOCaTFGsVUo6ldMYoghZoYwDQeoxRbZqGPEvwi5TlcsXx8R3QgavzW67Orwho3vnSPdI85jf+6+9xdbFlW1YsjhZcX28QYsvp6RFlWbFabUFNePniljBaFUjTmBdPN3z04x/x83/uW+6HP/rRxzZN/9C/m/zmf/rtv9N8Tsd6z56fGvYvAHv27NnzE+B/+R/8B7fpNPvhu++9/dt37x6+MhovFGhraZueuqypqwGjFFGkEHjiRI057HHOdHZAlk3I0gLX9/R9y+B6QnAM/RgN6Z1DSUWe52MOvRBsNhvatiWOIkIINE1FXVVjkowE7z1G6TEjv6ow1pIkCXbXMaCUpm1bvA+4wVMU4xKyLStCEGglKbdrmqZlMp1zsDikLEvabszfR0DTtnRdy/J2iRRjsdbNzQ0nJ0csFhMePLjD67PXeCf4wttv0XUd6+WStq744rtf4uLijB9/8j7res0Xv/IVimLB5fkNVVkTRE/vNC9frhiGnqOTCWevbgFJ13coI5hMY46OJnRtze31EmsN3nnErpSt63om04woGjsU2qbHOQ/hU4NvIE0i8iJiMkuYTPMxW19LlJREkSVJIpTS9N1AFBnSzJDEmijSWKPIspg4jfBuICssxmgIAakFUim0VZhYEaURykKSGZCBYhaNhWZWkeQGG0vYfXbaSKQUDMNAFGuiRDMMQJBIYFrERFbifUBrhR8cQngQApto2u5fvRQN/YAQiqpq0AryNEZIkAom04zJNB8TjtIUsyuli+MUbSKm08XOUDxQFBPyPOb89RlxHKGtYrNs2a4qPvzgBZuy4Ws/9x4qEjR14NXTSwwK4QMvnr1kfjChmMa0VcPqtuLZsxd8/4/+COccSZxw9uIFv/c7vyO+8IW3v5ymuYvOIvc5H+09e34q2C8Ae/bs2fMTIjjXGs0Hbzw8+a037h6+jK2i3GzZLhu8Ezvzrx1vPcWYEFTvhvLZ7AClLMPg6doerTTWGPquJQSP9x4AExmarqWsSgDiOP5M7qKVggBdV9O2o5b/8PAQKRQhBNK4YJJNMMpSbpf0fY0xlsgmJHFGHCW8eP6Cpm6YzaYYo8eceWmZzY6ZFAuqqkdJS5oWTIoFTdvSdi1xFBNFMdNpxpPHzxHCcP/hEV/9+tucvbpEa82f+fmfZT5fcHV9xcFixtHRCavVildnTxAI3n7zSzx84z1evjhju9pyeX3BW194k8efvKSpB7Q1NK3ng/fPUFKzvN0wn+X0fUtZ1lye34KQZHmB94o0s/RDi9KKw6NDttsarTVCQJrFBOExeny5CD6QJprptEAbi5IWgQQkUkq0NoCi7z1Ka6yNRlO1VRgrEFqSFpbghnGQLgyD99hIYeNd7r8WzBcZNlLEiSVNY6QULA7SsexNKyazBBtLotSOaUOJwkYGISHLY+JkXASlBB2NUiRrFXlmURKkFAgVmC3GgV5pxsWl97ghkKb/SoYWR9HoX0gS4rSgmMzRJmYyOWAYBFpHZPmMtvfcvfsmbR+QOsLajKJI2W62nJ4esl631JVnOs949fKGtu148PCIh19YMAw9F68vCc4zm8958vg5i/mMvu1Y32zGorK65kff/wF9W+Fc65bLm49evHz1I6vF/+z2xYuDz+9E79nz08N+AdizZ8+enxD/3v/uP/pdodSHVonbxbS4nuQRiRb0dY8UMJ0mJKlEaUiShPWqIQTHbJqh1FgAtl7fjJIMBEppnBtQEvq+R0iFkBIpFUkS4/xA33ck8WhwLcuSYehRWmPsaDbebDeUTQVSoLSmdz1ls8V5TxynNHU16tyNGV8d+h6tDX3f4b0niVKyrEBpye3yirbZ0jQVwTu6viGfFGRZvstxr3n0+DEguPtgzle+/ibn59dU9Zavfu1LgOD5y+f8/M//HEmS0TQ155dn+AAnJ6d88xtf5+rqJevlNY8fPcaamMuLLQJYL68IfuC3f/P7DENP53refuc+5XZLcLBdlwghmc8LpFBYM95YDy5w5/4pZdkghEBrizZjJr5Uary1F462H5jOMtIsRUgNQtEPHVKDNmBjRRBgjMHsXgac60kSi1QK1PiakKYx3sFklqG0pK0H8tTQVB1GWyaFZTotEEBexERGk8aG2SzGWok2MD/ISFJFnOrRXxAphIAsi0kzjbHqs4bhfJIQJxJrJXEcEycJQgYWi5wi1xRFRBRpkkTj/DDGki5yAj1CKaI4w9qUOMnIihlCGorJfJST7boI4iyhqkvuP3iHq+slNoqRUpPnGTeXG95+9w3W6xIRFLGVfPj+S7abnvlBwb/93/8Z3v7iA3rXYWNBkeWcvbwmSQ3dUHN1eY3RhthCWd6gI3GzOD76YVRk/+dh6P4zbcV/9O/+1b/65ud6sPfs+SlgvwDs2bNnz0+IX/yFXxhbtqTyUob2eJauHpzOQmQEaaLJJxplNHGsmRYJbnA7eYhFCkkIA+DwYUApje8dVhv6rieEUfctBChACgUEZrMp3nvW6w1SKdI0GfP9+5bNZk0IgiiyBKD3HWVdjoViNmIYOrQxSKlou5rVasnhwTHGxgTCbmA2GGu5vR3z4ctyNTbCNjVI0ErRtB11U31mGH3jzTu89+X7fO8PP+Hy4op3v/iAqq65vFzyrT/7Laq64+b6mpvLC/A9RT7hG1/7OlfXl0gpefboOcvVFffuHeG7nlcvnnL3zgG/+9vfo2s6bKSx1rDdlqSJRe2G0dk8w5qYrm3ZliVd65jNJyRxwmq1ZjLN2FUk0HU9gvHGvNxWxLGmmKYkaYKUin4Yf+Zaa2xkxwIzo4isQYqAFhItJVFsgYDVCq0ENtJ07YAWMJ/lEAJaSQ4PpiyXW0Awn2ZoqVASolgjhCTPIqazAu8DxmpmsxytJXEWEWcWZdS4tEWWIo93XQMZWisWB3OM1XjnOTo6QkiBtYaT0wVpYckLg9WKJNYEP7A4mlFMMwgerSOUsmhjidMME6c0Xcf84BikZHA9WkqkDLRtw4M33uL8/HyUU7WONEvYbLbcvX/Ejz94weLoiNVyxeXrJe//0SuePn7Nw7cP+Zk/8y7VpqTalKggKDctb3/xHouTCa53bDa3NN26S5L0dVFMrmbT4g/+0//8H/4nBPEPlHD/i1/4hV+Yfm4He8+enwL2C8CePXv2/IT49V//dWcy/RtxnD1aLOaf3Llz8DxPVZlEDi2BIBBi4M7dnLZpSJIYbQVVWRKCp+8H2qbBO5BSo6TEO4cbevzQQwgENyAFDF1PGucEB03dMJ/PyYuCtu0oyw1d1xAnEVFk6LqOvm/pmho3DCTJmF9P8EBP3Ww5e/2ctqvZVCsQo+wnjnLSLKfrWpQev8662e6MrBKrDPW2REvF0eERzvd882fe5Y2HR/xX/8U/5+Wz17z7xfvMZgXr1ZbDoymb1Ypnj5/x+vUrJpMpR4tTvvn1b7G63dDVDRevz3nx/CV37p7iBnjx7AV4xatXl4RgaKqeOE5QxpIkmjQzpEl
MmqSEILm5XbJZl+MrgFVMJjnXV1e7gi6FlJrtpkYgxmhWDVU1cHwy/2z4J4ylZQKJMRZjLOP/fQrCLohGCoHSCqFGvb4SiijSeAAhEWIsC4sTS1XVHCwmZFnC1dWaIQxM5yltM5DEEUIEQggUWUJeFLjek2eayTTGh0AUj/n4AoHznjTPxj6IKEbs/rMoCpquIZ+kFHlBWdbkRc7h8SHGGpQdk4q0kggBi8WcKBoXRcRYHqa0IU1zXAigNWk2GxfNEMiLgrJakyQxSZTTlg3VdoOxgvOzG+LIEsWGDz54zrtffoNHnzyh2qz5+P1HvHxxjVCK07t3WBzOaNsOJWPaXvDNb32Nk7sHu69BvY7z4sM0z84CYgLwf/sH/9Xf19b+o1zzM5/Pqd6z56eD/QKwZ8+ePT9BNhQ3XvvfNdY+0lJeJpG+PZglg3A9Ugiy3NK1wy7yMTCdJBA8TV3Sdx3aaIyJxoIvAV3XIqVEEJBiLHEaQiBKErSxrDdb4ngcBLebDX0/4JwnjhOk1Nze3tB3LSI4lJAcHx/j3IAbBqTU1HVDCIE8L8Yh2sNmuWZ5e0sIDgnEcUqeTpnPjkiTAoIgzybjkqL17tXBcOfOEdZa/p//j+8wySfcv3+CG+CjD5/QNBXWSF6+eEGSGO7fe4A2hgdvvEldN2zLDc45PvzoQ+LEkiUHfPf3f8TF60vOz2+5vtmQTxNW65IQYLMpidOUbDplcXyMF5LBB+qqxlozZvanCVXV0A8t1mq8H6VUUggia4mTGIEijizHJzMCmgC0XUMIHmv1uABYi1CSAGMaUxyBUtjIEsUKT8CHgI0TEKNcyDkHjMlBURSz3ZacHM+QQF23xFmMjSJC8KS7jH039CxmOZHVY0HZPCdN7Ti4y0AcW5ASoRT5ZEoQgjTLCUAUxURxQtM0vPHwDYZ+YLnaUEwmHBwejr6HIiXNE5q6wUYxk9kcKdXYHOw9BInVCVk+pSpLYhOT2JiuaxmGgSSJWa5uOD094dXZLdoaXjy74Ktff4vf/s53OT6Z471DG8PiKGe5WbFZ1/zwux9RlhVOBlSccvLgAB9atIp48fyCr37jixTTSRBS9VEUVUmcvpgI8/1Pz9T/5df+8//GFPPvfD4nes+enw72C8CePXv2/AT59re/7f/Gf/gf/zcd4Ts2No+T2FwoMazbah2882zWHUpZpB4HvNlsjh8GXD9gjCZJc2yUEMcp3nukHG+tlVJ45zHKIKXGe09VlbsBTuGGcfBXWpPnE6yJ6Id+11SbYW1Cnmc0TUPfN8RxhPeSSbEgSTLSZCwFOzg4ASGp6jWr5RXDMKblpEmExOG9QyhFnGVEcTLq4q3BB8f52SW//3vf5/TOPU5P7tA1Pe9//wnX17e8+97bDJ1gPpvwxht3qZua6TJ0vroAAPoYSURBVHRKZC19W3F6esQPf/RHWGs5OLrD5eU1q9Wai6tz2q7i7v1DHn/8BGtitpuaNLGIEMjSCXXd4H3YdS7ECBmIk5jNtsb7gDUxfe8wRgKONNUQwGhJXTfcf3iCFwEpNX3vCD4AAZBEcQpy1ODjA0oKoljvknoEUWyItCGEMXFHKYmUfDZUhwBSQiDQdQ2zeTGmQlUd88WUgMDEEdoahBRI6ZhMx5eIKDJMJ5bIilF6pOQuDSqMRmVjiJMMIQ1SaYpiSt87kiTh8PiQuq5p6pr5bE6SZGitySfj68F6vSXNJmT55P+rdyIAkY2RCMpygzYGpTTr1QqJYrvZMISKYlHw4cfnLJctr16c8a0/82X+63/8OxSJ5cfvf8y777xFlqd4RrP7b//GHyAY062Ojhfcv3/CxdkZ/bblkx8/5t/67/68ny4mvu3agDG0mbf/n+fqV3/1V/clYHv2/P/BfgHYs2fPnj8BfEh+s/d82PX+si6bK6NUH9kxqccatXsNiNist3jv0VoxKaZEUUIUpwx+lAGFIIminMFBCGFseh0cIYRx2FQCqeQYo5gkRFHE4Hq6vqOqSibFjDjKmM8Occ7Ttg3eO/q+I01SbBSxXt2yXN6Mun8CeZ6TpDnT6RypAhJom5rNdoUPgaPjuxT5lKquUFqRpJazV2c8+vgps8kBAsXV1SVPnj7DB8dXv/Yu83nO4iDnrbfus16vefjG28RRws31LXdOjvnx939AHkfMpuNA+uLlU65X5wijuP/GCVW5pVw1CAICwe11zd17d3DOUa4rcB4lx8z8JE15fX6NNpa+H5BCYK2m6zu88wzDaNbtu4HJJKeYpjtz8Di8K6Xo2m6UDGmFd4Gh8/R9jzYKIXd5+R6slhAEWim0kERGY6RGfVro5Ud5j7WKoR+wZowI7bseHxxpkeMRSC2ReiwPi+MIYwyDH1+KtNZEcUTwHqUUUoBzA1pHu18GIRTWxkSRYXl7y50797Empm06vAvji43QpHHGdDpls9nSth15PkMITQiBwLBrgIb57GA0gjswOsVGCU1TU2Qzri5uOTkumE0L+sbz2//8R2w3JV/60tu8eHqBVobf+q3f4ctffYukGL+XxMScv1hiJLx4fkk/eBJrefzhB3z4w+/zW7/5W/z8X/jzh4vDY9F3w+vb2+H28z7De/b8NLFfAPbs2bPnT4B/71d+ZePj5B+Spn83aP3dvCjKrqkxSnB9udql5nREkcVE41BotCSJE4wxaK3oBkdWFCitkTLQdw1VvUFKiUSi1Bjv2bZj+2vbNGw2a/qhp65KBII4jjk8PKTvewbnqNuGpm+RUgCezXZNVVfMZ3MEgrOzFyyXV1ij8MHTtS1SKTySOC04Ob6HUZaq2hJCQAp4+vgJy9sbjFYEJ3n9+oKXL8+I4pjDoxl3752yWtaUm4aL60ve/MJbAGy2G7Is4/nzlwx9j0JQlyvK1S11taLrKt56+w1OTo/56McfgfCgPC9evKIqlwyh5+bqmuA6xgRUT2yTMXHHjIVrUo4Z+VVT03Xt2KJsLDqOSNKYPE8QPmBNRHCO4EGbiCDEqLv/VIrV1uOCJGEYIPiANoo4Seg7hxRiN+jL0UOhNMYohJAIodB6NAt0w0CaxQg1fm7WGKRQ+BDGG34bUdc1URoTkAghsZEmSWO8h2EIGK0RAZTUGBuR5cXu6zVoOyYcOe84ODyi63p615NmOd5LQlBMJlPSNGF5e4tUaiddEiAEgTC+PKmIJMto+w6hNWkxoe4GZGTJJzPOXq5474unlG3J5XXD//2//BccHM3ofSCfTEizhB//6Clf/8ZXWBxNAU3XOZI0IU40z56/YL1aM58vSKzl+uxi873f/+76/uHJXzZCHP36r//6Pv9/z54/RvYLwJ49e/b8CfE3/vd/6/1rO/8vglQfOedetGXbNWWDFAJpNc5Lbm/X9F1PPi1AwuHhwU5zPQ6xaZrRNRUhBHrXIoQiyVKiOKJpGqqqGn8/IcbbfTfgdjp3pRRVVVOVFZGN6LoWgscIQ3ABISWTfEqRTyiriuvrS9brJUPfcX19yfL2kqpcY4yiKKZ4H2ibjq5raLsGrTVt2+FCQNmIJC+omorb22uK6YzJdMHp/QXXV7
fcXK3oGfjqN79O07Q0dUmkDdYo4ighTRMEgjydopREhMDd0xOKNOXF0xe4LrBtNnSd48mTRxweTbm9XNG1PV4EvNB4L3beicBsMSWEgTiOGdyAEhLvoGsHDo8WEAIIj3MO5wKb9Yq2qXHDpyVsDoTAu56uqWibBsHom3aDAzkWfElhkHJ8YXDOoZTCGLXrCrBorfE+IMTYJ9B33bjkKUPbtqzXa6wxo7RLyF28q8c5TxRF+OCx1mKtIU0TmroGRonRzo6M3hV3eQd5tiBJU+qmYjIpcA7qpkQqmM5ndEOHMTGTyZRhaGmbijRJUFLh+46hHT0hgx9Q2uL8gPcDQ+dI04KXL19ycLhgudxwc73l7Xfv0fQl5+dX/Jd//5+QRJZPPv4xic24vnjN00dPuffgHofHxzgfuFmuyYopi8MjEFCWa1xwQ1EU2+22+uCjR4+eCu//1//T/9H/4N/5HI/unj0/dewXgD179uz5E+USnSavejc80lKtRBDYyCKEo21rzl5ekGcZymgWh0d0fY+UCmsj7ty5w2azpB/Gv/ZpE2tsI5a3l2y3a5wbmE4Luq7DGEuajoVe2tixQTayKKnAB0LwRCYiiRKMiUAI6qam7SpW61u00Rwf3SPL50RxStd3dH3Dan1LWa6pqi0IR9Ns2ZZjWpA1FqMjFgdHzOYHBGA+nzKbZ0xmMdJErNctSRzz1rtv8PjHL3n5+CU3t9ccHR2TpimxjZBCMimmHC5O0EqTpTlxlPDk0WPWN0u6rkM4aDYrRJAkWUZdNrQNtG0gjmOkVITA+KqiQWuz07cHJIJqW3F4cozAMwzd2KysND4E6qpCabmL8Wx3EaiKrq4YulE6ZIwdpUEhELwbE3hMQpJEu89UjLGh1mIjg1IGY8zYxtsNaDkm6pSbLXmaYLWiqSrC4Ijt2OzsAyRpTt/3RJHZFZcJIBDFBiECXTt+1oTx1l4gUMpQNy1GR6TZZCyFE5qT0zuUm7EdOk3G9l/noCgOSPOUpinxziOFRCEZuhbvB6RWwFiS5vqWoW+xWnPzuuKDHzzhrbfucXF+jY009x4csC23rJdbnj95zunRAY8/+YjIWp588pgnjx4TWU1fN+Aky5s1201JbA1DVwcf+rW29nwyn752Wv7LbdN8z/fhb/zSL/zCtz7fs7tnz08P+wVgz549e/4ESatMdEMQINdai6UUeKUEeRpTlxUCByJgtKHvh3G404bpZDZKbJQiSROGoSOKYpIk4uzlE5a3l7iuJ4li+p2BOMsz8mJCnKa4EDDW0Pc9XdfjnCdLU5x3u1vvgdX6hpvbC4ZecOf0LebzY0DQdQ1dW7Mpl5TNBhd6pByNwIIwpgoJEASGruf09C5JnBHCaAg+OT3k3v1jpILNdc319Q0mCyyv1pw/O+fVszMWiwOUtVR1S11tMcpyeHBM2zVcXl5xcHjE+flrHj58wOB7tuWaSEsuXr3g3Xfe4cWza66XFc+fn5OlOVqPptu27ZB6d5PuPDaKUFqx3Y5Rq8ZqNpsVIXikUgit0ZFlMp3StT1t/enrxli65r1ncH5M3xESHzyCQN20aK1QWu1MvBHD4JBSYiKLjiw+BIZh1P0LIcc4Tq1p2h4hBcZopJBUVTVm/CcZ3kOejx0AIYxfv5AKKceytjRL2ZbluBSIgBRi1w8hEATariFJMoxJ8N5R5AVxHPHixRl935MmOXVTo60lS6eAomsblFLj96gVbTu+hMRxipAWY0b/QblZ8eUvvcujj15hTYw2McvbCqNjjNYEWpq2Zbtueeud+6xWGxaLI16/esX561fkaUK5XhNJwdCWPHvyiKGrB6H8JVosQyCaLRYfxEXxf3BC/B+VUuLzPr979vy0sF8A9uzZs+dPkLhpuhDYKquW2spzG6lWy4DrPGGQTCYFQowZ+wSPNZokjkjTmBDC7jZf0/Y9SZKzWi5Zb26RWqCtRmpNPwx03UDAjznvSJTQSKGQUqK0RJvRcwAQxDgodl2HlIr5wQHs7LVKj1nxwXuMTphOjvBOjIViNhlvsrXFmgilDJPpjHK7ZXV7S1NVRLElihMeffKMZtvx5NETnG9ZHB7w/NFLnr34hHyWc3J8Qt9UdDsj8WJxQJFP6IeBo+NDlqsb3nvvIcENnL18RZHGlNWG2eGC25trPvnxM549HqMo40jTtwPeQ5RGTKdTbm/XCDRKaoY+MDjHZJrTtWP/ghRybEw2BmvHMq++G9huK5TUwNi14P2oForjZDfgK0xkEUFAAEePshIfxhQdBONSYMzYEBwCxmicCxDARuOi0HefvvSMYTfeDVgbY2yEUhprIrq2Q2uNVGbnA5GkaYqSks1mO0aHurFMTggFeLwfRuN4nNMPA0JJDo+OccPA00fPKLIpbdvQthXFdDG2TfuBtm1RNkKq0cDctRV914GXrLcV+bTgZnUFquNr3/wy/+J3f4g0MZ98dM50WrA4OEZIixCBly8vmM4WHN2ZooTi9OQ+zsOmHuVozx495mCe0DY3rFZXTmm18QiPUNs0yb7z9/7+3/+j/+wf/sN/8H/6e3/v9z6fU7tnz08f+wVgz549e/4E+et/62+1Oor+OVZ930b6PAS/CsHTtQPOBSaTCdtNSVPVEALeOaJoHEBDEBirkGo0eBoT0bY1RmtiGxF9JhvxxIlhNsu4uDxntVnihhbnHF3f0TQNTVuTxGMUZNt3OOewWqOkYr1e0g0lSo9G2r7vqaqSyMZoZYhsgnOObbWkG1qiKGUxO8LalBCgrmq883R9jzaWx09fEILl9dk123LFV7/5ZZ58/Iqr15cUecE7771H3TYYpYiiiJPTezx88+2dpl7gvWNoa44Wh3z40aPRHxAkk0mOCIFXL5+jZUTfD9y5s6BrG+qqRinJZDqjadox5tMq1qsbNrcr0iTGxBalNEPnGIaxxRYEQ98xDN2Y/OlBBOjqHoJHKYEQYK3ddTKoz27kpVQI3FgIJkBJsYsXNQjEGE0qPB6HGwLOe6QYk5aquoEw+jQCDu/HGFKCwJixeVgiPlsUAgIhJdpYkiShqrcEPyb2aDW2IQ+Dw7keFxxRmoPUdF2LMTEP7j9gvVxxc3XFYr5gdXtD3w9ESQaA9w7XdwTn0Sai73ratqTvGrqqo617Tu++xdPnL7j7xozFQcEH7z+mqcfl4cEbJyRJQluViOB5/eqan/25n0FZDUKS5inBe6w1xHHE08fPKNIMqUXjEZ3SusyK/Emh9cvP77Tu2fPTy34B2LNnz54/Yf5X3/6bT7ww32uH/qXw/bNI0EkRMLHF+YAfPMEHXD+w3ZbkWUYIDoTHmPGWN7YxAsZb4KzARgn+04Uh1tx/45QQJEpZ0mTM/e922nUA58M4tAqJknIstgqSNM2J4xRjNGevniHwCCnJ8oI4jun6hm15TVWvqJsSrRVFkdM0Ld45wq4Ya6DH5jG9d0wmc9J5wXq75Of+7Nd5/vQlz548xyE4PLrL0PcgOkxiOTm9w4M37mOtZrVakiUZCs2Xv/Qlri42WD16FoSQ5FnB1c0N88M5k1nMnTsTqm1JV
VVjZGYaEycp5bakmEzpuh7vxuhMbSOsjfAuUNctUo839OVmQ9dWKCVp24EQoKmr0bA7DOOiZM3oGfCOKLK7W3eJ2hmtpZBopRncGFEqhaDrOrRWWGvGFxU8IFBSkaYJBHZJTECAvu8IIeB3bWOjj8Ai8GilcG78M2JtgokTjFZU1QYhRx+A0mOa0GazhuAIBNKsoHcdbdeQFwV37p5wcf4Sm8RYm3D+6gXWWrwLJElCXW0RAqQYy+hc37JaXmGM5Mc/+gBrLKcnJzT1lsPDKZGJefn8Ba9fXzE7nFPMZ9x78ACpJWXZ8vLlFW+99wbKqF0J3eHYV+EdRhvK7ZYsS3DeS22iMi+mP/5P/q9/d/k5HdM9e36q2S8Ae/bs2fM5oEk+8IinJhKv81TXi2mG1nJMm/ESKQwuBOIkwXvP5dUF1hgiGyMQmJ0EJN5l/UspEEKQpAlZltC2HZcX1zvJyFg8FcUxkbUIBGmckSTpzlA65sh7PHXTUFUbbm6uUbuhNwRBmub0fUfXVYBASsNifoK1Ea9fv2C7uUaIga5taLuebDphMZ9x8/qcrl2zvHrGO1+4y+uXZ3zy/lOEC8TWQoCmadAywdiMg9MpJgpcXa0oiglHh8fcvXOfaTGj3Da8cf8+IoA1Fq0tp3cO+cJbD7FW0LcdXdfT9QPOSZrac3NzS57n9F3P9fX1qK1PE6Q2WBuxvF3R9QNpltD3wzh09wHfK1arLUorvB+H/qYeX0o+TfLRRqHN2PIbRZa+71BSoqRASoEbPIFRDiQlgP+s0TlJIpwbb/rjeGx6FlIQ8Citdpn7A0oL+r5DKIlU8jOtv1ZyfBkZBqIoxmhDU5f4oRt9CUoRxTFtXdO3Dd4PRHGKVmZMP+oGju/coZhNOXv+gul0Sr1ZU682u5cMgQC6riF4RxTH9K5HatiUK4zRPHv6MVlmaLuOw6NjqnLDNCv44Xd/xJNHn3Awm1AUU2bzBX5w3F6tiVKFtmMC09APxFHM+flr+q7E+YHlauuMjWolZevCMEY47dmz54+d/QKwZ8+ePZ8Df/3b394EqX9krCltJJdxHEJexNjYMnhHP/QkaQIEnr94gXMeISTB+TF1JoTRYGpGzbgAhAhoLdFS8+r5BWL3r/hhGIfW2NqxMMpYiukMPGhtR1+AHBcK8DjX0rYl682K5fIGGxnquqJtS7z3RFHKfH4EQtI0DXEcQ/Dc3FzStS1KKx68cY/15orG3fLOV95klh+wWte8//0PkX5AK4PzgavrGzarFmMt73zpHtZqbi5qpDTM5gf0ruH+gyNevbxkvig4OT4iSxLe++IXmS2Oee/dd1neLrlzeoLSUBQZfQsEwZNPXtJ3HYGxvyCOIpI0BSFRylBuW7bbLdoakjQjeA/BE0Lg6vIW7x3BjwvQeOvugIDWhmFwRNZ+tngZM8Z2Sj36MEIIGKtI0wgIO2lW+PSTwkaGYejHsjElkGr8faQUKCmBgHMDcRIT/Lg4aGsJQtD1PcaOiUZVWSGVHuVC3tO3DcENhOCwUUwcJ1RlSXA9hIBW45+BEAYCMJ0vGJqWvqlI84Szl68QKtC0DXk2oe8a2nbLp03IQmjWqy3OO9q64fEnz2laR+e2TA8Tlusl0yzh+uya7XpLU7ccHh4xnU85OFjQV4H33ntIlEi6pmZ1c0NmIy7OX3J8fIwyUXS7XM+lkgOI/K/9tb+W/gkfzT17/o1gvwDs2bNnz+fAL//yLwslhRuGcCl1WGa5DodHKad3DhAy4MLAMDjqumK9XjMpZrRtS9e3SCHHAdOMmfNKKZz3CARaauqyRvidtEeMZt0xzacdlwGjaduWKE4wuwVAK4Ug4L2j7RqatkIqQZqmNHVNVW7wwSOlQStD19d0Xc0w9Gy3JVIbjNZsyzVpZujaLdeXV/zsz32T7//BD3nx7Ixnj55A6Al4mqGl957ZYs7h8QGzWUFb19xcrlivl0ymMdtqzb0Hd6i2HdPZnC9+6R2Wt0sW8yO0jnnrnTfpe8eDO/eRYuD4zpzZbBzkX7+6pChitAyU5ZZ+6JhOJ7Td8Nnt/8XZBUIIprMJUlmENDg3AOPPIIktPrixO6AfCN7TDQ5tok/n+DF6E8bbeS1xPqCUQWmD1gKlBUrInWTHj7fru0jRKBo9G0ruisEIo3dAyc+kXniPEB4fPFEco7UZ5UJCIoXalZEZtDEIRvMwwdO3LQE/NjRXW/q2huBG2ZK19EPD4FqSNMPGluvrC7J8xrbesl2vkQKMMcRRzGZ1Q1tumWRTltdL2rqn3JRcna/58QfP+Z3ffp8nj8/J8inKxrgg6HvPZr3FeQ/e8+abdxn6lvXNlq7teOsLd7hzb4EKjnK1ZugGPnr8CScnpzJL06xpmkxoVQH953RE9+z5qWa/AOzZs2fP50SH+mDwfEwQS2tpT05TTk4zTk4OGPqBrnUQBEVR7AbC8RWg7RqMVTRNg3OOEALOB7SJqOqaum4wRjEMHsJY6mq0JniBNpoQPHE8lkUV+YTYJgyuY1st2Za3ONcjhcLoBGsjttsVQoztuHGUorUmOE/X9tgoIs8zjNGU2y1FkXDn3hHnZ5e8+4W3+cPf+QFt2aCVZ7u9pWsahJAooTg+OuT49JgHbx5yeJTz+MOXXF7cMpnOQAi+8Y0vgrfcrlb83J97j5ubGyaTGZfLM45PJ0yKCCUsdbMlSRRFXlA3gQ9+9JjpNOXoaEpddayWFcba3cAOUawJYUDKwGwxwUTRZ0lHVkdUVbO7wbdj4k8S03QtNjZoE6G0HY3ZQiC0QRoLcvzv3nsECinH3H8YlzWFQCIIwe/agAVejFvEp7GtQgq8B4TEWIsPo18jBEFwIMRo/JYE2L02jPGrAmtj+t4xthwE3NAz9N3O62BYr5YE7wmMX6PWmrbp8CEQZxn90BOCJy9ybq+ukRKarsJGY3HaenVD13fkk5zLixWrZcPLl9cYFfH0kxc8+eicsxe3pGnGdDpDAuHTf07viSLN0XHBZr1mebNhs90gxMB7X3yDulrTtxVVs/Uvz56vZvlkVRTTq0ia9a/+6q/uF4A9e34C7BeAPXv27Pkc+Pa3v+3//W//zWedE/9l0/kfOu/boog5PM7IpxHaWpTROCCKU5quBaBuaoQURNZQlttRs+7HuFCCo65q2q7D7wZ/rTR5NsFGMVES4f0oH2qaCjf0NHWN82P3wNhWG2NtjFKG4APb7Zquq9HakqYToihmuy1xLjCdzXBDwJiIpqkRCu49eMhHHz4mOMeTR0+x1iJ84PzsNbFJWCzu7hYZiOOI2TRl6Cr+xXf+gPOLS45PF6QTwxe//CbltuX1qzO+9s13SLMMqwsuLq75ylff4b0v3eXsxSvqtiSdJtgoolw3/OC7P6ZvB+IoY7suWd5U1NuOLIto24YkiTBa0NRbkiwhSlLiOKGuNxglqKoGgty9rMjdK8vY2hvZiNgauq4ezdpe0A+OaBfVGbxHeI93gYAYpUAIyqbF7XoGhmEYF4Wdxl4KSd87vBO7FWHsF4DdvhJ2H6T4VJUvQIydAEJKRHAMXU1k
Ld4L2rbf/XzHJugA5PmUvuupy4o0SSnLCqXGpCIRYDpdYHeLnbGWpm3ZbErqumJ5e0OWTnj+/DUffvQRk+mcpulIY0vfDpy9vMDX8PEPHnP+/ILQ93g/AJIkSZAStBHc3tYcHh9wcFRwfbGiqQaqynN5teG9r76DzVNAea3VUlv1/fl8+lhK6Xc/hj179vwxoz/vL2DPnj17/k1GLvxFvZY/bPvworB6lk8D9x4uqNpAuW2I4oSubTBG07YdMKC1oa5ahr5HCkAo/ODp2gqjU2QSoY0mL3KqqsSamKppaJpyjJkMnuAd3g9jik9WUNbQdu0uKQiS2JKmMWVZYm1EINB1LXVbUuQZWls26zVSKvq+Z7le8o1vfpXV6pq23lIcHlBX48ArgmKxOGYYOgbniLKUBw8fMgyOVy/P+OjDH9B2nq9+7RtoZbCp4Xaz5kcffMQX3n7AfDHj9aslL1+8opjEfONnvszN9YquHTg5mROnKa9fXxKZmNVti7WSjz98RhwrLi/XfPWrD6irkrLsSNKYYRg9AUkakxUT/BBomxaJYFuWZNmYEAQBG9mdzyEZb/QDOO/QSo/Sq8GNnQC9o+sGtJSkxjJ4TxSnODdeYEul6F2PUBqlLSEMCAHeB4beU4eWySRFKY8ABhdIjOVfaY0+9XlIQhjbe8XOfzD0NY1w2MgyDD1NWxLZiLquIUiKrEArRV1uMNYQJwl1syGEQACU1mTFDKU0k+mUvu/YbErmZs7zp6/4+te/wXK5pakHFrMpp3emrK9LXOe4PFuzvr3m+vKGLM8ICNI4xxdj7Onh4WJMnLKw3lbEWULeCmKt2WwrLs6vubr6hLe+8DY2S9XyZlls6yY9CDgRhx8ymg/27Nnzx8z+BWDPnj17Pkf6C2uNFnnow01Xt0OeWQ6OE+7fn5FnEWmc7Ia+0ZwKkqZu6Np+bGtlTNHxzhG8Q8hxMEzSGKkVaZYhlNoZP8cWKyU0So1ykvn8kDTJIUDbVPR9jQ9jOovzDmMMfddR1RV123BychelLH03YExMmua0Xc+d07vc3mx4+ugRJ4cLlrcrmqYjjmKElDRtD0ikUiwOjlkuVzx78gk/+N73CN7w7rtfZD474PpyxXbZ8uyTc06OTzk6nqGU5PWrK45O5rz7pTfxHrZVx4M3j0hMTBwZju/MEcrT1B1l2WMixaOPL+g7QRCe6+uauu6xxrBdl9goHl9FbMx6vWHoBtbLNXFkSZIxVQeh8B6E1pjYEoCm/bRPYfRoCDG+Aqy3FUgL0mDjaBcRmo5lZFYTWUvX9FhtUErSdR3GaEIYi8W6rkPInRlYCgIBJccEodF8u3sFEAKlR7nRMDiU0QQhqOuxD0II6NqKrqmItKFtK7blBkRgW62pmw3FpMD7UW7UdS3SGPJiRtU0FPmEg6MjQgBJwvXVhs1mw+nJPX7wvcd88MNHRLHk5nZDnhfji1BZU1Ul6/Ua7wM2jZjMc6bzKeW25OBwyuJwyhAEs4Oc9WZN8DC4Ae88m3XJ7/3u76Klcg8evjGrqvrrN1c3D45ktvzcDuaePT/l7BeAPXv27Pkc+ff/5t9ceWnqqhrO61V71m27kCYxk4nlcFGQJBFplo0mX+kZ+p66aej6sbW3H9xozlUS7z0I0FohpEdrQZzYcfDfiU6sNQgJSlrSZELXOdq2G+Mk4xitLVEU0fY1q9WSuq6RynKwOObw4IS6aqmrFmtjFosFSimsjRBobq5umc8WvHh2xtANWKPHl4muJTCm0hSTOduyJC9yuq4my3Lu3BmbYT/65GN8GFjdrsmyjAcPD3n73RPq7RYt4fBgAUHQNDVpYpgUGY6BN9484MH9Y5bXa6yRvP3eIa9frbm9qYhiwc11RV154jil6zxV3Y3fqxlfLzarNUJA3/XESYyUCucHlPz0ln9ASknTdRgbjzGgzuOCx4cx4UcKQRRZtDG4oUOpscAr+IBREqHE7nMSBOdpqwajzdjWbBTG6F3JWgyI8Zbfj5n/WmuEULtyMIndpQ9JoZDKAoq2GT/Dru0JTtA1HUkcowjU9RohPFpL8A43DKRpRvADUoy9CMoY+q6lbioWB4dkeUGSZmgT8erVC9586x6JyfnR917y6vkt/dDT9RXd0JBkOcV8wXR+gLaW2UFGnBtmhwU2jXnx8pKqbDHKELzi7XdPefHynNP7R5gkkOQR2oj28vzV1Wq5vJ3NZkUc2beumuXR53w89+z5qWW/AOzZs2fP58ym9v9s1fiPlrebHw5t52xkODgpuPNgynSWU0wmRHGMEAoXxsHz01vivu+RSqGNYegHECCVxgdI8xStx4HfOccwjI2+VbVFiMBmu6KqNlirmUwmaGUwxtC2NVW5xvvR5Ku1ASHYbNa76EpDFMXUdU3X9uRZznazIo4s27IEKRn6AWMtzo9Li7EWZSwYzb37d1mvl9RtS5xmoB1X1xccHx+SZjGTScbhUcH9hzPOX99y9uqG+w/uoCPB3QcH3L13xGya0w0Db713SpIr6rrm6mpLZBVGKW6uV+R5hAzQd4FyW5OkmtvbW7I8GQuohp6b6wuaqsQPjsE74iQev2bnaNoGpTRKauIoG+VJkcV1Y+Ou0aNPIi8K8iJBijGxx/kerRU+OLwflwQ/DFirQIJQcmwFRiAlSDEWug1uQOixpExrvesJcMCnEaEaHwKjLUBirEGIXR9APzAMPcE7+ran68aFII5jXD9QbbcYrem7ga7rxuXMj2Vifd8hpSCKYy6vLtBRzGJxRBCB+/fv0fc9g2v4Mz//JV69uGZ923J8csDzZxdoE6ONJstz0jzGRhpj4Qvv3iWfZLz9xVOEhBdPzrk6u6YuW4bBc3p3ws3NNSenR9RNFQ7mBxdHJyefxIn9vlDi/5Vk6e+FoL/6S7/0S/HnfT737PlpZO8B2LNnz57Pmbe/8ej9T7739j/pE/3Fvh+aWJHPDnIGF3jw5gzxXOEdXHbnYytvktI2DVqPsZVGG/q+pxsGtLYIqWibHinGYrExXcbhfY9SkjgxdF1L31d439M0gZubawKOyCYIIXZyFE1TN0RRTNe2RPGoh4+imKZtkdKQpgltVROcox16tDWYOEZIRfCOoeuI4ogojpFGcXp6RLm+4emTj8mzHKTk5nrDnTt3CB5c8GjVcu/uXZZXNY8+fsXxyYLpPGZxnDKb5txcb3A4Th8umE5SvPe8urxmcD3HdybcXmzRwuEHh3MZbdvSNGMakBCOokhom5rgBTfX12RZShBgI4O1iroeqKuGaB7TO0+cT8cmLxEQSlE3NVFs0UqPMaAhYI2hDbuiryAI8FnUJ2HM4Dd6bB4WQjIMHUE4kAJl5C71qSOWCmX0rlFY4FxAhPCZabdre5QUaDuak733hODHduLdIB+Co6pqpBTkeYZSiqZtEVVJnEBAoLVESk3T9ti6xmhDGqdcX9/Q1j355JDb2wsOjxY4P5qBH7w54/TujGfPrumdw0TxaAjOC+LII3XPZDbj/PxqXOIWOfceHnBzueHZtqRcbRnalu2mJk0V83lOOwjefOuL4eXLR/W
946MPJ9PFx2k++a0GfvOvDIEfvvfm3gOwZ89PgP0LwJ49e/Z8zvziL/66S2P9tHPiR3XDq6HtwzB4sjxlfpQxX6Tkk5gkiZkvFgilP8uT77qewQ0458Z8+yhiu1pTbta4vmM2KRDB4YaOtm1pu5bgPd551C6qMgSIogSjI5Q0CKkxJkbriChJCGLUqG8327FJ1nukkFir6fqa3nW7hltBP/SUdU2eFWP2PR5jFbNFQZJG9F3Nj97/Id57usFT1hWz2Yyy2rLaLHHOcffuXT764CVXlyVHJwuOT+ccHU+IjGZ5s2EYBt5485g3HhzgfcAPksvzJXkRcXQ8Z3ADVdmSFRmrdc/FxYosT9hsSkLQ1HXDalXSNg5jYpQ2DMGTZCkIaJoOay3FJKPrO+LY7r4PS1uPP78kTXBuwJqx0GsYeqQYS7yCD3jn6HaSIu89ysR4r9BRRnBj8dqY66PQShFHEd45mqqi70bj8KfeDR8GBjcWmvVd/5n3wDm3exmQSAmDG3sGYIwPdc7R9z1qtyxUZYlkDBXq+540Tce/V8B6dYtSkiSO6ZoGZROECKSZxdqxN6HrGv7if+fLPHhjQd940jgniWO0EhyfTDg6OsBozVtv3mO77qmqgQ9/9IJ79484PplTNxXbTcPQdVycXfIH//J3eH32lNO7x0ymUymlXM1PDn/j7a985Td+/dd/3f3ir/+6+/a3v+0/v5O5Z89PL/sFYM+ePXv+NeCv/2//w098L/7R1W33ncur7VOBAOkJwTE9iDg8SnjrzQckaYLWBhvFSCXHZt/dABvFEU3TstluCUCW51RlRdd1o5Z/dzvtPQihSbMJcRwhpEcQdibSMLbe+mH3ajBQ19td94ChaWu00mijWa5u6fsW5x3SSNIiw0YJhwfHDDvd/GQyRSnNcjmaRr//ve/hHMRJTmRjpJDc3F4hFdy/f5fpfM6rlzdYk+B6R9+15EXEdlPjvSRKJQ+/cIeDowWuh5vLNcvbFYuDKad3D2jqjqurJdkkR+mIy8slSRKR5YY4NhgjaFuHkgapxsH5U0Outoam6WjrjjxPGYYe17fU2xVu6GCXr58kMSF42q5FSBi6BgYHbpT89P2w+1QDAU/bdiilGLwnz4vdUpEihKKte4bBI8WY8tO13VhGJkApcK6n79vx5+zGEjUf3M4Qzs5EPEqEggcp9WgY3oXn1E1FFEWjHKt3BAJRZHfSIo+SAucattslt8sbiknOMLQgAhJF19coPb5mSKM4ujvny19/wGyW0TYVx8cZDx8ekaaaN988Is/HBeTgcMbh0ZTl9Ybf+c53mR3MePjOCdvtkuB7CB3WKC4vX/LD97/XPXjj4SzJMqUE1/uhf8+enzz7BWDPnj17/jWh9/Jx27TfCS4MTTkO7FFkMHqUhQxDT5rGGKPxwdH13Xib7x2u7wnOsV4tkXJs8K3LDjcInAOkpCzL3UAYkyQpUki6rsV7TxxnIMKYAuQdWmq871gurwkB8jwfb5OVIssyLq8uGPpR3nN8eozUY/HYdHowSpC6hqapuF3e0vUwW5zwwfvvU9c1i4NjFvND+q6jqUu0tnz9G19nNj/g7OU5bd9yfbPi6uqWr33jzVEjLzSDc0wOZsSZpWsbXjy7pql7jo5zDo5yfHBcnC8RUpOlCTe3K+JEszhMiRNNFEUMQ0/b9tjYIgjUdY1QiiKfsbpZs15tx4QjK2mbCkkgBEfbNBil6JoaBLRtwzD0dH2PVIbeefq+xw0DbTuWnWkd4RyYKEYphbEaKQXOD9jYoIRk6B195wlyjATt+4HgxjIxJRXD0O9u+z348Nny4f0wSrWEgCAYeodAjlGk2hDHEUoJhBg9BtpYtmU7msad28l/up2ECLTW1NX4Z05rSd82GBtRVSVaK5zvsZEhSWNO7x5y940Zb7x5SFP3vP3ePWwkqdYlf/7PvUcUG9qmxvuWb/zsewz9wKPHTzi5c8zd+yc0bUNkJevNkvl8jutc9/Lly3Ml5NtN288/73O4Z8+/CewXgD179uz514T/zbe/fW6MWetA3TdNsEYzKVKMVQxuQCpF1za0bUXXj3GUdjfU4h3lZk3fjTe2TdsilKLr+zGzXgh8GAjBsVrd0jQN/TDshno9DpRIlLJYE6N1jBCSJE6ZTKY0TYNSiulkxvnFGQKYTmeEAM+fv8APHmMs3nuqbUlVVdRNizKGYj7j4tUZTbnl/oP7LA4WrFdL2q4iiizf+Nmv0nvH88cvcMMAQdO5kj/3l75E0znOzlZUdYdSkskkYxg8y+uScrvlzXfukBY5TdtSlR3eSebz6S5ZxvDGm0dorcas/qGj3LbESQI4ym1F23qmsyl11bJdV6yWNW3bI6XAGos1Y+Sph10MZ0AbjdbjMK+1RZvos9IvrQ1Ky1GihaDvBmwU4dyo4x+GdlxotMJG0WjolePf+2mkqJASrTTBid2wLndJQJIA2N0N/jB0uGFAG8vgBkw0Go/DrjogiPEivet6kjil7zw3V0v8Lk42uDC+KnhI4gwfPGW5BTxVeYuNLFZblJKjTGoIIKCYFNy5d8Abbx4zn+ZcX73m3S/dp24a1uuGd790l6GDi9cbvBCc3ruDawPPPjnjjbfuMZlPWa03tH3JoyefdJNJMUyK/PeFlH8w9P3x53T89uz5N4r9ArBnz549//oQtvB716v2j6ra3XoPNonIJpZJESMlnJ9fMgxjdGOcJMidTrwsV5TVGvCslkuEgKapSLOEtm2om5K+b2jbBqkEgx8YXM90ukBKgRCSxfwEpQ3Oe5RWICRRFFFutwAYY9lut2htmE5ntG3D9fUlcRxTNy1d19K1DcPQIYGimGDjlGq1ZLO65s6DO0xnU16+eMZqdUMUJ9x54yHDAM8enXF+dsbQOdzQ82f/wlepmo6PP7wmjiKkcmRFTFk2bDZblssN88MJR0czVrdrktggg0IrwdGdhGHoOTnJKSaa2XxsLx46j9EKowTbdcnZq2smkwwlBRevL3ADrJYVJjIUkwJtI7wXuMGRxAldPyCNQipJ23Y4F7AmwjuPjSKSPEdr+5lhV6kx4kcKQT+0aD2+Ygx9N5a4mQhlDGmeEEUpUgusHc2/EOiHlmEY5T5i91lIqVHKMgxjElTbdRhrCUhsHJMVOUobpDEobem6AefGpSVJYla3a7S0KAFKCKQI9F1HCJ4ostRly3azpdpesV5eY2xMQKCUIs8L6qZlW1ZkRYqO4OAoJYlirq5umB0seP7ihig2BFlzeXHNxx8+YnqQMQwd/2/2/jRm0yy978N+Z7n3Z33XWrp6m5nWcJVEyTBtQUls2B8dIx9CBAYCCHHAQPaXyEaAhDJgKAgcQIkjJ0IQaSKRsilKghgkUSIHUiJZsUJTMkVyOBzO3mvt7/as93bus+XD/XIkA0aESJ6pSeb+AY3qRlVXv/VUH9R1nfNfjvsjX/3tr5FngrreMQydS9JkU7fNszTLsmqx/NV8of/TN3kAJyZ+WJgWgImJiYkfIJzTz7Z1/N8fDsOHh0OHl5DmCdU8Iy0U5+cPWC5PWa5OEf
Swgh/1/x874AQgghF5+1dmytXQ/WMhrCE/B9D86uV9K10bDOYjAcwjkL2dVYFnMs53Oc7B+iWlboWokkjTEcpGhrjWJRo646MLfeQVgtaxijYGGwvZPj8pU+PF9gOlvBOQY4h6Zp4fvrtas0SxGEAcIoRJxnEJ6HNI3heRxKGTjL0DTK87xo9OLtF9kHUYebN2+qD+LnEELIeaIdAEIIIU/EOZ8yxsDeXzYyUkIpCWstfM+Hn3lI0wx1VcHzODxfYH5WoK5b5L0UaZri5OQUxmgweHh8dIYkCpH3YnS1weWrfYANcOXqBoQA4thDUyscPFrAOQdjNKwBOGNQSv2f6+CcIQgDaCPhBR6Yc/B7GYqihJQKkR8zZ50+Pj7+QAIAIYT8dUA7AIQQQp7INI0RjDFr1/+My269Ep+lPVhrkCQ+pOrQtDWqqkXTtkiyCP1BitHGAMYpJHGILItw7+4h7t65j+EoxvRxidWqwaKo4Qc+pFTwPIGm6hB4HDs7A/T6ERjjOJuWqKoWd9/ah9aA1gbOMQRBCM8L4RyDsQbWWsRxCMGAclUJY0x69/Jl6tUnhJD3UQAghBDyRHVZKqWN1EYDzsJYAyF8dJ1CtVwhz3M0lQJjDNoYCO4jz1LkWQptFDzuo60lqpXDnduPABiMNzOMN/tgjCPP+uAM2Ls/RddaDIYDdFJDBEB/kODy1Q1YZ2C1hmwcwiBYtwtZA9lJKKnAGQMX61OFwBh834ezRhsl848+eLB93jUkhJCLggIAIYSQJ1JSGiOtYcyh6xQY47DWYTlfYnM8Rtd2WC1LWKORZwk84UGr9WAv1Wns3TsCA0fT1FitKrz4wz+E8VYOxi2CgENJCThgMEhxNltCa4fNrU30+znC0MPmVg/Xnt1CEPrgwkHJDlmWwvc9hGEAMIe2beEYYB2DtQ5KGmgltbWGced6511DQgi5KCgAEEIIeSqy6VRbN1C6g7EOXdcBsPBjgST20e/FcMahbdZTgRljaOoGMA7j8RBxEmC5qrFzZQtXrm5gNilRLBqkeQxjgfm8QZKFCGKBs8UC09kCbdsiyzMwMORZAiF8JFmMMAnWL/qCgXGGIAjABIdSGoxbSCXR1BJHJyfKi8RDZObkvOtHCCEXBQUAQgghT9S2TBWLatk0DZSSULIGHMOyqNd/SDgDe/+jdYDwOLRU6CoJByAIAGMlulbiIx+7iqvPDBFGYv19SsHzAGsd5mcVpicVPC4g2w6yW6/oZ1mGKIoheICyrDHeGINzBs4FtDawxsITAs4xMMsAa9A1Cm3VNM4Zp1SSnmsBCSHkAqEAQAgh5ImUpybzs+JbsrHoGgvVGjw+nmE2L5EkETgDuOBQnYTgHNYYKKmRZhGMsZBKwWi7btnxPWitMd7sY7yV4kMf3kSahbhydYDhIIYfCJxNS3DBwQXAuIFUEtZpOHCUSwkpJaIoBucc1lgYrcEABD6HEAIcHs4mFRh4yQXrfGOC864hIYRcFBQACCGEPNHNmzdVVXQPilmty6LD4qzF9KSA7wXrc/kdoJUGB7AqljBKAwCSLMFgmCOOQtSlhOcJeB5HFIToWoOuVXDOoWkaSKmR90KkqUDdSMznNc6mJepqvevQNjWK+RKcR3AOUEoCAJSUUFLC6PXvN0qiWio8fO/ICY9PkjSeK2Pi86seIYRcLDQHgBBCyFOZTeYnUrlpkmeXhAeUVYvxdg/GWFijYbQFZx6clei6dn0KD9YtPgwRHj44xZVrI8SxB60ZurYCA0fbWGyMU/h+gDgOoXoGYWSQ5TGaSmJ6usRwxBDHPrQ26LoWcA6eJ6CNQZzEUEpBKw2lFJzjWJ21eLT3oGaB2gdnVdzv7593/Qgh5KKgAEAIIeSpVPXi4Ojw5KjXG/fDKI21tdjZ2YQxClXVwGqgqSskqQ/nLMI4hGAeFqs5ymWLy1dGGAxTCE/ANgpccAyHKYyx2NzuoViUmEwL9Po5otggTSIEvkBTK1SrEs4lODxcIM1CtHULN8jgnEUQxrDWQjAPUmloY3F8OEXb1mdRxBur9fTzn//86rzrRwghFwW1ABFCCHkqnoe7Fs2b3/zGf1/e+p9/au7evoN77+5DKwaj8f5pPD58zwdjHIwxODD0ez10bYvNrR7SNEG16tatPXWL8XaOtpMABNI0Ri/LAWcBrF8wdjBgwsLzfTDGsCoqcC6QZgkYE7CWQXAPzjC0rYJsJYySOD48Qae7QvhcSSmvfO5zL/nnXT9CCLkoKAAQQgh5Kh968cX742H+n5LUuz+dPGjfeffruHf/Lpp6PQCMMQdjDKqqgzMcxrj3e/dbVGWHplZomhbCd/ADhvFWBmM18l6AYlGBMQ5jJAbDDFobCE/A93yEgY8sj6EU0NQGzlgordY7DEJAdhJtK1HXNTgHVKvQtdIq3TWcBzIIw9M7d2DOu36EEHJRUAAghBDyVHZ3d+3Ozs6fPPvcc18JPFb2876L/D6O9mdo6vVxn1wIKKXRNh26pkFdrbD34AhaG/T7GYx2OJvWaFsFzgSscRhtZGAMaJq/mi+gEPg+uq5F20o4Y+AJQAhAqgZpxtF2LZqmQxAIdG0DxgCtJWSjMH1coyjKWd5LD5yzjgl364033qAAQAgh76MAQAgh5Kn92q//+tF4a+v1q9evfXVn59Lj04NH7ut/fAeTkxqLWYvjowJl2YJzjvlsgZOjCVZLjThJ0HQNmGBwjmE4ytHUEkZbOA3AOeR5hiCMkCQJ8n6CMAigNWABdI0EB4MxQF1rwAHlaoVVUWPv/jFmkzPIRmJ51uLhu8dutVrsjzZHd0eb23/e87Pj864bIYRcJBQACCGEfFd+87d+6/YPf/zjv9bvhW8+PninffPW1/HO7Uc4OSoxnVVoaovDwymcZWhahVZKFEWJrtVwxiFNQ/T7KTzfw2LeYDZdoesUjHEQ3rqNKI4i5L0MXDBYCzjmAAD9PIeU69BglMTJ0RTv3T7F6cEKi0mLx/tLvPXm27UIzH6c59/qbW7+2994/fX5OZeMEEIuFAoAhBBCvmuv/MIv/EWahn88HMaTsjjBW395C0cPZ9ASKBYdmAtQ1x2sE+hajSwLwWChtEGShmhbibZVsEYgiCP4YQBjNXzfQ1XVqGsFP/DBPUB4PrwwhIGF1gyytlCdgbWA1Rr1SmI50zh6WODPvvoNnM0nR0kv+Uo2Hv/uF77whfK8a0UIIRcNHQNKCCHku/apT31K//LP/dy/rlbV39SGi/nkaOfW1/6MPf/RFzDcGMMZh6b14QU+rLGIIx9ceKhWHYKQo2k04AQYtxgO4/XMAOfgeR44t6jKGsLjyLMcq1UJrSyC0IdWLWRlMZuUcM7BKWD6+Azv3b3nymKql9V8cu25nT8aX9v8Lzdv3qzPu06EEHIR0Q4AIYSQ78lvvP76/esfeu5fXn/22lfDGIeHB2/bW3/xpzg9OsHZ6Qqz0waHD2eoK4nlskO5bFGVNaxxUJ1BnARIUg5rHTzPR7EoUSxKWOsghIe6Xr8UzBjDclEhED6iwMPZdIGmcjjcW6ApFSaTffvNW/9
DHxy/VThWPw7jYD/1En3e9SGEkIuKdgAIIYR8z67eePw1pS69YazTbdf83dns4aVvfqPF9es3sH3pKiw4wsTHeJxBa4a2sQgjha410LpGfxDAWYem7rBaSSzOzrB9uQff8wHnMJ8tYZ2DswyrokQcBfj6u+8hTCP00hS6A9IsrcOYl0kaT+I0OZVS9qSUtMBFCCHfBgUAQggh37Pd3f+mv/SlL/2H//jGv8m06hLhH7w4nZx86K3bczGfnWIw3EYY93A06mHn+gCzWY3eIEVZ1ahLCc4yOOvgXIv5tMF0UiPLE9RVjX4vAOMepFLwmcDkqMTx4Rke7b+HVTm113aeqTbGg6qp62o4Gk56WXac9npH40sbX44nk4fnXRtCCLmoKAAQQgj5f/Lyyy+bf/ZLv/QHDJBhEHza44ydzhZX7j+6ncSTh+jl2xCeQxi9ACUdJo8LFIsGqjWolxF8T0Abg7Lo4DGO+aRBXXaAtrAOsNrBKI67b99zd27fLvxAV1KeLWYLd8S98X4QCnb1mWvTMAiqJM2/9jOf+dmvvfzyy3TuPyGEfBvsvC+AEELIXw+fe+mlYF/g00cHB/9wNp9/4vjxZGe5Wo58L2FJsuF95CMf967sXELWT1ndGHAORAFDf5AhiDkevDvFoJfCOANPcAjm0EqHalW5k+Pjav/gwUqa1UEvDee9XrY/HPYf5b3Rbd/z7nmc687a+vqNG3u7u7v2vGtBCCEXGQUAQgghH6iff+mlK0o3P11X9Yf3Huz9ZFXLUd00fYYoGQ22ok/86N/2Oq1ZmiUwUsIPAviBhztv3sfWeAudrBFHMXzfubJt5aOHj4q2m76zsTF6yLjTURic5Xn+IEmS//yctfu7b7whz/ueCSHkBwkFAEIIIR88xvArv/iLO8fH+/94uSz/1sHB4QuL1XJrVanRR559oUqzXjAeb8aC+77R2hpr5P+686ZJ08wYa6zHRcu4bYpyzja3xt9MY3E8GA5PwjDsoiB4nEfRH3zhtddOzvs2CSHkBxEFAEIIId83//zVV/uT1eJThyeHn3609+iTq7rrp1FaeF5QJ3HG0ySPqrZ1Z4upX3Yl4jieJEl8FobhMk/zZjKdXd+5vPXmaDg8Zpy3eZL8eZhl92nAFyGEfO8oABBCCPm+cs6xn3/llZ9aLpY/07Ztbpl7FIZhaYyzjDGure6XVTWwDCwMk6LXy96Kougdz3hLLfTQKrUVZ9le5NzsN3/ndw7O+34IIeQHHQUAQggh33fOOfYvfvVXrxog/sQnP7l3+w//0CvznNnFIlxI+SNd2xaeEMo53z7/0efv7e7uUl8/IYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCvjv/G4hj+Ujq//mNAAAAAElFTkSuQmCC\n" + }, + "metadata": {} } - ] -} \ No newline at end of file + ] + } + ] +} diff --git a/docs/readme/ru.md b/docs/readme/ru.md index c001adc..4dea7ea 100644 --- a/docs/readme/ru.md +++ b/docs/readme/ru.md @@ -25,13 +25,16 @@ ## πŸŽ† ΠžΡΠΎΠ±Π΅Π½Π½ΠΎΡΡ‚ΠΈ: - ВысокоС качСство Π²Ρ‹Ρ…ΠΎΠ΄Π½ΠΎΠ³ΠΎ изобраТСния +- Π Π°Π±ΠΎΡ‚Π°Π΅Ρ‚ Π² Π°Π²Ρ‚ΠΎΠ½ΠΎΠΌΠ½ΠΎΠΌ Ρ€Π΅ΠΆΠΈΠΌΠ΅ - ΠŸΠ°ΠΊΠ΅Ρ‚Π½Π°Ρ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ - ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠ° NVIDIA CUDA ΠΈ процСссорной ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ - ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠ° FP16: быстрая ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° с Π½ΠΈΠ·ΠΊΠΈΠΌ ΠΏΠΎΡ‚Ρ€Π΅Π±Π»Π΅Π½ΠΈΠ΅ΠΌ памяти - Π›Π΅Π³ΠΊΠΎΠ΅ взаимодСйствиС ΠΈ запуск - 100% совмСстимоС с remove.bg API FastAPI HTTP API - УдаляСт Ρ„ΠΎΠ½ с волос +- АвтоматичСский Π²Ρ‹Π±ΠΎΡ€ Π»ΡƒΡ‡ΡˆΠ΅Π³ΠΎ ΠΌΠ΅Ρ‚ΠΎΠ΄Π° для изобраТСния ΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚Π΅Π»Ρ - ΠŸΡ€ΠΎΡΡ‚Π°Ρ интСграция с вашим ΠΊΠΎΠ΄ΠΎΠΌ +- МодСли Ρ€Π°Π·ΠΌΠ΅Ρ‰Π΅Π½Ρ‹ Π½Π° [HuggingFace](https://huggingface.co/Carve) ## β›± ΠŸΠΎΠΏΡ€ΠΎΠ±ΡƒΠΉΡ‚Π΅ сами Π½Π° [Google Colab](https://colab.research.google.com/github/OPHoperHPO/image-background-remove-tool/blob/master/docs/other/carvekit_try.ipynb) ## ⛓️ Как это Ρ€Π°Π±ΠΎΡ‚Π°Π΅Ρ‚? @@ -40,6 +43,7 @@ 2. ΠŸΡ€ΠΎΠΈΡΡ…ΠΎΠ΄ΠΈΡ‚ ΠΏΡ€Π΅Π΄ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° Ρ„ΠΎΡ‚ΠΎΠ³Ρ€Π°Ρ„ΠΈΠΈ для обСспСчСния Π»ΡƒΡ‡ΡˆΠ΅Π³ΠΎ качСства Π²Ρ‹Ρ…ΠΎΠ΄Π½ΠΎΠ³ΠΎ изобраТСния 3. Π‘ ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ Ρ‚Π΅Ρ…Π½ΠΎΠ»ΠΎΠ³ΠΈΠΈ машинного обучСния убираСтся Ρ„ΠΎΠ½ Ρƒ изобраТСния 4. 
ΠŸΡ€ΠΎΠΈΡΡ…ΠΎΠ΄ΠΈΡ‚ постобработка изобраТСния для ΡƒΠ»ΡƒΡ‡ΡˆΠ΅Π½ΠΈΡ качСства ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚Π°Π½Π½ΠΎΠ³ΠΎ изобраТСния + ## πŸŽ“ Implemented Neural Networks: | НСйронныС сСти | ЦСлСвая ΠΎΠ±Π»Π°ΡΡ‚ΡŒ | Π’ΠΎΡ‡Π½ΠΎΡΡ‚ΡŒ | |:--------------:|:--------------------------------------------:|:--------------------------------:| @@ -47,14 +51,35 @@ | U^2-net | **Волосы** (hairs, people, animals, objects) | 80% (mean F1-Score, DUTS-TE) | | BASNet | **ΠžΠ±Ρ‰ΠΈΠΉ** (people, objects) | 80% (mean F1-Score, DUTS-TE) | | DeepLabV3 | People, Animals, Cars, etc | 67.4% (mean IoU, COCO val2017) | -> Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠΉΡ‚Π΅ U2-Net для волос ΠΈ Tracer-B7 для ΠΎΠ±Ρ‹Ρ‡Π½Ρ‹Ρ… ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ. -## πŸ–ΌοΈ ΠœΠ΅Ρ‚ΠΎΠ΄Ρ‹ ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ ΠΈ постобработки ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ: -### πŸ” ΠœΠ΅Ρ‚ΠΎΠ΄Ρ‹ ΠΏΡ€Π΅Π΄ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ: -* `none` - ΠΌΠ΅Ρ‚ΠΎΠ΄Ρ‹ ΠΏΡ€Π΅Π΄ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ Π½Π΅ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΡŽΡ‚ΡΡ. -> Они Π±ΡƒΠ΄ΡƒΡ‚ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½Ρ‹ Π² Π±ΡƒΠ΄ΡƒΡ‰Π΅ΠΌ. + +### Recommended parameters for different models +| НСйронныС сСти | Π Π°Π·ΠΌΠ΅Ρ€ маски сСгмСнтации | ΠŸΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€Ρ‹ Trimap (Ρ€Π°ΡΡˆΠΈΡ€Π΅Π½ΠΈΠ΅, эрозия) | +|:--------------:|:------------------------:|:-------------------------------------:| +| `tracer_b7` | 640 | (30, 5) | +| `u2net` | 320 | (30, 5) | +| `basnet` | 320 | (30, 5) | +| `deeplabv3` | 1024 | (40, 20) | + +> ### Notes: +> 1. ΠžΠΊΠΎΠ½Ρ‡Π°Ρ‚Π΅Π»ΡŒΠ½ΠΎΠ΅ качСство ΠΌΠΎΠΆΠ΅Ρ‚ Π·Π°Π²ΠΈΡΠ΅Ρ‚ΡŒ ΠΎΡ‚ Ρ€Π°Π·Ρ€Π΅ΡˆΠ΅Π½ΠΈΡ вашСго изобраТСния, Ρ‚ΠΈΠΏΠ° сцСны ΠΈΠ»ΠΈ ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π°. +> 2. Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠΉΡ‚Π΅ U2-Net для волос ΠΈ Tracer-B7 для ΠΎΠ±Ρ‰ΠΈΡ… ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ ΠΈ ΠΏΡ€Π°Π²ΠΈΠ»ΡŒΠ½Ρ‹Ρ… ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠ². \ +> Π­Ρ‚ΠΎ ΠΎΡ‡Π΅Π½ΡŒ Π²Π°ΠΆΠ½ΠΎ для ΠΊΠΎΠ½Π΅Ρ‡Π½ΠΎΠ³ΠΎ качСства! ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹ ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ Π±Ρ‹Π»ΠΈ ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½Ρ‹ с использованиСм постобработки U2-Net ΠΈ FBA. + +## πŸ–ΌοΈ Image pre-processing and post-processing methods: +### πŸ” Preprocessing methods: +* `none` - No preprocessing methods used. +* [`autoscene`](https://huggingface.co/Carve/scene_classifier/) - АвтоматичСски опрСдСляСт Ρ‚ΠΈΠΏ сцСны с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ классификатора ΠΈ примСняСт ΡΠΎΠΎΡ‚Π²Π΅Ρ‚ΡΡ‚Π²ΡƒΡŽΡ‰ΡƒΡŽ модСль. (По ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ) +* `auto` - ВыполняСт Π³Π»ΡƒΠ±ΠΎΠΊΠΈΠΉ Π°Π½Π°Π»ΠΈΠ· изобраТСния ΠΈ Π±ΠΎΠ»Π΅Π΅ Ρ‚ΠΎΡ‡Π½ΠΎ опрСдСляСт Π»ΡƒΡ‡ΡˆΠΈΠΉ ΠΌΠ΅Ρ‚ΠΎΠ΄ удалСния Ρ„ΠΎΠ½Π°. Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ классификатор ΠΎΠ±ΡŠΠ΅ΠΊΡ‚ΠΎΠ² ΠΈ классификатор сцСны вмСстС. +> ### Notes: +> 1. `AutoScene` ΠΈ `auto` ΠΌΠΎΠ³ΡƒΡ‚ ΠΏΠ΅Ρ€Π΅ΠΎΠΏΡ€Π΅Π΄Π΅Π»ΠΈΡ‚ΡŒ модСль ΠΈ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€Ρ‹, ΡƒΠΊΠ°Π·Π°Π½Π½Ρ‹Π΅ ΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚Π΅Π»Π΅ΠΌ, Π±Π΅Π· увСдомлСния. +> Π˜Ρ‚Π°ΠΊ, Ссли Π²Ρ‹ Ρ…ΠΎΡ‚ΠΈΡ‚Π΅ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΡƒΡŽ модСль, ΡΠ΄Π΅Π»Π°Ρ‚ΡŒ всС постоянными ΠΈ Ρ‚. Π΄., Π²Π°ΠΌ слСдуСт сначала ΠΎΡ‚ΠΊΠ»ΡŽΡ‡ΠΈΡ‚ΡŒ ΠΌΠ΅Ρ‚ΠΎΠ΄Ρ‹ автоматичСской ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ! +> 2. На Π΄Π°Π½Π½Ρ‹ΠΉ ΠΌΠΎΠΌΠ΅Π½Ρ‚ для ΠΌΠ΅Ρ‚ΠΎΠ΄Π° `auto` Π²Ρ‹Π±ΠΈΡ€Π°ΡŽΡ‚ΡΡ ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Ρ‹Π΅ ΠΌΠΎΠ΄Π΅Π»ΠΈ для Π½Π΅ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Ρ… ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹Ρ… Π΄ΠΎΠΌΠ΅Π½ΠΎΠ², Ρ‚Π°ΠΊ ΠΊΠ°ΠΊ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½Π½Ρ‹Ρ… ΠΌΠΎΠ΄Π΅Π»Π΅ΠΉ Π² настоящСС врСмя нСдостаточно для Ρ‚Π°ΠΊΠΎΠ³ΠΎ количСства Ρ‚ΠΈΠΏΠΎΠ² сцСн. +> Π’ Π±ΡƒΠ΄ΡƒΡ‰Π΅ΠΌ, ΠΊΠΎΠ³Π΄Π° Π±ΡƒΠ΄Π΅Ρ‚ Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΎ Π½Π΅ΠΊΠΎΡ‚ΠΎΡ€ΠΎΠ΅ Ρ€Π°Π·Π½ΠΎΠΎΠ±Ρ€Π°Π·ΠΈΠ΅ ΠΌΠΎΠ΄Π΅Π»Π΅ΠΉ, Π°Π²Ρ‚ΠΎΠΏΠΎΠ΄Π±ΠΎΡ€ Π±ΡƒΠ΄Π΅Ρ‚ пСрСписан Π² Π»ΡƒΡ‡ΡˆΡƒΡŽ сторону. 
+ ### βœ‚ ΠœΠ΅Ρ‚ΠΎΠ΄Ρ‹ постобработки: * `none` - ΠΌΠ΅Ρ‚ΠΎΠ΄Ρ‹ постобработки Π½Π΅ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΡŽΡ‚ΡΡ -* `fba` (ΠΏΠΎ ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ) - Π­Ρ‚ΠΎΡ‚ Π°Π»Π³ΠΎΡ€ΠΈΡ‚ΠΌ ΡƒΠ»ΡƒΡ‡ΡˆΠ°Π΅Ρ‚ Π³Ρ€Π°Π½ΠΈΡ†Ρ‹ изобраТСния ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ Ρ„ΠΎΠ½Π° с ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ с волосами ΠΈ Ρ‚.Π΄. с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ Π½Π΅ΠΉΡ€ΠΎΠ½Π½ΠΎΠΉ сСти FBA Matting. Π­Ρ‚ΠΎΡ‚ ΠΌΠ΅Ρ‚ΠΎΠ΄ Π΄Π°Π΅Ρ‚ Π½Π°ΠΈΠ»ΡƒΡ‡ΡˆΠΈΠΉ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² сочСтании с u2net Π±Π΅Π· ΠΊΠ°ΠΊΠΈΡ…-Π»ΠΈΠ±ΠΎ ΠΌΠ΅Ρ‚ΠΎΠ΄ΠΎΠ² ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ. +* `fba` - Π­Ρ‚ΠΎΡ‚ Π°Π»Π³ΠΎΡ€ΠΈΡ‚ΠΌ ΡƒΠ»ΡƒΡ‡ΡˆΠ°Π΅Ρ‚ Π³Ρ€Π°Π½ΠΈΡ†Ρ‹ изобраТСния ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ Ρ„ΠΎΠ½Π° с ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ с волосами ΠΈ Ρ‚.Π΄. с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ Π½Π΅ΠΉΡ€ΠΎΠ½Π½ΠΎΠΉ сСти FBA Matting. +* `cascade_fba` (default) - Π­Ρ‚ΠΎΡ‚ Π°Π»Π³ΠΎΡ€ΠΈΡ‚ΠΌ уточняСт маску сСгмСнтации с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ Π½Π΅ΠΉΡ€ΠΎΠ½Π½ΠΎΠΉ сСти CascadePSP, Π° Π·Π°Ρ‚Π΅ΠΌ примСняСт Π°Π»Π³ΠΎΡ€ΠΈΡ‚ΠΌ FBA. ## 🏷 Настройка для ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ Π½Π° CPU: 1. `pip install carvekit --extra-index-url https://download.pytorch.org/whl/cpu` @@ -62,7 +87,7 @@ ## 🏷 Настройка для ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ Π½Π° GPU: 1. Π£Π±Π΅Π΄ΠΈΡ‚Π΅ΡΡŒ, Ρ‡Ρ‚ΠΎ Ρƒ вас Π΅ΡΡ‚ΡŒ графичСский процСссор NVIDIA с 8 Π“Π‘ видСопамяти. -2. УстановитС `CUDA Toolkit ΠΈ Π’ΠΈΠ΄Π΅ΠΎ Π΄Ρ€Π°Π²Π΅Ρ€ для вашСй Π²ΠΈΠ΄Π΅ΠΎΠΊΠ°Ρ€Ρ‚Ρ‹.` +2. УстановитС `CUDA Toolkit ΠΈ Π’ΠΈΠ΄Π΅ΠΎΠ΄Ρ€Π°ΠΉΠ²Π΅Ρ€ для вашСй Π²ΠΈΠ΄Π΅ΠΎΠΊΠ°Ρ€Ρ‚Ρ‹.` 3. `pip install carvekit --extra-index-url https://download.pytorch.org/whl/cu113` > ΠŸΡ€ΠΎΠ΅ΠΊΡ‚ ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°Π΅Ρ‚ вСрсии Python ΠΎΡ‚ 3.8 Π΄ΠΎ 3.10.4. @@ -73,12 +98,15 @@ import torch from carvekit.api.high import HiInterface # Check doc strings for more information -interface = HiInterface(object_type="hairs-like", # Can be "object" or "hairs-like". +interface = HiInterface(object_type="auto", # Can be "object" or "hairs-like" or "auto" batch_size_seg=5, + batch_size_pre=5, batch_size_matting=1, + batch_size_refine=1, device='cuda' if torch.cuda.is_available() else 'cpu', - seg_mask_size=640, + seg_mask_size=640, # Use 640 for Tracer B7 and 320 for U2Net matting_mask_size=2048, + refine_mask_size=900, trimap_prob_threshold=231, trimap_dilation=30, trimap_erosion_iters=5, @@ -89,33 +117,65 @@ cat_wo_bg.save('2.png') ``` - +### Аналог ΠΌΠ΅Ρ‚ΠΎΠ΄Π° ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ `auto` ΠΈΠ· cli +``` python +from carvekit.api.autointerface import AutoInterface +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.ml.wrap.yolov4 import SimplifiedYoloV4 + +scene_classifier = SceneClassifier(device="cpu", batch_size=1) +object_classifier = SimplifiedYoloV4(device="cpu", batch_size=1) + +interface = AutoInterface(scene_classifier=scene_classifier, + object_classifier=object_classifier, + segmentation_batch_size=1, + postprocessing_batch_size=1, + postprocessing_image_size=2048, + refining_batch_size=1, + refining_image_size=900, + segmentation_device="cpu", + fp16=False, + postprocessing_device="cpu") +images_without_background = interface(['./tests/data/cat.jpg']) +cat_wo_bg = images_without_background[0] +cat_wo_bg.save('2.png') +``` ### Если Π²Ρ‹ Ρ…ΠΎΡ‚ΠΈΡ‚Π΅ провСсти Π΄Π΅Ρ‚Π°Π»ΡŒΠ½ΡƒΡŽ настройку ``` python import PIL.Image from carvekit.api.interface import Interface from carvekit.ml.wrap.fba_matting import FBAMatting +from carvekit.ml.wrap.scene_classifier import SceneClassifier +from carvekit.ml.wrap.cascadepsp import CascadePSP from carvekit.ml.wrap.tracer_b7 import TracerUniversalB7 -from carvekit.pipelines.postprocessing import MattingMethod -from 
carvekit.pipelines.preprocessing import PreprocessingStub +from carvekit.pipelines.postprocessing import CasMattingMethod +from carvekit.pipelines.preprocessing import AutoScene from carvekit.trimap.generator import TrimapGenerator # Check doc strings for more information seg_net = TracerUniversalB7(device='cpu', - batch_size=1) - + batch_size=1, fp16=False) +cascade_psp = CascadePSP(device='cpu', + batch_size=1, + input_tensor_size=900, + fp16=False, + processing_accelerate_image_size=2048, + global_step_only=False) fba = FBAMatting(device='cpu', input_tensor_size=2048, - batch_size=1) + batch_size=1, fp16=False) -trimap = TrimapGenerator() +trimap = TrimapGenerator(prob_threshold=231, kernel_size=30, erosion_iters=5) -preprocessing = PreprocessingStub() +scene_classifier = SceneClassifier(device='cpu', batch_size=5) +preprocessing = AutoScene(scene_classifier=scene_classifier) -postprocessing = MattingMethod(matting_module=fba, - trimap_generator=trimap, - device='cpu') +postprocessing = CasMattingMethod( + refining_module=cascade_psp, + matting_module=fba, + trimap_generator=trimap, + device='cpu') interface = Interface(pre_pipe=preprocessing, post_pipe=postprocessing, @@ -123,8 +183,7 @@ interface = Interface(pre_pipe=preprocessing, image = PIL.Image.open('tests/data/cat.jpg') cat_wo_bg = interface([image])[0] -cat_wo_bg.save('2.png') - +cat_wo_bg.save('2.png') ``` @@ -140,24 +199,27 @@ Usage: carvekit [OPTIONS] Options: -i ./2.jpg ΠŸΡƒΡ‚ΡŒ Π΄ΠΎ Π²Ρ…ΠΎΠ΄Π½ΠΎΠ³ΠΎ Ρ„Π°ΠΉΠ»Π° ΠΈΠ»ΠΈ Π΄ΠΈΡ€Π΅ΠΊΡ‚ΠΎΡ€ΠΈΠΈ [обязатСлСн] -o ./2.png ΠŸΡƒΡ‚ΡŒ для сохранСния Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π° ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ - --pre none ΠœΠ΅Ρ‚ΠΎΠ΄ ΠΏΡ€Π΅Π΄ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ - --post fba ΠœΠ΅Ρ‚ΠΎΠ΄ постобработки - --net u2net НСйронная ΡΠ΅Ρ‚ΡŒ для сСгмСнтации + --pre autoscene ΠœΠ΅Ρ‚ΠΎΠ΄ ΠΏΡ€Π΅Π΄ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ + --post cascade_fba ΠœΠ΅Ρ‚ΠΎΠ΄ постобработки + --net tracer_b7 НСйронная ΡΠ΅Ρ‚ΡŒ для сСгмСнтации --recursive Π’ΠΊΠ»ΡŽΡ‡Π΅Π½ΠΈΠ΅ рСкурсивного поиска ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ Π² ΠΏΠ°ΠΏΠΊΠ΅ --batch_size 10 Π Π°Π·ΠΌΠ΅Ρ€ ΠΏΠ°ΠΊΠ΅Ρ‚Π° ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ, Π·Π°Π³Ρ€ΡƒΠΆΠ΅Π½Π½Ρ‹Ρ… Π² ΠžΠ—Π£ - + --batch_size_pre 5 Π Π°Π·ΠΌΠ΅Ρ€ ΠΏΠ°ΠΊΠ΅Ρ‚Π° для списка ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ Π±ΡƒΠ΄ΡƒΡ‚ ΠΎΠ±Ρ€Π°Π±Π°Ρ‚Ρ‹Π²Π°Ρ‚ΡŒΡΡ + ΠΌΠ΅Ρ‚ΠΎΠ΄ΠΎΠΌ ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ --batch_size_seg 5 Π Π°Π·ΠΌΠ΅Ρ€ ΠΏΠ°ΠΊΠ΅Ρ‚Π° ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ для ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ сСгмСнтации --batch_size_mat 1 Π Π°Π·ΠΌΠ΅Ρ€ ΠΏΠ°ΠΊΠ΅Ρ‚Π° ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ для ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ матирования - --seg_mask_size 320 Π Π°Π·ΠΌΠ΅Ρ€ исходного изобраТСния для ΡΠ΅Π³ΠΌΠ΅Π½Ρ‚ΠΈΡ€ΡƒΡŽΡ‰Π΅ΠΉ + --batch_size_refine 1 Π Π°Π·ΠΌΠ΅Ρ€ ΠΏΠ°ΠΊΠ΅Ρ‚Π° для списка ΠΈΠ·ΠΎΠ±Ρ€Π°ΠΆΠ΅Π½ΠΈΠΉ, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ Π±ΡƒΠ΄ΡƒΡ‚ ΠΎΠ±Ρ€Π°Π±Π°Ρ‚Ρ‹Π²Π°Ρ‚ΡŒΡΡ ΡƒΡ‚ΠΎΡ‡Π½ΡΡŽΡ‰Π΅ΠΉ ΡΠ΅Ρ‚ΡŒΡŽ + + --seg_mask_size 640 Π Π°Π·ΠΌΠ΅Ρ€ исходного изобраТСния для ΡΠ΅Π³ΠΌΠ΅Π½Ρ‚ΠΈΡ€ΡƒΡŽΡ‰Π΅ΠΉ Π½Π΅ΠΉΡ€ΠΎΠ½Π½ΠΎΠΉ сСти --matting_mask_size 2048 Π Π°Π·ΠΌΠ΅Ρ€ исходного изобраТСния для ΠΌΠ°Ρ‚ΠΈΡ€ΡƒΡŽΡ‰Π΅ΠΉ Π½Π΅ΠΉΡ€ΠΎΠ½Π½ΠΎΠΉ сСти - + --refine_mask_size 900 Π Π°Π·ΠΌΠ΅Ρ€ Π²Ρ…ΠΎΠ΄Π½ΠΎΠ³ΠΎ изобраТСния для ΡƒΡ‚ΠΎΡ‡Π½ΡΡŽΡ‰Π΅ΠΉ Π½Π΅ΠΉΡ€ΠΎΠ½Π½ΠΎΠΉ сСти. 
--trimap_dilation 30 Π Π°Π·ΠΌΠ΅Ρ€ радиуса смСщСния ΠΎΡ‚ маски ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π° Π² пиксСлях ΠΏΡ€ΠΈ Ρ„ΠΎΡ€ΠΌΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ нСизвСстной области diff --git a/requirements_dev.txt b/requirements_dev.txt index eb6a8d8..7536b7f 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,2 +1,2 @@ pre-commit==2.20.0 - +pdoc3==0.10.0 diff --git a/tests/test_models_utils.py b/tests/test_models_utils.py index 0f81409..7dfa85f 100644 --- a/tests/test_models_utils.py +++ b/tests/test_models_utils.py @@ -16,6 +16,7 @@ checkpoints_dir, downloader, tracer_b7_pretrained, + scene_classifier_pretrained, ) from carvekit.utils.models_utils import fix_seed, suppress_warnings @@ -70,3 +71,4 @@ def test_check_for_exists(): assert deeplab_pretrained().exists() assert basnet_pretrained().exists() assert tracer_b7_pretrained().exists() + assert scene_classifier_pretrained().exists() diff --git a/tests/test_scene_classifier.py b/tests/test_scene_classifier.py new file mode 100644 index 0000000..c0827b9 --- /dev/null +++ b/tests/test_scene_classifier.py @@ -0,0 +1,61 @@ +""" +Source url: https://github.com/OPHoperHPO/image-background-remove-tool +Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. +License: Apache License 2.0 +""" + +import torch + +from carvekit.ml.wrap.scene_classifier import SceneClassifier + + +def test_init(): + SceneClassifier() + + +def test_preprocessing(scene_classifier_model, converted_pil_image, black_image_pil): + scene_classifier_model = scene_classifier_model(False) + assert ( + isinstance( + scene_classifier_model.data_preprocessing(converted_pil_image), + torch.FloatTensor, + ) + is True + ) + assert ( + isinstance( + scene_classifier_model.data_preprocessing(black_image_pil), + torch.FloatTensor, + ) + is True + ) + + +def test_inf( + scene_classifier_model, + converted_pil_image, + image_pil, + image_str, + image_path, + black_image_pil, +): + scene_classifier_model = scene_classifier_model(False) + calc_result = scene_classifier_model( + [ + converted_pil_image, + black_image_pil, + image_pil, + image_str, + image_path, + black_image_pil, + ] + ) + assert calc_result[0][0][0] == "soft" + assert calc_result[1][0][0] == "hard" + + +def test_seg_with_fp16( + scene_classifier_model, image_pil, image_str, image_path, black_image_pil +): + scene_classifier_model = scene_classifier_model(True) + scene_classifier_model([image_pil, image_str, image_path, black_image_pil]) diff --git a/tests/test_trimap.py b/tests/test_trimap.py index 47ba728..44a1354 100644 --- a/tests/test_trimap.py +++ b/tests/test_trimap.py @@ -1,6 +1,8 @@ """ Source url: https://github.com/OPHoperHPO/image-background-remove-tool + Author: Nikita Selin (OPHoperHPO)[https://github.com/OPHoperHPO]. + License: Apache License 2.0 """ import PIL.Image