From 19b79a52058f2c1750c9e7405e1c900f4fa95dc3 Mon Sep 17 00:00:00 2001
From: "Kim, Vinnam"
Date: Mon, 12 Jun 2023 19:05:15 +0900
Subject: [PATCH 1/6] Update to the latest OV API

Signed-off-by: Kim, Vinnam
---
 src/datumaro/components/shift_analyzer.py     |   2 +-
 src/datumaro/plugins/explorer.py              |   5 +-
 .../plugins/openvino_plugin/launcher.py       | 106 ++++++++++++------
 .../samples/googlenet-v4-tf_interp.py         |   9 --
 .../plugins/openvino_plugin/shift_launcher.py |  12 +-
 src/datumaro/plugins/specs.json               |   2 +-
 tests/assets/rise/model_interp.py             |   2 +-
 tests/unit/test_shift_analyzer.py             |   4 +-
 8 files changed, 88 insertions(+), 54 deletions(-)

diff --git a/src/datumaro/components/shift_analyzer.py b/src/datumaro/components/shift_analyzer.py
index 06d0135ee9..011535b3d7 100644
--- a/src/datumaro/components/shift_analyzer.py
+++ b/src/datumaro/components/shift_analyzer.py
@@ -117,7 +117,7 @@ def __init__(self) -> None:
         """
         self._model = ShiftLauncher(
             model_name="googlenet-v4-tf",
-            output_layers="InceptionV4/Logits/PreLogitsFlatten/flatten_1/Reshape",
+            output_layers="InceptionV4/Logits/PreLogitsFlatten/flatten_1/Reshape:0",
         )

     def compute_covariate_shift(self, sources: List[IDataset], method: Optional[str] = "fid"):
diff --git a/src/datumaro/plugins/explorer.py b/src/datumaro/plugins/explorer.py
index 6ce5ee8087..5284108cef 100644
--- a/src/datumaro/plugins/explorer.py
+++ b/src/datumaro/plugins/explorer.py
@@ -27,7 +27,6 @@ def __init__(
         self._device = device or "cpu"
         self._output_blobs = next(iter(self._net.outputs))
-        self._input_blobs = next(iter(self._net.input_info))
         self._tokenizer = None

     def _tokenize(self, texts: str, context_length: int = 77, truncate: bool = True):
@@ -61,7 +60,7 @@ def infer(self, inputs):
             else:
                 prompt_text = f"a photo of a {inputs}"
             inputs = self._tokenize(prompt_text)
-            inputs = {self._input_blob: inputs}
+            inputs = {self._input_blob.get_any_name(): inputs}
         elif isinstance(inputs, np.ndarray):
             # when processing a query key, we expand HWC to NHWC
             if len(inputs.shape) == 3:
@@ -70,7 +69,7 @@
         else:
             raise ValueError(f"inputs={inputs} is not allowed type.")

-        results = self._net.infer(inputs)
+        results = self._request.infer(inputs)
         hash_key = self._compute_hash(results[self._output_blobs])
         return hash_key
diff --git a/src/datumaro/plugins/openvino_plugin/launcher.py b/src/datumaro/plugins/openvino_plugin/launcher.py
index 9f151c1e94..f0b1c82eaf 100644
--- a/src/datumaro/plugins/openvino_plugin/launcher.py
+++ b/src/datumaro/plugins/openvino_plugin/launcher.py
@@ -8,14 +8,16 @@
 import os.path as osp
 import shutil
 import urllib
+from typing import Dict, Optional

 import cv2
 import numpy as np
-from openvino.inference_engine import IECore
+from openvino.runtime import Core
 from tqdm import tqdm

 from datumaro.components.cli_plugin import CliPlugin
 from datumaro.components.launcher import Launcher
+from datumaro.errors import DatumaroError
 from datumaro.util.definitions import DATUMARO_CACHE_DIR
 from datumaro.util.samples import get_samples_path

@@ -98,16 +100,20 @@ def normalize(inputs):

 class OpenvinoLauncher(Launcher):
     cli_plugin = _OpenvinoImporter
+    ALLOWED_CHANNEL_FORMATS = {"NCHW", "NHWC"}

     def __init__(
         self,
-        description=None,
-        weights=None,
-        interpreter=None,
-        model_dir=None,
-        model_name=None,
+        description: Optional[str] = None,
+        weights: Optional[str] = None,
+        interpreter: Optional[str] = None,
+        model_dir: Optional[str] = None,
+        model_name: Optional[str] = None,
         output_layers=None,
-        device=None,
+        device: Optional[str] = None,
+        compile_model_config: Optional[Dict] = None,
+        channel_format: str = "NCHW",
+        to_rgb: bool = True,
     ):
         if model_name:
             model_dir = DATUMARO_CACHE_DIR
@@ -138,28 +144,37 @@ def __init__(
         if not osp.isfile(description):
             description = osp.join(model_dir, description)
             if not osp.isfile(description):
-                raise Exception('Failed to open model description file "%s"' % (description))
+                raise DatumaroError('Failed to open model description file "%s"' % (description))

         if not osp.isfile(weights):
             weights = osp.join(model_dir, weights)
             if not osp.isfile(weights):
-                raise Exception('Failed to open model weights file "%s"' % (weights))
+                raise DatumaroError('Failed to open model weights file "%s"' % (weights))

         if not osp.isfile(interpreter):
             interpreter = osp.join(model_dir, interpreter)
             if not osp.isfile(interpreter):
-                raise Exception('Failed to open model interpreter script file "%s"' % (interpreter))
+                raise DatumaroError('Failed to open model interpreter script file "%s"' % (interpreter))

         self._interpreter = InterpreterScript(interpreter)

         self._device = device or "CPU"
         self._output_blobs = output_layers
+        self._compile_model_config = compile_model_config

-        self._ie = IECore()
-        self._network = self._ie.read_network(description, weights)
+        self._core = Core()
+        self._network = self._core.read_model(description, weights)
         self._check_model_support(self._network, self._device)
         self._load_executable_net()

+        if channel_format not in self.ALLOWED_CHANNEL_FORMATS:
+            raise DatumaroError(
+                f"channel_format={channel_format} is not in "
+                f"ALLOWED_CHANNEL_FORMATS={self.ALLOWED_CHANNEL_FORMATS}."
+            )
+        self._channel_format = channel_format
+        self._to_rgb = to_rgb
+
     def _download_file(self, url: str, file_root: str):
         req = urllib.request.Request(url)
         with urllib.request.urlopen(req) as source, open(file_root, "wb") as output:  # nosec B310
@@ -181,7 +196,7 @@ def _download_file(self, url: str, file_root: str):

     def _check_model_support(self, net, device):
         not_supported_layers = set(
-            name for name, dev in self._ie.query_network(net, device).items() if not dev
+            name for name, dev in self._core.query_model(net, device).items() if not dev
         )
         if len(not_supported_layers) != 0:
             log.error(
@@ -190,30 +205,48 @@ def _check_model_support(self, net, device):
             )
             raise NotImplementedError("Some layers are not supported on the device")

-    def _load_executable_net(self, batch_size=1):
+    def _load_executable_net(self, batch_size: int = 1):
         network = self._network

         if self._output_blobs:
-            network.add_outputs(self._output_blobs)
+            network.add_outputs([self._output_blobs])

-        iter_inputs = iter(network.input_info)
+        iter_inputs = iter(network.inputs)
         self._input_blob = next(iter_inputs)

-        # NOTE: handling for the inclusion of `image_info` in OpenVino2019
-        self._require_image_info = "image_info" in network.input_info
-        if self._input_blob == "image_info":
-            self._input_blob = next(iter_inputs)
+        is_dynamic_layout = False
+        try:
+            self._input_layout = self._input_blob.shape
+        except ValueError:
+            # In case the input has a dynamic shape
+            self._input_layout = self._input_blob.partial_shape
+            is_dynamic_layout = True
+
+        if is_dynamic_layout:
+            self._input_layout[0] = batch_size
+            network.reshape({self._input_blob: self._input_layout})
+        else:
+            model_batch_size = self._input_layout[0]
+            if batch_size != model_batch_size:
+                log.warning(
+                    "The input layout of the model is static, so we cannot change "
+                    f"the model batch size ({model_batch_size}) to the requested batch size ({batch_size})! "
+                    f"Setting the batch size to {model_batch_size}."
+                )
+                batch_size = model_batch_size

-        self._input_layout = network.input_info[self._input_blob].input_data.shape
-        self._input_layout[0] = batch_size
-        network.reshape({self._input_blob: self._input_layout})
         self._batch_size = batch_size

-        self._net = self._ie.load_network(network=network, num_requests=1, device_name=self._device)
+        self._net = self._core.compile_model(
+            model=network,
+            device_name=self._device,
+            config=self._compile_model_config,
+        )
+        self._request = self._net.create_infer_request()

     def infer(self, inputs):
         inputs = self.process_inputs(inputs)
-        results = self._net.infer(inputs)
+        results = self._request.infer(inputs)
         if len(results) == 1:
             return next(iter(results.values()))
         else:
@@ -244,22 +277,27 @@ def process_inputs(self, inputs):

         assert inputs.shape[3] == 3, "Expected BGR input, got %s" % (inputs.shape,)

-        n, c, h, w = self._input_layout
+        if self._channel_format == "NCHW":
+            n, c, h, w = self._input_layout
+        elif self._channel_format == "NHWC":
+            n, h, w, c = self._input_layout
+        else:
+            raise DatumaroError(f"Invalid channel_format: {self._channel_format}.")
+
         if inputs.shape[1:3] != (h, w):
             resized_inputs = np.empty((n, h, w, c), dtype=inputs.dtype)
             for inp, resized_input in zip(inputs, resized_inputs):
                 cv2.resize(inp, (w, h), resized_input)
             inputs = resized_inputs
-        inputs = inputs.transpose((0, 3, 1, 2))  # NHWC to NCHW
+
+        if self._channel_format == "NCHW":
+            inputs = inputs.transpose((0, 3, 1, 2))  # NHWC to NCHW
+
+        if self._to_rgb:
+            inputs = inputs[:, :, :, ::-1]  # Convert from BGR to RGB
+
         inputs = self._interpreter.normalize(inputs)
-        inputs = {self._input_blob: inputs}

-        if self._require_image_info:
-            info = np.zeros([1, 3])
-            info[0, 0] = h
-            info[0, 1] = w
-            info[0, 2] = 1.0  # scale
-            inputs["image_info"] = info
+        inputs = {self._input_blob.get_any_name(): inputs}

         return inputs

diff --git a/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py b/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py
index 63fd4938b6..14448ef5b9 100644
--- a/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py
+++ b/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py
@@ -9,15 +9,6 @@


 def normalize(inputs):
-    # https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/googlenet-v4-tf/README.md
-    mean = np.array([127.5] * 3)
-    std = np.array([127.5] * 3)
-
-    normalized_inputs = np.empty_like(inputs, dtype=inputs.dtype)
-    for k, inp in enumerate(inputs):
-        normalized_inputs[k] = (inp - mean[:, None, None]) / std[:, None, None]
-    inputs = normalized_inputs
-
     return inputs


diff --git a/src/datumaro/plugins/openvino_plugin/shift_launcher.py b/src/datumaro/plugins/openvino_plugin/shift_launcher.py
index 6321a34feb..71ad12ffed 100644
--- a/src/datumaro/plugins/openvino_plugin/shift_launcher.py
+++ b/src/datumaro/plugins/openvino_plugin/shift_launcher.py
@@ -19,16 +19,22 @@ def __init__(
         device=None,
     ):
         super().__init__(
-            description, weights, interpreter, model_dir, model_name, output_layers, device
+            description,
+            weights,
+            interpreter,
+            model_dir,
+            model_name,
+            output_layers,
+            device,
+            channel_format="NHWC",
         )
         self._device = device or "cpu"
         self._output_blobs = next(iter(self._net.outputs))
-        self._input_blobs = next(iter(self._net.input_info))

     def infer(self, inputs):
         inputs = self.process_inputs(inputs)
-        features = self._net.infer(inputs)
+        features = self._request.infer(inputs)
         return features[self._output_blobs]

     def launch(self, inputs):
diff --git 
a/src/datumaro/plugins/specs.json b/src/datumaro/plugins/specs.json index 0997ec7434..7b643b0f20 100644 --- a/src/datumaro/plugins/specs.json +++ b/src/datumaro/plugins/specs.json @@ -1 +1 @@ -[{"import_path":"datumaro.plugins.ndr.NDR","plugin_name":"ndr","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.splitter.Split","plugin_name":"split","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.AnnsToLabels","plugin_name":"anns_to_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BboxValuesDecrement","plugin_name":"bbox_values_decrement","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BoxesToMasks","plugin_name":"boxes_to_masks","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Correct","plugin_name":"correct","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.CropCoveredSegments","plugin_name":"crop_covered_segments","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.IdFromImageName","plugin_name":"id_from_image_name","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MapSubsets","plugin_name":"map_subsets","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MasksToPolygons","plugin_name":"masks_to_polygons","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MergeInstanceSegments","plugin_name":"merge_instance_segments","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.PolygonsToMasks","plugin_name":"polygons_to_masks","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectInfos","plugin_name":"project_infos","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectLabels","plugin_name":"project_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RandomSplit","plugin_name":"random_split","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Reindex","plugin_name":"reindex","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ReindexAnnotations","plugin_name":"reindex_annotations","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemapLabels","plugin_name":"remap_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAnnotations","plugin_name":"remove_annotations","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAttributes","plugin_name":"remove_attributes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveItems","plugin_name":"remove_items","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Rename","plugin_name":"rename","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ResizeTransform","plugin_name":"resize","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ShapesToBoxes","plugin_name":"shapes_to_boxes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Sort","plugin_name":"sort","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.tili
ng.tile.Tile","plugin_name":"tile","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.merge_tile.MergeTile","plugin_name":"merge_tile","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.LabelRandomSampler","plugin_name":"label_random_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.RandomSampler","plugin_name":"random_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirBase","plugin_name":"image_dir","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvBase","plugin_name":"mnist_csv","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarBase","plugin_name":"cifar","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeBase","plugin_name":"label_me","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidBase","plugin_name":"camvid","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqBase","plugin_name":"mot_seq","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Base","plugin_name":"vgg_face2","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationBase","plugin_name":"common_semantic_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_csv.VottCsvBase","plugin_name":"vott_csv","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2020.Ade20k2020Base","plugin_name":"ade20k2020","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_json.VottJsonBase","plugin_name":"vott_json","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesBase","plugin_name":"cityscapes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceBase","plugin_name":"wider_face","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats_numpy.BratsNumpyBase","plugin_name":"brats_numpy","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Base","plugin_name":"market1501","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetBase","plugin_name":"imagenet","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionBase","plugin_name":"common_super_resolution","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Base","plugin_name":"nyu_depth_v2","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kinetics.KineticsBase","plugin_name":"kinetics","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsPngExtractor","plugin_name":"mots_png_extractor","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mars.MarsBase","plugin_name":"mars","plugin_type":"DatasetBase","extra_deps":
[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtBase","plugin_name":"imagenet_txt","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoFramesBase","plugin_name":"video_frames","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoKeyframesBase","plugin_name":"video_keyframes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwBase","plugin_name":"lfw","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipBase","plugin_name":"image_zip","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesBase","plugin_name":"open_images","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats.BratsBase","plugin_name":"brats","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistBase","plugin_name":"mnist","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2017.Ade20k2017Base","plugin_name":"ade20k2017","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaBase","plugin_name":"ava","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiBase","plugin_name":"tf_detection_api","plugin_type":"DatasetBase","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.cvat.base.CvatBase","plugin_name":"cvat","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.base.KittiRawBase","plugin_name":"kitti_raw","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecClassificationBase","plugin_name":"mvtec_classification","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecDetectionBase","plugin_name":"mvtec_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecSegmentationBase","plugin_name":"mvtec_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationBase","plugin_name":"icdar_text_localization","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationBase","plugin_name":"icdar_text_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionBase","plugin_name":"icdar_word_recognition","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudBase","plugin_name":"sly_pointcloud","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocActionBase","plugin_name":"voc_action","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocBase","plugin_name":"voc","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocClassificationBase","plugin_name":"voc_classification","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocDetectionBase","plugin_name":"voc_detection","plugin_type":"DatasetBa
se","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocInstanceSegmentationBase","plugin_name":"voc_instance_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocLayoutBase","plugin_name":"voc_layout","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocSegmentationBase","plugin_name":"voc_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.base.KittiDetectionBase","plugin_name":"kitti_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.base.KittiSegmentationBase","plugin_name":"kitti_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonBase","plugin_name":"mpii_json","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_mat.MpiiBase","plugin_name":"mpii","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoCaptionsBase","plugin_name":"coco_captions","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoImageInfoBase","plugin_name":"coco_image_info","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoInstancesBase","plugin_name":"coco_instances","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoLabelsBase","plugin_name":"coco_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoPanopticBase","plugin_name":"coco_panoptic","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoPersonKeypointsBase","plugin_name":"coco_person_keypoints","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoStuffBase","plugin_name":"coco_stuff","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaAlBase","plugin_name":"synthia_al","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaRandBase","plugin_name":"synthia_rand","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaSfBase","plugin_name":"synthia_sf","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.base.ArrowBase","plugin_name":"arrow","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.base.DatumaroBase","plugin_name":"datumaro","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaBase","plugin_name":"align_celeba","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.celeba.CelebaBase","plugin_name":"celeba","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.base.DatumaroBinaryBase","plugin_name":"datumaro_binary","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloLooseBase","plugin_name":"yolo_loose","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloStrictBase","plugin_name":"yolo_s
trict","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloUltralyticsBase","plugin_name":"yolo_ultralytics","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasInstancesBase","plugin_name":"mapillary_vistas_instances","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasPanopticBase","plugin_name":"mapillary_vistas_panoptic","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.base.SegmentAnythingBase","plugin_name":"segment_anything","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirImporter","plugin_name":"image_dir","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvImporter","plugin_name":"mnist_csv","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarImporter","plugin_name":"cifar","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeImporter","plugin_name":"label_me","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidImporter","plugin_name":"camvid","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqImporter","plugin_name":"mot_seq","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Importer","plugin_name":"vgg_face2","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationImporter","plugin_name":"common_semantic_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationWithSubsetDirsImporter","plugin_name":"common_semantic_segmentation_with_subset_dirs","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_csv.VottCsvImporter","plugin_name":"vott_csv","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2020.Ade20k2020Importer","plugin_name":"ade20k2020","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_json.VottJsonImporter","plugin_name":"vott_json","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesImporter","plugin_name":"cityscapes","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceImporter","plugin_name":"wider_face","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats_numpy.BratsNumpyImporter","plugin_name":"brats_numpy","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Importer","plugin_name":"market1501","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetImporter","plugin_name":"imagenet","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsImporter","plugin_name":"imagenet_with_subset_dirs","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionImporter",
"plugin_name":"common_super_resolution","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Importer","plugin_name":"nyu_depth_v2","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kinetics.KineticsImporter","plugin_name":"kinetics","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsImporter","plugin_name":"mots","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mars.MarsImporter","plugin_name":"mars","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtImporter","plugin_name":"imagenet_txt","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoFramesImporter","plugin_name":"video_frames","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoKeyframesImporter","plugin_name":"video_keyframes","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwImporter","plugin_name":"lfw","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipImporter","plugin_name":"image_zip","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesImporter","plugin_name":"open_images","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats.BratsImporter","plugin_name":"brats","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistImporter","plugin_name":"mnist","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2017.Ade20k2017Importer","plugin_name":"ade20k2017","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaImporter","plugin_name":"ava","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiImporter","plugin_name":"tf_detection_api","plugin_type":"Importer","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.cvat.base.CvatImporter","plugin_name":"cvat","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.base.KittiRawImporter","plugin_name":"kitti_raw","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecClassificationImporter","plugin_name":"mvtec_classification","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecDetectionImporter","plugin_name":"mvtec_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecImporter","plugin_name":"mvtec","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecSegmentationImporter","plugin_name":"mvtec_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationImporter","plugin_name":"icdar_text_localization","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationImporter","plugin_name":"icdar_text_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionImporter","plugin_nam
e":"icdar_word_recognition","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudImporter","plugin_name":"sly_pointcloud","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocActionImporter","plugin_name":"voc_action","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocClassificationImporter","plugin_name":"voc_classification","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocDetectionImporter","plugin_name":"voc_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocImporter","plugin_name":"voc","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocInstanceSegmentationImporter","plugin_name":"voc_instance_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocLayoutImporter","plugin_name":"voc_layout","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocSegmentationImporter","plugin_name":"voc_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiDetectionImporter","plugin_name":"kitti_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiImporter","plugin_name":"kitti","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiSegmentationImporter","plugin_name":"kitti_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonImporter","plugin_name":"mpii_json","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_mat.MpiiImporter","plugin_name":"mpii","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoCaptionsImporter","plugin_name":"coco_captions","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoImageInfoImporter","plugin_name":"coco_image_info","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoImporter","plugin_name":"coco","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoInstancesImporter","plugin_name":"coco_instances","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoLabelsImporter","plugin_name":"coco_labels","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoPanopticImporter","plugin_name":"coco_panoptic","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoPersonKeypointsImporter","plugin_name":"coco_person_keypoints","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoRoboflowImporter","plugin_name":"coco_roboflow","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoStuffImporter","plugin_name":"coco_stuff","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaAlImporter","plugin_name":"synthia_al",
"plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaRandImporter","plugin_name":"synthia_rand","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaSfImporter","plugin_name":"synthia_sf","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.importer.ArrowImporter","plugin_name":"arrow","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaImporter","plugin_name":"align_celeba","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.celeba.CelebaImporter","plugin_name":"celeba","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.importer.DatumaroBinaryImporter","plugin_name":"datumaro_binary","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.importer.DatumaroImporter","plugin_name":"datumaro","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.importer.YoloImporter","plugin_name":"yolo","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasImporter","plugin_name":"mapillary_vistas","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasInstancesImporter","plugin_name":"mapillary_vistas_instances","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasPanopticImporter","plugin_name":"mapillary_vistas_panoptic","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.importer.SegmentAnythingImporter","plugin_name":"segment_anything","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.explorer.ExplorerLauncher","plugin_name":"explorer","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.openvino_plugin.launcher.OpenvinoLauncher","plugin_name":"openvino","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.openvino_plugin.shift_launcher.ShiftLauncher","plugin_name":"shift","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.accuracy_checker_plugin.ac_launcher.AcLauncher","plugin_name":"ac","plugin_type":"Launcher","extra_deps":["openvino.tools","tensorflow"]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirExporter","plugin_name":"image_dir","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvExporter","plugin_name":"mnist_csv","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarExporter","plugin_name":"cifar","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeExporter","plugin_name":"label_me","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidExporter","plugin_name":"camvid","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqGtExporter","plugin_name":"mot_seq_gt","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Exporter","plugin_name":"vgg_face2","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_f
ormats.cityscapes.CityscapesExporter","plugin_name":"cityscapes","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceExporter","plugin_name":"wider_face","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Exporter","plugin_name":"market1501","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetExporter","plugin_name":"imagenet","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsExporter","plugin_name":"imagenet_with_subset_dirs","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsPngExporter","plugin_name":"mots_png","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtExporter","plugin_name":"imagenet_txt","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwExporter","plugin_name":"lfw","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipExporter","plugin_name":"image_zip","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesExporter","plugin_name":"open_images","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistExporter","plugin_name":"mnist","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaExporter","plugin_name":"ava","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.exporter.TfDetectionApiExporter","plugin_name":"tf_detection_api","plugin_type":"Exporter","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.cvat.exporter.CvatExporter","plugin_name":"cvat","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.exporter.KittiRawExporter","plugin_name":"kitti_raw","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecClassificationExporter","plugin_name":"mvtec_classification","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecDetectionExporter","plugin_name":"mvtec_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecExporter","plugin_name":"mvtec","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecSegmentationExporter","plugin_name":"mvtec_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarTextLocalizationExporter","plugin_name":"icdar_text_localization","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarTextSegmentationExporter","plugin_name":"icdar_text_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarWordRecognitionExporter","plugin_name":"icdar_word_recognition","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.exporter.SuperviselyPointCloudExporter","plugin_name":"sly_pointcloud","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocActionExporter","plugin_na
me":"voc_action","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocClassificationExporter","plugin_name":"voc_classification","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocDetectionExporter","plugin_name":"voc_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocExporter","plugin_name":"voc","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocInstanceSegmentationExporter","plugin_name":"voc_instance_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocLayoutExporter","plugin_name":"voc_layout","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocSegmentationExporter","plugin_name":"voc_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiDetectionExporter","plugin_name":"kitti_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiExporter","plugin_name":"kitti","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiSegmentationExporter","plugin_name":"kitti_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoCaptionsExporter","plugin_name":"coco_captions","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoExporter","plugin_name":"coco","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoImageInfoExporter","plugin_name":"coco_image_info","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoInstancesExporter","plugin_name":"coco_instances","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoLabelsExporter","plugin_name":"coco_labels","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoPanopticExporter","plugin_name":"coco_panoptic","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoPersonKeypointsExporter","plugin_name":"coco_person_keypoints","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoStuffExporter","plugin_name":"coco_stuff","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.exporter.ArrowExporter","plugin_name":"arrow","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.exporter.DatumaroBinaryExporter","plugin_name":"datumaro_binary","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.exporter.DatumaroExporter","plugin_name":"datumaro","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.exporter.YoloExporter","plugin_name":"yolo","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.exporter.YoloUltralyticsExporter","plugin_name":"yolo_ultralytics","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.exporter.SegmentAnythingExporter","plugin_name":"segment_
anything","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.ndr.NDR","plugin_name":"ndr","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.splitter.Split","plugin_name":"split","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.AnnsToLabels","plugin_name":"anns_to_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BboxValuesDecrement","plugin_name":"bbox_values_decrement","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BoxesToMasks","plugin_name":"boxes_to_masks","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Correct","plugin_name":"correct","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.CropCoveredSegments","plugin_name":"crop_covered_segments","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.IdFromImageName","plugin_name":"id_from_image_name","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MapSubsets","plugin_name":"map_subsets","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MasksToPolygons","plugin_name":"masks_to_polygons","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MergeInstanceSegments","plugin_name":"merge_instance_segments","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.PolygonsToMasks","plugin_name":"polygons_to_masks","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectInfos","plugin_name":"project_infos","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectLabels","plugin_name":"project_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RandomSplit","plugin_name":"random_split","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Reindex","plugin_name":"reindex","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ReindexAnnotations","plugin_name":"reindex_annotations","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemapLabels","plugin_name":"remap_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAnnotations","plugin_name":"remove_annotations","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAttributes","plugin_name":"remove_attributes","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveItems","plugin_name":"remove_items","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Rename","plugin_name":"rename","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ResizeTransform","plugin_name":"resize","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ShapesToBoxes","plugin_name":"shapes_to_boxes","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Sort","plugin_name":"sort","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.tile.Tile","plugin_name":"tile","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.merge_tile.MergeTile","plugin_name":"merge_tile","plugin_type":"Transform"
,"extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.LabelRandomSampler","plugin_name":"label_random_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.RandomSampler","plugin_name":"random_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.validators.ClassificationValidator","plugin_name":"classification","plugin_type":"Validator","extra_deps":[]},{"import_path":"datumaro.plugins.validators.DetectionValidator","plugin_name":"detection","plugin_type":"Validator","extra_deps":[]},{"import_path":"datumaro.plugins.validators.SegmentationValidator","plugin_name":"segmentation","plugin_type":"Validator","extra_deps":[]}] +[{"import_path":"datumaro.plugins.transforms.AnnsToLabels","plugin_name":"anns_to_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BboxValuesDecrement","plugin_name":"bbox_values_decrement","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BoxesToMasks","plugin_name":"boxes_to_masks","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Correct","plugin_name":"correct","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.CropCoveredSegments","plugin_name":"crop_covered_segments","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.IdFromImageName","plugin_name":"id_from_image_name","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MapSubsets","plugin_name":"map_subsets","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MasksToPolygons","plugin_name":"masks_to_polygons","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MergeInstanceSegments","plugin_name":"merge_instance_segments","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.PolygonsToMasks","plugin_name":"polygons_to_masks","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectInfos","plugin_name":"project_infos","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectLabels","plugin_name":"project_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RandomSplit","plugin_name":"random_split","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Reindex","plugin_name":"reindex","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ReindexAnnotations","plugin_name":"reindex_annotations","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemapLabels","plugin_name":"remap_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAnnotations","plugin_name":"remove_annotations","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAttributes","plugin_name":"remove_attributes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveItems","plugin_name":"remove_items","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Rename","plugin_name":"rename","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ResizeTransform","plugin_name":"resize","plugin_type":
"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ShapesToBoxes","plugin_name":"shapes_to_boxes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Sort","plugin_name":"sort","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.splitter.Split","plugin_name":"split","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.ndr.NDR","plugin_name":"ndr","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesBase","plugin_name":"cityscapes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2020.Ade20k2020Base","plugin_name":"ade20k2020","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidBase","plugin_name":"camvid","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirBase","plugin_name":"image_dir","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwBase","plugin_name":"lfw","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_json.VottJsonBase","plugin_name":"vott_json","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionBase","plugin_name":"common_super_resolution","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kinetics.KineticsBase","plugin_name":"kinetics","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtBase","plugin_name":"imagenet_txt","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsPngExtractor","plugin_name":"mots_png_extractor","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2017.Ade20k2017Base","plugin_name":"ade20k2017","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Base","plugin_name":"nyu_depth_v2","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistBase","plugin_name":"mnist","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetBase","plugin_name":"imagenet","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceBase","plugin_name":"wider_face","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats.BratsBase","plugin_name":"brats","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Base","plugin_name":"vgg_face2","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoFramesBase","plugin_name":"video_frames","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoKeyframesBase","plugin_name":"video_keyframes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mars.MarsBase","plugin_name":"mars","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats_numpy.BratsNumpyBase","plugin_name":"brats_numpy","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.I
mageZipBase","plugin_name":"image_zip","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarBase","plugin_name":"cifar","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationBase","plugin_name":"common_semantic_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqBase","plugin_name":"mot_seq","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeBase","plugin_name":"label_me","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvBase","plugin_name":"mnist_csv","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Base","plugin_name":"market1501","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesBase","plugin_name":"open_images","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_csv.VottCsvBase","plugin_name":"vott_csv","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.tile.Tile","plugin_name":"tile","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.merge_tile.MergeTile","plugin_name":"merge_tile","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.LabelRandomSampler","plugin_name":"label_random_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.RandomSampler","plugin_name":"random_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.relevancy_sampler.RelevancySampler","plugin_name":"relevancy_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.celeba.CelebaBase","plugin_name":"celeba","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaBase","plugin_name":"align_celeba","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.base.SegmentAnythingBase","plugin_name":"segment_anything","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudBase","plugin_name":"sly_pointcloud","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationBase","plugin_name":"icdar_text_localization","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationBase","plugin_name":"icdar_text_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionBase","plugin_name":"icdar_word_recognition","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloLooseBase","plugin_name":"yolo_loose","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloStrictBase","plugin_name":"yolo_strict","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloUltralyticsBase","plugin_name":"yolo_ultralytics","plugin_type":"DatasetBase","extra_deps":[]},{"import_pat
h":"datumaro.plugins.data_formats.mpii.mpii_mat.MpiiBase","plugin_name":"mpii","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonBase","plugin_name":"mpii_json","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaBase","plugin_name":"ava","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocActionBase","plugin_name":"voc_action","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocBase","plugin_name":"voc","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocClassificationBase","plugin_name":"voc_classification","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocDetectionBase","plugin_name":"voc_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocInstanceSegmentationBase","plugin_name":"voc_instance_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocLayoutBase","plugin_name":"voc_layout","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocSegmentationBase","plugin_name":"voc_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoCaptionsBase","plugin_name":"coco_captions","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoImageInfoBase","plugin_name":"coco_image_info","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoInstancesBase","plugin_name":"coco_instances","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoLabelsBase","plugin_name":"coco_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoPanopticBase","plugin_name":"coco_panoptic","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoPersonKeypointsBase","plugin_name":"coco_person_keypoints","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoStuffBase","plugin_name":"coco_stuff","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.base.ArrowBase","plugin_name":"arrow","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.base.DatumaroBase","plugin_name":"datumaro","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaAlBase","plugin_name":"synthia_al","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaRandBase","plugin_name":"synthia_rand","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaSfBase","plugin_name":"synthia_sf","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecClassificationBase","plugin_name":"mvtec_classification","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecDetectionBase","plugin_name":"mvtec_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datum
aro.plugins.data_formats.mvtec.base.MvtecSegmentationBase","plugin_name":"mvtec_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cvat.base.CvatBase","plugin_name":"cvat","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.base.KittiRawBase","plugin_name":"kitti_raw","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiBase","plugin_name":"tf_detection_api","plugin_type":"DatasetBase","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.kitti.base.KittiDetectionBase","plugin_name":"kitti_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.base.KittiSegmentationBase","plugin_name":"kitti_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.base.DatumaroBinaryBase","plugin_name":"datumaro_binary","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasInstancesBase","plugin_name":"mapillary_vistas_instances","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasPanopticBase","plugin_name":"mapillary_vistas_panoptic","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesImporter","plugin_name":"cityscapes","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2020.Ade20k2020Importer","plugin_name":"ade20k2020","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidImporter","plugin_name":"camvid","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirImporter","plugin_name":"image_dir","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwImporter","plugin_name":"lfw","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_json.VottJsonImporter","plugin_name":"vott_json","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionImporter","plugin_name":"common_super_resolution","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kinetics.KineticsImporter","plugin_name":"kinetics","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtImporter","plugin_name":"imagenet_txt","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsImporter","plugin_name":"mots","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2017.Ade20k2017Importer","plugin_name":"ade20k2017","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Importer","plugin_name":"nyu_depth_v2","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistImporter","plugin_name":"mnist","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetImporter","plugin_name":"imagenet","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsImporter","plugin_name":"imagen
et_with_subset_dirs","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceImporter","plugin_name":"wider_face","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats.BratsImporter","plugin_name":"brats","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Importer","plugin_name":"vgg_face2","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoFramesImporter","plugin_name":"video_frames","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoKeyframesImporter","plugin_name":"video_keyframes","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mars.MarsImporter","plugin_name":"mars","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats_numpy.BratsNumpyImporter","plugin_name":"brats_numpy","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipImporter","plugin_name":"image_zip","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarImporter","plugin_name":"cifar","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationImporter","plugin_name":"common_semantic_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationWithSubsetDirsImporter","plugin_name":"common_semantic_segmentation_with_subset_dirs","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqImporter","plugin_name":"mot_seq","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeImporter","plugin_name":"label_me","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvImporter","plugin_name":"mnist_csv","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Importer","plugin_name":"market1501","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesImporter","plugin_name":"open_images","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_csv.VottCsvImporter","plugin_name":"vott_csv","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.celeba.CelebaImporter","plugin_name":"celeba","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaImporter","plugin_name":"align_celeba","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.importer.SegmentAnythingImporter","plugin_name":"segment_anything","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudImporter","plugin_name":"sly_pointcloud","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationImporter","plugin_name":"icdar_text_localization","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationImporter","plugin_name":"icdar_text_segmentation","plugin_type":"Import
er","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionImporter","plugin_name":"icdar_word_recognition","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.importer.YoloImporter","plugin_name":"yolo","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_mat.MpiiImporter","plugin_name":"mpii","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonImporter","plugin_name":"mpii_json","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaImporter","plugin_name":"ava","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocActionImporter","plugin_name":"voc_action","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocClassificationImporter","plugin_name":"voc_classification","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocDetectionImporter","plugin_name":"voc_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocImporter","plugin_name":"voc","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocInstanceSegmentationImporter","plugin_name":"voc_instance_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocLayoutImporter","plugin_name":"voc_layout","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocSegmentationImporter","plugin_name":"voc_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoCaptionsImporter","plugin_name":"coco_captions","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoImageInfoImporter","plugin_name":"coco_image_info","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoImporter","plugin_name":"coco","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoInstancesImporter","plugin_name":"coco_instances","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoLabelsImporter","plugin_name":"coco_labels","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoPanopticImporter","plugin_name":"coco_panoptic","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoPersonKeypointsImporter","plugin_name":"coco_person_keypoints","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoRoboflowImporter","plugin_name":"coco_roboflow","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoStuffImporter","plugin_name":"coco_stuff","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.importer.ArrowImporter","plugin_name":"arrow","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaAlImporter","plugin_name":"synthia_al","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.Synt
hiaRandImporter","plugin_name":"synthia_rand","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaSfImporter","plugin_name":"synthia_sf","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecClassificationImporter","plugin_name":"mvtec_classification","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecDetectionImporter","plugin_name":"mvtec_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecImporter","plugin_name":"mvtec","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecSegmentationImporter","plugin_name":"mvtec_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cvat.base.CvatImporter","plugin_name":"cvat","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.base.KittiRawImporter","plugin_name":"kitti_raw","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiImporter","plugin_name":"tf_detection_api","plugin_type":"Importer","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.datumaro.importer.DatumaroImporter","plugin_name":"datumaro","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiDetectionImporter","plugin_name":"kitti_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiImporter","plugin_name":"kitti","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiSegmentationImporter","plugin_name":"kitti_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.importer.DatumaroBinaryImporter","plugin_name":"datumaro_binary","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasImporter","plugin_name":"mapillary_vistas","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasInstancesImporter","plugin_name":"mapillary_vistas_instances","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasPanopticImporter","plugin_name":"mapillary_vistas_panoptic","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.explorer.ExplorerLauncher","plugin_name":"explorer","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.openvino_plugin.launcher.OpenvinoLauncher","plugin_name":"openvino","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.accuracy_checker_plugin.ac_launcher.AcLauncher","plugin_name":"ac","plugin_type":"Launcher","extra_deps":["openvino.tools","tensorflow"]},{"import_path":"datumaro.plugins.openvino_plugin.shift_launcher.ShiftLauncher","plugin_name":"shift","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesExporter","plugin_name":"cityscapes","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidExporter","plugin_name":"camvid","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.p
lugins.data_formats.image_dir.ImageDirExporter","plugin_name":"image_dir","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwExporter","plugin_name":"lfw","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtExporter","plugin_name":"imagenet_txt","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsPngExporter","plugin_name":"mots_png","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistExporter","plugin_name":"mnist","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetExporter","plugin_name":"imagenet","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsExporter","plugin_name":"imagenet_with_subset_dirs","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceExporter","plugin_name":"wider_face","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Exporter","plugin_name":"vgg_face2","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipExporter","plugin_name":"image_zip","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarExporter","plugin_name":"cifar","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqGtExporter","plugin_name":"mot_seq_gt","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeExporter","plugin_name":"label_me","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvExporter","plugin_name":"mnist_csv","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Exporter","plugin_name":"market1501","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesExporter","plugin_name":"open_images","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.exporter.SegmentAnythingExporter","plugin_name":"segment_anything","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.exporter.SuperviselyPointCloudExporter","plugin_name":"sly_pointcloud","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarTextLocalizationExporter","plugin_name":"icdar_text_localization","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarTextSegmentationExporter","plugin_name":"icdar_text_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarWordRecognitionExporter","plugin_name":"icdar_word_recognition","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.exporter.YoloExporter","plugin_name":"yolo","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.exporter.YoloUltralyticsExporter","plugin_name":"yolo_ultralytics","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaExporter","plugin_name":"ava","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro
.plugins.data_formats.voc.exporter.VocActionExporter","plugin_name":"voc_action","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocClassificationExporter","plugin_name":"voc_classification","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocDetectionExporter","plugin_name":"voc_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocExporter","plugin_name":"voc","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocInstanceSegmentationExporter","plugin_name":"voc_instance_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocLayoutExporter","plugin_name":"voc_layout","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocSegmentationExporter","plugin_name":"voc_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoCaptionsExporter","plugin_name":"coco_captions","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoExporter","plugin_name":"coco","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoImageInfoExporter","plugin_name":"coco_image_info","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoInstancesExporter","plugin_name":"coco_instances","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoLabelsExporter","plugin_name":"coco_labels","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoPanopticExporter","plugin_name":"coco_panoptic","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoPersonKeypointsExporter","plugin_name":"coco_person_keypoints","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoStuffExporter","plugin_name":"coco_stuff","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.exporter.ArrowExporter","plugin_name":"arrow","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecClassificationExporter","plugin_name":"mvtec_classification","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecDetectionExporter","plugin_name":"mvtec_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecExporter","plugin_name":"mvtec","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecSegmentationExporter","plugin_name":"mvtec_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cvat.exporter.CvatExporter","plugin_name":"cvat","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.exporter.KittiRawExporter","plugin_name":"kitti_raw","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.exporter.TfDetectionApiExporter","plugin_name":"tf_detection_api","plugin_type":"Exporter","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugin
s.data_formats.datumaro.exporter.DatumaroExporter","plugin_name":"datumaro","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiDetectionExporter","plugin_name":"kitti_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiExporter","plugin_name":"kitti","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiSegmentationExporter","plugin_name":"kitti_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.exporter.DatumaroBinaryExporter","plugin_name":"datumaro_binary","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.AnnsToLabels","plugin_name":"anns_to_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BboxValuesDecrement","plugin_name":"bbox_values_decrement","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BoxesToMasks","plugin_name":"boxes_to_masks","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Correct","plugin_name":"correct","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.CropCoveredSegments","plugin_name":"crop_covered_segments","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.IdFromImageName","plugin_name":"id_from_image_name","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MapSubsets","plugin_name":"map_subsets","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MasksToPolygons","plugin_name":"masks_to_polygons","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MergeInstanceSegments","plugin_name":"merge_instance_segments","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.PolygonsToMasks","plugin_name":"polygons_to_masks","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectInfos","plugin_name":"project_infos","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectLabels","plugin_name":"project_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RandomSplit","plugin_name":"random_split","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Reindex","plugin_name":"reindex","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ReindexAnnotations","plugin_name":"reindex_annotations","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemapLabels","plugin_name":"remap_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAnnotations","plugin_name":"remove_annotations","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAttributes","plugin_name":"remove_attributes","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveItems","plugin_name":"remove_items","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Rename","plugin_name":"rename","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ResizeTransform","plugin_name":"resize","plugin_type":"Transform","extra_deps":[
]},{"import_path":"datumaro.plugins.transforms.ShapesToBoxes","plugin_name":"shapes_to_boxes","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Sort","plugin_name":"sort","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.splitter.Split","plugin_name":"split","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.ndr.NDR","plugin_name":"ndr","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.tile.Tile","plugin_name":"tile","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.merge_tile.MergeTile","plugin_name":"merge_tile","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.LabelRandomSampler","plugin_name":"label_random_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.RandomSampler","plugin_name":"random_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.relevancy_sampler.RelevancySampler","plugin_name":"relevancy_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.validators.ClassificationValidator","plugin_name":"classification","plugin_type":"Validator","extra_deps":[]},{"import_path":"datumaro.plugins.validators.DetectionValidator","plugin_name":"detection","plugin_type":"Validator","extra_deps":[]},{"import_path":"datumaro.plugins.validators.SegmentationValidator","plugin_name":"segmentation","plugin_type":"Validator","extra_deps":[]}] diff --git a/tests/assets/rise/model_interp.py b/tests/assets/rise/model_interp.py index 50018f6e9e..91e26ede50 100644 --- a/tests/assets/rise/model_interp.py +++ b/tests/assets/rise/model_interp.py @@ -7,7 +7,7 @@ def normalize(inputs): - pass + return inputs def process_outputs(inputs, outputs): diff --git a/tests/unit/test_shift_analyzer.py b/tests/unit/test_shift_analyzer.py index 40bb299b4d..ba1c26b213 100644 --- a/tests/unit/test_shift_analyzer.py +++ b/tests/unit/test_shift_analyzer.py @@ -69,8 +69,8 @@ def fxt_dataset_different(): [ ("fxt_dataset_ideal", "fid", 0), ("fxt_dataset_ideal", "emd", 0), - ("fxt_dataset_different", "fid", 0.1005), - ("fxt_dataset_different", "emd", 0.0031), + ("fxt_dataset_different", "fid", 0.0302), + ("fxt_dataset_different", "emd", 0.0017), ], ) def test_covariate_shift( From 2429d4684a6cd3a47927c7f63a0a59cbea3fed92 Mon Sep 17 00:00:00 2001 From: "Kim, Vinnam" Date: Tue, 13 Jun 2023 12:00:13 +0900 Subject: [PATCH 2/6] Make specs.json more interpretable Signed-off-by: Kim, Vinnam --- src/datumaro/plugins/specs.json | 1644 ++++++++++++++++++++++++++++++- src/datumaro/plugins/specs.py | 2 +- tests/unit/test_environment.py | 26 +- 3 files changed, 1662 insertions(+), 10 deletions(-) diff --git a/src/datumaro/plugins/specs.json b/src/datumaro/plugins/specs.json index 0997ec7434..0088746969 100644 --- a/src/datumaro/plugins/specs.json +++ b/src/datumaro/plugins/specs.json @@ -1 +1,1643 @@ 
-[{"import_path":"datumaro.plugins.ndr.NDR","plugin_name":"ndr","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.splitter.Split","plugin_name":"split","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.AnnsToLabels","plugin_name":"anns_to_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BboxValuesDecrement","plugin_name":"bbox_values_decrement","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BoxesToMasks","plugin_name":"boxes_to_masks","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Correct","plugin_name":"correct","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.CropCoveredSegments","plugin_name":"crop_covered_segments","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.IdFromImageName","plugin_name":"id_from_image_name","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MapSubsets","plugin_name":"map_subsets","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MasksToPolygons","plugin_name":"masks_to_polygons","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MergeInstanceSegments","plugin_name":"merge_instance_segments","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.PolygonsToMasks","plugin_name":"polygons_to_masks","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectInfos","plugin_name":"project_infos","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectLabels","plugin_name":"project_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RandomSplit","plugin_name":"random_split","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Reindex","plugin_name":"reindex","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ReindexAnnotations","plugin_name":"reindex_annotations","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemapLabels","plugin_name":"remap_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAnnotations","plugin_name":"remove_annotations","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAttributes","plugin_name":"remove_attributes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveItems","plugin_name":"remove_items","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Rename","plugin_name":"rename","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ResizeTransform","plugin_name":"resize","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ShapesToBoxes","plugin_name":"shapes_to_boxes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Sort","plugin_name":"sort","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.tile.Tile","plugin_name":"tile","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.merge_tile.MergeTile","plugin_name":"merge_tile","plugin_type":"DatasetB
ase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.LabelRandomSampler","plugin_name":"label_random_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.RandomSampler","plugin_name":"random_sampler","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirBase","plugin_name":"image_dir","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvBase","plugin_name":"mnist_csv","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarBase","plugin_name":"cifar","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeBase","plugin_name":"label_me","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidBase","plugin_name":"camvid","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqBase","plugin_name":"mot_seq","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Base","plugin_name":"vgg_face2","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationBase","plugin_name":"common_semantic_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_csv.VottCsvBase","plugin_name":"vott_csv","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2020.Ade20k2020Base","plugin_name":"ade20k2020","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_json.VottJsonBase","plugin_name":"vott_json","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesBase","plugin_name":"cityscapes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceBase","plugin_name":"wider_face","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats_numpy.BratsNumpyBase","plugin_name":"brats_numpy","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Base","plugin_name":"market1501","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetBase","plugin_name":"imagenet","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionBase","plugin_name":"common_super_resolution","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Base","plugin_name":"nyu_depth_v2","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kinetics.KineticsBase","plugin_name":"kinetics","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsPngExtractor","plugin_name":"mots_png_extractor","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mars.MarsBase","plugin_name":"mars","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtBase","plugin_name":"imagenet_txt","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_
formats.video.VideoFramesBase","plugin_name":"video_frames","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoKeyframesBase","plugin_name":"video_keyframes","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwBase","plugin_name":"lfw","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipBase","plugin_name":"image_zip","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesBase","plugin_name":"open_images","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats.BratsBase","plugin_name":"brats","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistBase","plugin_name":"mnist","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2017.Ade20k2017Base","plugin_name":"ade20k2017","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaBase","plugin_name":"ava","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiBase","plugin_name":"tf_detection_api","plugin_type":"DatasetBase","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.cvat.base.CvatBase","plugin_name":"cvat","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.base.KittiRawBase","plugin_name":"kitti_raw","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecClassificationBase","plugin_name":"mvtec_classification","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecDetectionBase","plugin_name":"mvtec_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.base.MvtecSegmentationBase","plugin_name":"mvtec_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationBase","plugin_name":"icdar_text_localization","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationBase","plugin_name":"icdar_text_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionBase","plugin_name":"icdar_word_recognition","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudBase","plugin_name":"sly_pointcloud","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocActionBase","plugin_name":"voc_action","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocBase","plugin_name":"voc","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocClassificationBase","plugin_name":"voc_classification","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocDetectionBase","plugin_name":"voc_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocInstanceSegmentationBase","plugin_name":"voc_instance_segmentation","plugin_type":"DatasetBase","extra_deps":[]},
{"import_path":"datumaro.plugins.data_formats.voc.base.VocLayoutBase","plugin_name":"voc_layout","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.base.VocSegmentationBase","plugin_name":"voc_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.base.KittiDetectionBase","plugin_name":"kitti_detection","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.base.KittiSegmentationBase","plugin_name":"kitti_segmentation","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonBase","plugin_name":"mpii_json","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_mat.MpiiBase","plugin_name":"mpii","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoCaptionsBase","plugin_name":"coco_captions","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoImageInfoBase","plugin_name":"coco_image_info","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoInstancesBase","plugin_name":"coco_instances","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoLabelsBase","plugin_name":"coco_labels","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoPanopticBase","plugin_name":"coco_panoptic","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoPersonKeypointsBase","plugin_name":"coco_person_keypoints","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.base.CocoStuffBase","plugin_name":"coco_stuff","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaAlBase","plugin_name":"synthia_al","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaRandBase","plugin_name":"synthia_rand","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.base.SynthiaSfBase","plugin_name":"synthia_sf","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.base.ArrowBase","plugin_name":"arrow","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.base.DatumaroBase","plugin_name":"datumaro","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaBase","plugin_name":"align_celeba","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.celeba.CelebaBase","plugin_name":"celeba","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.base.DatumaroBinaryBase","plugin_name":"datumaro_binary","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloLooseBase","plugin_name":"yolo_loose","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloStrictBase","plugin_name":"yolo_strict","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.base.YoloUltralyticsBase","plugin_name":"yolo_ultralytics","plugin_type":"DatasetBase","e
xtra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasInstancesBase","plugin_name":"mapillary_vistas_instances","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasPanopticBase","plugin_name":"mapillary_vistas_panoptic","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.base.SegmentAnythingBase","plugin_name":"segment_anything","plugin_type":"DatasetBase","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirImporter","plugin_name":"image_dir","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvImporter","plugin_name":"mnist_csv","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarImporter","plugin_name":"cifar","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeImporter","plugin_name":"label_me","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidImporter","plugin_name":"camvid","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqImporter","plugin_name":"mot_seq","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Importer","plugin_name":"vgg_face2","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationImporter","plugin_name":"common_semantic_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationWithSubsetDirsImporter","plugin_name":"common_semantic_segmentation_with_subset_dirs","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_csv.VottCsvImporter","plugin_name":"vott_csv","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2020.Ade20k2020Importer","plugin_name":"ade20k2020","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vott_json.VottJsonImporter","plugin_name":"vott_json","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesImporter","plugin_name":"cityscapes","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceImporter","plugin_name":"wider_face","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats_numpy.BratsNumpyImporter","plugin_name":"brats_numpy","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Importer","plugin_name":"market1501","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetImporter","plugin_name":"imagenet","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsImporter","plugin_name":"imagenet_with_subset_dirs","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionImporter","plugin_name":"common_super_resolution","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Importer","plugin_name":"nyu_depth_v2","p
lugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kinetics.KineticsImporter","plugin_name":"kinetics","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsImporter","plugin_name":"mots","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mars.MarsImporter","plugin_name":"mars","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtImporter","plugin_name":"imagenet_txt","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoFramesImporter","plugin_name":"video_frames","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.video.VideoKeyframesImporter","plugin_name":"video_keyframes","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwImporter","plugin_name":"lfw","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipImporter","plugin_name":"image_zip","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesImporter","plugin_name":"open_images","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.brats.BratsImporter","plugin_name":"brats","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistImporter","plugin_name":"mnist","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ade20k2017.Ade20k2017Importer","plugin_name":"ade20k2017","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaImporter","plugin_name":"ava","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiImporter","plugin_name":"tf_detection_api","plugin_type":"Importer","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.cvat.base.CvatImporter","plugin_name":"cvat","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.base.KittiRawImporter","plugin_name":"kitti_raw","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecClassificationImporter","plugin_name":"mvtec_classification","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecDetectionImporter","plugin_name":"mvtec_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecImporter","plugin_name":"mvtec","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.importer.MvtecSegmentationImporter","plugin_name":"mvtec_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationImporter","plugin_name":"icdar_text_localization","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationImporter","plugin_name":"icdar_text_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionImporter","plugin_name":"icdar_word_recognition","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudImporter","plugin_name":"sly_pointc
loud","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocActionImporter","plugin_name":"voc_action","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocClassificationImporter","plugin_name":"voc_classification","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocDetectionImporter","plugin_name":"voc_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocImporter","plugin_name":"voc","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocInstanceSegmentationImporter","plugin_name":"voc_instance_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocLayoutImporter","plugin_name":"voc_layout","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.importer.VocSegmentationImporter","plugin_name":"voc_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiDetectionImporter","plugin_name":"kitti_detection","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiImporter","plugin_name":"kitti","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.importer.KittiSegmentationImporter","plugin_name":"kitti_segmentation","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonImporter","plugin_name":"mpii_json","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mpii.mpii_mat.MpiiImporter","plugin_name":"mpii","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoCaptionsImporter","plugin_name":"coco_captions","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoImageInfoImporter","plugin_name":"coco_image_info","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoImporter","plugin_name":"coco","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoInstancesImporter","plugin_name":"coco_instances","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoLabelsImporter","plugin_name":"coco_labels","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoPanopticImporter","plugin_name":"coco_panoptic","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoPersonKeypointsImporter","plugin_name":"coco_person_keypoints","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoRoboflowImporter","plugin_name":"coco_roboflow","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.importer.CocoStuffImporter","plugin_name":"coco_stuff","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaAlImporter","plugin_name":"synthia_al","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaRandImporter","plugin_name":"synthia_rand","plugin_type":"Importer","extra_deps"
:[]},{"import_path":"datumaro.plugins.data_formats.synthia.importer.SynthiaSfImporter","plugin_name":"synthia_sf","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.importer.ArrowImporter","plugin_name":"arrow","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaImporter","plugin_name":"align_celeba","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.celeba.celeba.CelebaImporter","plugin_name":"celeba","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.importer.DatumaroBinaryImporter","plugin_name":"datumaro_binary","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.importer.DatumaroImporter","plugin_name":"datumaro","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.importer.YoloImporter","plugin_name":"yolo","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasImporter","plugin_name":"mapillary_vistas","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasInstancesImporter","plugin_name":"mapillary_vistas_instances","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasPanopticImporter","plugin_name":"mapillary_vistas_panoptic","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.importer.SegmentAnythingImporter","plugin_name":"segment_anything","plugin_type":"Importer","extra_deps":[]},{"import_path":"datumaro.plugins.explorer.ExplorerLauncher","plugin_name":"explorer","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.openvino_plugin.launcher.OpenvinoLauncher","plugin_name":"openvino","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.openvino_plugin.shift_launcher.ShiftLauncher","plugin_name":"shift","plugin_type":"Launcher","extra_deps":[]},{"import_path":"datumaro.plugins.accuracy_checker_plugin.ac_launcher.AcLauncher","plugin_name":"ac","plugin_type":"Launcher","extra_deps":["openvino.tools","tensorflow"]},{"import_path":"datumaro.plugins.data_formats.image_dir.ImageDirExporter","plugin_name":"image_dir","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist_csv.MnistCsvExporter","plugin_name":"mnist_csv","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cifar.CifarExporter","plugin_name":"cifar","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.labelme.LabelMeExporter","plugin_name":"label_me","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.camvid.CamvidExporter","plugin_name":"camvid","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mot.MotSeqGtExporter","plugin_name":"mot_seq_gt","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.vgg_face2.VggFace2Exporter","plugin_name":"vgg_face2","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.cityscapes.CityscapesExporter","plugin_name":"cityscapes","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.widerface.WiderFaceExporter","plugin_na
me":"wider_face","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.market1501.Market1501Exporter","plugin_name":"market1501","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetExporter","plugin_name":"imagenet","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsExporter","plugin_name":"imagenet_with_subset_dirs","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mots.MotsPngExporter","plugin_name":"mots_png","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtExporter","plugin_name":"imagenet_txt","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.lfw.LfwExporter","plugin_name":"lfw","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.image_zip.ImageZipExporter","plugin_name":"image_zip","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.open_images.OpenImagesExporter","plugin_name":"open_images","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mnist.MnistExporter","plugin_name":"mnist","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.ava.ava.AvaExporter","plugin_name":"ava","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.tf_detection_api.exporter.TfDetectionApiExporter","plugin_name":"tf_detection_api","plugin_type":"Exporter","extra_deps":["tensorflow"]},{"import_path":"datumaro.plugins.data_formats.cvat.exporter.CvatExporter","plugin_name":"cvat","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti_raw.exporter.KittiRawExporter","plugin_name":"kitti_raw","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecClassificationExporter","plugin_name":"mvtec_classification","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecDetectionExporter","plugin_name":"mvtec_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecExporter","plugin_name":"mvtec","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.mvtec.exporter.MvtecSegmentationExporter","plugin_name":"mvtec_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarTextLocalizationExporter","plugin_name":"icdar_text_localization","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarTextSegmentationExporter","plugin_name":"icdar_text_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.icdar.exporter.IcdarWordRecognitionExporter","plugin_name":"icdar_word_recognition","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.sly_pointcloud.exporter.SuperviselyPointCloudExporter","plugin_name":"sly_pointcloud","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocActionExporter","plugin_name":"voc_action","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocClassificationExporter","plugin_name":"voc_classification","plugin_type
":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocDetectionExporter","plugin_name":"voc_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocExporter","plugin_name":"voc","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocInstanceSegmentationExporter","plugin_name":"voc_instance_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocLayoutExporter","plugin_name":"voc_layout","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.voc.exporter.VocSegmentationExporter","plugin_name":"voc_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiDetectionExporter","plugin_name":"kitti_detection","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiExporter","plugin_name":"kitti","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.kitti.exporter.KittiSegmentationExporter","plugin_name":"kitti_segmentation","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoCaptionsExporter","plugin_name":"coco_captions","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoExporter","plugin_name":"coco","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoImageInfoExporter","plugin_name":"coco_image_info","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoInstancesExporter","plugin_name":"coco_instances","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoLabelsExporter","plugin_name":"coco_labels","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoPanopticExporter","plugin_name":"coco_panoptic","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoPersonKeypointsExporter","plugin_name":"coco_person_keypoints","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.coco.exporter.CocoStuffExporter","plugin_name":"coco_stuff","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.arrow.exporter.ArrowExporter","plugin_name":"arrow","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro_binary.exporter.DatumaroBinaryExporter","plugin_name":"datumaro_binary","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.datumaro.exporter.DatumaroExporter","plugin_name":"datumaro","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.exporter.YoloExporter","plugin_name":"yolo","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.yolo.exporter.YoloUltralyticsExporter","plugin_name":"yolo_ultralytics","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.data_formats.segment_anything.exporter.SegmentAnythingExporter","plugin_name":"segment_anything","plugin_type":"Exporter","extra_deps":[]},{"import_path":"datumaro.plugins.ndr.NDR","plugin_name":"ndr","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.sp
litter.Split","plugin_name":"split","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.AnnsToLabels","plugin_name":"anns_to_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BboxValuesDecrement","plugin_name":"bbox_values_decrement","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.BoxesToMasks","plugin_name":"boxes_to_masks","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Correct","plugin_name":"correct","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.CropCoveredSegments","plugin_name":"crop_covered_segments","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.IdFromImageName","plugin_name":"id_from_image_name","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MapSubsets","plugin_name":"map_subsets","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MasksToPolygons","plugin_name":"masks_to_polygons","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.MergeInstanceSegments","plugin_name":"merge_instance_segments","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.PolygonsToMasks","plugin_name":"polygons_to_masks","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectInfos","plugin_name":"project_infos","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ProjectLabels","plugin_name":"project_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RandomSplit","plugin_name":"random_split","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Reindex","plugin_name":"reindex","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ReindexAnnotations","plugin_name":"reindex_annotations","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemapLabels","plugin_name":"remap_labels","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAnnotations","plugin_name":"remove_annotations","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveAttributes","plugin_name":"remove_attributes","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.RemoveItems","plugin_name":"remove_items","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Rename","plugin_name":"rename","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ResizeTransform","plugin_name":"resize","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.ShapesToBoxes","plugin_name":"shapes_to_boxes","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.transforms.Sort","plugin_name":"sort","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.tile.Tile","plugin_name":"tile","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.tiling.merge_tile.MergeTile","plugin_name":"merge_tile","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.sampler.random_sampler.LabelRandomSampler","plugin_name":"label_random_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"da
tumaro.plugins.sampler.random_sampler.RandomSampler","plugin_name":"random_sampler","plugin_type":"Transform","extra_deps":[]},{"import_path":"datumaro.plugins.validators.ClassificationValidator","plugin_name":"classification","plugin_type":"Validator","extra_deps":[]},{"import_path":"datumaro.plugins.validators.DetectionValidator","plugin_name":"detection","plugin_type":"Validator","extra_deps":[]},{"import_path":"datumaro.plugins.validators.SegmentationValidator","plugin_name":"segmentation","plugin_type":"Validator","extra_deps":[]}] +[ + { + "import_path": "datumaro.plugins.transforms.AnnsToLabels", + "plugin_name": "anns_to_labels", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.BboxValuesDecrement", + "plugin_name": "bbox_values_decrement", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.BoxesToMasks", + "plugin_name": "boxes_to_masks", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Correct", + "plugin_name": "correct", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.CropCoveredSegments", + "plugin_name": "crop_covered_segments", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.IdFromImageName", + "plugin_name": "id_from_image_name", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.MapSubsets", + "plugin_name": "map_subsets", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.MasksToPolygons", + "plugin_name": "masks_to_polygons", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.MergeInstanceSegments", + "plugin_name": "merge_instance_segments", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.PolygonsToMasks", + "plugin_name": "polygons_to_masks", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ProjectInfos", + "plugin_name": "project_infos", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ProjectLabels", + "plugin_name": "project_labels", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RandomSplit", + "plugin_name": "random_split", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Reindex", + "plugin_name": "reindex", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ReindexAnnotations", + "plugin_name": "reindex_annotations", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemapLabels", + "plugin_name": "remap_labels", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemoveAnnotations", + "plugin_name": "remove_annotations", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemoveAttributes", + "plugin_name": "remove_attributes", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemoveItems", + "plugin_name": "remove_items", + "plugin_type": "DatasetBase", + 
"extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Rename", + "plugin_name": "rename", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ResizeTransform", + "plugin_name": "resize", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ShapesToBoxes", + "plugin_name": "shapes_to_boxes", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Sort", + "plugin_name": "sort", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.splitter.Split", + "plugin_name": "split", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.ndr.NDR", + "plugin_name": "ndr", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cityscapes.CityscapesBase", + "plugin_name": "cityscapes", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ade20k2020.Ade20k2020Base", + "plugin_name": "ade20k2020", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.camvid.CamvidBase", + "plugin_name": "camvid", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.image_dir.ImageDirBase", + "plugin_name": "image_dir", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.lfw.LfwBase", + "plugin_name": "lfw", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.vott_json.VottJsonBase", + "plugin_name": "vott_json", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionBase", + "plugin_name": "common_super_resolution", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kinetics.KineticsBase", + "plugin_name": "kinetics", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtBase", + "plugin_name": "imagenet_txt", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mots.MotsPngExtractor", + "plugin_name": "mots_png_extractor", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ade20k2017.Ade20k2017Base", + "plugin_name": "ade20k2017", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Base", + "plugin_name": "nyu_depth_v2", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mnist.MnistBase", + "plugin_name": "mnist", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet.ImagenetBase", + "plugin_name": "imagenet", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.widerface.WiderFaceBase", + "plugin_name": "wider_face", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.brats.BratsBase", + "plugin_name": "brats", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": 
"datumaro.plugins.data_formats.vgg_face2.VggFace2Base", + "plugin_name": "vgg_face2", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.video.VideoFramesBase", + "plugin_name": "video_frames", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.video.VideoKeyframesBase", + "plugin_name": "video_keyframes", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mars.MarsBase", + "plugin_name": "mars", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.brats_numpy.BratsNumpyBase", + "plugin_name": "brats_numpy", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.image_zip.ImageZipBase", + "plugin_name": "image_zip", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cifar.CifarBase", + "plugin_name": "cifar", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationBase", + "plugin_name": "common_semantic_segmentation", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mot.MotSeqBase", + "plugin_name": "mot_seq", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.labelme.LabelMeBase", + "plugin_name": "label_me", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mnist_csv.MnistCsvBase", + "plugin_name": "mnist_csv", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.market1501.Market1501Base", + "plugin_name": "market1501", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.open_images.OpenImagesBase", + "plugin_name": "open_images", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.vott_csv.VottCsvBase", + "plugin_name": "vott_csv", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.tiling.tile.Tile", + "plugin_name": "tile", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.tiling.merge_tile.MergeTile", + "plugin_name": "merge_tile", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.sampler.random_sampler.LabelRandomSampler", + "plugin_name": "label_random_sampler", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.sampler.random_sampler.RandomSampler", + "plugin_name": "random_sampler", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.sampler.relevancy_sampler.RelevancySampler", + "plugin_name": "relevancy_sampler", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.celeba.celeba.CelebaBase", + "plugin_name": "celeba", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaBase", + "plugin_name": "align_celeba", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": 
"datumaro.plugins.data_formats.segment_anything.base.SegmentAnythingBase", + "plugin_name": "segment_anything", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudBase", + "plugin_name": "sly_pointcloud", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationBase", + "plugin_name": "icdar_text_localization", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationBase", + "plugin_name": "icdar_text_segmentation", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionBase", + "plugin_name": "icdar_word_recognition", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.yolo.base.YoloLooseBase", + "plugin_name": "yolo_loose", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.yolo.base.YoloStrictBase", + "plugin_name": "yolo_strict", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.yolo.base.YoloUltralyticsBase", + "plugin_name": "yolo_ultralytics", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mpii.mpii_mat.MpiiBase", + "plugin_name": "mpii", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonBase", + "plugin_name": "mpii_json", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ava.ava.AvaBase", + "plugin_name": "ava", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocActionBase", + "plugin_name": "voc_action", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocBase", + "plugin_name": "voc", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocClassificationBase", + "plugin_name": "voc_classification", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocDetectionBase", + "plugin_name": "voc_detection", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocInstanceSegmentationBase", + "plugin_name": "voc_instance_segmentation", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocLayoutBase", + "plugin_name": "voc_layout", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.base.VocSegmentationBase", + "plugin_name": "voc_segmentation", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoCaptionsBase", + "plugin_name": "coco_captions", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoImageInfoBase", + "plugin_name": "coco_image_info", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoInstancesBase", + 
"plugin_name": "coco_instances", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoLabelsBase", + "plugin_name": "coco_labels", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoPanopticBase", + "plugin_name": "coco_panoptic", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoPersonKeypointsBase", + "plugin_name": "coco_person_keypoints", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.base.CocoStuffBase", + "plugin_name": "coco_stuff", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.arrow.base.ArrowBase", + "plugin_name": "arrow", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.datumaro.base.DatumaroBase", + "plugin_name": "datumaro", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.synthia.base.SynthiaAlBase", + "plugin_name": "synthia_al", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.synthia.base.SynthiaRandBase", + "plugin_name": "synthia_rand", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.synthia.base.SynthiaSfBase", + "plugin_name": "synthia_sf", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.base.MvtecClassificationBase", + "plugin_name": "mvtec_classification", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.base.MvtecDetectionBase", + "plugin_name": "mvtec_detection", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.base.MvtecSegmentationBase", + "plugin_name": "mvtec_segmentation", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cvat.base.CvatBase", + "plugin_name": "cvat", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti_raw.base.KittiRawBase", + "plugin_name": "kitti_raw", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiBase", + "plugin_name": "tf_detection_api", + "plugin_type": "DatasetBase", + "extra_deps": [ + "tensorflow" + ] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.base.KittiDetectionBase", + "plugin_name": "kitti_detection", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.base.KittiSegmentationBase", + "plugin_name": "kitti_segmentation", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.datumaro_binary.base.DatumaroBinaryBase", + "plugin_name": "datumaro_binary", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasInstancesBase", + "plugin_name": "mapillary_vistas_instances", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mapillary_vistas.base.MapillaryVistasPanopticBase", + "plugin_name": 
"mapillary_vistas_panoptic", + "plugin_type": "DatasetBase", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cityscapes.CityscapesImporter", + "plugin_name": "cityscapes", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ade20k2020.Ade20k2020Importer", + "plugin_name": "ade20k2020", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.camvid.CamvidImporter", + "plugin_name": "camvid", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.image_dir.ImageDirImporter", + "plugin_name": "image_dir", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.lfw.LfwImporter", + "plugin_name": "lfw", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.vott_json.VottJsonImporter", + "plugin_name": "vott_json", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.common_super_resolution.CommonSuperResolutionImporter", + "plugin_name": "common_super_resolution", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kinetics.KineticsImporter", + "plugin_name": "kinetics", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtImporter", + "plugin_name": "imagenet_txt", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mots.MotsImporter", + "plugin_name": "mots", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ade20k2017.Ade20k2017Importer", + "plugin_name": "ade20k2017", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.nyu_depth_v2.NyuDepthV2Importer", + "plugin_name": "nyu_depth_v2", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mnist.MnistImporter", + "plugin_name": "mnist", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet.ImagenetImporter", + "plugin_name": "imagenet", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsImporter", + "plugin_name": "imagenet_with_subset_dirs", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.widerface.WiderFaceImporter", + "plugin_name": "wider_face", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.brats.BratsImporter", + "plugin_name": "brats", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.vgg_face2.VggFace2Importer", + "plugin_name": "vgg_face2", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.video.VideoFramesImporter", + "plugin_name": "video_frames", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.video.VideoKeyframesImporter", + "plugin_name": "video_keyframes", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mars.MarsImporter", + "plugin_name": "mars", + "plugin_type": "Importer", + "extra_deps": 
[] + }, + { + "import_path": "datumaro.plugins.data_formats.brats_numpy.BratsNumpyImporter", + "plugin_name": "brats_numpy", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.image_zip.ImageZipImporter", + "plugin_name": "image_zip", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cifar.CifarImporter", + "plugin_name": "cifar", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationImporter", + "plugin_name": "common_semantic_segmentation", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.common_semantic_segmentation.CommonSemanticSegmentationWithSubsetDirsImporter", + "plugin_name": "common_semantic_segmentation_with_subset_dirs", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mot.MotSeqImporter", + "plugin_name": "mot_seq", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.labelme.LabelMeImporter", + "plugin_name": "label_me", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mnist_csv.MnistCsvImporter", + "plugin_name": "mnist_csv", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.market1501.Market1501Importer", + "plugin_name": "market1501", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.open_images.OpenImagesImporter", + "plugin_name": "open_images", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.vott_csv.VottCsvImporter", + "plugin_name": "vott_csv", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.celeba.celeba.CelebaImporter", + "plugin_name": "celeba", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.celeba.align_celeba.AlignCelebaImporter", + "plugin_name": "align_celeba", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.segment_anything.importer.SegmentAnythingImporter", + "plugin_name": "segment_anything", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.sly_pointcloud.base.SuperviselyPointCloudImporter", + "plugin_name": "sly_pointcloud", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.base.IcdarTextLocalizationImporter", + "plugin_name": "icdar_text_localization", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.base.IcdarTextSegmentationImporter", + "plugin_name": "icdar_text_segmentation", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.base.IcdarWordRecognitionImporter", + "plugin_name": "icdar_word_recognition", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.yolo.importer.YoloImporter", + "plugin_name": "yolo", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mpii.mpii_mat.MpiiImporter", + "plugin_name": "mpii", + "plugin_type": "Importer", + 
"extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mpii.mpii_json.MpiiJsonImporter", + "plugin_name": "mpii_json", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ava.ava.AvaImporter", + "plugin_name": "ava", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocActionImporter", + "plugin_name": "voc_action", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocClassificationImporter", + "plugin_name": "voc_classification", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocDetectionImporter", + "plugin_name": "voc_detection", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocImporter", + "plugin_name": "voc", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocInstanceSegmentationImporter", + "plugin_name": "voc_instance_segmentation", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocLayoutImporter", + "plugin_name": "voc_layout", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.importer.VocSegmentationImporter", + "plugin_name": "voc_segmentation", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoCaptionsImporter", + "plugin_name": "coco_captions", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoImageInfoImporter", + "plugin_name": "coco_image_info", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoImporter", + "plugin_name": "coco", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoInstancesImporter", + "plugin_name": "coco_instances", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoLabelsImporter", + "plugin_name": "coco_labels", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoPanopticImporter", + "plugin_name": "coco_panoptic", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoPersonKeypointsImporter", + "plugin_name": "coco_person_keypoints", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoRoboflowImporter", + "plugin_name": "coco_roboflow", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.importer.CocoStuffImporter", + "plugin_name": "coco_stuff", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.arrow.importer.ArrowImporter", + "plugin_name": "arrow", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.synthia.importer.SynthiaAlImporter", + "plugin_name": "synthia_al", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": 
"datumaro.plugins.data_formats.synthia.importer.SynthiaRandImporter", + "plugin_name": "synthia_rand", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.synthia.importer.SynthiaSfImporter", + "plugin_name": "synthia_sf", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.importer.MvtecClassificationImporter", + "plugin_name": "mvtec_classification", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.importer.MvtecDetectionImporter", + "plugin_name": "mvtec_detection", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.importer.MvtecImporter", + "plugin_name": "mvtec", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.importer.MvtecSegmentationImporter", + "plugin_name": "mvtec_segmentation", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cvat.base.CvatImporter", + "plugin_name": "cvat", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti_raw.base.KittiRawImporter", + "plugin_name": "kitti_raw", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.tf_detection_api.base.TfDetectionApiImporter", + "plugin_name": "tf_detection_api", + "plugin_type": "Importer", + "extra_deps": [ + "tensorflow" + ] + }, + { + "import_path": "datumaro.plugins.data_formats.datumaro.importer.DatumaroImporter", + "plugin_name": "datumaro", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.importer.KittiDetectionImporter", + "plugin_name": "kitti_detection", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.importer.KittiImporter", + "plugin_name": "kitti", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.importer.KittiSegmentationImporter", + "plugin_name": "kitti_segmentation", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.datumaro_binary.importer.DatumaroBinaryImporter", + "plugin_name": "datumaro_binary", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasImporter", + "plugin_name": "mapillary_vistas", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasInstancesImporter", + "plugin_name": "mapillary_vistas_instances", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mapillary_vistas.importer.MapillaryVistasPanopticImporter", + "plugin_name": "mapillary_vistas_panoptic", + "plugin_type": "Importer", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.explorer.ExplorerLauncher", + "plugin_name": "explorer", + "plugin_type": "Launcher", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.openvino_plugin.launcher.OpenvinoLauncher", + "plugin_name": "openvino", + "plugin_type": "Launcher", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.accuracy_checker_plugin.ac_launcher.AcLauncher", + "plugin_name": "ac", + "plugin_type": "Launcher", + 
"extra_deps": [ + "openvino.tools", + "tensorflow" + ] + }, + { + "import_path": "datumaro.plugins.openvino_plugin.shift_launcher.ShiftLauncher", + "plugin_name": "shift", + "plugin_type": "Launcher", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cityscapes.CityscapesExporter", + "plugin_name": "cityscapes", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.camvid.CamvidExporter", + "plugin_name": "camvid", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.image_dir.ImageDirExporter", + "plugin_name": "image_dir", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.lfw.LfwExporter", + "plugin_name": "lfw", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet_txt.ImagenetTxtExporter", + "plugin_name": "imagenet_txt", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mots.MotsPngExporter", + "plugin_name": "mots_png", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mnist.MnistExporter", + "plugin_name": "mnist", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet.ImagenetExporter", + "plugin_name": "imagenet", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.imagenet.ImagenetWithSubsetDirsExporter", + "plugin_name": "imagenet_with_subset_dirs", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.widerface.WiderFaceExporter", + "plugin_name": "wider_face", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.vgg_face2.VggFace2Exporter", + "plugin_name": "vgg_face2", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.image_zip.ImageZipExporter", + "plugin_name": "image_zip", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cifar.CifarExporter", + "plugin_name": "cifar", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mot.MotSeqGtExporter", + "plugin_name": "mot_seq_gt", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.labelme.LabelMeExporter", + "plugin_name": "label_me", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mnist_csv.MnistCsvExporter", + "plugin_name": "mnist_csv", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.market1501.Market1501Exporter", + "plugin_name": "market1501", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.open_images.OpenImagesExporter", + "plugin_name": "open_images", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.segment_anything.exporter.SegmentAnythingExporter", + "plugin_name": "segment_anything", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.sly_pointcloud.exporter.SuperviselyPointCloudExporter", + "plugin_name": "sly_pointcloud", + "plugin_type": "Exporter", + "extra_deps": [] + 
}, + { + "import_path": "datumaro.plugins.data_formats.icdar.exporter.IcdarTextLocalizationExporter", + "plugin_name": "icdar_text_localization", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.exporter.IcdarTextSegmentationExporter", + "plugin_name": "icdar_text_segmentation", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.icdar.exporter.IcdarWordRecognitionExporter", + "plugin_name": "icdar_word_recognition", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.yolo.exporter.YoloExporter", + "plugin_name": "yolo", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.yolo.exporter.YoloUltralyticsExporter", + "plugin_name": "yolo_ultralytics", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.ava.ava.AvaExporter", + "plugin_name": "ava", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocActionExporter", + "plugin_name": "voc_action", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocClassificationExporter", + "plugin_name": "voc_classification", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocDetectionExporter", + "plugin_name": "voc_detection", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocExporter", + "plugin_name": "voc", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocInstanceSegmentationExporter", + "plugin_name": "voc_instance_segmentation", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocLayoutExporter", + "plugin_name": "voc_layout", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.voc.exporter.VocSegmentationExporter", + "plugin_name": "voc_segmentation", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoCaptionsExporter", + "plugin_name": "coco_captions", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoExporter", + "plugin_name": "coco", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoImageInfoExporter", + "plugin_name": "coco_image_info", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoInstancesExporter", + "plugin_name": "coco_instances", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoLabelsExporter", + "plugin_name": "coco_labels", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoPanopticExporter", + "plugin_name": "coco_panoptic", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.coco.exporter.CocoPersonKeypointsExporter", + "plugin_name": "coco_person_keypoints", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + 
"import_path": "datumaro.plugins.data_formats.coco.exporter.CocoStuffExporter", + "plugin_name": "coco_stuff", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.arrow.exporter.ArrowExporter", + "plugin_name": "arrow", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.exporter.MvtecClassificationExporter", + "plugin_name": "mvtec_classification", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.exporter.MvtecDetectionExporter", + "plugin_name": "mvtec_detection", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.exporter.MvtecExporter", + "plugin_name": "mvtec", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.mvtec.exporter.MvtecSegmentationExporter", + "plugin_name": "mvtec_segmentation", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.cvat.exporter.CvatExporter", + "plugin_name": "cvat", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti_raw.exporter.KittiRawExporter", + "plugin_name": "kitti_raw", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.tf_detection_api.exporter.TfDetectionApiExporter", + "plugin_name": "tf_detection_api", + "plugin_type": "Exporter", + "extra_deps": [ + "tensorflow" + ] + }, + { + "import_path": "datumaro.plugins.data_formats.datumaro.exporter.DatumaroExporter", + "plugin_name": "datumaro", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.exporter.KittiDetectionExporter", + "plugin_name": "kitti_detection", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.exporter.KittiExporter", + "plugin_name": "kitti", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.kitti.exporter.KittiSegmentationExporter", + "plugin_name": "kitti_segmentation", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.data_formats.datumaro_binary.exporter.DatumaroBinaryExporter", + "plugin_name": "datumaro_binary", + "plugin_type": "Exporter", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.AnnsToLabels", + "plugin_name": "anns_to_labels", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.BboxValuesDecrement", + "plugin_name": "bbox_values_decrement", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.BoxesToMasks", + "plugin_name": "boxes_to_masks", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Correct", + "plugin_name": "correct", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.CropCoveredSegments", + "plugin_name": "crop_covered_segments", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.IdFromImageName", + "plugin_name": "id_from_image_name", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.MapSubsets", + "plugin_name": "map_subsets", + "plugin_type": "Transform", 
+ "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.MasksToPolygons", + "plugin_name": "masks_to_polygons", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.MergeInstanceSegments", + "plugin_name": "merge_instance_segments", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.PolygonsToMasks", + "plugin_name": "polygons_to_masks", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ProjectInfos", + "plugin_name": "project_infos", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ProjectLabels", + "plugin_name": "project_labels", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RandomSplit", + "plugin_name": "random_split", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Reindex", + "plugin_name": "reindex", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ReindexAnnotations", + "plugin_name": "reindex_annotations", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemapLabels", + "plugin_name": "remap_labels", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemoveAnnotations", + "plugin_name": "remove_annotations", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemoveAttributes", + "plugin_name": "remove_attributes", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.RemoveItems", + "plugin_name": "remove_items", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Rename", + "plugin_name": "rename", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ResizeTransform", + "plugin_name": "resize", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.ShapesToBoxes", + "plugin_name": "shapes_to_boxes", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.transforms.Sort", + "plugin_name": "sort", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.splitter.Split", + "plugin_name": "split", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.ndr.NDR", + "plugin_name": "ndr", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.tiling.tile.Tile", + "plugin_name": "tile", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.tiling.merge_tile.MergeTile", + "plugin_name": "merge_tile", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.sampler.random_sampler.LabelRandomSampler", + "plugin_name": "label_random_sampler", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.sampler.random_sampler.RandomSampler", + "plugin_name": "random_sampler", + "plugin_type": "Transform", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.sampler.relevancy_sampler.RelevancySampler", + "plugin_name": "relevancy_sampler", + "plugin_type": "Transform", + 
"extra_deps": [] + }, + { + "import_path": "datumaro.plugins.validators.ClassificationValidator", + "plugin_name": "classification", + "plugin_type": "Validator", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.validators.DetectionValidator", + "plugin_name": "detection", + "plugin_type": "Validator", + "extra_deps": [] + }, + { + "import_path": "datumaro.plugins.validators.SegmentationValidator", + "plugin_name": "segmentation", + "plugin_type": "Validator", + "extra_deps": [] + } +] diff --git a/src/datumaro/plugins/specs.py b/src/datumaro/plugins/specs.py index 655d4a759c..2dcbd82617 100644 --- a/src/datumaro/plugins/specs.py +++ b/src/datumaro/plugins/specs.py @@ -56,4 +56,4 @@ def _enroll_to_plugin_specs(plugins, plugin_type): _enroll_to_plugin_specs(env.transforms, "Transform") _enroll_to_plugin_specs(env.validators, "Validator") - dump_json_file(_SPECS_JSON_PATH, plugin_specs, append_newline=True) + dump_json_file(_SPECS_JSON_PATH, plugin_specs, indent=True, append_newline=True) diff --git a/tests/unit/test_environment.py b/tests/unit/test_environment.py index f608c8f2cb..0d6dbcf565 100644 --- a/tests/unit/test_environment.py +++ b/tests/unit/test_environment.py @@ -5,7 +5,7 @@ import pytest import datumaro.components.lazy_plugin -from datumaro.components.environment import Environment +from datumaro.components.environment import Environment, PluginRegistry real_import_module = datumaro.components.lazy_plugin.import_module @@ -27,14 +27,24 @@ def fxt_no_lazy_import(self): yield env Environment.release_builtin_plugins() + def _test_equivalance(self, lazy_registry: PluginRegistry, no_lazy_registry: PluginRegistry): + lazy_plugin_names = set(sorted(lazy_registry)) + no_lazy_plugin_names = set(sorted(no_lazy_registry)) + + misregistered_names = lazy_plugin_names.difference(no_lazy_plugin_names) + unregistered_names = no_lazy_plugin_names.difference(lazy_plugin_names) + assert ( + lazy_plugin_names == no_lazy_plugin_names + ), f"misregistered_names={misregistered_names}, unregistered_names={unregistered_names}" + def test_equivalance(self, fxt_lazy_import: Environment, fxt_no_lazy_import: Environment): - assert sorted(fxt_lazy_import.extractors) == sorted(fxt_no_lazy_import.extractors) - assert sorted(fxt_lazy_import.importers) == sorted(fxt_no_lazy_import.importers) - assert sorted(fxt_lazy_import.launchers) == sorted(fxt_no_lazy_import.launchers) - assert sorted(fxt_lazy_import.exporters) == sorted(fxt_no_lazy_import.exporters) - assert sorted(fxt_lazy_import.generators) == sorted(fxt_no_lazy_import.generators) - assert sorted(fxt_lazy_import.transforms) == sorted(fxt_no_lazy_import.transforms) - assert sorted(fxt_lazy_import.validators) == sorted(fxt_no_lazy_import.validators) + self._test_equivalance(fxt_lazy_import.extractors, fxt_no_lazy_import.extractors) + self._test_equivalance(fxt_lazy_import.importers, fxt_no_lazy_import.importers) + self._test_equivalance(fxt_lazy_import.launchers, fxt_no_lazy_import.launchers) + self._test_equivalance(fxt_lazy_import.exporters, fxt_no_lazy_import.exporters) + self._test_equivalance(fxt_lazy_import.generators, fxt_no_lazy_import.generators) + self._test_equivalance(fxt_lazy_import.transforms, fxt_no_lazy_import.transforms) + self._test_equivalance(fxt_lazy_import.validators, fxt_no_lazy_import.validators) @pytest.fixture def fxt_tf_failure_env(self, monkeypatch): From d4d0bb7a13f4e97c5d8c3f8919769c7024924313 Mon Sep 17 00:00:00 2001 From: "Kim, Vinnam" Date: Tue, 13 Jun 2023 12:47:22 +0900 Subject: [PATCH 3/6] Add missing 
src/datumaro/plugins/sampler/algorithm/__init__.py Signed-off-by: Kim, Vinnam --- src/datumaro/plugins/sampler/algorithm/__init__.py | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 src/datumaro/plugins/sampler/algorithm/__init__.py diff --git a/src/datumaro/plugins/sampler/algorithm/__init__.py b/src/datumaro/plugins/sampler/algorithm/__init__.py new file mode 100644 index 0000000000..ff847f0120 --- /dev/null +++ b/src/datumaro/plugins/sampler/algorithm/__init__.py @@ -0,0 +1,3 @@ +# Copyright (C) 2023 Intel Corporation +# +# SPDX-License-Identifier: MIT From 5005cfbbf910f90ffcafbf0d3ff7280722ed2ba9 Mon Sep 17 00:00:00 2001 From: "Kim, Vinnam" Date: Tue, 13 Jun 2023 20:06:27 +0900 Subject: [PATCH 4/6] Change ModelInterpreter logic Signed-off-by: Kim, Vinnam --- src/datumaro/components/abstracts/__init__.py | 1 + .../components/abstracts/model_interpreter.py | 25 ++++ .../plugins/openvino_plugin/launcher.py | 60 ++++----- .../samples/clip_text_ViT-B_32_interp.py | 21 ++-- .../samples/clip_visual_ViT-B_32_interp.py | 45 +++---- .../samples/googlenet-v4-tf_interp.py | 54 ++++---- .../samples/mobilenet_v2_pytorch_interp.py | 53 ++++---- ...ustom_object_detection_gen3_atss_interp.py | 21 ++++ .../samples/ssd_face_detection_interp.py | 105 ++++++++-------- .../ssd_mobilenet_coco_detection_interp.py | 115 +++++++++--------- .../samples/ssd_person_detection_interp.py | 106 ++++++++-------- ...sd_person_vehicle_bike_detection_interp.py | 107 ++++++++-------- .../samples/ssd_vehicle_detection_interp.py | 103 ++++++++-------- tests/assets/rise/model_interp.py | 35 +++--- 14 files changed, 458 insertions(+), 393 deletions(-) create mode 100644 src/datumaro/components/abstracts/model_interpreter.py create mode 100644 src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py diff --git a/src/datumaro/components/abstracts/__init__.py b/src/datumaro/components/abstracts/__init__.py index 35e631211f..eac84145a6 100644 --- a/src/datumaro/components/abstracts/__init__.py +++ b/src/datumaro/components/abstracts/__init__.py @@ -3,3 +3,4 @@ # SPDX-License-Identifier: MIT from .merger import * +from .model_interpreter import * diff --git a/src/datumaro/components/abstracts/model_interpreter.py b/src/datumaro/components/abstracts/model_interpreter.py new file mode 100644 index 0000000000..b31f041a8c --- /dev/null +++ b/src/datumaro/components/abstracts/model_interpreter.py @@ -0,0 +1,25 @@ +# Copyright (C) 2023 Intel Corporation +# +# SPDX-License-Identifier: MIT + +from abc import ABC, abstractmethod + +__all__ = ["IModelInterpreter"] + + +class IModelInterpreter(ABC): + @abstractmethod + def get_categories(self): + raise NotImplementedError("Function should be implemented.") + + @abstractmethod + def process_outputs(self, inputs, outputs): + raise NotImplementedError("Function should be implemented.") + + @abstractmethod + def normalize(self, inputs): + raise NotImplementedError("Function should be implemented.") + + @abstractmethod + def resize(self, inputs): + raise NotImplementedError("Function should be implemented.") diff --git a/src/datumaro/plugins/openvino_plugin/launcher.py b/src/datumaro/plugins/openvino_plugin/launcher.py index f0b1c82eaf..f12a6fe4c4 100644 --- a/src/datumaro/plugins/openvino_plugin/launcher.py +++ b/src/datumaro/plugins/openvino_plugin/launcher.py @@ -4,10 +4,12 @@ # pylint: disable=exec-used +import inspect import logging as log import os.path as osp import shutil import urllib +from importlib.util import module_from_spec, 
spec_from_file_location from typing import Dict, Optional import cv2 @@ -15,6 +17,7 @@ from openvino.runtime import Core from tqdm import tqdm +from datumaro.components.abstracts import IModelInterpreter from datumaro.components.cli_plugin import CliPlugin from datumaro.components.launcher import Launcher from datumaro.errors import DatumaroError @@ -62,42 +65,6 @@ def copy_model(model_dir, model): model["interpreter"] = osp.basename(model["interpreter"]) -class InterpreterScript: - def __init__(self, path): - with open(path, "r", encoding="utf-8") as f: - script = f.read() - - context = {} - exec(script, context, context) - - normalize = context.get("normalize") - if not callable(normalize): - raise Exception("Can't find 'normalize' function in the interpreter script") - self.__dict__["normalize"] = normalize - - process_outputs = context.get("process_outputs") - if not callable(process_outputs): - raise Exception("Can't find 'process_outputs' function in the interpreter script") - self.__dict__["process_outputs"] = process_outputs - - get_categories = context.get("get_categories") - assert get_categories is None or callable(get_categories) - if get_categories: - self.__dict__["get_categories"] = get_categories - - @staticmethod - def get_categories(): - return None - - @staticmethod - def process_outputs(inputs, outputs): - raise NotImplementedError("Function should be implemented in the interpreter script") - - @staticmethod - def normalize(inputs): - raise NotImplementedError("Function should be implemented in the interpreter script") - - class OpenvinoLauncher(Launcher): cli_plugin = _OpenvinoImporter ALLOWED_CHANNEL_FORMATS = {"NCHW", "NHWC"} @@ -156,7 +123,7 @@ def __init__( if not osp.isfile(interpreter): raise DatumaroError('Failed to open model interpreter script file "%s"' % (interpreter)) - self._interpreter = InterpreterScript(interpreter) + self._interpreter = self._load_interpreter(file_path=interpreter) self._device = device or "CPU" self._output_blobs = output_layers @@ -175,6 +142,22 @@ def __init__( self._channel_format = channel_format self._to_rgb = to_rgb + def _load_interpreter(self, file_path: str) -> IModelInterpreter: + fname, _ = osp.splitext(osp.basename(file_path)) + spec = spec_from_file_location(fname, file_path) + module = module_from_spec(spec) + spec.loader.exec_module(module) + for name, obj in inspect.getmembers(module): + if ( + inspect.isclass(obj) + and issubclass(obj, IModelInterpreter) + and obj is not IModelInterpreter + ): + log.info(f"Load {name} for model interpreter.") + return obj() + + raise DatumaroError(f"{file_path} has no class derived from IModelInterpreter.") + def _download_file(self, url: str, file_root: str): req = urllib.request.Request(url) with urllib.request.urlopen(req) as source, open(file_root, "wb") as output: # nosec B310 @@ -277,6 +260,9 @@ def process_inputs(self, inputs): assert inputs.shape[3] == 3, "Expected BGR input, got %s" % (inputs.shape,) + # Resize + inputs = self._interpreter.resize(inputs) + if self._channel_format == "NCHW": n, c, h, w = self._input_layout elif self._channel_format == "NHWC": diff --git a/src/datumaro/plugins/openvino_plugin/samples/clip_text_ViT-B_32_interp.py b/src/datumaro/plugins/openvino_plugin/samples/clip_text_ViT-B_32_interp.py index 6b01bb5156..4cc9d87047 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/clip_text_ViT-B_32_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/clip_text_ViT-B_32_interp.py @@ -2,18 +2,21 @@ # # SPDX-License-Identifier: MIT +from 
datumaro.components.abstracts import IModelInterpreter from datumaro.components.annotation import AnnotationType, HashKey, LabelCategories -def normalize(inputs): - return inputs +class ClipTextViTB32ModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + def process_outputs(self, inputs, outputs): + results = [[HashKey(outputs)]] + return results -def process_outputs(inputs, outputs): - results = [[HashKey(outputs)]] - return results + def get_categories(self): + label_categories = LabelCategories() + return {AnnotationType.label: label_categories} - -def get_categories(): - label_categories = LabelCategories() - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/clip_visual_ViT-B_32_interp.py b/src/datumaro/plugins/openvino_plugin/samples/clip_visual_ViT-B_32_interp.py index e673904be8..bad208c251 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/clip_visual_ViT-B_32_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/clip_visual_ViT-B_32_interp.py @@ -6,35 +6,38 @@ import numpy as np +from datumaro.components.abstracts import IModelInterpreter from datumaro.components.annotation import AnnotationType, HashKey, LabelCategories from datumaro.util.samples import get_samples_path -def normalize(inputs): - mean = 255 * np.array([0.485, 0.456, 0.406]) - std = 255 * np.array([0.229, 0.224, 0.225]) +class ClipVisualViTB32ModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + mean = 255 * np.array([0.485, 0.456, 0.406]) + std = 255 * np.array([0.229, 0.224, 0.225]) - normalized_inputs = np.empty_like(inputs, dtype=inputs.dtype) - for k, inp in enumerate(inputs): - normalized_inputs[k] = (inp - mean[:, None, None]) / std[:, None, None] - inputs = normalized_inputs + normalized_inputs = np.empty_like(inputs, dtype=inputs.dtype) + for k, inp in enumerate(inputs): + normalized_inputs[k] = (inp - mean[:, None, None]) / std[:, None, None] + inputs = normalized_inputs - return inputs + return inputs + def process_outputs(self, inputs, outputs): + results = [[HashKey(outputs)]] + return results -def process_outputs(inputs, outputs): - results = [[HashKey(outputs)]] - return results + def get_categories(self): + label_categories = LabelCategories() + openvino_plugin_samples_dir = get_samples_path() + imagenet_class_path = osp.join(openvino_plugin_samples_dir, "imagenet.class") + with open(imagenet_class_path, "r", encoding="utf-8") as file: + for line in file.readlines(): + label = line.strip() + label_categories.add(label) -def get_categories(): - label_categories = LabelCategories() + return {AnnotationType.label: label_categories} - openvino_plugin_samples_dir = get_samples_path() - imagenet_class_path = osp.join(openvino_plugin_samples_dir, "imagenet.class") - with open(imagenet_class_path, "r", encoding="utf-8") as file: - for line in file.readlines(): - label = line.strip() - label_categories.add(label) - - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py b/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py index 14448ef5b9..a39e5d6519 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/googlenet-v4-tf_interp.py @@ -1,43 +1,45 @@ -# Copyright (C) 2021 Intel Corporation +# Copyright (C) 2023 Intel Corporation # # 
SPDX-License-Identifier: MIT -import numpy as np +from datumaro.components.abstracts import IModelInterpreter from datumaro.components.annotation import AnnotationType, Label, LabelCategories from datumaro.util.annotation_util import softmax -def normalize(inputs): - return inputs +class GooglenetV4TfModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... ] -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. - # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... ] + results = [] + for input_, output in zip(inputs, outputs): # pylint: disable=unused-variable + image_results = [] + output = softmax(output).tolist() + label = output.index(max(output)) + image_results.append(Label(label=label, attributes={"scores": output})) - results = [] - for input_, output in zip(inputs, outputs): # pylint: disable=unused-variable - image_results = [] - output = softmax(output).tolist() - label = output.index(max(output)) - image_results.append(Label(label=label, attributes={"scores": output})) + results.append(image_results) - results.append(image_results) + return results - return results + def get_categories(self): + # output categories - label map etc. + label_categories = LabelCategories() -def get_categories(): - # output categories - label map etc. + with open("samples/imagenet.class", "r", encoding="utf-8") as file: + for line in file.readlines(): + label = line.strip() + label_categories.add(label) - label_categories = LabelCategories() + return {AnnotationType.label: label_categories} - with open("samples/imagenet.class", "r", encoding="utf-8") as file: - for line in file.readlines(): - label = line.strip() - label_categories.add(label) - - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/mobilenet_v2_pytorch_interp.py b/src/datumaro/plugins/openvino_plugin/samples/mobilenet_v2_pytorch_interp.py index 8d348f222a..0c14f03224 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/mobilenet_v2_pytorch_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/mobilenet_v2_pytorch_interp.py @@ -1,41 +1,44 @@ -# Copyright (C) 2021 Intel Corporation +# Copyright (C) 2023 Intel Corporation # # SPDX-License-Identifier: MIT +from datumaro.components.abstracts import IModelInterpreter from datumaro.components.annotation import AnnotationType, Label, LabelCategories from datumaro.util.annotation_util import softmax -def normalize(inputs): - return inputs +class MobilenetV2PytorchModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... 
] -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. - # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... ] + results = [] + for input_, output in zip(inputs, outputs): # pylint: disable=unused-variable + image_results = [] + output = softmax(output).tolist() + label = output.index(max(output)) + image_results.append(Label(label=label, attributes={"scores": output})) - results = [] - for input_, output in zip(inputs, outputs): # pylint: disable=unused-variable - image_results = [] - output = softmax(output).tolist() - label = output.index(max(output)) - image_results.append(Label(label=label, attributes={"scores": output})) + results.append(image_results) - results.append(image_results) + return results - return results + def get_categories(self): + # output categories - label map etc. + label_categories = LabelCategories() -def get_categories(): - # output categories - label map etc. + with open("samples/imagenet.class", "r", encoding="utf-8") as file: + for line in file.readlines(): + label = line.strip() + label_categories.add(label) - label_categories = LabelCategories() + return {AnnotationType.label: label_categories} - with open("samples/imagenet.class", "r", encoding="utf-8") as file: - for line in file.readlines(): - label = line.strip() - label_categories.add(label) - - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py b/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py new file mode 100644 index 0000000000..6aeb0a3251 --- /dev/null +++ b/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py @@ -0,0 +1,21 @@ +# Copyright (C) 2023 Intel Corporation +# +# SPDX-License-Identifier: MIT + +from datumaro.components.abstracts import IModelInterpreter + +__all__ = ["OTXAtssModelInterpreter"] + + +class OTXAtssModelInterpreter(IModelInterpreter): + def get_categories(self): + return None + + def process_outputs(self, inputs, outputs): + return outputs + + def normalize(self, inputs): + return inputs + + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/ssd_face_detection_interp.py b/src/datumaro/plugins/openvino_plugin/samples/ssd_face_detection_interp.py index 9d5d85efcb..be96e458bf 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/ssd_face_detection_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/ssd_face_detection_interp.py @@ -1,16 +1,13 @@ -# Copyright (C) 2021 Intel Corporation +# Copyright (C) 2023 Intel Corporation # # SPDX-License-Identifier: MIT +from datumaro.components.abstracts.model_interpreter import IModelInterpreter from datumaro.components.annotation import AnnotationType, Bbox, LabelCategories conf_thresh = 0.02 -def normalize(inputs): - return inputs - - def _match_confs(confs, detections): matches = [-1] * len(detections) @@ -37,56 +34,62 @@ def _match_confs(confs, detections): return matches -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. 
- # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... ] - - results = [] - for input_, detections in zip(inputs, outputs["detection_out"]): - input_height, input_width = input_.shape[:2] - - confs = outputs["Softmax_189/Softmax_"] - detections = detections[0] - - conf_ids = _match_confs(confs, detections) - - image_results = [] - for i, det in enumerate(detections): - image_id = int(det[0]) # pylint: disable=unused-variable - label = int(det[1]) - conf = float(det[2]) - det_confs = confs[conf_ids[i]] - - if conf <= conf_thresh: - continue - - x = max(int(det[3] * input_width), 0) - y = max(int(det[4] * input_height), 0) - w = min(int(det[5] * input_width - x), input_width) - h = min(int(det[6] * input_height - y), input_height) - - image_results.append( - Bbox( - x, - y, - w, - h, - label=label, - attributes={"score": conf, "scores": list(map(float, det_confs))}, +class SsdFaceDetectionModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... ] + + results = [] + for input_, detections in zip(inputs, outputs["detection_out"]): + input_height, input_width = input_.shape[:2] + + confs = outputs["Softmax_189/Softmax_"] + detections = detections[0] + + conf_ids = _match_confs(confs, detections) + + image_results = [] + for i, det in enumerate(detections): + image_id = int(det[0]) # pylint: disable=unused-variable + label = int(det[1]) + conf = float(det[2]) + det_confs = confs[conf_ids[i]] + + if conf <= conf_thresh: + continue + + x = max(int(det[3] * input_width), 0) + y = max(int(det[4] * input_height), 0) + w = min(int(det[5] * input_width - x), input_width) + h = min(int(det[6] * input_height - y), input_height) + + image_results.append( + Bbox( + x, + y, + w, + h, + label=label, + attributes={"score": conf, "scores": list(map(float, det_confs))}, + ) ) - ) - results.append(image_results) + results.append(image_results) - return results + return results + def get_categories(self): + # output categories - label map etc. -def get_categories(): - # output categories - label map etc. 
+ label_categories = LabelCategories() + label_categories.add("face") - label_categories = LabelCategories() - label_categories.add("face") + return {AnnotationType.label: label_categories} - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/ssd_mobilenet_coco_detection_interp.py b/src/datumaro/plugins/openvino_plugin/samples/ssd_mobilenet_coco_detection_interp.py index 752bb48062..4dc462bcdf 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/ssd_mobilenet_coco_detection_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/ssd_mobilenet_coco_detection_interp.py @@ -1,17 +1,14 @@ -# Copyright (C) 2021 Intel Corporation +# Copyright (C) 2023 Intel Corporation # # SPDX-License-Identifier: MIT +from datumaro.components.abstracts.model_interpreter import IModelInterpreter from datumaro.components.annotation import AnnotationType, Bbox, LabelCategories conf_thresh = 0.3 model_class_num = 91 -def normalize(inputs): - return inputs - - def _match_confs(confs, detections): matches = [-1] * len(detections) @@ -38,62 +35,68 @@ def _match_confs(confs, detections): return matches -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. - # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... ] - - results = [] - for input_, confs, detections in zip( - inputs, outputs["do_ExpandDims_conf/sigmoid"], outputs["DetectionOutput"] - ): - input_height, input_width = input_.shape[:2] - - confs = confs[0].reshape(-1, model_class_num) - detections = detections[0] - - conf_ids = _match_confs(confs, detections) - - image_results = [] - for i, det in enumerate(detections): - image_id = int(det[0]) # pylint: disable=unused-variable - label = int(det[1]) - conf = float(det[2]) - det_confs = confs[conf_ids[i]] - - if conf <= conf_thresh: - continue - - x = max(int(det[3] * input_width), 0) - y = max(int(det[4] * input_height), 0) - w = min(int(det[5] * input_width - x), input_width) - h = min(int(det[6] * input_height - y), input_height) - - image_results.append( - Bbox( - x, - y, - w, - h, - label=label, - attributes={"score": conf, "scores": list(map(float, det_confs))}, +class SsdMobilenetCocoDetectionModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... 
] + + results = [] + for input_, confs, detections in zip( + inputs, outputs["do_ExpandDims_conf/sigmoid"], outputs["DetectionOutput"] + ): + input_height, input_width = input_.shape[:2] + + confs = confs[0].reshape(-1, model_class_num) + detections = detections[0] + + conf_ids = _match_confs(confs, detections) + + image_results = [] + for i, det in enumerate(detections): + image_id = int(det[0]) # pylint: disable=unused-variable + label = int(det[1]) + conf = float(det[2]) + det_confs = confs[conf_ids[i]] + + if conf <= conf_thresh: + continue + + x = max(int(det[3] * input_width), 0) + y = max(int(det[4] * input_height), 0) + w = min(int(det[5] * input_width - x), input_width) + h = min(int(det[6] * input_height - y), input_height) + + image_results.append( + Bbox( + x, + y, + w, + h, + label=label, + attributes={"score": conf, "scores": list(map(float, det_confs))}, + ) ) - ) - results.append(image_results) + results.append(image_results) - return results + return results + def get_categories(self): + # output categories - label map etc. -def get_categories(): - # output categories - label map etc. + label_categories = LabelCategories() - label_categories = LabelCategories() + with open("samples/coco.class", "r", encoding="utf-8") as file: + for line in file.readlines(): + label = line.strip() + label_categories.add(label) - with open("samples/coco.class", "r", encoding="utf-8") as file: - for line in file.readlines(): - label = line.strip() - label_categories.add(label) + return {AnnotationType.label: label_categories} - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/ssd_person_detection_interp.py b/src/datumaro/plugins/openvino_plugin/samples/ssd_person_detection_interp.py index 94962fc5bd..5e7c12ab6e 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/ssd_person_detection_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/ssd_person_detection_interp.py @@ -1,16 +1,12 @@ -# Copyright (C) 2021 Intel Corporation +# Copyright (C) 2023 Intel Corporation # # SPDX-License-Identifier: MIT - +from datumaro.components.abstracts.model_interpreter import IModelInterpreter from datumaro.components.annotation import AnnotationType, Bbox, LabelCategories conf_thresh = 0.02 -def normalize(inputs): - return inputs - - def _match_confs(confs, detections): matches = [-1] * len(detections) @@ -37,56 +33,62 @@ def _match_confs(confs, detections): return matches -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. - # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... 
] - - results = [] - for input_, detections in zip(inputs, outputs["detection_out"]): - input_height, input_width = input_.shape[:2] - - confs = outputs["Softmax_189/Softmax_"] - detections = detections[0] - - conf_ids = _match_confs(confs, detections) - - image_results = [] - for i, det in enumerate(detections): - image_id = int(det[0]) # pylint: disable=unused-variable - label = int(det[1]) - conf = float(det[2]) - det_confs = confs[conf_ids[i]] - - if conf <= conf_thresh: - continue - - x = max(int(det[3] * input_width), 0) - y = max(int(det[4] * input_height), 0) - w = min(int(det[5] * input_width - x), input_width) - h = min(int(det[6] * input_height - y), input_height) - - image_results.append( - Bbox( - x, - y, - w, - h, - label=label, - attributes={"score": conf, "scores": list(map(float, det_confs))}, +class SsdPersonDetectionModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... ] + + results = [] + for input_, detections in zip(inputs, outputs["detection_out"]): + input_height, input_width = input_.shape[:2] + + confs = outputs["Softmax_189/Softmax_"] + detections = detections[0] + + conf_ids = _match_confs(confs, detections) + + image_results = [] + for i, det in enumerate(detections): + image_id = int(det[0]) # pylint: disable=unused-variable + label = int(det[1]) + conf = float(det[2]) + det_confs = confs[conf_ids[i]] + + if conf <= conf_thresh: + continue + + x = max(int(det[3] * input_width), 0) + y = max(int(det[4] * input_height), 0) + w = min(int(det[5] * input_width - x), input_width) + h = min(int(det[6] * input_height - y), input_height) + + image_results.append( + Bbox( + x, + y, + w, + h, + label=label, + attributes={"score": conf, "scores": list(map(float, det_confs))}, + ) ) - ) - results.append(image_results) + results.append(image_results) - return results + return results + def get_categories(self): + # output categories - label map etc. -def get_categories(): - # output categories - label map etc. 
+ label_categories = LabelCategories() + label_categories.add("person") - label_categories = LabelCategories() - label_categories.add("person") + return {AnnotationType.label: label_categories} - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/ssd_person_vehicle_bike_detection_interp.py b/src/datumaro/plugins/openvino_plugin/samples/ssd_person_vehicle_bike_detection_interp.py index d0ec8182ac..3c83b24c7b 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/ssd_person_vehicle_bike_detection_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/ssd_person_vehicle_bike_detection_interp.py @@ -2,15 +2,12 @@ # # SPDX-License-Identifier: MIT +from datumaro.components.abstracts.model_interpreter import IModelInterpreter from datumaro.components.annotation import AnnotationType, Bbox, LabelCategories conf_thresh = 0.02 -def normalize(inputs): - return inputs - - def _match_confs(confs, detections): matches = [-1] * len(detections) @@ -37,58 +34,64 @@ def _match_confs(confs, detections): return matches -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. - # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... ] - - results = [] - for input_, detections in zip(inputs, outputs["detection_out"]): - input_height, input_width = input_.shape[:2] - - confs = outputs["Softmax_189/Softmax_"] - detections = detections[0] - - conf_ids = _match_confs(confs, detections) - - image_results = [] - for i, det in enumerate(detections): - image_id = int(det[0]) # pylint: disable=unused-variable - label = int(det[1]) - conf = float(det[2]) - det_confs = confs[conf_ids[i]] - - if conf <= conf_thresh: - continue - - x = max(int(det[3] * input_width), 0) - y = max(int(det[4] * input_height), 0) - w = min(int(det[5] * input_width - x), input_width) - h = min(int(det[6] * input_height - y), input_height) - - image_results.append( - Bbox( - x, - y, - w, - h, - label=label, - attributes={"score": conf, "scores": list(map(float, det_confs))}, +class SsdPersonVehicleBikeDetectionModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... 
] + + results = [] + for input_, detections in zip(inputs, outputs["detection_out"]): + input_height, input_width = input_.shape[:2] + + confs = outputs["Softmax_189/Softmax_"] + detections = detections[0] + + conf_ids = _match_confs(confs, detections) + + image_results = [] + for i, det in enumerate(detections): + image_id = int(det[0]) # pylint: disable=unused-variable + label = int(det[1]) + conf = float(det[2]) + det_confs = confs[conf_ids[i]] + + if conf <= conf_thresh: + continue + + x = max(int(det[3] * input_width), 0) + y = max(int(det[4] * input_height), 0) + w = min(int(det[5] * input_width - x), input_width) + h = min(int(det[6] * input_height - y), input_height) + + image_results.append( + Bbox( + x, + y, + w, + h, + label=label, + attributes={"score": conf, "scores": list(map(float, det_confs))}, + ) ) - ) - results.append(image_results) + results.append(image_results) - return results + return results + def get_categories(self): + # output categories - label map etc. -def get_categories(): - # output categories - label map etc. + label_categories = LabelCategories() + label_categories.add("vehicle") + label_categories.add("person") + label_categories.add("bike") - label_categories = LabelCategories() - label_categories.add("vehicle") - label_categories.add("person") - label_categories.add("bike") + return {AnnotationType.label: label_categories} - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/src/datumaro/plugins/openvino_plugin/samples/ssd_vehicle_detection_interp.py b/src/datumaro/plugins/openvino_plugin/samples/ssd_vehicle_detection_interp.py index 55fa7d9f6b..026388d714 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/ssd_vehicle_detection_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/ssd_vehicle_detection_interp.py @@ -2,15 +2,12 @@ # # SPDX-License-Identifier: MIT +from datumaro.components.abstracts.model_interpreter import IModelInterpreter from datumaro.components.annotation import AnnotationType, Bbox, LabelCategories conf_thresh = 0.02 -def normalize(inputs): - return inputs - - def _match_confs(confs, detections): matches = [-1] * len(detections) @@ -37,56 +34,62 @@ def _match_confs(confs, detections): return matches -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. - # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] - # results = conversion result; [[ Annotation, ... ], ... 
] - - results = [] - for input_, detections in zip(inputs, outputs["detection_out"]): - input_height, input_width = input_.shape[:2] - - confs = outputs["Softmax_189/Softmax_"] - detections = detections[0] - - conf_ids = _match_confs(confs, detections) - - image_results = [] - for i, det in enumerate(detections): - image_id = int(det[0]) # pylint: disable=unused-variable - label = int(det[1]) - conf = float(det[2]) - det_confs = confs[conf_ids[i]] - - if conf <= conf_thresh: - continue - - x = max(int(det[3] * input_width), 0) - y = max(int(det[4] * input_height), 0) - w = min(int(det[5] * input_width - x), input_width) - h = min(int(det[6] * input_height - y), input_height) - - image_results.append( - Bbox( - x, - y, - w, - h, - label=label, - attributes={"score": conf, "scores": list(map(float, det_confs))}, +class SsdVehicleDetectionModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (1, 1, N, 7); N is the number of detected bounding boxes. + # det = [image_id, label(class id), conf, x_min, y_min, x_max, y_max] + # results = conversion result; [[ Annotation, ... ], ... ] + + results = [] + for input_, detections in zip(inputs, outputs["detection_out"]): + input_height, input_width = input_.shape[:2] + + confs = outputs["Softmax_189/Softmax_"] + detections = detections[0] + + conf_ids = _match_confs(confs, detections) + + image_results = [] + for i, det in enumerate(detections): + image_id = int(det[0]) # pylint: disable=unused-variable + label = int(det[1]) + conf = float(det[2]) + det_confs = confs[conf_ids[i]] + + if conf <= conf_thresh: + continue + + x = max(int(det[3] * input_width), 0) + y = max(int(det[4] * input_height), 0) + w = min(int(det[5] * input_width - x), input_width) + h = min(int(det[6] * input_height - y), input_height) + + image_results.append( + Bbox( + x, + y, + w, + h, + label=label, + attributes={"score": conf, "scores": list(map(float, det_confs))}, + ) ) - ) - results.append(image_results) + results.append(image_results) - return results + return results + def get_categories(self): + # output categories - label map etc. -def get_categories(): - # output categories - label map etc. 
+ label_categories = LabelCategories() + label_categories.add("vehicle") - label_categories = LabelCategories() - label_categories.add("vehicle") + return {AnnotationType.label: label_categories} - return {AnnotationType.label: label_categories} + def resize(self, inputs): + return inputs diff --git a/tests/assets/rise/model_interp.py b/tests/assets/rise/model_interp.py index 91e26ede50..eebabea423 100644 --- a/tests/assets/rise/model_interp.py +++ b/tests/assets/rise/model_interp.py @@ -2,24 +2,31 @@ # # SPDX-License-Identifier: MIT +from datumaro.components.abstracts.model_interpreter import IModelInterpreter from datumaro.components.annotation import Label from datumaro.util.annotation_util import softmax -def normalize(inputs): - return inputs +class SsdMobilenetCocoDetectionModelInterpreter(IModelInterpreter): + def normalize(self, inputs): + return inputs + def process_outputs(self, inputs, outputs): + # inputs = model input; array or images; shape = (B, H, W, C) + # outputs = model output; shape = (B, 3); + # results = conversion result; + # [B x [a score for label0, a score for label1, a score for label2]]; -def process_outputs(inputs, outputs): - # inputs = model input; array or images; shape = (B, H, W, C) - # outputs = model output; shape = (B, 3); - # results = conversion result; - # [B x [a score for label0, a score for label1, a score for label2]]; - - return [ - [ - Label(label=label, attributes={"score": score}) - for label, score in enumerate(softmax(output)) + return [ + [ + Label(label=label, attributes={"score": score}) + for label, score in enumerate(softmax(output)) + ] + for output in outputs ] - for output in outputs - ] + + def get_categories(self): + return None + + def resize(self, inputs): + return inputs From 25a38e518d0c116044fd5134bf6390867e6f2d17 Mon Sep 17 00:00:00 2001 From: "Kim, Vinnam" Date: Wed, 14 Jun 2023 17:15:04 +0900 Subject: [PATCH 5/6] Did some refactoring for OpenvinoLauncher and AnnotationMatchers Signed-off-by: Kim, Vinnam --- src/datumaro/components/abstracts/merger.py | 11 +- .../components/annotations/matcher.py | 77 ++++++-- src/datumaro/components/comparator.py | 8 +- src/datumaro/components/merge/base.py | 7 +- .../components/merge/intersect_merge.py | 2 +- .../plugins/openvino_plugin/launcher.py | 170 ++++++++++++------ ...ustom_object_detection_gen3_atss_interp.py | 67 ++++++- tests/unit/test_openvino_launcher.py | 52 ++++++ 8 files changed, 316 insertions(+), 78 deletions(-) create mode 100644 tests/unit/test_openvino_launcher.py diff --git a/src/datumaro/components/abstracts/merger.py b/src/datumaro/components/abstracts/merger.py index 684b1f5ce2..714e209641 100644 --- a/src/datumaro/components/abstracts/merger.py +++ b/src/datumaro/components/abstracts/merger.py @@ -5,6 +5,7 @@ from abc import ABC, abstractmethod from typing import Dict, Optional, Sequence, Type +from datumaro.components.annotation import Annotation from datumaro.components.dataset_base import IDataset from datumaro.components.dataset_item_storage import ( DatasetItemStorage, @@ -12,10 +13,16 @@ ) from datumaro.components.media import MediaElement -__all__ = ["IMerger"] +__all__ = ["IMatcherContext", "IMergerContext"] -class IMerger(ABC): +class IMatcherContext(ABC): + @abstractmethod + def get_any_label_name(self, ann: Annotation, label_id: int) -> str: + raise NotImplementedError + + +class IMergerContext(IMatcherContext): @abstractmethod def merge_infos(self, sources: Sequence[IDataset]) -> Dict: raise NotImplementedError diff --git 
a/src/datumaro/components/annotations/matcher.py b/src/datumaro/components/annotations/matcher.py index 1ec0cbe762..a2e3090d6e 100644 --- a/src/datumaro/components/annotations/matcher.py +++ b/src/datumaro/components/annotations/matcher.py @@ -2,12 +2,13 @@ # # SPDX-License-Identifier: MIT -from typing import Optional +from typing import Optional, Union import numpy as np from attr import attrib, attrs -from datumaro.components.abstracts import IMerger +from datumaro.components.abstracts import IMergerContext +from datumaro.components.abstracts.merger import IMatcherContext from datumaro.util.annotation_util import ( OKS, approximate_line, @@ -18,7 +19,8 @@ ) __all__ = [ - "match_segments", + "match_segments_pair", + "match_segments_more_than_pair", "AnnotationMatcher", "LabelMatcher", "ShapeMatcher", @@ -34,13 +36,15 @@ ] -def match_segments( +def match_segments_pair( a_segms, b_segms, distance=segment_iou, dist_thresh=1.0, label_matcher=lambda a, b: a.label == b.label, ): + """Match segments and return pairs of the two matched segments""" + assert callable(distance), distance assert callable(label_matcher), label_matcher @@ -95,9 +99,61 @@ def match_segments( return matches, mispred, a_unmatched, b_unmatched +def match_segments_more_than_pair( + a_segms, + b_segms, + distance=segment_iou, + dist_thresh=1.0, + label_matcher=lambda a, b: a.label == b.label, +): + """Match segments and return sets of the matched segments which can be more than two""" + + assert callable(distance), distance + assert callable(label_matcher), label_matcher + + # a_matches: indices of b_segms matched to a bboxes + # b_matches: indices of a_segms matched to b bboxes + a_matches = -np.ones(len(a_segms), dtype=int) + b_matches = -np.ones(len(b_segms), dtype=int) + + distances = np.array([[distance(a, b) for b in b_segms] for a in a_segms]) + + # matches: boxes we succeeded to match completely + # mispred: boxes we succeeded to match, having label mismatch + matches = [] + mispred = [] + + # It needs len(a_segms) > 0 and len(b_segms) > 0 + if len(b_segms) > 0: + for a_idx, a_segm in enumerate(a_segms): + b_indices = np.argsort( + [not label_matcher(a_segm, b_segm) for b_segm in b_segms], kind="stable" + ) # prioritize those with same label, keep score order + for b_idx in b_indices: + d = distances[a_idx, b_idx] + if d < dist_thresh: + continue + + a_matches[a_idx] = b_idx + b_matches[b_idx] = a_idx + + b_segm = b_segms[b_idx] + + if label_matcher(a_segm, b_segm): + matches.append((a_segm, b_segm)) + else: + mispred.append((a_segm, b_segm)) + + # *_umatched: boxes of (*) we failed to match + a_unmatched = [a_segms[i] for i, m in enumerate(a_matches) if m < 0] + b_unmatched = [b_segms[i] for i, m in enumerate(b_matches) if m < 0] + + return matches, mispred, a_unmatched, b_unmatched + + @attrs(kw_only=True) class AnnotationMatcher: - _context: Optional[IMerger] = attrib(default=None) + _context: Optional[Union[IMatcherContext, IMergerContext]] = attrib(default=None) def match_annotations(self, sources): raise NotImplementedError() @@ -106,8 +162,8 @@ def match_annotations(self, sources): @attrs class LabelMatcher(AnnotationMatcher): def distance(self, a, b): - a_label = self._context._get_any_label_name(a, a.label) - b_label = self._context._get_any_label_name(b, b.label) + a_label = self._context.get_any_label_name(a, a.label) + b_label = self._context.get_any_label_name(b, b.label) return a_label == b_label def match_annotations(self, sources): @@ -118,6 +174,7 @@ def match_annotations(self, sources): class 
ShapeMatcher(AnnotationMatcher): pairwise_dist = attrib(converter=float, default=0.9) cluster_dist = attrib(converter=float, default=-1.0) + _match_segments = attrib(default=match_segments_pair) def match_annotations(self, sources): distance = self.distance @@ -152,7 +209,7 @@ def _has_same_source(cluster, extra_id): adjacent = {i: [] for i in id_segm} # id(sgm) -> [id(adj_sgm1), ...] for a_idx, src_a in enumerate(sources): for src_b in sources[a_idx + 1 :]: - matches, _, _, _ = match_segments( + matches, _, _, _ = self._match_segments( src_a, src_b, dist_thresh=pairwise_dist, @@ -194,8 +251,8 @@ def distance(self, a, b): return segment_iou(a, b) def label_matcher(self, a, b): - a_label = self._context._get_any_label_name(a, a.label) - b_label = self._context._get_any_label_name(b, b.label) + a_label = self._context.get_any_label_name(a, a.label) + b_label = self._context.get_any_label_name(b, b.label) return a_label == b_label diff --git a/src/datumaro/components/comparator.py b/src/datumaro/components/comparator.py index 976d9a4d6a..295261f046 100644 --- a/src/datumaro/components/comparator.py +++ b/src/datumaro/components/comparator.py @@ -14,7 +14,7 @@ from datumaro.cli.util.project import generate_next_file_name from datumaro.components.annotation import AnnotationType, LabelCategories -from datumaro.components.annotations.matcher import LineMatcher, PointsMatcher, match_segments +from datumaro.components.annotations.matcher import LineMatcher, PointsMatcher, match_segments_pair from datumaro.components.dataset import Dataset from datumaro.components.operations import ( compute_ann_statistics, @@ -69,7 +69,7 @@ def match_labels(self, item_a, item_b): def _match_segments(self, t, item_a, item_b): a_boxes = self._get_ann_type(t, item_a) b_boxes = self._get_ann_type(t, item_b) - return match_segments(a_boxes, b_boxes, dist_thresh=self.iou_threshold) + return match_segments_pair(a_boxes, b_boxes, dist_thresh=self.iou_threshold) def match_polygons(self, item_a, item_b): return self._match_segments(AnnotationType.polygon, item_a, item_b) @@ -93,7 +93,7 @@ def match_points(self, item_a, item_b): instance_map[id(ann)] = [inst, inst_bbox] matcher = PointsMatcher(instance_map=instance_map) - return match_segments( + return match_segments_pair( a_points, b_points, dist_thresh=self.iou_threshold, distance=matcher.distance ) @@ -103,7 +103,7 @@ def match_lines(self, item_a, item_b): matcher = LineMatcher() - return match_segments( + return match_segments_pair( a_lines, b_lines, dist_thresh=self.iou_threshold, distance=matcher.distance ) diff --git a/src/datumaro/components/merge/base.py b/src/datumaro/components/merge/base.py index 3060ebc3c0..c0af18a282 100644 --- a/src/datumaro/components/merge/base.py +++ b/src/datumaro/components/merge/base.py @@ -6,7 +6,7 @@ from collections import OrderedDict from typing import Dict, Optional, Sequence, Type -from datumaro.components.abstracts.merger import IMerger +from datumaro.components.abstracts.merger import IMergerContext from datumaro.components.cli_plugin import CliPlugin from datumaro.components.dataset_base import IDataset from datumaro.components.dataset_item_storage import DatasetItemStorageDatasetView @@ -20,7 +20,7 @@ from datumaro.util import dump_json_file -class Merger(IMerger, CliPlugin): +class Merger(IMergerContext, CliPlugin): """Merge multiple datasets into one dataset""" def __init__(self, **options): @@ -104,3 +104,6 @@ def save_merge_report(self, path: str) -> None: os.makedirs(os.path.dirname(path), exist_ok=True) 
dump_json_file(path, errors, indent=True) + + def get_any_label_name(self, ann, label_id): + raise NotImplementedError diff --git a/src/datumaro/components/merge/intersect_merge.py b/src/datumaro/components/merge/intersect_merge.py index 12a9439249..7c87fc6bbe 100644 --- a/src/datumaro/components/merge/intersect_merge.py +++ b/src/datumaro/components/merge/intersect_merge.py @@ -612,7 +612,7 @@ def _get_src_label_name(self, ann, label_id): self._dataset_map[dataset_id][0].categories()[AnnotationType.label].items[label_id].name ) - def _get_any_label_name(self, ann, label_id): + def get_any_label_name(self, ann, label_id): if label_id is None: return None try: diff --git a/src/datumaro/plugins/openvino_plugin/launcher.py b/src/datumaro/plugins/openvino_plugin/launcher.py index f12a6fe4c4..550e6c05c6 100644 --- a/src/datumaro/plugins/openvino_plugin/launcher.py +++ b/src/datumaro/plugins/openvino_plugin/launcher.py @@ -9,6 +9,7 @@ import os.path as osp import shutil import urllib +from dataclasses import dataclass, fields from importlib.util import module_from_spec, spec_from_file_location from typing import Dict, Optional @@ -65,6 +66,107 @@ def copy_model(model_dir, model): model["interpreter"] = osp.basename(model["interpreter"]) +@dataclass +class OpenvinoModelInfo: + interpreter: Optional[str] + description: Optional[str] + weights: Optional[str] + model_dir: Optional[str] + + def validate(self): + """Validate integrity of the member variables""" + + def _validate(key: str): + path = getattr(self, key) + if not osp.isfile(path): + path = osp.join(self.model_dir, path) + if not osp.isfile(path): + raise DatumaroError(f'Failed to open model {key} file "{path}"') + setattr(self, key, path) + + for field in fields(self): + if field.name != "model_dir": + _validate(field.name) + + +@dataclass +class BuiltinOpenvinoModelInfo(OpenvinoModelInfo): + downloadable_models = { + "clip_text_ViT-B_32", + "clip_visual_ViT-B_32", + "googlenet-v4-tf", + } + + @classmethod + def create_from_model_name(cls, model_name: str) -> "BuiltinOpenvinoModelInfo": + openvino_plugin_samples_dir = get_samples_path() + interpreter = osp.join(openvino_plugin_samples_dir, model_name + "_interp.py") + interpreter = interpreter if osp.exists(interpreter) else interpreter + + model_dir = DATUMARO_CACHE_DIR + + # Please visit open-model-zoo repository for OpenVINO public models if you are interested in + # https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/index.md + url_folder = "https://storage.openvinotoolkit.org/repositories/datumaro/models/" + + description = osp.join(model_dir, model_name + ".xml") + if not osp.exists(description): + description = ( + cls._download_file(osp.join(url_folder, model_name + ".xml"), description) + if model_name in cls.downloadable_models + else None + ) + + weights = osp.join(model_dir, model_name + ".bin") + if not osp.exists(weights): + weights = ( + cls._download_file(osp.join(url_folder, model_name + ".bin"), weights) + if model_name in cls.downloadable_models + else None + ) + + return cls( + interpreter=interpreter, + description=description, + weights=weights, + model_dir=model_dir, + ) + + @staticmethod + def _download_file(url: str, file_root: str) -> str: + log.info('Downloading: "{}" to {}\n'.format(url, file_root)) + req = urllib.request.Request(url) + with urllib.request.urlopen(req) as source, open(file_root, "wb") as output: # nosec B310 + with tqdm( + total=int(source.info().get("Content-Length")), + ncols=80, + unit="iB", + unit_scale=True, + 
unit_divisor=1024, + ) as loop: + while True: + buffer = source.read(8192) + if not buffer: + break + + output.write(buffer) + loop.update(len(buffer)) + return file_root + + def override(self, other: OpenvinoModelInfo) -> None: + """Override builtin model variables to other""" + + def _apply(key: str) -> None: + other_item = getattr(other, key) + self_item = getattr(self, key) + if other_item is None and self_item: + log.info(f"Override description with the builtin model {key}: {self.description}.") + setattr(other, key, self_item) + + for field in fields(self): + _apply(field.name) + + class OpenvinoLauncher(Launcher): cli_plugin = _OpenvinoImporter ALLOWED_CHANNEL_FORMATS = {"NCHW", "NHWC"} @@ -82,55 +184,30 @@ def __init__( channel_format: str = "NCHW", to_rgb: bool = True, ): + model_info = OpenvinoModelInfo( + interpreter=interpreter, + description=description, + weights=weights, + model_dir=model_dir, + ) if model_name: - model_dir = DATUMARO_CACHE_DIR - - # Please visit open-model-zoo repository for OpenVINO public models if you are interested in - # https://github.com/openvinotoolkit/open_model_zoo/blob/master/models/public/index.md - url_folder = "https://storage.openvinotoolkit.org/repositories/datumaro/models/" - - description = osp.join(model_dir, model_name + ".xml") - if not osp.exists(description): - cached_description_url = osp.join(url_folder, model_name + ".xml") - log.info('Downloading: "{}" to {}\n'.format(cached_description_url, description)) - self._download_file(cached_description_url, description) - - weights = osp.join(model_dir, model_name + ".bin") - if not osp.exists(weights): - cached_weights_url = osp.join(url_folder, model_name + ".bin") - log.info('Downloading: "{}" to {}\n'.format(cached_weights_url, weights)) - self._download_file(cached_weights_url, weights) - - if not interpreter: - openvino_plugin_samples_dir = get_samples_path() - interpreter = osp.join(openvino_plugin_samples_dir, model_name + "_interp.py") + builtin_model_info = BuiltinOpenvinoModelInfo.create_from_model_name(model_name) + builtin_model_info.override(model_info) if not model_dir: model_dir = "" - if not osp.isfile(description): - description = osp.join(model_dir, description) - if not osp.isfile(description): - raise DatumaroError('Failed to open model description file "%s"' % (description)) - - if not osp.isfile(weights): - weights = osp.join(model_dir, weights) - if not osp.isfile(weights): - raise DatumaroError('Failed to open model weights file "%s"' % (weights)) - - if not osp.isfile(interpreter): - interpreter = osp.join(model_dir, interpreter) - if not osp.isfile(interpreter): - raise DatumaroError('Failed to open model interpreter script file "%s"' % (interpreter)) + model_info.validate() + self.model_info = model_info - self._interpreter = self._load_interpreter(file_path=interpreter) + self._interpreter = self._load_interpreter(file_path=model_info.interpreter) self._device = device or "CPU" self._output_blobs = output_layers self._compile_model_config = compile_model_config self._core = Core() - self._network = self._core.read_model(description, weights) + self._network = self._core.read_model(model_info.description, model_info.weights) self._check_model_support(self._network, self._device) self._load_executable_net() @@ -158,25 +235,6 @@ def _load_interpreter(self, file_path: str) -> IModelInterpreter: raise DatumaroError(f"{file_path} has no class derived from IModelInterpreter.") - def _download_file(self, url: str, file_root: str): - req = urllib.request.Request(url) 
- with urllib.request.urlopen(req) as source, open(file_root, "wb") as output: # nosec B310 - with tqdm( - total=int(source.info().get("Content-Length")), - ncols=80, - unit="iB", - unit_scale=True, - unit_divisor=1024, - ) as loop: - while True: - buffer = source.read(8192) - if not buffer: - break - - output.write(buffer) - loop.update(len(buffer)) - return 0 - def _check_model_support(self, net, device): not_supported_layers = set( name for name, dev in self._core.query_model(net, device).items() if not dev diff --git a/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py b/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py index 6aeb0a3251..9381fbc82c 100644 --- a/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py +++ b/src/datumaro/plugins/openvino_plugin/samples/otx_custom_object_detection_gen3_atss_interp.py @@ -2,20 +2,81 @@ # # SPDX-License-Identifier: MIT +import cv2 +import numpy as np + from datumaro.components.abstracts import IModelInterpreter +from datumaro.components.annotation import Bbox __all__ = ["OTXAtssModelInterpreter"] class OTXAtssModelInterpreter(IModelInterpreter): + h_model = 736 + w_model = 992 + + def __init__(self) -> None: + self._scales = [] + def get_categories(self): return None def process_outputs(self, inputs, outputs): - return outputs + scale = self._scales.pop() + r_scale = 1 / scale + + def _create_anns(bboxes, labels): + idx = 0 + anns = [] + for bbox, label in zip(bboxes, labels): + points = r_scale * bbox[:4] + x1, y1, x2, y2 = points + conf = bbox[4] + anns.append( + Bbox( + x=x1, + y=y1, + w=x2 - x1, + h=y2 - y1, + id=idx, + label=label, + attributes={"score": conf}, + ) + ) + idx += 1 + return anns + + return [ + _create_anns(bboxes, labels) + for bboxes, labels in zip(outputs["boxes"], outputs["labels"]) + ] def normalize(self, inputs): return inputs - def resize(self, inputs): - return inputs + def resize(self, inputs: np.ndarray): + assert len(inputs.shape) == 4 + + h_img, w_img = inputs.shape[1:3] + + scale = min(self.h_model / h_img, self.w_model / w_img) + + h_resize = min(int(scale * h_img), self.h_model) + w_resize = min(int(scale * w_img), self.w_model) + + batch_size = inputs.shape[0] + num_channel = inputs.shape[-1] + + resized_inputs = np.zeros( + (batch_size, self.h_model, self.w_model, num_channel), dtype=np.uint8 + ) + + for i in range(batch_size): + resized_inputs[i, :h_resize, :w_resize, :] = cv2.resize( + inputs[i], + (w_resize, h_resize), + interpolation=cv2.INTER_LINEAR, + ) + + self._scales += [scale] + return resized_inputs diff --git a/tests/unit/test_openvino_launcher.py b/tests/unit/test_openvino_launcher.py new file mode 100644 index 0000000000..afc5357363 --- /dev/null +++ b/tests/unit/test_openvino_launcher.py @@ -0,0 +1,52 @@ +# Copyright (C) 2023 Intel Corporation +# +# SPDX-License-Identifier: MIT + + +from unittest.mock import patch + +import numpy as np +import pytest + +from datumaro.plugins.openvino_plugin.launcher import OpenvinoLauncher + +from ..requirements import Requirements, mark_requirement + +from tests.utils.assets import get_test_asset_path + + +class OpenvinoLauncherTest: + @pytest.fixture + def fxt_input(self): + return np.zeros([1, 10, 10, 3], dtype=np.uint8) + + @pytest.fixture + def fxt_normal(self): + model_dir = get_test_asset_path("rise") + return OpenvinoLauncher( + interpreter="model_interp.py", + description="model.xml", + weights="model.bin", + model_dir=model_dir, 
+ ) + + @pytest.fixture + def fxt_override_interpreter_by_builtin_model_name(self): + model_dir = get_test_asset_path("rise") + return OpenvinoLauncher( + interpreter="model_interp.py", + description="model.xml", + weights="model.bin", + model_dir=model_dir, + model_name="otx_custom_object_detection_gen3_atss", + ) + + @pytest.fixture(params=["fxt_normal", "fxt_override_interpreter_by_builtin_model_name"]) + def fxt_launcher(self, request): + return request.getfixturevalue(request.param) + + @mark_requirement(Requirements.DATUM_GENERAL_REQ) + def test_launchers(self, fxt_launcher, fxt_input): + with patch.object(fxt_launcher._request, "infer") as mock_request: + fxt_launcher.infer(fxt_input) + mock_request.assert_called() From b1dbb4843a2be946660317a147def98ed173cd80 Mon Sep 17 00:00:00 2001 From: "Kim, Vinnam" Date: Mon, 19 Jun 2023 17:49:14 +0900 Subject: [PATCH 6/6] Update CHANGELOG.md Signed-off-by: Kim, Vinnam --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 21afddda8d..e6fa296a6f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## \[Unreleased\] ### New features +- Add OTX ATSS detector model interpreter & refactor interfaces + () ### Enhancements - Enhance import performance for built-in plugins
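Editor's illustrative sketch (not part of the patch series above): a minimal custom interpreter written against the IModelInterpreter interface introduced in PATCH 4 and discovered by OpenvinoLauncher._load_interpreter() in PATCH 5. The file name, model paths, and label set below are hypothetical placeholders, and the example only assumes the four abstract methods shown in model_interpreter.py.

    # my_classifier_interp.py -- hypothetical interpreter script for a classification model
    import numpy as np

    from datumaro.components.abstracts import IModelInterpreter
    from datumaro.components.annotation import AnnotationType, Label, LabelCategories
    from datumaro.util.annotation_util import softmax


    class MyClassifierInterpreter(IModelInterpreter):
        def normalize(self, inputs):
            # This sketch assumes the model consumes raw uint8 images as-is.
            return inputs

        def resize(self, inputs):
            # No extra resizing beyond what the launcher already performs.
            return inputs

        def process_outputs(self, inputs, outputs):
            # Convert per-image score vectors into Datumaro Label annotations.
            return [
                [
                    Label(
                        label=int(np.argmax(scores)),
                        attributes={"scores": softmax(scores).tolist()},
                    )
                ]
                for scores in outputs
            ]

        def get_categories(self):
            label_categories = LabelCategories()
            for name in ("cat", "dog"):  # hypothetical label set
                label_categories.add(name)
            return {AnnotationType.label: label_categories}

Usage would then follow the pattern exercised in tests/unit/test_openvino_launcher.py: point OpenvinoLauncher at the script, and the launcher instantiates the single IModelInterpreter subclass it finds there, e.g. OpenvinoLauncher(description="model.xml", weights="model.bin", interpreter="my_classifier_interp.py", model_dir="/path/to/model") with all paths being placeholders.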