
Commit 11c0ee5

Removing commented code
Signed-off-by: Eric Kerfoot <17726042+ericspod@users.noreply.github.com>
1 parent: 2a88d83

2 files changed (+1, -28 lines)

monai/data/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@
 from .test_time_augmentation import TestTimeAugmentation
 from .thread_buffer import ThreadBuffer, ThreadDataLoader
 from .torchscript_utils import load_net_with_metadata, save_net_with_metadata
-from .utils import (  # PICKLE_KEY_SUFFIX,
+from .utils import (
     affine_to_spacing,
     compute_importance_map,
     compute_shape_offset,

monai/data/utils.py

Lines changed: 0 additions & 27 deletions
@@ -92,7 +92,6 @@
     "remove_keys",
     "remove_extra_metadata",
     "get_extra_metadata_keys",
-    # "PICKLE_KEY_SUFFIX",
     "is_no_channel",
 ]

@@ -417,32 +416,6 @@ def dev_collate(batch, level: int = 1, logger_name: str = "dev_collate"):
     return


-# PICKLE_KEY_SUFFIX = TraceKeys.KEY_SUFFIX
-
-
-# def pickle_operations(data, key=PICKLE_KEY_SUFFIX, is_encode: bool = True):
-#     """
-#     Applied_operations are dictionaries with varying sizes, this method converts them to bytes so that we can (de-)collate.
-
-#     Args:
-#         data: a list or dictionary with substructures to be pickled/unpickled.
-#         key: the key suffix for the target substructures, defaults to "_transforms" (`data.utils.PICKLE_KEY_SUFFIX`).
-#         is_encode: whether it's encoding using pickle.dumps (True) or decoding using pickle.loads (False).
-#     """
-#     if isinstance(data, Mapping):
-#         data = dict(data)
-#         for k in data:
-#             if f"{k}".endswith(key):
-#                 if is_encode and not isinstance(data[k], bytes):
-#                     data[k] = pickle.dumps(data[k], 0)
-#                 if not is_encode and isinstance(data[k], bytes):
-#                     data[k] = pickle.loads(data[k])
-#         return {k: pickle_operations(v, key=key, is_encode=is_encode) for k, v in data.items()}
-#     elif isinstance(data, (list, tuple)):
-#         return [pickle_operations(item, key=key, is_encode=is_encode) for item in data]
-#     return data
-
-
 def collate_meta_tensor_fn(batch, *, collate_fn_map=None):
     """
     Collate a sequence of meta tensor into a single batched metatensor. This is called by `collage_meta_tensor`
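For context, the helper removed above recursively pickled (or unpickled) any values whose keys end with the operation-trace suffix, so that variable-size applied_operations metadata could be (de-)collated. Below is a minimal, self-contained sketch of that behaviour, for illustration only, not the current monai.data API: the removed TraceKeys.KEY_SUFFIX constant is replaced with the literal "_transforms" given in the docstring, and the sample item dictionary is hypothetical.

    import pickle
    from collections.abc import Mapping

    # Assumed literal value of the removed PICKLE_KEY_SUFFIX (the docstring above says "_transforms").
    PICKLE_KEY_SUFFIX = "_transforms"


    def pickle_operations(data, key=PICKLE_KEY_SUFFIX, is_encode: bool = True):
        """Recursively pickle (or unpickle) values whose keys end with `key`, mirroring the removed helper."""
        if isinstance(data, Mapping):
            data = dict(data)
            for k in data:
                if f"{k}".endswith(key):
                    if is_encode and not isinstance(data[k], bytes):
                        data[k] = pickle.dumps(data[k], 0)  # protocol 0, as in the removed code
                    if not is_encode and isinstance(data[k], bytes):
                        data[k] = pickle.loads(data[k])
            return {k: pickle_operations(v, key=key, is_encode=is_encode) for k, v in data.items()}
        if isinstance(data, (list, tuple)):
            return [pickle_operations(item, key=key, is_encode=is_encode) for item in data]
        return data


    # Hypothetical sample: per-item metadata whose "image_transforms" entry has a variable-size structure.
    item = {"image": [1, 2, 3], "image_transforms": [{"class": "Flip", "extra_info": {"axis": 0}}]}
    encoded = pickle_operations(item, is_encode=True)    # "image_transforms" value becomes bytes
    decoded = pickle_operations(encoded, is_encode=False)  # bytes are restored to the original structure
    assert decoded == item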
