diff --git a/monai/apps/datasets.py b/monai/apps/datasets.py index 54829cb946..cd1713f9e8 100644 --- a/monai/apps/datasets.py +++ b/monai/apps/datasets.py @@ -110,7 +110,7 @@ def __init__( self.section = section self.val_frac = val_frac self.test_frac = test_frac - self.set_random_state(seed=seed) + self.set_random_generator(seed=seed) tarfile_name = root_dir / self.compressed_file_name dataset_dir = root_dir / self.dataset_folder_name self.num_class = 0 @@ -306,7 +306,7 @@ def __init__( raise ValueError("Root directory root_dir must be a directory.") self.section = section self.val_frac = val_frac - self.set_random_state(seed=seed) + self.set_random_generator(seed=seed) if task not in self.resource: raise ValueError(f"Unsupported task: {task}, available options are: {list(self.resource.keys())}.") dataset_dir = root_dir / task @@ -530,7 +530,7 @@ def __init__( self.ref_series_uid_tag = ref_series_uid_tag self.ref_sop_uid_tag = ref_sop_uid_tag - self.set_random_state(seed=seed) + self.set_random_generator(seed=seed) download_dir = os.path.join(root_dir, collection) load_tags = list(specific_tags) load_tags += [modality_tag] diff --git a/monai/apps/detection/transforms/dictionary.py b/monai/apps/detection/transforms/dictionary.py index 52b1a7d15d..440dfd1501 100644 --- a/monai/apps/detection/transforms/dictionary.py +++ b/monai/apps/detection/transforms/dictionary.py @@ -50,6 +50,7 @@ from monai.utils import InterpolateMode, NumpyPadMode, ensure_tuple, ensure_tuple_rep, fall_back_tuple from monai.utils.enums import PostFix, TraceKeys from monai.utils.type_conversion import convert_data_type, convert_to_tensor +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration __all__ = [ "StandardizeEmptyBoxd", @@ -566,9 +567,11 @@ def __init__( self.align_corners = ensure_tuple_rep(align_corners, len(self.image_keys)) self.keep_size = keep_size - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> 
RandZoomBoxd: - super().set_random_state(seed, state) - self.rand_zoom.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandZoomBoxd: + super().set_random_generator(seed, generator=generator) + self.rand_zoom.set_random_generator(seed, generator=generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]: @@ -735,8 +738,10 @@ def __init__( self.flipper = Flip(spatial_axis=spatial_axis) self.box_flipper = FlipBox(spatial_axis=spatial_axis) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandFlipBoxd: - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandFlipBoxd: + super().set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]: @@ -1177,8 +1182,8 @@ def randomize( # type: ignore image_size, fg_indices_, bg_indices_, - self.R, - self.allow_smaller, + allow_smaller=self.allow_smaller, + generator=self.R, ) def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> list[dict[Hashable, torch.Tensor]]: @@ -1371,7 +1376,7 @@ def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> Mapping[Hashable, t return d def randomize(self, data: Any | None = None) -> None: - self._rand_k = self.R.randint(self.max_k) + 1 + self._rand_k = self.R.integers(self.max_k) + 1 super().randomize(None) def inverse(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]: diff --git a/monai/apps/nuclick/transforms.py b/monai/apps/nuclick/transforms.py index f22ea764be..eb7914bd42 100644 --- a/monai/apps/nuclick/transforms.py +++ b/monai/apps/nuclick/transforms.py @@ -345,7 +345,7 @@ def _seed_point(self, label): indices = np.argwhere(convert_to_numpy(label) > 0) if len(indices)
> 0: - index = self.R.randint(0, len(indices)) + index = self.R.integers(0, len(indices)) return indices[index, 0], indices[index, 1] return None @@ -382,8 +382,8 @@ def exclusion_map(self, others, dtype, jitter_range, drop_rate): x = int(math.floor(x)) y = int(math.floor(y)) if jitter_range: - x = x + self.R.randint(low=-jitter_range, high=jitter_range) - y = y + self.R.randint(low=-jitter_range, high=jitter_range) + x = x + self.R.integers(low=-jitter_range, high=jitter_range) + y = y + self.R.integers(low=-jitter_range, high=jitter_range) x = min(max(0, x), max_x) y = min(max(0, y), max_y) point_mask[x, y] = 1 diff --git a/monai/apps/reconstruction/transforms/array.py b/monai/apps/reconstruction/transforms/array.py index 911d7a06bb..6c20596960 100644 --- a/monai/apps/reconstruction/transforms/array.py +++ b/monai/apps/reconstruction/transforms/array.py @@ -98,7 +98,7 @@ def randomize_choose_acceleration(self) -> Sequence[float]: lines to exclude from under-sampling (2) acceleration: chosen acceleration factor """ - choice = self.R.randint(0, len(self.accelerations)) + choice = self.R.integers(0, len(self.accelerations)) center_fraction = self.center_fractions[choice] acceleration = self.accelerations[choice] return center_fraction, acceleration @@ -257,7 +257,7 @@ def __call__(self, kspace: NdarrayOrTensor) -> Sequence[Tensor]: # Determine acceleration rate by adjusting for the # number of low frequencies adjusted_accel = (acceleration * (num_low_freqs - num_cols)) / (num_low_freqs * acceleration - num_cols) - offset = self.R.randint(0, round(adjusted_accel)) + offset = self.R.integers(0, round(adjusted_accel)) accel_samples = np.arange(offset, num_cols - 1, adjusted_accel) accel_samples = np.around(accel_samples).astype(np.uint) diff --git a/monai/apps/reconstruction/transforms/dictionary.py b/monai/apps/reconstruction/transforms/dictionary.py index c166740768..71ef4a29cc 100644 --- a/monai/apps/reconstruction/transforms/dictionary.py +++ 
b/monai/apps/reconstruction/transforms/dictionary.py @@ -26,6 +26,7 @@ from monai.transforms.transform import MapTransform, RandomizableTransform from monai.utils import FastMRIKeys from monai.utils.type_conversion import convert_to_tensor +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration class ExtractDataKeyFromMetaKeyd(MapTransform): @@ -114,11 +115,11 @@ def __init__( is_complex=is_complex, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandomKspaceMaskd: - super().set_random_state(seed, state) - self.masker.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.masker.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, Tensor]: @@ -182,11 +183,11 @@ def __init__( is_complex=is_complex, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> EquispacedKspaceMaskd: - super().set_random_state(seed, state) - self.masker.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.masker.set_random_generator(seed, generator) return self diff --git a/monai/config/type_definitions.py b/monai/config/type_definitions.py index 57454a94e1..0f21cac930 100644 --- a/monai/config/type_definitions.py +++ b/monai/config/type_definitions.py @@ -12,7 +12,7 @@ from __future__ import annotations import os -from typing import Collection, Hashable, Iterable, Sequence, TypeVar, Union +from typing import Collection, Hashable, Iterable, Sequence, SupportsIndex, Tuple, TypeVar, Union import numpy as np import torch @@ -83,3 +83,8 @@ #: SequenceStr # string or a sequence of strings for `mode` types. 
SequenceStr = Union[Sequence[str], str] + +Shape = Tuple[int, ...] + +# Anything that can be coerced to a shape tuple +ShapeLike = Union[SupportsIndex, Sequence[SupportsIndex]] diff --git a/monai/data/dataloader.py b/monai/data/dataloader.py index 336f55b8c4..c593f046a1 100644 --- a/monai/data/dataloader.py +++ b/monai/data/dataloader.py @@ -54,7 +54,7 @@ class DataLoader(_TorchDataLoader): class RandomDataset(torch.utils.data.Dataset, Randomizable): def __getitem__(self, index): - return self.R.randint(0, 1000, (1,)) + return self.R.integers(0, 1000, (1,)) def __len__(self): return 16 diff --git a/monai/data/dataset.py b/monai/data/dataset.py index 4f2061426e..4301687a0d 100644 --- a/monai/data/dataset.py +++ b/monai/data/dataset.py @@ -1020,7 +1020,7 @@ def __init__( runtime_cache=False, ) -> None: if shuffle: - self.set_random_state(seed=seed) + self.set_random_generator(seed=seed) self.shuffle = shuffle self._start_pos: int = 0 @@ -1354,7 +1354,7 @@ def __init__( """ items = [(img, img_transform), (seg, seg_transform), (labels, label_transform)] - self.set_random_state(seed=get_seed()) + self.set_random_generator(seed=get_seed()) datasets = [Dataset(x[0], x[1]) for x in items if x[0] is not None] self.dataset = datasets[0] if len(datasets) == 1 else ZipDataset(datasets) @@ -1364,7 +1364,7 @@ def __len__(self) -> int: return len(self.dataset) def randomize(self, data: Any | None = None) -> None: - self._seed = self.R.randint(MAX_SEED, dtype="uint32") + self._seed = self.R.integers(MAX_SEED, dtype="uint32") def __getitem__(self, index: int): self.randomize() @@ -1373,10 +1373,10 @@ def __getitem__(self, index: int): for dataset in self.dataset.data: transform = getattr(dataset, "transform", None) if isinstance(transform, Randomizable): - transform.set_random_state(seed=self._seed) + transform.set_random_generator(seed=self._seed) transform = getattr(self.dataset, "transform", None) if isinstance(transform, Randomizable): - 
transform.set_random_state(seed=self._seed) + transform.set_random_generator(seed=self._seed) return self.dataset[index] diff --git a/monai/data/image_dataset.py b/monai/data/image_dataset.py index 6c8ddcf8de..5cd0085fea 100644 --- a/monai/data/image_dataset.py +++ b/monai/data/image_dataset.py @@ -90,14 +90,14 @@ def __init__( self.image_only = image_only self.transform_with_metadata = transform_with_metadata self.loader = LoadImage(reader, image_only, dtype, *args, **kwargs) - self.set_random_state(seed=get_seed()) + self.set_random_generator(seed=get_seed()) self._seed = 0 # transform synchronization seed def __len__(self) -> int: return len(self.image_files) def randomize(self, data: Any | None = None) -> None: - self._seed = self.R.randint(MAX_SEED, dtype="uint32") + self._seed = self.R.integers(MAX_SEED, dtype="uint32") def __getitem__(self, index: int): self.randomize() @@ -116,7 +116,7 @@ def __getitem__(self, index: int): # apply the transforms if self.transform is not None: if isinstance(self.transform, Randomizable): - self.transform.set_random_state(seed=self._seed) + self.transform.set_random_generator(seed=self._seed) if self.transform_with_metadata: img, meta_data = apply_transform(self.transform, (img, meta_data), map_items=False, unpack_items=True) @@ -125,7 +125,7 @@ def __getitem__(self, index: int): if self.seg_files is not None and self.seg_transform is not None: if isinstance(self.seg_transform, Randomizable): - self.seg_transform.set_random_state(seed=self._seed) + self.seg_transform.set_random_generator(seed=self._seed) if self.transform_with_metadata: seg, seg_meta_data = apply_transform( diff --git a/monai/data/iterable_dataset.py b/monai/data/iterable_dataset.py index 4c476b2f9d..c5ecff1a75 100644 --- a/monai/data/iterable_dataset.py +++ b/monai/data/iterable_dataset.py @@ -127,12 +127,12 @@ def __iter__(self): Multiple dataloader workers sharing this dataset will generate identical item sequences. 
""" self.seed += 1 - super().set_random_state(seed=self.seed) # make all workers in sync + super().set_random_generator(seed=self.seed) # make all workers in sync for _ in range(self.epochs) if self.epochs >= 0 else iter(int, 1): yield from IterableDataset(self.generate_item(), transform=self.transform) def randomize(self, size: int) -> None: - self._idx = self.R.randint(size) + self._idx = self.R.integers(size) class CSVIterableDataset(IterableDataset): diff --git a/monai/data/test_time_augmentation.py b/monai/data/test_time_augmentation.py index 23572dcef4..201ad01401 100644 --- a/monai/data/test_time_augmentation.py +++ b/monai/data/test_time_augmentation.py @@ -98,7 +98,7 @@ class TestTimeAugmentation: model = UNet(...).to(device) transform = Compose([RandAffined(keys, ...), ...]) - transform.set_random_state(seed=123) # ensure deterministic evaluation + transform.set_random_generator(seed=123) # ensure deterministic evaluation tt_aug = TestTimeAugmentation( transform, batch_size=5, num_workers=0, inferrer_fn=model, device=device diff --git a/monai/data/utils.py b/monai/data/utils.py index 0880d44b64..bfbf46ae3d 100644 --- a/monai/data/utils.py +++ b/monai/data/utils.py @@ -51,6 +51,8 @@ look_up_option, optional_import, ) +from monai.utils.deprecate_utils import deprecated_arg +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration, _handle_legacy_random_state pd, _ = optional_import("pandas") DataFrame, _ = optional_import("pandas", name="DataFrame") @@ -104,8 +106,14 @@ AFFINE_TOL = 1e-3 +@deprecated_arg( + "rand_state", since="1.3.0", removed="1.5.0", new_name="generator", msg_suffix="Please use `generator` instead." 
+) def get_random_patch( - dims: Sequence[int], patch_size: Sequence[int], rand_state: np.random.RandomState | None = None + dims: Sequence[int], + patch_size: Sequence[int], + rand_state: np.random.RandomState | None = None, + generator: SupportsRandomGeneration | None = None, ) -> tuple[slice, ...]: """ Returns a tuple of slices to define a random patch in an array of shape `dims` with size `patch_size` or the as @@ -121,9 +129,12 @@ def get_random_patch( (tuple of slice): a tuple of slice objects defining the patch """ + generator = _handle_legacy_random_state( + rand_state=rand_state, generator=generator, return_legacy_default_random=True + ) + # choose the minimal corner of the patch - rand_int = np.random.randint if rand_state is None else rand_state.randint - min_corner = tuple(rand_int(0, ms - ps + 1) if ms > ps else 0 for ms, ps in zip(dims, patch_size)) + min_corner = tuple(generator.integers(0, ms - ps + 1) if ms > ps else 0 for ms, ps in zip(dims, patch_size)) # create the slices for each dimension which define the patch in the source array return tuple(slice(mc, mc + ps) for mc, ps in zip(min_corner, patch_size)) @@ -703,8 +714,8 @@ def set_rnd(obj, seed: int) -> int: return seed if _seed == seed else seed + 1 # return a different seed if there are randomizable items if not hasattr(obj, "__dict__"): return seed # no attribute - if hasattr(obj, "set_random_state"): - obj.set_random_state(seed=seed % MAX_SEED) + if hasattr(obj, "set_random_generator"): + obj.set_random_generator(seed=seed % MAX_SEED) return seed + 1 # a different seed for the next component for key in obj.__dict__: if key.startswith("__"): # skip the private methods diff --git a/monai/data/wsi_datasets.py b/monai/data/wsi_datasets.py index 3488029a7a..4f7ae688f9 100644 --- a/monai/data/wsi_datasets.py +++ b/monai/data/wsi_datasets.py @@ -239,7 +239,7 @@ def __init__( **kwargs, ) self.overlap = overlap - self.set_random_state(seed) + self.set_random_generator(seed) # Set the offset config 
self.random_offset = False if isinstance(offset, str): @@ -281,7 +281,7 @@ def _get_offset(self, sample): offset_limits = tuple((-s, s) for s in self._get_size(sample)) else: offset_limits = self.offset_limits - return tuple(self.R.randint(low, high) for low, high in offset_limits) + return tuple(self.R.integers(low, high) for low, high in offset_limits) return self.offset def _evaluate_patch_locations(self, sample): diff --git a/monai/transforms/compose.py b/monai/transforms/compose.py index 236d3cc4c5..ae2ed4a7e9 100644 --- a/monai/transforms/compose.py +++ b/monai/transforms/compose.py @@ -38,6 +38,7 @@ apply_transform, ) from monai.utils import MAX_SEED, TraceKeys, TraceStatusKeys, ensure_tuple, get_seed +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration logger = get_logger(__name__) @@ -248,19 +249,21 @@ def __init__( self.map_items = map_items self.unpack_items = unpack_items self.log_stats = log_stats - self.set_random_state(seed=get_seed()) + self.set_random_generator(seed=get_seed()) self.overrides = overrides @LazyTransform.lazy.setter # type: ignore def lazy(self, val: bool): self._lazy = val - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Compose: - super().set_random_state(seed=seed, state=state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Compose: + super().set_random_generator(seed=seed, generator=generator) for _transform in self.transforms: if not isinstance(_transform, Randomizable): continue - _transform.set_random_state(seed=self.R.randint(MAX_SEED, dtype="uint32")) + _transform.set_random_generator(seed=self.R.integers(MAX_SEED, dtype="uint32")) return self def randomize(self, data: Any | None = None) -> None: @@ -731,7 +734,7 @@ def __call__(self, data, start=0, end=None, threading=False, lazy: bool | None = if len(self.transforms) == 0: return data - sample_size = 
self.R.randint(self.min_num_transforms, self.max_num_transforms + 1) + sample_size = self.R.integers(self.min_num_transforms, self.max_num_transforms + 1) applied_order = self.R.choice(len(self.transforms), sample_size, replace=self.replace, p=self.weights).tolist() _lazy = self._lazy if lazy is None else lazy diff --git a/monai/transforms/croppad/array.py b/monai/transforms/croppad/array.py index 1c84c473b5..803689dccc 100644 --- a/monai/transforms/croppad/array.py +++ b/monai/transforms/croppad/array.py @@ -58,6 +58,7 @@ look_up_option, pytorch_after, ) +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration __all__ = [ "Pad", @@ -614,10 +615,10 @@ def randomize(self, img_size: Sequence[int]) -> None: max_size = img_size if self.max_roi_size is None else fall_back_tuple(self.max_roi_size, img_size) if any(i > j for i, j in zip(self._size, max_size)): raise ValueError(f"min ROI size: {self._size} is larger than max ROI size: {max_size}.") - self._size = tuple(self.R.randint(low=self._size[i], high=max_size[i] + 1) for i in range(len(img_size))) + self._size = tuple(self.R.integers(low=self._size[i], high=max_size[i] + 1) for i in range(len(img_size))) if self.random_center: valid_size = get_valid_patch_size(img_size, self._size) - self._slices = get_random_patch(img_size, valid_size, self.R) + self._slices = get_random_patch(img_size, valid_size, generator=self.R) def __call__(self, img: torch.Tensor, randomize: bool = True, lazy: bool | None = None) -> torch.Tensor: # type: ignore """ @@ -753,11 +754,11 @@ def __init__( self.num_samples = num_samples self.cropper = RandSpatialCrop(roi_size, max_roi_size, random_center, random_size, lazy) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandSpatialCropSamples: - super().set_random_state(seed, state) - 
self.cropper.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.cropper.set_random_generator(seed, generator) return self @LazyTransform.lazy.setter # type: ignore @@ -996,7 +997,7 @@ def __init__( def randomize(self, weight_map: NdarrayOrTensor) -> None: self.centers = weighted_patch_samples( - spatial_size=self.spatial_size, w=weight_map[0], n_samples=self.num_samples, r_state=self.R + spatial_size=self.spatial_size, w=weight_map[0], n_samples=self.num_samples, generator=self.R ) # using only the first channel as weight map @LazyTransform.lazy.setter # type: ignore @@ -1167,8 +1168,8 @@ def randomize( _shape, fg_indices_, bg_indices_, - self.R, - self.allow_smaller, + allow_smaller=self.allow_smaller, + generator=self.R, ) @LazyTransform.lazy.setter # type: ignore @@ -1350,7 +1351,14 @@ def randomize( if _shape is None: raise ValueError("label or image must be provided to infer the output spatial shape.") self.centers = generate_label_classes_crop_centers( - self.spatial_size, self.num_samples, _shape, indices_, self.ratios, self.R, self.allow_smaller, self.warn + self.spatial_size, + self.num_samples, + _shape, + indices_, + self.ratios, + allow_smaller=self.allow_smaller, + warn=self.warn, + generator=self.R, ) @LazyTransform.lazy.setter # type: ignore diff --git a/monai/transforms/croppad/dictionary.py b/monai/transforms/croppad/dictionary.py index da3c38f0a2..8101eeea24 100644 --- a/monai/transforms/croppad/dictionary.py +++ b/monai/transforms/croppad/dictionary.py @@ -21,7 +21,6 @@ from copy import deepcopy from typing import Any -import numpy as np import torch from monai.config import IndexSelection, KeysCollection, SequenceStr @@ -51,6 +50,7 @@ from monai.transforms.transform import LazyTransform, MapTransform, Randomizable from monai.transforms.utils import is_positive from monai.utils import MAX_SEED, Method, PytorchPadMode, deprecated_arg_default, ensure_tuple_rep +from monai.utils.utils_random_generator_adaptor import 
SupportsRandomGeneration __all__ = [ "Padd", @@ -391,10 +391,12 @@ class RandCropd(Cropd, Randomizable): def __init__(self, keys: KeysCollection, cropper: Crop, allow_missing_keys: bool = False, lazy: bool = False): super().__init__(keys, cropper=cropper, allow_missing_keys=allow_missing_keys, lazy=lazy) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandCropd: - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandCropd: + super().set_random_generator(seed, generator) if isinstance(self.cropper, Randomizable): - self.cropper.set_random_state(seed, state) + self.cropper.set_random_generator(seed, generator) return self def randomize(self, img_size: Sequence[int]) -> None: @@ -684,7 +686,7 @@ def lazy(self, value: bool) -> None: self.cropper.lazy = value def randomize(self, data: Any | None = None) -> None: - self.sub_seed = self.R.randint(MAX_SEED, dtype="uint32") + self.sub_seed = self.R.integers(MAX_SEED, dtype="uint32") def __call__( self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = None @@ -700,7 +702,7 @@ def __call__( lazy_ = self.lazy if lazy is None else lazy for key in self.key_iterator(dict(data)): - self.cropper.set_random_state(seed=self.sub_seed) + self.cropper.set_random_generator(seed=self.sub_seed) for i, im in enumerate(self.cropper(data[key], lazy=lazy_)): ret[i][key] = im return ret @@ -845,11 +847,11 @@ def __init__( self.w_key = w_key self.cropper = RandWeightedCrop(spatial_size, num_samples, lazy=lazy) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandWeightedCropd: - super().set_random_state(seed, state) - self.cropper.set_random_state(seed, state) + super().set_random_generator(seed, generator) + 
self.cropper.set_random_generator(seed, generator) return self def randomize(self, weight_map: NdarrayOrTensor) -> None: @@ -968,11 +970,11 @@ def __init__( lazy=lazy, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandCropByPosNegLabeld: - super().set_random_state(seed, state) - self.cropper.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.cropper.set_random_generator(seed, generator) return self def randomize( @@ -1130,11 +1132,11 @@ def __init__( lazy=lazy, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandCropByLabelClassesd: - super().set_random_state(seed, state) - self.cropper.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.cropper.set_random_generator(seed, generator) return self def randomize( diff --git a/monai/transforms/intensity/array.py b/monai/transforms/intensity/array.py index ed59bbc8f3..cb428b18a6 100644 --- a/monai/transforms/intensity/array.py +++ b/monai/transforms/intensity/array.py @@ -1689,7 +1689,7 @@ def randomize(self, data: Any | None = None) -> None: super().randomize(None) if not self._do_transform: return None - num_control_point = self.R.randint(self.num_control_points[0], self.num_control_points[1] + 1) + num_control_point = self.R.integers(self.num_control_points[0], self.num_control_points[1] + 1) self.reference_control_points = np.linspace(0, 1, num_control_point) self.floating_control_points = np.copy(self.reference_control_points) for i in range(1, num_control_point - 1): @@ -2083,11 +2083,11 @@ def randomize(self, img: NdarrayOrTensor, intensity_range: Sequence[Sequence[flo if self.channel_wise: # randomizing per 
channel for i, chan in enumerate(img): - self.sampled_locs.append((i,) + tuple(self.R.randint(0, k) for k in chan.shape)) + self.sampled_locs.append((i,) + tuple(self.R.integers(0, k) for k in chan.shape)) self.sampled_k_intensity.append(self.R.uniform(intensity_range[i][0], intensity_range[i][1])) else: # working with all channels together - spatial = tuple(self.R.randint(0, k) for k in img.shape[1:]) + spatial = tuple(self.R.integers(0, k) for k in img.shape[1:]) self.sampled_locs = [(i,) + spatial for i in range(img.shape[0])] if isinstance(intensity_range[0], Sequence): self.sampled_k_intensity = [self.R.uniform(p[0], p[1]) for p in intensity_range] @@ -2172,13 +2172,13 @@ def randomize(self, img_size: Sequence[int]) -> None: return None size = fall_back_tuple(self.spatial_size, img_size) self.hole_coords = [] # clear previously computed coords - num_holes = self.holes if self.max_holes is None else self.R.randint(self.holes, self.max_holes + 1) + num_holes = self.holes if self.max_holes is None else self.R.integers(self.holes, self.max_holes + 1) for _ in range(num_holes): if self.max_spatial_size is not None: max_size = fall_back_tuple(self.max_spatial_size, img_size) - size = tuple(self.R.randint(low=size[i], high=max_size[i] + 1) for i in range(len(img_size))) + size = tuple(self.R.integers(low=size[i], high=max_size[i] + 1) for i in range(len(img_size))) valid_size = get_valid_patch_size(img_size, size) - self.hole_coords.append((slice(None),) + get_random_patch(img_size, valid_size, self.R)) + self.hole_coords.append((slice(None),) + get_random_patch(img_size, valid_size, generator=self.R)) @abstractmethod def _transform_holes(self, img: np.ndarray) -> np.ndarray: diff --git a/monai/transforms/intensity/dictionary.py b/monai/transforms/intensity/dictionary.py index 32052ad406..1f659e06a4 100644 --- a/monai/transforms/intensity/dictionary.py +++ b/monai/transforms/intensity/dictionary.py @@ -63,6 +63,7 @@ from monai.transforms.utils import is_positive from 
monai.utils import convert_to_tensor, ensure_tuple, ensure_tuple_rep from monai.utils.enums import PostFix +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration __all__ = [ "RandGaussianNoised", @@ -200,11 +201,11 @@ def __init__( RandomizableTransform.__init__(self, prob) self.rand_gaussian_noise = RandGaussianNoise(mean=mean, std=std, prob=1.0, dtype=dtype) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandGaussianNoised: - super().set_random_state(seed, state) - self.rand_gaussian_noise.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.rand_gaussian_noise.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -279,9 +280,11 @@ def __init__( dtype=dtype, ) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandRicianNoised: - super().set_random_state(seed, state) - self.rand_rician_noise.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandRicianNoised: + super().set_random_generator(seed, generator) + self.rand_rician_noise.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -411,11 +414,11 @@ def __init__( self.meta_key_postfix = ensure_tuple_rep(meta_key_postfix, len(self.keys)) self.shifter = RandShiftIntensity(offsets=offsets, safe=safe, prob=1.0) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandShiftIntensityd: - 
super().set_random_state(seed, state) - self.shifter.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.shifter.set_random_generator(seed, generator) return self def __call__(self, data) -> dict[Hashable, NdarrayOrTensor]: @@ -509,11 +512,11 @@ def __init__( factors=factors, nonzero=nonzero, channel_wise=channel_wise, dtype=dtype, prob=1.0 ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandStdShiftIntensityd: - super().set_random_state(seed, state) - self.shifter.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.shifter.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -608,11 +611,11 @@ def __init__( RandomizableTransform.__init__(self, prob) self.scaler = RandScaleIntensity(factors=factors, dtype=dtype, prob=1.0, channel_wise=channel_wise) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandScaleIntensityd: - super().set_random_state(seed, state) - self.scaler.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.scaler.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -680,11 +683,11 @@ def __init__( factors=factors, fixed_mean=self.fixed_mean, preserve_range=preserve_range, dtype=dtype, prob=1.0 ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> 
RandScaleIntensityFixedMeand: - super().set_random_state(seed, state) - self.scaler.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.scaler.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -735,9 +738,11 @@ def __init__( self.rand_bias_field = RandBiasField(degree=degree, coeff_range=coeff_range, dtype=dtype, prob=1.0) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandBiasFieldd: - super().set_random_state(seed, state) - self.rand_bias_field.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandBiasFieldd: + super().set_random_generator(seed, generator) + self.rand_bias_field.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -962,11 +967,11 @@ def __init__( self.adjuster = RandAdjustContrast(gamma=gamma, prob=1.0, invert_image=invert_image, retain_stats=retain_stats) self.invert_image = invert_image - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandAdjustContrastd: - super().set_random_state(seed, state) - self.adjuster.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.adjuster.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1204,11 +1209,11 @@ def __init__( sigma_x=sigma_x, sigma_y=sigma_y, sigma_z=sigma_z, approx=approx, prob=1.0 ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | 
None = None, generator: SupportsRandomGeneration | None = None ) -> RandGaussianSmoothd: - super().set_random_state(seed, state) - self.rand_smooth.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.rand_smooth.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1321,11 +1326,11 @@ def __init__( prob=1.0, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandGaussianSharpend: - super().set_random_state(seed, state) - self.rand_sharpen.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.rand_sharpen.set_random_generator(seed, generator) return self def __call__(self, data: dict[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1371,11 +1376,11 @@ def __init__( RandomizableTransform.__init__(self, prob) self.shifter = RandHistogramShift(num_control_points=num_control_points, prob=1.0) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandHistogramShiftd: - super().set_random_state(seed, state) - self.shifter.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.shifter.set_random_generator(seed, generator) return self def __call__(self, data: dict[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1431,9 +1436,11 @@ def __init__( RandomizableTransform.__init__(self, prob=prob) self.rand_gibbs_noise = RandGibbsNoise(alpha=alpha, prob=1.0) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandGibbsNoised: - super().set_random_state(seed, state) - 
self.rand_gibbs_noise.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandGibbsNoised: + super().set_random_generator(seed, generator) + self.rand_gibbs_noise.set_random_generator(seed, generator) return self def __call__(self, data: dict[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1599,11 +1606,11 @@ def __init__( RandomizableTransform.__init__(self, prob=prob) self.rand_noise = RandKSpaceSpikeNoise(prob=1.0, intensity_range=intensity_range, channel_wise=channel_wise) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandKSpaceSpikeNoised: - super().set_random_state(seed, state) - self.rand_noise.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.rand_noise.set_random_generator(seed, generator) return self def __call__(self, data: dict[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1677,11 +1684,11 @@ def __init__( prob=1.0, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandCoarseDropoutd: - super().set_random_state(seed, state) - self.dropper.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.dropper.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1750,11 +1757,11 @@ def __init__( holes=holes, spatial_size=spatial_size, max_holes=max_holes, max_spatial_size=max_spatial_size, prob=1.0 ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, 
seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandCoarseShuffled: - super().set_random_state(seed, state) - self.shuffle.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.shuffle.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: diff --git a/monai/transforms/smooth_field/array.py b/monai/transforms/smooth_field/array.py index c9df5f1dbb..abb3213de3 100644 --- a/monai/transforms/smooth_field/array.py +++ b/monai/transforms/smooth_field/array.py @@ -15,7 +15,6 @@ from collections.abc import Sequence from typing import Any -import numpy as np import torch from torch.nn.functional import grid_sample, interpolate @@ -28,6 +27,7 @@ from monai.utils.enums import TransformBackends from monai.utils.module import look_up_option from monai.utils.type_conversion import convert_to_dst_type, convert_to_tensor +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration __all__ = ["SmoothField", "RandSmoothFieldAdjustContrast", "RandSmoothFieldAdjustIntensity", "RandSmoothDeform"] @@ -200,11 +200,11 @@ def __init__( device=device, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandSmoothFieldAdjustContrast: - super().set_random_state(seed, state) - self.sfield.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.sfield.set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: @@ -301,11 +301,11 @@ def __init__( device=device, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> 
RandSmoothFieldAdjustIntensity: - super().set_random_state(seed, state) - self.sfield.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.sfield.set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: @@ -415,9 +415,11 @@ def __init__( self.grid = torch.stack(grid).unsqueeze(0).to(self.device, self.grid_dtype) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Randomizable: - super().set_random_state(seed, state) - self.sfield.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Randomizable: + super().set_random_generator(seed, generator) + self.sfield.set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: diff --git a/monai/transforms/smooth_field/dictionary.py b/monai/transforms/smooth_field/dictionary.py index 99d19064f8..dc42096d0f 100644 --- a/monai/transforms/smooth_field/dictionary.py +++ b/monai/transforms/smooth_field/dictionary.py @@ -14,7 +14,6 @@ from collections.abc import Hashable, Mapping, Sequence from typing import Any -import numpy as np import torch from monai.config import KeysCollection, SequenceStr @@ -27,6 +26,7 @@ ) from monai.transforms.transform import MapTransform, RandomizableTransform from monai.utils import GridSampleMode, GridSamplePadMode, InterpolateMode, convert_to_tensor, ensure_tuple_rep +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration __all__ = [ "RandSmoothFieldAdjustContrastd", @@ -91,11 +91,11 @@ def __init__( device=device, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandSmoothFieldAdjustContrastd: - super().set_random_state(seed, state) - 
self.trans.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.trans.set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: @@ -169,11 +169,11 @@ def __init__( device=device, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandSmoothFieldAdjustIntensityd: - super().set_random_state(seed, state) - self.trans.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.trans.set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: @@ -259,11 +259,11 @@ def __init__( device=device, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandSmoothDeformd: - super().set_random_state(seed, state) - self.trans.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.trans.set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: diff --git a/monai/transforms/spatial/array.py b/monai/transforms/spatial/array.py index 01bdd45a74..c00f8d9710 100644 --- a/monai/transforms/spatial/array.py +++ b/monai/transforms/spatial/array.py @@ -77,6 +77,7 @@ from monai.utils.misc import ImageMetaKey as Key from monai.utils.module import look_up_option from monai.utils.type_conversion import convert_data_type, get_equivalent_dtype, get_torch_dtype_from_string +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration nib, has_nib = optional_import("nibabel") cupy, _ = optional_import("cupy") @@ -1228,7 +1229,7 @@ def randomize(self, data: Any | None = None) -> None: super().randomize(None) if not self._do_transform: 
return None - self._rand_k = self.R.randint(self.max_k) + 1 + self._rand_k = self.R.integers(self.max_k) + 1 def __call__(self, img: torch.Tensor, randomize: bool = True, lazy: bool | None = None) -> torch.Tensor: """ @@ -1481,7 +1482,7 @@ def randomize(self, data: NdarrayOrTensor) -> None: super().randomize(None) if not self._do_transform: return None - self._axis = self.R.randint(data.ndim - 1) + self._axis = self.R.integers(data.ndim - 1) def __call__(self, img: torch.Tensor, randomize: bool = True, lazy: bool | None = None) -> torch.Tensor: """ @@ -2464,9 +2465,11 @@ def get_identity_grid(self, spatial_size: Sequence[int], lazy: bool): else self._cached_grid ) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandAffine: - self.rand_affine_grid.set_random_state(seed, state) - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandAffine: + self.rand_affine_grid.set_random_generator(seed, generator) + super().set_random_generator(seed, generator) return self def randomize(self, data: Any | None = None) -> None: @@ -2670,10 +2673,12 @@ def __init__( self.mode = mode self.padding_mode: str = padding_mode - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Rand2DElastic: - self.deform_grid.set_random_state(seed, state) - self.rand_affine_grid.set_random_state(seed, state) - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Rand2DElastic: + self.deform_grid.set_random_generator(seed, generator) + self.rand_affine_grid.set_random_generator(seed, generator) + super().set_random_generator(seed, generator) return self def set_device(self, device): @@ -2844,9 +2849,11 @@ def __init__( self.magnitude = 1.0 self.sigma = 1.0 - def set_random_state(self, seed: int | 
None = None, state: np.random.RandomState | None = None) -> Rand3DElastic: - self.rand_affine_grid.set_random_state(seed, state) - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Rand3DElastic: + self.rand_affine_grid.set_random_generator(seed, generator) + super().set_random_generator(seed, generator) return self def set_device(self, device): @@ -3439,7 +3446,7 @@ def randomize(self, array): else: max_offset = ensure_tuple_rep(self.max_offset, len(self.patch_size)) - self.offset = tuple(self.R.randint(low=low, high=high + 1) for low, high in zip(min_offset, max_offset)) + self.offset = tuple(self.R.integers(low=low, high=high + 1) for low, high in zip(min_offset, max_offset)) def filter_count(self, image_np: NdarrayOrTensor, locations: np.ndarray) -> tuple[NdarrayOrTensor, np.ndarray]: if self.sort_fn == GridPatchSort.RANDOM: diff --git a/monai/transforms/spatial/dictionary.py b/monai/transforms/spatial/dictionary.py index 01fadcfb69..08cf2f32ea 100644 --- a/monai/transforms/spatial/dictionary.py +++ b/monai/transforms/spatial/dictionary.py @@ -70,6 +70,7 @@ ) from monai.utils.enums import TraceKeys from monai.utils.module import optional_import +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration nib, _ = optional_import("nibabel") @@ -714,7 +715,7 @@ def __init__( self._rand_k = 0 def randomize(self, data: Any | None = None) -> None: - self._rand_k = self.R.randint(self.max_k) + 1 + self._rand_k = self.R.integers(self.max_k) + 1 super().randomize(None) def __call__( @@ -1103,9 +1104,11 @@ def lazy(self, val: bool) -> None: self._lazy = val self.rand_affine.lazy = val - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandAffined: - self.rand_affine.set_random_state(seed, state) - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, 
generator: SupportsRandomGeneration | None = None + ) -> RandAffined: + self.rand_affine.set_random_generator(seed, generator) + super().set_random_generator(seed, generator) return self def __call__( @@ -1268,9 +1271,11 @@ def __init__( self.mode = ensure_tuple_rep(mode, len(self.keys)) self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys)) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Rand2DElasticd: - self.rand_2d_elastic.set_random_state(seed, state) - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Rand2DElasticd: + self.rand_2d_elastic.set_random_generator(seed, generator) + super().set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -1419,9 +1424,11 @@ def __init__( self.mode = ensure_tuple_rep(mode, len(self.keys)) self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys)) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Rand3DElasticd: - self.rand_3d_elastic.set_random_state(seed, state) - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Rand3DElasticd: + self.rand_3d_elastic.set_random_generator(seed, generator) + super().set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]: @@ -1566,8 +1573,10 @@ def lazy(self, val: bool): self.flipper.lazy = val self._lazy = val - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandFlipd: - super().set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> 
RandFlipd: + super().set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = None) -> dict[Hashable, torch.Tensor]: @@ -1639,9 +1648,11 @@ def lazy(self, val: bool): self.flipper.lazy = val self._lazy = val - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandAxisFlipd: - super().set_random_state(seed, state) - self.flipper.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandAxisFlipd: + super().set_random_generator(seed, generator) + self.flipper.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = None) -> dict[Hashable, torch.Tensor]: @@ -1850,9 +1861,11 @@ def lazy(self, val: bool): self.rand_rotate.lazy = val self._lazy = val - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandRotated: - super().set_random_state(seed, state) - self.rand_rotate.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandRotated: + super().set_random_generator(seed, generator) + self.rand_rotate.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = None) -> dict[Hashable, torch.Tensor]: @@ -2078,9 +2091,11 @@ def lazy(self, val: bool): self.rand_zoom.lazy = val self._lazy = val - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandZoomd: - super().set_random_state(seed, state) - self.rand_zoom.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandZoomd: + super().set_random_generator(seed, generator) + 
self.rand_zoom.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = None) -> dict[Hashable, torch.Tensor]: @@ -2250,11 +2265,11 @@ def __init__( self.mode = ensure_tuple_rep(mode, len(self.keys)) self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys)) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandGridDistortiond: - super().set_random_state(seed, state) - self.rand_grid_distortion.set_random_state(seed, state) + super().set_random_generator(seed, generator) + self.rand_grid_distortion.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]: @@ -2498,9 +2513,11 @@ def __init__( **pad_kwargs, ) - def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> RandGridPatchd: - super().set_random_state(seed, state) - self.patcher.set_random_state(seed, state) + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> RandGridPatchd: + super().set_random_generator(seed, generator) + self.patcher.set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: @@ -2581,10 +2598,10 @@ def __init__( device=self.device, ) - def set_random_state( - self, seed: int | None = None, state: np.random.RandomState | None = None + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None ) -> RandSimulateLowResolutiond: - super().set_random_state(seed, state) + super().set_random_generator(seed, generator) return self def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: diff 
--git a/monai/transforms/transform.py b/monai/transforms/transform.py index e35335ba0e..66d71c6797 100644 --- a/monai/transforms/transform.py +++ b/monai/transforms/transform.py @@ -27,8 +27,10 @@ from monai.data.meta_tensor import MetaTensor from monai.transforms.traits import LazyTrait, RandomizableTrait, ThreadUnsafe from monai.utils import MAX_SEED, ensure_tuple, first +from monai.utils.deprecate_utils import deprecated from monai.utils.enums import TransformBackends from monai.utils.misc import MONAIEnvVars +from monai.utils.utils_random_generator_adaptor import SupportsRandomGeneration, _LegacyRandomStateAdaptor __all__ = [ "ThreadUnsafe", @@ -171,6 +173,9 @@ def _log_stats(data, prefix: str | None = "Data"): raise RuntimeError(f"applying transform {transform}") from e +_default_random_generator_factory: Callable[[Any | None], SupportsRandomGeneration] = _LegacyRandomStateAdaptor + + class Randomizable(ThreadUnsafe, RandomizableTrait): """ An interface for handling random state locally, currently based on a class @@ -183,8 +188,14 @@ class Randomizable(ThreadUnsafe, RandomizableTrait): the random states will be duplicated. """ - R: np.random.RandomState = np.random.RandomState() + R: SupportsRandomGeneration = _LegacyRandomStateAdaptor() # FIXME Why is this initialized here? + @deprecated( + since="1.3.0", + removed="1.5.0", + msg_suffix="np.random.RandomState is deprecated in favor of np.random.Generator." + " Please use `Randomizable.set_random_generator(...)` instead.", + ) def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Randomizable: """ Set the random state locally, to control the randomness, the derived @@ -194,7 +205,7 @@ def set_random_state(self, seed: int | None = None, state: np.random.RandomState Args: seed: set the random state with an integer seed. state: set the random state with a `np.random.RandomState` object. 
- + generator: the random generator Raises: TypeError: When ``state`` is not an ``Optional[np.random.RandomState]``. @@ -202,19 +213,51 @@ def set_random_state(self, seed: int | None = None, state: np.random.RandomState a Randomizable instance. """ + if sum(x is not None for x in (seed, state)) > 1: + raise ValueError("Only one of `seed` or `state` can be set.") + if state is not None: + if not isinstance(state, np.random.RandomState): + raise TypeError(f"state must be None or a np.random.RandomState but is {type(state).__name__}.") + return self.set_random_generator(seed=seed, generator=_LegacyRandomStateAdaptor(random_state=state)) + return self.set_random_generator(seed=seed) + + def set_random_generator( + self, seed: int | None = None, generator: SupportsRandomGeneration | None = None + ) -> Randomizable: + """ + Set the random generator locally, to control the randomness, the derived + classes should use :py:attr:`self.R` instead of `np.random` to introduce random + factors. + + Args: + seed: set the random generator with an integer seed. + generator: set the random generator with an object implementing the protocol ``SupportsRandomGeneration``. + Raises: + TypeError: When ``generator`` is not an ``Optional[SupportsRandomGeneration]``. + + Returns: + a Randomizable instance.
+ + """ + if sum(x is not None for x in (seed, generator)) > 1: + raise ValueError("Only one of `seed`, `state` or `generator` can be set.") + if seed is not None: _seed = id(seed) if not isinstance(seed, (int, np.integer)) else seed _seed = _seed % MAX_SEED - self.R = np.random.RandomState(_seed) + self.R = _default_random_generator_factory(seed=_seed) return self - if state is not None: - if not isinstance(state, np.random.RandomState): - raise TypeError(f"state must be None or a np.random.RandomState but is {type(state).__name__}.") - self.R = state + if generator is not None: + if not isinstance(generator, SupportsRandomGeneration): + raise TypeError( + "generator must be None or a class implementing the protocol " + f"SupportsRandomGeneration but is {type(generator).__name__}." + ) + self.R = generator return self - self.R = np.random.RandomState() + self.R = _default_random_generator_factory() return self def randomize(self, data: Any) -> None: @@ -339,7 +382,7 @@ def __call__(self, img): return img + self._offset transform = RandShiftIntensity() - transform.set_random_state(seed=0) + transform.set_random_generator(seed=0) print(transform(10)) """ @@ -357,7 +400,7 @@ def randomize(self, data: Any) -> None: This method can generate the random factors based on properties of the input data. 
""" - self._do_transform = self.R.rand() < self.prob + self._do_transform = self.R.random() < self.prob class MapTransform(Transform): diff --git a/monai/transforms/utility/array.py b/monai/transforms/utility/array.py index 54fcdc8d59..8011670fc4 100644 --- a/monai/transforms/utility/array.py +++ b/monai/transforms/utility/array.py @@ -1126,7 +1126,7 @@ def __init__(self, background: int = 0, pert: float = 0.0) -> None: self._points: list[tuple[int, ...]] = [] def randomize(self, label: NdarrayOrTensor) -> None: - self._points = get_extreme_points(label, rand_state=self.R, background=self._background, pert=self._pert) + self._points = get_extreme_points(label, generator=self.R, background=self._background, pert=self._pert) def __call__( self, diff --git a/monai/transforms/utility/dictionary.py b/monai/transforms/utility/dictionary.py index 7f4f22a475..7d55afb807 100644 --- a/monai/transforms/utility/dictionary.py +++ b/monai/transforms/utility/dictionary.py @@ -1405,7 +1405,7 @@ def __init__( self.rescale_max = rescale_max def randomize(self, label: NdarrayOrTensor) -> None: - self.points = get_extreme_points(label, rand_state=self.R, background=self.background, pert=self.pert) + self.points = get_extreme_points(label, generator=self.R, background=self.background, pert=self.pert) def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> dict[Hashable, NdarrayOrTensor]: d = dict(data) diff --git a/monai/transforms/utils.py b/monai/transforms/utils.py index 33d468b86e..e5e525f6f8 100644 --- a/monai/transforms/utils.py +++ b/monai/transforms/utils.py @@ -65,8 +65,14 @@ optional_import, pytorch_after, ) +from monai.utils.deprecate_utils import deprecated_arg from monai.utils.enums import TransformBackends from monai.utils.type_conversion import convert_data_type, convert_to_cupy, convert_to_dst_type, convert_to_tensor +from monai.utils.utils_random_generator_adaptor import ( + SupportsRandomGeneration, + _handle_legacy_random_state, + _LegacyRandomStateAdaptor, 
+) measure, has_measure = optional_import("skimage.measure", "0.14.2", min_version) morphology, has_morphology = optional_import("skimage.morphology") @@ -425,11 +431,15 @@ def map_classes_to_indices( return indices +@deprecated_arg( + "r_state", since="1.3.0", removed="1.5.0", new_name="generator", msg_suffix="Please use `generator` instead." +) def weighted_patch_samples( spatial_size: int | Sequence[int], w: NdarrayOrTensor, n_samples: int = 1, r_state: np.random.RandomState | None = None, + generator: SupportsRandomGeneration | None = None, ) -> list: """ Computes `n_samples` of random patch sampling locations, given the sampling weight map `w` and patch `spatial_size`. @@ -441,6 +451,7 @@ def weighted_patch_samples( The weight map shape is assumed ``(spatial_dim_0, spatial_dim_1, ..., spatial_dim_n)``. n_samples: number of patch samples r_state: a random state container + generator: a random number generator Returns: a list of `n_samples` N-D integers representing the spatial sampling location of patches. 
@@ -449,8 +460,10 @@ def weighted_patch_samples( check_non_lazy_pending_ops(w, name="weighted_patch_samples") if w is None: raise ValueError("w must be an ND array, got None.") - if r_state is None: - r_state = np.random.RandomState() + generator = _handle_legacy_random_state(rand_state=r_state, generator=generator, return_legacy_default_random=False) + del r_state + if generator is None: + generator = _LegacyRandomStateAdaptor() img_size = np.asarray(w.shape, dtype=int) win_size = np.asarray(fall_back_tuple(spatial_size, img_size), dtype=int) @@ -462,9 +475,9 @@ def weighted_patch_samples( v -= v.min() # shifting to non-negative v = cumsum(v) if not v[-1] or not isfinite(v[-1]) or v[-1] < 0: # uniform sampling - idx = r_state.randint(0, len(v), size=n_samples) + idx = generator.integers(0, len(v), size=n_samples) else: - r, *_ = convert_to_dst_type(r_state.random(n_samples), v) + r, *_ = convert_to_dst_type(generator.random(n_samples), v) idx = searchsorted(v, r * v[-1], right=True) # type: ignore idx, *_ = convert_to_dst_type(idx, v, dtype=torch.int) # type: ignore # compensate 'valid' mode @@ -517,6 +530,9 @@ def correct_crop_centers( return ensure_tuple(valid_centers) # type: ignore +@deprecated_arg( + "rand_state", since="1.3.0", removed="1.5.0", new_name="generator", msg_suffix="Please use `generator` instead." +) def generate_pos_neg_label_crop_centers( spatial_size: Sequence[int] | int, num_samples: int, @@ -526,6 +542,8 @@ def generate_pos_neg_label_crop_centers( bg_indices: NdarrayOrTensor, rand_state: np.random.RandomState | None = None, allow_smaller: bool = False, + generator: SupportsRandomGeneration + | None = None, # TODO How to handle the positional arguments to be backward compatible? Should we have keyword-only arguments? 
) -> tuple[tuple]: """ Generate valid sample locations based on the label with option for specifying foreground ratio @@ -548,9 +566,10 @@ def generate_pos_neg_label_crop_centers( ValueError: When the foreground and background indices lengths are 0. """ - if rand_state is None: - rand_state = np.random.random.__self__ # type: ignore - + generator = _handle_legacy_random_state( + rand_state=rand_state, generator=generator, return_legacy_default_random=True + ) + del rand_state centers = [] fg_indices = np.asarray(fg_indices) if isinstance(fg_indices, Sequence) else fg_indices bg_indices = np.asarray(bg_indices) if isinstance(bg_indices, Sequence) else bg_indices @@ -565,8 +584,8 @@ def generate_pos_neg_label_crop_centers( ) for _ in range(num_samples): - indices_to_use = fg_indices if rand_state.rand() < pos_ratio else bg_indices - random_int = rand_state.randint(len(indices_to_use)) + indices_to_use = fg_indices if generator.random() < pos_ratio else bg_indices + random_int = generator.integers(len(indices_to_use)) idx = indices_to_use[random_int] center = unravel_index(idx, label_spatial_shape).tolist() # shift center to range of valid centers @@ -575,6 +594,9 @@ def generate_pos_neg_label_crop_centers( return ensure_tuple(centers) # type: ignore +@deprecated_arg( + "rand_state", since="1.3.0", removed="1.5.0", new_name="generator", msg_suffix="Please use `generator` instead." +) def generate_label_classes_crop_centers( spatial_size: Sequence[int] | int, num_samples: int, @@ -584,6 +606,8 @@ def generate_label_classes_crop_centers( rand_state: np.random.RandomState | None = None, allow_smaller: bool = False, warn: bool = True, + generator: SupportsRandomGeneration + | None = None, # TODO How to handle the positional arguments to be backward compatible? Should we have keyword-only arguments? ) -> tuple[tuple]: """ Generate valid sample locations based on the specified ratios of label classes. 
@@ -603,8 +627,10 @@ def generate_label_classes_crop_centers( warn: if `True` prints a warning if a class is not present in the label. """ - if rand_state is None: - rand_state = np.random.random.__self__ # type: ignore + generator = _handle_legacy_random_state( + rand_state=rand_state, generator=generator, return_legacy_default_random=True + ) + del rand_state if num_samples < 1: raise ValueError(f"num_samples must be an int number and greater than 0, got {num_samples}.") @@ -623,11 +649,11 @@ def generate_label_classes_crop_centers( warnings.warn(f"no available indices of class {i} to crop, set the crop ratio of this class to zero.") centers = [] - classes = rand_state.choice(len(ratios_), size=num_samples, p=np.asarray(ratios_) / np.sum(ratios_)) + classes = generator.choice(len(ratios_), size=num_samples, p=np.asarray(ratios_) / np.sum(ratios_)) for i in classes: # randomly select the indices of a class based on the ratios indices_to_use = indices[i] - random_int = rand_state.randint(len(indices_to_use)) + random_int = generator.integers(len(indices_to_use)) center = unravel_index(indices_to_use[random_int], label_spatial_shape).tolist() # shift center to range of valid centers centers.append(correct_crop_centers(center, spatial_size, label_spatial_shape, allow_smaller)) @@ -1193,7 +1219,11 @@ def fill_holes( def get_extreme_points( - img: NdarrayOrTensor, rand_state: np.random.RandomState | None = None, background: int = 0, pert: float = 0.0 + img: NdarrayOrTensor, + rand_state: np.random.RandomState | None = None, + background: int = 0, + pert: float = 0.0, + generator: SupportsRandomGeneration | None = None, ) -> list[tuple[int, ...]]: """ Generate extreme points from an image. These are used to generate initial segmentation @@ -1216,8 +1246,10 @@ def get_extreme_points( ValueError: When the input image does not have any foreground pixel. 
""" check_non_lazy_pending_ops(img, name="get_extreme_points") - if rand_state is None: - rand_state = np.random.random.__self__ # type: ignore + generator = _handle_legacy_random_state( + generator=generator, rand_state=rand_state, return_legacy_default_random=True + ) + del rand_state indices = where(img != background) if np.size(indices[0]) == 0: raise ValueError("get_extreme_points: no foreground object in mask!") @@ -1232,11 +1264,13 @@ def _get_point(val, dim): """ idx = where(indices[dim] == val)[0] idx = idx.cpu() if isinstance(idx, torch.Tensor) else idx - idx = rand_state.choice(idx) if rand_state is not None else idx + idx = generator.choice( + idx + ) # TODO: Since this was using the default is rand_state was None, this condition is not needed, right? pt = [] for j in range(img.ndim): # add +- pert to each dimension - val = int(indices[j][idx] + 2.0 * pert * (rand_state.rand() if rand_state is not None else 0.5 - 0.5)) + val = int(indices[j][idx] + 2.0 * pert * (generator.random() - 0.5)) val = max(val, 0) val = min(val, img.shape[j] - 1) pt.append(val) diff --git a/monai/transforms/utils_create_transform_ims.py b/monai/transforms/utils_create_transform_ims.py index a98cdfe936..82ed8f0596 100644 --- a/monai/transforms/utils_create_transform_ims.py +++ b/monai/transforms/utils_create_transform_ims.py @@ -426,7 +426,7 @@ def create_transform_im( if isinstance(transform, Randomizable): # increment the seed for map transforms so they're different to the array versions. 
seed = seed + 1 if isinstance(transform, MapTransform) else seed - transform.set_random_state(seed) + transform.set_random_generator(seed) out_dir = MONAIEnvVars.doc_images() if out_dir is None: diff --git a/monai/utils/misc.py b/monai/utils/misc.py index 18f05b6e9f..42ef6a6159 100644 --- a/monai/utils/misc.py +++ b/monai/utils/misc.py @@ -334,7 +334,7 @@ def set_determinism( Note: This function will not affect the randomizable objects in :py:class:`monai.transforms.Randomizable`, which - have independent random states. For those objects, the ``set_random_state()`` method should be used to + have independent random states. For those objects, the ``set_random_generator()`` method should be used to ensure the deterministic behavior (alternatively, :py:class:`monai.data.DataLoader` by default sets the seeds according to the global random state, please see also: :py:class:`monai.data.utils.worker_init_fn` and :py:class:`monai.data.utils.set_rnd`). diff --git a/monai/utils/utils_random_generator_adaptor.py b/monai/utils/utils_random_generator_adaptor.py new file mode 100644 index 0000000000..249ab57096 --- /dev/null +++ b/monai/utils/utils_random_generator_adaptor.py @@ -0,0 +1,190 @@ +# Copyright (c) MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import annotations
+
+from typing import Protocol, runtime_checkable
+
+import numpy as np
+from numpy._typing import _ArrayLikeFloat_co, _ArrayLikeInt_co
+
+from monai.config.type_definitions import DtypeLike, NdarrayTensor, ShapeLike
+from monai.utils.deprecate_utils import deprecated
+
+
+# FIXME What should the type be for the array in and out?
+@runtime_checkable
+class SupportsRandomGeneration(Protocol):
+    def integers(
+        self,
+        low: int,
+        high: int | None = None,
+        size: ShapeLike | None = None,
+        dtype: DtypeLike = np.int64,
+        endpoint: bool = False,
+    ) -> NdarrayTensor:
+        ...
+
+    def random(
+        self, size: ShapeLike | None = None, dtype: DtypeLike = np.float64, out: NdarrayTensor | None = None
+    ) -> NdarrayTensor:
+        ...
+
+    def choice(
+        self,
+        a: NdarrayTensor,
+        size: ShapeLike | None = None,
+        replace: bool = True,
+        p: NdarrayTensor | None = None,
+        axis: int = 0,
+        shuffle: bool = True,
+    ) -> NdarrayTensor:
+        ...
+
+    def bytes(self, length: int) -> bytes:
+        ...
+
+    def shuffle(self, x: NdarrayTensor, axis: int = 0) -> None:
+        ...
+
+    def permutation(self, x: NdarrayTensor, axis: int = 0) -> NdarrayTensor:
+        ...
+
+    def multinomial(
+        self, n: _ArrayLikeInt_co, pvals: _ArrayLikeFloat_co, size: ShapeLike | None = None
+    ) -> NdarrayTensor:
+        ...
+
+    def normal(self, loc: float = 0.0, scale: float = 1.0, size: ShapeLike | None = None) -> NdarrayTensor:
+        ...
+
+    def uniform(self, low: float = 0.0, high: float = 1.0, size: ShapeLike | None = None) -> NdarrayTensor:
+        ...
+
+
+def _generate_legacy_random_state_deprecation_message(new_method_name: str) -> str:
+    return (
+        f"Legacy numpy.random.RandomState is deprecated for self.R in transforms and will be removed in v1.5.0. "
+        f"Please use `.{new_method_name}(...)` of numpy.random.Generator instead."
+    )
+
+
+class _LegacyRandomStateAdaptor(SupportsRandomGeneration):
+    random_state: np.random.RandomState
+
+    def __init__(self, /, seed: int | None = None, random_state: np.random.RandomState | None = None):
+        if random_state is not None and seed is not None:
+            raise ValueError("Cannot specify both random_state and seed.")
+        self.random_state = np.random.RandomState(seed=seed) if random_state is None else random_state
+
+    def integers(
+        self,
+        low: int,
+        high: int | None = None,
+        size: ShapeLike | None = None,
+        dtype: DtypeLike = np.int64,
+        endpoint: bool = False,
+    ) -> NdarrayTensor:
+        if endpoint:
+            if high is not None:
+                high += 1
+            else:
+                low += 1
+        return self.random_state.randint(low=low, high=high, size=size, dtype=dtype)
+
+    def random(
+        self, size: ShapeLike | None = None, dtype: DtypeLike = np.float64, out: NdarrayTensor | None = None
+    ) -> NdarrayTensor:
+        if out is not None:
+            raise NotImplementedError("out is not implemented")
+        if dtype is not None and dtype != np.float64:
+            raise NotImplementedError("dtype is not implemented")
+        return self.random_state.random(size)
+
+    def choice(
+        self,
+        a: NdarrayTensor,
+        size: ShapeLike | None = None,
+        replace: bool = True,
+        p: NdarrayTensor | None = None,
+        axis: int = 0,
+        shuffle: bool = True,
+    ) -> NdarrayTensor:
+        if axis != 0:
+            raise NotImplementedError("axis is not implemented")
+        if not shuffle:
+            raise NotImplementedError("shuffle is not implemented")
+        return self.random_state.choice(a, size, replace, p)
+
+    def permutation(self, x: NdarrayTensor, axis: int = 0) -> NdarrayTensor:
+        if axis != 0:
+            raise NotImplementedError("axis is not implemented")
+        return self.random_state.permutation(x)
+
+    def shuffle(self, x: NdarrayTensor, axis: int = 0) -> None:
+        if axis != 0:
+            raise NotImplementedError("axis is not implemented")
+        return self.random_state.shuffle(x)
+
+    def multinomial(
+        self, n: _ArrayLikeInt_co, pvals: _ArrayLikeFloat_co, size: ShapeLike | None = None
+    ) -> NdarrayTensor:
+
return self.random_state.multinomial(n, pvals, size) + + def normal(self, loc: float = 0.0, scale: float = 1.0, size: ShapeLike | None = None) -> NdarrayTensor: + return self.random_state.normal(loc, scale, size) + + def uniform(self, low: float = 0.0, high: float = 1.0, size: ShapeLike | None = None) -> NdarrayTensor: + return self.random_state.uniform(low, high, size) + + @deprecated(since="1.3.0", removed="1.5.0", msg_suffix=_generate_legacy_random_state_deprecation_message("random")) + def random_sample(self, size: ShapeLike | None = None) -> NdarrayTensor: + return self.random_state.random_sample(size) + + @deprecated(since="1.3.0", removed="1.5.0", msg_suffix=_generate_legacy_random_state_deprecation_message("random")) + def rand(self, *args: int) -> NdarrayTensor: + return self.random_state.rand(*args) + + @deprecated( + since="1.3.0", removed="1.5.0", msg_suffix=_generate_legacy_random_state_deprecation_message("integers") + ) + def randint( + self, low: int, high: int | None = None, size: ShapeLike | None = None, dtype: DtypeLike = np.int64 + ) -> NdarrayTensor: + return self.random_state.randint(low, high, size, dtype) + + @deprecated( + since="1.3.0", removed="1.5.0", msg_suffix=_generate_legacy_random_state_deprecation_message("integers") + ) + def random_integers( + self, low: int, high: int | None = None, size: ShapeLike | None = None, dtype: DtypeLike = np.int64 + ) -> NdarrayTensor: + return self.random_state.random_integers(low, high, size, dtype) + + +def _handle_legacy_random_state( + rand_state: np.random.RandomState | None = None, + generator: SupportsRandomGeneration | None = None, + return_legacy_default_random: bool = False, +) -> SupportsRandomGeneration | None: + if generator is not None and rand_state is not None: + raise ValueError("rand_state and generator cannot be set at the same time.") + + if rand_state is not None: + generator = rand_state + rand_state = None + if isinstance(generator, np.random.RandomState): + generator = 
_LegacyRandomStateAdaptor(random_state=generator) + + if generator is None and return_legacy_default_random: + generator = _LegacyRandomStateAdaptor(random_state=np.random.random.__self__) + + return generator diff --git a/tests/croppers.py b/tests/croppers.py index 8c9b43bf0a..8125fbc572 100644 --- a/tests/croppers.py +++ b/tests/croppers.py @@ -81,7 +81,7 @@ def crop_test_value(self, input_param, input_arr, expected_array): def multi_inverse(self, input_shape, init_params): input_data = np.arange(np.prod(input_shape)).reshape(*input_shape) + 1 xform = self.Cropper(**init_params) - xform.set_random_state(1234) + xform.set_random_generator(1234) out = xform(input_data) if "num_samples" in init_params: self.assertEqual(len(out), init_params["num_samples"]) @@ -148,7 +148,7 @@ def crop_test_combine_ops(self, funcs, input_shape): non_lazy_result = input_data for _func in _funcs: if isinstance(_func, Randomizable): - _func.set_random_state(seed=123) + _func.set_random_generator(seed=123) non_lazy_result = _func(non_lazy_result) expected = non_lazy_result["img"] if is_map else non_lazy_result self.assertIsInstance(expected, MetaTensor) @@ -158,7 +158,7 @@ def crop_test_combine_ops(self, funcs, input_shape): for _func in _funcs: _func.lazy = True if isinstance(_func, Randomizable): - _func.set_random_state(seed=123) + _func.set_random_generator(seed=123) pending_result = _func(pending_result) pending_result = pending_result["img"] if is_map else pending_result self.assertIsInstance(pending_result, MetaTensor) diff --git a/tests/lazy_transforms_utils.py b/tests/lazy_transforms_utils.py index 1681e26037..7d250ddcfd 100644 --- a/tests/lazy_transforms_utils.py +++ b/tests/lazy_transforms_utils.py @@ -59,7 +59,7 @@ def test_resampler_lazy( """ if isinstance(resampler, Randomizable): - resampler.set_random_state(seed=seed) + resampler.set_random_generator(seed=seed) set_track_meta(True) resampler.lazy = True pending_output = resampler(**deepcopy(call_param)) diff --git 
a/tests/test_arraydataset.py b/tests/test_arraydataset.py index 6f373cd9db..d3c0de3c5f 100644 --- a/tests/test_arraydataset.py +++ b/tests/test_arraydataset.py @@ -94,7 +94,7 @@ def test_shape(self, img_transform, label_transform, indices, expected_shape): test_labels = [1, 1] dataset = ArrayDataset(test_images, img_transform, test_segs, label_transform, test_labels, None) self.assertEqual(len(dataset), 2) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) data1 = dataset[0] data2 = dataset[1] @@ -106,7 +106,7 @@ def test_shape(self, img_transform, label_transform, indices, expected_shape): np.testing.assert_allclose(data2[indices[0]], data2[indices[0]]) dataset = ArrayDataset(test_images, img_transform, test_segs, label_transform, test_labels, None) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) _ = dataset[0] data2_new = dataset[1] np.testing.assert_allclose(data2[indices[0]], data2_new[indices[0]], atol=1e-3) @@ -122,14 +122,14 @@ def test_default_none(self, img_transform, expected_shape): test_images = [test_image1, test_image2] dataset = ArrayDataset(test_images, img_transform) self.assertEqual(len(dataset), 2) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) data1 = dataset[0] data2 = dataset[1] self.assertTupleEqual(data1.shape, expected_shape) self.assertTupleEqual(data2.shape, expected_shape) dataset = ArrayDataset(test_images, img_transform) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) _ = dataset[0] data2_new = dataset[1] np.testing.assert_allclose(data2, data2_new, atol=1e-3) @@ -145,13 +145,13 @@ def test_dataloading_img(self, img_transform, expected_shape): test_images = [test_image1, test_image2] dataset = ArrayDataset(test_images, img_transform) self.assertEqual(len(dataset), 2) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) n_workers = 0 if sys.platform == "win32" else 2 loader = DataLoader(dataset, batch_size=10, num_workers=n_workers) 
imgs = next(iter(loader)) # test batching np.testing.assert_allclose(imgs.shape, [2] + list(expected_shape)) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) new_imgs = next(iter(loader)) # test batching np.testing.assert_allclose(imgs, new_imgs, atol=1e-3) @@ -171,13 +171,13 @@ def test_dataloading_img_label(self, img_transform, expected_shape): test_labels = [test_label1, test_label2] dataset = ArrayDataset(test_images, img_transform, test_labels, img_transform) self.assertEqual(len(dataset), 2) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) n_workers = 0 if sys.platform == "win32" else 2 loader = DataLoader(dataset, batch_size=10, num_workers=n_workers) data = next(iter(loader)) # test batching np.testing.assert_allclose(data[0].shape, [2] + list(expected_shape)) - dataset.set_random_state(1234) + dataset.set_random_generator(1234) new_data = next(iter(loader)) # test batching np.testing.assert_allclose(data[0], new_data[0], atol=1e-3) diff --git a/tests/test_compose.py b/tests/test_compose.py index 453ae3868d..295b902f39 100644 --- a/tests/test_compose.py +++ b/tests/test_compose.py @@ -40,7 +40,7 @@ def data_from_keys(keys, h, w): class _RandXform(Randomizable): def randomize(self): - self.val = self.R.random_sample() + self.val = self.R.random() def __call__(self, __unused): self.randomize() @@ -167,7 +167,7 @@ class _Acc(Randomizable): self.rand = 0.0 def randomize(self, data=None): - self.rand = self.R.rand() + self.rand = self.R.random() def __call__(self, data): self.randomize() @@ -175,9 +175,9 @@ def __call__(self, data): c = mt.Compose([_Acc(), _Acc()]) self.assertNotAlmostEqual(c(0), c(0)) - c.set_random_state(123) + c.set_random_generator(123) self.assertAlmostEqual(c(1), 1.61381597) - c.set_random_state(223) + c.set_random_generator(223) c.randomize() self.assertAlmostEqual(c(1), 1.90734751) @@ -202,7 +202,7 @@ def test_data_loader(self): xform_1 = mt.Compose([_RandXform()]) train_ds = Dataset([1], 
transform=xform_1) - xform_1.set_random_state(123) + xform_1.set_random_generator(123) out_1 = train_ds[0] self.assertAlmostEqual(out_1, 0.2045649) diff --git a/tests/test_dataloader.py b/tests/test_dataloader.py index 2ee69687a6..e274f34400 100644 --- a/tests/test_dataloader.py +++ b/tests/test_dataloader.py @@ -60,7 +60,7 @@ def test_exception(self, datalist): class _RandomDataset(torch.utils.data.Dataset, Randomizable): def __getitem__(self, index): - return self.R.randint(0, 1000, (1,)) + return self.R.integers(0, 1000, (1,)) def __len__(self): return 8 diff --git a/tests/test_deepedit_transforms.py b/tests/test_deepedit_transforms.py index 7f4d4eee1e..799982db5b 100644 --- a/tests/test_deepedit_transforms.py +++ b/tests/test_deepedit_transforms.py @@ -229,7 +229,7 @@ class TestAddInitialSeedPointMissingLabelsd(unittest.TestCase): def test_correct_results(self, arguments, input_data, expected_result): seed = 0 add_fn = AddInitialSeedPointMissingLabelsd(**arguments) - add_fn.set_random_state(seed) + add_fn.set_random_generator(seed) result = add_fn(input_data) self.assertEqual(result[arguments["guidance"]], expected_result) diff --git a/tests/test_deepgrow_transforms.py b/tests/test_deepgrow_transforms.py index 1328e13439..67aebd7496 100644 --- a/tests/test_deepgrow_transforms.py +++ b/tests/test_deepgrow_transforms.py @@ -372,7 +372,7 @@ class TestAddInitialSeedPointd(unittest.TestCase): def test_correct_results(self, arguments, input_data, expected_result): seed = 0 add_fn = AddInitialSeedPointd(**arguments) - add_fn.set_random_state(seed) + add_fn.set_random_generator(seed) result = add_fn(input_data) self.assertEqual(result[arguments["guidance"]], expected_result) @@ -396,7 +396,7 @@ class TestAddRandomGuidanced(unittest.TestCase): def test_correct_results(self, arguments, input_data, expected_result): seed = 0 add_fn = AddRandomGuidanced(**arguments) - add_fn.set_random_state(seed) + add_fn.set_random_generator(seed) result = add_fn(input_data) 
self.assertEqual(result[arguments["guidance"]], expected_result) diff --git a/tests/test_integration_classification_2d.py b/tests/test_integration_classification_2d.py index 4fc92c4068..f595a8cd67 100644 --- a/tests/test_integration_classification_2d.py +++ b/tests/test_integration_classification_2d.py @@ -71,7 +71,7 @@ def run_training_test(root_dir, train_x, train_y, val_x, val_y, device="cuda:0", RandZoom(min_zoom=0.9, max_zoom=1.1, prob=0.5), ] ) - train_transforms.set_random_state(1234) + train_transforms.set_random_generator(1234) val_transforms = Compose( [ LoadImage(image_only=True, simple_keys=True), diff --git a/tests/test_integration_determinism.py b/tests/test_integration_determinism.py index 6821279080..e74c0d7342 100644 --- a/tests/test_integration_determinism.py +++ b/tests/test_integration_determinism.py @@ -33,9 +33,9 @@ def __init__(self, transforms): def __getitem__(self, _unused_id): im, seg = create_test_image_2d(128, 128, noise_max=1, num_objs=4, num_seg_classes=1) seed = np.random.randint(2147483647) - self.transforms.set_random_state(seed=seed) + self.transforms.set_random_generator(seed=seed) im = self.transforms(im) - self.transforms.set_random_state(seed=seed) + self.transforms.set_random_generator(seed=seed) seg = self.transforms(seg) return im, seg diff --git a/tests/test_integration_segmentation_3d.py b/tests/test_integration_segmentation_3d.py index 2e4cc31645..eefde8b1d4 100644 --- a/tests/test_integration_segmentation_3d.py +++ b/tests/test_integration_segmentation_3d.py @@ -71,7 +71,7 @@ def run_training_test(root_dir, device="cuda:0", cachedataset=0, readers=(None, RandRotate90d(keys=["img", "seg"], prob=0.8, spatial_axes=[0, 2]), ] ) - train_transforms.set_random_state(1234) + train_transforms.set_random_generator(1234) val_transforms = Compose( [ LoadImaged(keys=["img", "seg"], reader=readers[1]), diff --git a/tests/test_inverse.py b/tests/test_inverse.py index 3f07b43d6d..3e171d252d 100644 --- a/tests/test_inverse.py +++ 
b/tests/test_inverse.py @@ -435,7 +435,7 @@ def test_inverse(self, _, data_name, acceptable_diff, is_meta, *transforms): # Apply forwards for t in transforms: if isinstance(t, Randomizable): - t.set_random_state(seed=get_seed()) + t.set_random_generator(seed=get_seed()) forwards.append(t(forwards[-1])) # Apply inverses diff --git a/tests/test_kspace_mask.py b/tests/test_kspace_mask.py index 5d6d9c18ea..7ac5bb9534 100644 --- a/tests/test_kspace_mask.py +++ b/tests/test_kspace_mask.py @@ -30,7 +30,7 @@ class TestMRIUtils(unittest.TestCase): def test_mask(self, test_data): # random mask masker = RandomKspaceMask(center_fractions=[0.08], accelerations=[4.0], spatial_dims=1, is_complex=True) - masker.set_random_state(seed=0) + masker.set_random_generator(seed=0) result, _ = masker(test_data) mask = masker.mask result = result[..., mask.squeeze() == 0, :].sum() @@ -38,7 +38,7 @@ def test_mask(self, test_data): # equispaced mask masker = EquispacedKspaceMask(center_fractions=[0.08], accelerations=[4.0], spatial_dims=1, is_complex=True) - masker.set_random_state(seed=0) + masker.set_random_generator(seed=0) result, _ = masker(test_data) mask = masker.mask result = result[..., mask.squeeze() == 0, :].sum() diff --git a/tests/test_matshow3d.py b/tests/test_matshow3d.py index a6cb3fcee3..256baf7284 100644 --- a/tests/test_matshow3d.py +++ b/tests/test_matshow3d.py @@ -72,7 +72,7 @@ def test_samples(self): ] ) image_path = os.path.join(testing_dir, "anatomical.nii") - xforms.set_random_state(0) + xforms.set_random_generator(0) ims = xforms({keys: image_path}) fig, mat = matshow3d( [im[keys] for im in ims], title=f"testing {keys}", figsize=(2, 2), frames_per_row=5, every_n=2, show=False diff --git a/tests/test_meta_affine.py b/tests/test_meta_affine.py index b95ea3f1ac..cfe21540c3 100644 --- a/tests/test_meta_affine.py +++ b/tests/test_meta_affine.py @@ -141,7 +141,7 @@ def run_transform(self, img, xform_cls, args_dict): args_dict.update({"keys": keys}) xform = 
xform_cls(**args_dict) if isinstance(xform, Randomizable): - xform.set_random_state(5) + xform.set_random_generator(5) output = xform(img) return output diff --git a/tests/test_one_of.py b/tests/test_one_of.py index 2909597507..cad2236c33 100644 --- a/tests/test_one_of.py +++ b/tests/test_one_of.py @@ -200,7 +200,7 @@ def test_inverse_compose(self): ), ] ) - transform.set_random_state(seed=0) + transform.set_random_generator(seed=0) result = transform({"img": np.ones((1, 101, 102, 103))}) result = transform.inverse(result) # invert to the original spatial shape @@ -214,7 +214,7 @@ def test_inverse_metatensor(self): OneOf([RandScaleIntensity(factors=0.5, prob=1.0), RandShiftIntensity(offsets=0.5, prob=1.0)]), ] ) - transform.set_random_state(seed=0) + transform.set_random_generator(seed=0) result = transform(np.ones((1, 101, 102, 103))) self.assertTupleEqual(result.shape, (1, 100, 100, 100)) result = transform.inverse(result) diff --git a/tests/test_rand_affine.py b/tests/test_rand_affine.py index 915b14bf51..44d4a0af57 100644 --- a/tests/test_rand_affine.py +++ b/tests/test_rand_affine.py @@ -143,7 +143,7 @@ class TestRandAffine(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_affine(self, input_param, input_data, expected_val): g = RandAffine(**input_param) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) g.rand_affine_grid.affine = torch.eye(4, dtype=torch.float64) # reset affine test_resampler_lazy(g, result, input_param, input_data, seed=123) diff --git a/tests/test_rand_affine_grid.py b/tests/test_rand_affine_grid.py index 113987a85c..ac6b835f13 100644 --- a/tests/test_rand_affine_grid.py +++ b/tests/test_rand_affine_grid.py @@ -201,7 +201,7 @@ class TestRandAffineGrid(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_affine_grid(self, input_param, input_data, expected_val): g = RandAffineGrid(**input_param) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) if "device" in 
input_data: self.assertEqual(result.device, input_data[device]) diff --git a/tests/test_rand_affined.py b/tests/test_rand_affined.py index a607029c1a..2362317dea 100644 --- a/tests/test_rand_affined.py +++ b/tests/test_rand_affined.py @@ -219,7 +219,7 @@ class TestRandAffined(unittest.TestCase): @parameterized.expand(x + [y] for x, y in itertools.product(TESTS, (False, True))) def test_rand_affined(self, input_param, input_data, expected_val, track_meta): set_track_meta(track_meta) - g = RandAffined(**input_param).set_random_state(123) + g = RandAffined(**input_param).set_random_generator(123) call_param = {"data": input_data} res = g(**call_param) # test lazy @@ -231,7 +231,7 @@ def test_rand_affined(self, input_param, input_data, expected_val, track_meta): lazy_init_param = input_param.copy() for key, mode in zip(input_param["keys"], input_param["mode"]): lazy_init_param["keys"], lazy_init_param["mode"] = key, mode - resampler = RandAffined(**lazy_init_param).set_random_state(123) + resampler = RandAffined(**lazy_init_param).set_random_generator(123) expected_output = resampler(**call_param) test_resampler_lazy(resampler, expected_output, lazy_init_param, call_param, seed=123, output_key=key) resampler.lazy = False @@ -248,7 +248,7 @@ def test_rand_affined(self, input_param, input_data, expected_val, track_meta): expected = expected_val[key] if isinstance(expected_val, dict) else expected_val assert_allclose(result, expected, rtol=_rtol, atol=1e-3, type_test=False) - g.set_random_state(4) + g.set_random_generator(4) res = g(**call_param) if not track_meta: return diff --git a/tests/test_rand_axis_flip.py b/tests/test_rand_axis_flip.py index 81e42372db..cc21fcd5ba 100644 --- a/tests/test_rand_axis_flip.py +++ b/tests/test_rand_axis_flip.py @@ -26,7 +26,7 @@ class TestRandAxisFlip(NumpyImageTestCase2D): def test_correct_results(self): for p in TEST_NDARRAYS_ALL: flip = RandAxisFlip(prob=1.0) - flip.set_random_state(seed=321) + flip.set_random_generator(seed=321) im 
= p(self.imt[0]) call_param = {"img": im} result = flip(**call_param) diff --git a/tests/test_rand_axis_flipd.py b/tests/test_rand_axis_flipd.py index 75357b23e1..0481dde418 100644 --- a/tests/test_rand_axis_flipd.py +++ b/tests/test_rand_axis_flipd.py @@ -26,7 +26,7 @@ class TestRandAxisFlip(NumpyImageTestCase3D): def test_correct_results(self): for p in TEST_NDARRAYS_ALL: flip = RandAxisFlipd(keys="img", prob=1.0) - flip.set_random_state(seed=1234) + flip.set_random_generator(seed=1234) im = p(self.imt[0]) call_param = {"data": {"img": im}} result = flip(**call_param) diff --git a/tests/test_rand_coarse_shuffle.py b/tests/test_rand_coarse_shuffle.py index adfb722b42..da6df05b38 100644 --- a/tests/test_rand_coarse_shuffle.py +++ b/tests/test_rand_coarse_shuffle.py @@ -55,7 +55,7 @@ class TestRandCoarseShuffle(unittest.TestCase): @parameterized.expand(TEST_CASES) def test_shuffle(self, input_param, input_data, expected_val): g = RandCoarseShuffle(**input_param) - g.set_random_state(seed=12) + g.set_random_generator(seed=12) result = g(**input_data) np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) diff --git a/tests/test_rand_coarse_shuffled.py b/tests/test_rand_coarse_shuffled.py index 3b5a1434f4..8813880635 100644 --- a/tests/test_rand_coarse_shuffled.py +++ b/tests/test_rand_coarse_shuffled.py @@ -49,7 +49,7 @@ class TestRandCoarseShuffled(unittest.TestCase): @parameterized.expand(TEST_CASES) def test_shuffle(self, input_param, input_data, expected_val): g = RandCoarseShuffled(**input_param) - g.set_random_state(seed=12) + g.set_random_generator(seed=12) result = g(input_data) np.testing.assert_allclose(result["img"], expected_val, rtol=1e-4, atol=1e-4) diff --git a/tests/test_rand_crop_by_label_classes.py b/tests/test_rand_crop_by_label_classes.py index 88d2631ca5..354fc30d04 100644 --- a/tests/test_rand_crop_by_label_classes.py +++ b/tests/test_rand_crop_by_label_classes.py @@ -149,11 +149,11 @@ def test_indices(self, input_param, 
input_data, expected_type, expected_shape): def test_pending_ops(self, input_param, input_data, _expected_type, _expected_shape): cropper = RandCropByLabelClasses(**input_param) # non-lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) expected = cropper(**input_data) self.assertIsInstance(expected[0], MetaTensor) # lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) cropper.lazy = True pending_result = cropper(**input_data) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_crop_by_label_classesd.py b/tests/test_rand_crop_by_label_classesd.py index 748f26f1ff..8d62eecb6a 100644 --- a/tests/test_rand_crop_by_label_classesd.py +++ b/tests/test_rand_crop_by_label_classesd.py @@ -138,11 +138,11 @@ def test_type_shape(self, input_param, input_data, expected_type, expected_shape def test_pending_ops(self, input_param, input_data, _expected_type, _expected_shape): cropper = RandCropByLabelClassesd(**input_param) # non-lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) expected = cropper(input_data) self.assertIsInstance(expected[0]["img"], MetaTensor) # lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) cropper.lazy = True pending_result = cropper(input_data) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_crop_by_pos_neg_label.py b/tests/test_rand_crop_by_pos_neg_label.py index 98af6b0b5e..3b09ace698 100644 --- a/tests/test_rand_crop_by_pos_neg_label.py +++ b/tests/test_rand_crop_by_pos_neg_label.py @@ -111,7 +111,7 @@ def test_type_shape(self, input_param, input_data, expected_shape): input_param_mod = self.convert_data_type(p, input_param) input_data_mod = self.convert_data_type(p, input_data) cropper = RandCropByPosNegLabel(**input_param_mod) - cropper.set_random_state(0) + cropper.set_random_generator(0) result = cropper(**input_data_mod) self.assertListEqual(cropper.spatial_size, input_param["spatial_size"]) @@ -131,11 +131,11 @@ 
def test_pending_ops(self, input_param, input_data, _expected_shape): input_data_mod = self.convert_data_type(p, input_data) cropper = RandCropByPosNegLabel(**input_param_mod) # non-lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) expected = cropper(**input_data_mod) self.assertIsInstance(expected[0], MetaTensor) # lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) cropper.lazy = True pending_result = cropper(**input_data_mod) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_crop_by_pos_neg_labeld.py b/tests/test_rand_crop_by_pos_neg_labeld.py index 1b57548d12..3d9f5be4b0 100644 --- a/tests/test_rand_crop_by_pos_neg_labeld.py +++ b/tests/test_rand_crop_by_pos_neg_labeld.py @@ -121,7 +121,7 @@ def test_type_shape(self, input_param, input_data, expected_shape): input_param_mod = self.convert_data_type(p, input_param) input_data_mod = self.convert_data_type(p, input_data) cropper = RandCropByPosNegLabeld(**input_param_mod) - cropper.set_random_state(0) + cropper.set_random_generator(0) result = cropper(input_data_mod) self.assertListEqual(cropper.cropper.spatial_size, input_param["spatial_size"]) @@ -136,7 +136,7 @@ def test_type_shape(self, input_param, input_data, expected_shape): def test_correct_center(self): cropper = RandCropByPosNegLabeld(keys="label", label_key="label", spatial_size=[3, 3]) - cropper.set_random_state(0) + cropper.set_random_generator(0) test_image = {"label": np.asarray([[[1, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 1]]])} result = cropper(test_image) np.testing.assert_allclose(result[0]["label"], np.asarray([[[0, 0, 1], [0, 0, 0], [0, 0, 0]]])) @@ -148,11 +148,11 @@ def test_pending_ops(self, input_param, input_data, _expected_shape): input_data_mod = self.convert_data_type(p, input_data) cropper = RandCropByPosNegLabeld(**input_param_mod) # non-lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) expected = cropper(input_data_mod) 
self.assertIsInstance(expected[0]["image"], MetaTensor) # lazy - cropper.set_random_state(0) + cropper.set_random_generator(0) cropper.lazy = True pending_result = cropper(input_data_mod) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_deform_grid.py b/tests/test_rand_deform_grid.py index 58b64ae596..5a4028eed1 100644 --- a/tests/test_rand_deform_grid.py +++ b/tests/test_rand_deform_grid.py @@ -129,7 +129,7 @@ class TestRandDeformGrid(unittest.TestCase): @parameterized.expand(TEST_CASES) def test_rand_deform_grid(self, input_param, input_data, expected_val): g = RandDeformGrid(**input_param) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) assert_allclose(result, expected_val, type_test=False, rtol=1e-3, atol=1e-3) diff --git a/tests/test_rand_elastic_2d.py b/tests/test_rand_elastic_2d.py index c59052854f..9b08789179 100644 --- a/tests/test_rand_elastic_2d.py +++ b/tests/test_rand_elastic_2d.py @@ -118,7 +118,7 @@ def test_rand_2d_elastic(self, input_param, input_data, expected_val): self.assertNotIsInstance(result, MetaTensor) self.assertIsInstance(result, torch.Tensor) set_track_meta(True) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) assert_allclose(result, expected_val, type_test=False, rtol=_rtol, atol=1e-4) diff --git a/tests/test_rand_elastic_3d.py b/tests/test_rand_elastic_3d.py index 0ff3ef6129..45b38c7bbe 100644 --- a/tests/test_rand_elastic_3d.py +++ b/tests/test_rand_elastic_3d.py @@ -90,12 +90,12 @@ class TestRand3DElastic(unittest.TestCase): def test_rand_3d_elastic(self, input_param, input_data, expected_val): g = Rand3DElastic(**input_param) set_track_meta(False) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) self.assertNotIsInstance(result, MetaTensor) self.assertIsInstance(result, torch.Tensor) set_track_meta(True) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) 
assert_allclose(result, expected_val, type_test=False, rtol=1e-1, atol=1e-1) diff --git a/tests/test_rand_elasticd_2d.py b/tests/test_rand_elasticd_2d.py index d0fbd5aa88..06ac25758c 100644 --- a/tests/test_rand_elasticd_2d.py +++ b/tests/test_rand_elasticd_2d.py @@ -165,7 +165,7 @@ def test_rand_2d_elasticd(self, input_param, input_data, expected_val): g = Rand2DElasticd(**input_param) if input_param.get("device", None) is None and isinstance(input_data["img"], torch.Tensor): input_data["img"].to("cuda:0" if torch.cuda.is_available() else "cpu") - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key in res: result = res[key] diff --git a/tests/test_rand_elasticd_3d.py b/tests/test_rand_elasticd_3d.py index e058293584..d3a8bd94c5 100644 --- a/tests/test_rand_elasticd_3d.py +++ b/tests/test_rand_elasticd_3d.py @@ -142,7 +142,7 @@ class TestRand3DElasticd(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_3d_elasticd(self, input_param, input_data, expected_val): g = Rand3DElasticd(**input_param) - g.set_random_state(123) + g.set_random_generator(123) if input_param.get("device", None) is None and isinstance(input_data["img"], torch.Tensor): input_data["img"].to("cuda:0" if torch.cuda.is_available() else "cpu") res = g(input_data) diff --git a/tests/test_rand_gaussian_noise.py b/tests/test_rand_gaussian_noise.py index 7d4d04ff3f..a45e31375c 100644 --- a/tests/test_rand_gaussian_noise.py +++ b/tests/test_rand_gaussian_noise.py @@ -31,7 +31,7 @@ class TestRandGaussianNoise(NumpyImageTestCase2D): def test_correct_results(self, _, im_type, mean, std): seed = 0 gaussian_fn = RandGaussianNoise(prob=1.0, mean=mean, std=std) - gaussian_fn.set_random_state(seed) + gaussian_fn.set_random_generator(seed) im = im_type(self.imt) noised = gaussian_fn(im) np.random.seed(seed) diff --git a/tests/test_rand_gaussian_noised.py b/tests/test_rand_gaussian_noised.py index 24fc19f226..392c4c7641 100644 --- a/tests/test_rand_gaussian_noised.py +++ 
b/tests/test_rand_gaussian_noised.py @@ -32,7 +32,7 @@ class TestRandGaussianNoised(NumpyImageTestCase2D): @parameterized.expand(TESTS) def test_correct_results(self, _, im_type, keys, mean, std): gaussian_fn = RandGaussianNoised(keys=keys, prob=1.0, mean=mean, std=std, dtype=np.float64) - gaussian_fn.set_random_state(seed) + gaussian_fn.set_random_generator(seed) im = im_type(self.imt) noised = gaussian_fn({k: im for k in keys}) np.random.seed(seed) diff --git a/tests/test_rand_gaussian_sharpen.py b/tests/test_rand_gaussian_sharpen.py index 8dff69cd4c..bd660082fd 100644 --- a/tests/test_rand_gaussian_sharpen.py +++ b/tests/test_rand_gaussian_sharpen.py @@ -131,7 +131,7 @@ class TestRandGaussianSharpen(unittest.TestCase): @parameterized.expand(TESTS) def test_value(self, arguments, image, expected_data): converter = RandGaussianSharpen(**arguments) - converter.set_random_state(seed=0) + converter.set_random_generator(seed=0) result = converter(image) assert_allclose(result, expected_data, atol=0, rtol=1e-4, type_test="tensor") diff --git a/tests/test_rand_gaussian_sharpend.py b/tests/test_rand_gaussian_sharpend.py index 4c32880053..24b55b49a4 100644 --- a/tests/test_rand_gaussian_sharpend.py +++ b/tests/test_rand_gaussian_sharpend.py @@ -134,7 +134,7 @@ class TestRandGaussianSharpend(unittest.TestCase): @parameterized.expand(TESTS) def test_value(self, arguments, image, expected_data): converter = RandGaussianSharpend(**arguments) - converter.set_random_state(seed=0) + converter.set_random_generator(seed=0) result = converter(image) assert_allclose(result["img"], expected_data, rtol=1e-4, type_test=False) diff --git a/tests/test_rand_gaussian_smooth.py b/tests/test_rand_gaussian_smooth.py index 9fb91a38a1..ff77420a4d 100644 --- a/tests/test_rand_gaussian_smooth.py +++ b/tests/test_rand_gaussian_smooth.py @@ -89,7 +89,7 @@ class TestRandGaussianSmooth(unittest.TestCase): @parameterized.expand(TESTS) def test_value(self, arguments, image, expected_data): converter = 
RandGaussianSmooth(**arguments) - converter.set_random_state(seed=0) + converter.set_random_generator(seed=0) result = converter(image) assert_allclose(result, expected_data, rtol=1e-4, type_test="tensor") diff --git a/tests/test_rand_gaussian_smoothd.py b/tests/test_rand_gaussian_smoothd.py index d312494e46..26fdf0fb4e 100644 --- a/tests/test_rand_gaussian_smoothd.py +++ b/tests/test_rand_gaussian_smoothd.py @@ -89,7 +89,7 @@ class TestRandGaussianSmoothd(unittest.TestCase): @parameterized.expand(TESTS) def test_value(self, arguments, image, expected_data): converter = RandGaussianSmoothd(**arguments) - converter.set_random_state(seed=0) + converter.set_random_generator(seed=0) result = converter(image) assert_allclose(result["img"], expected_data, rtol=1e-4, type_test=False) diff --git a/tests/test_rand_gibbs_noise.py b/tests/test_rand_gibbs_noise.py index a0d18ae7f3..4513ca93c7 100644 --- a/tests/test_rand_gibbs_noise.py +++ b/tests/test_rand_gibbs_noise.py @@ -58,9 +58,9 @@ def test_same_result(self, im_shape, input_type): im = self.get_data(im_shape, input_type) alpha = [0.5, 0.8] t = RandGibbsNoise(1.0, alpha) - t.set_random_state(42) + t.set_random_generator(42) out1 = t(deepcopy(im)) - t.set_random_state(42) + t.set_random_generator(42) out2 = t(deepcopy(im)) assert_allclose(out1, out2, rtol=1e-7, atol=1e-2, type_test="tensor") diff --git a/tests/test_rand_gibbs_noised.py b/tests/test_rand_gibbs_noised.py index 4120f967e2..abd1c7e1d8 100644 --- a/tests/test_rand_gibbs_noised.py +++ b/tests/test_rand_gibbs_noised.py @@ -61,9 +61,9 @@ def test_same_result(self, im_shape, input_type): data = self.get_data(im_shape, input_type) alpha = [0.5, 0.8] t = RandGibbsNoised(KEYS, 1.0, alpha) - t.set_random_state(42) + t.set_random_generator(42) out1 = t(deepcopy(data)) - t.set_random_state(42) + t.set_random_generator(42) out2 = t(deepcopy(data)) for k in KEYS: assert_allclose(out1[k], out2[k], rtol=1e-7, atol=0, type_test="tensor") diff --git 
a/tests/test_rand_grid_distortion.py b/tests/test_rand_grid_distortion.py index 8131a2382a..096227d830 100644 --- a/tests/test_rand_grid_distortion.py +++ b/tests/test_rand_grid_distortion.py @@ -87,7 +87,7 @@ class TestRandGridDistortion(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_grid_distortion(self, input_param, seed, input_data, expected_val): g = RandGridDistortion(**input_param) - g.set_random_state(seed=seed) + g.set_random_generator(seed=seed) result = g(input_data) if input_param["padding_mode"] != "reflection": assert_allclose(result, expected_val, type_test="tensor", rtol=1e-4, atol=1e-4) diff --git a/tests/test_rand_grid_distortiond.py b/tests/test_rand_grid_distortiond.py index 9f8ed3b9e6..9a934ba6e8 100644 --- a/tests/test_rand_grid_distortiond.py +++ b/tests/test_rand_grid_distortiond.py @@ -80,7 +80,7 @@ class TestRandGridDistortiond(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_grid_distortiond(self, input_param, seed, input_data, expected_val_img, expected_val_mask): g = RandGridDistortiond(**input_param) - g.set_random_state(seed=seed) + g.set_random_generator(seed=seed) result = g(input_data) assert_allclose(result["img"], expected_val_img, type_test=False, rtol=1e-4, atol=1e-4) assert_allclose(result["mask"], expected_val_mask, type_test=False, rtol=1e-4, atol=1e-4) diff --git a/tests/test_rand_grid_patch.py b/tests/test_rand_grid_patch.py index 494330584a..f55429185c 100644 --- a/tests/test_rand_grid_patch.py +++ b/tests/test_rand_grid_patch.py @@ -116,7 +116,7 @@ def tearDown(self): def test_rand_grid_patch(self, in_type, input_parameters, image, expected): input_image = in_type(image) splitter = RandGridPatch(**input_parameters) - splitter.set_random_state(1234) + splitter.set_random_generator(1234) output = splitter(input_image) self.assertEqual(len(output), len(expected)) for output_patch, expected_patch in zip(output, expected): @@ -132,7 +132,7 @@ def test_rand_grid_patch(self, in_type, 
input_parameters, image, expected): def test_rand_grid_patch_meta(self, input_parameters, image, expected, expected_meta): set_track_meta(True) splitter = RandGridPatch(**input_parameters) - splitter.set_random_state(1234) + splitter.set_random_generator(1234) output = splitter(image) self.assertEqual(len(output), len(expected)) if "path" in expected_meta[0]: diff --git a/tests/test_rand_grid_patchd.py b/tests/test_rand_grid_patchd.py index 23ca4a7881..fcef57682e 100644 --- a/tests/test_rand_grid_patchd.py +++ b/tests/test_rand_grid_patchd.py @@ -101,7 +101,7 @@ def test_rand_grid_patchd(self, in_type, input_parameters, image_dict, expected) if k == image_key: input_dict[k] = in_type(v) splitter = RandGridPatchd(keys=image_key, **input_parameters) - splitter.set_random_state(1234) + splitter.set_random_generator(1234) output = splitter(input_dict) self.assertEqual(len(output[image_key]), len(expected)) for output_patch, expected_patch in zip(output[image_key], expected): diff --git a/tests/test_rand_histogram_shift.py b/tests/test_rand_histogram_shift.py index 318dad9dfa..fecc847b42 100644 --- a/tests/test_rand_histogram_shift.py +++ b/tests/test_rand_histogram_shift.py @@ -59,7 +59,7 @@ class TestRandHistogramShift(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_histogram_shift(self, input_param, input_data, expected_val): g = RandHistogramShift(**input_param) - g.set_random_state(123) + g.set_random_generator(123) result = g(**input_data) assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4, type_test="tensor") diff --git a/tests/test_rand_histogram_shiftd.py b/tests/test_rand_histogram_shiftd.py index 45e81ab012..db055f2b40 100644 --- a/tests/test_rand_histogram_shiftd.py +++ b/tests/test_rand_histogram_shiftd.py @@ -64,7 +64,7 @@ class TestRandHistogramShiftD(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_histogram_shiftd(self, input_param, input_data, expected_val): g = RandHistogramShiftd(**input_param) - 
g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key in ("img",): result = res[key] diff --git a/tests/test_rand_k_space_spike_noise.py b/tests/test_rand_k_space_spike_noise.py index 4e7d59329b..f843e1a118 100644 --- a/tests/test_rand_k_space_spike_noise.py +++ b/tests/test_rand_k_space_spike_noise.py @@ -65,9 +65,9 @@ def test_same_result(self, im_shape, im_type, channel_wise): im = self.get_data(im_shape, im_type) intensity_range = [14, 15] t = RandKSpaceSpikeNoise(0.0, intensity_range, channel_wise) - t.set_random_state(42) + t.set_random_generator(42) out1 = t(deepcopy(im)) - t.set_random_state(42) + t.set_random_generator(42) out2 = t(deepcopy(im)) assert_allclose(out1, out2, type_test="tensor") diff --git a/tests/test_rand_k_space_spike_noised.py b/tests/test_rand_k_space_spike_noised.py index 3e1c11b2d9..505ff9f6e1 100644 --- a/tests/test_rand_k_space_spike_noised.py +++ b/tests/test_rand_k_space_spike_noised.py @@ -49,10 +49,10 @@ def test_same_result(self, im_shape, im_type): data = self.get_data(im_shape, im_type) t = RandKSpaceSpikeNoised(KEYS, prob=1.0, intensity_range=(13, 15), channel_wise=True) - t.set_random_state(42) + t.set_random_generator(42) out1 = t(deepcopy(data)) - t.set_random_state(42) + t.set_random_generator(42) out2 = t(deepcopy(data)) for k in KEYS: diff --git a/tests/test_rand_lambda.py b/tests/test_rand_lambda.py index 1f14499bc0..6147352124 100644 --- a/tests/test_rand_lambda.py +++ b/tests/test_rand_lambda.py @@ -64,9 +64,9 @@ def test_rand_lambdad_identity(self, t): img_t = type(img) test_func = RandTest() - test_func.set_random_state(seed=134) + test_func.set_random_generator(seed=134) expected = test_func(img) - test_func.set_random_state(seed=134) + test_func.set_random_generator(seed=134) # default prob tr = RandLambda(func=test_func) @@ -78,7 +78,7 @@ def test_rand_lambdad_identity(self, t): self.check(tr, img, img_t, ret, expected=img) trans = RandLambda(func=test_func, prob=0.5) - 
trans.set_random_state(seed=123) + trans.set_random_generator(seed=123) ret = trans(img) self.check(trans, img, img_t, ret, expected=img) diff --git a/tests/test_rand_lambdad.py b/tests/test_rand_lambdad.py index 6b60a3fe70..48d1a45001 100644 --- a/tests/test_rand_lambdad.py +++ b/tests/test_rand_lambdad.py @@ -54,9 +54,9 @@ def test_rand_lambdad_identity(self, t): data = {"img": img, "prop": 1.0} test_func = RandTest() - test_func.set_random_state(seed=134) + test_func.set_random_generator(seed=134) expected = {"img": test_func(data["img"]), "prop": 1.0} - test_func.set_random_state(seed=134) + test_func.set_random_generator(seed=134) # default prob tr = RandLambdad(keys=["img", "prop"], func=test_func, overwrite=[True, False]) @@ -68,7 +68,7 @@ def test_rand_lambdad_identity(self, t): self.check(tr, data, ret, expected=data) trans = RandLambdad(keys=["img", "prop"], func=test_func, prob=0.5) - trans.set_random_state(seed=123) + trans.set_random_generator(seed=123) ret = trans(deepcopy(data)) self.check(trans, data, ret, expected=data) diff --git a/tests/test_rand_rician_noise.py b/tests/test_rand_rician_noise.py index fe7135835e..7175b0e768 100644 --- a/tests/test_rand_rician_noise.py +++ b/tests/test_rand_rician_noise.py @@ -31,7 +31,7 @@ class TestRandRicianNoise(NumpyImageTestCase2D): def test_correct_results(self, _, in_type, mean, std): seed = 0 rician_fn = RandRicianNoise(prob=1.0, mean=mean, std=std) - rician_fn.set_random_state(seed) + rician_fn.set_random_generator(seed) im = in_type(self.imt) noised = rician_fn(im) if isinstance(im, torch.Tensor): diff --git a/tests/test_rand_rician_noised.py b/tests/test_rand_rician_noised.py index ae0acab4eb..5fc0eea905 100644 --- a/tests/test_rand_rician_noised.py +++ b/tests/test_rand_rician_noised.py @@ -32,7 +32,7 @@ class TestRandRicianNoisedNumpy(NumpyImageTestCase2D): @parameterized.expand(TESTS) def test_correct_results(self, _, in_type, keys, mean, std): rician_fn = RandRicianNoised(keys=keys, prob=1.0, 
mean=mean, std=std, dtype=np.float64) - rician_fn.set_random_state(seed) + rician_fn.set_random_generator(seed) noised = rician_fn({k: in_type(self.imt) for k in keys}) np.random.seed(seed) for k in keys: diff --git a/tests/test_rand_rotate.py b/tests/test_rand_rotate.py index ca3eda3b12..7dd334bcd6 100644 --- a/tests/test_rand_rotate.py +++ b/tests/test_rand_rotate.py @@ -85,7 +85,7 @@ def test_correct_results(self, im_type, degrees, keep_size, mode, padding_mode, "dtype": np.float64, } rotate_fn = RandRotate(**init_param) - rotate_fn.set_random_state(243) + rotate_fn.set_random_generator(243) call_param = {"img": im_type(self.imt[0])} rotated = rotate_fn(**call_param) @@ -126,7 +126,7 @@ def test_correct_results(self, im_type, x, y, z, keep_size, mode, padding_mode, "dtype": np.float64, } rotate_fn = RandRotate(**init_param) - rotate_fn.set_random_state(243) + rotate_fn.set_random_generator(243) im = im_type(self.imt[0]) call_param = {"img": im} rotated = rotate_fn(**call_param) diff --git a/tests/test_rand_rotate90.py b/tests/test_rand_rotate90.py index 88f88bf422..9444cd52e3 100644 --- a/tests/test_rand_rotate90.py +++ b/tests/test_rand_rotate90.py @@ -26,7 +26,7 @@ class TestRandRotate90(NumpyImageTestCase2D): def test_default(self): rotate = RandRotate90() for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(123) + rotate.set_random_generator(123) im = p(self.imt[0]) call_param = {"img": im} rotated = rotate(**call_param) @@ -50,7 +50,7 @@ def test_k(self): self.assertIsInstance(rotated, torch.Tensor) set_track_meta(True) - rotate.set_random_state(123) + rotate.set_random_generator(123) call_param = {"img": im} rotated = rotate(**call_param) test_local_inversion(rotate, rotated, im) @@ -65,7 +65,7 @@ def test_k(self): def test_spatial_axes(self): rotate = RandRotate90(spatial_axes=(0, 1), prob=1.0) for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(1234) + rotate.set_random_generator(1234) im = p(self.imt[0]) call_param = {"img": im} rotated = 
rotate(**call_param) @@ -82,7 +82,7 @@ def test_spatial_axes(self): def test_prob_k_spatial_axes(self): rotate = RandRotate90(prob=1.0, max_k=2, spatial_axes=(0, 1)) for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(234) + rotate.set_random_generator(234) im = p(self.imt[0]) call_param = {"img": im} rotated = rotate(**call_param) diff --git a/tests/test_rand_rotate90d.py b/tests/test_rand_rotate90d.py index 23e9025c08..59acc135ee 100644 --- a/tests/test_rand_rotate90d.py +++ b/tests/test_rand_rotate90d.py @@ -27,7 +27,7 @@ def test_default(self): key = "test" rotate = RandRotate90d(keys=key) for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(1323) + rotate.set_random_generator(1323) im = {key: p(self.imt[0])} call_param = {"data": im} rotated = rotate(**call_param) @@ -51,7 +51,7 @@ def test_k(self): key = "test" rotate = RandRotate90d(keys=key, max_k=2) for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(234) + rotate.set_random_generator(234) im = {key: p(self.imt[0])} call_param = {"data": im} rotated = rotate(**call_param) @@ -69,7 +69,7 @@ def test_spatial_axes(self): key = "test" rotate = RandRotate90d(keys=key, spatial_axes=(0, 1)) for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(234) + rotate.set_random_generator(234) im = {key: p(self.imt[0])} call_param = {"data": im} rotated = rotate(**call_param) @@ -87,7 +87,7 @@ def test_prob_k_spatial_axes(self): key = "test" rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, spatial_axes=(0, 1)) for p in TEST_NDARRAYS_ALL: - rotate.set_random_state(234) + rotate.set_random_generator(234) im = {key: p(self.imt[0])} call_param = {"data": im} rotated = rotate(**call_param) diff --git a/tests/test_rand_rotated.py b/tests/test_rand_rotated.py index a5a377b02f..968c64db13 100644 --- a/tests/test_rand_rotated.py +++ b/tests/test_rand_rotated.py @@ -123,7 +123,7 @@ def test_correct_results(self, im_type, degrees, keep_size, mode, padding_mode, } rotate_fn = RandRotated(**init_param) im = im_type(self.imt[0]) 
- rotate_fn.set_random_state(243) + rotate_fn.set_random_generator(243) call_param = {"data": {"img": im, "seg": im_type(self.segn[0])}} rotated = rotate_fn(**call_param) @@ -168,7 +168,7 @@ def test_correct_shapes(self, im_type, x, y, z, keep_size, mode, padding_mode, a "dtype": np.float64, } rotate_fn = RandRotated(**init_param) - rotate_fn.set_random_state(243) + rotate_fn.set_random_generator(243) call_param = {"data": {"img": im_type(self.imt[0]), "seg": im_type(self.segn[0])}} rotated = rotate_fn(**call_param) diff --git a/tests/test_rand_scale_crop.py b/tests/test_rand_scale_crop.py index bf43273fcf..2a6f7492ee 100644 --- a/tests/test_rand_scale_crop.py +++ b/tests/test_rand_scale_crop.py @@ -64,7 +64,7 @@ def test_random_shape(self, input_param, input_shape, expected_shape): for im_type in TEST_NDARRAYS_ALL: with self.subTest(im_type=im_type): cropper = RandScaleCrop(**input_param) - cropper.set_random_state(seed=123) + cropper.set_random_generator(seed=123) input_data = im_type(np.random.randint(0, 2, input_shape)) result = cropper(input_data) self.assertTupleEqual(result.shape, expected_shape) diff --git a/tests/test_rand_scale_cropd.py b/tests/test_rand_scale_cropd.py index 15a48a55d7..0482856a42 100644 --- a/tests/test_rand_scale_cropd.py +++ b/tests/test_rand_scale_cropd.py @@ -84,7 +84,7 @@ def test_random_shape(self, input_param, input_shape, expected_shape): for im_type in TEST_NDARRAYS_ALL: with self.subTest(im_type=im_type): cropper = self.Cropper(**input_param) - cropper.set_random_state(seed=123) + cropper.set_random_generator(seed=123) input_data = {"img": im_type(np.random.randint(0, 2, input_shape))} result = cropper(input_data)["img"] self.assertTupleEqual(result.shape, expected_shape) diff --git a/tests/test_rand_scale_intensity.py b/tests/test_rand_scale_intensity.py index a857c0cefb..163918e123 100644 --- a/tests/test_rand_scale_intensity.py +++ b/tests/test_rand_scale_intensity.py @@ -24,7 +24,7 @@ class 
TestRandScaleIntensity(NumpyImageTestCase2D): @parameterized.expand([[p] for p in TEST_NDARRAYS]) def test_value(self, p): scaler = RandScaleIntensity(factors=0.5, prob=1.0) - scaler.set_random_state(seed=0) + scaler.set_random_generator(seed=0) im = p(self.imt) result = scaler(im) np.random.seed(0) @@ -36,7 +36,7 @@ def test_value(self, p): @parameterized.expand([[p] for p in TEST_NDARRAYS]) def test_channel_wise(self, p): scaler = RandScaleIntensity(factors=0.5, channel_wise=True, prob=1.0) - scaler.set_random_state(seed=0) + scaler.set_random_generator(seed=0) im = p(self.imt) result = scaler(im) np.random.seed(0) diff --git a/tests/test_rand_scale_intensity_fixed_mean.py b/tests/test_rand_scale_intensity_fixed_mean.py index f43adab32f..01696f274d 100644 --- a/tests/test_rand_scale_intensity_fixed_mean.py +++ b/tests/test_rand_scale_intensity_fixed_mean.py @@ -24,7 +24,7 @@ class TestRandScaleIntensity(NumpyImageTestCase2D): @parameterized.expand([[p] for p in TEST_NDARRAYS]) def test_value(self, p): scaler = RandScaleIntensityFixedMean(prob=1.0, factors=0.5) - scaler.set_random_state(seed=0) + scaler.set_random_generator(seed=0) im = p(self.imt) result = scaler(im) np.random.seed(0) diff --git a/tests/test_rand_scale_intensity_fixed_meand.py b/tests/test_rand_scale_intensity_fixed_meand.py index c85c764a55..99e56b10e2 100644 --- a/tests/test_rand_scale_intensity_fixed_meand.py +++ b/tests/test_rand_scale_intensity_fixed_meand.py @@ -24,7 +24,7 @@ def test_value(self): key = "img" for p in TEST_NDARRAYS: scaler = RandScaleIntensityFixedMeand(keys=[key], factors=0.5, prob=1.0) - scaler.set_random_state(seed=0) + scaler.set_random_generator(seed=0) result = scaler({key: p(self.imt)}) np.random.seed(0) # simulate the randomize function of transform diff --git a/tests/test_rand_scale_intensityd.py b/tests/test_rand_scale_intensityd.py index 8d928ac157..eebef94111 100644 --- a/tests/test_rand_scale_intensityd.py +++ b/tests/test_rand_scale_intensityd.py @@ -24,7 
+24,7 @@ def test_value(self): key = "img" for p in TEST_NDARRAYS: scaler = RandScaleIntensityd(keys=[key], factors=0.5, prob=1.0) - scaler.set_random_state(seed=0) + scaler.set_random_generator(seed=0) result = scaler({key: p(self.imt)}) np.random.seed(0) # simulate the randomize function of transform @@ -36,7 +36,7 @@ def test_channel_wise(self): key = "img" for p in TEST_NDARRAYS: scaler = RandScaleIntensityd(keys=[key], factors=0.5, prob=1.0, channel_wise=True) - scaler.set_random_state(seed=0) + scaler.set_random_generator(seed=0) result = scaler({key: p(self.imt)}) np.random.seed(0) # simulate the randomize function of transform diff --git a/tests/test_rand_shift_intensity.py b/tests/test_rand_shift_intensity.py index 12b7ccf526..8896b4a742 100644 --- a/tests/test_rand_shift_intensity.py +++ b/tests/test_rand_shift_intensity.py @@ -24,7 +24,7 @@ class TestRandShiftIntensity(NumpyImageTestCase2D): @parameterized.expand([[p] for p in TEST_NDARRAYS]) def test_value(self, p): shifter = RandShiftIntensity(offsets=1.0, prob=1.0) - shifter.set_random_state(seed=0) + shifter.set_random_generator(seed=0) im = p(self.imt) result = shifter(im, factor=1.0) np.random.seed(0) diff --git a/tests/test_rand_shift_intensityd.py b/tests/test_rand_shift_intensityd.py index 92bc39dd20..315b34c450 100644 --- a/tests/test_rand_shift_intensityd.py +++ b/tests/test_rand_shift_intensityd.py @@ -25,7 +25,7 @@ def test_value(self): key = "img" for p in TEST_NDARRAYS: shifter = RandShiftIntensityd(keys=[key], offsets=1.0, prob=1.0) - shifter.set_random_state(seed=0) + shifter.set_random_generator(seed=0) result = shifter({key: p(self.imt)}) np.random.seed(0) # simulate the randomize() of transform @@ -38,7 +38,7 @@ def test_factor(self): stats = IntensityStatsd(keys=key, ops="max", key_prefix="orig") shifter = RandShiftIntensityd(keys=[key], offsets=1.0, factor_key=["orig_max"], prob=1.0) data = {key: self.imt, PostFix.meta(key): {"affine": None}} - shifter.set_random_state(seed=0) + 
shifter.set_random_generator(seed=0) result = shifter(stats(data)) np.random.seed(0) # simulate the randomize() of transform diff --git a/tests/test_rand_simulate_low_resolution.py b/tests/test_rand_simulate_low_resolution.py index 7d05faad36..251cf7ab09 100644 --- a/tests/test_rand_simulate_low_resolution.py +++ b/tests/test_rand_simulate_low_resolution.py @@ -74,7 +74,7 @@ class TestRandGaussianSmooth(unittest.TestCase): @parameterized.expand(TESTS) def test_value(self, arguments, image, expected_data): randsimlowres = RandSimulateLowResolution(**arguments) - randsimlowres.set_random_state(seed=0) + randsimlowres.set_random_generator(seed=0) result = randsimlowres(image) assert_allclose(result, expected_data, rtol=1e-4, type_test="tensor") diff --git a/tests/test_rand_simulate_low_resolutiond.py b/tests/test_rand_simulate_low_resolutiond.py index f058ec3b2b..ebd625cdf7 100644 --- a/tests/test_rand_simulate_low_resolutiond.py +++ b/tests/test_rand_simulate_low_resolutiond.py @@ -63,7 +63,7 @@ class TestRandGaussianSmoothd(unittest.TestCase): @parameterized.expand(TESTS) def test_value(self, arguments, image, expected_data): converter = RandSimulateLowResolutiond(**arguments) - converter.set_random_state(seed=0) + converter.set_random_generator(seed=0) result = converter(image) assert_allclose(result["img"], expected_data, rtol=1e-4, type_test=False) assert_allclose(result["seg"], expected_data, rtol=1e-4, type_test=False) diff --git a/tests/test_rand_spatial_crop.py b/tests/test_rand_spatial_crop.py index df121e2220..666d00ba1d 100644 --- a/tests/test_rand_spatial_crop.py +++ b/tests/test_rand_spatial_crop.py @@ -76,14 +76,14 @@ def test_random_shape(self, input_param, input_shape, expected_shape): for im_type in TEST_NDARRAYS_ALL: with self.subTest(im_type=im_type): cropper = RandSpatialCrop(**input_param) - cropper.set_random_state(seed=123) + cropper.set_random_generator(seed=123) input_data = im_type(np.random.randint(0, 2, input_shape)) expected = 
cropper(input_data) self.assertTupleEqual(expected.shape, expected_shape) # lazy # reset random seed to ensure the same results - cropper.set_random_state(seed=123) + cropper.set_random_generator(seed=123) cropper.lazy = True pending_result = cropper(input_data) self.assertIsInstance(pending_result, MetaTensor) diff --git a/tests/test_rand_spatial_crop_samples.py b/tests/test_rand_spatial_crop_samples.py index 92f0f9d9be..72e69a3dd4 100644 --- a/tests/test_rand_spatial_crop_samples.py +++ b/tests/test_rand_spatial_crop_samples.py @@ -90,7 +90,7 @@ def test_shape(self, input_param, input_shape, expected_shape, expected_last_ite for p in TEST_NDARRAYS_ALL: xform = RandSpatialCropSamples(**input_param) - xform.set_random_state(1234) + xform.set_random_generator(1234) result = xform(p(input_data)) np.testing.assert_equal(len(result), input_param["num_samples"]) @@ -107,11 +107,11 @@ def test_pending_ops(self, input_param, input_shape, _expected_shape, _expected_ xform = RandSpatialCropSamples(**input_param) image = p(input_data) # non-lazy - xform.set_random_state(1234) + xform.set_random_generator(1234) expected = xform(image) self.assertIsInstance(expected[0], MetaTensor) # lazy - xform.set_random_state(1234) + xform.set_random_generator(1234) xform.lazy = True pending_result = xform(image) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_spatial_crop_samplesd.py b/tests/test_rand_spatial_crop_samplesd.py index ec0d63cc50..3a14e3d7fa 100644 --- a/tests/test_rand_spatial_crop_samplesd.py +++ b/tests/test_rand_spatial_crop_samplesd.py @@ -87,7 +87,7 @@ class TestRandSpatialCropSamplesd(unittest.TestCase): @parameterized.expand([TEST_CASE_1, *TEST_CASE_2]) def test_shape(self, input_param, input_data, expected_shape, expected_last): xform = RandSpatialCropSamplesd(**input_param) - xform.set_random_state(1234) + xform.set_random_generator(1234) result = xform(input_data) _len = len(tuple(input_data.keys())) 
self.assertTupleEqual(tuple(result[0].keys())[:_len], tuple(input_data.keys())) @@ -116,12 +116,12 @@ def test_deep_copy(self): def test_pending_ops(self, input_param, input_data, _expected_shape, _expected_last): xform = RandSpatialCropSamplesd(**input_param) # non-lazy - xform.set_random_state(1234) + xform.set_random_generator(1234) expected = xform(input_data) self.assertIsInstance(expected[0]["img"], MetaTensor) # lazy - xform.set_random_state(1234) + xform.set_random_generator(1234) xform.lazy = True pending_result = xform(input_data) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_spatial_cropd.py b/tests/test_rand_spatial_cropd.py index 123459235f..b341617a01 100644 --- a/tests/test_rand_spatial_cropd.py +++ b/tests/test_rand_spatial_cropd.py @@ -81,14 +81,14 @@ def test_random_shape(self, input_param, input_shape, expected_shape): for im_type in TEST_NDARRAYS_ALL: with self.subTest(im_type=im_type): cropper = self.Cropper(**input_param) - cropper.set_random_state(seed=123) + cropper.set_random_generator(seed=123) input_data = {"img": im_type(np.random.randint(0, 2, input_shape))} expected = cropper(input_data)["img"] self.assertTupleEqual(expected.shape, expected_shape) # lazy # reset random seed to ensure the same results - cropper.set_random_state(seed=123) + cropper.set_random_generator(seed=123) cropper.lazy = True pending_result = cropper(input_data)["img"] self.assertIsInstance(pending_result, MetaTensor) diff --git a/tests/test_rand_std_shift_intensity.py b/tests/test_rand_std_shift_intensity.py index 535fb7cb20..1b6cd20b70 100644 --- a/tests/test_rand_std_shift_intensity.py +++ b/tests/test_rand_std_shift_intensity.py @@ -31,7 +31,7 @@ def test_value(self, p): offset = factor * np.std(self.imt) expected = p(self.imt + offset) shifter = RandStdShiftIntensity(factors=1.0, prob=1.0) - shifter.set_random_state(seed=0) + shifter.set_random_generator(seed=0) _imt = p(self.imt) result = shifter(_imt) if isinstance(_imt, 
torch.Tensor): diff --git a/tests/test_rand_std_shift_intensityd.py b/tests/test_rand_std_shift_intensityd.py index 31209ee754..68f747162d 100644 --- a/tests/test_rand_std_shift_intensityd.py +++ b/tests/test_rand_std_shift_intensityd.py @@ -29,7 +29,7 @@ def test_value(self): factor = np.random.uniform(low=-1.0, high=1.0) expected = self.imt + factor * np.std(self.imt) shifter = RandStdShiftIntensityd(keys=[key], factors=1.0, prob=1.0) - shifter.set_random_state(seed=0) + shifter.set_random_generator(seed=0) result = shifter({key: p(self.imt)})[key] assert_allclose(result, expected, rtol=1e-5, type_test="tensor") diff --git a/tests/test_rand_weighted_crop.py b/tests/test_rand_weighted_crop.py index 47a8f3bfa2..f546943fd2 100644 --- a/tests/test_rand_weighted_crop.py +++ b/tests/test_rand_weighted_crop.py @@ -157,7 +157,7 @@ class TestRandWeightedCrop(CropTest): @parameterized.expand(TESTS) def test_rand_weighted_crop(self, _, input_params, img, weight, expected_shape, expected_vals): crop = RandWeightedCrop(**input_params) - crop.set_random_state(10) + crop.set_random_generator(10) result = crop(img, weight) self.assertTrue(len(result) == input_params["num_samples"]) assert_allclose(result[0].shape, expected_shape) @@ -173,11 +173,11 @@ def test_rand_weighted_crop(self, _, input_params, img, weight, expected_shape, def test_pending_ops(self, _, input_param, img, weight, expected_shape, expected_vals): crop = RandWeightedCrop(**input_param) # non-lazy - crop.set_random_state(10) + crop.set_random_generator(10) expected = crop(img, weight) self.assertIsInstance(expected[0], MetaTensor) # lazy - crop.set_random_state(10) + crop.set_random_generator(10) crop.lazy = True pending_result = crop(img, weight) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_weighted_cropd.py b/tests/test_rand_weighted_cropd.py index 9d37779613..cf18b4e8f0 100644 --- a/tests/test_rand_weighted_cropd.py +++ b/tests/test_rand_weighted_cropd.py @@ -151,7 +151,7 
@@ class TestRandWeightedCrop(unittest.TestCase): @parameterized.expand(TESTS) def test_rand_weighted_cropd(self, _, init_params, input_data, expected_shape, expected_centers): crop = RandWeightedCropd(**init_params) - crop.set_random_state(10) + crop.set_random_generator(10) result = crop(input_data) self.assertTrue(len(result) == init_params["num_samples"]) _len = len(tuple(input_data.keys())) @@ -161,11 +161,11 @@ def test_rand_weighted_cropd(self, _, init_params, input_data, expected_shape, e def test_pending_ops(self, _, input_param, input_data, expected_shape, expected_centers): crop = RandWeightedCropd(**input_param) # non-lazy - crop.set_random_state(10) + crop.set_random_generator(10) expected = crop(input_data) self.assertIsInstance(expected[0]["img"], MetaTensor) # lazy - crop.set_random_state(10) + crop.set_random_generator(10) crop.lazy = True pending_result = crop(input_data) for i, _pending_result in enumerate(pending_result): diff --git a/tests/test_rand_zoom.py b/tests/test_rand_zoom.py index d52b79d8cf..adabca19dd 100644 --- a/tests/test_rand_zoom.py +++ b/tests/test_rand_zoom.py @@ -46,7 +46,7 @@ def test_correct_results(self, min_zoom, max_zoom, mode, keep_size, align_corner "align_corners": align_corners, } random_zoom = RandZoom(**init_param) - random_zoom.set_random_state(1234) + random_zoom.set_random_generator(1234) im = p(self.imt[0]) call_param = {"img": im} zoomed = random_zoom(**call_param) @@ -71,7 +71,7 @@ def test_keep_size(self): for p in TEST_NDARRAYS_ALL: im = p(self.imt[0]) random_zoom = RandZoom(prob=1.0, min_zoom=0.6, max_zoom=0.7, keep_size=True) - random_zoom.set_random_state(12) + random_zoom.set_random_generator(12) zoomed = random_zoom(im) test_local_inversion(random_zoom, zoomed, im) self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:])) @@ -94,7 +94,7 @@ def test_invalid_inputs(self, _, min_zoom, max_zoom, mode, raises): def test_auto_expand_3d(self): for p in TEST_NDARRAYS_ALL: random_zoom = 
RandZoom(prob=1.0, min_zoom=[0.8, 0.7], max_zoom=[1.2, 1.3], mode="nearest", keep_size=False) - random_zoom.set_random_state(1234) + random_zoom.set_random_generator(1234) test_data = p(np.random.randint(0, 2, size=[2, 2, 3, 4])) zoomed = random_zoom(test_data) assert_allclose(random_zoom._zoom, (1.048844, 1.048844, 0.962637), atol=1e-2, type_test=False) diff --git a/tests/test_rand_zoomd.py b/tests/test_rand_zoomd.py index bb0495c793..056cd2f0cc 100644 --- a/tests/test_rand_zoomd.py +++ b/tests/test_rand_zoomd.py @@ -46,7 +46,7 @@ def test_correct_results(self, min_zoom, max_zoom, mode, align_corners, keep_siz } random_zoom = RandZoomd(**init_param) for p in TEST_NDARRAYS_ALL: - random_zoom.set_random_state(1234) + random_zoom.set_random_generator(1234) im = p(self.imt[0]) call_param = {"data": {key: im}} @@ -97,7 +97,7 @@ def test_auto_expand_3d(self): keys="img", prob=1.0, min_zoom=[0.8, 0.7], max_zoom=[1.2, 1.3], mode="nearest", keep_size=False ) for p in TEST_NDARRAYS_ALL: - random_zoom.set_random_state(1234) + random_zoom.set_random_generator(1234) test_data = {"img": p(np.random.randint(0, 2, size=[2, 2, 3, 4]))} zoomed = random_zoom(test_data) assert_allclose(random_zoom.rand_zoom._zoom, (1.048844, 1.048844, 0.962637), atol=1e-2) diff --git a/tests/test_randomizable.py b/tests/test_randomizable.py index 96854a6db8..83123116af 100644 --- a/tests/test_randomizable.py +++ b/tests/test_randomizable.py @@ -16,6 +16,7 @@ import numpy as np from monai.transforms.transform import Randomizable +from monai.utils.utils_random_generator_adaptor import _LegacyRandomStateAdaptor class RandTest(Randomizable): @@ -25,21 +26,42 @@ def randomize(self, data=None): class TestRandomizable(unittest.TestCase): def test_default(self): + inst = RandTest() + r1 = inst.R.random() + self.assertTrue(isinstance(inst.R, _LegacyRandomStateAdaptor)) + inst.set_random_generator() + r2 = inst.R.random() + self.assertNotAlmostEqual(r1, r2) + + def test_seed(self): + inst = RandTest() + 
inst.set_random_generator(seed=123) + self.assertAlmostEqual(inst.R.random(), 0.69646918) + inst.set_random_generator(123) + self.assertAlmostEqual(inst.R.random(), 0.69646918) + + def test_generator(self): + inst = RandTest() + inst_r = _LegacyRandomStateAdaptor(random_state=np.random.RandomState(123)) + inst.set_random_generator(generator=inst_r) + self.assertAlmostEqual(inst.R.random(), 0.69646918) + + def test_legacy_default(self): inst = RandTest() r1 = inst.R.rand() - self.assertTrue(isinstance(inst.R, np.random.RandomState)) + self.assertTrue(isinstance(inst.R, _LegacyRandomStateAdaptor)) inst.set_random_state() r2 = inst.R.rand() self.assertNotAlmostEqual(r1, r2) - def test_seed(self): + def test_legacy_seed(self): inst = RandTest() inst.set_random_state(seed=123) self.assertAlmostEqual(inst.R.rand(), 0.69646918) inst.set_random_state(123) self.assertAlmostEqual(inst.R.rand(), 0.69646918) - def test_state(self): + def test_legacy_state(self): inst = RandTest() inst_r = np.random.RandomState(123) inst.set_random_state(state=inst_r) diff --git a/tests/test_randomizable_transform_type.py b/tests/test_randomizable_transform_type.py index 3a0995be68..9d851d468a 100644 --- a/tests/test_randomizable_transform_type.py +++ b/tests/test_randomizable_transform_type.py @@ -30,6 +30,6 @@ def test_is_randomizable_transform_type(self): inst = InheritsInterface() self.assertIsInstance(inst, RandomizableTrait) - def test_set_random_state_randomizable_transform(self): + def test_set_random_generator_randomizable_transform(self): inst = InheritsImplementation() - inst.set_random_state(0) + inst.set_random_generator(0) diff --git a/tests/test_smooth_field.py b/tests/test_smooth_field.py index c525311478..669f1c0dd2 100644 --- a/tests/test_smooth_field.py +++ b/tests/test_smooth_field.py @@ -91,7 +91,7 @@ class TestSmoothField(unittest.TestCase): @parameterized.expand(TESTS_CONTRAST) def test_rand_smooth_field_adjust_contrastd(self, input_param, input_data, expected_val): g = 
RandSmoothFieldAdjustContrastd(**input_param) - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key, result in res.items(): @@ -102,7 +102,7 @@ def test_rand_smooth_field_adjust_contrastd_pad(self): input_param, input_data, expected_val = TESTS_CONTRAST[0] g = RandSmoothFieldAdjustContrastd(pad=1, **input_param) - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key, result in res.items(): @@ -112,7 +112,7 @@ def test_rand_smooth_field_adjust_contrastd_pad(self): @parameterized.expand(TESTS_INTENSITY) def test_rand_smooth_field_adjust_intensityd(self, input_param, input_data, expected_val): g = RandSmoothFieldAdjustIntensityd(**input_param) - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key, result in res.items(): @@ -123,7 +123,7 @@ def test_rand_smooth_field_adjust_intensityd_pad(self): input_param, input_data, expected_val = TESTS_INTENSITY[0] g = RandSmoothFieldAdjustIntensityd(pad=1, **input_param) - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key, result in res.items(): @@ -133,7 +133,7 @@ def test_rand_smooth_field_adjust_intensityd_pad(self): @parameterized.expand(TESTS_DEFORM) def test_rand_smooth_deformd(self, input_param, input_data, expected_val): g = RandSmoothDeformd(**input_param) - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key, result in res.items(): @@ -149,7 +149,7 @@ def test_rand_smooth_nodeformd(self): g = RandSmoothDeformd( keys=(KEY,), spatial_size=im.shape, rand_size=rsize, prob=1.0, device=device, def_range=1e-20 ) - g.set_random_state(123) + g.set_random_generator(123) expected_val = {KEY: im[None]} @@ -165,7 +165,7 @@ def test_rand_smooth_deformd_pad(self): input_param, input_data, expected_val = TESTS_DEFORM[0] g = RandSmoothDeformd(pad=1, **input_param) - g.set_random_state(123) + g.set_random_generator(123) res = g(input_data) for key, result in res.items(): diff --git 
a/tests/test_spatial_combine_transforms.py b/tests/test_spatial_combine_transforms.py index 8594daed16..68bfd35e3f 100644 --- a/tests/test_spatial_combine_transforms.py +++ b/tests/test_spatial_combine_transforms.py @@ -155,7 +155,7 @@ def test_combine_transforms(self, input_shape, funcs): non_lazy_result = input_data for _func in _funcs: if isinstance(_func, mt.Randomizable): - _func.set_random_state(seed=seed) + _func.set_random_generator(seed=seed) non_lazy_result = _func(non_lazy_result) expected = non_lazy_result["img"] if is_map else non_lazy_result @@ -164,7 +164,7 @@ def test_combine_transforms(self, input_shape, funcs): for _func in _funcs: _func.lazy = True if isinstance(_func, mt.Randomizable): - _func.set_random_state(seed=seed) + _func.set_random_generator(seed=seed) pending_result = _func(pending_result) pending_result = pending_result["img"] if is_map else pending_result