From 2312bdf7689ec739dcb8e03096beee5bdce9faae Mon Sep 17 00:00:00 2001
From: Marcel Rosier
Date: Wed, 17 Jan 2024 22:07:50 +0100
Subject: [PATCH] - renaming - rm config options

---
 brainles_aurora/inferer/dataclasses.py |  4 ---
 brainles_aurora/inferer/inferer.py     | 19 +++++--------
 segmentation_test.py                   | 37 +++++++++++++++++++-------
 3 files changed, 35 insertions(+), 25 deletions(-)

diff --git a/brainles_aurora/inferer/dataclasses.py b/brainles_aurora/inferer/dataclasses.py
index f8230dc..a3109bf 100644
--- a/brainles_aurora/inferer/dataclasses.py
+++ b/brainles_aurora/inferer/dataclasses.py
@@ -33,8 +33,6 @@ class AuroraInfererConfig(BaseConfig):
         sliding_window_overlap (float, optional): Overlap ratio for sliding window inference. Defaults to 0.5.
         crop_size (Tuple[int, int, int], optional): Crop size for sliding window inference. Defaults to (192, 192, 32).
         model_selection (ModelSelection, optional): Model selection strategy. Defaults to ModelSelection.BEST.
-        include_whole_network_in_numpy_output_mode (bool, optional): Whether to include the whole network in numpy output mode. Defaults to False.
-        include_metastasis_network_in_numpy_output_mode (bool, optional): Whether to include the metastasis network in numpy output mode. Defaults to False.
     """
 
     tta: bool = True
@@ -44,5 +42,3 @@ class AuroraInfererConfig(BaseConfig):
     sliding_window_overlap: float = 0.5
     crop_size: Tuple[int, int, int] = (192, 192, 32)
     model_selection: ModelSelection = ModelSelection.BEST
-    include_whole_network_in_numpy_output_mode: bool = False
-    include_metastasis_network_in_numpy_output_mode: bool = False
diff --git a/brainles_aurora/inferer/inferer.py b/brainles_aurora/inferer/inferer.py
index 14c27cd..64fe50e 100644
--- a/brainles_aurora/inferer/inferer.py
+++ b/brainles_aurora/inferer/inferer.py
@@ -449,11 +449,6 @@ def _sliding_window_inference(self) -> None | Dict[str, np.ndarray]:
                     onehot_model_outputs_CHWD=outputs,
                 )
                 if self.config.output_mode == DataMode.NUMPY:
-                    # rm whole/ metastasus network if not requested
-                    if not self.config.include_whole_network_in_numpy_output_mode:
-                        _ = postprocessed_data.pop(Output.WHOLE_NETWORK)
-                    if not self.config.include_metastasis_network_in_numpy_output_mode:
-                        _ = postprocessed_data.pop(Output.METASTASIS_NETWORK)
                     return postprocessed_data
                 else:
                     self._save_as_nifti(postproc_data=postprocessed_data)
@@ -476,8 +471,8 @@ def infer(
         t2: str | Path | np.ndarray | None = None,
         fla: str | Path | np.ndarray | None = None,
         segmentation_file: str | Path | None = None,
-        whole_network_file: str | Path | None = None,
-        metastasis_network_file: str | Path | None = None,
+        whole_tumor_unbinarized_floats_file: str | Path | None = None,
+        metastasis_unbinarized_floats_file: str | Path | None = None,
         log_file: str | Path | None = None,
     ) -> Dict[str, np.ndarray] | None:
         """Perform inference on the provided images.
@@ -491,8 +486,8 @@ def infer(
             ### The following file paths are only required when in Nifti output mode:
             segmentation_file (str | Path | None, optional): Path where the segementation file should be stored. Defaults to None. Should be a nifti file. Defaults internally to a './segmentation.nii.gz'.
-            whole_network_file (str | Path | None, optional): Path. Defaults to None.
-            metastasis_network_file (str | Path | None, optional): _description_. Defaults to None.
+            whole_tumor_unbinarized_floats_file (str | Path | None, optional): Path. Defaults to None.
+            metastasis_unbinarized_floats_file (str | Path | None, optional): _description_. Defaults to None.
             ### Custom log file path. BY default this is set internally to the same path as segmentation_file with the extension .log or to ./{self.__class__.__name__}.log if no segmentation_file is provided
@@ -528,7 +523,7 @@ def infer(
             self.log.info(
                 f"Same inference mode {self.inference_mode} as previous infer call. Re-using loaded model"
             )
-
+        # self.model.eval()
 
         self.log.info("Setting up Dataloader")
         self.data_loader = self._get_data_loader()
@@ -542,8 +537,8 @@ def infer(
         )
         self.output_file_mapping = {
             Output.SEGMENTATION: segmentation_file or default_segmentation_path,
-            Output.WHOLE_NETWORK: whole_network_file,
-            Output.METASTASIS_NETWORK: metastasis_network_file,
+            Output.WHOLE_NETWORK: whole_tumor_unbinarized_floats_file,
+            Output.METASTASIS_NETWORK: metastasis_unbinarized_floats_file,
         }
 
         ########
diff --git a/segmentation_test.py b/segmentation_test.py
index 60a54d0..427cbb8 100644
--- a/segmentation_test.py
+++ b/segmentation_test.py
@@ -22,17 +22,36 @@ def load_np_from_nifti(path):
 
 def gpu_nifti():
     config = AuroraInfererConfig(
+        tta=False
+    ) # disable tta for faster inference in this showcase
+
+    # If you don't have a GPU that supports CUDA use the CPU version: AuroraInferer(config=config)
+    inferer = AuroraGPUInferer(config=config)
+
+    inferer.infer(
         t1=t1,
-        t1c=t1c,
-        t2=t2,
-        fla=fla,
-        output_metastasis_network=True,
-        output_whole_network=True,
+        segmentation_file="test_output/segmentation.nii.gz",
     )
-    inferer = AuroraGPUInferer(
-        config=config,
+
+
+def gpu_nifti_2():
+    config = AuroraInfererConfig(
+        tta=False
+    ) # disable tta for faster inference in this showcase
+
+    # If you don't have a GPU that supports CUDA use the CPU version: AuroraInferer(config=config)
+    inferer = AuroraGPUInferer(config=config)
+
+    inferer.infer(
+        t1=t1,
+        segmentation_file="test_output/nevergonna_seg.nii.gz",
+        whole_tumor_unbinarized_floats_file="test_output/whole_network.nii.gz",
+        metastasis_unbinarized_floats_file="test_output/metastasis_network.nii.gz",
+    )
+    inferer.infer(
+        t1=t1,
+        segmentation_file="test_output2/randomseg.nii.gz",
     )
-    inferer.infer()
 
 
 def cpu_nifti():
@@ -77,4 +96,4 @@ def gpu_output_np():
 
 
 if __name__ == "__main__":
-    gpu_nifti()
+    gpu_nifti_2()
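Note (not part of the patch): with the include_*_in_numpy_output_mode switches removed, NumPy output mode now returns every postprocessed array from infer(). The sketch below shows what a caller might do with that dictionary; the import path, the output_mode keyword, and the input file path are assumptions inferred from the files touched above, not confirmed API.

    # Minimal sketch under assumptions: these names are importable from
    # brainles_aurora.inferer, and the base config exposes the output_mode
    # field referenced in inferer.py.
    from brainles_aurora.inferer import (
        AuroraGPUInferer,
        AuroraInfererConfig,
        DataMode,
        Output,
    )

    config = AuroraInfererConfig(tta=False, output_mode=DataMode.NUMPY)
    inferer = AuroraGPUInferer(config=config)

    results = inferer.infer(t1="t1.nii.gz")  # illustrative input path
    segmentation = results[Output.SEGMENTATION]
    whole_tumor = results[Output.WHOLE_NETWORK]  # no longer dropped in NumPy mode
    metastasis = results[Output.METASTASIS_NETWORK]  # no longer dropped in NumPy mode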