Skip to content

Commit

Permalink
Merge pull request #74 from Remi-Gau/logger
Browse files Browse the repository at this point in the history
[ENH] use logger instead of print statements
  • Loading branch information
Remi-Gau authored Dec 14, 2023
2 parents d62c9fd + cfe904e commit 321705e
Show file tree
Hide file tree
Showing 11 changed files with 136 additions and 54 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
*/_version.py
*/data/test_data/
output/
work/

# Byte-compiled / optimized / DLL files
__pycache__/
Expand Down
8 changes: 6 additions & 2 deletions giga_connectome/atlas.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
from nibabel import Nifti1Image
from pkg_resources import resource_filename

from giga_connectome.logger import gc_logger

gc_log = gc_logger()


PRESET_ATLAS = ["DiFuMo", "MIST", "Schaefer20187Networks"]

Expand Down Expand Up @@ -42,7 +46,7 @@ def load_atlas_setting(atlas: Union[str, Path, dict]):
Path to the atlas files.
"""
atlas_config = _check_altas_config(atlas)
print(atlas_config)
gc_log.info(atlas_config)

# load template flow
templateflow_dir = atlas_config.get("templateflow_dir")
Expand Down Expand Up @@ -104,7 +108,7 @@ def resample_atlas_collection(
List of pathlib.Path
Paths to atlases sampled to group level grey matter mask.
"""
print("Resample atlas to group grey matter mask.")
gc_log.info("Resample atlas to group grey matter mask.")
resampled_atlases = []
for desc in tqdm(atlas_config["file_paths"]):
parcellation = atlas_config["file_paths"][desc]
Expand Down
20 changes: 20 additions & 0 deletions giga_connectome/logger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
"""General logger for the cohort_creator package."""
from __future__ import annotations

import logging

from rich.logging import RichHandler


def gc_logger(log_level: str = "INFO") -> logging.Logger:
    """Return the package-wide ``giga_connectome`` logger.

    Messages are rendered through :class:`rich.logging.RichHandler`.
    The named logger is configured directly instead of via
    ``logging.basicConfig`` because a library should not reconfigure the
    application's root logger, and ``basicConfig`` is a no-op once any
    handler is installed — which silently ignored ``log_level`` on every
    call after the first.

    Parameters
    ----------
    log_level :
        Name of the logging level to apply, e.g. ``"INFO"`` or ``"DEBUG"``.

    Returns
    -------
    logging.Logger
        The shared logger named ``"giga_connectome"``.
    """
    logger = logging.getLogger("giga_connectome")

    # Modules call gc_logger() at import time; attach the handler only once
    # so repeated calls do not produce duplicated log lines.
    if not logger.handlers:
        handler = RichHandler()
        # RichHandler already renders timestamp and level; keep message only.
        handler.setFormatter(logging.Formatter("%(message)s", datefmt="[%X]"))
        logger.addHandler(handler)

    logger.setLevel(log_level)

    return logger
64 changes: 29 additions & 35 deletions giga_connectome/mask.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@

from giga_connectome.atlas import resample_atlas_collection

from giga_connectome.logger import gc_logger

gc_log = gc_logger()


def generate_gm_mask_atlas(
working_dir: Path,
Expand Down Expand Up @@ -63,7 +67,6 @@ def generate_group_mask(
template: str = "MNI152NLin2009cAsym",
templateflow_dir: Optional[Path] = None,
n_iter: int = 2,
verbose: int = 1,
) -> Nifti1Image:
"""
Generate a group EPI grey matter mask, and overlaid with a MNI grey
Expand All @@ -88,9 +91,6 @@ def generate_group_mask(
Number of repetitions of dilation and erosion steps performed in
scipy.ndimage.binary_closing function.
verbose :
Level of verbosity.
Keyword Arguments
-----------------
Used to filter the current
Expand All @@ -102,12 +102,10 @@ def generate_group_mask(
nibabel.nifti1.Nifti1Image
EPI (grey matter) mask for the current group of subjects.
"""
if verbose > 1:
print(f"Found {len(imgs)} masks")
if exclude := _check_mask_affine(imgs, verbose):
gc_log.debug(f"Found {len(imgs)} masks")
if exclude := _check_mask_affine(imgs):
imgs, __annotations__ = _get_consistent_masks(imgs, exclude)
if verbose > 1:
print(f"Remaining: {len(imgs)} masks")
gc_log.debug(f"Remaining: {len(imgs)} masks")

# templateflow environment setting to get around network issue
if templateflow_dir and templateflow_dir.exists():
Expand All @@ -129,7 +127,7 @@ def generate_group_mask(
memory=None,
verbose=0,
)
print(
gc_log.info(
f"Group EPI mask affine:\n{group_epi_mask.affine}"
f"\nshape: {group_epi_mask.shape}"
)
Expand Down Expand Up @@ -204,7 +202,7 @@ def _get_consistent_masks(


def _check_mask_affine(
mask_imgs: List[Union[Path, str, Nifti1Image]], verbose: int = 1
mask_imgs: List[Union[Path, str, Nifti1Image]]
) -> Union[list, None]:
"""Given a list of input mask images, show the most common affine matrix
and subjects with different values.
Expand All @@ -215,9 +213,6 @@ def _check_mask_affine(
See :ref:`extracting_data`.
3D or 4D EPI image with same affine.
verbose :
Level of verbosity.
Returns
-------
Expand All @@ -244,12 +239,11 @@ def _check_mask_affine(
common_affine = max(
set(header_info["affine"]), key=header_info["affine"].count
)
if verbose > 0:
print(
f"We found {len(set(header_info['affine']))} unique affine "
f"matrices. The most common one is "
f"{key_to_header[common_affine]}"
)
gc_log.info(
f"We found {len(set(header_info['affine']))} unique affine "
f"matrices. The most common one is "
f"{key_to_header[common_affine]}"
)
odd_balls = set(header_info["affine"]) - {common_affine}
if not odd_balls:
return None
Expand All @@ -259,18 +253,16 @@ def _check_mask_affine(
ob_index = [
i for i, aff in enumerate(header_info["affine"]) if aff == ob
]
if verbose > 1:
print(
"The following subjects has a different affine matrix "
f"({key_to_header[ob]}) comparing to the most common value: "
f"{mask_imgs[ob_index]}."
)
exclude += ob_index
if verbose > 0:
print(
f"{len(exclude)} out of {len(mask_imgs)} has "
"different affine matrix. Ignore when creating group mask."
gc_log.debug(
"The following subjects has a different affine matrix "
f"({key_to_header[ob]}) comparing to the most common value: "
f"{mask_imgs[ob_index]}."
)
exclude += ob_index
gc_log.info(
f"{len(exclude)} out of {len(mask_imgs)} has "
"different affine matrix. Ignore when creating group mask."
)
return sorted(exclude)


Expand All @@ -284,7 +276,9 @@ def _check_pregenerated_masks(template, working_dir, atlas):
if not group_mask.exists():
group_mask = None
else:
print(f"Found pregenerated group level grey matter mask: {group_mask}")
gc_log.info(
f"Found pregenerated group level grey matter mask: {group_mask}"
)

# atlas
resampled_atlases = []
Expand All @@ -301,8 +295,8 @@ def _check_pregenerated_masks(template, working_dir, atlas):
if not all(all_exist):
resampled_atlases = None
else:
print(
f"Found resampled atlases: {resampled_atlases}. Skipping group "
"level mask generation step."
gc_log.info(
f"Found resampled atlases:\n{[str(x) for x in resampled_atlases]}."
"\nSkipping group level mask generation step."
)
return group_mask, resampled_atlases
21 changes: 18 additions & 3 deletions giga_connectome/postprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,13 @@
from nilearn.connectome import ConnectivityMeasure
from nilearn.maskers import NiftiLabelsMasker, NiftiMapsMasker
from bids.layout import BIDSImageFile

from giga_connectome import utils
from giga_connectome.connectome import generate_timeseries_connectomes
from giga_connectome.denoise import denoise_nifti_voxel
from giga_connectome.logger import gc_logger

gc_log = gc_logger()


def run_postprocessing_dataset(
Expand Down Expand Up @@ -97,8 +101,13 @@ def run_postprocessing_dataset(
)

# transform data
print("processing subjects")
gc_log.info("Processing subject")

for img in tqdm(images):

print()
gc_log.info(f"Processing image:\n{img.filename}")

# process timeseries
denoised_img = denoise_nifti_voxel(
strategy, group_mask, standardize, smoothing_fwhm, img.path
Expand All @@ -109,7 +118,9 @@ def run_postprocessing_dataset(
attribute_name = f"{subject}_{specifier}_atlas-{atlas}_desc-{desc}"
if not denoised_img:
time_series_atlas, correlation_matrix = None, None
print(f"{attribute_name}: no volume after scrubbing")

gc_log.info(f"{attribute_name}: no volume after scrubbing")

continue

# extract timeseries and connectomes
Expand Down Expand Up @@ -139,8 +150,12 @@ def run_postprocessing_dataset(
f"{attribute_name}_connectome", data=correlation_matrix
)

gc_log.info(f"Saved to:\n{output_path}")

if analysis_level == "group":
print("create group connectome")

gc_log.info("Create group connectome")

for desc in connectomes:
average_connectome = np.mean(
np.array(connectomes[desc]), axis=0
Expand Down
11 changes: 11 additions & 0 deletions giga_connectome/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,17 @@ def global_parser() -> argparse.ArgumentParser:
"pipeline (option A). The default is False.",
action="store_true",
)
parser.add_argument(
"--verbosity",
help="""
Verbosity level.
""",
required=False,
choices=[0, 1, 2, 3],
default=2,
type=int,
nargs=1,
)
return parser


Expand Down
3 changes: 1 addition & 2 deletions giga_connectome/tests/test_mask.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,7 @@ def test_check_mask_affine():
weird = Nifti1Image(processed_vol, np.eye(4) * np.array([1, 1, 1.5, 1]).T)
weird2 = Nifti1Image(processed_vol, np.eye(4) * np.array([1, 1, 1.6, 1]).T)
exclude = mask._check_mask_affine(
[processed, processed, processed, processed, weird, weird, weird2],
verbose=2,
[processed, processed, processed, processed, weird, weird, weird2]
)
assert len(exclude) == 3
assert exclude == [4, 5, 6]
Expand Down
11 changes: 8 additions & 3 deletions giga_connectome/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@
from bids.layout import Query
from bids import BIDSLayout

from giga_connectome.logger import gc_logger

gc_log = gc_logger()


def get_bids_images(
subjects: List[str],
Expand Down Expand Up @@ -175,7 +179,7 @@ def get_subject_lists(
]


def check_path(path: Path, verbose=True):
def check_path(path: Path):
"""Check if given path (file or dir) already exists, and if so returns a
new path with _<n> appended (n being the number of paths with the same name
that exist already).
Expand All @@ -196,6 +200,7 @@ def check_path(path: Path, verbose=True):
]
n = str(max(existing_numbers) + 1) if existing_numbers else "1"
path = path_parent / f"{path.stem}_{n}{ext}"
if verbose:
print(f"Specified path already exists, using {path} instead.")

gc_log.debug(f"Specified path already exists, using {path} instead.")

return path
42 changes: 35 additions & 7 deletions giga_connectome/workflow.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
"""
Process fMRIPrep outputs to timeseries based on denoising strategy.
"""
from __future__ import annotations

from giga_connectome import (
generate_gm_mask_atlas,
load_atlas_setting,
Expand All @@ -10,10 +12,29 @@

from giga_connectome.denoise import is_ica_aroma
from giga_connectome import utils
from giga_connectome.logger import gc_logger


gc_log = gc_logger()


def set_verbosity(verbosity: int | list[int]) -> None:
    """Map a numeric verbosity (0-3) onto the package logger's level.

    ``argparse`` delivers the ``--verbosity`` value wrapped in a
    single-element list (``nargs=1``), so unwrap it first. Values outside
    0-3 leave the logger level untouched.
    """
    if isinstance(verbosity, list):
        verbosity = verbosity[0]
    level_by_verbosity = {
        0: "ERROR",
        1: "WARNING",
        2: "INFO",
        3: "DEBUG",
    }
    level = level_by_verbosity.get(verbosity)
    if level is not None:
        gc_log.setLevel(level)


def workflow(args):
print(vars(args))

gc_log.info(vars(args))

# set file paths
bids_dir = args.bids_dir
output_dir = args.output_dir
Expand All @@ -30,6 +51,8 @@ def workflow(args):
strategy = get_denoise_strategy(args.denoise_strategy)
atlas = load_atlas_setting(args.atlas)

set_verbosity(args.verbosity)

# check output path
output_dir.mkdir(parents=True, exist_ok=True)
working_dir.mkdir(parents=True, exist_ok=True)
Expand All @@ -38,7 +61,8 @@ def workflow(args):
template = (
"MNI152NLin6Asym" if is_ica_aroma(strategy) else "MNI152NLin2009cAsym"
)
print("Indexing BIDS directory")

gc_log.info(f"Indexing BIDS directory:\n\t{bids_dir}")

# create subject ts and connectomes
# refactor the two cases into one
Expand All @@ -55,8 +79,10 @@ def workflow(args):
f"sub-{subject}_atlas-{atlas['name']}"
f"_desc-{strategy['name']}.h5"
)
connectome_path = utils.check_path(connectome_path, verbose=True)
print("Generate subject level connectomes")
connectome_path = utils.check_path(connectome_path)

gc_log.info(f"Generate subject level connectomes: sub-{subject}")

run_postprocessing_dataset(
strategy,
resampled_atlases,
Expand All @@ -80,9 +106,11 @@ def workflow(args):
connectome_path = (
output_dir / f"atlas-{atlas['name']}_desc-{strategy['name']}.h5"
)
connectome_path = utils.check_path(connectome_path, verbose=True)
print(connectome_path)
print("Generate subject level connectomes")
connectome_path = utils.check_path(connectome_path)

gc_log.info(connectome_path)
gc_log.info("Generate subject level connectomes")

run_postprocessing_dataset(
strategy,
resampled_atlases,
Expand Down
Loading

0 comments on commit 321705e

Please sign in to comment.