[Doc] add an example of single subject rois; reduce example time #999

Merged: 6 commits, Aug 29, 2023
2 changes: 1 addition & 1 deletion AFQ/api/bundle_dict.py
@@ -362,7 +362,7 @@ def _cond_load(self, roi_or_sl, resample_to):
             raise ValueError((
                 "Attempted to load an ROI using BIDS description without "
                 "First providing BIDS information."))
-        suffix = roi_or_sl.pop("suffix", "dwi")
+        suffix = roi_or_sl.get("suffix", "dwi")
         roi_or_sl = find_file(
             self._bids_info, self._bids_path,
             roi_or_sl,
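Using get instead of pop keeps the "suffix" key in the filters dict passed to find_file, where it now takes precedence over the default suffix (see the matching change in AFQ/definitions/utils.py below). A minimal sketch of the difference, with hypothetical filter values:

filters = {"desc": "exampleROI", "suffix": "mask"}  # hypothetical values
filters.pop("suffix", "dwi")  # returns "mask" but removes the key from filters
filters.get("suffix", "dwi")  # returns "mask" and leaves filters unchanged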
43 changes: 9 additions & 34 deletions AFQ/api/participant.py
@@ -14,8 +14,7 @@
 from AFQ.tasks.tractography import get_tractography_plan
 from AFQ.tasks.segmentation import get_segmentation_plan
 from AFQ.tasks.viz import get_viz_plan
-from AFQ.utils.path import drop_extension
-from AFQ.data.s3bids import read_json
+from AFQ.utils.path import drop_extension, apply_cmd_to_afq_derivs


 __all__ = ["ParticipantAFQ"]
@@ -230,17 +229,6 @@ def cmd_outputs(self, cmd="rm", dependent_on=None, exceptions=[],
             Parts of command that are used after the filename.
             Default: ""
         """
-        if dependent_on is None:
-            dependent_on_list = ["trk", "rec", "dwi"]
-        elif dependent_on.lower() == "track":
-            dependent_on_list = ["trk", "rec"]
-        elif dependent_on.lower() == "recog":
-            dependent_on_list = ["rec"]
-        else:
-            raise ValueError((
-                "dependent_on must be one of "
-                "None, 'track', or 'recog'."))
-
         exception_file_names = []
         for exception in exceptions:
             file_name = self.export(exception)
@@ -251,27 +239,14 @@
                     f"The exception '{exception}' does not correspond"
                     " to a filename and will be ignored."))

-        for filename in os.listdir(self.output_dir):
-            full_path = os.path.join(self.output_dir, filename)
-            if (full_path in exception_file_names)\
-                    or (not full_path.startswith(self.export("base_fname")))\
-                    or filename.endswith("json"):
-                continue
-            if os.path.isfile(full_path) or os.path.islink(full_path):
-                sidecar_file = f'{drop_extension(full_path)}.json'
-                if op.exists(sidecar_file):
-                    sidecar_info = read_json(sidecar_file)
-                    if "dependent" in sidecar_info\
-                            and sidecar_info["dependent"]\
-                            in dependent_on_list:
-                        os.system(f"{cmd} {full_path} {suffix}")
-                        os.system(f"{cmd} {sidecar_file} {suffix}")
-                else:
-                    os.system(f"{cmd} {full_path} {suffix}")
-            elif os.path.isdir(full_path):
-                # other than ROIs, folders are dependent on everything
-                if dependent_on is None or filename != "ROIs":
-                    os.system(f"{cmd} -r {full_path} {suffix}")
+        apply_cmd_to_afq_derivs(
+            self.output_dir,
+            self.export("base_fname"),
+            cmd=cmd,
+            exception_file_names=exception_file_names,
+            suffix=suffix,
+            dependent_on=dependent_on
+        )

         # do not assume previous calculations are still valid
         # after file operations
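With the traversal factored out into AFQ.utils.path.apply_cmd_to_afq_derivs (added below), cmd_outputs only resolves its exceptions into filenames and delegates. A hedged usage sketch, assuming a ParticipantAFQ instance named myafq (the argument values follow the docstring above):

# Remove only derivatives that depend on tractography,
# keeping ROIs and other files computed directly from the DWI data:
myafq.cmd_outputs(cmd="rm", dependent_on="track")

# Copy all outputs somewhere else; suffix is appended after each filename,
# so every file is passed through "cp <file> /backup/afq/":
myafq.cmd_outputs(cmd="cp", suffix="/backup/afq/")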
26 changes: 19 additions & 7 deletions AFQ/data/fetch.py
@@ -6,7 +6,7 @@
     load_tractogram, save_tractogram, StatefulTractogram, Space)
 from dipy.data.fetcher import _make_fetcher
 import dipy.data as dpd
-from AFQ.utils.path import drop_extension
+from AFQ.utils.path import drop_extension, apply_cmd_to_afq_derivs

 import os
 import os.path as op
@@ -895,7 +895,7 @@ def organize_cfin_data(path=None):
         "PipelineDescription": {"Name": "dipy"}})


-def organize_stanford_data(path=None, clear_previous_afq=False):
+def organize_stanford_data(path=None, clear_previous_afq=None):
     """
     If necessary, downloads the Stanford HARDI dataset into DIPY directory and
     creates a BIDS compliant file-system structure in AFQ data directory:
@@ -921,8 +921,14 @@ def organize_stanford_data(path=None, clear_previous_afq=False):
     ├── sub-01_ses-01_dwi.bvec
     └── sub-01_ses-01_dwi.nii.gz

-    If clear_previous_afq is True and there is an afq folder in derivatives,
-    it will be removed.
+    Parameters
+    ----------
+    path : str or None
+        Path to download dataset to, by default it is ~/AFQ_data/.
+    clear_previous_afq : str or None
+        Whether to clear previous afq results in the stanford
+        hardi dataset. If not None, can be "all", "track", or "recog".
+        Default: None
     """
     logger = logging.getLogger('AFQ')

@@ -941,10 +947,16 @@ def organize_stanford_data(path=None, clear_previous_afq=False):
     dmriprep_folder = op.join(derivatives_path, 'vistasoft')
     freesurfer_folder = op.join(derivatives_path, 'freesurfer')

-    if clear_previous_afq:
+    if clear_previous_afq is not None and op.exists(derivatives_path):
         afq_folder = op.join(derivatives_path, 'afq')
-        if op.exists(afq_folder):
-            shutil.rmtree(afq_folder)
+        if clear_previous_afq == "all":
+            if op.exists(afq_folder):
+                shutil.rmtree(afq_folder)
+        else:
+            apply_cmd_to_afq_derivs(
+                op.join(afq_folder, "sub-01/ses-01"),
+                op.join(afq_folder, "sub-01/ses-01/sub-01_ses-01_dwi"),
+                dependent_on=clear_previous_afq)

     if not op.exists(derivatives_path):
         logger.info(f'creating derivatives directory: {derivatives_path}')
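The clear_previous_afq values map directly onto the dependency levels understood by apply_cmd_to_afq_derivs. A short sketch of the three non-None options, grounded in the docstring above:

import AFQ.data.fetch as afd

afd.organize_stanford_data(clear_previous_afq="all")    # remove the entire derivatives/afq folder
afd.organize_stanford_data(clear_previous_afq="track")  # remove only tractography-dependent outputs
afd.organize_stanford_data(clear_previous_afq="recog")  # remove only bundle-recognition outputs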
39 changes: 0 additions & 39 deletions AFQ/data/s3bids.py
@@ -11,7 +11,6 @@
 from dask.diagnostics import ProgressBar

 from pathlib import Path
-import os
 import os.path as op

 import logging
@@ -1222,44 +1221,6 @@ def s3fs_nifti_read(fname, fs=None, anon=False):
     return img


-def write_json(fname, data):
-    """
-    Write data to JSON file.
-
-    Parameters
-    ----------
-    fname : str
-        Full path to the file to write.
-
-    data : dict
-        A dict containing the data to write.
-
-    Returns
-    -------
-    None
-    """
-    with open(fname, 'w') as ff:
-        json.dump(data, ff, default=lambda obj: "Not Serializable")
-
-
-def read_json(fname):
-    """
-    Read data from a JSON file.
-
-    Parameters
-    ----------
-    fname : str
-        Full path to the data-containing file
-
-    Returns
-    -------
-    dict
-    """
-    with open(fname, 'r') as ff:
-        out = json.load(ff)
-    return out
-
-
 def s3fs_json_read(fname, fs=None, anon=False):
     """
     Reads json directly from S3
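write_json and read_json now live in AFQ.utils.path (see the new file content below), so AFQ.data.s3bids keeps only its S3-specific helpers. The import-only hunks in the remaining modules all follow the same one-line pattern:

# before:
# from AFQ.data.s3bids import read_json, write_json
# after:
from AFQ.utils.path import read_json, write_json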
6 changes: 3 additions & 3 deletions AFQ/definitions/mapping.py
@@ -7,7 +7,7 @@
 from AFQ.definitions.utils import Definition, find_file
 from dipy.align import syn_registration, affine_registration
 import AFQ.registration as reg
-import AFQ.data.s3bids as afs
+from AFQ.utils.path import write_json
 from AFQ.tasks.utils import get_fname

 from dipy.align.imaffine import AffineMap
@@ -230,7 +230,7 @@ def prealign(self, base_fname, reg_subject, reg_template, save=True):
             np.save(prealign_file, aff)
             meta_fname = get_fname(
                 base_fname, f'{prealign_file_desc}.json')
-            afs.write_json(meta_fname, meta)
+            write_json(meta_fname, meta)
         return prealign_file if save else np.load(prealign_file)

     def get_for_subses(self, base_fname, dwi, bids_info, reg_subject,
@@ -264,7 +264,7 @@ def get_for_subses(self, base_fname, dwi, bids_info, reg_subject,
         meta["reg_subject"] = reg_subject
         if isinstance(reg_template, str):
             meta["reg_template"] = reg_template
-        afs.write_json(meta_fname, meta)
+        write_json(meta_fname, meta)
         reg_prealign_inv = np.linalg.inv(reg_prealign) if self.use_prealign\
             else None
         mapping = reg.read_mapping(
4 changes: 2 additions & 2 deletions AFQ/definitions/utils.py
@@ -79,12 +79,13 @@ def find_file(bids_layout, path, filters, suffix, session, subject,
     """
     if "extension" not in filters:
         filters["extension"] = extension
+    if "suffix" not in filters:
+        filters["suffix"] = suffix

     # First, try to match the session.
     nearest = bids_layout.get_nearest(
         path,
         **filters,
-        suffix=suffix,
         session=session,
         subject=subject,
         full_search=True,
@@ -96,7 +97,6 @@
         nearest = bids_layout.get_nearest(
             path,
             **filters,
-            suffix=suffix,
             subject=subject,
             full_search=True,
             strict=False,
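Previously, a "suffix" entry left in filters would have collided with the explicit suffix keyword in get_nearest (a duplicate-keyword TypeError), which is why _cond_load had to pop it. Writing the default into filters only when absent lets a caller-supplied suffix win. An illustrative, hypothetical call:

filters = {"desc": "exampleROI", "suffix": "mask"}  # hypothetical BIDS entities
# find_file(bids_layout, path, filters, suffix="dwi", ...) now queries
# pybids with suffix="mask"; "dwi" is used only when filters carries
# no suffix of its own.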
3 changes: 1 addition & 2 deletions AFQ/tasks/data.py
@@ -17,8 +17,7 @@
 from AFQ.tasks.utils import get_fname, with_name, str_to_desc
 import AFQ.api.bundle_dict as abd
 import AFQ.data.fetch as afd
-from AFQ.utils.path import drop_extension
-from AFQ.data.s3bids import write_json
+from AFQ.utils.path import drop_extension, write_json
 from AFQ._fixes import gwi_odf

 from AFQ.definitions.utils import Definition
3 changes: 1 addition & 2 deletions AFQ/tasks/decorators.py
@@ -7,12 +7,11 @@
 import nibabel as nib
 from dipy.io.streamline import save_tractogram
 from dipy.io.stateful_tractogram import StatefulTractogram
-from AFQ.data.s3bids import write_json

 import numpy as np

 from AFQ.tasks.utils import get_fname
-from AFQ.utils.path import drop_extension
+from AFQ.utils.path import drop_extension, write_json


 # These should only be used with pimms.calc
4 changes: 1 addition & 3 deletions AFQ/tasks/mapping.py
@@ -8,9 +8,7 @@
 from AFQ.tasks.decorators import as_file
 from AFQ.tasks.utils import with_name, str_to_desc
 import AFQ.data.fetch as afd
-from AFQ.data.s3bids import write_json
-from AFQ.utils.path import drop_extension
-import AFQ.utils.volume as auv
+from AFQ.utils.path import drop_extension, write_json
 from AFQ.definitions.mapping import SynMap
 from AFQ.definitions.utils import Definition
 from AFQ.definitions.image import ImageDefinition
3 changes: 1 addition & 2 deletions AFQ/tasks/segmentation.py
@@ -11,10 +11,9 @@
 from AFQ.tasks.decorators import as_file, as_img
 from AFQ.tasks.utils import get_fname, with_name, str_to_desc
 import AFQ.segmentation as seg
-from AFQ.utils.path import drop_extension
+from AFQ.utils.path import drop_extension, write_json
 import AFQ.utils.streamlines as aus
 from AFQ.tasks.utils import get_default_args
-from AFQ.data.s3bids import write_json
 import AFQ.utils.volume as auv

 from dipy.io.streamline import load_tractogram, save_tractogram
2 changes: 1 addition & 1 deletion AFQ/tasks/viz.py
@@ -12,9 +12,9 @@

 from AFQ.tasks.utils import get_fname, with_name, str_to_desc
 import AFQ.utils.volume as auv
-from AFQ.data.s3bids import write_json
 from AFQ.viz.utils import Viz
 import AFQ.utils.streamlines as aus
+from AFQ.utils.path import write_json

 from plotly.subplots import make_subplots
77 changes: 77 additions & 0 deletions AFQ/utils/path.py
@@ -1,6 +1,83 @@
 import os.path as op
+import os
+import json


+def write_json(fname, data):
+    """
+    Write data to JSON file.
+
+    Parameters
+    ----------
+    fname : str
+        Full path to the file to write.
+
+    data : dict
+        A dict containing the data to write.
+
+    Returns
+    -------
+    None
+    """
+    with open(fname, 'w') as ff:
+        json.dump(data, ff, default=lambda obj: "Not Serializable")
+
+
+def read_json(fname):
+    """
+    Read data from a JSON file.
+
+    Parameters
+    ----------
+    fname : str
+        Full path to the data-containing file
+
+    Returns
+    -------
+    dict
+    """
+    with open(fname, 'r') as ff:
+        out = json.load(ff)
+    return out
+
+
 def drop_extension(path):
     base_fname = op.basename(path).split('.')[0]
     return path.split(base_fname)[0] + base_fname
+
+
+def apply_cmd_to_afq_derivs(
+        derivs_dir, base_fname, cmd="rm", exception_file_names=[], suffix="",
+        dependent_on=None):
+    if dependent_on is None:
+        dependent_on_list = ["trk", "rec", "dwi"]
+    elif dependent_on.lower() == "track":
+        dependent_on_list = ["trk", "rec"]
+    elif dependent_on.lower() == "recog":
+        dependent_on_list = ["rec"]
+    else:
+        raise ValueError((
+            "dependent_on must be one of "
+            "None, 'track', or 'recog'."))
+
+    for filename in os.listdir(derivs_dir):
+        full_path = os.path.join(derivs_dir, filename)
+        if (full_path in exception_file_names)\
+                or (not full_path.startswith(base_fname))\
+                or filename.endswith("json"):
+            continue
+        if os.path.isfile(full_path) or os.path.islink(full_path):
+            sidecar_file = f'{drop_extension(full_path)}.json'
+            if op.exists(sidecar_file):
+                sidecar_info = read_json(sidecar_file)
+                if "dependent" in sidecar_info\
+                        and sidecar_info["dependent"]\
+                        in dependent_on_list:
+                    os.system(f"{cmd} {full_path} {suffix}")
+                    os.system(f"{cmd} {sidecar_file} {suffix}")
+            else:
+                os.system(f"{cmd} {full_path} {suffix}")
+        elif os.path.isdir(full_path):
+            # other than ROIs, folders are dependent on everything
+            if dependent_on is None or filename != "ROIs":
+                os.system(f"{cmd} -r {full_path} {suffix}")
3 changes: 1 addition & 2 deletions AFQ/utils/streamlines.py
@@ -2,10 +2,9 @@
 from dipy.io.streamline import load_tractogram
 import numpy as np
 from dipy.io.stateful_tractogram import StatefulTractogram, Space
-from AFQ.data.s3bids import read_json
 import os.path as op

-from AFQ.utils.path import drop_extension
+from AFQ.utils.path import drop_extension, read_json


 class SegmentedSFT():
2 changes: 1 addition & 1 deletion examples/howto_examples/plot_afq_callosal.py
@@ -23,7 +23,7 @@
 # Retrieves `Stanford HARDI dataset <https://purl.stanford.edu/ng782rw8378>`_.
 #

-afd.organize_stanford_data(clear_previous_afq=True)
+afd.organize_stanford_data(clear_previous_afq="track")

 ##########################################################################
 # Set tractography parameters (optional)
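Passing "track" instead of the old True clears only tractography-dependent outputs from any previous run, leaving DWI-derived files in place to be reused, consistent with this PR's goal of reducing example run time.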