diff --git a/docs/workflows.rst b/docs/workflows.rst
index 0088e38af..cdeea1b5a 100644
--- a/docs/workflows.rst
+++ b/docs/workflows.rst
@@ -477,29 +477,27 @@ Amplitude of low-frequency fluctuation (ALFF) is a measure that ostensibly local
 spontaneous neural activity in resting-state BOLD data.
 It is calculated by the following:
 
-1. The ``denoised, interpolated BOLD`` is passed along to the ALFF workflow.
-2. If censoring+interpolation was performed, then the interpolated time series is censored at this
-   point.
-3. Voxel-wise BOLD time series are normalized (mean-centered and scaled to unit standard deviation)
+1. The ``denoised BOLD`` is passed along to the ALFF workflow.
+2. Voxel-wise BOLD time series are normalized (mean-centered and scaled to unit standard deviation)
    over time.
    This will ensure that the power spectrum from ``periodogram`` and ``lombscargle`` are
    roughly equivalent.
-4. The power spectrum and associated frequencies are estimated from the BOLD data.
+3. The power spectrum and associated frequencies are estimated from the BOLD data.
 
    - If censoring+interpolation was not performed, then this uses :func:`scipy.signal.periodogram`.
    - If censoring+interpolation was performed, then this uses :func:`scipy.signal.lombscargle`.
 
-5. The square root of the power spectrum is calculated.
-6. The power spectrum values corresponding to the frequency range retained by the
+4. The square root of the power spectrum is calculated.
+5. The power spectrum values corresponding to the frequency range retained by the
    temporal filtering step are extracted from the full power spectrum.
-7. The mean of the within-band power spectrum is calculated and multiplied by 2.
-8. The ALFF value is multiplied by the standard deviation of the voxel-wise
+6. The mean of the within-band power spectrum is calculated and multiplied by 2.
+7. The ALFF value is multiplied by the standard deviation of the voxel-wise
    ``denoised, interpolated BOLD`` time series.
    This brings ALFF back to its original scale, as if the time series was not normalized.
 
 ALFF will only be calculated if the bandpass filter is enabled
 (i.e., if the ``--disable-bandpass-filter`` flag is not used).
-Smoothed ALFF derivatives will also be generated if the ``--smoothing`` flag is used.
+ALFF will also be calculated from the smoothed, denoised BOLD if the ``--smoothing`` flag is used.
 
 
 ReHo
@@ -507,13 +505,18 @@ ReHo
 :func:`~xcp_d.workflows.restingstate.init_reho_nifti_wf`,
 :func:`~xcp_d.workflows.restingstate.init_reho_cifti_wf`
 
-Regional Homogeneity (ReHo) is a measure of local temporal uniformity in the BOLD signal computed at each voxel of the processed image.
-Greater ReHo values correspond to greater synchrony among BOLD activity patterns measured in a local neighborhood of voxels, with neighborhood size determined by a user-specified radius of voxels.
+Regional Homogeneity (ReHo) is a measure of local temporal uniformity in the BOLD signal computed at each voxel of the processed image.
+Greater ReHo values correspond to greater synchrony among BOLD activity patterns measured in a local neighborhood of voxels,
+with neighborhood size determined by a user-specified radius of voxels.
 ReHo is calculated as the coefficient of concordance among all voxels in a sphere centered on the target voxel.
-For NIfTIs, ReHo is always calculated via AFNI’s 3dReho with 27 voxels in each neighborhood, using Kendall's coefficient of concordance (KCC).
-For CIFTIs, the left and right hemisphere are extracted into GIFTI format via Connectome Workbench’s CIFTISeparateMetric. Next, the mesh adjacency matrix is obtained,and Kendall's coefficient of concordance (KCC) is calculated, with each vertex having four neighbors.
-For subcortical voxels in the CIFTIs, 3dReho is used with the same parameters that are used for NIfTIs.
+For NIfTIs, ReHo is always calculated via AFNI's 3dReho with 27 voxels in each neighborhood,
+using Kendall's coefficient of concordance (KCC).
+For CIFTIs, the left and right hemisphere are extracted into GIFTI format via
+Connectome Workbench's CIFTISeparateMetric.
+Next, the mesh adjacency matrix is obtained, and Kendall's coefficient of concordance (KCC) is calculated,
+with each vertex having 5-6 neighbors.
+For subcortical voxels in the CIFTIs, 3dReho is used with the same parameters that are used for NIfTIs.
 
 
 Parcellation and functional connectivity estimation [OPTIONAL]
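For readers following the recipe above, here is a minimal single-voxel sketch of the uncensored path. It mirrors the documented steps only; the data, the TR of 2.0 s, and the 0.01-0.08 Hz band edges are made up for illustration and this is not xcp_d's own compute_alff function:

    import numpy as np
    from scipy import signal

    # Toy single-voxel example of the documented ALFF steps; values are illustrative.
    rng = np.random.default_rng(0)
    voxel_data = rng.standard_normal(300)  # denoised BOLD time series (300 volumes)
    tr = 2.0                               # repetition time in seconds
    high_pass, low_pass = 0.01, 0.08       # band retained by the temporal filter

    sd_original = voxel_data.std()
    voxel_data = (voxel_data - voxel_data.mean()) / voxel_data.std()  # normalize over time

    # Power spectrum from the full (uncensored) series
    freqs, power = signal.periodogram(voxel_data, fs=1.0 / tr)
    power_sqrt = np.sqrt(power)

    # Restrict to the retained band, average, double, and rescale to the original units
    band = (freqs >= high_pass) & (freqs <= low_pass)
    alff = 2 * power_sqrt[band].mean() * sd_original

The censored path swaps the periodogram for a Lomb-Scargle estimate evaluated at the retained acquisition times; a matching sketch follows the xcp_d/utils/restingstate.py hunks below.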
diff --git a/xcp_d/tests/test_cli.py b/xcp_d/tests/test_cli.py
index 24f248d46..4bec02338 100644
--- a/xcp_d/tests/test_cli.py
+++ b/xcp_d/tests/test_cli.py
@@ -349,6 +349,7 @@ def test_nibabies(data_dir, output_dir, working_dir):
         "--head_radius=auto",
         "--smoothing=0",
         "--fd-thresh=0",
+        "--mem-gb=1",
     ]
     _run_and_generate(
         test_name=test_name,
diff --git a/xcp_d/tests/test_workflows_restingstate.py b/xcp_d/tests/test_workflows_restingstate.py
index 5177819e5..4eb2bfc4e 100644
--- a/xcp_d/tests/test_workflows_restingstate.py
+++ b/xcp_d/tests/test_workflows_restingstate.py
@@ -51,12 +51,13 @@ def test_nifti_alff(ds001419_data, tmp_path_factory):
     alff_wf.base_dir = tempdir
     alff_wf.inputs.inputnode.bold_mask = bold_mask
     alff_wf.inputs.inputnode.denoised_bold = bold_file
+    alff_wf.inputs.inputnode.smoothed_bold = bold_file
 
     compute_alff_res = alff_wf.run()
     nodes = get_nodes(compute_alff_res)
 
     # Let's get the mean of the ALFF for later comparison
-    original_alff = nodes["alff_wf.alff_compt"].get_output("alff")
+    original_alff = nodes["alff_wf.compute_alff"].get_output("alff")
     original_alff_data_mean = nb.load(original_alff).get_fdata().mean()
 
     # Now let's do an FFT
@@ -92,11 +93,12 @@ def test_nifti_alff(ds001419_data, tmp_path_factory):
     alff_wf.base_dir = tempdir
     alff_wf.inputs.inputnode.bold_mask = bold_mask
     alff_wf.inputs.inputnode.denoised_bold = filename
+    alff_wf.inputs.inputnode.smoothed_bold = filename
 
     compute_alff_res = alff_wf.run()
     nodes = get_nodes(compute_alff_res)
 
     # Let's get the new ALFF mean
-    new_alff = nodes["alff_wf.alff_compt"].get_output("alff")
+    new_alff = nodes["alff_wf.compute_alff"].get_output("alff")
     assert os.path.isfile(new_alff)
     new_alff_data_mean = nb.load(new_alff).get_fdata().mean()
@@ -142,7 +144,7 @@ def test_cifti_alff(ds001419_data, tmp_path_factory):
     nodes = get_nodes(compute_alff_res)
 
     # Let's get the mean of the data for later comparison
-    original_alff = nodes["alff_wf.alff_compt"].get_output("alff")
+    original_alff = nodes["alff_wf.compute_alff"].get_output("alff")
     original_alff_data_mean = nb.load(original_alff).get_fdata().mean()
 
     # Now let's do an FFT
@@ -172,7 +174,7 @@ def test_cifti_alff(ds001419_data, tmp_path_factory):
     nodes = get_nodes(compute_alff_res)
 
     # Let's get the new ALFF mean
-    new_alff = nodes["alff_wf.alff_compt"].get_output("alff")
+    new_alff = nodes["alff_wf.compute_alff"].get_output("alff")
     assert os.path.isfile(new_alff)
     new_alff_data_mean = nb.load(new_alff).get_fdata().mean()
 
diff --git a/xcp_d/utils/doc.py b/xcp_d/utils/doc.py
index 598bf8017..790b7618e 100644
--- a/xcp_d/utils/doc.py
+++ b/xcp_d/utils/doc.py
@@ -145,7 +145,6 @@
 smoothing : :obj:`float`
     The full width at half maximum (FWHM), in millimeters,
     of the Gaussian smoothing kernel that will be applied to the post-processed and denoised data.
-    ALFF and ReHo outputs will also be smoothing with this kernel.
 """
 
 docdict[
diff --git a/xcp_d/utils/restingstate.py b/xcp_d/utils/restingstate.py
index 13296d73b..02dadca65 100644
--- a/xcp_d/utils/restingstate.py
+++ b/xcp_d/utils/restingstate.py
@@ -107,7 +107,7 @@ def compute_alff(data_matrix, low_pass, high_pass, TR, sample_mask=None):
     Parameters
     ----------
     data_matrix : numpy.ndarray
-        data matrix points by timepoints
+        data matrix points by timepoints, after optional censoring
     low_pass : float
         low pass frequency in Hz
     high_pass : float
@@ -140,7 +140,11 @@ def compute_alff(data_matrix, low_pass, high_pass, TR, sample_mask=None):
 
     if sample_mask is not None:
         sample_mask = sample_mask.astype(bool)
-        assert sample_mask.size == n_volumes, f"{sample_mask.size} != {n_volumes}"
+        assert sample_mask.sum() == n_volumes, f"{sample_mask.sum()} != {n_volumes}"
+
+        time_arr = np.arange(0, sample_mask.size * TR, TR)
+        time_arr = time_arr[sample_mask]
+        assert time_arr.size == n_volumes, f"{time_arr.size} != {n_volumes}"
 
     alff = np.zeros(n_voxels)
     for i_voxel in range(n_voxels):
@@ -156,26 +160,19 @@ def compute_alff(data_matrix, low_pass, high_pass, TR, sample_mask=None):
         # have the same scale.
         # However, this also changes ALFF's scale, so we retain the SD to rescale ALFF.
         sd_scale = np.std(voxel_data)
+        voxel_data -= np.mean(voxel_data)
+        voxel_data /= np.std(voxel_data)
 
         if sample_mask is not None:
-            voxel_data_censored = voxel_data[sample_mask]
-            voxel_data_censored -= np.mean(voxel_data_censored)
-            voxel_data_censored /= np.std(voxel_data_censored)
-
-            time_arr = np.arange(0, n_volumes * TR, TR)
-            assert sample_mask.size == time_arr.size, f"{sample_mask.size} != {time_arr.size}"
-            time_arr = time_arr[sample_mask]
             frequencies_hz = np.linspace(0, 0.5 * fs, (n_volumes // 2) + 1)[1:]
             angular_frequencies = 2 * np.pi * frequencies_hz
             power_spectrum = signal.lombscargle(
                 time_arr,
-                voxel_data_censored,
+                voxel_data,
                 angular_frequencies,
                 normalize=True,
             )
         else:
-            voxel_data -= np.mean(voxel_data)
-            voxel_data /= np.std(voxel_data)
             # get array of sample frequencies + power spectrum density
             frequencies_hz, power_spectrum = signal.periodogram(
                 voxel_data,
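To make the censored branch above concrete: once high-motion volumes are dropped, the retained samples keep their original acquisition times, so the spectrum comes from scipy.signal.lombscargle on an irregular time grid rather than from an FFT-based periodogram. The following is a self-contained sketch that mirrors that logic with made-up data; the mask, TR, and array names are illustrative, not the function's API:

    import numpy as np
    from scipy import signal

    # Illustrative stand-alone version of the censored branch in compute_alff.
    rng = np.random.default_rng(1)
    tr = 2.0
    sample_mask = rng.random(300) > 0.2                   # True where volumes were retained
    voxel_data = rng.standard_normal(sample_mask.sum())   # already-censored time series

    # Retained volumes keep their original acquisition times.
    time_arr = np.arange(0, sample_mask.size * tr, tr)[sample_mask]

    voxel_data = (voxel_data - voxel_data.mean()) / voxel_data.std()

    n_volumes = voxel_data.size
    fs = 1.0 / tr
    frequencies_hz = np.linspace(0, 0.5 * fs, (n_volumes // 2) + 1)[1:]
    power_spectrum = signal.lombscargle(
        time_arr,
        voxel_data,
        2 * np.pi * frequencies_hz,  # lombscargle expects angular frequencies
        normalize=True,
    )

From here, the square root, band averaging, doubling, and rescaling by the original standard deviation proceed exactly as in the uncensored case.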
diff --git a/xcp_d/workflows/bold.py b/xcp_d/workflows/bold.py
index 4d6b1e3b7..fe26052a1 100644
--- a/xcp_d/workflows/bold.py
+++ b/xcp_d/workflows/bold.py
@@ -296,7 +296,8 @@ def init_postprocess_nifti_wf(
             ("outputnode.temporal_mask", "inputnode.temporal_mask"),
         ]),
         (denoise_bold_wf, alff_wf, [
-            ("outputnode.denoised_interpolated_bold", "inputnode.denoised_bold"),
+            ("outputnode.censored_denoised_bold", "inputnode.denoised_bold"),
+            ("outputnode.smoothed_denoised_bold", "inputnode.smoothed_bold"),
         ]),
     ])  # fmt:skip
 
diff --git a/xcp_d/workflows/cifti.py b/xcp_d/workflows/cifti.py
index 4ac82b0b5..a2601201b 100644
--- a/xcp_d/workflows/cifti.py
+++ b/xcp_d/workflows/cifti.py
@@ -280,7 +280,8 @@ def init_postprocess_cifti_wf(
             ("outputnode.temporal_mask", "inputnode.temporal_mask"),
         ]),
         (denoise_bold_wf, alff_wf, [
-            ("outputnode.denoised_interpolated_bold", "inputnode.denoised_bold"),
+            ("outputnode.censored_denoised_bold", "inputnode.denoised_bold"),
+            ("outputnode.smoothed_denoised_bold", "inputnode.smoothed_bold"),
         ]),
     ])  # fmt:skip
 
diff --git a/xcp_d/workflows/restingstate.py b/xcp_d/workflows/restingstate.py
index 53cf494ad..0e6b71c6e 100644
--- a/xcp_d/workflows/restingstate.py
+++ b/xcp_d/workflows/restingstate.py
@@ -3,24 +3,19 @@
 """Workflows for calculating resting state-specific metrics."""
 from nipype import Function
 from nipype.interfaces import utility as niu
-from nipype.interfaces.workbench.cifti import CiftiSmooth
 from nipype.pipeline import engine as pe
 from niworkflows.engine.workflows import LiterateWorkflow as Workflow
-from templateflow.api import get as get_template
 
 from xcp_d import config
 from xcp_d.interfaces.bids import DerivativesDataSink
-from xcp_d.interfaces.nilearn import Smooth
 from xcp_d.interfaces.restingstate import ComputeALFF, ReHoNamePatch, SurfaceReHo
 from xcp_d.interfaces.workbench import (
     CiftiCreateDenseScalar,
     CiftiSeparateMetric,
     CiftiSeparateVolumeAll,
-    FixCiftiIntent,
 )
 from xcp_d.utils.doc import fill_doc
 from xcp_d.utils.plotting import plot_alff_reho_surface, plot_alff_reho_volumetric
-from xcp_d.utils.utils import fwhm2sigma
 
 
 @fill_doc
@@ -61,8 +56,9 @@ def init_alff_wf(
     Inputs
     ------
     denoised_bold
-        This is the ``filtered, interpolated, denoised BOLD``,
-        although interpolation is not necessary if the data were not originally censored.
+        This is the denoised BOLD after optional re-censoring.
+    smoothed_bold
+        Smoothed BOLD after optional re-censoring.
     bold_mask
         bold mask if bold is nifti
     temporal_mask
@@ -118,7 +114,9 @@
     """
 
     inputnode = pe.Node(
-        niu.IdentityInterface(fields=["denoised_bold", "bold_mask", "temporal_mask"]),
+        niu.IdentityInterface(
+            fields=["denoised_bold", "smoothed_bold", "bold_mask", "temporal_mask"],
+        ),
         name="inputnode",
     )
     outputnode = pe.Node(
@@ -126,99 +124,33 @@
         name="outputnode",
     )
 
-    # compute alff
-    alff_compt = pe.Node(
+    compute_alff = pe.Node(
         ComputeALFF(TR=TR, low_pass=low_pass, high_pass=high_pass),
         mem_gb=mem_gb["resampled"],
-        name="alff_compt",
+        name="compute_alff",
         n_procs=omp_nthreads,
     )
 
-    alff_plot = pe.Node(
+    plot_alff = pe.Node(
         Function(
             input_names=["output_path", "filename", "name_source"],
             output_names=["output_path"],
             function=plot_alff_reho_surface if cifti else plot_alff_reho_volumetric,
         ),
-        name="alff_plot",
+        name="plot_alff",
     )
-    alff_plot.inputs.output_path = "alff.svg"
-    alff_plot.inputs.name_source = name_source
+    plot_alff.inputs.output_path = "alff.svg"
+    plot_alff.inputs.name_source = name_source
 
-    # fmt:off
     workflow.connect([
-        (inputnode, alff_compt, [
+        (inputnode, compute_alff, [
             ("denoised_bold", "in_file"),
             ("bold_mask", "mask"),
             ("temporal_mask", "temporal_mask"),
         ]),
-        (alff_compt, alff_plot, [("alff", "filename")]),
-        (alff_compt, outputnode, [("alff", "alff")])
-    ])
-    # fmt:on
-
-    if smoothing:  # If we want to smooth
-        if not cifti:  # If nifti
-            workflow.__desc__ = workflow.__desc__ + (
-                " The ALFF maps were smoothed with Nilearn using a Gaussian kernel "
-                f"(FWHM={str(smoothing)} mm)."
-            )
-            # Smooth via Nilearn
-            smooth_data = pe.Node(
-                Smooth(fwhm=smoothing),
-                name="niftismoothing",
-                n_procs=omp_nthreads,
-            )
-            # fmt:off
-            workflow.connect([
-                (alff_compt, smooth_data, [("alff", "in_file")]),
-                (smooth_data, outputnode, [("out_file", "smoothed_alff")])
-            ])
-            # fmt:on
-
-        else:  # If cifti
-            workflow.__desc__ = workflow.__desc__ + (
-                " The ALFF maps were smoothed with the Connectome Workbench using a Gaussian "
-                f"kernel (FWHM={str(smoothing)} mm)."
-            )
-
-            # Smooth via Connectome Workbench
-            sigma_lx = fwhm2sigma(smoothing)  # Convert fwhm to standard deviation
-            # Get templates for each hemisphere
-            lh_midthickness = str(
-                get_template("fsLR", hemi="L", suffix="sphere", density="32k")[0]
-            )
-            rh_midthickness = str(
-                get_template("fsLR", hemi="R", suffix="sphere", density="32k")[0]
-            )
-            smooth_data = pe.Node(
-                CiftiSmooth(
-                    sigma_surf=sigma_lx,
-                    sigma_vol=sigma_lx,
-                    direction="COLUMN",
-                    right_surf=rh_midthickness,
-                    left_surf=lh_midthickness,
-                ),
-                name="ciftismoothing",
-                mem_gb=mem_gb["resampled"],
-                n_procs=omp_nthreads,
-            )
-
-            # Always check the intent code in CiftiSmooth's output file
-            fix_cifti_intent = pe.Node(
-                FixCiftiIntent(),
-                name="fix_cifti_intent",
-                mem_gb=mem_gb["resampled"],
-                n_procs=omp_nthreads,
-            )
-
-            # fmt:off
-            workflow.connect([
-                (alff_compt, smooth_data, [("alff", "in_file")]),
-                (smooth_data, fix_cifti_intent, [("out_file", "in_file")]),
-                (fix_cifti_intent, outputnode, [("out_file", "smoothed_alff")]),
-            ])
-            # fmt:on
+        (compute_alff, plot_alff, [("alff", "filename")]),
+        (compute_alff, outputnode, [("alff", "alff")])
+    ])  # fmt:skip
 
     ds_alff_plot = pe.Node(
         DerivativesDataSink(
@@ -230,10 +162,52 @@
         name="ds_alff_plot",
         run_without_submitting=False,
     )
+    workflow.connect([(plot_alff, ds_alff_plot, [("output_path", "in_file")])])
 
-    # fmt:off
-    workflow.connect([(alff_plot, ds_alff_plot, [("output_path", "in_file")])])
-    # fmt:on
+    if smoothing:
+        workflow.__desc__ += " ALFF was also calculated from the smoothed, denoised BOLD data."
+
+        compute_smoothed_alff = pe.Node(
+            ComputeALFF(TR=TR, low_pass=low_pass, high_pass=high_pass),
+            mem_gb=mem_gb["resampled"],
+            name="compute_smoothed_alff",
+            n_procs=omp_nthreads,
+        )
+
+        plot_smoothed_alff = pe.Node(
+            Function(
+                input_names=["output_path", "filename", "name_source"],
+                output_names=["output_path"],
+                function=plot_alff_reho_surface if cifti else plot_alff_reho_volumetric,
+            ),
+            name="plot_smoothed_alff",
+        )
+        plot_smoothed_alff.inputs.output_path = "alff.svg"
+        plot_smoothed_alff.inputs.name_source = name_source
+
+        workflow.connect([
+            (inputnode, compute_smoothed_alff, [
+                ("smoothed_bold", "in_file"),
+                ("bold_mask", "mask"),
+                ("temporal_mask", "temporal_mask"),
+            ]),
+            (compute_smoothed_alff, plot_smoothed_alff, [("alff", "filename")]),
+            (compute_smoothed_alff, outputnode, [("alff", "smoothed_alff")])
+        ])  # fmt:skip
+
+        ds_smoothed_alff_plot = pe.Node(
+            DerivativesDataSink(
+                base_directory=output_dir,
+                source_file=name_source,
+                desc="alffSmoothedSurfacePlot" if cifti else "alffSmoothedVolumetricPlot",
+                datatype="figures",
+            ),
+            name="ds_smoothed_alff_plot",
+            run_without_submitting=False,
+        )
+        workflow.connect([
+            (plot_smoothed_alff, ds_smoothed_alff_plot, [("output_path", "in_file")]),
+        ])  # fmt:skip
 
     return workflow
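On the wiring pattern above: both branches hang off the same inputnode, with the smoothed branch reading from the new smoothed_bold field. A stripped-down nipype sketch of that pattern follows; the workflow name, the _passthrough function, and the smoothing flag are stand-ins for illustration, not the xcp_d interfaces themselves:

    from nipype.interfaces import utility as niu
    from nipype.pipeline import engine as pe


    def _passthrough(in_file):
        """Stand-in for a compute node; simply returns its input."""
        return in_file


    wf = pe.Workflow(name="alff_like_wf")
    inputnode = pe.Node(
        niu.IdentityInterface(fields=["denoised_bold", "smoothed_bold"]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["alff", "smoothed_alff"]),
        name="outputnode",
    )
    compute_alff = pe.Node(
        niu.Function(input_names=["in_file"], output_names=["alff"], function=_passthrough),
        name="compute_alff",
    )
    wf.connect([
        (inputnode, compute_alff, [("denoised_bold", "in_file")]),
        (compute_alff, outputnode, [("alff", "alff")]),
    ])

    smoothing = True  # stands in for the user-supplied --smoothing setting
    if smoothing:
        # The smoothed branch is a clone of the unsmoothed one, fed by smoothed_bold.
        compute_smoothed_alff = compute_alff.clone(name="compute_smoothed_alff")
        wf.connect([
            (inputnode, compute_smoothed_alff, [("smoothed_bold", "in_file")]),
            (compute_smoothed_alff, outputnode, [("alff", "smoothed_alff")]),
        ])

The real workflow uses ComputeALFF, Function-based plotting nodes, and DerivativesDataSink in place of these stand-ins, but the fan-out from the shared inputnode and the conditional smoothed branch are the same.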