From 9ccb8ec54ec17dd21b2e1347cfd93f0d9c4e60ec Mon Sep 17 00:00:00 2001 From: "Timothy B. Brown" Date: Fri, 27 Apr 2018 15:09:57 -0500 Subject: [PATCH 1/5] Updated version.txt --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index aaf369088..448267798 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -Post-v3.26.0 +Post-v3.26.1 From b39fa1ba15072ac64d590e74d26a2a2a70f3d595 Mon Sep 17 00:00:00 2001 From: Greg Burgess Date: Tue, 8 May 2018 11:15:16 -0500 Subject: [PATCH 2/5] Edit TaskfMRIAnalysis.sh (outer-loop wrapper script) to 1) add and clarify comments for better understanding by user and 2) moved and consolidated individual commands into 'functional blocks' of code performing similar functions. --- TaskfMRIAnalysis/TaskfMRIAnalysis.sh | 204 +++++++++++++-------------- 1 file changed, 97 insertions(+), 107 deletions(-) diff --git a/TaskfMRIAnalysis/TaskfMRIAnalysis.sh b/TaskfMRIAnalysis/TaskfMRIAnalysis.sh index 65cf07ae4..4f911f56f 100755 --- a/TaskfMRIAnalysis/TaskfMRIAnalysis.sh +++ b/TaskfMRIAnalysis/TaskfMRIAnalysis.sh @@ -41,15 +41,14 @@ # and does not attempt any further processing. set -e -# Load function libraries -source ${HCPPIPEDIR}/global/scripts/log.shlib # Logging related functions -source ${HCPPIPEDIR}/global/scripts/opts.shlib # Command line option funtions -# Establish tool name for logging -log_SetToolName "TaskfMRIAnalysis.sh" +########################################## PREPARE FUNCTIONS ########################################## -# Other utility functions +# Load function libraries +source ${HCPPIPEDIR}/global/scripts/log.shlib # Logging related functions +source ${HCPPIPEDIR}/global/scripts/opts.shlib # Command line option functions +# Create utility functions to determine and test FSL versions get_fsl_version() { local fsl_version_file @@ -70,13 +69,12 @@ get_fsl_version() eval $__functionResultVar="'${fsl_version}'" } -# -# NOTE: -# Don't echo anything in this function other than the last echo -# that outputs the return value -# determine_old_or_new_fsl() { + # NOTE: + # Don't echo anything in this function other than the last echo + # that outputs the return value + # local fsl_version=${1} local old_or_new local fsl_version_array @@ -130,134 +128,126 @@ determine_old_or_new_fsl() } +########################################## READ COMMAND-LINE ARGUMENTS ################################## + +# Explcitly set tool name for logging +log_SetToolName "TaskfMRIAnalysis.sh" + # Show version of HCP Pipeline Scripts in use if requested opts_ShowVersionIfRequested $@ +# Parse expected arguments from command-line array log_Msg "Parsing Command Line Options" - Path=`opts_GetOpt1 "--path" $@` -log_Msg "Path: ${Path}" - Subject=`opts_GetOpt1 "--subject" $@` -log_Msg "Subject: ${Subject}" - LevelOnefMRINames=`opts_GetOpt1 "--lvl1tasks" $@` -log_Msg "LevelOnefMRINames: ${LevelOnefMRINames}" - LevelOnefsfNames=`opts_GetOpt1 "--lvl1fsfs" $@` -log_Msg "LevelOnefsfNames: ${LevelOnefsfNames}" - LevelTwofMRIName=`opts_GetOpt1 "--lvl2task" $@` -log_Msg "LevelTwofMRIName: ${LevelTwofMRIName}" - LevelTwofsfNames=`opts_GetOpt1 "--lvl2fsf" $@` -log_Msg "LevelTwofsfNames: ${LevelTwofsfNames}" - LowResMesh=`opts_GetOpt1 "--lowresmesh" $@` -log_Msg "LowResMesh: ${LowResMesh}" - GrayordinatesResolution=`opts_GetOpt1 "--grayordinatesres" $@` -log_Msg "GrayordinatesResolution: ${GrayordinatesResolution}" - OriginalSmoothingFWHM=`opts_GetOpt1 "--origsmoothingFWHM" $@` -log_Msg "OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" - 
Confound=`opts_GetOpt1 "--confound" $@` -log_Msg "Confound: ${Confound}" - FinalSmoothingFWHM=`opts_GetOpt1 "--finalsmoothingFWHM" $@` -log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" - TemporalFilter=`opts_GetOpt1 "--temporalfilter" $@` -log_Msg "TemporalFilter: ${TemporalFilter}" - VolumeBasedProcessing=`opts_GetOpt1 "--vba" $@` -log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" - RegName=`opts_GetOpt1 "--regname" $@` -log_Msg "RegName: ${RegName}" - Parcellation=`opts_GetOpt1 "--parcellation" $@` -log_Msg "Parcellation: ${Parcellation}" - ParcellationFile=`opts_GetOpt1 "--parcellationfile" $@` + +# Level 1 analysis names were delimited by '@' in command-line. +# Change to space delimiter to use in for loops. +LevelOnefMRINames=`echo $LevelOnefMRINames | sed 's/@/ /g'` +LevelOnefsfNames=`echo $LevelOnefsfNames | sed 's/@/ /g'` + +# Write command-line arguments to log file +log_Msg "Path: ${Path}" +log_Msg "Subject: ${Subject}" +log_Msg "LevelOnefMRINames: ${LevelOnefMRINames}" +log_Msg "LevelOnefsfNames: ${LevelOnefsfNames}" +log_Msg "LevelTwofMRIName: ${LevelTwofMRIName}" +log_Msg "LevelTwofsfNames: ${LevelTwofsfNames}" +log_Msg "LowResMesh: ${LowResMesh}" +log_Msg "GrayordinatesResolution: ${GrayordinatesResolution}" +log_Msg "OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" +log_Msg "Confound: ${Confound}" +log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" +log_Msg "TemporalFilter: ${TemporalFilter}" +log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" +log_Msg "RegName: ${RegName}" +log_Msg "Parcellation: ${Parcellation}" log_Msg "ParcellationFile: ${ParcellationFile}" -# Determine the version of FSL that is in use + +########################################## MAIN ######################################### + +# Determine if required FSL version is present get_fsl_version fsl_ver log_Msg "FSL version: ${fsl_ver}" -# Determine whether to invoke the "OLD" (v1.0) or "NEW" (v2.0) version of Task fMRI Analysis old_or_new_version=$(determine_old_or_new_fsl ${fsl_ver}) - if [ "${old_or_new_version}" == "OLD" ] then # Need to exit script due to incompatible FSL VERSION!!!! - log_Msg "ERROR: Detected pre-5.0.7 version of FSL is in use. Task fMRI Analysis not invoked." + log_Msg "ERROR: Detected pre-5.0.7 version of FSL in use (version ${fsl_ver}). Task fMRI Analysis not invoked. Exiting." exit 1 else - log_Msg "INFO: Detected version 5.0.7 or newer of FSL is in use. Invoking Level1 and Level2 scripts." 
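# Illustrative sketch (not part of the patch): how the 5.0.7 gate above treats a few
# representative FSL version strings. The helper name classify_fsl_version and the sample
# version numbers are hypothetical; the real classification is done by the
# determine_old_or_new_fsl() function defined earlier in this script.
classify_fsl_version()
{
	local ver=${1}
	local major=`echo ${ver} | cut -d. -f1`
	local minor=`echo ${ver} | cut -d. -f2`
	local patch=`echo ${ver} | cut -d. -f3 | cut -d- -f1`
	patch=${patch:-0}
	if [ ${major} -lt 5 ] ; then
		echo "OLD"   # e.g. 4.1.9 -> OLD, wrapper exits 1
	elif [ ${major} -eq 5 -a ${minor} -eq 0 -a ${patch} -lt 7 ] ; then
		echo "OLD"   # e.g. 5.0.6 -> OLD, wrapper exits 1
	else
		echo "NEW"   # e.g. 5.0.9 or 6.0.1 -> NEW, Level1 and Level2 scripts are invoked
	fi
}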
- - #Naming Conventions - AtlasFolder="${Path}/${Subject}/MNINonLinear" - ResultsFolder="${AtlasFolder}/Results" - ROIsFolder="${AtlasFolder}/ROIs" - DownSampleFolder="${AtlasFolder}/fsaverage_LR${LowResMesh}k" - - #Run Level One Analysis for Both Phase Encoding Directions - log_Msg "Run Level One Analysis for Both Phase Encoding Directions" - - LevelOnefMRINames=`echo $LevelOnefMRINames | sed 's/@/ /g'` - LevelOnefsfNames=`echo $LevelOnefsfNames | sed 's/@/ /g'` - - i=1 - for LevelOnefMRIName in $LevelOnefMRINames ; do - log_Msg "LevelOnefMRIName: ${LevelOnefMRIName}" - LevelOnefsfName=`echo $LevelOnefsfNames | cut -d " " -f $i` - log_Msg "Issued command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh $Subject $ResultsFolder $ROIsFolder $DownSampleFolder $LevelOnefMRIName $LevelOnefsfName $LowResMesh $GrayordinatesResolution $OriginalSmoothingFWHM $Confound $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation $ParcellationFile" - ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh \ - $Subject \ - $ResultsFolder \ - $ROIsFolder \ - $DownSampleFolder \ - $LevelOnefMRIName \ - $LevelOnefsfName \ - $LowResMesh \ - $GrayordinatesResolution \ - $OriginalSmoothingFWHM \ - $Confound \ - $FinalSmoothingFWHM \ - $TemporalFilter \ - $VolumeBasedProcessing \ - $RegName \ - $Parcellation \ - $ParcellationFile - i=$(($i+1)) - done - - if [ "$LevelTwofMRIName" != "NONE" ] - then - LevelOnefMRINames=`echo $LevelOnefMRINames | sed 's/ /@/g'` - LevelOnefsfNames=`echo $LevelOnefMRINames | sed 's/ /@/g'` + log_Msg "Detected FSL version ${fsl_ver}" +fi - #Combine Data Across Phase Encoding Directions in the Level Two Analysis - log_Msg "Combine Data Across Phase Encoding Directions in the Level Two Analysis" - log_Msg "Issued command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh $Subject $ResultsFolder $DownSampleFolder $LevelOnefMRINames $LevelOnefsfNames $LevelTwofMRIName $LevelTwofsfNames $LowResMesh $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation" - ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh \ - $Subject \ - $ResultsFolder \ - $DownSampleFolder \ - $LevelOnefMRINames \ - $LevelOnefsfNames \ - $LevelTwofMRIName \ - $LevelTwofsfNames \ - $LowResMesh \ - $FinalSmoothingFWHM \ - $TemporalFilter \ - $VolumeBasedProcessing \ - $RegName \ - $Parcellation - fi +# Determine locations of necessary directories (using expected naming convention) +AtlasFolder="${Path}/${Subject}/MNINonLinear" +ResultsFolder="${AtlasFolder}/Results" +ROIsFolder="${AtlasFolder}/ROIs" +DownSampleFolder="${AtlasFolder}/fsaverage_LR${LowResMesh}k" + +# Run Level 1 analyses for each phase encoding direction (from command line arguments) +log_Msg "Running Level 1 Analysis for Both Phase Encoding Directions" +i=1 +for LevelOnefMRIName in $LevelOnefMRINames ; do + log_Msg "LevelOnefMRIName: ${LevelOnefMRIName}" + # Get corresponding fsf name from $LevelOnefsfNames list + LevelOnefsfName=`echo $LevelOnefsfNames | cut -d " " -f $i` + log_Msg "Issuing command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh $Subject $ResultsFolder $ROIsFolder $DownSampleFolder $LevelOnefMRIName $LevelOnefsfName $LowResMesh $GrayordinatesResolution $OriginalSmoothingFWHM $Confound $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation $ParcellationFile" + ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh \ + $Subject \ + $ResultsFolder \ + $ROIsFolder \ + $DownSampleFolder \ + $LevelOnefMRIName \ + $LevelOnefsfName \ + $LowResMesh \ + $GrayordinatesResolution \ + 
$OriginalSmoothingFWHM \ + $Confound \ + $FinalSmoothingFWHM \ + $TemporalFilter \ + $VolumeBasedProcessing \ + $RegName \ + $Parcellation \ + $ParcellationFile + i=$(($i+1)) +done + +if [ "$LevelTwofMRIName" != "NONE" ] +then + # Combine Data Across Phase Encoding Directions in the Level 2 Analysis + log_Msg "Combine Data Across Phase Encoding Directions in the Level 2 Analysis" + log_Msg "Issuing command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh $Subject $ResultsFolder $DownSampleFolder $LevelOnefMRINames $LevelOnefsfNames $LevelTwofMRIName $LevelTwofsfNames $LowResMesh $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation" + ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh \ + $Subject \ + $ResultsFolder \ + $DownSampleFolder \ + $LevelOnefMRINames \ + $LevelOnefsfNames \ + $LevelTwofMRIName \ + $LevelTwofsfNames \ + $LowResMesh \ + $FinalSmoothingFWHM \ + $TemporalFilter \ + $VolumeBasedProcessing \ + $RegName \ + $Parcellation fi log_Msg "Completed" From 56f10c0820a64b9a5bd659ca0217a254c40ea081 Mon Sep 17 00:00:00 2001 From: "Timothy B. Brown" Date: Thu, 10 May 2018 15:51:16 -0500 Subject: [PATCH 3/5] Fixing paths to use in interactive MATLAB call to MSMregression in MSMAll.sh --- .../Scripts/PreFreeSurferPipelineBatch.sh | 214 +++++++++--------- Examples/Scripts/SetUpHCPPipeline.sh | 2 +- MSMAll/scripts/MSMAll.sh | 5 +- 3 files changed, 111 insertions(+), 110 deletions(-) diff --git a/Examples/Scripts/PreFreeSurferPipelineBatch.sh b/Examples/Scripts/PreFreeSurferPipelineBatch.sh index 612531cd9..f5a29c6a3 100755 --- a/Examples/Scripts/PreFreeSurferPipelineBatch.sh +++ b/Examples/Scripts/PreFreeSurferPipelineBatch.sh @@ -1,13 +1,13 @@ -#!/bin/bash +#!/bin/bash #~ND~FORMAT~MARKDOWN~ #~ND~START~ # # # PreFreeSurferPipelineBatch.sh -# +# # ## Copyright Notice # -# Copyright (C) 2013-2016 The Human Connectome Project +# Copyright (C) 2013-2018 The Human Connectome Project # # * Washington University in St. Louis # * University of Minnesota @@ -15,7 +15,7 @@ # # ## Author(s) # -# * Matthey F. Glasser, Department of Anatomy and Neurobiology, +# * Matthew F. Glasser, Department of Anatomy and Neurobiology, # Washington University in St. Louis # * Timothy B. Brown, Neuroinformatics Research Group, # Washington University in St. Louis @@ -30,7 +30,7 @@ # # ## Description: # -# Example script for running the Pre-FreeSurfer phase of the HCP Structural +# Example script for running the Pre-FreeSurfer phase of the HCP Structural # Preprocessing pipeline # # See [Glasser et al. 2013][GlasserEtAl]. 
@@ -63,9 +63,9 @@ # Function: get_batch_options # Description -# +# # Retrieve the following command line parameter values if specified -# +# # --StudyFolder= - primary study folder containing subject ID subdirectories # --Subjlist= - quoted, space separated list of subject IDs on which # to run the pipeline @@ -80,43 +80,43 @@ # command_line_specified_subj_list # command_line_specified_run_local # -# These values are intended to be used to override any values set +# These values are intended to be used to override any values set # directly within this script file get_batch_options() { - local arguments=("$@") - - unset command_line_specified_study_folder - unset command_line_specified_subj - unset command_line_specified_run_local - - local index=0 - local numArgs=${#arguments[@]} - local argument - - while [ ${index} -lt ${numArgs} ]; do - argument=${arguments[index]} - - case ${argument} in - --StudyFolder=*) - command_line_specified_study_folder=${argument#*=} - index=$(( index + 1 )) - ;; - --Subject=*) - command_line_specified_subj=${argument#*=} - index=$(( index + 1 )) - ;; - --runlocal) - command_line_specified_run_local="TRUE" - index=$(( index + 1 )) - ;; - *) - echo "" - echo "ERROR: Unrecognized Option: ${argument}" - echo "" - exit 1 - ;; - esac - done + local arguments=("$@") + + unset command_line_specified_study_folder + unset command_line_specified_subj + unset command_line_specified_run_local + + local index=0 + local numArgs=${#arguments[@]} + local argument + + while [ ${index} -lt ${numArgs} ]; do + argument=${arguments[index]} + + case ${argument} in + --StudyFolder=*) + command_line_specified_study_folder=${argument#*=} + index=$(( index + 1 )) + ;; + --Subject=*) + command_line_specified_subj=${argument#*=} + index=$(( index + 1 )) + ;; + --runlocal) + command_line_specified_run_local="TRUE" + index=$(( index + 1 )) + ;; + *) + echo "" + echo "ERROR: Unrecognized Option: ${argument}" + echo "" + exit 1 + ;; + esac + done } # Function: main @@ -129,7 +129,7 @@ main() StudyFolder="${HOME}/projects/Pipelines_ExampleData" # Location of Subject folders (named by subjectID) Subjlist="100307" # Space delimited list of subject IDs - # Set variable value that set up environment + # Set variable value that sets up environment EnvironmentScript="${HOME}/projects/Pipelines/Examples/Scripts/SetUpHCPPipeline.sh" # Pipeline environment script # Use any command line specified options to override any of the variable settings above @@ -154,11 +154,11 @@ main() # if [ X$SGE_ROOT != X ] ; then # QUEUE="-q long.q" # QUEUE="-q veryshort.q" - QUEUE="-q hcp_priority.q" + QUEUE="-q hcp_priority.q" # fi # If PRINTCOM is not a null or empty string variable, then - # this script and other scripts that it calls will simply + # this script and other scripts that it calls will simply # print out the primary commands it otherwise would run. # This printing will be done using the command specified # in the PRINTCOM variable @@ -169,7 +169,7 @@ main() # Inputs: # # Scripts called by this script do NOT assume anything about the form of the - # input names or paths. This batch script assumes the HCP raw data naming + # input names or paths. This batch script assumes the HCP raw data naming # convention, e.g. # # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_T1w_MPR1.nii.gz @@ -183,30 +183,30 @@ main() # Scan settings: # - # Change the Scan Settings (e.g. Sample Spacings and $UnwarpDir) to match your + # Change the Scan Settings (e.g. 
Sample Spacings and $UnwarpDir) to match your # structural images. These are set to match the HCP-YA ("Young Adult") Protocol by default. # (i.e., the study collected on the customized Connectom scanner). # Readout Distortion Correction: - # + # # You have the option of using either gradient echo field maps or spin echo # field maps to perform readout distortion correction on your structural # images, or not to do readout distortion correction at all. # - # The HCP Pipeline Scripts currently support the use of gradient echo field - # maps or spin echo field maps as they are produced by the Siemens Connectom + # The HCP Pipeline Scripts currently support the use of gradient echo field + # maps or spin echo field maps as they are produced by the Siemens Connectom # Scanner. They also support the use of gradient echo field maps as generated # by General Electric scanners. - # - # Change either the gradient echo field map or spin echo field map scan - # settings to match your data. This script is setup to use gradient echo + # + # Change either the gradient echo field map or spin echo field map scan + # settings to match your data. This script is setup to use gradient echo # field maps from the Siemens Connectom Scanner collected using the HCP-YA Protocol. # Gradient Distortion Correction: # - # If using gradient distortion correction, use the coefficents from your + # If using gradient distortion correction, use the coefficents from your # scanner. The HCP gradient distortion coefficents are only available through - # Siemens. Gradient distortion in standard scanners like the Trio is much + # Siemens. Gradient distortion in standard scanners like the Trio is much # less than for the HCP Connectom scanner. # DO WORK @@ -217,7 +217,7 @@ main() # Input Images - # Detect Number of T1w Images and build list of full paths to + # Detect Number of T1w Images and build list of full paths to # T1w images numT1ws=`ls ${StudyFolder}/${Subject}/unprocessed/3T | grep 'T1w_MPR.$' | wc -l` echo "Found ${numT1ws} T1w Images for subject ${Subject}" @@ -241,9 +241,9 @@ main() # Readout Distortion Correction: # - # Currently supported Averaging and readout distortion correction + # Currently supported Averaging and readout distortion correction # methods: (i.e. supported values for the AvgrdcSTRING variable in this - # script and the --avgrdcmethod= command line option for the + # script and the --avgrdcmethod= command line option for the # PreFreeSurferPipeline.sh script.) # # "NONE" @@ -251,32 +251,32 @@ main() # # "FIELDMAP" # This value is equivalent to the "SiemensFieldMap" value described - # below. Use of the "SiemensFieldMap" value is prefered, but + # below. Use of the "SiemensFieldMap" value is prefered, but # "FIELDMAP" is included for backward compatibility with the versions - # of these scripts that only supported use of Siemens-specific - # Gradient Echo Field Maps and did not support Gradient Echo Field + # of these scripts that only supported use of Siemens-specific + # Gradient Echo Field Maps and did not support Gradient Echo Field # Maps from any other scanner vendor. 
# # "TOPUP" - # Average any repeats and use Spin Echo Field Maps for readout + # Average any repeats and use Spin Echo Field Maps for readout # distortion correction # # "GeneralElectricFieldMap" - # Average any repeats and use General Electric specific Gradient + # Average any repeats and use General Electric specific Gradient # Echo Field Map for readout distortion correction # # "SiemensFieldMap" - # Average any repeats and use Siemens specific Gradient Echo + # Average any repeats and use Siemens specific Gradient Echo # Field Maps for readout distortion correction # # Current Setup is for Siemens specific Gradient Echo Field Maps # - # The following settings for AvgrdcSTRING, MagnitudeInputName, - # PhaseInputName, and TE are for using the Siemens specific - # Gradient Echo Field Maps that are collected and used in the - # standard HCP-YA protocol. + # The following settings for AvgrdcSTRING, MagnitudeInputName, + # PhaseInputName, and TE are for using the Siemens specific + # Gradient Echo Field Maps that are collected and used in the + # standard HCP-YA protocol. # - # Note: The AvgrdcSTRING variable could also be set to the value + # Note: The AvgrdcSTRING variable could also be set to the value # "FIELDMAP" which is equivalent to "SiemensFieldMap". AvgrdcSTRING="SiemensFieldMap" @@ -286,11 +286,11 @@ main() # The MagnitudeInputName variable should be set to a 4D magitude volume # with two 3D timepoints or "NONE" if not used - MagnitudeInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Magnitude.nii.gz" + MagnitudeInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Magnitude.nii.gz" - # The PhaseInputName variable should be set to a 3D phase difference + # The PhaseInputName variable should be set to a 3D phase difference # volume or "NONE" if not used - PhaseInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Phase.nii.gz" + PhaseInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Phase.nii.gz" # The TE variable should be set to 2.46ms for 3T scanner, 1.02ms for 7T # scanner or "NONE" if not using @@ -303,7 +303,7 @@ main() # The following variables would be set to values other than "NONE" for # using Spin Echo Field Maps (i.e. when AvgrdcSTRING="TOPUP") - # The SpinEchoPhaseEncodeNegative variable should be set to the + # The SpinEchoPhaseEncodeNegative variable should be set to the # spin echo field map volume with a negative phase encoding direction # (LR if using a pair of LR/RL Siemens Spin Echo Field Maps (SEFMs); # AP if using a pair of AP/PA Siemens SEFMS) @@ -313,22 +313,22 @@ main() # Example values for when using Spin Echo Field Maps from a Siemens machine: # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_LR.nii.gz # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_AP.nii.gz - SpinEchoPhaseEncodeNegative="NONE" + SpinEchoPhaseEncodeNegative="NONE" - # The SpinEchoPhaseEncodePositive variable should be set to the + # The SpinEchoPhaseEncodePositive variable should be set to the # spin echo field map volume with positive phase encoding direction # (RL if using a pair of LR/RL SEFMs; PA if using a AP/PA pair), # and set to "NONE" if not using Spin Echo Field Maps # (i.e. 
if AvgrdcSTRING is not equal to "TOPUP") - # + # # Example values for when using Spin Echo Field Maps from a Siemens machine: # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_RL.nii.gz # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_PA.nii.gz SpinEchoPhaseEncodePositive="NONE" # Echo Spacing or Dwelltime of spin echo EPI MRI image. Specified in seconds. - # Set to "NONE" if not used. - # + # Set to "NONE" if not used. + # # Dwelltime = 1/(BandwidthPerPixelPhaseEncode * # of phase encoding samples) # DICOM field (0019,1028) = BandwidthPerPixelPhaseEncode # DICOM field (0051,100b) = AcquisitionMatrixText first value (# of phase encoding samples). @@ -336,12 +336,12 @@ main() # # Example value for when using Spin Echo Field Maps: # 0.000580002668012 - DwellTime="NONE" + DwellTime="NONE" # Spin Echo Unwarping Direction # x or y (minus or not does not matter) # "NONE" if not used - # + # # Example values for when using Spin Echo Field Maps: x, -x, y, -y # Note: +x or +y are not supported. For positive values, DO NOT include the + sign ## MPH: Why do we say that "minus or not does not matter", but then list -x and -y as example values?? @@ -352,92 +352,92 @@ main() TopupConfig="NONE" # ---------------------------------------------------------------------- - # Variables related to using General Electric specific Gradient Echo + # Variables related to using General Electric specific Gradient Echo # Field Maps # ---------------------------------------------------------------------- # The following variables would be set to values other than "NONE" for - # using General Electric specific Gradient Echo Field Maps (i.e. when + # using General Electric specific Gradient Echo Field Maps (i.e. when # AvgrdcSTRING="GeneralElectricFieldMap") - + # Example value for when using General Electric Gradient Echo Field Map # - # GEB0InputName should be a General Electric style B0 fieldmap with two + # GEB0InputName should be a General Electric style B0 fieldmap with two # volumes # 1) fieldmap in deg and - # 2) magnitude, + # 2) magnitude, # set to NONE if using TOPUP or FIELDMAP/SiemensFieldMap # - # GEB0InputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_GradientEchoFieldMap.nii.gz" + # GEB0InputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_GradientEchoFieldMap.nii.gz" GEB0InputName="NONE" - + # Templates # Hires T1w MNI template T1wTemplate="${HCPPIPEDIR_Templates}/MNI152_T1_0.7mm.nii.gz" # Hires brain extracted MNI template - T1wTemplateBrain="${HCPPIPEDIR_Templates}/MNI152_T1_0.7mm_brain.nii.gz" + T1wTemplateBrain="${HCPPIPEDIR_Templates}/MNI152_T1_0.7mm_brain.nii.gz" # Lowres T1w MNI template - T1wTemplate2mm="${HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" + T1wTemplate2mm="${HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" # Hires T2w MNI Template - T2wTemplate="${HCPPIPEDIR_Templates}/MNI152_T2_0.7mm.nii.gz" + T2wTemplate="${HCPPIPEDIR_Templates}/MNI152_T2_0.7mm.nii.gz" # Hires T2w brain extracted MNI Template - T2wTemplateBrain="${HCPPIPEDIR_Templates}/MNI152_T2_0.7mm_brain.nii.gz" + T2wTemplateBrain="${HCPPIPEDIR_Templates}/MNI152_T2_0.7mm_brain.nii.gz" # Lowres T2w MNI Template - T2wTemplate2mm="${HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" + T2wTemplate2mm="${HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" # Hires MNI brain mask template TemplateMask="${HCPPIPEDIR_Templates}/MNI152_T1_0.7mm_brain_mask.nii.gz" - # Lowres MNI brain mask template - 
Template2mmMask="${HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" + # Lowres MNI brain mask template + Template2mmMask="${HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" - # Structural Scan Settings + # Structural Scan Settings # # Note that "UnwarpDir" is the *readout* direction of the *structural* (T1w,T2w) # images, and should not be confused with "SEUnwarpDir" which is the *phase* encoding direction # of the Spin Echo Field Maps (if using them). # # set all these values to NONE if not doing readout distortion correction - # + # # Sample values for when using General Electric structurals # T1wSampleSpacing="0.000011999" # For General Electric scanners, 1/((0018,0095)*(0028,0010)) # T2wSampleSpacing="0.000008000" # For General Electric scanners, 1/((0018,0095)*(0028,0010)) # UnwarpDir="y" ## MPH: This doesn't seem right. Is this accurate?? - # The values set below are for the HCP-YA Protocol using the Siemens + # The values set below are for the HCP-YA Protocol using the Siemens # Connectom Scanner # DICOM field (0019,1018) in s or "NONE" if not used - T1wSampleSpacing="0.0000074" + T1wSampleSpacing="0.0000074" # DICOM field (0019,1018) in s or "NONE" if not used - T2wSampleSpacing="0.0000021" + T2wSampleSpacing="0.0000021" # z appears to be the appropriate polarity for the 3D structurals collected on Siemens scanners # or "NONE" if not used - UnwarpDir="z" + UnwarpDir="z" # Other Config Settings # BrainSize in mm, 150 for humans - BrainSize="150" + BrainSize="150" # FNIRT 2mm T1w Config - FNIRTConfig="${HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" + FNIRTConfig="${HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" # Location of Coeffs file or "NONE" to skip - # GradientDistortionCoeffs="${HCPPIPEDIR_Config}/coeff_SC72C_Skyra.grad" + # GradientDistortionCoeffs="${HCPPIPEDIR_Config}/coeff_SC72C_Skyra.grad" # Set to NONE to skip gradient distortion correction - GradientDistortionCoeffs="NONE" + GradientDistortionCoeffs="NONE" - # Establish queuing command based on command line option + # Establish queuing command based on command line option if [ -n "${command_line_specified_run_local}" ] ; then echo "About to run ${HCPPIPEDIR}/PreFreeSurfer/PreFreeSurferPipeline.sh" queuing_command="" @@ -479,9 +479,9 @@ main() --avgrdcmethod="$AvgrdcSTRING" \ --topupconfig="$TopupConfig" \ --printcom=$PRINTCOM - + done } - + # Invoke the main function to get things started main "$@" diff --git a/Examples/Scripts/SetUpHCPPipeline.sh b/Examples/Scripts/SetUpHCPPipeline.sh index 0cf4c6297..50f6c3c29 100755 --- a/Examples/Scripts/SetUpHCPPipeline.sh +++ b/Examples/Scripts/SetUpHCPPipeline.sh @@ -5,7 +5,7 @@ echo "This script must be SOURCED to correctly setup the environment prior to ru # Set up FSL (if not already done so in the running environment) # Uncomment the following 2 lines (remove the leading #) and correct the FSLDIR setting for your setup #export FSLDIR=/usr/share/fsl/5.0 -#. 
${FSLDIR}/etc/fslconf/fsl.sh +#source ${FSLDIR}/etc/fslconf/fsl.sh # Let FreeSurfer know what version of FSL to use # FreeSurfer uses FSL_DIR instead of FSLDIR to determine the FSL version diff --git a/MSMAll/scripts/MSMAll.sh b/MSMAll/scripts/MSMAll.sh index c93bcf9bc..05e6173a1 100755 --- a/MSMAll/scripts/MSMAll.sh +++ b/MSMAll/scripts/MSMAll.sh @@ -748,11 +748,12 @@ main() 1) # Use interpreted MATLAB mPath="${HCPPIPEDIR}/MSMAll/scripts" + mGlobalPath="${HCPPIPEDIR}/global/matlab" matlab -nojvm -nodisplay -nosplash < Date: Thu, 10 May 2018 22:50:08 -0500 Subject: [PATCH 4/5] Edit scripts/TaskfMRILevel1.sh and scripts/TaskfMRILevel2.sh to 1) add and clarify comments for better understanding by user, 2) move and consolidate individual commands into 'functional blocks' of code performing specific functions, 3) corrected syntax of string comparisons in if statements (quoted all variables, consolidated multiple tests into single test), and modify output scalar names to include "zstat" and "cope". --- TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh | 472 +++++++++++--------- TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh | 476 +++++++++++---------- 2 files changed, 527 insertions(+), 421 deletions(-) diff --git a/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh b/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh index 2fd634ce3..72db40367 100755 --- a/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh +++ b/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh @@ -1,13 +1,16 @@ #!/bin/bash set -e -g_script_name=`basename ${0}` -# Load Function Libraries -source ${HCPPIPEDIR}/global/scripts/log.shlib # Logging related functions -source ${HCPPIPEDIR}/global/scripts/fsl_version.shlib # Function for getting FSL version +# Must first source SetUpHCPPipeline.sh to set up pipeline environment variables and software +# Requirements for this script +# installed versions of FSL 5.0.7 or greater +# environment: FSLDIR , HCPPIPEDIR , CARET7DIR -# Establish tool name for logging -log_SetToolName "${g_script_name}" + +########################################## PREPARE FUNCTIONS ########################################## + +source ${HCPPIPEDIR}/global/scripts/log.shlib # Logging related functions +source ${HCPPIPEDIR}/global/scripts/fsl_version.shlib # Function for getting FSL version show_tool_versions() { @@ -25,98 +28,113 @@ show_tool_versions() log_Msg "FSL version: ${fsl_ver}" } -Subject="$1" -log_Msg "Subject: ${Subject}" -ResultsFolder="$2" -log_Msg "ResultsFolder: ${ResultsFolder}" +########################################## READ COMMAND-LINE ARGUMENTS ################################## -ROIsFolder="$3" -log_Msg "ROIsFolder: ${ROIsFolder}" +# Explicitly set tool name for logging +g_script_name=`basename ${0}` +log_SetToolName "${g_script_name}" +log_Msg "${g_script_name} arguments: $@" +# Set variables from positional arguments to command line +Subject="$1" +ResultsFolder="$2" +ROIsFolder="$3" DownSampleFolder="$4" -log_Msg "DownSampleFolder: ${DownSampleFolder}" - LevelOnefMRIName="$5" -log_Msg "LevelOnefMRIName: ${LevelOnefMRIName}" - LevelOnefsfName="$6" -log_Msg "LevelOnefsfName: ${LevelOnefsfName}" - LowResMesh="$7" -log_Msg "LowResMesh: ${LowResMesh}" - GrayordinatesResolution="$8" -log_Msg "GrayordinatesResolution: ${GrayordinatesResolution}" - OriginalSmoothingFWHM="$9" -log_Msg "OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" - Confound="${10}" -log_Msg "Confound: ${Confound}" - FinalSmoothingFWHM="${11}" -log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" - TemporalFilter="${12}" -log_Msg "TemporalFilter: ${TemporalFilter}" - 
VolumeBasedProcessing="${13}" -log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" - RegName="${14}" -log_Msg "RegName: ${RegName}" - Parcellation="${15}" -log_Msg "Parcellation: ${Parcellation}" - ParcellationFile="${16}" + +log_Msg "Subject: ${Subject}" +log_Msg "ResultsFolder: ${ResultsFolder}" +log_Msg "ROIsFolder: ${ROIsFolder}" +log_Msg "DownSampleFolder: ${DownSampleFolder}" +log_Msg "LevelOnefMRIName: ${LevelOnefMRIName}" +log_Msg "LevelOnefsfName: ${LevelOnefsfName}" +log_Msg "LowResMesh: ${LowResMesh}" +log_Msg "GrayordinatesResolution: ${GrayordinatesResolution}" +log_Msg "OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" +log_Msg "Confound: ${Confound}" +log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" +log_Msg "TemporalFilter: ${TemporalFilter}" +log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" +log_Msg "RegName: ${RegName}" +log_Msg "Parcellation: ${Parcellation}" log_Msg "ParcellationFile: ${ParcellationFile}" show_tool_versions -if [ ! ${Parcellation} = "NONE" ] ; then - ParcellationString="_${Parcellation}" - Extension="ptseries.nii" -else - ParcellationString="" - Extension="dtseries.nii" -fi -log_Msg "ParcellationString: ${ParcellationString}" -log_Msg "Extension: ${Extension}" +########################################## MAIN ################################## +##### SET VARIABLES REQUIRED FOR FILE NAMING ##### -if [ ! ${RegName} = "NONE" ] ; then +# Set smoothing and filtering string variables used for file naming +SmoothingString="_s${FinalSmoothingFWHM}" +TemporalFilterString="_hp""$TemporalFilter" +log_Msg "SmoothingString: ${SmoothingString}" +log_Msg "TemporalFilterString: ${TemporalFilterString}" +# Set variables used for different registration procedures +if [ "${RegName}" != "NONE" ] ; then RegString="_${RegName}" else RegString="" fi log_Msg "RegString: ${RegString}" -#Parcellate data if a Parcellation was provided -log_Msg "Parcellate data if a Parcellation was provided" -if [ ! 
${Parcellation} = "NONE" ] ; then - log_Msg "Parcellating data" - ${CARET7DIR}/wb_command -cifti-parcellate ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${ParcellationFile} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.ptseries.nii + +##### DETERMINE ANALYSES TO RUN (DENSE, PARCELLATED, VOLUME) ##### + +# initialize run variables +runParcellated=false; runVolume=false; runDense=false; + +# Determine whether to run Parcellated, and set strings used for filenaming +if [ "${Parcellation}" != "NONE" ] ; then + # Run Parcellated Analyses + runParcellated=true; + ParcellationString="_${Parcellation}" + Extension="ptseries.nii" +fi + +# Determine whether to run Dense, and set strings used for filenaming +if [ "${Parcellation}" = "NONE" ]; then + # Run Dense Analyses + runDense=true; + ParcellationString="" + Extension="dtseries.nii" fi +# Determine whether to run Volume, and set strings used for filenaming +if [ "$VolumeBasedProcessing" = "YES" ] ; then + runVolume=true; +fi + +log_Msg "ParcellationString: ${ParcellationString}" +log_Msg "Extension: ${Extension}" + + +##### DETERMINE TR AND SCAN LENGTH ##### + +# Extract TR information from input time series files TR_vol=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.${Extension} -no-map-info -only-step-interval` log_Msg "TR_vol: ${TR_vol}" -#Only do the additional spatial smoothing required to hit the target (theoretical) final smoothing for CIFTI. -#Additional smoothing is not recommended -- if looking for area-sized effects, use parcellation for -#greater sensitivity and statistical power -AdditionalSmoothingFWHM=`echo "sqrt(( $FinalSmoothingFWHM ^ 2 ) - ( $OriginalSmoothingFWHM ^ 2 ))" | bc -l` -log_Msg "AdditionalSmoothingFWHM: ${AdditionalSmoothingFWHM}" +# Extract number of time points in CIFTI time series file +npts=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.${Extension} -no-map-info -only-number-of-maps` +log_Msg "npts: ${npts}" -AdditionalSigma=`echo "$AdditionalSmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l` -log_Msg "AdditionalSigma: ${AdditionalSigma}" -SmoothingString="_s${FinalSmoothingFWHM}" -TemporalFilterString="_hp""$TemporalFilter" -log_Msg "SmoothingString: ${SmoothingString}" -log_Msg "TemporalFilterString: ${TemporalFilterString}" +##### MAKE DESIGN FILES ##### +# Create output .feat directory ($FEATDir) for this analysis FEATDir="${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}.feat" log_Msg "FEATDir: ${FEATDir}" if [ -e ${FEATDir} ] ; then @@ -126,145 +144,100 @@ else mkdir -p ${FEATDir} fi -if [ $TemporalFilter = "200" ] ; then - #Don't edit the fsf file if the temporal filter is the same - log_Msg "Don't edit the fsf file if the temporal filter is the same" - cp ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}_hp200_s4_level1.fsf ${FEATDir}/temp.fsf +### Edit fsf file to record the parameters used in this analysis +# Copy template fsf file into $FEATDir +log_Msg "Copying fsf file to .feat directory" +cp ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}_hp200_s4_level1.fsf ${FEATDir}/design.fsf + +# Change the highpass filter string to the desired highpass filter +log_Msg "Change design.fsf: Set highpass filter string to the 
desired highpass filter to ${TemporalFilter}" +sed -i -e "s|set fmri(paradigm_hp) \"200\"|set fmri(paradigm_hp) \"${TemporalFilter}\"|g" ${FEATDir}/design.fsf + +# Change smoothing to be equal to additional smoothing in FSF file +log_Msg "Change design.fsf: Set smoothing to be equal to final smoothing to ${FinalSmoothingFWHM}" +sed -i -e "s|set fmri(smooth) \"4\"|set fmri(smooth) \"${FinalSmoothingFWHM}\"|g" ${FEATDir}/design.fsf + +# Change output directory name to match total smoothing and highpass +log_Msg "Change design.fsf: Set output directory name to ${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}" +sed -i -e "s|_hp200_s4|${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}|g" ${FEATDir}/design.fsf + +# find current value for npts in template.fsf +fsfnpts=`grep "set fmri(npts)" ${FEATDir}/design.fsf | cut -d " " -f 3 | sed 's|"||g'`; + +# Ensure number of time points in fsf matches time series image +if [ "$fsfnpts" -eq "$npts" ] ; then + log_Msg "Change design.fsf: Scan length matches number of timepoints in template.fsf: ${fsfnpts}" else - #Change the highpass filter string to the desired highpass filter - log_Msg "Change the highpass filter string to the desired highpass filter" - cat ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}_hp200_s4_level1.fsf | sed s/"set fmri(paradigm_hp) \"200\""/"set fmri(paradigm_hp) \"${TemporalFilter}\""/g > ${FEATDir}/temp.fsf + log_Msg "Change design.fsf: Warning! Scan length does not match template.fsf!" + log_Msg "Change design.fsf: Warning! Changing Number of Timepoints in fsf (""${fsfnpts}"") to match time series image (""${npts}"")" + sed -i -e "s|set fmri(npts) \"\?${fsfnpts}\"\?|set fmri(npts) ${npts}|g" ${FEATDir}/design.fsf fi -#Change smoothing to be equal to additional smoothing in FSF file and change output directory to match total smoothing and highpass -log_Msg "Change smoothing to be equal to additional smoothing in FSF file and change output directory to match total smoothing and highpass" -cat ${FEATDir}/temp.fsf | sed s/"set fmri(smooth) \"4\""/"set fmri(smooth) \"${AdditionalSmoothingFWHM}\""/g | sed s/_hp200_s4/${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}/g > ${FEATDir}/design.fsf -rm ${FEATDir}/temp.fsf - -#Change number of timepoints to match timeseries so that template fsf files can be used -log_Msg "Change number of timepoints to match timeseries so that template fsf files can be used" -fsfnpts=`cat ${FEATDir}/design.fsf | grep "set fmri(npts)" | cut -d " " -f 3 | sed 's/"//g'` -log_Msg "fsfnpts: ${fsfnpts}" -CIFTInpts=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.${Extension} -no-map-info -only-number-of-maps` -log_Msg "CIFTInpts: ${CIFTInpts}" -if [ $fsfnpts -ne $CIFTInpts ] ; then - cat ${FEATDir}/design.fsf | sed s/"set fmri(npts) \"\?${fsfnpts}\"\?"/"set fmri(npts) ${CIFTInpts}"/g > ${FEATDir}/temp.fsf - mv ${FEATDir}/temp.fsf ${FEATDir}/design.fsf - log_Msg "Short Run! 
Reseting FSF Number of Timepoints (""${fsfnpts}"") to Match CIFTI (""${CIFTInpts}"")" -fi -#Create design files, model confounds if desired +### Use fsf to create additional design files used by film_gls log_Msg "Create design files, model confounds if desired" -DIR=`pwd` -cd ${FEATDir} -if [ $Confound = "NONE" ] ; then - feat_model ${FEATDir}/design -else - feat_model ${FEATDir}/design ${ResultsFolder}/${LevelOnefMRIName}/${Confound} +# Determine if there is a confound matrix text file (e.g., output of fsl_motion_outliers) +confound_matrix=""; +if [ "$Confound" != "NONE" ] ; then + confound_matrix=$( ls -d ${ResultsFolder}/${LevelOnefMRIName}/${Confound} 2>/dev/null ) fi -cd $DIR -#Prepare files and folders -log_Msg "Prepare files and folders" +# Run feat_model inside $FEATDir +cd $FEATDir # so feat_model can interpret relative paths in fsf file +feat_model ${FEATDir}/design ${confound_matrix}; # $confound_matrix string is blank if file is missing +cd $OLDPWD # OLDPWD is shell variable previous working directory + +# Set variables for additional design files DesignMatrix=${FEATDir}/design.mat DesignContrasts=${FEATDir}/design.con DesignfContrasts=${FEATDir}/design.fts # An F-test may not always be requested as part of the design.fsf ExtraArgs="" -if [ -e ${DesignfContrasts} ] ; then +if [ -e "${DesignfContrasts}" ] ; then ExtraArgs="$ExtraArgs --fcon=${DesignfContrasts}" fi -###CIFTI Processing### -log_Msg "CIFTI Processing" -#Add any additional spatial smoothing, does not do anything if parcellation has been specified. -#Additional smoothing is not recommended -- if looking for area-sized effects, use parcellation for -#greater sensitivity and statistical power -if [[ ! $FinalSmoothingFWHM -eq $OriginalSmoothingFWHM && -z ${ParcellationString} ]] ; then - ${CARET7DIR}/wb_command -cifti-smoothing ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${AdditionalSigma} ${AdditionalSigma} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}.dtseries.nii -left-surface "$DownSampleFolder"/"$Subject".L.midthickness."$LowResMesh"k_fs_LR.surf.gii -right-surface "$DownSampleFolder"/"$Subject".R.midthickness."$LowResMesh"k_fs_LR.surf.gii -else - cp ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.${Extension} ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}.${Extension} -fi -#Add temporal filtering -log_Msg "Add temporal filtering" -# Temporal filtering is conducted by fslmaths. -# First, fslmaths is not CIFTI-compliant. -# So, convert CIFTI to fake NIFTI file, use fslmaths, then convert fake NIFTI back to CIFTI. -# Second, fslmaths -bptf removes timeseries mean (for FSL 5.0.7 onward), which is expected by film_gls. -# So, save the mean to file, then add it back after -bptf. 
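# Illustrative sketch (not part of the patch): the highpass sigma handed to fslmaths -bptf
# by the hp_sigma line below is expressed in volumes (TRs), not seconds. TemporalFilter=200
# (seconds, the template default) and TR_vol=0.72 (seconds) are example values only.
example_TemporalFilter=200
example_TR_vol=0.72
example_hp_sigma=`echo "0.5 * $example_TemporalFilter / $example_TR_vol" | bc -l`   # ~138.9 volumes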
-${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}.${Extension} ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz -fslmaths ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz -Tmean ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI_mean.nii.gz -hp_sigma=`echo "0.5 * $TemporalFilter / $TR_vol" | bc -l` -fslmaths ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz -bptf ${hp_sigma} -1 \ - -add ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI_mean.nii.gz \ - ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz -${CARET7DIR}/wb_command -cifti-convert -from-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}.${Extension} ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}${ParcellationString}.${Extension} -rm ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI_mean.nii.gz - -#Check if data are Parcellated, if not, do Dense Grayordinates Analysis# -log_Msg "Check if data are Parcellated, if not, do Dense Grayordinates Analysis" -if [ -z ${ParcellationString} ] ; then - - ###Dense Grayordinates Processing### - log_Msg "Dense Grayordinates Processing" - #Split into surface and volume - log_Msg "Split into surface and volume" - ${CARET7DIR}/wb_command -cifti-separate-all ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}.dtseries.nii -volume ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical"$TemporalFilterString""$SmoothingString".nii.gz -left ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.L."$LowResMesh"k_fs_LR.func.gii -right ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.R."$LowResMesh"k_fs_LR.func.gii +##### APPLY SPATIAL SMOOTHING (or PARCELLATION ##### - #Run film_gls on subcortical volume data - log_Msg "Run film_gls on subcortical volume data" - film_gls --rn=${FEATDir}/SubcorticalVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical"$TemporalFilterString""$SmoothingString".nii.gz --pd="$DesignMatrix" --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric - rm ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical"$TemporalFilterString""$SmoothingString".nii.gz +# Parcellation may be better than adding spatial smoothing to dense time series. +# Parcellation increases sensitivity and statistical power, but avoids blurring signal +# across region boundaries into adjacent, non-activated regions. 
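# Illustrative sketch (not part of the patch): the quadrature FWHM arithmetic used by the
# dense-smoothing branch further down in this hunk. Gaussian smoothing kernels combine in
# quadrature, so the extra smoothing needed to reach the target level is
# sqrt(Final^2 - Original^2), and wb_command -cifti-smoothing takes a sigma, where
# sigma = FWHM / (2 * sqrt(2 * ln(2))). The 2 mm / 4 mm values are examples only.
example_OriginalSmoothingFWHM=2
example_FinalSmoothingFWHM=4
example_AdditionalSmoothingFWHM=`echo "sqrt(( $example_FinalSmoothingFWHM ^ 2 ) - ( $example_OriginalSmoothingFWHM ^ 2 ))" | bc -l`   # ~3.46 mm
example_AdditionalSigma=`echo "$example_AdditionalSmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l`   # ~1.47 mm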
- #Run film_gls on cortical surface data - log_Msg "Run film_gls on cortical surface data" - for Hemisphere in L R ; do - #Prepare for film_gls - log_Msg "Prepare for film_gls" - ${CARET7DIR}/wb_command -metric-dilate ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi."$Hemisphere"."$LowResMesh"k_fs_LR.func.gii "$DownSampleFolder"/"$Subject"."$Hemisphere".midthickness."$LowResMesh"k_fs_LR.surf.gii 50 ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil."$Hemisphere"."$LowResMesh"k_fs_LR.func.gii -nearest - - #Run film_gls on surface data - log_Msg "Run film_gls on surface data" - film_gls --rn=${FEATDir}/"$Hemisphere"_SurfaceStats --sa --ms=15 --epith=5 --in2="$DownSampleFolder"/"$Subject"."$Hemisphere".midthickness."$LowResMesh"k_fs_LR.surf.gii --in=${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil."$Hemisphere"."$LowResMesh"k_fs_LR.func.gii --pd="$DesignMatrix" --con=${DesignContrasts} ${ExtraArgs} --mode=surface - rm ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil."$Hemisphere"."$LowResMesh"k_fs_LR.func.gii ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi."$Hemisphere"."$LowResMesh"k_fs_LR.func.gii - done - - #Merge Cortical Surface and Subcortical Volume into Grayordinates - log_Msg "Merge Cortical Surface and Subcortical Volume into Grayordinates" - mkdir ${FEATDir}/GrayordinatesStats - cat ${FEATDir}/SubcorticalVolumeStats/dof > ${FEATDir}/GrayordinatesStats/dof - cat ${FEATDir}/SubcorticalVolumeStats/logfile > ${FEATDir}/GrayordinatesStats/logfile - cat ${FEATDir}/L_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile - cat ${FEATDir}/R_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile - cd ${FEATDir}/SubcorticalVolumeStats - Files=`ls | grep .nii.gz | cut -d "." 
-f 1` - cd $DIR - for File in $Files ; do - ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${FEATDir}/GrayordinatesStats/${File}.dtseries.nii -volume ${FEATDir}/SubcorticalVolumeStats/${File}.nii.gz $ROIsFolder/Atlas_ROIs.${GrayordinatesResolution}.nii.gz -left-metric ${FEATDir}/L_SurfaceStats/${File}.func.gii -roi-left "$DownSampleFolder"/"$Subject".L.atlasroi."$LowResMesh"k_fs_LR.shape.gii -right-metric ${FEATDir}/R_SurfaceStats/${File}.func.gii -roi-right "$DownSampleFolder"/"$Subject".R.atlasroi."$LowResMesh"k_fs_LR.shape.gii - done - rm -r ${FEATDir}/SubcorticalVolumeStats ${FEATDir}/L_SurfaceStats ${FEATDir}/R_SurfaceStats - -else +### Parcellate data if a Parcellation was provided +log_Msg "Parcellate data if a Parcellation was provided" +if $runParcellated; then + log_Msg "Parcellating data" + ${CARET7DIR}/wb_command -cifti-parcellate ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${ParcellationFile} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.ptseries.nii +fi - ###Parcellated Processing### - log_Msg "Parcellated Processing" - ${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}${ParcellationString}.${Extension} ${FEATDir}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz - film_gls --rn=${FEATDir}/ParcellatedStats --in=${FEATDir}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz --pd="$DesignMatrix" --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric - rm ${FEATDir}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}${ParcellationString}_FAKENIFTI.nii.gz - cd ${FEATDir}/ParcellatedStats - Files=`ls | grep .nii.gz | cut -d "." -f 1` - cd $DIR - for File in $Files ; do - ${CARET7DIR}/wb_command -cifti-convert -from-nifti ${FEATDir}/ParcellatedStats/${File}.nii.gz ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas"$TemporalFilterString""$SmoothingString"${RegString}${ParcellationString}.ptseries.nii ${FEATDir}/ParcellatedStats/${File}.ptseries.nii -reset-timepoints 1 1 - done - rm ${FEATDir}/ParcellatedStats/*.nii.gz +### Apply spatial smoothing to CIFTI dense analysis +if $runDense ; then + if [ "$FinalSmoothingFWHM" -gt "$OriginalSmoothingFWHM" ] ; then + # Some smoothing was already conducted in fMRISurface Pipeline. 
To reach the desired + # total level of smoothing, the additional spatial smoothing added here must be reduced + # by the original smoothing applied earlier + AdditionalSmoothingFWHM=`echo "sqrt(( $FinalSmoothingFWHM ^ 2 ) - ( $OriginalSmoothingFWHM ^ 2 ))" | bc -l` + AdditionalSigma=`echo "$AdditionalSmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l` + log_Msg "AdditionalSmoothingFWHM: ${AdditionalSmoothingFWHM}" + log_Msg "AdditionalSigma: ${AdditionalSigma}" + log_Msg "Applying additional surface smoothing to CIFTI Dense data" + ${CARET7DIR}/wb_command -cifti-smoothing ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${AdditionalSigma} ${AdditionalSigma} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}.dtseries.nii -left-surface ${DownSampleFolder}/${Subject}.L.midthickness.${LowResMesh}k_fs_LR.surf.gii -right-surface ${DownSampleFolder}/${Subject}.R.midthickness.${LowResMesh}k_fs_LR.surf.gii + else + if [ "$FinalSmoothingFWHM" -eq "$OriginalSmoothingFWHM" ]; then + log_Msg "No additional surface smoothing requested for CIFTI Dense data" + else + log_Msg "WARNING: For CIFTI Dense data, the surface smoothing requested \($FinalSmoothingFWHM\) is LESS than the surface smoothing already applied \(${OriginalSmoothingFWHM}\)." + log_Msg "Continuing analysis with ${OriginalSmoothingFWHM} of total surface smoothing." + fi + cp ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}.dtseries.nii + fi fi -###Standard NIFTI Volume-based Processsing### -log_Msg "Standard NIFTI Volume-based Processsing" -if [ $VolumeBasedProcessing = "YES" ] ; then +### Apply spatial smoothing to volume analysis +if $runVolume ; then + log_Msg "Standard NIFTI Volume-based Processsing" #Add edge-constrained volume smoothing log_Msg "Add edge-constrained volume smoothing" @@ -275,7 +248,7 @@ if [ $VolumeBasedProcessing = "YES" ] ; then fslmaths ${FEATDir}/mask_orig -kernel gauss ${FinalSmoothingSigma} -fmean ${FEATDir}/mask_orig_weight -odt float fslmaths ${InputfMRI} -kernel gauss ${FinalSmoothingSigma} -fmean \ -div ${FEATDir}/mask_orig_weight -mas ${FEATDir}/mask_orig \ - ${FEATDir}/${LevelOnefMRIName}"$SmoothingString" -odt float + ${FEATDir}/${LevelOnefMRIName}${SmoothingString} -odt float #Add volume dilation # @@ -319,40 +292,133 @@ if [ $VolumeBasedProcessing = "YES" ] ; then -kernel gauss ${FinalSmoothingSigma} -fmean ${FEATDir}/mask_dilM_weight -odt float fslmaths ${InputfMRI} -dilM -kernel gauss ${FinalSmoothingSigma} -fmean \ -div ${FEATDir}/mask_dilM_weight -mas ${FEATDir}/mask_dilM \ - ${FEATDir}/${LevelOnefMRIName}_dilM"$SmoothingString" -odt float + ${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString} -odt float # Take just the additional "rim" voxels from the dilated then smoothed time series, and add them # into the smoothed time series (that didn't have any dilation) - SmoothedDilatedResultFile=${FEATDir}/${LevelOnefMRIName}"$SmoothingString"_dilMrim + SmoothedDilatedResultFile=${FEATDir}/${LevelOnefMRIName}${SmoothingString}_dilMrim fslmaths ${FEATDir}/mask_orig -binv ${FEATDir}/mask_orig_inv - fslmaths ${FEATDir}/${LevelOnefMRIName}_dilM"$SmoothingString" \ + fslmaths ${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString} \ -mas ${FEATDir}/mask_orig_inv \ - -add ${FEATDir}/${LevelOnefMRIName}"$SmoothingString" \ + -add ${FEATDir}/${LevelOnefMRIName}${SmoothingString} \ 
${SmoothedDilatedResultFile} - #Add temporal filtering to the output from above - log_Msg "Add temporal filtering" - # Temporal filtering is conducted by fslmaths. - # fslmaths -bptf removes timeseries mean (for FSL 5.0.7 onward), which is expected by film_gls. - # So, save the mean to file, then add it back after -bptf. - # We drop the "dilMrim" string from the output file name, so as to avoid breaking - # any downstream scripts. - fslmaths ${SmoothedDilatedResultFile} -Tmean ${SmoothedDilatedResultFile}_mean - hp_sigma=`echo "0.5 * $TemporalFilter / $TR_vol" | bc -l` - fslmaths ${SmoothedDilatedResultFile} -bptf ${hp_sigma} -1 \ +fi # end Volume spatial smoothing + + +##### APPLY TEMPORAL FILTERING ##### + +# Issue 1: Temporal filtering is conducted by fslmaths, but fslmaths is not CIFTI-compliant. +# Convert CIFTI to "fake" NIFTI file, use FSL tools (fslmaths), then convert "fake" NIFTI back to CIFTI. +# Issue 2: fslmaths -bptf removes timeseries mean (for FSL 5.0.7 onward). film_gls expects mean in image. +# So, save the mean to file, then add it back after -bptf. +if [[ $runParcellated == true || $runDense == true ]]; then + log_Msg "Add temporal filtering" + # Convert CIFTI to "fake" NIFTI + ${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}.${Extension} ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz + # Save mean image + fslmaths ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz -Tmean ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI_mean.nii.gz + # Compute smoothing kernel sigma + hp_sigma=`echo "0.5 * $TemporalFilter / $TR_vol" | bc -l`; + # Use fslmaths to apply high pass filter and then add mean back to image + fslmaths ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz -bptf ${hp_sigma} -1 \ + -add ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI_mean.nii.gz \ + ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz + # Convert "fake" NIFTI back to CIFTI + ${CARET7DIR}/wb_command -cifti-convert -from-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}.${Extension} ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${Extension} + # Cleanup the "fake" NIFTI files + rm ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI_mean.nii.gz +fi + +if $runVolume; then + #Add temporal filtering to the output from above + log_Msg "Add temporal filtering" + # Temporal filtering is conducted by fslmaths. + # fslmaths -bptf removes timeseries mean (for FSL 5.0.7 onward), which is expected by film_gls. 
+ # So, save the mean to file, then add it back after -bptf. + # We drop the "dilMrim" string from the output file name, so as to avoid breaking + # any downstream scripts. + fslmaths ${SmoothedDilatedResultFile} -Tmean ${SmoothedDilatedResultFile}_mean + hp_sigma=`echo "0.5 * $TemporalFilter / $TR_vol" | bc -l` + fslmaths ${SmoothedDilatedResultFile} -bptf ${hp_sigma} -1 \ -add ${SmoothedDilatedResultFile}_mean \ - ${FEATDir}/${LevelOnefMRIName}"$TemporalFilterString""$SmoothingString".nii.gz + ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}.nii.gz +fi + + +##### RUN film_gls (GLM ANALYSIS ON LEVEL 1) ##### + +# Run CIFTI Dense Grayordinates Analysis (if requested) +if $runDense ; then + # Dense Grayordinates Processing + log_Msg "Dense Grayordinates Processing" + #Split into surface and volume + log_Msg "Split into surface and volume" + ${CARET7DIR}/wb_command -cifti-separate-all ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}.dtseries.nii -volume ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz -left ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.L.${LowResMesh}k_fs_LR.func.gii -right ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.R.${LowResMesh}k_fs_LR.func.gii + + #Run film_gls on subcortical volume data + log_Msg "Run film_gls on subcortical volume data" + film_gls --rn=${FEATDir}/SubcorticalVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric + rm ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz + + #Run film_gls on cortical surface data + log_Msg "Run film_gls on cortical surface data" + for Hemisphere in L R ; do + #Prepare for film_gls + log_Msg "Prepare for film_gls" + ${CARET7DIR}/wb_command -metric-dilate ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii ${DownSampleFolder}/${Subject}.${Hemisphere}.midthickness.${LowResMesh}k_fs_LR.surf.gii 50 ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii -nearest + + #Run film_gls on surface data + log_Msg "Run film_gls on surface data" + film_gls --rn=${FEATDir}/${Hemisphere}_SurfaceStats --sa --ms=15 --epith=5 --in2=${DownSampleFolder}/${Subject}.${Hemisphere}.midthickness.${LowResMesh}k_fs_LR.surf.gii --in=${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --mode=surface + rm ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii + done + + # Merge Cortical Surface and Subcortical Volume into Grayordinates + log_Msg "Merge Cortical Surface and Subcortical Volume into Grayordinates" + mkdir ${FEATDir}/GrayordinatesStats + cat ${FEATDir}/SubcorticalVolumeStats/dof > ${FEATDir}/GrayordinatesStats/dof + cat ${FEATDir}/SubcorticalVolumeStats/logfile > 
${FEATDir}/GrayordinatesStats/logfile
+	cat ${FEATDir}/L_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile
+	cat ${FEATDir}/R_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile
-	#Run film_gls on volume data
-	log_Msg "Run film_gls on volume data"
-	film_gls --rn=${FEATDir}/StandardVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}"$TemporalFilterString""$SmoothingString".nii.gz --pd="$DesignMatrix" --con=${DesignContrasts} ${ExtraArgs} --thr=1000
+	for Subcortical in ${FEATDir}/SubcorticalVolumeStats/*nii.gz ; do
+		File=$( basename $Subcortical .nii.gz );
+		${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${FEATDir}/GrayordinatesStats/${File}.dtseries.nii -volume $Subcortical $ROIsFolder/Atlas_ROIs.${GrayordinatesResolution}.nii.gz -left-metric ${FEATDir}/L_SurfaceStats/${File}.func.gii -roi-left ${DownSampleFolder}/${Subject}.L.atlasroi.${LowResMesh}k_fs_LR.shape.gii -right-metric ${FEATDir}/R_SurfaceStats/${File}.func.gii -roi-right ${DownSampleFolder}/${Subject}.R.atlasroi.${LowResMesh}k_fs_LR.shape.gii
+	done
+	rm -r ${FEATDir}/SubcorticalVolumeStats ${FEATDir}/L_SurfaceStats ${FEATDir}/R_SurfaceStats
+fi
-	#Cleanup
-	rm -f ${FEATDir}/mask_*.nii.gz
-	rm -f ${FEATDir}/${LevelOnefMRIName}"$SmoothingString".nii.gz
-	rm -f ${FEATDir}/${LevelOnefMRIName}_dilM"$SmoothingString".nii.gz
-	rm -f ${SmoothedDilatedResultFile}*.nii.gz
+# Run CIFTI Parcellated Analysis (if requested)
+if $runParcellated ; then
+	# Parcellated Processing
+	log_Msg "Parcellated Processing"
+	# Convert CIFTI to "fake" NIFTI
+	${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${Extension} ${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz
+	# Now run film_gls on the fakeNIFTI file
+	film_gls --rn=${FEATDir}/ParcellatedStats --in=${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric
+	# Remove "fake" NIFTI time series file
+	rm ${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz
+	# Convert "fake" NIFTI output files (copes, varcopes, zstats) back to CIFTI
+	templateCIFTI=${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.ptseries.nii
+	for fakeNIFTI in `ls ${FEATDir}/ParcellatedStats/*.nii.gz` ; do
+		CIFTI=$( echo $fakeNIFTI | sed -e "s|.nii.gz|.${Extension}|" );
+		${CARET7DIR}/wb_command -cifti-convert -from-nifti $fakeNIFTI $templateCIFTI $CIFTI -reset-timepoints 1 1
+		rm $fakeNIFTI;
+	done
+fi
+# Standard NIFTI Volume-based Processing
+if $runVolume ; then
+	log_Msg "Standard NIFTI Volume-based Processing"
+	log_Msg "Run film_gls on volume data"
+	film_gls --rn=${FEATDir}/StandardVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1000
+
+	# Cleanup
+	rm -f ${FEATDir}/mask_*.nii.gz
+	rm -f ${FEATDir}/${LevelOnefMRIName}${SmoothingString}.nii.gz
+	rm -f ${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString}.nii.gz
+	rm -f ${SmoothedDilatedResultFile}*.nii.gz
 fi
 
 log_Msg "Complete"
 
diff --git 
a/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh b/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh index 9ee600b18..126bf34cd 100755 --- a/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh +++ b/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh @@ -1,14 +1,11 @@ #!/bin/bash set -e -g_script_name=`basename ${0}` -# Load Function Libraries +########################################## PREPARE FUNCTIONS ########################################## + source ${HCPPIPEDIR}/global/scripts/log.shlib # Logging related functions source ${HCPPIPEDIR}/global/scripts/fsl_version.shlib # Function for getting FSL version -# Establish tool name for logging -log_SetToolName "${g_script_name}" - show_tool_versions() { # Show HCP pipelines version @@ -25,91 +22,126 @@ show_tool_versions() log_Msg "FSL version: ${fsl_ver}" } -Subject="$1" -log_Msg "Subject: ${Subject}" -ResultsFolder="$2" -log_Msg "ResultsFolder: ${ResultsFolder}" -DownSampleFolder="$3" -log_Msg "DownSampleFolder: ${DownSampleFolder}" +########################################## READ COMMAND-LINE ARGUMENTS ################################## +Subject="$1" +ResultsFolder="$2" +DownSampleFolder="$3" LevelOnefMRINames="$4" -log_Msg "LevelOnefMRINames: ${LevelOnefMRINames}" - LevelOnefsfNames="$5" -log_Msg "LevelOnefsfNames: ${LevelOnefsfNames}" - LevelTwofMRIName="$6" -log_Msg "LevelTwofMRIName: ${LevelTwofMRIName}" - LevelTwofsfName="$7" -log_Msg "LevelTwofsfName: ${LevelTwofsfName}" - LowResMesh="$8" -log_Msg "LowResMesh: ${LowResMesh}" - FinalSmoothingFWHM="$9" -log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" - TemporalFilter="${10}" -log_Msg "TemporalFilter: ${TemporalFilter}" - VolumeBasedProcessing="${11}" -log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" - RegName="${12}" -log_Msg "RegName: ${RegName}" - Parcellation="${13}" + +# Log how the script was launched +g_script_name=`basename ${0}` +log_SetToolName "${g_script_name}" +log_Msg "${g_script_name} arguments: $@" + +# Log variables parsed from command line arguments +log_Msg "Subject: ${Subject}" +log_Msg "ResultsFolder: ${ResultsFolder}" +log_Msg "DownSampleFolder: ${DownSampleFolder}" +log_Msg "LevelOnefMRINames: ${LevelOnefMRINames}" +log_Msg "LevelOnefsfNames: ${LevelOnefsfNames}" +log_Msg "LevelTwofMRIName: ${LevelTwofMRIName}" +log_Msg "LevelTwofsfName: ${LevelTwofsfName}" +log_Msg "LowResMesh: ${LowResMesh}" +log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" +log_Msg "TemporalFilter: ${TemporalFilter}" +log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" +log_Msg "RegName: ${RegName}" log_Msg "Parcellation: ${Parcellation}" +# Log versions of tools used by this script show_tool_versions -#Set up some things -LevelOnefMRINames=`echo $LevelOnefMRINames | sed 's/@/ /g'` -LevelOnefsfNames=`echo $LevelOnefsfNames | sed 's/@/ /g'` +########################################## MAIN ################################## -if [ ! 
${Parcellation} = "NONE" ] ; then +##### DETERMINE ANALYSES TO RUN (DENSE, PARCELLATED, VOLUME) ##### + +# initialize run variables +runParcellated=false; runVolume=false; runDense=false; Analyses=""; + +# Determine whether to run Parcellated, and set strings used for filenaming +if [ "${Parcellation}" != "NONE" ] ; then + # Run Parcellated Analyses + runParcellated=true; ParcellationString="_${Parcellation}" Extension="ptseries.nii" ScalarExtension="pscalar.nii" -else + Analyses="${Analyses}ParcellatedStats "; # space character at end to separate multiple analyses +fi + +# Determine whether to run Dense, and set strings used for filenaming +if [ "${Parcellation}" = "NONE" ]; then + # Run Dense Analyses + runDense=true; ParcellationString="" Extension="dtseries.nii" ScalarExtension="dscalar.nii" + Analyses="${Analyses}GrayordinatesStats "; # space character at end to separate multiple analyses fi +# Determine whether to run Volume, and set strings used for filenaming +if [ $VolumeBasedProcessing = "YES" ] ; then + runVolume=true; + Extension=".nii.gz" + Analyses="${Analyses}StandardVolumeStats "; # space character at end to separate multiple analyses +fi + +log_Msg "Analyses: ${Analyses}" log_Msg "ParcellationString: ${ParcellationString}" log_Msg "Extension: ${Extension}" log_Msg "ScalarExtension: ${ScalarExtension}" -if [ ! ${RegName} = "NONE" ] ; then - RegString="_${RegName}" -else - RegString="" -fi -log_Msg "RegString: ${RegString}" +##### SET VARIABLES REQUIRED FOR FILE NAMING ##### +### Set smoothing and filtering string variables used for file naming SmoothingString="_s${FinalSmoothingFWHM}" -log_Msg "SmoothingString: ${SmoothingString}" - TemporalFilterString="_hp""$TemporalFilter" +log_Msg "SmoothingString: ${SmoothingString}" log_Msg "TemporalFilterString: ${TemporalFilterString}" +### Set variables used for different registration procedures +if [ "${RegName}" != "NONE" ] ; then + RegString="_${RegName}" +else + RegString="" +fi +log_Msg "RegString: ${RegString}" + +### Figure out where the Level1 .feat directories are located +# Change '@' delimited arguments to space-delimited lists for use in for loops +LevelOnefMRINames=`echo $LevelOnefMRINames | sed 's/@/ /g'` +LevelOnefsfNames=`echo $LevelOnefsfNames | sed 's/@/ /g'` +# Loop over list to make string with paths to the Level1 .feat directories LevelOneFEATDirSTRING="" -i=1 +NumFirstLevelFolders=0; # counter for LevelOnefMRIName in $LevelOnefMRINames ; do - LevelOnefsfName=`echo $LevelOnefsfNames | cut -d " " -f $i` - LevelOneFEATDirSTRING="${LevelOneFEATDirSTRING}${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}.feat " - i=$(($i+1)) + NumFirstLevelFolders=$(($NumFirstLevelFolders+1)); + # get fsf name that corresponds to fMRI name + LevelOnefsfName=`echo $LevelOnefsfNames | cut -d " " -f $NumFirstLevelFolders`; + LevelOneFEATDirSTRING="${LevelOneFEATDirSTRING}${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}.feat "; # space character at end is needed to separate multiple FEATDir strings done -NumFirstLevelFolders=$(($i-1)) +### Determine list of contrasts for this analysis FirstFolder=`echo $LevelOneFEATDirSTRING | cut -d " " -f 1` ContrastNames=`cat ${FirstFolder}/design.con | grep "ContrastName" | cut -f 2` NumContrasts=`echo ${ContrastNames} | wc -w` + + +##### MAKE DESIGN FILES AND LEVEL2 DIRECTORY ##### + +# Make LevelTwoFEATDir 
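+# As a hypothetical example: with LevelTwofMRIName=tfMRI_WM, LevelTwofsfName=tfMRI_WM, TemporalFilter=200,
+# FinalSmoothingFWHM=4, RegName=NONE and Parcellation=NONE, the line below resolves to
+# ${ResultsFolder}/tfMRI_WM/tfMRI_WM_hp200_s4_level2.feat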
LevelTwoFEATDir="${ResultsFolder}/${LevelTwofMRIName}/${LevelTwofsfName}${TemporalFilterString}${SmoothingString}_level2${RegString}${ParcellationString}.feat" if [ -e ${LevelTwoFEATDir} ] ; then rm -r ${LevelTwoFEATDir} @@ -118,187 +150,195 @@ else mkdir -p ${LevelTwoFEATDir} fi +# Edit template.fsf and place it in LevelTwoFEATDir cat ${ResultsFolder}/${LevelTwofMRIName}/${LevelTwofsfName}_hp200_s4_level2.fsf | sed s/_hp200_s4/${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}/g > ${LevelTwoFEATDir}/design.fsf -#Make design files +# Make additional design files required by flameo log_Msg "Make design files" -DIR=`pwd` -cd ${LevelTwoFEATDir} +cd ${LevelTwoFEATDir}; # Run feat_model inside LevelTwoFEATDir so relative paths work feat_model ${LevelTwoFEATDir}/design -cd $DIR - -#Loop over Grayordinates and Standard Volume (if requested) Level 2 Analyses -log_Msg "Loop over Grayordinates and Standard Volume (if requested) Level 2 Analyses" -if [ ${VolumeBasedProcessing} = "YES" ] ; then - Analyses="GrayordinatesStats StandardVolumeStats" -elif [ -z ${ParcellationString} ] ; then - Analyses="GrayordinatesStats" -else - Analyses="ParcellatedStats" -fi -log_Msg "Analyses: ${Analyses}" +cd $OLDPWD; # Go back to previous directory using bash built-in $OLDPWD + + +##### RUN flameo (FIXED-EFFECTS GLM ANALYSIS ON LEVEL2) ##### +### Loop over Level 2 Analyses requested +log_Msg "Loop over Level 2 Analyses requested: ${Analyses}" for Analysis in ${Analyses} ; do - log_Msg "Analysis: ${Analysis}" - mkdir -p ${LevelTwoFEATDir}/${Analysis} - - #Copy over level one folders and convert CIFTI to NIFTI if required - log_Msg "Copy over level one folders and convert CIFTI to NIFTI if required" - if [ -e ${FirstFolder}/${Analysis}/cope1.nii.gz ] ; then - Grayordinates="NO" - i=1 - for LevelOneFEATDir in ${LevelOneFEATDirSTRING} ; do - mkdir -p ${LevelTwoFEATDir}/${Analysis}/${i} - cp ${LevelOneFEATDir}/${Analysis}/* ${LevelTwoFEATDir}/${Analysis}/${i} - i=$(($i+1)) - done - elif [ -e ${FirstFolder}/${Analysis}/cope1.${Extension} ] ; then - Grayordinates="YES" - i=1 - for LevelOneFEATDir in ${LevelOneFEATDirSTRING} ; do - mkdir -p ${LevelTwoFEATDir}/${Analysis}/${i} - cp ${LevelOneFEATDir}/${Analysis}/* ${LevelTwoFEATDir}/${Analysis}/${i} - cd ${LevelTwoFEATDir}/${Analysis}/${i} - Files=`ls | grep .${Extension} | cut -d "." 
-f 1` - cd $DIR - for File in $Files ; do - ${CARET7DIR}/wb_command -cifti-convert -to-nifti ${LevelTwoFEATDir}/${Analysis}/${i}/${File}.${Extension} ${LevelTwoFEATDir}/${Analysis}/${i}/${File}.nii.gz - rm ${LevelTwoFEATDir}/${Analysis}/${i}/${File}.${Extension} - done - i=$(($i+1)) - done - else - echo "Level One Folder Not Found" - fi - - #Create dof and Mask - log_Msg "Create dof and Mask" - MERGESTRING="" - i=1 - while [ $i -le ${NumFirstLevelFolders} ] ; do - dof=`cat ${LevelTwoFEATDir}/${Analysis}/${i}/dof` - fslmaths ${LevelTwoFEATDir}/${Analysis}/${i}/res4d.nii.gz -Tstd -bin -mul $dof ${LevelTwoFEATDir}/${Analysis}/${i}/dofmask.nii.gz - MERGESTRING=`echo "${MERGESTRING}${LevelTwoFEATDir}/${Analysis}/${i}/dofmask.nii.gz "` - i=$(($i+1)) - done - fslmerge -t ${LevelTwoFEATDir}/${Analysis}/dof.nii.gz $MERGESTRING - fslmaths ${LevelTwoFEATDir}/${Analysis}/dof.nii.gz -Tmin -bin ${LevelTwoFEATDir}/${Analysis}/mask.nii.gz - - #Merge COPES and VARCOPES and run 2nd level analysis - log_Msg "Merge COPES and VARCOPES and run 2nd level analysis" - log_Msg "NumContrasts: ${NumContrasts}" - i=1 - while [ $i -le ${NumContrasts} ] ; do - log_Msg "Contrast Number i: ${i}" - COPEMERGE="" - VARCOPEMERGE="" - j=1 - while [ $j -le ${NumFirstLevelFolders} ] ; do - COPEMERGE="${COPEMERGE}${LevelTwoFEATDir}/${Analysis}/${j}/cope${i}.nii.gz " - VARCOPEMERGE="${VARCOPEMERGE}${LevelTwoFEATDir}/${Analysis}/${j}/varcope${i}.nii.gz " - j=$(($j+1)) - done - fslmerge -t ${LevelTwoFEATDir}/${Analysis}/cope${i}.nii.gz $COPEMERGE - fslmerge -t ${LevelTwoFEATDir}/${Analysis}/varcope${i}.nii.gz $VARCOPEMERGE - - log_Msg "About to use flameo" - which flameo - curdir=`pwd` - cd ${LevelTwoFEATDir} - - log_Msg "Command: flameo --cope=${Analysis}/cope${i}.nii.gz \\" - log_Msg " --vc=${Analysis}/varcope${i}.nii.gz \\" - log_Msg " --dvc=${Analysis}/dof.nii.gz \\" - log_Msg " --mask=${Analysis}/mask.nii.gz \\" - log_Msg " --ld=${Analysis}/cope${i}.feat \\" - log_Msg " --dm=design.mat \\" - log_Msg " --cs=design.grp \\" - log_Msg " --tc=design.con \\" - log_Msg " --runmode=fe" - - flameo --cope=${Analysis}/cope${i}.nii.gz \ - --vc=${Analysis}/varcope${i}.nii.gz \ - --dvc=${Analysis}/dof.nii.gz \ - --mask=${Analysis}/mask.nii.gz \ - --ld=${Analysis}/cope${i}.feat \ - --dm=design.mat \ - --cs=design.grp \ - --tc=design.con \ - --runmode=fe - - log_Msg "Successfully completed flameo" - cd ${curdir} - i=$(($i+1)) - done - - #Cleanup Temporary Files - log_Msg "Cleanup Temporary Files" - j=1 - while [ $j -le ${NumFirstLevelFolders} ] ; do - rm -r ${LevelTwoFEATDir}/${Analysis}/${j} - j=$(($j+1)) - done - - #Convert Grayordinates NIFTI Files to CIFTI if necessary - log_Msg "Convert Grayordinates NIFTI Files to CIFTI if necessary" - if [ $Grayordinates = "YES" ] ; then - cd ${LevelTwoFEATDir}/${Analysis} - Files=`ls | grep .nii.gz | cut -d "." -f 1` - cd $DIR - for File in $Files ; do - ${CARET7DIR}/wb_command -cifti-convert -from-nifti ${LevelTwoFEATDir}/${Analysis}/${File}.nii.gz ${LevelOneFEATDir}/${Analysis}/pe1.${Extension} ${LevelTwoFEATDir}/${Analysis}/${File}.${Extension} -reset-timepoints 1 1 - rm ${LevelTwoFEATDir}/${Analysis}/${File}.nii.gz - done - i=1 - while [ $i -le ${NumContrasts} ] ; do - cd ${LevelTwoFEATDir}/${Analysis}/cope${i}.feat - Files=`ls | grep .nii.gz | cut -d "." 
-f 1` - cd $DIR - for File in $Files ; do - ${CARET7DIR}/wb_command -cifti-convert -from-nifti ${LevelTwoFEATDir}/${Analysis}/cope${i}.feat/${File}.nii.gz ${LevelOneFEATDir}/${Analysis}/pe1.${Extension} ${LevelTwoFEATDir}/${Analysis}/cope${i}.feat/${File}.${Extension} -reset-timepoints 1 1 - rm ${LevelTwoFEATDir}/${Analysis}/cope${i}.feat/${File}.nii.gz - done - i=$(($i+1)) - done - fi -done - -#Generate Files for Viewing + log_Msg "Run Analysis: ${Analysis}" + + ### Exit if cope files are not present in Level 1 folders + fileCount=$( ls ${FirstFolder}/${Analysis}/cope1.${Extension} 2>/dev/null | wc -l ); + if [ "$fileCount" -eq 0 ]; then + log_Msg "ERROR: Missing expected cope files in ${FirstFolder}/${Analysis}" + log_Msg "ERROR: Exiting $g_script_name" + exit 1 + fi + + ### Copy Level 1 stats folders into Level 2 analysis directory + log_Msg "Copy over Level 1 stats folders and convert CIFTI to NIFTI if required" + mkdir -p ${LevelTwoFEATDir}/${Analysis} + i=1 + for LevelOneFEATDir in ${LevelOneFEATDirSTRING} ; do + mkdir -p ${LevelTwoFEATDir}/${Analysis}/${i} + cp ${LevelOneFEATDir}/${Analysis}/* ${LevelTwoFEATDir}/${Analysis}/${i} + i=$(($i+1)) + done + + ### convert CIFTI files to fakeNIFTI if required + if [ "${Analysis}" != "StandardVolumeStats" ] ; then + log_Msg "Convert CIFTI files to fakeNIFTI" + fakeNIFTIused="YES" + for CIFTI in ${LevelTwoFEATDir}/${Analysis}/*/*.${Extension} ; do + fakeNIFTI=$( echo $CIFTI | sed -e "s|.${Extension}|.nii.gz|" ); + ${CARET7DIR}/wb_command -cifti-convert -to-nifti $CIFTI $fakeNIFTI + rm $CIFTI + done + else + fakeNIFTIused="NO" + fi + + ### Create dof and Mask files for input to flameo (Level 2 analysis) + log_Msg "Create dof and Mask files for input to flameo (Level 2 analysis)" + MERGESTRING="" + i=1 + while [ "$i" -le "${NumFirstLevelFolders}" ] ; do + dof=`cat ${LevelTwoFEATDir}/${Analysis}/${i}/dof` + fslmaths ${LevelTwoFEATDir}/${Analysis}/${i}/res4d.nii.gz -Tstd -bin -mul $dof ${LevelTwoFEATDir}/${Analysis}/${i}/dofmask.nii.gz + MERGESTRING=`echo "${MERGESTRING}${LevelTwoFEATDir}/${Analysis}/${i}/dofmask.nii.gz "` + i=$(($i+1)) + done + fslmerge -t ${LevelTwoFEATDir}/${Analysis}/dof.nii.gz $MERGESTRING + fslmaths ${LevelTwoFEATDir}/${Analysis}/dof.nii.gz -Tmin -bin ${LevelTwoFEATDir}/${Analysis}/mask.nii.gz + + ### Create merged cope and varcope files for input to flameo (Level 2 analysis) + log_Msg "Merge COPES and VARCOPES for ${NumContrasts} Contrasts" + copeCounter=1 + while [ "$copeCounter" -le "${NumContrasts}" ] ; do + log_Msg "Contrast Number: ${copeCounter}" + COPEMERGE="" + VARCOPEMERGE="" + i=1 + while [ "$i" -le "${NumFirstLevelFolders}" ] ; do + COPEMERGE="${COPEMERGE}${LevelTwoFEATDir}/${Analysis}/${i}/cope${copeCounter}.nii.gz " + VARCOPEMERGE="${VARCOPEMERGE}${LevelTwoFEATDir}/${Analysis}/${i}/varcope${copeCounter}.nii.gz " + i=$(($i+1)) + done + fslmerge -t ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.nii.gz $COPEMERGE + fslmerge -t ${LevelTwoFEATDir}/${Analysis}/varcope${copeCounter}.nii.gz $VARCOPEMERGE + copeCounter=$(($copeCounter+1)) + done + + ### Run 2nd level analysis using flameo + log_Msg "Run flameo (Level 2 analysis) for ${NumContrasts} Contrasts" + copeCounter=1 + while [ "$copeCounter" -le "${NumContrasts}" ] ; do + log_Msg "Contrast Number: ${copeCounter}" + log_Msg "$( which flameo )" + log_Msg "Command: flameo --cope=${Analysis}/cope${copeCounter}.nii.gz \\" + log_Msg " --vc=${Analysis}/varcope${copeCounter}.nii.gz \\" + log_Msg " --dvc=${Analysis}/dof.nii.gz \\" + log_Msg " 
--mask=${Analysis}/mask.nii.gz \\" + log_Msg " --ld=${Analysis}/cope${copeCounter}.feat \\" + log_Msg " --dm=design.mat \\" + log_Msg " --cs=design.grp \\" + log_Msg " --tc=design.con \\" + log_Msg " --runmode=fe" + + cd ${LevelTwoFEATDir}; # run flameo within LevelTwoFEATDir so relative paths work + flameo --cope=${Analysis}/cope${copeCounter}.nii.gz \ + --vc=${Analysis}/varcope${copeCounter}.nii.gz \ + --dvc=${Analysis}/dof.nii.gz \ + --mask=${Analysis}/mask.nii.gz \ + --ld=${Analysis}/cope${copeCounter}.feat \ + --dm=design.mat \ + --cs=design.grp \ + --tc=design.con \ + --runmode=fe + + log_Msg "Successfully completed flameo for Contrast Number: ${copeCounter}" + cd $OLDPWD; # Go back to previous directory using bash built-in $OLDPWD + copeCounter=$(($copeCounter+1)) + done + + ### Cleanup Temporary Files (which were copied from Level1 stats directories) + log_Msg "Cleanup Temporary Files" + i=1 + while [ "$i" -le "${NumFirstLevelFolders}" ] ; do + rm -r ${LevelTwoFEATDir}/${Analysis}/${i} + i=$(($i+1)) + done + + ### Convert fakeNIFTI Files back to CIFTI (if necessary) + if [ "$fakeNIFTIused" = "YES" ] ; then + log_Msg "Convert fakeNIFTI files back to CIFTI" + CIFTItemplate="${LevelOneFEATDir}/${Analysis}/pe1.${Extension}" + + # convert flameo input files for review: ${LevelTwoFEATDir}/${Analysis}/*.nii.gz + # convert flameo output files for each cope: ${LevelTwoFEATDir}/${Analysis}/cope*.feat/*.nii.gz + for fakeNIFTI in ${LevelTwoFEATDir}/${Analysis}/*.nii.gz ${LevelTwoFEATDir}/${Analysis}/cope*.feat/*.nii.gz; do + CIFTI=$( echo $fakeNIFTI | sed -e "s|.nii.gz|.${Extension}|" ); + ${CARET7DIR}/wb_command -cifti-convert -from-nifti $fakeNIFTI $CIFTItemplate $CIFTI -reset-timepoints 1 1 + rm $fakeNIFTI + done + fi + +done # end loop: for Analysis in ${Analyses} + + + +### Generate Files for Viewing log_Msg "Generate Files for Viewing" -i=1 -MergeSTRING="" -if [ ${VolumeBasedProcessing} = "YES" ] ; then - VolMergeSTRING="" + +# Initialize strings used for fslmerge command +zMergeSTRING="" +bMergeSTRING="" +touch ${LevelTwoFEATDir}/Contrasttemp.txt + +if $runVolume ; then + VolzMergeSTRING="" + VolbMergeSTRING="" + touch ${LevelTwoFEATDir}/wbtemp.txt fi -if [ -e ${LevelTwoFEATDir}/Contrasts.txt ] ; then - rm ${LevelTwoFEATDir}/Contrasts.txt + +if [ -e "${LevelTwoFEATDir}/Contrasts.txt" ] ; then + rm ${LevelTwoFEATDir}/Contrasts.txt fi -while [ $i -le ${NumContrasts} ] ; do - Contrast=`echo $ContrastNames | cut -d " " -f $i` - echo "${Subject}_${LevelTwofsfName}_level2_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}" >> ${LevelTwoFEATDir}/Contrasttemp.txt - echo ${Contrast} >> ${LevelTwoFEATDir}/Contrasts.txt - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${i}.feat/zstat1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${i}.feat/cope1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_beta_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - zMergeSTRING=`echo "${zMergeSTRING}-cifti 
${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} "` - bMergeSTRING=`echo "${bMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_beta_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} "` - - if [ ${VolumeBasedProcessing} = "YES" ] ; then - echo "OTHER" >> ${LevelTwoFEATDir}/wbtemp.txt - echo "1 255 255 255 255" >> ${LevelTwoFEATDir}/wbtemp.txt - ${CARET7DIR}/wb_command -volume-label-import ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz ${LevelTwoFEATDir}/wbtemp.txt ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -discard-others -unlabeled-value 0 - rm ${LevelTwoFEATDir}/wbtemp.txt - ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii -volume ${LevelTwoFEATDir}/StandardVolumeStats/cope${i}.feat/zstat1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - rm ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii - VolMergeSTRING=`echo "${VolMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii "` - fi - rm ${LevelTwoFEATDir}/Contrasttemp.txt - i=$(($i+1)) + +# Loop over contrasts to identify cope and zstat files to merge into wb_view scalars +copeCounter=1; +while [ "$copeCounter" -le "${NumContrasts}" ] ; do + Contrast=`echo $ContrastNames | cut -d " " -f $copeCounter` + echo "${Subject}_${LevelTwofsfName}_level2_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}" >> ${LevelTwoFEATDir}/Contrasttemp.txt + echo ${Contrast} >> ${LevelTwoFEATDir}/Contrasts.txt + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.feat/zstat1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.feat/cope1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + rm ${LevelTwoFEATDir}/Contrasttemp.txt + zMergeSTRING="${zMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} " + bMergeSTRING="${bMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} " + + if $runVolume ; then + echo "OTHER" >> ${LevelTwoFEATDir}/wbtemp.txt + echo "1 255 255 255 255" >> ${LevelTwoFEATDir}/wbtemp.txt + 
${CARET7DIR}/wb_command -volume-label-import ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz ${LevelTwoFEATDir}/wbtemp.txt ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -discard-others -unlabeled-value 0 + rm ${LevelTwoFEATDir}/wbtemp.txt + ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii -volume ${LevelTwoFEATDir}/StandardVolumeStats/cope${copeCounter}.feat/zstat1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii -volume ${LevelTwoFEATDir}/StandardVolumeStats/cope${copeCounter}.feat/cope1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + rm ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_{cope,zstat}_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii + VolzMergeSTRING="${VolzMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii " + VolbMergeSTRING="${VolbMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii " + fi + copeCounter=$(($copeCounter+1)) done -${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${zMergeSTRING} -${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_beta${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${bMergeSTRING} -if [ ${VolumeBasedProcessing} = "YES" ] ; then - ${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol${TemporalFilterString}${SmoothingString}.dscalar.nii ${VolMergeSTRING} + +# Perform the merge into viewable scalar files +${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${zMergeSTRING} +${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${bMergeSTRING} +if $runVolume ; then + ${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat${TemporalFilterString}${SmoothingString}.dscalar.nii ${VolzMergeSTRING} + ${CARET7DIR}/wb_command -cifti-merge 
${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope${TemporalFilterString}${SmoothingString}.dscalar.nii ${VolbMergeSTRING} fi log_Msg "Complete" From ae57352f0158f56b67d64aa1df7034d4b901209f Mon Sep 17 00:00:00 2001 From: Greg Burgess Date: Tue, 15 May 2018 23:13:32 -0500 Subject: [PATCH 5/5] 1) Correct bug that appended the wrong under certain circumstances, 2) add text to logMsg statements to help user traceback errors --- TaskfMRIAnalysis/TaskfMRIAnalysis.sh | 89 ++--- TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh | 395 ++++++++++----------- TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh | 208 +++++------ 3 files changed, 338 insertions(+), 354 deletions(-) diff --git a/TaskfMRIAnalysis/TaskfMRIAnalysis.sh b/TaskfMRIAnalysis/TaskfMRIAnalysis.sh index 4f911f56f..0f1a47714 100755 --- a/TaskfMRIAnalysis/TaskfMRIAnalysis.sh +++ b/TaskfMRIAnalysis/TaskfMRIAnalysis.sh @@ -47,28 +47,10 @@ set -e # Load function libraries source ${HCPPIPEDIR}/global/scripts/log.shlib # Logging related functions source ${HCPPIPEDIR}/global/scripts/opts.shlib # Command line option functions +source ${HCPPIPEDIR}/global/scripts/fsl_version.shlib # Function for getting FSL version -# Create utility functions to determine and test FSL versions -get_fsl_version() -{ - local fsl_version_file - local fsl_version - local __functionResultVar=${1} - - fsl_version_file="${FSLDIR}/etc/fslversion" - - if [ -f ${fsl_version_file} ] - then - fsl_version=`cat ${fsl_version_file}` - log_Msg "INFO: Determined that the FSL version in use is ${fsl_version}" - else - log_Msg "ERROR: Cannot tell which version of FSL you are using." - exit 1 - fi - - eval $__functionResultVar="'${fsl_version}'" -} +# function to test FSL versions determine_old_or_new_fsl() { # NOTE: @@ -128,7 +110,7 @@ determine_old_or_new_fsl() } -########################################## READ COMMAND-LINE ARGUMENTS ################################## +########################################## READ_ARGS ################################## # Explcitly set tool name for logging log_SetToolName "TaskfMRIAnalysis.sh" @@ -137,7 +119,7 @@ log_SetToolName "TaskfMRIAnalysis.sh" opts_ShowVersionIfRequested $@ # Parse expected arguments from command-line array -log_Msg "Parsing Command Line Options" +log_Msg "READ_ARGS: Parsing Command Line Options" Path=`opts_GetOpt1 "--path" $@` Subject=`opts_GetOpt1 "--subject" $@` LevelOnefMRINames=`opts_GetOpt1 "--lvl1tasks" $@` @@ -155,44 +137,37 @@ RegName=`opts_GetOpt1 "--regname" $@` Parcellation=`opts_GetOpt1 "--parcellation" $@` ParcellationFile=`opts_GetOpt1 "--parcellationfile" $@` -# Level 1 analysis names were delimited by '@' in command-line. -# Change to space delimiter to use in for loops. 
-LevelOnefMRINames=`echo $LevelOnefMRINames | sed 's/@/ /g'` -LevelOnefsfNames=`echo $LevelOnefsfNames | sed 's/@/ /g'` - # Write command-line arguments to log file -log_Msg "Path: ${Path}" -log_Msg "Subject: ${Subject}" -log_Msg "LevelOnefMRINames: ${LevelOnefMRINames}" -log_Msg "LevelOnefsfNames: ${LevelOnefsfNames}" -log_Msg "LevelTwofMRIName: ${LevelTwofMRIName}" -log_Msg "LevelTwofsfNames: ${LevelTwofsfNames}" -log_Msg "LowResMesh: ${LowResMesh}" -log_Msg "GrayordinatesResolution: ${GrayordinatesResolution}" -log_Msg "OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" -log_Msg "Confound: ${Confound}" -log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" -log_Msg "TemporalFilter: ${TemporalFilter}" -log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" -log_Msg "RegName: ${RegName}" -log_Msg "Parcellation: ${Parcellation}" -log_Msg "ParcellationFile: ${ParcellationFile}" +log_Msg "READ_ARGS: Path: ${Path}" +log_Msg "READ_ARGS: Subject: ${Subject}" +log_Msg "READ_ARGS: LevelOnefMRINames: ${LevelOnefMRINames}" +log_Msg "READ_ARGS: LevelOnefsfNames: ${LevelOnefsfNames}" +log_Msg "READ_ARGS: LevelTwofMRIName: ${LevelTwofMRIName}" +log_Msg "READ_ARGS: LevelTwofsfNames: ${LevelTwofsfNames}" +log_Msg "READ_ARGS: LowResMesh: ${LowResMesh}" +log_Msg "READ_ARGS: GrayordinatesResolution: ${GrayordinatesResolution}" +log_Msg "READ_ARGS: OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" +log_Msg "READ_ARGS: Confound: ${Confound}" +log_Msg "READ_ARGS: FinalSmoothingFWHM: ${FinalSmoothingFWHM}" +log_Msg "READ_ARGS: TemporalFilter: ${TemporalFilter}" +log_Msg "READ_ARGS: VolumeBasedProcessing: ${VolumeBasedProcessing}" +log_Msg "READ_ARGS: RegName: ${RegName}" +log_Msg "READ_ARGS: Parcellation: ${Parcellation}" +log_Msg "READ_ARGS: ParcellationFile: ${ParcellationFile}" ########################################## MAIN ######################################### # Determine if required FSL version is present -get_fsl_version fsl_ver -log_Msg "FSL version: ${fsl_ver}" - +fsl_version_get fsl_ver old_or_new_version=$(determine_old_or_new_fsl ${fsl_ver}) if [ "${old_or_new_version}" == "OLD" ] then # Need to exit script due to incompatible FSL VERSION!!!! - log_Msg "ERROR: Detected pre-5.0.7 version of FSL in use (version ${fsl_ver}). Task fMRI Analysis not invoked. Exiting." + log_Msg "MAIN: TEST_FSL_VERSION: ERROR: Detected pre-5.0.7 version of FSL in use (version ${fsl_ver}). Task fMRI Analysis not invoked. Exiting." 
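+	# For example, a hypothetical FSL 5.0.6 installation stops here, while 5.0.7 or newer proceeds to the analyses below.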
exit 1 else - log_Msg "Detected FSL version ${fsl_ver}" + log_Msg "MAIN: TEST_FSL_VERSION: Beginning analyses with FSL version ${fsl_ver}" fi # Determine locations of necessary directories (using expected naming convention) @@ -201,14 +176,16 @@ ResultsFolder="${AtlasFolder}/Results" ROIsFolder="${AtlasFolder}/ROIs" DownSampleFolder="${AtlasFolder}/fsaverage_LR${LowResMesh}k" + # Run Level 1 analyses for each phase encoding direction (from command line arguments) -log_Msg "Running Level 1 Analysis for Both Phase Encoding Directions" +log_Msg "MAIN: RUN_LEVEL1: Running Level 1 Analysis for Both Phase Encoding Directions" i=1 -for LevelOnefMRIName in $LevelOnefMRINames ; do - log_Msg "LevelOnefMRIName: ${LevelOnefMRIName}" +# Level 1 analysis names were delimited by '@' in command-line; change to space in for loop +for LevelOnefMRIName in $( echo $LevelOnefMRINames | sed 's/@/ /g' ) ; do + log_Msg "MAIN: RUN_LEVEL1: LevelOnefMRIName: ${LevelOnefMRIName}" # Get corresponding fsf name from $LevelOnefsfNames list - LevelOnefsfName=`echo $LevelOnefsfNames | cut -d " " -f $i` - log_Msg "Issuing command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh $Subject $ResultsFolder $ROIsFolder $DownSampleFolder $LevelOnefMRIName $LevelOnefsfName $LowResMesh $GrayordinatesResolution $OriginalSmoothingFWHM $Confound $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation $ParcellationFile" + LevelOnefsfName=`echo $LevelOnefsfNames | cut -d "@" -f $i` + log_Msg "MAIN: RUN_LEVEL1: Issuing command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh $Subject $ResultsFolder $ROIsFolder $DownSampleFolder $LevelOnefMRIName $LevelOnefsfName $LowResMesh $GrayordinatesResolution $OriginalSmoothingFWHM $Confound $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation $ParcellationFile" ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel1.sh \ $Subject \ $ResultsFolder \ @@ -232,8 +209,8 @@ done if [ "$LevelTwofMRIName" != "NONE" ] then # Combine Data Across Phase Encoding Directions in the Level 2 Analysis - log_Msg "Combine Data Across Phase Encoding Directions in the Level 2 Analysis" - log_Msg "Issuing command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh $Subject $ResultsFolder $DownSampleFolder $LevelOnefMRINames $LevelOnefsfNames $LevelTwofMRIName $LevelTwofsfNames $LowResMesh $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation" + log_Msg "MAIN: RUN_LEVEL2: Combine Data Across Phase Encoding Directions in the Level 2 Analysis" + log_Msg "MAIN: RUN_LEVEL2: Issuing command: ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh $Subject $ResultsFolder $DownSampleFolder $LevelOnefMRINames $LevelOnefsfNames $LevelTwofMRIName $LevelTwofsfNames $LowResMesh $FinalSmoothingFWHM $TemporalFilter $VolumeBasedProcessing $RegName $Parcellation" ${HCPPIPEDIR_tfMRIAnalysis}/TaskfMRILevel2.sh \ $Subject \ $ResultsFolder \ @@ -250,5 +227,5 @@ then $Parcellation fi -log_Msg "Completed" +log_Msg "MAIN: Completed" diff --git a/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh b/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh index 72db40367..0b3aa2458 100755 --- a/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh +++ b/TaskfMRIAnalysis/scripts/TaskfMRILevel1.sh @@ -15,26 +15,19 @@ source ${HCPPIPEDIR}/global/scripts/fsl_version.shlib # Function for getting FSL show_tool_versions() { # Show HCP pipelines version - log_Msg "Showing HCP Pipelines version" + log_Msg "TOOL_VERSIONS: Showing HCP Pipelines version" cat ${HCPPIPEDIR}/version.txt # Show wb_command version - log_Msg "Showing Connectome 
Workbench (wb_command) version" + log_Msg "TOOL_VERSIONS: Showing Connectome Workbench (wb_command) version" ${CARET7DIR}/wb_command -version # Show fsl version - log_Msg "Showing FSL version" fsl_version_get fsl_ver - log_Msg "FSL version: ${fsl_ver}" } -########################################## READ COMMAND-LINE ARGUMENTS ################################## - -# Explicitly set tool name for logging -g_script_name=`basename ${0}` -log_SetToolName "${g_script_name}" -log_Msg "${g_script_name} arguments: $@" +########################################## READ_ARGS ################################## # Set variables from positional arguments to command line Subject="$1" @@ -54,43 +47,31 @@ RegName="${14}" Parcellation="${15}" ParcellationFile="${16}" -log_Msg "Subject: ${Subject}" -log_Msg "ResultsFolder: ${ResultsFolder}" -log_Msg "ROIsFolder: ${ROIsFolder}" -log_Msg "DownSampleFolder: ${DownSampleFolder}" -log_Msg "LevelOnefMRIName: ${LevelOnefMRIName}" -log_Msg "LevelOnefsfName: ${LevelOnefsfName}" -log_Msg "LowResMesh: ${LowResMesh}" -log_Msg "GrayordinatesResolution: ${GrayordinatesResolution}" -log_Msg "OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" -log_Msg "Confound: ${Confound}" -log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" -log_Msg "TemporalFilter: ${TemporalFilter}" -log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" -log_Msg "RegName: ${RegName}" -log_Msg "Parcellation: ${Parcellation}" -log_Msg "ParcellationFile: ${ParcellationFile}" +# Explicitly set tool name for logging +g_script_name=`basename ${0}` +log_SetToolName "${g_script_name}" +log_Msg "READ_ARGS: ${g_script_name} arguments: $@" +log_Msg "READ_ARGS: Subject: ${Subject}" +log_Msg "READ_ARGS: ResultsFolder: ${ResultsFolder}" +log_Msg "READ_ARGS: ROIsFolder: ${ROIsFolder}" +log_Msg "READ_ARGS: DownSampleFolder: ${DownSampleFolder}" +log_Msg "READ_ARGS: LevelOnefMRIName: ${LevelOnefMRIName}" +log_Msg "READ_ARGS: LevelOnefsfName: ${LevelOnefsfName}" +log_Msg "READ_ARGS: LowResMesh: ${LowResMesh}" +log_Msg "READ_ARGS: GrayordinatesResolution: ${GrayordinatesResolution}" +log_Msg "READ_ARGS: OriginalSmoothingFWHM: ${OriginalSmoothingFWHM}" +log_Msg "READ_ARGS: Confound: ${Confound}" +log_Msg "READ_ARGS: FinalSmoothingFWHM: ${FinalSmoothingFWHM}" +log_Msg "READ_ARGS: TemporalFilter: ${TemporalFilter}" +log_Msg "READ_ARGS: VolumeBasedProcessing: ${VolumeBasedProcessing}" +log_Msg "READ_ARGS: RegName: ${RegName}" +log_Msg "READ_ARGS: Parcellation: ${Parcellation}" +log_Msg "READ_ARGS: ParcellationFile: ${ParcellationFile}" show_tool_versions ########################################## MAIN ################################## -##### SET VARIABLES REQUIRED FOR FILE NAMING ##### - -# Set smoothing and filtering string variables used for file naming -SmoothingString="_s${FinalSmoothingFWHM}" -TemporalFilterString="_hp""$TemporalFilter" -log_Msg "SmoothingString: ${SmoothingString}" -log_Msg "TemporalFilterString: ${TemporalFilterString}" -# Set variables used for different registration procedures -if [ "${RegName}" != "NONE" ] ; then - RegString="_${RegName}" -else - RegString="" -fi -log_Msg "RegString: ${RegString}" - - ##### DETERMINE ANALYSES TO RUN (DENSE, PARCELLATED, VOLUME) ##### # initialize run variables @@ -102,6 +83,7 @@ if [ "${Parcellation}" != "NONE" ] ; then runParcellated=true; ParcellationString="_${Parcellation}" Extension="ptseries.nii" + log_Msg "MAIN: DETERMINE_ANALYSES: Parcellated Analysis requested" fi # Determine whether to run Dense, and set strings used for filenaming @@ -110,55 +92,71 @@ if [ 
"${Parcellation}" = "NONE" ]; then runDense=true; ParcellationString="" Extension="dtseries.nii" + log_Msg "MAIN: DETERMINE_ANALYSES: Dense Analysis requested" fi # Determine whether to run Volume, and set strings used for filenaming if [ "$VolumeBasedProcessing" = "YES" ] ; then runVolume=true; + log_Msg "MAIN: DETERMINE_ANALYSES: Volume Analysis requested" fi -log_Msg "ParcellationString: ${ParcellationString}" -log_Msg "Extension: ${Extension}" +##### SET_NAME_STRINGS: smoothing and filtering string variables used for file naming ##### +SmoothingString="_s${FinalSmoothingFWHM}" +TemporalFilterString="_hp""$TemporalFilter" +# Set variables used for different registration procedures +if [ "${RegName}" != "NONE" ] ; then + RegString="_${RegName}" +else + RegString="" +fi + +log_Msg "MAIN: SET_NAME_STRINGS: SmoothingString: ${SmoothingString}" +log_Msg "MAIN: SET_NAME_STRINGS: TemporalFilterString: ${TemporalFilterString}" +log_Msg "MAIN: SET_NAME_STRINGS: RegString: ${RegString}" +log_Msg "MAIN: SET_NAME_STRINGS: ParcellationString: ${ParcellationString}" +log_Msg "MAIN: SET_NAME_STRINGS: Extension: ${Extension}" -##### DETERMINE TR AND SCAN LENGTH ##### +##### IMAGE_INFO: DETERMINE TR AND SCAN LENGTH ##### +# Caution: Reading information for Parcellated and Volume analyses from original CIFTI file # Extract TR information from input time series files -TR_vol=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.${Extension} -no-map-info -only-step-interval` -log_Msg "TR_vol: ${TR_vol}" +TR_vol=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii -no-map-info -only-step-interval` +log_Msg "MAIN: IMAGE_INFO: TR_vol: ${TR_vol}" # Extract number of time points in CIFTI time series file -npts=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.${Extension} -no-map-info -only-number-of-maps` -log_Msg "npts: ${npts}" +npts=`${CARET7DIR}/wb_command -file-information ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii -no-map-info -only-number-of-maps` +log_Msg "MAIN: IMAGE_INFO: npts: ${npts}" -##### MAKE DESIGN FILES ##### +##### MAKE_DESIGNS: MAKE DESIGN FILES ##### # Create output .feat directory ($FEATDir) for this analysis FEATDir="${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}.feat" -log_Msg "FEATDir: ${FEATDir}" +log_Msg "MAIN: MAKE_DESIGNS: FEATDir: ${FEATDir}" if [ -e ${FEATDir} ] ; then - rm -r ${FEATDir} - mkdir ${FEATDir} + rm -r ${FEATDir} + mkdir ${FEATDir} else - mkdir -p ${FEATDir} + mkdir -p ${FEATDir} fi ### Edit fsf file to record the parameters used in this analysis # Copy template fsf file into $FEATDir -log_Msg "Copying fsf file to .feat directory" +log_Msg "MAIN: MAKE_DESIGNS: Copying fsf file to .feat directory" cp ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefsfName}_hp200_s4_level1.fsf ${FEATDir}/design.fsf # Change the highpass filter string to the desired highpass filter -log_Msg "Change design.fsf: Set highpass filter string to the desired highpass filter to ${TemporalFilter}" +log_Msg "MAIN: MAKE_DESIGNS: Change design.fsf: Set highpass filter string to the desired highpass filter to ${TemporalFilter}" sed -i -e "s|set fmri(paradigm_hp) \"200\"|set fmri(paradigm_hp) \"${TemporalFilter}\"|g" 
${FEATDir}/design.fsf # Change smoothing to be equal to additional smoothing in FSF file -log_Msg "Change design.fsf: Set smoothing to be equal to final smoothing to ${FinalSmoothingFWHM}" +log_Msg "MAIN: MAKE_DESIGNS: Change design.fsf: Set smoothing to be equal to final smoothing to ${FinalSmoothingFWHM}" sed -i -e "s|set fmri(smooth) \"4\"|set fmri(smooth) \"${FinalSmoothingFWHM}\"|g" ${FEATDir}/design.fsf # Change output directory name to match total smoothing and highpass -log_Msg "Change design.fsf: Set output directory name to ${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}" +log_Msg "MAIN: MAKE_DESIGNS: Change design.fsf: Change string in output directory name to ${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}" sed -i -e "s|_hp200_s4|${TemporalFilterString}${SmoothingString}_level1${RegString}${ParcellationString}|g" ${FEATDir}/design.fsf # find current value for npts in template.fsf @@ -166,16 +164,16 @@ fsfnpts=`grep "set fmri(npts)" ${FEATDir}/design.fsf | cut -d " " -f 3 | sed 's| # Ensure number of time points in fsf matches time series image if [ "$fsfnpts" -eq "$npts" ] ; then - log_Msg "Change design.fsf: Scan length matches number of timepoints in template.fsf: ${fsfnpts}" + log_Msg "MAIN: MAKE_DESIGNS: Change design.fsf: Scan length matches number of timepoints in template.fsf: ${fsfnpts}" else - log_Msg "Change design.fsf: Warning! Scan length does not match template.fsf!" - log_Msg "Change design.fsf: Warning! Changing Number of Timepoints in fsf (""${fsfnpts}"") to match time series image (""${npts}"")" + log_Msg "MAIN: MAKE_DESIGNS: Change design.fsf: Warning! Scan length does not match template.fsf!" + log_Msg "MAIN: MAKE_DESIGNS: Change design.fsf: Warning! Changing Number of Timepoints in fsf (""${fsfnpts}"") to match time series image (""${npts}"")" sed -i -e "s|set fmri(npts) \"\?${fsfnpts}\"\?|set fmri(npts) ${npts}|g" ${FEATDir}/design.fsf fi ### Use fsf to create additional design files used by film_gls -log_Msg "Create design files, model confounds if desired" +log_Msg "MAIN: MAKE_DESIGNS: Create design files, model confounds if desired" # Determine if there is a confound matrix text file (e.g., output of fsl_motion_outliers) confound_matrix=""; if [ "$Confound" != "NONE" ] ; then @@ -199,17 +197,18 @@ if [ -e "${DesignfContrasts}" ] ; then fi -##### APPLY SPATIAL SMOOTHING (or PARCELLATION ##### +##### SMOOTH_OR_PARCELLATE: APPLY SPATIAL SMOOTHING (or parcellation) ##### +### Parcellate data if a Parcellation was provided # Parcellation may be better than adding spatial smoothing to dense time series. # Parcellation increases sensitivity and statistical power, but avoids blurring signal # across region boundaries into adjacent, non-activated regions. 
- -### Parcellate data if a Parcellation was provided -log_Msg "Parcellate data if a Parcellation was provided" +log_Msg "MAIN: SMOOTH_OR_PARCELLATE: PARCELLATE: Parcellate data if a Parcellation was provided" if $runParcellated; then - log_Msg "Parcellating data" - ${CARET7DIR}/wb_command -cifti-parcellate ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${ParcellationFile} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}${ParcellationString}.ptseries.nii + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: PARCELLATE: Parcellating data" + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: PARCELLATE: Notice: currently parcellated time series has $SmoothingString in file name, but no additional smoothing was applied!" + # SmoothingString in parcellated filename allows subsequent commands to work for either dtseries or ptseries + ${CARET7DIR}/wb_command -cifti-parcellate ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${ParcellationFile} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}.ptseries.nii fi ### Apply spatial smoothing to CIFTI dense analysis @@ -220,16 +219,16 @@ if $runDense ; then # by the original smoothing applied earlier AdditionalSmoothingFWHM=`echo "sqrt(( $FinalSmoothingFWHM ^ 2 ) - ( $OriginalSmoothingFWHM ^ 2 ))" | bc -l` AdditionalSigma=`echo "$AdditionalSmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l` - log_Msg "AdditionalSmoothingFWHM: ${AdditionalSmoothingFWHM}" - log_Msg "AdditionalSigma: ${AdditionalSigma}" - log_Msg "Applying additional surface smoothing to CIFTI Dense data" + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_CIFTI: AdditionalSmoothingFWHM: ${AdditionalSmoothingFWHM}" + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_CIFTI: AdditionalSigma: ${AdditionalSigma}" + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_CIFTI: Applying additional surface smoothing to CIFTI Dense data" ${CARET7DIR}/wb_command -cifti-smoothing ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${AdditionalSigma} ${AdditionalSigma} COLUMN ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}.dtseries.nii -left-surface ${DownSampleFolder}/${Subject}.L.midthickness.${LowResMesh}k_fs_LR.surf.gii -right-surface ${DownSampleFolder}/${Subject}.R.midthickness.${LowResMesh}k_fs_LR.surf.gii else if [ "$FinalSmoothingFWHM" -eq "$OriginalSmoothingFWHM" ]; then - log_Msg "No additional surface smoothing requested for CIFTI Dense data" + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_CIFTI: No additional surface smoothing requested for CIFTI Dense data" else - log_Msg "WARNING: For CIFTI Dense data, the surface smoothing requested \($FinalSmoothingFWHM\) is LESS than the surface smoothing already applied \(${OriginalSmoothingFWHM}\)." - log_Msg "Continuing analysis with ${OriginalSmoothingFWHM} of total surface smoothing." + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_CIFTI: WARNING: For CIFTI Dense data, the surface smoothing requested \($FinalSmoothingFWHM\) is LESS than the surface smoothing already applied \(${OriginalSmoothingFWHM}\)." + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_CIFTI: Continuing analysis with ${OriginalSmoothingFWHM} of total surface smoothing." 
fi cp ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${RegString}.dtseries.nii ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}.dtseries.nii fi @@ -237,71 +236,71 @@ fi ### Apply spatial smoothing to volume analysis if $runVolume ; then - log_Msg "Standard NIFTI Volume-based Processsing" - - #Add edge-constrained volume smoothing - log_Msg "Add edge-constrained volume smoothing" - FinalSmoothingSigma=`echo "$FinalSmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l` - InputfMRI=${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName} - InputSBRef=${InputfMRI}_SBRef - fslmaths ${InputSBRef} -bin ${FEATDir}/mask_orig - fslmaths ${FEATDir}/mask_orig -kernel gauss ${FinalSmoothingSigma} -fmean ${FEATDir}/mask_orig_weight -odt float - fslmaths ${InputfMRI} -kernel gauss ${FinalSmoothingSigma} -fmean \ - -div ${FEATDir}/mask_orig_weight -mas ${FEATDir}/mask_orig \ - ${FEATDir}/${LevelOnefMRIName}${SmoothingString} -odt float - - #Add volume dilation - # - # For some subjects, FreeSurfer-derived brain masks (applied to the time - # series data in IntensityNormalization.sh as part of - # GenericfMRIVolumeProcessingPipeline.sh) do not extend to the edge of brain - # in the MNI152 space template. This is due to the limitations of volume-based - # registration. So, to avoid a lack of coverage in a group analysis around the - # penumbra of cortex, we will add a single dilation step to the input prior to - # creating the Level1 maps. - # - # Ideally, we would condition this dilation on the resolution of the fMRI - # data. Empirically, a single round of dilation gives very good group - # coverage of MNI brain for the 2 mm resolution of HCP fMRI data. So a single - # dilation is what we use below. - # - # Note that for many subjects, this dilation will result in signal extending - # BEYOND the limits of brain in the MNI152 template. However, that is easily - # fixed by masking with the MNI space brain template mask if so desired. - # - # The specific implementation involves: - # a) Edge-constrained spatial smoothing on the input fMRI time series (and masking - # that back to the original mask). This step was completed above. - # b) Spatial dilation of the input fMRI time series, followed by edge constrained smoothing - # c) Adding the voxels from (b) that are NOT part of (a) into (a). - # - # The motivation for this implementation is that: - # 1) Identical voxel-wise results are obtained within the original mask. So, users - # that desire the original ("tight") FreeSurfer-defined brain mask (which is - # implicitly represented as the non-zero voxels in the InputSBRef volume) can - # mask back to that if they chose, with NO impact on the voxel-wise results. - # 2) A simpler possible approach of just dilating the result of step (a) results in - # an unnatural pattern of dark/light/dark intensities at the edge of brain, - # whereas the combination of steps (b) and (c) yields a more natural looking - # transition of intensities in the added voxels. 
-	log_Msg "Add volume dilation"
-
-	# Dilate the original BOLD time series, then do (edge-constrained) smoothing
-	fslmaths ${FEATDir}/mask_orig -dilM -bin ${FEATDir}/mask_dilM
-	fslmaths ${FEATDir}/mask_dilM \
-		-kernel gauss ${FinalSmoothingSigma} -fmean ${FEATDir}/mask_dilM_weight -odt float
-	fslmaths ${InputfMRI} -dilM -kernel gauss ${FinalSmoothingSigma} -fmean \
-		-div ${FEATDir}/mask_dilM_weight -mas ${FEATDir}/mask_dilM \
-		${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString} -odt float
-
-	# Take just the additional "rim" voxels from the dilated then smoothed time series, and add them
-	# into the smoothed time series (that didn't have any dilation)
-	SmoothedDilatedResultFile=${FEATDir}/${LevelOnefMRIName}${SmoothingString}_dilMrim
-	fslmaths ${FEATDir}/mask_orig -binv ${FEATDir}/mask_orig_inv
-	fslmaths ${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString} \
-		-mas ${FEATDir}/mask_orig_inv \
-		-add ${FEATDir}/${LevelOnefMRIName}${SmoothingString} \
-		${SmoothedDilatedResultFile}
+	log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_NIFTI: Standard NIFTI Volume-based Processing"
+
+	#Add edge-constrained volume smoothing
+	log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_NIFTI: Add edge-constrained volume smoothing"
+	FinalSmoothingSigma=`echo "$FinalSmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l`
+	InputfMRI=${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}
+	InputSBRef=${InputfMRI}_SBRef
+	fslmaths ${InputSBRef} -bin ${FEATDir}/mask_orig
+	fslmaths ${FEATDir}/mask_orig -kernel gauss ${FinalSmoothingSigma} -fmean ${FEATDir}/mask_orig_weight -odt float
+	fslmaths ${InputfMRI} -kernel gauss ${FinalSmoothingSigma} -fmean \
+		-div ${FEATDir}/mask_orig_weight -mas ${FEATDir}/mask_orig \
+		${FEATDir}/${LevelOnefMRIName}${SmoothingString} -odt float
+
+	#Add volume dilation
+	#
+	# For some subjects, FreeSurfer-derived brain masks (applied to the time
+	# series data in IntensityNormalization.sh as part of
+	# GenericfMRIVolumeProcessingPipeline.sh) do not extend to the edge of brain
+	# in the MNI152 space template. This is due to the limitations of volume-based
+	# registration. So, to avoid a lack of coverage in a group analysis around the
+	# penumbra of cortex, we will add a single dilation step to the input prior to
+	# creating the Level1 maps.
+	#
+	# Ideally, we would condition this dilation on the resolution of the fMRI
+	# data. Empirically, a single round of dilation gives very good group
+	# coverage of MNI brain for the 2 mm resolution of HCP fMRI data. So a single
+	# dilation is what we use below.
+	#
+	# Note that for many subjects, this dilation will result in signal extending
+	# BEYOND the limits of brain in the MNI152 template. However, that is easily
+	# fixed by masking with the MNI space brain template mask if so desired.
+	#
+	# The specific implementation involves:
+	# a) Edge-constrained spatial smoothing on the input fMRI time series (and masking
+	#    that back to the original mask). This step was completed above.
+	# b) Spatial dilation of the input fMRI time series, followed by edge-constrained smoothing
+	# c) Adding the voxels from (b) that are NOT part of (a) into (a).
+	#
+	# The motivation for this implementation is that:
+	# 1) Identical voxel-wise results are obtained within the original mask. So, users
+	#    that desire the original ("tight") FreeSurfer-defined brain mask (which is
+	#    implicitly represented as the non-zero voxels in the InputSBRef volume) can
+	#    mask back to that if they choose, with NO impact on the voxel-wise results.
+ # 2) A simpler possible approach of just dilating the result of step (a) results in + # an unnatural pattern of dark/light/dark intensities at the edge of brain, + # whereas the combination of steps (b) and (c) yields a more natural looking + # transition of intensities in the added voxels. + log_Msg "MAIN: SMOOTH_OR_PARCELLATE: SMOOTH_NIFTI: Add volume dilation" + + # Dilate the original BOLD time series, then do (edge-constrained) smoothing + fslmaths ${FEATDir}/mask_orig -dilM -bin ${FEATDir}/mask_dilM + fslmaths ${FEATDir}/mask_dilM \ + -kernel gauss ${FinalSmoothingSigma} -fmean ${FEATDir}/mask_dilM_weight -odt float + fslmaths ${InputfMRI} -dilM -kernel gauss ${FinalSmoothingSigma} -fmean \ + -div ${FEATDir}/mask_dilM_weight -mas ${FEATDir}/mask_dilM \ + ${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString} -odt float + + # Take just the additional "rim" voxels from the dilated then smoothed time series, and add them + # into the smoothed time series (that didn't have any dilation) + SmoothedDilatedResultFile=${FEATDir}/${LevelOnefMRIName}${SmoothingString}_dilMrim + fslmaths ${FEATDir}/mask_orig -binv ${FEATDir}/mask_orig_inv + fslmaths ${FEATDir}/${LevelOnefMRIName}_dilM${SmoothingString} \ + -mas ${FEATDir}/mask_orig_inv \ + -add ${FEATDir}/${LevelOnefMRIName}${SmoothingString} \ + ${SmoothedDilatedResultFile} fi # end Volume spatial smoothing @@ -313,7 +312,7 @@ fi # end Volume spatial smoothing # Issue 2: fslmaths -bptf removes timeseries mean (for FSL 5.0.7 onward). film_gls expects mean in image. # So, save the mean to file, then add it back after -bptf. if [[ $runParcellated == true || $runDense == true ]]; then - log_Msg "Add temporal filtering" + log_Msg "MAIN: TEMPORAL_FILTER: Add temporal filtering to CIFTI file" # Convert CIFTI to "fake" NIFTI ${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}.${Extension} ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz # Save mean image @@ -332,7 +331,7 @@ fi if $runVolume; then #Add temporal filtering to the output from above - log_Msg "Add temporal filtering" + log_Msg "MAIN: TEMPORAL_FILTER: Add temporal filtering to NIFTI file" # Temporal filtering is conducted by fslmaths. # fslmaths -bptf removes timeseries mean (for FSL 5.0.7 onward), which is expected by film_gls. # So, save the mean to file, then add it back after -bptf. 
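	# Illustrative arithmetic (hypothetical values, not part of the pipeline): the high-pass
	# sigma handed to fslmaths -bptf below is expressed in volumes, hp_sigma = 0.5 * TemporalFilter / TR_vol,
	# and the second -bptf argument of -1 disables low-pass filtering. For example, with
	# TemporalFilter=200 (seconds) and TR_vol=0.72 (seconds):
	#   echo "0.5 * 200 / 0.72" | bc -l     # -> 138.88..., i.e. a half-width of roughly 139 volumes
	# The -Tmean / -bptf / -add sequence that follows then restores the temporal mean removed by -bptf.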
@@ -341,8 +340,8 @@ if $runVolume; then fslmaths ${SmoothedDilatedResultFile} -Tmean ${SmoothedDilatedResultFile}_mean hp_sigma=`echo "0.5 * $TemporalFilter / $TR_vol" | bc -l` fslmaths ${SmoothedDilatedResultFile} -bptf ${hp_sigma} -1 \ - -add ${SmoothedDilatedResultFile}_mean \ - ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}.nii.gz + -add ${SmoothedDilatedResultFile}_mean \ + ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}.nii.gz fi @@ -350,68 +349,68 @@ fi # Run CIFTI Dense Grayordinates Analysis (if requested) if $runDense ; then - # Dense Grayordinates Processing - log_Msg "Dense Grayordinates Processing" - #Split into surface and volume - log_Msg "Split into surface and volume" - ${CARET7DIR}/wb_command -cifti-separate-all ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}.dtseries.nii -volume ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz -left ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.L.${LowResMesh}k_fs_LR.func.gii -right ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.R.${LowResMesh}k_fs_LR.func.gii - - #Run film_gls on subcortical volume data - log_Msg "Run film_gls on subcortical volume data" - film_gls --rn=${FEATDir}/SubcorticalVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric - rm ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz - - #Run film_gls on cortical surface data - log_Msg "Run film_gls on cortical surface data" - for Hemisphere in L R ; do - #Prepare for film_gls - log_Msg "Prepare for film_gls" - ${CARET7DIR}/wb_command -metric-dilate ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii ${DownSampleFolder}/${Subject}.${Hemisphere}.midthickness.${LowResMesh}k_fs_LR.surf.gii 50 ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii -nearest - - #Run film_gls on surface data - log_Msg "Run film_gls on surface data" - film_gls --rn=${FEATDir}/${Hemisphere}_SurfaceStats --sa --ms=15 --epith=5 --in2=${DownSampleFolder}/${Subject}.${Hemisphere}.midthickness.${LowResMesh}k_fs_LR.surf.gii --in=${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --mode=surface - rm ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii - done - - # Merge Cortical Surface and Subcortical Volume into Grayordinates - log_Msg "Merge Cortical Surface and Subcortical Volume into Grayordinates" - mkdir ${FEATDir}/GrayordinatesStats - cat ${FEATDir}/SubcorticalVolumeStats/dof > ${FEATDir}/GrayordinatesStats/dof - cat ${FEATDir}/SubcorticalVolumeStats/logfile > ${FEATDir}/GrayordinatesStats/logfile - cat ${FEATDir}/L_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile - cat 
${FEATDir}/R_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile - - for Subcortical in ${FEATDir}/SubcorticalVolumeStats/*nii.gz ; do - File=$( basename $Subcortical .nii.gz ); - ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${FEATDir}/GrayordinatesStats/${File}.dtseries.nii -volume $Subcortical $ROIsFolder/Atlas_ROIs.${GrayordinatesResolution}.nii.gz -left-metric ${FEATDir}/L_SurfaceStats/${File}.func.gii -roi-left ${DownSampleFolder}/${Subject}.L.atlasroi.${LowResMesh}k_fs_LR.shape.gii -right-metric ${FEATDir}/R_SurfaceStats/${File}.func.gii -roi-right ${DownSampleFolder}/${Subject}.R.atlasroi.${LowResMesh}k_fs_LR.shape.gii - done - rm -r ${FEATDir}/SubcorticalVolumeStats ${FEATDir}/L_SurfaceStats ${FEATDir}/R_SurfaceStats + # Dense Grayordinates Processing + log_Msg "MAIN: RUN_GLM: Dense Grayordinates Analysis" + #Split into surface and volume + log_Msg "MAIN: RUN_GLM: Split into surface and volume" + ${CARET7DIR}/wb_command -cifti-separate-all ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}.dtseries.nii -volume ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz -left ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.L.${LowResMesh}k_fs_LR.func.gii -right ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.R.${LowResMesh}k_fs_LR.func.gii + + #Run film_gls on subcortical volume data + log_Msg "MAIN: RUN_GLM: Run film_gls on subcortical volume data" + film_gls --rn=${FEATDir}/SubcorticalVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric + rm ${FEATDir}/${LevelOnefMRIName}_AtlasSubcortical${TemporalFilterString}${SmoothingString}.nii.gz + + #Run film_gls on cortical surface data + log_Msg "MAIN: RUN_GLM: Run film_gls on cortical surface data" + for Hemisphere in L R ; do + #Prepare for film_gls + log_Msg "MAIN: RUN_GLM: Prepare for film_gls" + ${CARET7DIR}/wb_command -metric-dilate ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii ${DownSampleFolder}/${Subject}.${Hemisphere}.midthickness.${LowResMesh}k_fs_LR.surf.gii 50 ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii -nearest + + #Run film_gls on surface data + log_Msg "MAIN: RUN_GLM: Run film_gls on surface data" + film_gls --rn=${FEATDir}/${Hemisphere}_SurfaceStats --sa --ms=15 --epith=5 --in2=${DownSampleFolder}/${Subject}.${Hemisphere}.midthickness.${LowResMesh}k_fs_LR.surf.gii --in=${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --mode=surface + rm ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi_dil.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii ${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}${RegString}.atlasroi.${Hemisphere}.${LowResMesh}k_fs_LR.func.gii + done + + # Merge Cortical Surface and Subcortical Volume into Grayordinates + log_Msg "MAIN: RUN_GLM: Merge Cortical Surface and Subcortical Volume into Grayordinates" + mkdir ${FEATDir}/GrayordinatesStats + cat 
${FEATDir}/SubcorticalVolumeStats/dof > ${FEATDir}/GrayordinatesStats/dof + cat ${FEATDir}/SubcorticalVolumeStats/logfile > ${FEATDir}/GrayordinatesStats/logfile + cat ${FEATDir}/L_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile + cat ${FEATDir}/R_SurfaceStats/logfile >> ${FEATDir}/GrayordinatesStats/logfile + + for Subcortical in ${FEATDir}/SubcorticalVolumeStats/*nii.gz ; do + File=$( basename $Subcortical .nii.gz ); + ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${FEATDir}/GrayordinatesStats/${File}.dtseries.nii -volume $Subcortical $ROIsFolder/Atlas_ROIs.${GrayordinatesResolution}.nii.gz -left-metric ${FEATDir}/L_SurfaceStats/${File}.func.gii -roi-left ${DownSampleFolder}/${Subject}.L.atlasroi.${LowResMesh}k_fs_LR.shape.gii -right-metric ${FEATDir}/R_SurfaceStats/${File}.func.gii -roi-right ${DownSampleFolder}/${Subject}.R.atlasroi.${LowResMesh}k_fs_LR.shape.gii + done + rm -r ${FEATDir}/SubcorticalVolumeStats ${FEATDir}/L_SurfaceStats ${FEATDir}/R_SurfaceStats fi # Run CIFTI Parcellated Analysis (if requested) if $runParcellated ; then - # Parcellated Processing - log_Msg "Parcellated Processing" - # Convert CIFTI to "fake" NIFTI - ${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${Extension} ${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz - # Now run film_gls on the fakeNIFTI file - film_gls --rn=${FEATDir}/ParcellatedStats --in=${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric - # Remove "fake" NIFTI time series file - rm ${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz - # Convert "fake" NIFTI output files (copes, varcopes, zstats) back to CIFTI - templateCIFTI=${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.ptseries.nii - for fakeNIFTI in `ls ${FEATDir}/ParcellatedStats/*.nii.gz` ; do - CIFTI=$( echo $fakeNIFTI | sed -e "s|.nii.gz|.${Extension}|" ); - ${CARET7DIR}/wb_command -cifti-convert -from-nifti $fakeNIFTI $templateCIFTI $CIFTI -reset-timepoints 1 1 - rm $fakeNIFTI; - done + # Parcellated Processing + log_Msg "MAIN: RUN_GLM: Parcellated Analysis" + # Convert CIFTI to "fake" NIFTI + ${CARET7DIR}/wb_command -cifti-convert -to-nifti ${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${Extension} ${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz + # Now run film_gls on the fakeNIFTI file + film_gls --rn=${FEATDir}/ParcellatedStats --in=${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1 --mode=volumetric + # Remove "fake" NIFTI time series file + rm ${FEATDir}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}_FAKENIFTI.nii.gz + # Convert "fake" NIFTI output files (copes, varcopes, zstats) back to CIFTI + 
templateCIFTI=${ResultsFolder}/${LevelOnefMRIName}/${LevelOnefMRIName}_Atlas${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.ptseries.nii + for fakeNIFTI in `ls ${FEATDir}/ParcellatedStats/*.nii.gz` ; do + CIFTI=$( echo $fakeNIFTI | sed -e "s|.nii.gz|.${Extension}|" ); + ${CARET7DIR}/wb_command -cifti-convert -from-nifti $fakeNIFTI $templateCIFTI $CIFTI -reset-timepoints 1 1 + rm $fakeNIFTI; + done fi # Standard NIFTI Volume-based Processsing### if $runVolume ; then - log_Msg "Standard NIFTI Volume-based Processsing" - log_Msg "Run film_gls on volume data" + log_Msg "MAIN: RUN_GLM: Standard NIFTI Volume Analysis" + log_Msg "MAIN: RUN_GLM: Run film_gls on volume data" film_gls --rn=${FEATDir}/StandardVolumeStats --sa --ms=5 --in=${FEATDir}/${LevelOnefMRIName}${TemporalFilterString}${SmoothingString}.nii.gz --pd=${DesignMatrix} --con=${DesignContrasts} ${ExtraArgs} --thr=1000 # Cleanup @@ -421,4 +420,4 @@ if $runVolume ; then rm -f ${SmoothedDilatedResultFile}*.nii.gz fi -log_Msg "Complete" +log_Msg "MAIN: Complete" diff --git a/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh b/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh index 126bf34cd..2f5804d84 100755 --- a/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh +++ b/TaskfMRIAnalysis/scripts/TaskfMRILevel2.sh @@ -9,17 +9,15 @@ source ${HCPPIPEDIR}/global/scripts/fsl_version.shlib # Function for getting FSL show_tool_versions() { # Show HCP pipelines version - log_Msg "Showing HCP Pipelines version" + log_Msg "TOOL_VERSIONS: Showing HCP Pipelines version" cat ${HCPPIPEDIR}/version.txt # Show wb_command version - log_Msg "Showing Connectome Workbench (wb_command) version" + log_Msg "TOOL_VERSIONS: Showing Connectome Workbench (wb_command) version" ${CARET7DIR}/wb_command -version # Show fsl version - log_Msg "Showing FSL version" fsl_version_get fsl_ver - log_Msg "FSL version: ${fsl_ver}" } @@ -43,22 +41,22 @@ Parcellation="${13}" # Log how the script was launched g_script_name=`basename ${0}` log_SetToolName "${g_script_name}" -log_Msg "${g_script_name} arguments: $@" +log_Msg "READ_ARGS: ${g_script_name} arguments: $@" # Log variables parsed from command line arguments -log_Msg "Subject: ${Subject}" -log_Msg "ResultsFolder: ${ResultsFolder}" -log_Msg "DownSampleFolder: ${DownSampleFolder}" -log_Msg "LevelOnefMRINames: ${LevelOnefMRINames}" -log_Msg "LevelOnefsfNames: ${LevelOnefsfNames}" -log_Msg "LevelTwofMRIName: ${LevelTwofMRIName}" -log_Msg "LevelTwofsfName: ${LevelTwofsfName}" -log_Msg "LowResMesh: ${LowResMesh}" -log_Msg "FinalSmoothingFWHM: ${FinalSmoothingFWHM}" -log_Msg "TemporalFilter: ${TemporalFilter}" -log_Msg "VolumeBasedProcessing: ${VolumeBasedProcessing}" -log_Msg "RegName: ${RegName}" -log_Msg "Parcellation: ${Parcellation}" +log_Msg "READ_ARGS: Subject: ${Subject}" +log_Msg "READ_ARGS: ResultsFolder: ${ResultsFolder}" +log_Msg "READ_ARGS: DownSampleFolder: ${DownSampleFolder}" +log_Msg "READ_ARGS: LevelOnefMRINames: ${LevelOnefMRINames}" +log_Msg "READ_ARGS: LevelOnefsfNames: ${LevelOnefsfNames}" +log_Msg "READ_ARGS: LevelTwofMRIName: ${LevelTwofMRIName}" +log_Msg "READ_ARGS: LevelTwofsfName: ${LevelTwofsfName}" +log_Msg "READ_ARGS: LowResMesh: ${LowResMesh}" +log_Msg "READ_ARGS: FinalSmoothingFWHM: ${FinalSmoothingFWHM}" +log_Msg "READ_ARGS: TemporalFilter: ${TemporalFilter}" +log_Msg "READ_ARGS: VolumeBasedProcessing: ${VolumeBasedProcessing}" +log_Msg "READ_ARGS: RegName: ${RegName}" +log_Msg "READ_ARGS: Parcellation: ${Parcellation}" # Log versions of tools used by this script show_tool_versions @@ -68,56 
+66,61 @@ show_tool_versions ##### DETERMINE ANALYSES TO RUN (DENSE, PARCELLATED, VOLUME) ##### # initialize run variables -runParcellated=false; runVolume=false; runDense=false; Analyses=""; +runParcellated=false; runVolume=false; runDense=false; +Analyses=""; ExtensionList=""; ScalarExtensionList=""; # Determine whether to run Parcellated, and set strings used for filenaming if [ "${Parcellation}" != "NONE" ] ; then - # Run Parcellated Analyses - runParcellated=true; - ParcellationString="_${Parcellation}" - Extension="ptseries.nii" - ScalarExtension="pscalar.nii" - Analyses="${Analyses}ParcellatedStats "; # space character at end to separate multiple analyses + # Run Parcellated Analyses + runParcellated=true; + ParcellationString="_${Parcellation}" + ExtensionList="${ExtensionList}ptseries.nii " + ScalarExtensionList="${ScalarExtensionList}pscalar.nii " + Analyses="${Analyses}ParcellatedStats "; # space character at end to separate multiple analyses + log_Msg "MAIN: DETERMINE_ANALYSES: Parcellated Analysis requested" fi # Determine whether to run Dense, and set strings used for filenaming if [ "${Parcellation}" = "NONE" ]; then - # Run Dense Analyses - runDense=true; - ParcellationString="" - Extension="dtseries.nii" - ScalarExtension="dscalar.nii" - Analyses="${Analyses}GrayordinatesStats "; # space character at end to separate multiple analyses + # Run Dense Analyses + runDense=true; + ParcellationString="" + ExtensionList="${ExtensionList}dtseries.nii " + ScalarExtensionList="${ScalarExtensionList}dscalar.nii " + Analyses="${Analyses}GrayordinatesStats "; # space character at end to separate multiple analyses + log_Msg "MAIN: DETERMINE_ANALYSES: Dense Analysis requested" fi # Determine whether to run Volume, and set strings used for filenaming if [ $VolumeBasedProcessing = "YES" ] ; then runVolume=true; - Extension=".nii.gz" + ExtensionList="${ExtensionList}nii.gz " + ScalarExtensionList="${ScalarExtensionList}volume.dscalar.nii " Analyses="${Analyses}StandardVolumeStats "; # space character at end to separate multiple analyses + log_Msg "MAIN: DETERMINE_ANALYSES: Volume Analysis requested" fi -log_Msg "Analyses: ${Analyses}" -log_Msg "ParcellationString: ${ParcellationString}" -log_Msg "Extension: ${Extension}" -log_Msg "ScalarExtension: ${ScalarExtension}" +log_Msg "MAIN: DETERMINE_ANALYSES: Analyses: ${Analyses}" +log_Msg "MAIN: DETERMINE_ANALYSES: ParcellationString: ${ParcellationString}" +log_Msg "MAIN: DETERMINE_ANALYSES: ExtensionList: ${ExtensionList}" +log_Msg "MAIN: DETERMINE_ANALYSES: ScalarExtensionList: ${ScalarExtensionList}" ##### SET VARIABLES REQUIRED FOR FILE NAMING ##### ### Set smoothing and filtering string variables used for file naming -SmoothingString="_s${FinalSmoothingFWHM}" -TemporalFilterString="_hp""$TemporalFilter" -log_Msg "SmoothingString: ${SmoothingString}" -log_Msg "TemporalFilterString: ${TemporalFilterString}" -### Set variables used for different registration procedures +# Set variables used for different registration procedures if [ "${RegName}" != "NONE" ] ; then - RegString="_${RegName}" + RegString="_${RegName}" else - RegString="" + RegString="" fi -log_Msg "RegString: ${RegString}" +SmoothingString="_s${FinalSmoothingFWHM}" +TemporalFilterString="_hp""$TemporalFilter" +log_Msg "MAIN: SET_NAME_STRINGS: SmoothingString: ${SmoothingString}" +log_Msg "MAIN: SET_NAME_STRINGS: TemporalFilterString: ${TemporalFilterString}" +log_Msg "MAIN: SET_NAME_STRINGS: RegString: ${RegString}" ### Figure out where the Level1 .feat directories are located # 
Change '@' delimited arguments to space-delimited lists for use in for loops @@ -164,8 +167,11 @@ cd $OLDPWD; # Go back to previous directory using bash built-in $OLDPWD ### Loop over Level 2 Analyses requested log_Msg "Loop over Level 2 Analyses requested: ${Analyses}" +analysisCounter=1; for Analysis in ${Analyses} ; do log_Msg "Run Analysis: ${Analysis}" + Extension=`echo $ExtensionList | cut -d' ' -f $analysisCounter`; + ScalarExtension=`echo $ScalarExtensionList | cut -d' ' -f $analysisCounter`; ### Exit if cope files are not present in Level 1 folders fileCount=$( ls ${FirstFolder}/${Analysis}/cope1.${Extension} 2>/dev/null | wc -l ); @@ -276,69 +282,71 @@ for Analysis in ${Analyses} ; do # convert flameo input files for review: ${LevelTwoFEATDir}/${Analysis}/*.nii.gz # convert flameo output files for each cope: ${LevelTwoFEATDir}/${Analysis}/cope*.feat/*.nii.gz - for fakeNIFTI in ${LevelTwoFEATDir}/${Analysis}/*.nii.gz ${LevelTwoFEATDir}/${Analysis}/cope*.feat/*.nii.gz; do + for fakeNIFTI in ${LevelTwoFEATDir}/${Analysis}/*.nii.gz ${LevelTwoFEATDir}/${Analysis}/cope*.feat/*.nii.gz; do CIFTI=$( echo $fakeNIFTI | sed -e "s|.nii.gz|.${Extension}|" ); ${CARET7DIR}/wb_command -cifti-convert -from-nifti $fakeNIFTI $CIFTItemplate $CIFTI -reset-timepoints 1 1 rm $fakeNIFTI done fi + + ### Generate Files for Viewing + log_Msg "Generate Files for Viewing" + # Initialize strings used for fslmerge command + zMergeSTRING="" + bMergeSTRING="" + touch ${LevelTwoFEATDir}/Contrasttemp.txt + [ "${Analysis}" = "StandardVolumeStats" ] && touch ${LevelTwoFEATDir}/wbtemp.txt + [ -e "${LevelTwoFEATDir}/Contrasts.txt" ] && rm ${LevelTwoFEATDir}/Contrasts.txt + + # Loop over contrasts to identify cope and zstat files to merge into wb_view scalars + copeCounter=1; + while [ "$copeCounter" -le "${NumContrasts}" ] ; do + Contrast=`echo $ContrastNames | cut -d " " -f $copeCounter` + # Contrasts.txt is used to store the contrast names for this analysis + echo ${Contrast} >> ${LevelTwoFEATDir}/Contrasts.txt + # Contrasttemp.txt is a temporary file used to name the maps in the CIFTI scalar file + echo "${Subject}_${LevelTwofsfName}_level2_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}" >> ${LevelTwoFEATDir}/Contrasttemp.txt + + if [ "${Analysis}" = "StandardVolumeStats" ] ; then + + ### Make temporary dtseries files to convert into scalar files + # Converting volume to dense timeseries requires a volume label file + echo "OTHER" >> ${LevelTwoFEATDir}/wbtemp.txt + echo "1 255 255 255 255" >> ${LevelTwoFEATDir}/wbtemp.txt + ${CARET7DIR}/wb_command -volume-label-import ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz ${LevelTwoFEATDir}/wbtemp.txt ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -discard-others -unlabeled-value 0 + rm ${LevelTwoFEATDir}/wbtemp.txt + + # Convert temporary volume CIFTI timeseries files + ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.volume.dtseries.nii -volume ${LevelTwoFEATDir}/StandardVolumeStats/cope${copeCounter}.feat/zstat1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 + ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.volume.dtseries.nii -volume 
${LevelTwoFEATDir}/StandardVolumeStats/cope${copeCounter}.feat/cope1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 + + # Convert volume CIFTI timeseries files to scalar files + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.volume.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.volume.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + + # Delete the temporary volume CIFTI timeseries files + rm ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_{cope,zstat}_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.volume.dtseries.nii + else + ### Convert CIFTI dense or parcellated timeseries to scalar files + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.feat/zstat1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.feat/cope1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt + fi + + # These merge strings are used below to combine the multiple scalar files into a single file for visualization + zMergeSTRING="${zMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} " + bMergeSTRING="${bMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} " + + # Remove Contrasttemp.txt file + rm ${LevelTwoFEATDir}/Contrasttemp.txt + copeCounter=$(($copeCounter+1)) + done + # Perform the merge into viewable scalar files + ${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${zMergeSTRING} + ${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${bMergeSTRING} + + analysisCounter=$(($analysisCounter+1)) done # end loop: for Analysis in ${Analyses} - -### Generate Files for Viewing -log_Msg "Generate Files for Viewing" - -# Initialize strings used for fslmerge command -zMergeSTRING="" -bMergeSTRING="" -touch ${LevelTwoFEATDir}/Contrasttemp.txt - -if $runVolume ; then - VolzMergeSTRING="" - VolbMergeSTRING="" 
- touch ${LevelTwoFEATDir}/wbtemp.txt -fi - -if [ -e "${LevelTwoFEATDir}/Contrasts.txt" ] ; then - rm ${LevelTwoFEATDir}/Contrasts.txt -fi - -# Loop over contrasts to identify cope and zstat files to merge into wb_view scalars -copeCounter=1; -while [ "$copeCounter" -le "${NumContrasts}" ] ; do - Contrast=`echo $ContrastNames | cut -d " " -f $copeCounter` - echo "${Subject}_${LevelTwofsfName}_level2_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}" >> ${LevelTwoFEATDir}/Contrasttemp.txt - echo ${Contrast} >> ${LevelTwoFEATDir}/Contrasts.txt - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.feat/zstat1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Analysis}/cope${copeCounter}.feat/cope1.${Extension} ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - rm ${LevelTwoFEATDir}/Contrasttemp.txt - zMergeSTRING="${zMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} " - bMergeSTRING="${bMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope_${Contrast}${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} " - - if $runVolume ; then - echo "OTHER" >> ${LevelTwoFEATDir}/wbtemp.txt - echo "1 255 255 255 255" >> ${LevelTwoFEATDir}/wbtemp.txt - ${CARET7DIR}/wb_command -volume-label-import ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz ${LevelTwoFEATDir}/wbtemp.txt ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -discard-others -unlabeled-value 0 - rm ${LevelTwoFEATDir}/wbtemp.txt - ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii -volume ${LevelTwoFEATDir}/StandardVolumeStats/cope${copeCounter}.feat/zstat1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - ${CARET7DIR}/wb_command -cifti-create-dense-timeseries ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii -volume ${LevelTwoFEATDir}/StandardVolumeStats/cope${copeCounter}.feat/cope1.nii.gz ${LevelTwoFEATDir}/StandardVolumeStats/mask.nii.gz -timestep 1 -timestart 1 - ${CARET7DIR}/wb_command -cifti-convert-to-scalar ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii ROW ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii -name-file ${LevelTwoFEATDir}/Contrasttemp.txt - rm 
${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_{cope,zstat}_${Contrast}${TemporalFilterString}${SmoothingString}.dtseries.nii - VolzMergeSTRING="${VolzMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii " - VolbMergeSTRING="${VolbMergeSTRING}-cifti ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope_${Contrast}${TemporalFilterString}${SmoothingString}.dscalar.nii " - fi - copeCounter=$(($copeCounter+1)) -done - -# Perform the merge into viewable scalar files -${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_zstat${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${zMergeSTRING} -${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2_cope${TemporalFilterString}${SmoothingString}${RegString}${ParcellationString}.${ScalarExtension} ${bMergeSTRING} -if $runVolume ; then - ${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_zstat${TemporalFilterString}${SmoothingString}.dscalar.nii ${VolzMergeSTRING} - ${CARET7DIR}/wb_command -cifti-merge ${LevelTwoFEATDir}/${Subject}_${LevelTwofsfName}_level2vol_cope${TemporalFilterString}${SmoothingString}.dscalar.nii ${VolbMergeSTRING} -fi - log_Msg "Complete"
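# Minimal sketch (hypothetical file names, not part of the patch) of the viewing-file merge
# pattern used above: one "-cifti <file>" argument is accumulated per contrast, and the whole
# string is then passed to a single wb_command -cifti-merge call.
zMergeSTRING=""
for scalarFile in zstat_contrast1.dscalar.nii zstat_contrast2.dscalar.nii ; do
	# Append one "-cifti <file> " argument per contrast scalar file
	zMergeSTRING="${zMergeSTRING}-cifti ${scalarFile} "
done
# Merge the per-contrast scalar files into a single viewable scalar file
${CARET7DIR}/wb_command -cifti-merge level2_zstat_all.dscalar.nii ${zMergeSTRING}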