diff --git a/.gitignore b/.gitignore old mode 100755 new mode 100644 diff --git a/Examples/Scripts/FreeSurferPipelineBatch-long.sh b/Examples/Scripts/FreeSurferPipelineBatch-long.sh new file mode 100755 index 000000000..b1dc3bb17 --- /dev/null +++ b/Examples/Scripts/FreeSurferPipelineBatch-long.sh @@ -0,0 +1,139 @@ +#!/bin/bash + +command_line_specified_run_local=FALSE +while [ -n "$1" ]; do + case "$1" in + --runlocal) shift; command_line_specified_run_local=TRUE ;; + *) shift ;; + esac +done + +################################################################################################# +# General input variables +################################################################################################## +#Location of Subject folders (named by subjectID) +StudyFolder="/my/study/path" +#The list of subject labels, space separated +Subjects=(HCA6002236 HCA6002237 HCA6002238) +#The list of possible visits (aka timepoints, sessions) that each subject may have. Timepoint directories should be named <subject>_<visit>. +PossibleVisits=(V1_MR V2_MR V3_MR) +#The list of possible visits that each subject may have. Timepoint (visit) is expected to be named <subject>_<visit>. +#Actual visits (timepoints) are determined based on existing directories that match the visit name pattern. +#ExcludeVisits=(HCA6002237_V1_MR HCA6002238_V1_MR) +ExcludeVisits=() +#Longitudinal template labels, one per each subject. 
+Templates=(HCA6002236_V1_V2 HCA6002237_V1_V2 HCA6002238_V1_V2) + +EnvironmentScript="/scripts/SetUpHCPPipeline.sh" #Pipeline environment script + +# Requirements for this script +# installed versions of: FSL, FreeSurfer, Connectome Workbench (wb_command), gradunwarp (HCP version) +# environment: HCPPIPEDIR, FSLDIR, FREESURFER_HOME, CARET7DIR, PATH for gradient_unwarp.py + +# If you want to use FreeSurfer 5.3, change the ${queuing_command} line below to use +# ${HCPPIPEDIR}/FreeSurfer/FreeSurferPipeline-v5.3.0-HCP.sh + +#Set up pipeline environment variables and software +source "$EnvironmentScript" + +# Log the originating call +echo "$@" + +#NOTE: syntax for QUEUE has changed compared to earlier pipeline releases, +#DO NOT include "-q " at the beginning +#default to no queue, implying run local +QUEUE="long.q" +#QUEUE="hcp_priority.q" + +#parallel options +parallel_mode=BUILTIN + +# This setting is for BUILTIN mode. Set to -1 to auto-detect the # of CPU cores on the node where each per-subject job is run. +# Note that in case when multiple subject jobs are run on the same node and are submitted +# in parallel by e.g. fsl_sub, max_jobs should be set manually to not significantly exceed +# (number of available cores)/(number of subjects) in the batch. +max_jobs=-1 +#max_jobs=4 + +#TEMPLATE stage must be run before TIMEPOINTS stage +start_stage=TEMPLATE +end_stage=TIMEPOINTS + +#Processing goes here. +function identify_timepoints +{ + local subject=$1 + local tplist="" + local tp visit n + + #build the list of timepoints (sessions) + n=0 + for session in ${PossibleVisits[*]}; do + tp="${subject}_${session}" + if [ -d "$StudyFolder/$tp" ] && ! 
[[ " ${ExcludeVisits[*]+${ExcludeVisits[*]}} " =~ [[:space:]]"$tp"[[:space:]] ]]; then + if (( n==0 )); then + tplist="$tp" + else + tplist="$tplist@$tp" + fi + fi + ((n++)) + done + echo $tplist +} + +########################################## INPUTS ########################################## +#Scripts called by this script do assume they run on the outputs of the PreFreeSurfer Pipeline +######################################### DO WORK ########################################## + +for i in ${!Subjects[@]}; do + Subject=${Subjects[i]} + #Subject's time point list, @ separated. + TPlist=(`identify_timepoints $Subject`) + #Array with timepoints + IFS=@ read -ra Timepoints <<< "$TPlist" + #Freesurfer longitudinal average template label + LongitudinalTemplate=${Templates[i]} + + #Longitudinal FreeSurfer Input Variables + SubjectID="$Subject" #FreeSurfer Subject ID Name + SubjectDIR="${StudyFolder}/${Subject}/T1w" #Location to Put FreeSurfer Subject's Folder + T1wImage="${StudyFolder}/${Subject}/T1w/T1w_acpc_dc_restore.nii.gz" #T1w FreeSurfer Input (Full Resolution) + T1wImageBrain="${StudyFolder}/${Subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" #T1w FreeSurfer Input (Full Resolution) + T2wImage="${StudyFolder}/${Subject}/T1w/T2w_acpc_dc_restore.nii.gz" #T2w FreeSurfer Input (Full Resolution) + + if [ "$parallel_mode" != FSLSUB ]; then #fsl_sub does not allow nested submissions + if [[ "${command_line_specified_run_local}" == "TRUE" || "$QUEUE" == "" ]] ; then + echo "About to locally run ${HCPPIPEDIR}/FreeSurfer/LongitudinalFreeSurferPipeline.sh" + #NOTE: fsl_sub without -q runs locally and captures output in files + queuing_command=("$FSLDIR/bin/fsl_sub") + else + echo "About to use fsl_sub to queue ${HCPPIPEDIR}/FreeSurfer/LongitudinalFreeSurferPipeline.sh" + queuing_command=("$FSLDIR/bin/fsl_sub" -q "$QUEUE") + fi + else + queuing_command=() + fi + + #DO NOT PUT timepoint-specific options here!!! 
+ cmd=(${queuing_command[@]+"${queuing_command[@]}"} "$HCPPIPEDIR"/FreeSurfer/LongitudinalFreeSurferPipeline.sh \ + --subject="$Subject" \ + --path="$StudyFolder" \ + --sessions="$TPlist" \ + --fslsub-queue="$QUEUE" \ + --longitudinal-template="$LongitudinalTemplate" \ + --parallel-mode="$parallel_mode" \ + --max-jobs="$max_jobs" \ + --start-stage="$start_stage" \ + --end-stage="$end_stage" \ + ) + + #--extra-reconall-arg-base=-conf2hires Freesurfer reports this is unneeded. + echo "Running command: ${cmd[*]}" + "${cmd[@]}" + + # The following lines are used for interactive debugging to set the positional parameters: $1 $2 $3 ... + # echo set --subject=$Subject --subjectDIR=$SubjectDIR --t1=$T1wImage --t1brain=$T1wImageBrain --t2=$T2wImage --extra-reconall-arg-long="-i \"$SubjectDIR\"/T1w/T1w_acpc_dc_restore.nii.gz -emregmask \"$SubjectDIR\"/T1w/T1w_acpc_dc_restore_brain.nii.gz -T2 $SubjectDIR\"/T1w/T2w_acpc_dc_restore.nii.gz -T2pial" + #echo ". ${EnvironmentScript}" + +done diff --git a/Examples/Scripts/FreeSurferPipelineBatch.sh b/Examples/Scripts/FreeSurferPipelineBatch.sh index 2f33cae56..d32461585 100755 --- a/Examples/Scripts/FreeSurferPipelineBatch.sh +++ b/Examples/Scripts/FreeSurferPipelineBatch.sh @@ -4,7 +4,7 @@ get_batch_options() { local arguments=("$@") command_line_specified_study_folder="" - command_line_specified_subj="" + command_line_specified_session="" command_line_specified_run_local="FALSE" local index=0 @@ -19,8 +19,12 @@ get_batch_options() { command_line_specified_study_folder=${argument#*=} index=$(( index + 1 )) ;; - --Subject=*) - command_line_specified_subj=${argument#*=} + --Subject=*) #legacy option, please use '--Session' instead + command_line_specified_session=${argument#*=} + index=$(( index + 1 )) + ;; + --Session=*) + command_line_specified_session=${argument#*=} index=$(( index + 1 )) ;; --runlocal) @@ -39,16 +43,16 @@ get_batch_options() { get_batch_options "$@" -StudyFolder="${HOME}/projects/Pipelines_ExampleData" #Location 
of Subject folders (named by subjectID) -Subjlist="100307 100610" #Space delimited list of subject IDs +StudyFolder="${HOME}/projects/Pipelines_ExampleData" #Location of Session folders (named by sessionID) +Sessionlist="100307 100610" #Space delimited list of session IDs EnvironmentScript="${HOME}/projects/Pipelines/Examples/Scripts/SetUpHCPPipeline.sh" #Pipeline environment script if [ -n "${command_line_specified_study_folder}" ]; then StudyFolder="${command_line_specified_study_folder}" fi -if [ -n "${command_line_specified_subj}" ]; then - Subjlist="${command_line_specified_subj}" +if [ -n "${command_line_specified_session}" ]; then + Sessionlist="${command_line_specified_session}" fi # Requirements for this script @@ -76,15 +80,15 @@ QUEUE="" ######################################### DO WORK ########################################## -for Subject in $Subjlist ; do - echo $Subject +for Session in $Sessionlist ; do + echo $Session #Input Variables - SubjectID="$Subject" #FreeSurfer Subject ID Name - SubjectDIR="${StudyFolder}/${Subject}/T1w" #Location to Put FreeSurfer Subject's Folder - T1wImage="${StudyFolder}/${Subject}/T1w/T1w_acpc_dc_restore.nii.gz" #T1w FreeSurfer Input (Full Resolution) - T1wImageBrain="${StudyFolder}/${Subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" #T1w FreeSurfer Input (Full Resolution) - T2wImage="${StudyFolder}/${Subject}/T1w/T2w_acpc_dc_restore.nii.gz" #T2w FreeSurfer Input (Full Resolution) + SessionID="$Session" #FreeSurfer Subject ID Name + SessionDIR="${StudyFolder}/${Session}/T1w" #Location to Put FreeSurfer Subject's Folder + T1wImage="${StudyFolder}/${Session}/T1w/T1w_acpc_dc_restore.nii.gz" #T1w FreeSurfer Input (Full Resolution) + T1wImageBrain="${StudyFolder}/${Session}/T1w/T1w_acpc_dc_restore_brain.nii.gz" #T1w FreeSurfer Input (Full Resolution) + T2wImage="${StudyFolder}/${Session}/T1w/T2w_acpc_dc_restore.nii.gz" #T2w FreeSurfer Input (Full Resolution) if [[ "${command_line_specified_run_local}" == "TRUE" || "$QUEUE" == 
"" ]] ; then echo "About to locally run ${HCPPIPEDIR}/FreeSurfer/FreeSurferPipeline.sh" @@ -95,16 +99,16 @@ for Subject in $Subjlist ; do fi "${queuing_command[@]}" "$HCPPIPEDIR"/FreeSurfer/FreeSurferPipeline.sh \ - --subject="$Subject" \ - --subject-dir="$SubjectDIR" \ + --session="$Session" \ + --session-dir="$SessionDIR" \ --t1w-image="$T1wImage" \ --t1w-brain="$T1wImageBrain" \ --t2w-image="$T2wImage" # The following lines are used for interactive debugging to set the positional parameters: $1 $2 $3 ... - echo "set -- --subject=$Subject \ - --subject-dir=$SubjectDIR \ + echo "set -- --session=$Session \ + --session-dir=$SessionDIR \ --t1w-image=$T1wImage \ --t1w-brain=$T1wImageBrain \ --t2w-image=$T2wImage" diff --git a/Examples/Scripts/PostFreeSurferPipelineBatch-long.sh b/Examples/Scripts/PostFreeSurferPipelineBatch-long.sh new file mode 100755 index 000000000..77f255997 --- /dev/null +++ b/Examples/Scripts/PostFreeSurferPipelineBatch-long.sh @@ -0,0 +1,169 @@ +#!/bin/bash + +command_line_specified_run_local=FALSE +while [ -n "$1" ]; do + case "$1" in + --runlocal) shift; command_line_specified_run_local=TRUE ;; + *) shift ;; + esac +done + +################################################################################################# +# General input variables +################################################################################################## +#Location of Subject folders (named by subjectID) +StudyFolder="" +#The list of subject labels, space separated +Subjects=(HCA6002236 HCA6002237 HCA6002238) +#The list of possible visits that each subject may have. Timepoint (visit) is expected to be named _. +PossibleVisits=(V1_MR V2_MR V3_MR) +#Actual visits (timepoints) are determined based on existing directories that match the visit name pattern _. +#Excluded timepoint directories +#ExcludeVisits=(HCA6002237_V1_MR HCA6002238_V1_MR) +ExcludeVisits=() +#Longitudinal template labels, one per each subject. 
+Templates=(HCA6002236_V1_V2 HCA6002237_V1_V2 HCA6002238_V1_V2) +#EnvironmentScript="${HOME}/projects/HCPPipelines/Examples/Scripts/SetUpHCPPipeline.sh" #Pipeline environment script +EnvironmentScript="${HOME}/projects/HCPPipelines/Examples/Scripts/SetUpHCPPipeline.sh" +source "$EnvironmentScript" + +################################################################################################## +# Input variables used by PostFreeSurferPipelineLongPrep +################################################################################################## +# Hires T1w MNI template +T1wTemplate="${HCPPIPEDIR_Templates}/MNI152_T1_0.8mm.nii.gz" +# Hires brain extracted MNI template1 +T1wTemplateBrain="${HCPPIPEDIR_Templates}/MNI152_T1_0.8mm_brain.nii.gz" +# Lowres T1w MNI template +T1wTemplate2mm="${HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" +# Hires T2w MNI Template +T2wTemplate="${HCPPIPEDIR_Templates}/MNI152_T2_0.8mm.nii.gz" +# Hires T2w brain extracted MNI Template +T2wTemplateBrain="${HCPPIPEDIR_Templates}/MNI152_T2_0.8mm_brain.nii.gz" +# Lowres T2w MNI Template +T2wTemplate2mm="${HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" +# Hires MNI brain mask template +TemplateMask="${HCPPIPEDIR_Templates}/MNI152_T1_0.8mm_brain_mask.nii.gz" +# Lowres MNI brain mask template +Template2mmMask="${HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" +# FNIRT 2mm T1w Config +FNIRTConfig="${HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" + +################################################################################################## +# Input variables used by PostFreesurferPipeline (longitudinal mode) +################################################################################################## +SurfaceAtlasDIR="${HCPPIPEDIR_Templates}/standard_mesh_atlases" +GrayordinatesSpaceDIR="${HCPPIPEDIR_Templates}/91282_Greyordinates" +GrayordinatesResolutions="2" #Usually 2mm, if multiple delimit with @, must already exist in templates dir +HighResMesh="164" #Usually 164k vertices 
+LowResMeshes="32" #Usually 32k vertices, if multiple delimit with @, must already exist in templates dir +SubcorticalGrayLabels="${HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" +FreeSurferLabels="${HCPPIPEDIR_Config}/FreeSurferAllLut.txt" +ReferenceMyelinMaps="${HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" +RegName="MSMSulc" #MSMSulc is recommended, if binary is not available use FS (FreeSurfer) + +# Requirements for this script +# installed versions of: FSL, Connectome Workbench (wb_command) +# environment: HCPPIPEDIR, FSLDIR, CARET7DIR + +#fslsub queue +#QUEUE=short.q +#top level parallelization + +QUEUE="long.q" + +#pipeline level parallelization +#parallel mode: FSLSUB, BUILTIN, NONE +parallel_mode=BUILTIN + +if [ "$parallel_mode" != FSLSUB ]; then #fsl_sub does not allow nested submissions +if [[ "${command_line_specified_run_local}" == "TRUE" || "$QUEUE" == "" ]] ; then + echo "About to locally run ${HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipelineLongLauncher.sh" +# #NOTE: fsl_sub without -q runs locally and captures output in files + queuing_command=("$FSLDIR/bin/fsl_sub") + QUEUE=long.q +else + echo "About to use fsl_sub to queue ${HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipelineLongLauncher.sh" + queuing_command=("$FSLDIR/bin/fsl_sub" -q "$QUEUE") + QUEUE="" +fi +else + queuing_command=() +fi +# This setting is for BUILTIN mode. Set to -1 to auto-detect the # of CPU cores on the node where each per-subject job is run. +# Note that in case when multiple subject jobs are run on the same node and are submitted +# in parallel by e.g. fsl_sub, it may be inefficient to have the +# (number of fsl_sub jobs running per node) * (BUILTIN parallelism) substantially exceed +# the number of cores per node +max_jobs=-1 +#max_jobs=4 + +# Stages to run. Must be run in the following order: +# 1. PREP-TP, 2.PREP-T, 3. POSTFS-TP1, 4. POSTFS-T, 5. 
POSTFS-TP2 +start_stage=PREP-TP +end_stage=POSTFS-TP2 + + +function identify_timepoints +{ + local subject=$1 + local tplist="" + local tp visit n + + #build the list of timepoints + n=0 + for visit in ${PossibleVisits[*]}; do + tp="${subject}_${visit}" + if [ -d "$StudyFolder/$tp" ] && ! [[ " ${ExcludeVisits[*]+${ExcludeVisits[*]}} " =~ [[:space:]]"$tp"[[:space:]] ]]; then + if (( n==0 )); then + tplist="$tp" + else + tplist="$tplist@$tp" + fi + fi + ((n++)) + done + echo $tplist +} + +#iterate over all subjects. +for i in ${!Subjects[@]}; do + Subject=${Subjects[i]} + LongitudinalTemplate=${Templates[i]} + Timepoint_list=(`identify_timepoints $Subject`) + + echo Subject: $Subject + echo Template: $LongitudinalTemplate + echo Timepoints: $Timepoint_list + + cmd=(${queuing_command[@]+"${queuing_command[@]}"} ${HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipelineLongLauncher.sh \ + --study-folder="$StudyFolder" \ + --subject="$Subject" \ + --longitudinal-template="$LongitudinalTemplate" \ + --sessions="$Timepoint_list" \ + --parallel-mode=$parallel_mode \ + --fslsub-queue=$QUEUE \ + --start-stage=$start_stage \ + --end-stage=$end_stage \ + --t1template="$T1wTemplate" \ + --t1templatebrain="$T1wTemplateBrain" \ + --t1template2mm="$T1wTemplate2mm" \ + --t2template="$T2wTemplate" \ + --t2templatebrain="$T2wTemplateBrain" \ + --t2template2mm="$T2wTemplate2mm" \ + --templatemask="$TemplateMask" \ + --template2mmmask="$Template2mmMask" \ + --fnirtconfig="$FNIRTConfig" \ + --freesurferlabels="$FreeSurferLabels" \ + --surfatlasdir="$SurfaceAtlasDIR" \ + --grayordinatesres="$GrayordinatesResolutions" \ + --grayordinatesdir="$GrayordinatesSpaceDIR" \ + --hiresmesh="$HighResMesh" \ + --lowresmesh="$LowResMeshes" \ + --subcortgraylabels="$SubcorticalGrayLabels" \ + --refmyelinmaps="$ReferenceMyelinMaps" \ + --regname="$RegName" \ + ) + echo "Running command: ${cmd[*]}" + "${cmd[@]}" +done diff --git a/Examples/Scripts/PostFreeSurferPipelineBatch.sh 
b/Examples/Scripts/PostFreeSurferPipelineBatch.sh index 134ee2a67..6f5c9893b 100755 --- a/Examples/Scripts/PostFreeSurferPipelineBatch.sh +++ b/Examples/Scripts/PostFreeSurferPipelineBatch.sh @@ -39,9 +39,9 @@ get_batch_options() { get_batch_options "$@" -StudyFolder="${HOME}/projects/Pipelines_ExampleData" #Location of Subject folders (named by subjectID) +StudyFolder="${HOME}/projects/HCPPipelines_ExampleData" #Location of Subject folders (named by subjectID) Subjlist="100307 100610" #Space delimited list of subject IDs -EnvironmentScript="${HOME}/projects/Pipelines/Examples/Scripts/SetUpHCPPipeline.sh" #Pipeline environment script +EnvironmentScript="${HOME}/projects/HCPPipelines/Examples/Scripts/SetUpHCPPipeline.sh" #Pipeline environment script if [ -n "${command_line_specified_study_folder}" ]; then StudyFolder="${command_line_specified_study_folder}" @@ -95,8 +95,7 @@ for Subject in $Subjlist ; do echo "About to use fsl_sub to queue ${HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipeline.sh" queuing_command=("$FSLDIR/bin/fsl_sub" -q "$QUEUE") fi - - "${queuing_command[@]}" "$HCPPIPEDIR"/PostFreeSurfer/PostFreeSurferPipeline.sh \ + job=("${queuing_command[@]}" "$HCPPIPEDIR"/PostFreeSurfer/PostFreeSurferPipeline.sh \ --study-folder="$StudyFolder" \ --subject="$Subject" \ --surfatlasdir="$SurfaceAtlasDIR" \ @@ -108,7 +107,9 @@ for Subject in $Subjlist ; do --freesurferlabels="$FreeSurferLabels" \ --refmyelinmaps="$ReferenceMyelinMaps" \ --regname="$RegName" \ - --use-ind-mean="$UseIndMean" + --use-ind-mean="$UseIndMean") + + "${job[@]}" # The following lines are used for interactive debugging to set the positional parameters: $1 $2 $3 ... 
diff --git a/Examples/Scripts/PreFreeSurferPipelineBatch.sh b/Examples/Scripts/PreFreeSurferPipelineBatch.sh index 17bd10ab6..815025ae1 100755 --- a/Examples/Scripts/PreFreeSurferPipelineBatch.sh +++ b/Examples/Scripts/PreFreeSurferPipelineBatch.sh @@ -66,8 +66,8 @@ # # Retrieve the following command line parameter values if specified # -# --StudyFolder= - primary study folder containing subject ID subdirectories -# --Subjlist= - quoted, space separated list of subject IDs on which +# --StudyFolder= - primary study folder containing session ID subdirectories +# --Sessionlist= - quoted, space separated list of session IDs on which # to run the pipeline # --runlocal - if specified (without an argument), processing is run # on "this" machine as opposed to being submitted to a @@ -77,7 +77,7 @@ # line specified parameters # # command_line_specified_study_folder -# command_line_specified_subj_list +# command_line_specified_session_list # command_line_specified_run_local # # These values are intended to be used to override any values set @@ -86,7 +86,7 @@ get_batch_options() { local arguments=("$@") command_line_specified_study_folder="" - command_line_specified_subj="" + command_line_specified_session="" command_line_specified_run_local="FALSE" local index=0 @@ -101,8 +101,12 @@ get_batch_options() { command_line_specified_study_folder=${argument#*=} index=$(( index + 1 )) ;; - --Subject=*) - command_line_specified_subj=${argument#*=} + --Subject=*) #legacy field, use 'Session' instead + command_line_specified_session=${argument#*=} + index=$(( index + 1 )) + ;; + --Session=*) + command_line_specified_session=${argument#*=} index=$(( index + 1 )) ;; --runlocal) @@ -126,8 +130,8 @@ main() get_batch_options "$@" # Set variable values that locate and specify data to process - StudyFolder="${HOME}/projects/Pipelines_ExampleData" # Location of Subject folders (named by subjectID) - Subjlist="100307 100610" # Space delimited list of subject IDs + 
StudyFolder="${HOME}/projects/Pipelines_ExampleData" # Location of Session folders (named by sessionID) + Sessionlist="100307 100610" # Space delimited list of session IDs # Set variable value that sets up environment EnvironmentScript="${HOME}/projects/Pipelines/Examples/Scripts/SetUpHCPPipeline.sh" # Pipeline environment script @@ -137,13 +141,13 @@ main() StudyFolder="${command_line_specified_study_folder}" fi - if [ -n "${command_line_specified_subj}" ]; then - Subjlist="${command_line_specified_subj}" + if [ -n "${command_line_specified_session}" ]; then + Sessionlist="${command_line_specified_session}" fi # Report major script control variables to user echo "StudyFolder: ${StudyFolder}" - echo "Subjlist: ${Subjlist}" + echo "Sessionlist: ${Sessionlist}" echo "EnvironmentScript: ${EnvironmentScript}" echo "Run locally: ${command_line_specified_run_local}" @@ -163,14 +167,14 @@ main() # input names or paths. This batch script assumes the HCP raw data naming # convention, e.g. # - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_T1w_MPR1.nii.gz - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR2/${Subject}_3T_T1w_MPR2.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_T1w_MPR1.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR2/${Session}_3T_T1w_MPR2.nii.gz # - # ${StudyFolder}/${Subject}/unprocessed/3T/T2w_SPC1/${Subject}_3T_T2w_SPC1.nii.gz - # ${StudyFolder}/${Subject}/unprocessed/3T/T2w_SPC2/${Subject}_3T_T2w_SPC2.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T2w_SPC1/${Session}_3T_T2w_SPC1.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T2w_SPC2/${Session}_3T_T2w_SPC2.nii.gz # - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Magnitude.nii.gz - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Phase.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_FieldMap_Magnitude.nii.gz + # 
${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_FieldMap_Phase.nii.gz # Scan settings: # @@ -202,9 +206,9 @@ main() # DO WORK - # Cycle through specified subjects - for Subject in $Subjlist ; do - echo $Subject + # Cycle through specified sessions + for Session in $Sessionlist ; do + echo $Session # Input Images @@ -212,23 +216,23 @@ main() # T1w images T1wInputImages="" numT1ws=0 - for folder in "${StudyFolder}/${Subject}/unprocessed/3T"/T1w_MPR?; do + for folder in "${StudyFolder}/${Session}/unprocessed/3T"/T1w_MPR?; do folderbase=$(basename "$folder") - T1wInputImages+="$folder/${Subject}_3T_$folderbase.nii.gz@" + T1wInputImages+="$folder/${Session}_3T_$folderbase.nii.gz@" numT1ws=$((numT1ws + 1)) done - echo "Found ${numT1ws} T1w Images for subject ${Subject}" + echo "Found ${numT1ws} T1w Images for session ${Session}" # Detect Number of T2w Images and build list of full paths to # T2w images T2wInputImages="" numT2ws=0 - for folder in "${StudyFolder}/${Subject}/unprocessed/3T"/T2w_SPC?; do + for folder in "${StudyFolder}/${Session}/unprocessed/3T"/T2w_SPC?; do folderbase=$(basename "$folder") - T2wInputImages+="$folder/${Subject}_3T_$folderbase.nii.gz@" + T2wInputImages+="$folder/${Session}_3T_$folderbase.nii.gz@" numT2ws=$((numT2ws + 1)) done - echo "Found ${numT2ws} T2w Images for subject ${Subject}" + echo "Found ${numT2ws} T2w Images for session ${Session}" # Readout Distortion Correction: # @@ -286,11 +290,11 @@ main() # The MagnitudeInputName variable should be set to a 4D magitude volume # with two 3D timepoints or "NONE" if not used - MagnitudeInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Magnitude.nii.gz" + MagnitudeInputName="${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_FieldMap_Magnitude.nii.gz" # The PhaseInputName variable should be set to a 3D phase difference # volume or "NONE" if not used - 
PhaseInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_FieldMap_Phase.nii.gz" + PhaseInputName="${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_FieldMap_Phase.nii.gz" # The DeltaTE (echo time difference) of the fieldmap. For HCP Young Adult data, this variable would typically be 2.46ms for 3T scans, 1.02ms for 7T # scans, or "NONE" if not using readout distortion correction @@ -311,8 +315,8 @@ main() # (i.e. if AvgrdcSTRING is not equal to "TOPUP") # # Example values for when using Spin Echo Field Maps from a Siemens machine: - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_LR.nii.gz - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_AP.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_SpinEchoFieldMap_LR.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_SpinEchoFieldMap_AP.nii.gz SpinEchoPhaseEncodeNegative="NONE" # The SpinEchoPhaseEncodePositive variable should be set to the @@ -322,8 +326,8 @@ main() # (i.e. if AvgrdcSTRING is not equal to "TOPUP") # # Example values for when using Spin Echo Field Maps from a Siemens machine: - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_RL.nii.gz - # ${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_SpinEchoFieldMap_PA.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_SpinEchoFieldMap_RL.nii.gz + # ${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_SpinEchoFieldMap_PA.nii.gz SpinEchoPhaseEncodePositive="NONE" # "Effective" Echo Spacing of *Spin Echo Field Maps*. Specified in seconds. 
@@ -372,7 +376,7 @@ main() # set to NONE if using TOPUP or FIELDMAP/SiemensFieldMap or GEHealthCareFieldMap # # For Example: - # GEB0InputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_GradientEchoFieldMap.nii.gz" + # GEB0InputName="${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_GradientEchoFieldMap.nii.gz" # DeltaTE=2.304 # Here DeltaTE refers to the DeltaTE in ms # NOTE: At 3T, the DeltaTE is *usually* 2.304ms for 2D-B0MAP and 2.272ms 3D-B0MAP. @@ -394,8 +398,8 @@ main() # to the input fieldmap in Hertz and DeltaTE # (for DeltaTE see NOTE above) # - # MagnitudeInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_BOMap_Magnitude.nii.gz" - # PhaseInputName="${StudyFolder}/${Subject}/unprocessed/3T/T1w_MPR1/${Subject}_3T_B0Map_fieldmaphz.nii.gz" + # MagnitudeInputName="${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_BOMap_Magnitude.nii.gz" + # PhaseInputName="${StudyFolder}/${Session}/unprocessed/3T/T1w_MPR1/${Session}_3T_B0Map_fieldmaphz.nii.gz" # DeltaTE="2.272" # --------------------------------------------------------------- @@ -482,7 +486,7 @@ main() "${queuing_command[@]}" "$HCPPIPEDIR"/PreFreeSurfer/PreFreeSurferPipeline.sh \ --path="$StudyFolder" \ - --subject="$Subject" \ + --session="$Session" \ --t1="$T1wInputImages" \ --t2="$T2wInputImages" \ --t1template="$T1wTemplate" \ diff --git a/FreeSurfer/FreeSurferPipeline.sh b/FreeSurfer/FreeSurferPipeline.sh index f959852cb..e034964bd 100755 --- a/FreeSurfer/FreeSurferPipeline.sh +++ b/FreeSurfer/FreeSurferPipeline.sh @@ -24,7 +24,7 @@ # # ## License # -# See the [LICENSE](https://github.com/Washington-University/Pipelines/blob/master/LICENSE.md) file +# See the [LICENSE](https://github.com/Washington-University/HCPPipelines/blob/master/LICENSE.md) file # #~ND~END~ @@ -75,14 +75,14 @@ then changeargs=1 fi ;; - (--existing-subject) + (--existing-session|--existing-subject) #same logic if ((i + 1 < ${#origargs[@]})) && 
(opts_StringToBool "${origargs[i + 1]}" &> /dev/null) then - newargs+=(--existing-subject "${origargs[i + 1]}") + newargs+=(--existing-session "${origargs[i + 1]}") i=$((i + 1)) else - newargs+=(--existing-subject=TRUE) + newargs+=(--existing-session=TRUE) changeargs=1 fi ;; @@ -127,26 +127,26 @@ fi opts_SetScriptDescription "Runs the FreeSurfer HCP pipline on data processed by prefresurfer" # Show usage information -opts_AddMandatory '--subject' 'SubjectID' 'subject' "Subject ID (required). Used with --path input to create full path to root directory for all outputs generated as path/subject" +opts_AddMandatory '--session' 'SessionID' 'session' "Session ID (required). Used with --path input to create full path to root directory for all outputs generated as path/session" "--subject" -opts_AddOptional '--subjectDIR' 'SubjectDIR' 'subject' 'path to subject directory required, unless --existing-subject is set' "" "--subject-dir" +opts_AddOptional '--session-dir' 'SessionDIR' 'session' 'path to session directory required, unless --existing-session is set' "" "--subject-dir" -opts_AddOptional '--t1w-image' 'T1wImage' "T1" 'path to T1w image required, unless --existing-subject is set' "" "--t1" +opts_AddOptional '--t1w-image' 'T1wImage' "T1" 'path to T1w image required, unless --existing-session is set' "" "--t1" -opts_AddOptional '--t1w-brain' 'T1wImageBrain' "T1Brain" 'path to T1w brain mask required, unless --existing-subject is set' "" "--t1brain" +opts_AddOptional '--t1w-brain' 'T1wImageBrain' "T1Brain" 'path to T1w brain mask required, unless --existing-session is set' "" "--t1brain" -opts_AddOptional '--t2w-image' 'T2wImage' "T2" "path to T2w image required, unless --existing-subject is set" "" "--t2" +opts_AddOptional '--t2w-image' 'T2wImage' "T2" "path to T2w image required, unless --existing-session is set" "" "--t2" opts_AddOptional '--seed' 'recon_all_seed' "Seed" 'recon-all seed value' opts_AddOptional '--flair' 'flairString' 'TRUE/FALSE' "Indicates that
recon-all is to be run with the -FLAIR/-FLAIRpial options (rather than the -T2/-T2pial options). The FLAIR input image itself should still be provided via the '--t2' argument. NOTE: This is experimental" "FALSE" -opts_AddOptional '--existing-subject' 'existing_subjectString' 'TRUE/FALSE' "Indicates that the script is to be run on top of an already existing analysis/subject. This excludes the '-i' and '-T2/-FLAIR' flags from the invocation of recon-all (i.e., uses previous input volumes). The --t1w-image, --t1w-brain and --t2w-image arguments, if provided, are ignored. It also excludes the -all' flag from the invocation of recon-all. Consequently, user needs to explicitly specify which recon-all stage(s) to run using the --extra-reconall-arg flag. This flag allows for the application of FreeSurfer edits." "FALSE" +opts_AddOptional '--existing-session' 'existing_sessionString' 'TRUE/FALSE' "Indicates that the script is to be run on top of an already existing analysis/session. This excludes the '-i' and '-T2/-FLAIR' flags from the invocation of recon-all (i.e., uses previous input volumes). The --t1w-image, --t1w-brain and --t2w-image arguments, if provided, are ignored. It also excludes the -all' flag from the invocation of recon-all. Consequently, user needs to explicitly specify which recon-all stage(s) to run using the --extra-reconall-arg flag. This flag allows for the application of FreeSurfer edits." "FALSE" "--existing-subject" #TSC: repeatable options aren't currently supported in newopts, do them manually and fake the help info for now -opts_AddOptional '--extra-reconall-arg' 'extra_reconall_args' 'token' "(repeatable) Generic single token argument to pass to recon-all. Provides a mechanism to customize the recon-all command and/or specify the recon-all stage(s) to be run (e.g., in the case of FreeSurfer edits). If you want to avoid running all the stages inherent to the '-all' flag in recon-all, you also need to include the --existing-subject flag. 
The token itself may include dashes and equal signs (although Freesurfer doesn't currently use equal signs in its argument specification). e.g., --extra-reconall-arg=-3T is the correct syntax for adding the stand-alone '-3T' flag to recon-all, but --extra-reconall-arg='-norm3diters 3' is NOT acceptable. For recon-all flags that themselves require an argument, you can handle that by specifying --extra-reconall-arg multiple times (in the proper sequential fashion), e.g. --extra-reconall-arg=-norm3diters --extra-reconall-arg=3 will be translated to '-norm3diters 3' when passed to recon-all." +opts_AddOptional '--extra-reconall-arg' 'extra_reconall_args' 'token' "(repeatable) Generic single token argument to pass to recon-all. Provides a mechanism to customize the recon-all command and/or specify the recon-all stage(s) to be run (e.g., in the case of FreeSurfer edits). If you want to avoid running all the stages inherent to the '-all' flag in recon-all, you also need to include the --existing-session flag. The token itself may include dashes and equal signs (although Freesurfer doesn't currently use equal signs in its argument specification). e.g., --extra-reconall-arg=-3T is the correct syntax for adding the stand-alone '-3T' flag to recon-all, but --extra-reconall-arg='-norm3diters 3' is NOT acceptable. For recon-all flags that themselves require an argument, you can handle that by specifying --extra-reconall-arg multiple times (in the proper sequential fashion), e.g. --extra-reconall-arg=-norm3diters --extra-reconall-arg=3 will be translated to '-norm3diters 3' when passed to recon-all." -opts_AddOptional '--conf2hires' 'conf2hiresString' 'TRUE/FALSE' "Indicates that the script should include -conf2hires as an argument to recon-all. By default, -conf2hires is included, so that recon-all will place the surfaces on the hires T1 (and T2). 
Setting this to false is an advanced option, intended for situations where: (i) the original T1w and T2w images are NOT 'hires' (i.e., they are 1 mm isotropic or worse), or (ii) you want to be able to run some flag in recon-all, without also regenerating the surfaces, e.g. --existing-subject --extra-reconall-arg=-show-edits --conf2hires=FALSE" "TRUE" +opts_AddOptional '--conf2hires' 'conf2hiresString' 'TRUE/FALSE' "Indicates that the script should include -conf2hires as an argument to recon-all. By default, -conf2hires is included, so that recon-all will place the surfaces on the hires T1 (and T2). Setting this to false is an advanced option, intended for situations where: (i) the original T1w and T2w images are NOT 'hires' (i.e., they are 1 mm isotropic or worse), or (ii) you want to be able to run some flag in recon-all, without also regenerating the surfaces, e.g. --existing-session --extra-reconall-arg=-show-edits --conf2hires=FALSE" "TRUE" opts_AddOptional '--processing-mode' 'ProcessingMode' 'HCPStyleData or LegacyStyleData' "Controls whether the HCP acquisition and processing guidelines should be treated as requirements. 'HCPStyleData' (the default) follows the processing steps described in Glasser et al. (2013) and requires 'HCP-Style' data acquistion. 'LegacyStyleData' allows additional processing functionality and use of some acquisitions that do not conform to 'HCP-Style' expectations. In this script, it allows not having a high-resolution T2w image." 
"HCPStyleData" @@ -168,7 +168,7 @@ extra_reconall_args=(${extra_reconall_args_manual[@]+"${extra_reconall_args_manu #parse booleans flair=$(opts_StringToBool "$flairString") -existing_subject=$(opts_StringToBool "$existing_subjectString") +existing_session=$(opts_StringToBool "$existing_sessionString") conf2hires=$(opts_StringToBool "$conf2hiresString") #deal with NONE convention @@ -182,19 +182,19 @@ if [[ "$T2wImage" == "NONE" ]]; then T2wImage="" fi -#check if existing_subject is set, if not t1 has to be set, and if t2 is not set, set processing mode flag to legacy +#check if existing_session is set, if not t1 has to be set, and if t2 is not set, set processing mode flag to legacy Compliance="HCPStyleData" ComplianceMsg="" -if ((! existing_subject)) +if ((! existing_session)) then if [[ "${T1wImage}" = "" ]] then - log_Err_Abort "--t1 not set and '--existing-subject' not used" + log_Err_Abort "--t1 not set and '--existing-session' not used" fi if [[ "${T1wImageBrain}" = "" ]] then - log_Err_Abort "--t1brain not set and '--existing-subject' not used" + log_Err_Abort "--t1brain not set and '--existing-session' not used" fi if [[ "${T2wImage}" = "" ]] @@ -484,36 +484,36 @@ log_Msg "Starting main functionality" # ---------------------------------------------------------------------- log_Msg "Retrieve positional parameters" # ---------------------------------------------------------------------- -SubjectDIR="${SubjectDIR}" -SubjectID="${SubjectID}" -T1wImage="${T1wImage}" # Irrelevant if '--existing-subject' flag is set -T1wImageBrain="${T1wImageBrain}" # Irrelevant if '--existing-subject' flag is set -T2wImage="${T2wImage}" # Irrelevant if '--existing-subject' flag is set +SessionDIR="${SessionDIR}" +SessionID="${SessionID}" +T1wImage="${T1wImage}" # Irrelevant if '--existing-session' flag is set +T1wImageBrain="${T1wImageBrain}" # Irrelevant if '--existing-session' flag is set +T2wImage="${T2wImage}" # Irrelevant if '--existing-session' flag is set 
recon_all_seed="${recon_all_seed}" # ---------------------------------------------------------------------- # Log values retrieved from positional parameters # ---------------------------------------------------------------------- -log_Msg "SubjectDIR: ${SubjectDIR}" -log_Msg "SubjectID: ${SubjectID}" +log_Msg "SessionDIR: ${SessionDIR}" +log_Msg "SessionID: ${SessionID}" log_Msg "T1wImage: ${T1wImage}" log_Msg "T1wImageBrain: ${T1wImageBrain}" log_Msg "T2wImage: ${T2wImage}" log_Msg "recon_all_seed: ${recon_all_seed}" log_Msg "flair: ${flair}" -log_Msg "existing_subject: ${existing_subject}" +log_Msg "existing_session: ${existing_session}" log_Msg "extra_reconall_args: ${extra_reconall_args[*]+"${extra_reconall_args[*]}"}" log_Msg "conf2hires: ${conf2hires}" -if ((! existing_subject)); then +if ((! existing_session)); then - # If --existing-subject is NOT set, AND PostFreeSurfer has been run, then + # If --existing-session is NOT set, AND PostFreeSurfer has been run, then # certain files need to be reverted to their PreFreeSurfer output versions - if [ `imtest ${SubjectDIR}/xfms/${OutputOrigT1wToT1w}` = 1 ]; then - log_Err "The --existing-subject flag was not invoked AND PostFreeSurfer has already been run." - log_Err "If attempting to run FreeSurfer de novo, certain files (e.g., /T1w/{T1w,T2w}_acpc_dc*) need to be reverted to their PreFreeSurfer outputs." - log_Err_Abort "If this is the goal, delete ${SubjectDIR}/${SubjectID} AND re-run PreFreeSurfer, before invoking FreeSurfer again." + if [ `imtest ${SessionDIR}/xfms/${OutputOrigT1wToT1w}` = 1 ]; then + log_Err "The --existing-session flag was not invoked AND PostFreeSurfer has already been run." + log_Err "If attempting to run FreeSurfer de novo, certain files (e.g., /T1w/{T1w,T2w}_acpc_dc*) need to be reverted to their PreFreeSurfer outputs." + log_Err_Abort "If this is the goal, delete ${SessionDIR}/${SessionID} AND re-run PreFreeSurfer, before invoking FreeSurfer again." 
fi # ---------------------------------------------------------------------- @@ -533,8 +533,8 @@ fi log_Msg "Call custom recon-all: recon-all.v6.hires" # ---------------------------------------------------------------------- -recon_all_cmd=(recon-all.v6.hires -subjid "$SubjectID" -sd "$SubjectDIR") -if ((! existing_subject)); then # input volumes only necessary first time through +recon_all_cmd=(recon-all.v6.hires -subjid "$SessionID" -sd "$SessionDIR") +if ((! existing_session)); then # input volumes only necessary first time through recon_all_cmd+=(-all -i "$zero_threshold_T1wImage" -emregmask "$T1wImageBrain") if [ "${T2wImage}" != "" ]; then if ((flair)); then @@ -576,7 +576,7 @@ if [ "${return_code}" != "0" ]; then log_Err_Abort "recon-all command failed with return_code: ${return_code}" fi -if ((! existing_subject)); then +if ((! existing_session)); then # ---------------------------------------------------------------------- log_Msg "Clean up file: ${zero_threshold_T1wImage}" # ---------------------------------------------------------------------- @@ -588,14 +588,14 @@ if ((! 
existing_subject)); then fi -## MPH: Portions of the following are unnecesary in the case of ${existing_subject} = "TRUE" +## MPH: Portions of the following are unnecesary in the case of ${existing_session} = "TRUE" ## but rather than identify what is and isn't strictly necessary (which itself may interact ## with the specific stages run in recon-all), we'll simply run it all to be safe that all ## files created following recon-all are appropriately updated # ---------------------------------------------------------------------- log_Msg "Creating eye.dat" # ---------------------------------------------------------------------- -mridir=${SubjectDIR}/${SubjectID}/mri +mridir=${SessionDIR}/${SessionID}/mri transformsdir=${mridir}/transforms mkdir -p ${transformsdir} @@ -603,7 +603,7 @@ mkdir -p ${transformsdir} eye_dat_file=${transformsdir}/eye.dat log_Msg "...This creates ${eye_dat_file}" -echo "${SubjectID}" > ${eye_dat_file} +echo "${SessionID}" > ${eye_dat_file} echo "1" >> ${eye_dat_file} echo "1" >> ${eye_dat_file} echo "1" >> ${eye_dat_file} @@ -634,7 +634,7 @@ if [[ "${T2wImage}" != "" ]]; then tkregister_cmd+=" --noedit" tkregister_cmd+=" --reg deleteme.dat" tkregister_cmd+=" --ltaout transforms/orig-to-rawavg.lta" - tkregister_cmd+=" --s ${SubjectID}" + tkregister_cmd+=" --s ${SessionID}" log_Msg "......The following produces deleteme.dat and transforms/orig-to-rawavg.lta" log_Msg "......tkregister_cmd: ${tkregister_cmd}" @@ -689,7 +689,7 @@ log_Msg "Creating white surface files in rawavg space" pushd ${mridir} -export SUBJECTS_DIR="$SubjectDIR" +export SUBJECTS_DIR="$SessionDIR" reg=$mridir/transforms/orig2rawavg.dat # generate registration between conformed and hires based on headers @@ -699,13 +699,13 @@ reg=$mridir/transforms/orig2rawavg.dat tkregister2 --mov ${mridir}/rawavg.mgz --targ ${mridir}/orig.mgz --noedit --regheader --reg $reg #The ?h.white.deformed surfaces are used in FreeSurfer BBR registrations for fMRI and diffusion and have been moved 
into the HCP's T1w space so that BBR produces a transformation containing only the minor adjustment to the registration. -mri_surf2surf --s ${SubjectID} --sval-xyz white --reg $reg --tval-xyz ${mridir}/rawavg.mgz --tval white.deformed --surfreg white --hemi lh +mri_surf2surf --s ${SessionID} --sval-xyz white --reg $reg --tval-xyz ${mridir}/rawavg.mgz --tval white.deformed --surfreg white --hemi lh return_code=$? if [ "${return_code}" != "0" ]; then log_Err_Abort "mri_surf2surf command for left hemisphere failed with return_code: ${return_code}" fi -mri_surf2surf --s ${SubjectID} --sval-xyz white --reg $reg --tval-xyz ${mridir}/rawavg.mgz --tval white.deformed --surfreg white --hemi rh +mri_surf2surf --s ${SessionID} --sval-xyz white --reg $reg --tval-xyz ${mridir}/rawavg.mgz --tval white.deformed --surfreg white --hemi rh return_code=$? if [ "${return_code}" != "0" ]; then log_Err_Abort "mri_surf2surf command for right hemisphere failed with return_code: ${return_code}" diff --git a/FreeSurfer/LongitudinalFreeSurferPipeline.sh b/FreeSurfer/LongitudinalFreeSurferPipeline.sh new file mode 100755 index 000000000..07e12f4c1 --- /dev/null +++ b/FreeSurfer/LongitudinalFreeSurferPipeline.sh @@ -0,0 +1,349 @@ +#!/bin/bash + +#~ND~FORMAT~MARKDOWN~ +#~ND~START~ +# +# # LongitudinalFreeSurferPipeline.sh +# +# ## Copyright Notice +# +# Copyright (C) 2015-2024 The Human Connectome Project/Connectome Coordination Facility +# +# * Washington University in St. Louis +# * University of Ljubljana +# +# ## Author(s) +# +# * Jure Demsar, Faculty of Computer and Information Science, University of Ljubljana +# * Matthew F. Glasser, Department of Anatomy and Neurobiology, Washington University in St. Louis +# * Mikhail V. Milchenko, Department of Radiology, Washington University in St.
Louis +# + +# Version: v.0.9, 09/18/2024 + +# ## Product +# +# [Human Connectome Project](http://www.humanconnectome.org) (HCP) Pipelines +# +# ## License +# +# See the [LICENSE](https://github.com/Washington-University/HCPPipelines/blob/master/LICENSE.md) file +# +#~ND~END~ + +# Define Sources and pipe-dir +# ----------------------------------------------------------------------------------- + +set -eu + +pipedirguessed=0 +if [[ "${HCPPIPEDIR:-}" == "" ]] +then + pipedirguessed=1 + #fix this if the script is more than one level below HCPPIPEDIR + export HCPPIPEDIR="$(dirname -- "$0")/.." +fi + +source "${HCPPIPEDIR}/global/scripts/debug.shlib" "$@" # Debugging functions; also sources log.shlib +source "${HCPPIPEDIR}/global/scripts/newopts.shlib" "$@" +source "${HCPPIPEDIR}/global/scripts/processingmodecheck.shlib" # Check processing mode requirements +source "$HCPPIPEDIR/global/scripts/parallel.shlib" "$@" + +#description to use in usage - syntax of parameters is now explained automatically +opts_SetScriptDescription "Runs the Longitudinal FreeSurfer HCP pipeline" + +# Show usage information +opts_AddMandatory '--subject' 'SubjectID' 'subject' "Subject ID (required) Used with --path input to create full path to root directory for all sessions" +opts_AddMandatory '--path' 'StudyFolder' 'path' "Path to subject's data folder (required) Used with --subject input to create full path to root directory for all outputs generated as path/subject)" +opts_AddMandatory '--sessions' 'Sessions' 'sessions' "@ separated list of session (timepoint, visit) IDs (required). Also used to generate full path to each longitudinal session directory" +opts_AddMandatory '--longitudinal-template' 'TemplateID' 'template-id' "Longitudinal template label" +opts_AddOptional '--use-T2w' 'UseT2wString' 'boolean' "Set to 0/false/no for no T2-weighted processing [1]" "1" +opts_AddOptional '--seed' 'recon_all_seed' "Seed" "recon-all seed value" + +#parallel mode options. 
+opts_AddOptional '--parallel-mode' 'parallel_mode' 'string' "parallel mode, one of FSLSUB, BUILTIN, NONE [NONE]" 'NONE' +opts_AddOptional '--fslsub-queue' 'fslsub_queue' 'name' "FSLSUB queue name" "" +opts_AddOptional '--max-jobs' 'max_jobs' 'number' "Maximum number of concurrent processes in BUILTIN mode. Set to -1 to auto-detect [-1]." -1 +opts_AddOptional '--start-stage' 'StartStage' 'stage_id' "Starting stage. One of TEMPLATE, TIMEPOINTS [TEMPLATE]." 'TEMPLATE' +opts_AddOptional '--end-stage' 'EndStage' 'stage_id' "End stage. Full pipeline includes 0) TEMPLATE, 1) TIMEPOINTS stages. One of TEMPLATE, TIMEPOINTS [TIMEPOINTS]" 'TIMEPOINTS' +opts_AddOptional '--logdir' 'LogDir' 'string' "directory where logs will be written (default: current directory)" "" + +opts_ParseArguments "$@" + +if ((pipedirguessed)) +then + log_Err_Abort "HCPPIPEDIR is not set, you must first source your edited copy of Examples/Scripts/SetUpHCPPipeline.sh" +fi + +if [ -n "$LogDir" ]; then + mkdir -p "$LogDir" + if [ -d "$LogDir" ]; then + par_set_log_dir "$LogDir" + else + log_Err_Abort "Directory specified for logs $LogDir does not exist and cannot be created." + fi +fi + +#display the parsed/default values +opts_ShowValues + +# Show HCP pipelines version +log_Msg "Showing HCP Pipelines version" +${HCPPIPEDIR}/show_version + +# ------------------------------------------------------------------------------ +# Verify required environment variables are set and log value +# ------------------------------------------------------------------------------ + +log_Check_Env_Var HCPPIPEDIR +log_Check_Env_Var FREESURFER_HOME + +# Platform info +log_Msg "Platform Information Follows: " +uname -a + + + +# Configure custom tools +# - Determine if the PATH is configured so that the custom FreeSurfer v6 tools used by this script +# (the recon-all.v6.hires script and other scripts called by the recon-all.v6.hires script) +# are found on the PATH. 
If all such custom scripts are found, then we do nothing here. +# If any one of them is not found on the PATH, then we change the PATH so that the +# versions of these scripts found in ${HCPPIPEDIR}/FreeSurfer/custom are used. +configure_custom_tools() +{ + local which_recon_all + local which_conf2hires + local which_longmc + + which_recon_all=$(which recon-all.v6.hires || true) + which_conf2hires=$(which conf2hires || true) + which_longmc=$(which longmc || true) + + if [[ "${which_recon_all}" = "" || "${which_conf2hires}" == "" || "${which_longmc}" = "" ]] ; then + export PATH="${HCPPIPEDIR}/FreeSurfer/custom:${PATH}" + log_Warn "We were not able to locate one of the following required tools:" + log_Warn "recon-all.v6.hires, conf2hires, or longmc" + log_Warn "" + log_Warn "To be able to run this script using the standard versions of these tools," + log_Warn "we added ${HCPPIPEDIR}/FreeSurfer/custom to the beginning of the PATH." + log_Warn "" + log_Warn "If you intended to use some other version of these tools, please configure" + log_Warn "your PATH before invoking this script, such that the tools you intended to" + log_Warn "use can be found on the PATH." 
+ log_Warn "" + log_Warn "PATH set to: ${PATH}" + fi +} + +# Show tool versions +show_tool_versions() +{ + # Show recon-all version + log_Msg "Showing recon-all.v6.hires version" + local which_recon_all=$(which recon-all.v6.hires) + log_Msg ${which_recon_all} + recon-all.v6.hires -version + + # Show tkregister version + log_Msg "Showing tkregister version" + which tkregister + tkregister -version + + # Show mri_concatenate_lta version + log_Msg "Showing mri_concatenate_lta version" + which mri_concatenate_lta + mri_concatenate_lta -version + + # Show mri_surf2surf version + log_Msg "Showing mri_surf2surf version" + which mri_surf2surf + mri_surf2surf -version + + # Show fslmaths location + log_Msg "Showing fslmaths location" + which fslmaths +} + +validate_freesurfer_version() +{ + if [ -z "${FREESURFER_HOME}" ]; then + log_Err_Abort "FREESURFER_HOME must be set" + fi + + freesurfer_version_file="${FREESURFER_HOME}/build-stamp.txt" + + if [ -f "${freesurfer_version_file}" ]; then + freesurfer_version_string=$(cat "${freesurfer_version_file}") + log_Msg "INFO: Determined that FreeSurfer full version string is: ${freesurfer_version_string}" + else + log_Err_Abort "Cannot tell which version of FreeSurfer you are using." 
+ fi + + # strip out extraneous stuff from FreeSurfer version string + freesurfer_version_string_array=(${freesurfer_version_string//-/ }) + freesurfer_version=${freesurfer_version_string_array[5]} + freesurfer_version=${freesurfer_version#v} # strip leading "v" + + log_Msg "INFO: Determined that FreeSurfer version is: ${freesurfer_version}" + + # break FreeSurfer version into components + # primary, secondary, and tertiary + # version X.Y.Z ==> X primary, Y secondary, Z tertiary + freesurfer_version_array=(${freesurfer_version//./ }) + + freesurfer_primary_version="${freesurfer_version_array[0]}" + freesurfer_primary_version=${freesurfer_primary_version//[!0-9]/} + + freesurfer_secondary_version="${freesurfer_version_array[1]}" + freesurfer_secondary_version=${freesurfer_secondary_version//[!0-9]/} + + freesurfer_tertiary_version="${freesurfer_version_array[2]}" + freesurfer_tertiary_version=${freesurfer_tertiary_version//[!0-9]/} + + if [[ $(( ${freesurfer_primary_version} )) -lt 6 ]]; then + # e.g. 4.y.z, 5.y.z + log_Err_Abort "FreeSurfer version 6.0.0 or greater is required. (Use FreeSurferPipeline-v5.3.0-HCP.sh if you want to continue using FreeSurfer 5.3)" + fi +} + +# Configure the use of FreeSurfer v6 custom tools +configure_custom_tools + +# Show tool versions +show_tool_versions + +# Validate version of FreeSurfer in use +validate_freesurfer_version + +UseT2w=$(opts_StringToBool "$UseT2wString") + +#processing code goes here +echo "parallel mode: $parallel_mode" +if [ "$parallel_mode" != "FSLSUB" -a "$parallel_mode" != "NONE" -a "$parallel_mode" != "BUILTIN" ]; then + log_Err_Abort "Unknown parallel mode $parallel_mode. 
Please specify one of FSLSUB, BUILTIN, NONE" +fi + +start_stage=0 +if [ -n "$StartStage" ]; then + case $StartStage in + TEMPLATE) start_stage=0 ;; + TIMEPOINTS) start_stage=1 ;; + *) log_Err_Abort "Unrecognized option for start-stage: $StartStage" + esac +fi +end_stage=1 +if [ -n "$EndStage" ]; then + case $EndStage in + TEMPLATE) end_stage=0 ;; + TIMEPOINTS) end_stage=1 ;; + *) log_Err_Abort "Unrecognized option for end-stage: $EndStage" + esac +fi + +# ---------------------------------------------------------------------- +log_Msg "Starting main functionality" +# ---------------------------------------------------------------------- + +# ---------------------------------------------------------------------- +log_Msg "Preparing the folder structure" +# ---------------------------------------------------------------------- +Sessions=`echo ${Sessions} | sed 's/@/ /g'` + +extra_reconall_args_base="" +extra_reconall_args_long="" + +Session0=( $Sessions ); Session0=${Session0[0]} +if (( UseT2w )); then + extra_reconall_args_base="-T2pial -T2 ${StudyFolder}/${Session0}/T1w/T2w_acpc_dc_restore.nii.gz" + extra_reconall_args_long="-T2pial" +fi + +log_Msg "extra_reconall_args_base: $extra_reconall_args_base" +log_Msg "extra_reconall_args_long: $extra_reconall_args_long" +log_Msg "After delimiter substitution, Sessions: ${Sessions}" + +LongDIR="${StudyFolder}/${SubjectID}.long.${TemplateID}/T1w" +mkdir -p "${LongDIR}" + +if (( start_stage < 1 )); then + + #prepare session folder structure + for Session in ${Sessions} ; do + Source="${StudyFolder}/${Session}/T1w/${Session}" + Target="${LongDIR}/${Session}" + log_Msg "Creating a link: ${Source} => ${Target}" + ln -sf ${Source} ${Target} + done + + # ---------------------------------------------------------------------- + log_Msg "Creating the base template: ${TemplateID}" + # ---------------------------------------------------------------------- + + recon_all_cmd="recon-all.v6.hires" + recon_all_cmd+=" -sd ${LongDIR}" + 
recon_all_cmd+=" -base ${TemplateID}" + for Session in ${Sessions} ; do + recon_all_cmd+=" -tp ${Session}" + done + recon_all_cmd+=" -all" + + if [ ! -z "${recon_all_seed}" ]; then + recon_all_cmd+=" -norandomness -rng-seed ${recon_all_seed}" + fi + + #--------------------------------------------------------------------------------------- + log_Msg "Running the recon-all to generate common template" + #--------------------------------------------------------------------------------------- + + #recon_all_cmd+=(${extra_reconall_args_base[@]+"${extra_reconall_args_base[@]}"}) + recon_all_cmd+=" $extra_reconall_args_base " + echo "recon_all_cmd:" + echo ${recon_all_cmd} + log_Msg "...recon_all_cmd: ${recon_all_cmd}" + + par_add_job_to_stage $parallel_mode "$fslsub_queue" ${recon_all_cmd} + par_finalize_stage $parallel_mode $max_jobs +fi + +if (( end_stage > 0 )); then + # ---------------------------------------------------------------------- + log_Msg "Running the longitudinal recon-all on each timepoint" + # ---------------------------------------------------------------------- + for Session in ${Sessions} ; do + log_Msg "Running longitudinal recon all for session: ${Session}" + recon_all_cmd="recon-all.v6.hires" + recon_all_cmd+=" -sd ${LongDIR}" + recon_all_cmd+=" -long ${Session} ${TemplateID} -all" + + recon_all_cmd+=" $extra_reconall_args_long " + T2w=${StudyFolder}/${Session}/T1w/T2w_acpc_dc_restore.nii.gz + + if [ -f "$T2w" ]; then + recon_all_cmd+=" -T2 $T2w" + else + log_Msg "WARNING: No T2-weighted image $T2w, T2-weighted processing will not run." 
+ fi + + T1w=${StudyFolder}/${Session}/T1w/T1w_acpc_dc_restore.nii.gz + emregmask=${StudyFolder}/${Session}/T1w/T1w_acpc_dc_restore_brain.nii.gz + + if [ -f "$emregmask" ]; then + recon_all_cmd+=" -emregmask $emregmask" + else + log_Msg "Required $emregmask file is missing" + exit 1 + fi + + log_Msg "...recon_all_cmd: ${recon_all_cmd}" + echo ${recon_all_cmd} + par_add_job_to_stage $parallel_mode "$fslsub_queue" ${recon_all_cmd} + done + + #Finalize jobs in this stage. + par_finalize_stage $parallel_mode $max_jobs +fi + +# ---------------------------------------------------------------------- +log_Msg "Completed main functionality" +# ---------------------------------------------------------------------- diff --git a/PostFreeSurfer/PostFreeSurferPipeline.sh b/PostFreeSurfer/PostFreeSurferPipeline.sh index 84bc6255c..963593920 100755 --- a/PostFreeSurfer/PostFreeSurferPipeline.sh +++ b/PostFreeSurfer/PostFreeSurferPipeline.sh @@ -1,5 +1,29 @@ #!/bin/bash set -eu +# +# # PostFreeSurferPipeline.sh +# +# ## Copyright Notice +# +# Copyright (C) 2015-2024 The Human Connectome Project/Connectome Coordination Facility +# +# * Washington University in St. Louis +# * University of Minnesota +# * Oxford University +# +# ## Author(s) +# +# * Matthew F. Glasser, Department of Anatomy and Neurobiology, Washington University in St. Louis +# * (longitudinal mode): Mikhail Milchenko, Department of Radiology, Washington University in St. Louis +# +# ## Product +# +# [Human Connectome Project](http://www.humanconnectome.org) (HCP) Pipelines +# +# ## License +# +# See the [LICENSE](https://github.com/Washington-University/HCPPipelines/blob/master/LICENSE.md) file +# # Requirements for this script # installed versions of: FSL @@ -54,10 +78,10 @@ defaultSigma=$(echo "sqrt(200)" | bc -l) #TSC:should --path or --study-folder be the flag displayed by the usage?
opts_AddMandatory '--study-folder' 'StudyFolder' 'path' "folder containing all subjects" "--path" -opts_AddMandatory '--subject' 'Subject' 'subject ID' "" -opts_AddMandatory '--surfatlasdir' 'SurfaceAtlasDIR' 'path' "/global/templates/standard_mesh_atlases or equivalent" +opts_AddMandatory '--session' 'Session' 'session ID' "session (timepoint, visit) label." "--subject" #legacy --subject option +opts_AddMandatory '--surfatlasdir' 'SurfaceAtlasDIR' 'path' "/global/templates/standard_mesh_atlases or equivalent" opts_AddMandatory '--grayordinatesres' 'GrayordinatesResolutions' 'number' "usually '2', resolution of grayordinates to use" -opts_AddMandatory '--grayordinatesdir' 'GrayordinatesSpaceDIR' 'path' "/global/templates/_Greyordinates or equivalent, for the given --grayordinatesres" +opts_AddMandatory '--grayordinatesdir' 'GrayordinatesSpaceDIR' 'path' "/global/templates/_Greyordinates or equivalent, for the given --grayordinatesres" opts_AddMandatory '--hiresmesh' 'HighResMesh' 'number' "usually '164', the standard mesh for T1w-resolution data data" opts_AddMandatory '--lowresmesh' 'LowResMeshes' 'number' "usually '32', the standard mesh for fMRI data" opts_AddMandatory '--subcortgraylabels' 'SubcorticalGrayLabels' 'file' "location of FreeSurferSubcorticalLabelTableLut.txt" @@ -69,7 +93,31 @@ opts_AddOptional '--regname' 'RegName' 'name' "surface registration to use, defa opts_AddOptional '--inflatescale' 'InflateExtraScale' 'number' "surface inflation scaling factor to deal with different resolutions, default '1'" '1' opts_AddOptional '--processing-mode' 'ProcessingMode' 'HCPStyleData|LegacyStyleData' "disable some HCP preprocessing requirements to allow processing of data that doesn't meet HCP acquisition guidelines - don't use this if you don't need to" 'HCPStyleData' opts_AddOptional '--structural-qc' 'QCMode' 'yes|no|only' "whether to run structural QC, default 'yes'" 'yes' -opts_AddOptional '--use-ind-mean' 'UseIndMean' 'YES or NO' "whether to use the mean 
of the subject's myelin map as reference map's myelin map mean, defaults to 'YES'" 'YES' +opts_AddOptional '--use-ind-mean' 'UseIndMean' 'YES or NO' "whether to use the mean of the session's myelin map as reference map's myelin map mean, defaults to 'YES'" 'YES' + +opts_AddOptional '--subject-long' 'Subject' 'subject ID' "subject label (used in longitudinal mode), may be different from Session" +opts_AddOptional '--longitudinal-mode' 'LongitudinalMode' 'NONE|TIMEPOINT_STAGE1|TIMEPOINT_STAGE2|TEMPLATE' "longitudinal processing mode +Longitudinal modes: +NONE: cross-sectional processing (default) +TIMEPOINT_STAGE[1|2]: timepoint processing stage 1 or 2 +TEMPLATE: template processing (must be run after all timepoints) + +There are some specific conventions on timepoint and template processing directories: +In cross-sectional legacy mode, Subject label and Session (timepoint) labels were previously treated as being the same by HCP in the original design. +Currently both in cross-sectional and longitudinal modes, Session is a study within a subject and, since there may be multiple timepoints (sessions) per subject, +they must be labeled differently. +Timepoint (Session) label may be arbitrary but conventionally, should contain subject as part of name. E.g. if for subject +HCA6002236 there are two timepoints, thay may be labeled HCA6002236_V1 and HCA6002236_V2. +For crossectional (initial) processing, these are supplied to PreFreesurferPipeline. +For the FreesurferPipeline-long, these are also supplied as timepoint labels, as well as chosen template label, e.g. HCA6002236_V1_V2. +Then the same are supplied to PostFreeSurferPipelineLongPrep and PostFreesurferPipeline in longitudinal mode. +internally, longitudinal timepoint directories will be named as: .long.