From 4399664be1c7e79daba0a335ee8a83926245ba34 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:05:30 -0400 Subject: [PATCH 01/20] Updates to mrtpipelines.smk - Add associated citation in comments - Renamed rules to match mrtrix function names - Updated mrtrix to latest release version - Updated pybids_inputs for diffusion - Updated names to more similarly match what was used in a different workflow (https://github.com/kaitj/mrtproc.git) - Updated notes to indicate whether TODO was for v0.1.x or v0.2.x - Define variables at top of .smk file to avoid grabbing from config for each rule - Fix remaining group directives that were missed --- dbsc/config/snakebids.yml | 87 ++-- dbsc/run.py | 6 +- dbsc/workflow/Snakefile | 9 +- dbsc/workflow/rules/mrtpipelines.smk | 704 ++++++++++++++------------- 4 files changed, 410 insertions(+), 396 deletions(-) diff --git a/dbsc/config/snakebids.yml b/dbsc/config/snakebids.yml index 240093e5..458f7cbb 100644 --- a/dbsc/config/snakebids.yml +++ b/dbsc/config/snakebids.yml @@ -3,7 +3,7 @@ output_dir: "/path/to/output/dir" # don't use "." snakebids_dir: "." # DO NOT MODIFY - used to refer to files need by workflow running with CLI -debug: False # Enable printing of debug statements during parsing -- disable if generating dag visualizations +debug: False # Enable printing of debug statements during parsing -- disable if generating dag visualizations derivatives: False # Search in bids/derivatives if True; can also be path(s) to derivatives datasets @@ -23,28 +23,37 @@ targets_by_analysis_level: # indexed by name of input dictionary for each input is passed directly to pybids get() # https://bids-standard.github.io/pybids/generated/bids.layout.BIDSLayout.html#bids.layout.BIDSLayout.get pybids_inputs: - T1w: + dwi: filters: - suffix: "T1w" - extension: ".nii.gz" - datatype: "anat" - space: null + suffix: dwi + extension: .nii.gz + datatype: dwi + space: T1w + wildcards: + - subject + - session + mask: + filters: + suffix: mask + extension: .nii.gz + datatype: dwi + space: T1w wildcards: - subject - session # Configuration for the command-line parameters to make available # passed on the argparse add_argument() -parse_args: +parse_args: -#--- core BIDS-app options --- (do not modify below) ---# +#--- core BIDS-app options --- (do not modify below) ---# bids_dir: - help: The directory with the input dataset formatted according to the + help: The directory with the input dataset formatted according to the BIDS standard. output_dir: - help: The directory where the output files should be stored. If you are - running group level analysis, this folder should be prepopulated + help: The directory where the output files should be stored. If you are + running group level analysis, this folder should be prepopulated with the results of the participant level analysis. analysis_level: @@ -62,57 +71,57 @@ parse_args: --exclude_participant_label: help: The label(s) of the participant(s) that should be excluded. The label corresponds to sub- from the BIDS spec (so it does - not include "sub-"). If this parameter is not provided, all subjects + not include "sub-"). If this parameter is not provided, all subjects will be analyzed. Multiple participants can be specified with a space sepearated list. 
nargs: "+" -#-----------------------------------------------------# +#-----------------------------------------------------# #--- additional BIDS-app options --- (add in below) --# --shells: - help: '(Mrtrix3) specify one or more b-values to use during processing, as - a comma-separated list of the desired approximate b-values (b-values - are clustered to allow for small deviations). Note that some - commands are incompatible with multiple b-values, and will report an - error if more than one b-value is provided. + help: '(Mrtrix3) specify one or more b-values to use during processing, as + a comma-separated list of the desired approximate b-values (b-values + are clustered to allow for small deviations). Note that some + commands are incompatible with multiple b-values, and will report an + error if more than one b-value is provided. WARNING: note that, even though b=0 volumes are never referred to as - shells in the literature, they still have to be explicitly included - in the list of b-values as provided to the -shell option! Several - algorithms which include the b=0 volumes in their computations may + shells in the literature, they still have to be explicitly included + in the list of b-values as provided to the -shell option! Several + algorithms which include the b=0 volumes in their computations may otherwise return an undesired reuslt.' nargs: "+" --lmax: - help: '(Mrtrix3) the maximum spherical harmonic order for the output - FOD(s). For algorithms with multiple outputs, this should be - provided as a comma-sperated list of integers, one for each output - image; for single-output algorithms, only a single integer - should be provided. If omitted, commands will use the lmax of the - corresponding response function (i.e. based on its number of + help: '(Mrtrix3) the maximum spherical harmonic order for the output + FOD(s). For algorithms with multiple outputs, this should be + provided as a comma-sperated list of integers, one for each output + image; for single-output algorithms, only a single integer + should be provided. If omitted, commands will use the lmax of the + corresponding response function (i.e. based on its number of coefficients), up to a maximum of 8.' nargs: "+" --step: help: '(Mrtrix3) set the step size of the algorithm in mm. Should set to - 4 steps per voxel (e.g. 1/4 * voxel size) in order to sample more + 4 steps per voxel (e.g. 1/4 * voxel size) in order to sample more frequently in compact region. (default: 0.35mm)' default: 0.35 --sl_count: - help: '(Mrtrix3) set the desired number of streamlines to be selected, - after all selection criteria has been applied (i.e. - inclusion/exclusion ROIS, min/max length, etc. Streamlines will be - seeded until this number of streamlines have been selected, or the - maximum allowed number of seeds has been exceeded. Set to zero to + help: '(Mrtrix3) set the desired number of streamlines to be selected, + after all selection criteria has been applied (i.e. + inclusion/exclusion ROIS, min/max length, etc. Streamlines will be + seeded until this number of streamlines have been selected, or the + maximum allowed number of seeds has been exceeded. Set to zero to disable, which will result in streamlines being seeded until - the maximum allowed number of seeds has been reached. + the maximum allowed number of seeds has been reached. (default: 20,000,000 streamlines)' default: 20000000 --radial_search: - help: '(Mrtrix3) perform a radial search from each streamline endpoint to - locate the nearest node. 
Argument is the maximum radius in mm; if no - node is found within this radius, the streamline endpoint is not + help: '(Mrtrix3) perform a radial search from each streamline endpoint to + locate the nearest node. Argument is the maximum radius in mm; if no + node is found within this radius, the streamline endpoint is not assigned to any node. (default: 1.5mm)' default: 1.5 #-----------------------------------------------------# @@ -140,6 +149,6 @@ freesurfer: singularity: freesurfer: "docker://pwighton/freesurfer:7.2.0" neuroglia-core: "docker://khanlab/neuroglia-core:latest" - mrtpipelines: "docker://kaitj/mrtpipelines:0.1.6" + mrtrix: "docker://brainlife/mrtrix:3.0.3" -fs_license: /path/to/fs/license \ No newline at end of file +fs_license: /path/to/fs/license diff --git a/dbsc/run.py b/dbsc/run.py index ebaced76..7d23de77 100644 --- a/dbsc/run.py +++ b/dbsc/run.py @@ -1,9 +1,11 @@ import os + from snakebids.app import SnakeBidsApp + def get_parser(): """Exposes parser for sphinx doc generation, cwd is the docs dir""" - app = SnakeBidsApp('../surftexture',skip_parse_args=True) + app = SnakeBidsApp("../dbsc", skip_parse_args=True) return app.parser @@ -13,4 +15,4 @@ def main(): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/dbsc/workflow/Snakefile b/dbsc/workflow/Snakefile index 542d624c..dbeef867 100644 --- a/dbsc/workflow/Snakefile +++ b/dbsc/workflow/Snakefile @@ -12,15 +12,16 @@ config.update( pybids_inputs=config["pybids_inputs"], derivatives=config["derivatives"], participant_label=config["participant_label"], + exclude_participant_label=config["exclude_partiicpant_label"], ) ) # this adds constraints to the bids naming -wildcard_constraints: **snakebids.get_wildcard_constraints(\ - config["pybids_inputs"]\ -) +wildcard_constraints: + **snakebids.get_wildcard_constraints(config["pybids_inputs"]) + #---- end snakebids boilerplate ------------------------------------------------ # Rules include: "rules/freesurfer.smk" -include: "rules/zona_bb_subcortex" \ No newline at end of file +include: "rules/zona_bb_subcortex" diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 7223bc80..4661daa0 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -3,358 +3,365 @@ from os.path import join include: 'zona_bb_subcortex.smk' include: 'freesurfer.smk' -# DWI Processing -rule nii_to_mif: +# Directories +mrtrix_dir = join(config["root"], "mrtrix") + +# Paramaters +responsemean_flag = config.get('responsemean_dir', None) +shells = config.get('shells', '') +lmaxes = config.get('lmax', '') + +# Mrtrix3 citation (additional citations are included per rule as necessary): +# Tournier, J.-D.; Smith, R. E.; Raffelt, D.; Tabbara, R.; Dhollander, T.; Pietsch, M.; Christiaens, D.; Jeurissen, B.; Yeh, C.-H. & Connelly, A. MRtrix3: A fast, flexible and open software framework for medical image processing and visualisation. 
NeuroImage, 2019, 202, 116137 + + +#------------ MRTRIX PREPROC BEGIN ----------# +rule nii2mif: input: - dwi=bids( - root=join(config["bids_dir"], 'derivatives/prepdwi') - datatype='dwi', - suffix='dwi.nii.gz', - space='T1w', - **config['subj_wildcards'], - ) - bval=bids( - root=join(config["bids_dir"], 'derivatives/prepdwi') - datatype='dwi', - suffix='dwi.bval', - space='T1w', - **config['subj_wildcards'], - ) - bvec=bids( - root=join(config["bids_dir"], 'derivatives/prepdwi') - datatype='dwi', - suffix='dwi.bvec', - space='T1w', - **config['subj_wildcards'], - ) - mask=bids( - root=join(config["bids_dir"], 'derivatives/prepdwi') - datatype='dwi', - suffix='brainmask.nii.gz', - space='T1w', - **config['subj_wildcards'], - ) - params: - threads=workflow.cores, + dwi=config["input_path"]["dwi"], + bval=lambda wildcards: re.sub(".nii.gz", ".bval", config["input_path"]["dwi"])), + bvec=lambda wildcards: re.sub(".nii.gz", ".bvec", config["input_path"]["dwi"])), + mask=config["input_path"]["mask"] output: dwi=bids( - root=join(config["output_dir"], 'mrtpipelines'), + root=mrtrix_dir, datatype='dwi', suffix='dwi.mif', - space='T1w', **config['subj_wildcards'] - ) + ), mask=bids( - root=join(config['output_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='dwi', suffix='brainmask.mif', - space='T1w', **config['subj_wildcards'] ) + threads: workflow.cores group: "subject_1" container: - config['singularity']['mrtpipelines'] - shell: - 'mrconvert -nthreads {threads} -fslgrad {input.bvecs} {input.bvals} {input.dwi} {output.dwi} ' + config['singularity']['mrtrix'] + shell: + 'mrconvert -nthreads {threads} -fslgrad {input.bvecs} {input.bvals} {input.dwi} {output.dwi} && ' 'mrconvert -nthreads {threads} {input.mask} {output.mask}' -rule estimate_response: - """Estimate response functions using Dhollander algorithm """ +rule dwi2response: + """ + Estimate response functions using Dhollander algorithm + + Dhollander, T.; Mito, R.; Raffelt, D. & Connelly, A. Improved white matter response function estimation for 3-tissue constrained spherical deconvolution. 
Proc Intl Soc Mag Reson Med, 2019, 555 + """ input: - dwi=rules.nii_to_mif.output.dwi, - mask=rules.nii_to_mif.output.mask + dwi=rules.nii2mif.output.dwi, + mask=rules.nii2mif.output.mask params: - threads=workflow.cores, + shells=shells, + lmax=lmaxes output: - sfwm=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='dwi', - desc='sfwm', + wm_rf=bids( + root=mrtrix_dir, + datatype='response', + desc='wm', suffix='response.txt', **config['subj_wildcards'], - ) - gm=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='dwi', + ), + gm_rf=bids( + root=mrtrix_dir, + datatype='response', desc='gm' suffix='response.txt', **config['subj_wildcards'], - ) - csf=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='dwi', + ), + csf_rf=bids( + root=mrtrix_dir, + datatype='response, desc='csf' suffix='response.txt', **config['subj_wildcards'], ) + threads: workflow.cores group: "subject_1" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'dwi2response dhollander {input.dwi} {output.sfwm} {output.gm} {output.csf} -nthreads {threads}' + 'if [ {params.shell} == "" ] && [ {params.lmax} == ""]; then' + ' dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask}' + 'else ' + ' dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} -shells {params.shells} -lmax {params.lmax} ' + 'fi' -# TODO: Add check for pre-computed group response function -rule avg_response: +rule responsemean: """Compute average response function""" input: - sfwm=expand(rules.estimate_response.output.sfwm, subject=config['subjects']), - gm=expand(rules.estimate_response.output.gm, subject=config['subjects']), - csf=expand(rules.estimate_response.output.csf, subject=config['subjects']) + wm_rf=expand( + rules.dwi2response.output.wm_rf, subject=config['subjects'] + ), + gm_rf=expand( + rules.dwi2response.output.gm_rf, subject=config['subjects'] + ), + csf_rf=expand( + rules.dwi2response.output.csf_rf, subject=config['subjects'] + ) output: - avg_sfwm=bids( - root=join(config['output_dir'], 'mrtpipelines/avg'), - datatype='dwi', - desc='sfwm', + wm_avg_rf=bids( + root=join(mrtrix_dir, 'avg'), + datatype='response', + desc='wm', suffix='response.txt', - ) - avg_gm=bids( - root=join(config['output_dir'], 'mrtpipelines/avg'), - datatype='dwi', + ), + gm_avg_rf=bids( + root=join(mrtrix_dir, 'avg'), + datatype='response' desc='gm', suffix='response.txt', - ) - avg_csf=bids( - root=join(config['output_dir'], 'mrtpipelines/avg'), - datatype='dwi', + ), + csf_avg_rf=bids( + root=join(mrtrix_dir, 'avg'), + datatype='response' desc='csf', suffix='response.txt', ) + threads: workflow.cores group: "group" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'average_response {input.sfwm} {output.avg_sfwm} {input.gm} {output.avg_gm} {input.csf} {output.csf}' + 'responsemean {input.wm_rf} {output.wm_avg_rg} -nthreads {threads} &&' + 'responsemean {input.gm_rf} {output.gm_avg_rg} -nthreads {threads} &&' + 'responsemean {input.csf_rf} {output.csf_avg_rg} -nthreads {threads}' -rule compute_fod: +rule dwi2fod: + """Jeurissen, B; Tournier, J-D; Dhollander, T; Connelly, A & Sijbers, J. Multi-tissue constrained spherical deconvolution for improved analysis of multi-shell diffusion MRI data. 
NeuroImage, 2014, 103, 411-426""" input: - avg_sfwm=rules.avg_response.output.avg_sfwm, - avg_gm=rules.avg_response.output.avg_gm, - avg_csf=rules.avg_response.output.avg_csf, - dwi=rules.nii_to_mif.output.dwi, - mask=rules.nii_to_mif.output.mask, + dwi=rules.nii2mif.output.dwi, + mask=rules.nii2mif.output.mask, + wm_rf=join(config['responsemean_dir'], 'desc-wm_response.txt') if responsemean_flag else rules.responsemean.output.wm_avg_rf, + gm_rf=join(config['responsemean_dir'], 'desc-gm_response.txt') if responsemean_flag else rules.responsemean.output.gm_avg_rf, + csf_rf=join(config['responsemean_dir'], 'desc-csf_response.txt') if responsemean_flag else rules.responsemean.output.csf_avg_rf params: - shell=config.get('shells', ''), - lmax=config.get('lmax', ''), - threads=workflow.threads, + shell=shells, output: wm_fod=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='fod', + root=mrtrix_dir, + datatype='response', model='csd', desc='wm', - suffix='fod.mif' + suffix='fod.mif', + **config['subj_wildcards'], ), gm_fod=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='fod', + root=mrtrix_dir, + datatype='response', model='csd', desc='gm', - suffix='fod.mif' + suffix='fod.mif', + **config['subj_wildcards'], ), csf_fod=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='fod', + root=mrtrix_dir, + datatype='response', model='csd', desc='csf', - suffix='fod.mif' + suffix='fod.mif', + **config['subj_wildcards'], ), + threads: workflow.cores group: "subject_2" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: 'if [ {params.shell} == "" ] && [ {params.lmax} == "" ]; then' - ' dwi2fod -nthreads {params.threads} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.avg_csf} ' + ' dwi2fod -nthreads {threads} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.avg_csf} ' 'else ' - ' dwi2fod -nthreads {params.threads} -shell {params.shell} -lmax {params.lmax} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} ' + ' dwi2fod -nthreads {threads} -shell {params.shell} -lmax {params.lmax} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} ' 'fi' -rule normalise_fod: +rule mtnormalise: + """ + Raffelt, D.; Dhollander, T.; Tournier, J.-D.; Tabbara, R.; Smith, R. E.; Pierre, E. & Connelly, A. Bias Field Correction and Intensity Normalisation for Quantitative Analysis of Apparent Fibre Density. In Proc. ISMRM, 2017, 26, 3541 + + Dhollander, T.; Tabbara, R.; Rosnarho-Tornstrand, J.; Tournier, J.-D.; Raffelt, D. & Connelly, A. Multi-tissue log-domain intensity and inhomogeneity normalisation for quantitative apparent fibre density. In Proc. 
ISMRM, 2021, 29, 2472 + """" input: - wm_fod=rules.compute_fod.output.wm_fod, - gm_fod=rules.compute_fod.output.gm_fod, - csf_fod=rules.compute_fod.output.csf_fod, - mask=rules.nii_to_mif.output.mask, + wm_fod=rules.dwi2fod.output.wm_fod, + gm_fod=rules.dwi2fod.output.gm_fod, + csf_fod=rules.dwi2fod.output.csf_fod, + mask=rules.nii2mif.output.mask, params: threads=workflow.threads, output: wm_fod=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='fod', + root=mrtrix_dir, + datatype='response', model='csd', - desc='wm', - suffix='fodnorm.mif', + desc='normalized', + suffix='wm_fod.mif', **config['subj_wildcards'], ), gm_fod=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='fod', + root=mrtrix_dir, + datatype='response', model='csd', - desc='gm', - suffix='fodnorm.mif', + desc='normalized', + suffix='gm_fod.mif', **config['subj_wildcards'], ), csf_fod=bids( - root=join(config['output_dir'], 'mrtpipelines'), - datatype='fod', + root=mrtrix_dir, + datatype='response', model='csd', - desc='csf', - suffix='fodnorm.mif', + desc='normalized', + suffix='csf_fod.mif', **config['subj_wildcards'], ), + threads: workflow.cores group: "subject_2" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'mtnormalise -nthreads {params.threads} -mask {input.mask} {input.wm_fod} {output.wm_fod} {input.gm_fod} {output.gm_fod} {input.csf_fod} {output.csf_fod}' + 'mtnormalise -nthreads {threads} -mask {input.mask} {input.wm_fod} {output.wm_fod} {input.gm_fod} {output.gm_fod} {input.csf_fod} {output.csf_fod}' # DTI (Tensor) Processing -rule dwi_normalise: +rule dwinormalise: input: - dwi=rules.nii_to_mif.output.dwi, - mask=rules.nii_to_mif.output.mask, + dwi=rules.nii2mif.output.dwi, + mask=rules.nii2mif.output.mask, params: threads=workflow.threads, output: dwi=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='dwi', - space='T1w', - desc='norm', + desc='normalized', suffix='dwi.mif', **config['subj_wildcards'], ) container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'dwinormalise -nthreads {params.threads} {input.dwi} {input.mask} {output.dwi}' - group: participant1 + 'dwinormalise individual -nthreads {threads} {input.dwi} {input.mask} {output.dwi}' + group: "subject_1" -rule compute_tensor: +rule dwi2tensor: input: - dwi=rules.dwi_noramlise.dwi, - mask=rules.nii_to_mif.output.mask, + dwi=rules.dwinoramlise.dwi, + mask=rules.nii2mif.output.mask, params: threads=workflow.threads, output: dti=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='dti', - space='T1w', suffix='dti.mif', **config['subj_wildcards'], ), fa=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='dti', - space='T1w', model='dti', suffix='fa.mif', **config['subj_wildcards'], ), ad=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtix_dir, datatype='dti', - space='T1w', model='dti', suffix='ad.mif', **config['subj_wildcards'], ), rd=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='dti', - space='T1w', model='dti', suffix='rd.mif', **config['subj_wildcards'], ), md=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='dti', - space='T1w', model='dti', suffix='fa.mif', **config['subj_wildcards'], ) group: "subject_1" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'dwi2tensor -nthreads {params.threads} -mask {input.mask} 
{input.dwi} {output.dti}' - 'tensor2metric -nthreads {params.threads} -mask {input.mask} {output.dti} -fa {output.fa} -ad {output.ad} -rd {output.rd} -adc {output.md}' - + 'dwi2tensor -nthreads {threads} -mask {input.mask} {input.dwi} {output.dti} && ' + 'tensor2metric -nthreads {threads} -mask {input.mask} {output.dti} -fa {output.fa} -ad {output.ad} -rd {output.rd} -adc {output.md}' +#--------------- MRTRIX PREPROC END --------------# -# Tractography processing -rule gen_tractography: +#------------ MRTRIX TRACTOGRAPHY BEGIN ----------# +# TODO (v0.1): CHECK TO MAKE SURE RULES ARE IMPORTED CORRECTLY FROM OTHER SMK FILES +rule tckgen: + # Tournier, J.-D.; Calamante, F. & Connelly, A. Improved probabilistic streamlines tractography by 2nd order integration over fibre orientation distributions. Proceedings of the International Society for Magnetic Resonance in Medicine, 2010, 1670 input: - fod=rules.normalise_fod.output.wm_fod, + fod=rules.mtnormalise.output.wm_fod, + mask=rules.nii2mif.output.mask, cortical_ribbon=rules.fs_xfm_to_native.output.ribbon, convex_hull=rules.create_convex_hull.output.convex_hull, subcortical_seg=rules.add_brainstem_new_seg.output.seg, - mask=rules.nii_to_mif.output.mask, params: - threads=workflow.threads, step=config['step'], sl=config['sl_count'], output: tck=bids( root=join(config['out_dir'], mrtpipelines), datatype='tractography', - space='T1w', desc='iFOD2', suffix='tractography.tck', **config["subj_wildcards"], ) + threads: workflow.cores group: "subject_2" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'tckgen -nthreads {params.threads} -algorithm iFOD2 -step {params.step} -select {params.sl_count} -exclude {input.cortical_ribbon} -exclude {input.convex_hull} -include {input.subcortical_seg} -mask {input.mask} -seed_image {input.mask} {input.fod} {output.tck}' + 'tckgen -nthreads {threads} -algorithm iFOD2 -step {params.step} -select {params.sl_count} -exclude {input.cortical_ribbon} -exclude {input.convex_hull} -include {input.subcortical_seg} -mask {input.mask} -seed_image {input.mask} {input.fod} {output.tck}' -rule weight_tractography: +rule tcksift2: + # Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. The effects of SIFT on the reproducibility and biological accuracy of the structural connectome. NeuroImage, 2015, 104, 253-265 input: - tck=rules.gen_tractography.output.tck, - fod=rules.normalise_fod.output.wm_fod, - params: - threads=workflow.threads, + tck=rules.tckgen.output.tck, + fod=rules.mtnormalise.output.wm_fod, output: weights=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='tractography', - space='T1w', desc='iFOD2', suffix='tckweights.txt', **config['subj_wildcards'], ), mu=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='tractography', - space='T1w', desc='iFOD2', suffix='mucoefficient.txt', **config['subj_wildcards'], ] ), + threads: workflow.cores group: "subject_2" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'tcksift2 -nthreads {params.threads} -out_mu {output.mu} {input.tck} {input.fod} {output.weights}' + 'tcksift2 -nthreads {threads} -out_mu {output.mu} {input.tck} {input.fod} {output.weights}' + +# TODO (v0.2): ADD OPTION TO OUTPUT TDI MAP -# TODO: ADD OPTION TO OUTPUT TDI MAP +rule tck2connectome: +""" +Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. The effects of SIFT on the reproducibility and biological accuracy of the structural connectome. 
NeuroImage, 2015, 104, 253-265" -# Connectivity map -# TODO: ADD OPTION TO MULTIPLY BY MU COEFFICIENT -rule connectome_map: +TODO (v0.2): ADD OPTION TO MULTIPLY BY MU COEFFICIENT +""" input: - weights=rules.weight_tractography.output.weights, - tck=rules.gen_tractography.output.tck, + weights=rules.tcksift2.output.weights, + tck=rules.tckgen.output.tck, subcortical_seg=rules.add_brainstem_new_seg.output.seg, params: - threads=workflow.threads, radius=config['radial_search'] output: sl_assignment=bids( @@ -373,199 +380,194 @@ rule connectome_map: suffix='nodeweights.csv' **config['subj_wildcards'], ) + threads: workflow.cores group: "subject_2" container: - config['singularity']['mrtpipelines'] + config['singularity']['mrtrix'] shell: - 'tck2connectome -nthreads {params.threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} ' - - rule extract_tck: - input: - node_weights=rules.connectome_map.output.node_weights, - sl_assignment=rules.connectome_map.output.sl_assignment, - tck=rules.gen_tractography.output.tck, - params: - threads=workflow.threads, - output: - edge_weight=temp( - bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='tractography', - space='T1w', - desc='subcortical', - suffix='tckweights', - **config['subj_wildcards'], - ) - ), - edge_tck=temp( - bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='tractography', - space='T1w', - desc='subcortical', - suffix='from_', - **config['subj_wildcards'], - ) - ), - group: "subject_2" - container: - config['singularity']['mrtpipelines'], - shell: - 'for i in `seq 2 72`; do ' - ' nodes=$nodes,$i ' - 'done ' - 'connectome2tck -nthreads {params.threads} -nodes $nodes -exclusive -filters_per_edge -tck_weights_in {input.node_weights} -prefix_tck_weights_out {output.edge_weight} {input.tck} {input.sl_assignment} {output.edge_tck} ' + 'tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} ' - # NOTE: Pass labelmerge split segs here? 
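# Aside (illustrative sketch only, not part of this patch): connectome2tck's
# `-nodes` option takes a comma-separated list of node labels, which the bash
# loop in the extract_tck shell assembles at run time. The same string could
# be precomputed in Python, assuming the labels of interest really are 2-72:
#
#     node_list = ",".join(str(i) for i in range(2, 73))  # "2,3,...,72"
#
# and passed through a hypothetical params entry (params: nodes=node_list) so
# the command reads `connectome2tck ... -nodes {params.nodes}`.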
- rule create_roi_mask: - # EXPAND OVER NODE1 IN RULE ALL - input: - subcortical_seg=rules.add_brainstem_new_seg.output.seg, - params: - threads=workflow.threads, - output: - roi_mask=temp( - bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='anat', - space='T1w', - desc='{node1}', - suffix='mask.mif' - ) - ), - group: "subject_2" - container: - config['singularity']['mrtpipelines'], - shell: - 'mrcalc -nthreads {params.threads} {input.subcortical_seg} {wildcards.node1} -eq {output.roi_mask}' +rule connectome2tck: + input: + node_weights=rules.tck2connectome.output.node_weights, + sl_assignment=rules.tck2connectome.output.sl_assignment, + tck=rules.tckgen.output.tck, + output: + edge_weight=temp( + bids( + root=mrtrix_dir, + datatype='tractography', + desc='subcortical', + suffix='tckweights', + **config['subj_wildcards'], + ) + ), + edge_tck=temp( + bids( + root=mrtix_dir, + datatype='tractography', + desc='subcortical', + suffix='from_', + **config['subj_wildcards'], + ) + ), + threads: workflow.cores + group: "subject_2" + container: + config['singularity']['mrtrix'], + shell: + 'for i in `seq 2 72`; do ' + ' nodes=$nodes,$i ' + 'done ' + 'connectome2tck -nthreads {threads} -nodes $nodes -exclusive -filters_per_edge -tck_weights_in {input.node_weights} -prefix_tck_weights_out {output.edge_weight} {input.tck} {input.sl_assignment} {output.edge_tck} ' - rule filter_tck: - # Node1 should be same as prev rule, need to iterate over node2 - input: - roi1=bids( - root=join(config['out_dir'], 'mrtpipelines'), +# NOTE: Use labelmerge split segs here? +rule create_roi_mask: + # EXPAND OVER NODE1 IN RULE ALL + input: + subcortical_seg=rules.add_brainstem_new_seg.output.seg, + output: + roi_mask=temp( + bids( + root=mrtrix_dir, datatype='anat', - space='T1w', desc='{node1}', - suffix='mask.mif' - ), - roi2=bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='anat', - space='T1w', - desc='{node2}', - suffix='mask.mif' - ), - # ZI rois (do these need to be separately defined in output?) - lZI=bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='anat', - space='T1w', - desc='21', - suffix='mask.mif' - ), - rZI=bids( - root=join(config['out_dir'], 'mrtpipelines'), + suffix='mask.mif', + **config["subj_wildcards"] + ) + ), + threads: workflow.cores + group: "subject_2" + container: + config['singularity']['mrtrix'], + shell: + 'mrcalc -nthreads {threads} {input.subcortical_seg} {wildcards.node1} -eq {output.roi_mask}' + +rule filter_tck: + # Node1 should be same as prev rule, need to iterate over node2 + input: + roi1=bids( + root=mrtrix_dir, + datatype='anat', + desc='{node1}', + suffix='mask.mif', + **config["subj_wildcards"] + ), + roi2=bids( + root=mrtrix_dir, + datatype='anat', + desc='{node2}', + suffix='mask.mif', + **config["subj_wildcards"] + ), + # ZI rois (do these need to be separately defined in output?) 
+ lZI=bids( + root=mrtrix_dir, + datatype='anat', + desc='21', + suffix='mask.mif', + **config["subj_wildcards"] + ), + rZI=bids( + root=mrtrix_dir, + datatype='anat', + desc='22', + suffix='mask.mif', + **config["subj_wildcards"] + ), + tck=rules.connectome2tck.output.edge_tck, + weights=rules.tck2connectome.outputs.node_weights, + subcortical_seg=rules.add_brainstem_new_seg.output.seg, + output: + filter_mask=temp( + bids( + root=mrtrix_dir, datatype='anat', - space='T1w', - desc='22', - suffix='mask.mif' - ), - subcortical_seg=rules.add_brainstem_new_seg.output.seg, - tck=rules.extract_tck.output.edge_tck, - weights=rules.connectome_map.outputs.node_weights - params: - threads=workflow.threads, - output: - filter_mask=temp( - bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='anat', - space='T1w', - desc='exclude', - suffix='mask.mif' - ) + desc='exclude', + suffix='mask.mif', + **config["subj_wildcards"] ) - filtered_tck=temp( - bids( - root=join(config['out_dir'], 'mrtpipelines'), + ), + filtered_tck=temp( + bids( + root=mrtrix_dir, datatype='tractography', - space='T1w', desc='from_{node1}-{node2}', - suffix='tractography.tck' - ) + suffix='tractography.tck', + **config["subj_wildcards"] ) - filtered_weights=temp( - bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='tractography', - space='T1w', - desc='from_{node1}-{node2}', - suffix='weights.csv' - ) - ) - group: "subject_2" - containers: - config['singularity']['mrtpipelines'], - shell: - 'mrcalc -nthreads {params.threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} ' - 'tckedit -nthreads {params.therads} -exclude {output.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck} ' - - rule combine_filtered: - input: - tck=expand(rules.filter_tck.output.filtered_tck, zip, **config['input_zip_lists']['T1w']), - weights=expand(rules.filter_tck.output.filtered_weights, zip, **config['input_zip_lists']['T1w']), - params: - threads=workflow.threads, - output: - combined_tck=bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='tractography', - space='T1w', - desc='subcortical', - suffix='tractography.tck' - ) - combined_weights=bids( - root=join(config['out_dir'], 'mrtpipelines'), + ), + filtered_weights=temp( + bids( + root=mrtrix_dir, datatype='tractography', - space='T1w', - desc='subcortical', - suffix='tckweights.txt' + desc='from_{node1}-{node2}', + suffix='weights.csv', + **config["subj_wildcards"] ) - group: "subject_2" - container: - config['singularity']['mrtpipelines'], - shell: - 'tckedit {input.tck} {output.combined_tck} ' - 'cat {input.weights} >> {output.combined_weights} ' + ) + threads: workflow.cores + group: "subject_2" + containers: + config['singularity']['mrtrix'], + shell: + 'mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} ' + 'tckedit -nthreads {params.therads} -exclude {output.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck} ' - rule filtered_connectome_map: - input: - weights=rules.combine_filtered.output.combined_weights, - tck=rules.combine_filtered.output.combined_tck, - subcortical_seg=rules.add_brainstem_new_seg.output.seg, - params: - threads=workflow.threads, - radius=config['radial_search'] - output: - sl_assignment=bids( - root=join(config['out_dir'], 
'mrtpipelines'), - datatype='tractography', - space='T1w', - desc='subcortical', - suffix='nodeassignment.txt', - **config['subj_wildcards'], - ), - node_weights=bids( - root=join(config['out_dir'], 'mrtpipelines'), - datatype='tractography', - space='T1w', - desc='subcortical', - suffix='nodeweights.csv' - **config['subj_wildcards'], - ) - group: "subject_2" - container: - config['singularity']['mrtpipelines'] - shell: - 'tck2connectome -nthreads {params.threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} -force' \ No newline at end of file +rule combine_filtered: + input: + tck=expand(rules.filter_tck.output.filtered_tck, zip, **config['input_zip_lists']['dwi']), + weights=expand(rules.filter_tck.output.filtered_weights, zip, **config['input_zip_lists']['dwi']), + params: + threads=workflow.threads, + output: + combined_tck=bids( + root=mrtrix_dir, + datatype='tractography', + desc='subcortical', + suffix='tractography.tck', + **config["subj_wildcards"] + ) + combined_weights=bids( + root=mrtrix_dir, + datatype='tractography', + desc='subcortical', + suffix='tckweights.txt', + **config["subj_wildcards"] + ) + threads: workflow.cores + group: "subject_2" + container: + config['singularity']['mrtrix'], + shell: + 'tckedit {input.tck} {output.combined_tck} ' + 'cat {input.weights} >> {output.combined_weights} ' + +rule filtered_tck2connectome: + input: + weights=rules.combine_filtered.output.combined_weights, + tck=rules.combine_filtered.output.combined_tck, + subcortical_seg=rules.add_brainstem_new_seg.output.seg, + params: + radius=config['radial_search'] + output: + sl_assignment=bids( + root=mrtrix_dir, + datatype='tractography', + desc='subcortical', + suffix='nodeassignment.txt', + **config['subj_wildcards'], + ), + node_weights=bids( + root=mrtrix_dir, + datatype='tractography', + desc='subcortical', + suffix='nodeweights.csv' + **config['subj_wildcards'], + ) + threads: workflow.core + group: "subject_2" + container: + config['singularity']['mrtrix'] + shell: + 'tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} -force' From 177614df9479e673ad96befd42df7509e87dba03 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:10:52 -0400 Subject: [PATCH 02/20] Fix root for bids call, params.threads --- dbsc/workflow/rules/mrtpipelines.smk | 30 ++++++++++++++-------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 4661daa0..bab2fa9d 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -188,8 +188,6 @@ rule mtnormalise: gm_fod=rules.dwi2fod.output.gm_fod, csf_fod=rules.dwi2fod.output.csf_fod, mask=rules.nii2mif.output.mask, - params: - threads=workflow.threads, output: wm_fod=bids( root=mrtrix_dir, @@ -227,8 +225,6 @@ rule dwinormalise: input: dwi=rules.nii2mif.output.dwi, mask=rules.nii2mif.output.mask, - params: - threads=workflow.threads, output: dwi=bids( root=mrtrix_dir, @@ -237,6 +233,7 @@ rule dwinormalise: suffix='dwi.mif', **config['subj_wildcards'], ) + threads: workflow.cores container: config['singularity']['mrtrix'] shell: @@ -248,8 +245,6 @@ rule dwi2tensor: 
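# For reference, the maps written by tensor2metric in the dwi2tensor rule,
# in terms of the eigenvalues l1 >= l2 >= l3 of the fitted diffusion tensor
# (standard definitions):
#
#     MD (-adc) = (l1 + l2 + l3) / 3
#     AD (-ad)  = l1
#     RD (-rd)  = (l2 + l3) / 2
#     FA (-fa)  = sqrt(3/2) * sqrt(((l1 - MD)^2 + (l2 - MD)^2 + (l3 - MD)^2)
#                                  / (l1^2 + l2^2 + l3^2))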
input: dwi=rules.dwinoramlise.dwi, mask=rules.nii2mif.output.mask, - params: - threads=workflow.threads, output: dti=bids( root=mrtrix_dir, @@ -285,6 +280,7 @@ rule dwi2tensor: suffix='fa.mif', **config['subj_wildcards'], ) + threads: workflow.cores group: "subject_1" container: config['singularity']['mrtrix'] @@ -365,17 +361,15 @@ TODO (v0.2): ADD OPTION TO MULTIPLY BY MU COEFFICIENT radius=config['radial_search'] output: sl_assignment=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtix_dir, datatype='tractography', - space='T1w', desc='subcortical', suffix='nodeassignment.txt', **config['subj_wildcards'], ), node_weights=bids( - root=join(config['out_dir'], 'mrtpipelines'), + root=mrtrix_dir, datatype='tractography', - space='T1w', desc='subcortical', suffix='nodeweights.csv' **config['subj_wildcards'], @@ -516,10 +510,16 @@ rule filter_tck: rule combine_filtered: input: - tck=expand(rules.filter_tck.output.filtered_tck, zip, **config['input_zip_lists']['dwi']), - weights=expand(rules.filter_tck.output.filtered_weights, zip, **config['input_zip_lists']['dwi']), - params: - threads=workflow.threads, + tck=expand( + rules.filter_tck.output.filtered_tck, + zip, + **config['input_zip_lists']['dwi'] + ), + weights=expand( + rules.filter_tck.output.filtered_weights, + zip, + **config['input_zip_lists']['dwi'] + ), output: combined_tck=bids( root=mrtrix_dir, @@ -527,7 +527,7 @@ rule combine_filtered: desc='subcortical', suffix='tractography.tck', **config["subj_wildcards"] - ) + ), combined_weights=bids( root=mrtrix_dir, datatype='tractography', From c313eb7e0c0553b662684984f1d203b5a4f36d47 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:12:37 -0400 Subject: [PATCH 03/20] Remove lmax param in dwi2fod rule --- dbsc/workflow/rules/mrtpipelines.smk | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index bab2fa9d..ec22015b 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -170,10 +170,10 @@ rule dwi2fod: container: config['singularity']['mrtrix'] shell: - 'if [ {params.shell} == "" ] && [ {params.lmax} == "" ]; then' + 'if [ {params.shell} == "" ]; then' ' dwi2fod -nthreads {threads} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.avg_csf} ' 'else ' - ' dwi2fod -nthreads {threads} -shell {params.shell} -lmax {params.lmax} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} ' + ' dwi2fod -nthreads {threads} -shell {params.shell} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} ' 'fi' From 455c8d828b777c0e92ae1d45d11d94f0f8709f7c Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:18:25 -0400 Subject: [PATCH 04/20] Update call for input path --- dbsc/workflow/rules/mrtpipelines.smk | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index ec22015b..50f81835 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -18,10 +18,10 @@ lmaxes = config.get('lmax', '') #------------ MRTRIX PREPROC BEGIN ----------# rule nii2mif: input: - dwi=config["input_path"]["dwi"], - bval=lambda wildcards: re.sub(".nii.gz", ".bval", 
config["input_path"]["dwi"])), - bvec=lambda wildcards: re.sub(".nii.gz", ".bvec", config["input_path"]["dwi"])), - mask=config["input_path"]["mask"] + dwi=inputs["dwi"].input_path, + bval=lambda wildcards: re.sub(".nii.gz", ".bval", inputs["dwi"].input_path)), + bvec=lambda wildcards: re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path)), + mask=inputs["mask"].input_path output: dwi=bids( root=mrtrix_dir, From 72b4a4d9661dc9f5b2e50eee069cc4c6003b6ee6 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:22:26 -0400 Subject: [PATCH 05/20] Add flag for responsemean_dir --- dbsc/config/snakebids.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/dbsc/config/snakebids.yml b/dbsc/config/snakebids.yml index 458f7cbb..18166ff9 100644 --- a/dbsc/config/snakebids.yml +++ b/dbsc/config/snakebids.yml @@ -124,6 +124,11 @@ parse_args: node is found within this radius, the streamline endpoint is not assigned to any node. (default: 1.5mm)' default: 1.5 + + --responsemean_dir: + help: 'Provide directory containing average response functions. If not + provided, one will be computed from the subjects in the input + directory.' #-----------------------------------------------------# # Workflow specific config From 1247ee4ea672cf11c9d89ceb7c34a5febcee334f Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:24:30 -0400 Subject: [PATCH 06/20] Replace root dir with derivatives dir --- dbsc/workflow/rules/mrtpipelines.smk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 50f81835..66e62109 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -4,7 +4,7 @@ include: 'zona_bb_subcortex.smk' include: 'freesurfer.smk' # Directories -mrtrix_dir = join(config["root"], "mrtrix") +mrtrix_dir = join(config["derivatives"], "mrtrix") # Paramaters responsemean_flag = config.get('responsemean_dir', None) From f2b9e5ae0d765b7523be124467fc15e71f159e60 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:54:40 -0400 Subject: [PATCH 07/20] Update pybids_inputs --- dbsc/config/snakebids.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/dbsc/config/snakebids.yml b/dbsc/config/snakebids.yml index 18166ff9..bc3b3b25 100644 --- a/dbsc/config/snakebids.yml +++ b/dbsc/config/snakebids.yml @@ -25,19 +25,19 @@ targets_by_analysis_level: pybids_inputs: dwi: filters: - suffix: dwi - extension: .nii.gz - datatype: dwi - space: T1w + suffix: "dwi" + extension: ".nii.gz" + datatype: "dwi" + space: "T1w" wildcards: - subject - session mask: filters: - suffix: mask - extension: .nii.gz - datatype: dwi - space: T1w + suffix: "mask" + extension: ".nii.gz" + datatype: "dwi" + space: "T1w" wildcards: - subject - session From 2a8f402ef1e04680606da8f16799f56ef7e02f1c Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 2 Nov 2022 18:59:19 -0400 Subject: [PATCH 08/20] Snakefmt updates --- dbsc/workflow/Snakefile | 15 +- dbsc/workflow/rules/mrtpipelines.smk | 551 ++++++++++++++------------- 2 files changed, 296 insertions(+), 270 deletions(-) diff --git a/dbsc/workflow/Snakefile b/dbsc/workflow/Snakefile index dbeef867..351b616d 100644 --- a/dbsc/workflow/Snakefile +++ b/dbsc/workflow/Snakefile @@ -6,14 +6,13 @@ from snakebids import bids configfile: 'config/snakebids.yml' # writes inputs_config.yml and updates config dict -config.update( - snakebids.generate_inputs( - bids_dir=config["bids_dir"], - 
pybids_inputs=config["pybids_inputs"], - derivatives=config["derivatives"], - participant_label=config["participant_label"], - exclude_participant_label=config["exclude_partiicpant_label"], - ) +inputs = snakebids.generate_inputs( + bids_dir=config["bids_dir"], + pybids_inputs=config["pybids_inputs"], + derivatives=config["derivatives"], + participant_label=config["participant_label"], + exclude_participant_label=config["exclude_participant_label"], + use_bids_inputs=True, ) # this adds constraints to the bids naming diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 66e62109..e7be11b3 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -1,47 +1,50 @@ from os.path import join -include: 'zona_bb_subcortex.smk' -include: 'freesurfer.smk' + +include: "zona_bb_subcortex.smk" +include: "freesurfer.smk" + # Directories mrtrix_dir = join(config["derivatives"], "mrtrix") # Paramaters -responsemean_flag = config.get('responsemean_dir', None) -shells = config.get('shells', '') -lmaxes = config.get('lmax', '') +responsemean_flag = config.get("responsemean_dir", None) +shells = config.get("shells", "") +lmaxes = config.get("lmax", "") # Mrtrix3 citation (additional citations are included per rule as necessary): # Tournier, J.-D.; Smith, R. E.; Raffelt, D.; Tabbara, R.; Dhollander, T.; Pietsch, M.; Christiaens, D.; Jeurissen, B.; Yeh, C.-H. & Connelly, A. MRtrix3: A fast, flexible and open software framework for medical image processing and visualisation. NeuroImage, 2019, 202, 116137 -#------------ MRTRIX PREPROC BEGIN ----------# +# ------------ MRTRIX PREPROC BEGIN ----------# rule nii2mif: input: dwi=inputs["dwi"].input_path, - bval=lambda wildcards: re.sub(".nii.gz", ".bval", inputs["dwi"].input_path)), - bvec=lambda wildcards: re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path)), - mask=inputs["mask"].input_path + bval=lambda wildcards: re.sub(".nii.gz", ".bval", inputs["dwi"].input_path), + bvec=lambda wildcards: re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path), + mask=inputs["mask"].input_path, output: dwi=bids( root=mrtrix_dir, - datatype='dwi', - suffix='dwi.mif', - **config['subj_wildcards'] + datatype="dwi", + suffix="dwi.mif", + **config["subj_wildcards"] ), mask=bids( root=mrtrix_dir, - datatype='dwi', - suffix='brainmask.mif', - **config['subj_wildcards'] - ) + datatype="dwi", + suffix="brainmask.mif", + **config["subj_wildcards"] + ), threads: workflow.cores - group: "subject_1" + group: + "subject_1" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'mrconvert -nthreads {threads} -fslgrad {input.bvecs} {input.bvals} {input.dwi} {output.dwi} && ' - 'mrconvert -nthreads {threads} {input.mask} {output.mask}' + "mrconvert -nthreads {threads} -fslgrad {input.bvecs} {input.bvals} {input.dwi} {output.dwi} && " + "mrconvert -nthreads {threads} {input.mask} {output.mask}" rule dwi2response: @@ -52,82 +55,79 @@ rule dwi2response: """ input: dwi=rules.nii2mif.output.dwi, - mask=rules.nii2mif.output.mask + mask=rules.nii2mif.output.mask, params: shells=shells, - lmax=lmaxes + lmax=lmaxes, output: wm_rf=bids( root=mrtrix_dir, - datatype='response', - desc='wm', - suffix='response.txt', - **config['subj_wildcards'], + datatype="response", + desc="wm", + suffix="response.txt", + **config["subj_wildcards"], ), gm_rf=bids( root=mrtrix_dir, - datatype='response', - desc='gm' - suffix='response.txt', - **config['subj_wildcards'], + datatype="response", + desc="gm", + 
suffix="response.txt", + **config["subj_wildcards"], ), csf_rf=bids( root=mrtrix_dir, - datatype='response, - desc='csf' - suffix='response.txt', - **config['subj_wildcards'], - ) + datatype="response", + desc="csf", + suffix="response.txt", + **config["subj_wildcards"], + ), threads: workflow.cores - group: "subject_1" + group: + "subject_1" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: 'if [ {params.shell} == "" ] && [ {params.lmax} == ""]; then' - ' dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask}' - 'else ' - ' dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} -shells {params.shells} -lmax {params.lmax} ' - 'fi' + " dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask}" + "else " + " dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} -shells {params.shells} -lmax {params.lmax} " + "fi" + rule responsemean: """Compute average response function""" input: - wm_rf=expand( - rules.dwi2response.output.wm_rf, subject=config['subjects'] - ), - gm_rf=expand( - rules.dwi2response.output.gm_rf, subject=config['subjects'] - ), - csf_rf=expand( - rules.dwi2response.output.csf_rf, subject=config['subjects'] - ) + wm_rf=expand(rules.dwi2response.output.wm_rf, subject=config["subjects"]), + gm_rf=expand(rules.dwi2response.output.gm_rf, subject=config["subjects"]), + csf_rf=expand(rules.dwi2response.output.csf_rf, subject=config["subjects"]), output: wm_avg_rf=bids( - root=join(mrtrix_dir, 'avg'), - datatype='response', - desc='wm', - suffix='response.txt', + root=join(mrtrix_dir, "avg"), + datatype="response", + desc="wm", + suffix="response.txt", ), gm_avg_rf=bids( - root=join(mrtrix_dir, 'avg'), - datatype='response' - desc='gm', - suffix='response.txt', + root=join(mrtrix_dir, "avg"), + datatype="response", + desc="gm", + suffix="response.txt", ), csf_avg_rf=bids( - root=join(mrtrix_dir, 'avg'), - datatype='response' - desc='csf', - suffix='response.txt', - ) + root=join(mrtrix_dir, "avg"), + datatype="response", + desc="csf", + suffix="response.txt", + ), threads: workflow.cores - group: "group" + group: + "group" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'responsemean {input.wm_rf} {output.wm_avg_rg} -nthreads {threads} &&' - 'responsemean {input.gm_rf} {output.gm_avg_rg} -nthreads {threads} &&' - 'responsemean {input.csf_rf} {output.csf_avg_rg} -nthreads {threads}' + "responsemean {input.wm_rf} {output.wm_avg_rg} -nthreads {threads} &&" + "responsemean {input.gm_rf} {output.gm_avg_rg} -nthreads {threads} &&" + "responsemean {input.csf_rf} {output.csf_avg_rg} -nthreads {threads}" rule dwi2fod: @@ -135,46 +135,53 @@ rule dwi2fod: input: dwi=rules.nii2mif.output.dwi, mask=rules.nii2mif.output.mask, - wm_rf=join(config['responsemean_dir'], 'desc-wm_response.txt') if responsemean_flag else rules.responsemean.output.wm_avg_rf, - gm_rf=join(config['responsemean_dir'], 'desc-gm_response.txt') if responsemean_flag else rules.responsemean.output.gm_avg_rf, - csf_rf=join(config['responsemean_dir'], 'desc-csf_response.txt') if responsemean_flag else rules.responsemean.output.csf_avg_rf + wm_rf=join(config["responsemean_dir"], "desc-wm_response.txt") + if responsemean_flag + else rules.responsemean.output.wm_avg_rf, + 
gm_rf=join(config["responsemean_dir"], "desc-gm_response.txt") + if responsemean_flag + else rules.responsemean.output.gm_avg_rf, + csf_rf=join(config["responsemean_dir"], "desc-csf_response.txt") + if responsemean_flag + else rules.responsemean.output.csf_avg_rf, params: shell=shells, output: wm_fod=bids( root=mrtrix_dir, - datatype='response', - model='csd', - desc='wm', - suffix='fod.mif', - **config['subj_wildcards'], + datatype="response", + model="csd", + desc="wm", + suffix="fod.mif", + **config["subj_wildcards"], ), gm_fod=bids( root=mrtrix_dir, - datatype='response', - model='csd', - desc='gm', - suffix='fod.mif', - **config['subj_wildcards'], + datatype="response", + model="csd", + desc="gm", + suffix="fod.mif", + **config["subj_wildcards"], ), csf_fod=bids( root=mrtrix_dir, - datatype='response', - model='csd', - desc='csf', - suffix='fod.mif', - **config['subj_wildcards'], + datatype="response", + model="csd", + desc="csf", + suffix="fod.mif", + **config["subj_wildcards"], ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: 'if [ {params.shell} == "" ]; then' - ' dwi2fod -nthreads {threads} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.avg_csf} ' - 'else ' - ' dwi2fod -nthreads {threads} -shell {params.shell} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} ' - 'fi' + " dwi2fod -nthreads {threads} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.avg_csf} " + "else " + " dwi2fod -nthreads {threads} -shell {params.shell} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} " + "fi" rule mtnormalise: @@ -182,7 +189,7 @@ rule mtnormalise: Raffelt, D.; Dhollander, T.; Tournier, J.-D.; Tabbara, R.; Smith, R. E.; Pierre, E. & Connelly, A. Bias Field Correction and Intensity Normalisation for Quantitative Analysis of Apparent Fibre Density. In Proc. ISMRM, 2017, 26, 3541 Dhollander, T.; Tabbara, R.; Rosnarho-Tornstrand, J.; Tournier, J.-D.; Raffelt, D. & Connelly, A. Multi-tissue log-domain intensity and inhomogeneity normalisation for quantitative apparent fibre density. In Proc. 
ISMRM, 2021, 29, 2472 - """" + """ input: wm_fod=rules.dwi2fod.output.wm_fod, gm_fod=rules.dwi2fod.output.gm_fod, @@ -191,34 +198,36 @@ rule mtnormalise: output: wm_fod=bids( root=mrtrix_dir, - datatype='response', - model='csd', - desc='normalized', - suffix='wm_fod.mif', - **config['subj_wildcards'], + datatype="response", + model="csd", + desc="normalized", + suffix="wm_fod.mif", + **config["subj_wildcards"], ), gm_fod=bids( root=mrtrix_dir, - datatype='response', - model='csd', - desc='normalized', - suffix='gm_fod.mif', - **config['subj_wildcards'], + datatype="response", + model="csd", + desc="normalized", + suffix="gm_fod.mif", + **config["subj_wildcards"], ), csf_fod=bids( root=mrtrix_dir, - datatype='response', - model='csd', - desc='normalized', - suffix='csf_fod.mif', - **config['subj_wildcards'], + datatype="response", + model="csd", + desc="normalized", + suffix="csf_fod.mif", + **config["subj_wildcards"], ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'mtnormalise -nthreads {threads} -mask {input.mask} {input.wm_fod} {output.wm_fod} {input.gm_fod} {output.gm_fod} {input.csf_fod} {output.csf_fod}' + "mtnormalise -nthreads {threads} -mask {input.mask} {input.wm_fod} {output.wm_fod} {input.gm_fod} {output.gm_fod} {input.csf_fod} {output.csf_fod}" + # DTI (Tensor) Processing rule dwinormalise: @@ -228,17 +237,18 @@ rule dwinormalise: output: dwi=bids( root=mrtrix_dir, - datatype='dwi', - desc='normalized', - suffix='dwi.mif', - **config['subj_wildcards'], - ) + datatype="dwi", + desc="normalized", + suffix="dwi.mif", + **config["subj_wildcards"], + ), threads: workflow.cores container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'dwinormalise individual -nthreads {threads} {input.dwi} {input.mask} {output.dwi}' - group: "subject_1" + "dwinormalise individual -nthreads {threads} {input.dwi} {input.mask} {output.dwi}" + group: + "subject_1" rule dwi2tensor: @@ -248,48 +258,52 @@ rule dwi2tensor: output: dti=bids( root=mrtrix_dir, - datatype='dti', - suffix='dti.mif', - **config['subj_wildcards'], + datatype="dti", + suffix="dti.mif", + **config["subj_wildcards"], ), fa=bids( root=mrtrix_dir, - datatype='dti', - model='dti', - suffix='fa.mif', - **config['subj_wildcards'], + datatype="dti", + model="dti", + suffix="fa.mif", + **config["subj_wildcards"], ), ad=bids( root=mrtix_dir, - datatype='dti', - model='dti', - suffix='ad.mif', - **config['subj_wildcards'], + datatype="dti", + model="dti", + suffix="ad.mif", + **config["subj_wildcards"], ), rd=bids( root=mrtrix_dir, - datatype='dti', - model='dti', - suffix='rd.mif', - **config['subj_wildcards'], + datatype="dti", + model="dti", + suffix="rd.mif", + **config["subj_wildcards"], ), md=bids( root=mrtrix_dir, - datatype='dti', - model='dti', - suffix='fa.mif', - **config['subj_wildcards'], - ) + datatype="dti", + model="dti", + suffix="fa.mif", + **config["subj_wildcards"], + ), threads: workflow.cores - group: "subject_1" + group: + "subject_1" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'dwi2tensor -nthreads {threads} -mask {input.mask} {input.dwi} {output.dti} && ' - 'tensor2metric -nthreads {threads} -mask {input.mask} {output.dti} -fa {output.fa} -ad {output.ad} -rd {output.rd} -adc {output.md}' -#--------------- MRTRIX PREPROC END --------------# + "dwi2tensor -nthreads {threads} -mask {input.mask} {input.dwi} {output.dti} && " + 
"tensor2metric -nthreads {threads} -mask {input.mask} {output.dti} -fa {output.fa} -ad {output.ad} -rd {output.rd} -adc {output.md}" + + +# --------------- MRTRIX PREPROC END --------------# -#------------ MRTRIX TRACTOGRAPHY BEGIN ----------# + +# ------------ MRTRIX TRACTOGRAPHY BEGIN ----------# # TODO (v0.1): CHECK TO MAKE SURE RULES ARE IMPORTED CORRECTLY FROM OTHER SMK FILES rule tckgen: # Tournier, J.-D.; Calamante, F. & Connelly, A. Improved probabilistic streamlines tractography by 2nd order integration over fibre orientation distributions. Proceedings of the International Society for Magnetic Resonance in Medicine, 2010, 1670 @@ -300,22 +314,24 @@ rule tckgen: convex_hull=rules.create_convex_hull.output.convex_hull, subcortical_seg=rules.add_brainstem_new_seg.output.seg, params: - step=config['step'], - sl=config['sl_count'], + step=config["step"], + sl=config["sl_count"], output: tck=bids( - root=join(config['out_dir'], mrtpipelines), - datatype='tractography', - desc='iFOD2', - suffix='tractography.tck', + root=mrtrix_dir, + datatype="tractography", + desc="iFOD2", + suffix="tractography.tck", **config["subj_wildcards"], - ) + ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'tckgen -nthreads {threads} -algorithm iFOD2 -step {params.step} -select {params.sl_count} -exclude {input.cortical_ribbon} -exclude {input.convex_hull} -include {input.subcortical_seg} -mask {input.mask} -seed_image {input.mask} {input.fod} {output.tck}' + "tckgen -nthreads {threads} -algorithm iFOD2 -step {params.step} -select {params.sl_count} -exclude {input.cortical_ribbon} -exclude {input.convex_hull} -include {input.subcortical_seg} -mask {input.mask} -seed_image {input.mask} {input.fod} {output.tck}" + rule tcksift2: # Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. The effects of SIFT on the reproducibility and biological accuracy of the structural connectome. NeuroImage, 2015, 104, 253-265 @@ -325,61 +341,64 @@ rule tcksift2: output: weights=bids( root=mrtrix_dir, - datatype='tractography', - desc='iFOD2', - suffix='tckweights.txt', - **config['subj_wildcards'], + datatype="tractography", + desc="iFOD2", + suffix="tckweights.txt", + **config["subj_wildcards"], ), mu=bids( root=mrtrix_dir, - datatype='tractography', - desc='iFOD2', - suffix='mucoefficient.txt', - **config['subj_wildcards'], - ] + datatype="tractography", + desc="iFOD2", + suffix="mucoefficient.txt", + **config["subj_wildcards"], ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'tcksift2 -nthreads {threads} -out_mu {output.mu} {input.tck} {input.fod} {output.weights}' + "tcksift2 -nthreads {threads} -out_mu {output.mu} {input.tck} {input.fod} {output.weights}" + # TODO (v0.2): ADD OPTION TO OUTPUT TDI MAP + rule tck2connectome: -""" -Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. The effects of SIFT on the reproducibility and biological accuracy of the structural connectome. NeuroImage, 2015, 104, 253-265" + """ + Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. The effects of SIFT on the reproducibility and biological accuracy of the structural connectome. 
NeuroImage, 2015, 104, 253-265" -TODO (v0.2): ADD OPTION TO MULTIPLY BY MU COEFFICIENT -""" + TODO (v0.2): ADD OPTION TO MULTIPLY BY MU COEFFICIENT + """ input: weights=rules.tcksift2.output.weights, tck=rules.tckgen.output.tck, subcortical_seg=rules.add_brainstem_new_seg.output.seg, params: - radius=config['radial_search'] + radius=config["radial_search"], output: sl_assignment=bids( root=mrtix_dir, - datatype='tractography', - desc='subcortical', - suffix='nodeassignment.txt', - **config['subj_wildcards'], + datatype="tractography", + desc="subcortical", + suffix="nodeassignment.txt", + **config["subj_wildcards"], ), node_weights=bids( root=mrtrix_dir, - datatype='tractography', - desc='subcortical', - suffix='nodeweights.csv' - **config['subj_wildcards'], - ) + datatype="tractography", + desc="subcortical", + suffix="nodeweights.csv" ** config["subj_wildcards"], + ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} ' + "tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} " + rule connectome2tck: input: @@ -390,30 +409,32 @@ rule connectome2tck: edge_weight=temp( bids( root=mrtrix_dir, - datatype='tractography', - desc='subcortical', - suffix='tckweights', - **config['subj_wildcards'], + datatype="tractography", + desc="subcortical", + suffix="tckweights", + **config["subj_wildcards"], ) ), edge_tck=temp( bids( root=mrtix_dir, - datatype='tractography', - desc='subcortical', - suffix='from_', - **config['subj_wildcards'], + datatype="tractography", + desc="subcortical", + suffix="from_", + **config["subj_wildcards"], ) ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'], + config["singularity"]["mrtrix"] shell: - 'for i in `seq 2 72`; do ' - ' nodes=$nodes,$i ' - 'done ' - 'connectome2tck -nthreads {threads} -nodes $nodes -exclusive -filters_per_edge -tck_weights_in {input.node_weights} -prefix_tck_weights_out {output.edge_weight} {input.tck} {input.sl_assignment} {output.edge_tck} ' + "for i in `seq 2 72`; do " + " nodes=$nodes,$i " + "done " + "connectome2tck -nthreads {threads} -nodes $nodes -exclusive -filters_per_edge -tck_weights_in {input.node_weights} -prefix_tck_weights_out {output.edge_weight} {input.tck} {input.sl_assignment} {output.edge_tck} " + # NOTE: Use labelmerge split segs here? 
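# A minimal sketch, assuming the node labels 2-72 iterated by the shell loop in the
# connectome2tck rule above: the comma-separated list passed to `-nodes` can instead be
# built in Python (which also avoids the leading comma left by the unset $nodes shell
# variable) and exposed through a rule-level `params:` entry, referenced in the command
# as `-nodes {params.nodes}`.
nodes = ",".join(str(n) for n in range(2, 73))  # "2,3,4,...,72"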
rule create_roi_mask: @@ -424,49 +445,51 @@ rule create_roi_mask: roi_mask=temp( bids( root=mrtrix_dir, - datatype='anat', - desc='{node1}', - suffix='mask.mif', + datatype="anat", + desc="{node1}", + suffix="mask.mif", **config["subj_wildcards"] ) ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'], + config["singularity"]["mrtrix"] shell: - 'mrcalc -nthreads {threads} {input.subcortical_seg} {wildcards.node1} -eq {output.roi_mask}' + "mrcalc -nthreads {threads} {input.subcortical_seg} {wildcards.node1} -eq {output.roi_mask}" + rule filter_tck: # Node1 should be same as prev rule, need to iterate over node2 input: roi1=bids( root=mrtrix_dir, - datatype='anat', - desc='{node1}', - suffix='mask.mif', + datatype="anat", + desc="{node1}", + suffix="mask.mif", **config["subj_wildcards"] ), roi2=bids( root=mrtrix_dir, - datatype='anat', - desc='{node2}', - suffix='mask.mif', + datatype="anat", + desc="{node2}", + suffix="mask.mif", **config["subj_wildcards"] ), # ZI rois (do these need to be separately defined in output?) lZI=bids( root=mrtrix_dir, - datatype='anat', - desc='21', - suffix='mask.mif', + datatype="anat", + desc="21", + suffix="mask.mif", **config["subj_wildcards"] ), rZI=bids( root=mrtrix_dir, - datatype='anat', - desc='22', - suffix='mask.mif', + datatype="anat", + desc="22", + suffix="mask.mif", **config["subj_wildcards"] ), tck=rules.connectome2tck.output.edge_tck, @@ -476,72 +499,76 @@ rule filter_tck: filter_mask=temp( bids( root=mrtrix_dir, - datatype='anat', - desc='exclude', - suffix='mask.mif', + datatype="anat", + desc="exclude", + suffix="mask.mif", **config["subj_wildcards"] ) ), filtered_tck=temp( bids( root=mrtrix_dir, - datatype='tractography', - desc='from_{node1}-{node2}', - suffix='tractography.tck', + datatype="tractography", + desc="from_{node1}-{node2}", + suffix="tractography.tck", **config["subj_wildcards"] ) ), filtered_weights=temp( - bids( + bids( root=mrtrix_dir, - datatype='tractography', - desc='from_{node1}-{node2}', - suffix='weights.csv', + datatype="tractography", + desc="from_{node1}-{node2}", + suffix="weights.csv", **config["subj_wildcards"] ) - ) + ), threads: workflow.cores - group: "subject_2" - containers: - config['singularity']['mrtrix'], + group: + "subject_2" + container: + config["singularity"]["mrtrix"] shell: - 'mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} ' - 'tckedit -nthreads {params.therads} -exclude {output.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck} ' + "mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} " + "tckedit -nthreads {params.therads} -exclude {output.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck} " + rule combine_filtered: input: tck=expand( - rules.filter_tck.output.filtered_tck, - zip, - **config['input_zip_lists']['dwi'] + rules.filter_tck.output.filtered_tck, + zip, + **config["input_zip_lists"]["dwi"] ), weights=expand( - rules.filter_tck.output.filtered_weights, - zip, - **config['input_zip_lists']['dwi'] + rules.filter_tck.output.filtered_weights, + zip, + **config["input_zip_lists"]["dwi"] ), output: combined_tck=bids( root=mrtrix_dir, - datatype='tractography', - desc='subcortical', - 
suffix='tractography.tck', + datatype="tractography", + desc="subcortical", + suffix="tractography.tck", **config["subj_wildcards"] ), combined_weights=bids( root=mrtrix_dir, - datatype='tractography', - desc='subcortical', - suffix='tckweights.txt', + datatype="tractography", + desc="subcortical", + suffix="tckweights.txt", **config["subj_wildcards"] - ) + ), threads: workflow.cores - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'], + config["singularity"]["mrtrix"] shell: - 'tckedit {input.tck} {output.combined_tck} ' - 'cat {input.weights} >> {output.combined_weights} ' + "tckedit {input.tck} {output.combined_tck} " + "cat {input.weights} >> {output.combined_weights} " + rule filtered_tck2connectome: input: @@ -549,25 +576,25 @@ rule filtered_tck2connectome: tck=rules.combine_filtered.output.combined_tck, subcortical_seg=rules.add_brainstem_new_seg.output.seg, params: - radius=config['radial_search'] + radius=config["radial_search"], output: sl_assignment=bids( root=mrtrix_dir, - datatype='tractography', - desc='subcortical', - suffix='nodeassignment.txt', - **config['subj_wildcards'], + datatype="tractography", + desc="subcortical", + suffix="nodeassignment.txt", + **config["subj_wildcards"], ), node_weights=bids( root=mrtrix_dir, - datatype='tractography', - desc='subcortical', - suffix='nodeweights.csv' - **config['subj_wildcards'], - ) + datatype="tractography", + desc="subcortical", + suffix="nodeweights.csv" ** config["subj_wildcards"], + ), threads: workflow.core - group: "subject_2" + group: + "subject_2" container: - config['singularity']['mrtrix'] + config["singularity"]["mrtrix"] shell: - 'tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} -force' + "tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} -force" From 9669f8ab4c3d017f7a7fa4d7d3e1301e7176c885 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Thu, 3 Nov 2022 13:05:14 -0400 Subject: [PATCH 09/20] Simplify parts of mrtpipelines.smk - Replace config['derivatives'] with config['bids_dir'] and update join - Change config to get to return None if flags not defined - Remove unnecessary lambda calls - Update params for passing shell and lmax variables to command - Simplify shell commands - Add missing ',' for tck2connectome - Update param for defining node labels in connectome2tck - Update naming of filtered files to be more BIDSesque --- dbsc/workflow/rules/mrtpipelines.smk | 44 ++++++++++++---------------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index e7be11b3..d697df2f 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -6,12 +6,12 @@ include: "freesurfer.smk" # Directories -mrtrix_dir = join(config["derivatives"], "mrtrix") +mrtrix_dir = join(config["bids_dir"], "derivatives", "mrtrix") -# Paramaters +# Parameters responsemean_flag = config.get("responsemean_dir", None) -shells = config.get("shells", "") -lmaxes = config.get("lmax", "") +shells = config.get("shells", None) +lmax = config.get("lmax", None) # Mrtrix3 citation (additional citations are included 
per rule as necessary): # Tournier, J.-D.; Smith, R. E.; Raffelt, D.; Tabbara, R.; Dhollander, T.; Pietsch, M.; Christiaens, D.; Jeurissen, B.; Yeh, C.-H. & Connelly, A. MRtrix3: A fast, flexible and open software framework for medical image processing and visualisation. NeuroImage, 2019, 202, 116137 @@ -21,8 +21,8 @@ lmaxes = config.get("lmax", "") rule nii2mif: input: dwi=inputs["dwi"].input_path, - bval=lambda wildcards: re.sub(".nii.gz", ".bval", inputs["dwi"].input_path), - bvec=lambda wildcards: re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path), + bval=re.sub(".nii.gz", ".bval", inputs["dwi"].input_path), + bvec=re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path), mask=inputs["mask"].input_path, output: dwi=bids( @@ -57,8 +57,8 @@ rule dwi2response: dwi=rules.nii2mif.output.dwi, mask=rules.nii2mif.output.mask, params: - shells=shells, - lmax=lmaxes, + shells=f"-shells {shells}" if shells else "", + lmax=f"-lmax {lmax}" if lmax else "", output: wm_rf=bids( root=mrtrix_dir, @@ -87,11 +87,7 @@ rule dwi2response: container: config["singularity"]["mrtrix"] shell: - 'if [ {params.shell} == "" ] && [ {params.lmax} == ""]; then' - " dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask}" - "else " - " dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} -shells {params.shells} -lmax {params.lmax} " - "fi" + "dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} {params.shells} {params.lmax} " rule responsemean: @@ -145,7 +141,7 @@ rule dwi2fod: if responsemean_flag else rules.responsemean.output.csf_avg_rf, params: - shell=shells, + shells=f"-shells {shells}" if shells else "", output: wm_fod=bids( root=mrtrix_dir, @@ -177,11 +173,7 @@ rule dwi2fod: container: config["singularity"]["mrtrix"] shell: - 'if [ {params.shell} == "" ]; then' - " dwi2fod -nthreads {threads} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.avg_csf} " - "else " - " dwi2fod -nthreads {threads} -shell {params.shell} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf} " - "fi" + "dwi2fod -nthreads {threads} {params.shells} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf}" rule mtnormalise: @@ -389,7 +381,8 @@ rule tck2connectome: root=mrtrix_dir, datatype="tractography", desc="subcortical", - suffix="nodeweights.csv" ** config["subj_wildcards"], + suffix="nodeweights.csv", + **config["subj_wildcards"], ), threads: workflow.cores group: @@ -405,6 +398,8 @@ rule connectome2tck: node_weights=rules.tck2connectome.output.node_weights, sl_assignment=rules.tck2connectome.output.sl_assignment, tck=rules.tckgen.output.tck, + params: + nodes=",".join(str(num) for num in range(2, 73)), output: edge_weight=temp( bids( @@ -430,10 +425,7 @@ rule connectome2tck: container: config["singularity"]["mrtrix"] shell: - "for i in `seq 2 72`; do " - " nodes=$nodes,$i " - "done " - "connectome2tck -nthreads {threads} -nodes $nodes -exclusive -filters_per_edge -tck_weights_in {input.node_weights} -prefix_tck_weights_out {output.edge_weight} {input.tck} {input.sl_assignment} {output.edge_tck} " + "connectome2tck -nthreads {threads} -nodes {params.nodes} -exclusive -filters_per_edge -tck_weights_in 
{input.node_weights} -prefix_tck_weights_out {output.edge_weight} {input.tck} {input.sl_assignment} {output.edge_tck} " # NOTE: Use labelmerge split segs here? @@ -509,7 +501,7 @@ rule filter_tck: bids( root=mrtrix_dir, datatype="tractography", - desc="from_{node1}-{node2}", + desc="from{node1}to{node2}", suffix="tractography.tck", **config["subj_wildcards"] ) @@ -518,7 +510,7 @@ rule filter_tck: bids( root=mrtrix_dir, datatype="tractography", - desc="from_{node1}-{node2}", + desc="from{node1}to{node2}", suffix="weights.csv", **config["subj_wildcards"] ) From 1e81fde9452a47f0b884e369b6d2e9a9699bbeb7 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Thu, 3 Nov 2022 13:41:30 -0400 Subject: [PATCH 10/20] Use wildcards for preproc rules - Redefined outputs to make use of wildcards to describe different tissue types - Kept responsemean call in one rule as the task it is performing is the same rather than split into separate rules - Consider using wildcards for tractograpy down the line --- dbsc/workflow/rules/mrtpipelines.smk | 186 ++++++++++----------------- 1 file changed, 69 insertions(+), 117 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index d697df2f..0e40b2f4 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -47,6 +47,15 @@ rule nii2mif: "mrconvert -nthreads {threads} {input.mask} {output.mask}" +dwi2response_out = bids( + root=mrtrix_dir, + datatype="response", + desc="{tissue}", + suffix="response.txt", + **config["subj_wildcards"], +) + + rule dwi2response: """ Estimate response functions using Dhollander algorithm @@ -60,27 +69,9 @@ rule dwi2response: shells=f"-shells {shells}" if shells else "", lmax=f"-lmax {lmax}" if lmax else "", output: - wm_rf=bids( - root=mrtrix_dir, - datatype="response", - desc="wm", - suffix="response.txt", - **config["subj_wildcards"], - ), - gm_rf=bids( - root=mrtrix_dir, - datatype="response", - desc="gm", - suffix="response.txt", - **config["subj_wildcards"], - ), - csf_rf=bids( - root=mrtrix_dir, - datatype="response", - desc="csf", - suffix="response.txt", - **config["subj_wildcards"], - ), + wm_rf=expand(dwi2response_out, tissue="wm", allow_missing=True), + gm_rf=expand(dwi2response_out, tissue="gm", allow_missing=True), + csf_rf=expand(dwi2response_out, tissue="csf", allow_missing=True), threads: workflow.cores group: "subject_1" @@ -90,6 +81,14 @@ rule dwi2response: "dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} {params.shells} {params.lmax} " +responsemean_out = bids( + root=join(mrtrix_dir, "avg"), + datatype="response", + desc="{tissue}", + suffix="response.txt", +) + + rule responsemean: """Compute average response function""" input: @@ -97,24 +96,9 @@ rule responsemean: gm_rf=expand(rules.dwi2response.output.gm_rf, subject=config["subjects"]), csf_rf=expand(rules.dwi2response.output.csf_rf, subject=config["subjects"]), output: - wm_avg_rf=bids( - root=join(mrtrix_dir, "avg"), - datatype="response", - desc="wm", - suffix="response.txt", - ), - gm_avg_rf=bids( - root=join(mrtrix_dir, "avg"), - datatype="response", - desc="gm", - suffix="response.txt", - ), - csf_avg_rf=bids( - root=join(mrtrix_dir, "avg"), - datatype="response", - desc="csf", - suffix="response.txt", - ), + wm_avg_rf=expand(responsemean_out, tissue="wm", allow_missing=True), + gm_avg_rf=expand(responsemean_out, tissue="gm", allow_missing=True), + csf_avg_rf=expand(responsemean_out, tissue="csf", 
allow_missing=True), threads: workflow.cores group: "group" @@ -126,6 +110,16 @@ rule responsemean: "responsemean {input.csf_rf} {output.csf_avg_rg} -nthreads {threads}" +dwi2fod_out = bids( + root=mrtrix_dir, + datatype="response", + model="csd", + desc="{tissue}", + suffix="fod.mif", + **config["subj_wildcards"], +) + + rule dwi2fod: """Jeurissen, B; Tournier, J-D; Dhollander, T; Connelly, A & Sijbers, J. Multi-tissue constrained spherical deconvolution for improved analysis of multi-shell diffusion MRI data. NeuroImage, 2014, 103, 411-426""" input: @@ -143,30 +137,9 @@ rule dwi2fod: params: shells=f"-shells {shells}" if shells else "", output: - wm_fod=bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="wm", - suffix="fod.mif", - **config["subj_wildcards"], - ), - gm_fod=bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="gm", - suffix="fod.mif", - **config["subj_wildcards"], - ), - csf_fod=bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="csf", - suffix="fod.mif", - **config["subj_wildcards"], - ), + wm_fod=expand(dwi2fod_out, tissue="wm", allow_missing=True), + gm_fod=expand(dwi2fod_out, tissue="gm", allow_missing=True), + csf_fod=expand(dwi2fod_out, tissue="csf", allow_missing=True), threads: workflow.cores group: "subject_2" @@ -176,6 +149,15 @@ rule dwi2fod: "dwi2fod -nthreads {threads} {params.shells} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf}" +mtnormalise_out = bids( + root=mrtrix_dir, + datatype="response", + model="csd", + desc="normalized", + suffix="{tissue}_fod.mif", +) + + rule mtnormalise: """ Raffelt, D.; Dhollander, T.; Tournier, J.-D.; Tabbara, R.; Smith, R. E.; Pierre, E. & Connelly, A. Bias Field Correction and Intensity Normalisation for Quantitative Analysis of Apparent Fibre Density. In Proc. 
ISMRM, 2017, 26, 3541 @@ -188,30 +170,9 @@ rule mtnormalise: csf_fod=rules.dwi2fod.output.csf_fod, mask=rules.nii2mif.output.mask, output: - wm_fod=bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="normalized", - suffix="wm_fod.mif", - **config["subj_wildcards"], - ), - gm_fod=bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="normalized", - suffix="gm_fod.mif", - **config["subj_wildcards"], - ), - csf_fod=bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="normalized", - suffix="csf_fod.mif", - **config["subj_wildcards"], - ), + wm_fod=expand(mtnormalise_out, tissue="wm", allow_missing=True), + gm_fod=expand(mtnormalise_out, tissue="gm", allow_missing=True), + csf_fod=expand(mtnormalise_out, tissue="csf", allow_missing=True), threads: workflow.cores group: "subject_2" @@ -243,6 +204,15 @@ rule dwinormalise: "subject_1" +dwi2tensor_out = bids( + root=mrtrix_dir, + datatype="dti", + model="dti", + suffix="{dti}.mif", + **config["subj_wildcards"], +) + + rule dwi2tensor: input: dwi=rules.dwinoramlise.dwi, @@ -254,34 +224,10 @@ rule dwi2tensor: suffix="dti.mif", **config["subj_wildcards"], ), - fa=bids( - root=mrtrix_dir, - datatype="dti", - model="dti", - suffix="fa.mif", - **config["subj_wildcards"], - ), - ad=bids( - root=mrtix_dir, - datatype="dti", - model="dti", - suffix="ad.mif", - **config["subj_wildcards"], - ), - rd=bids( - root=mrtrix_dir, - datatype="dti", - model="dti", - suffix="rd.mif", - **config["subj_wildcards"], - ), - md=bids( - root=mrtrix_dir, - datatype="dti", - model="dti", - suffix="fa.mif", - **config["subj_wildcards"], - ), + fa=expand(dwi2tensor_out, dti="fa", allow_missing=True), + ad=expand(dwi2tensor_out, dti="ad", allow_missing=True), + rd=expand(dwi2tensor_out, dti="rd", allow_missing=True), + md=expand(dwi2tensor_out, dti="md", allow_missing=True), threads: workflow.cores group: "subject_1" @@ -297,6 +243,8 @@ rule dwi2tensor: # ------------ MRTRIX TRACTOGRAPHY BEGIN ----------# # TODO (v0.1): CHECK TO MAKE SURE RULES ARE IMPORTED CORRECTLY FROM OTHER SMK FILES + + rule tckgen: # Tournier, J.-D.; Calamante, F. & Connelly, A. Improved probabilistic streamlines tractography by 2nd order integration over fibre orientation distributions. 
Proceedings of the International Society for Magnetic Resonance in Medicine, 2010, 1670 input: @@ -415,7 +363,7 @@ rule connectome2tck: root=mrtix_dir, datatype="tractography", desc="subcortical", - suffix="from_", + suffix="from", **config["subj_wildcards"], ) ), @@ -581,7 +529,8 @@ rule filtered_tck2connectome: root=mrtrix_dir, datatype="tractography", desc="subcortical", - suffix="nodeweights.csv" ** config["subj_wildcards"], + suffix="nodeweights.csv", + **config["subj_wildcards"], ), threads: workflow.core group: @@ -590,3 +539,6 @@ rule filtered_tck2connectome: config["singularity"]["mrtrix"] shell: "tck2connectome -nthreads {threads} -zero_diagonal -stat_edge sum -assignment_radial_search {params.radius} -tck_weights_in {input.weights} -out_assignments {output.sl_assignment} -symmetric {input.tck} {input.subcortical_seg} {output.node_weights} -force" + + +# ------------ MRTRIX TRACTOGRAPHY END ----------# From 62c16861bd5744b5ed0f663df5e9972477ac1102 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Thu, 3 Nov 2022 13:49:43 -0400 Subject: [PATCH 11/20] rm non-existant group target --- dbsc/config/snakebids.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/dbsc/config/snakebids.yml b/dbsc/config/snakebids.yml index bc3b3b25..2088288f 100644 --- a/dbsc/config/snakebids.yml +++ b/dbsc/config/snakebids.yml @@ -16,8 +16,6 @@ analysis_levels: &analysis_levels targets_by_analysis_level: participant: - "all" # if "", then the first rule is run - group: - - "all_group_tsv" # this configures the pybids grabber - create an entry for each type of input you want to grab # indexed by name of input dictionary for each input is passed directly to pybids get() @@ -126,8 +124,8 @@ parse_args: default: 1.5 --responsemean_dir: - help: 'Provide directory containing average response functions. If not - provided, one will be computed from the subjects in the input + help: 'Provide directory containing average response functions. If not + provided, one will be computed from the subjects in the input directory.' 
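  # Note: when --responsemean_dir is given, the dwi2fod rule reads the average response
  # functions from that directory as desc-wm_response.txt, desc-gm_response.txt, and
  # desc-csf_response.txt (one file per tissue type).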
#-----------------------------------------------------# From a828d659f8a5c6051ebeada25918b6873dbf7798 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Thu, 3 Nov 2022 13:56:08 -0400 Subject: [PATCH 12/20] fix responsemean_flag if statement --- dbsc/workflow/rules/mrtpipelines.smk | 32 +++++++++++++++++++--------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 0e40b2f4..822f9adb 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -6,6 +6,7 @@ include: "freesurfer.smk" # Directories +prepdwi_dir = join(config["bids_dir"], "derivatives", "prepdwi") mrtrix_dir = join(config["bids_dir"], "derivatives", "mrtrix") # Parameters @@ -20,7 +21,12 @@ lmax = config.get("lmax", None) # ------------ MRTRIX PREPROC BEGIN ----------# rule nii2mif: input: - dwi=inputs["dwi"].input_path, + dwi=bids( + root=prepdwi_dir, + datatype="dwi", + suffix="dwi.nii.gz", + **config["subj_wildcards"] + ), bval=re.sub(".nii.gz", ".bval", inputs["dwi"].input_path), bvec=re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path), mask=inputs["mask"].input_path, @@ -125,15 +131,21 @@ rule dwi2fod: input: dwi=rules.nii2mif.output.dwi, mask=rules.nii2mif.output.mask, - wm_rf=join(config["responsemean_dir"], "desc-wm_response.txt") - if responsemean_flag - else rules.responsemean.output.wm_avg_rf, - gm_rf=join(config["responsemean_dir"], "desc-gm_response.txt") - if responsemean_flag - else rules.responsemean.output.gm_avg_rf, - csf_rf=join(config["responsemean_dir"], "desc-csf_response.txt") - if responsemean_flag - else rules.responsemean.output.csf_avg_rf, + wm_rf=( + join(config["responsemean_dir"], "desc-wm_response.txt") + if responsemean_flag + else rules.responsemean.output.wm_avg_rf + ), + gm_rf=( + join(config["responsemean_dir"], "desc-gm_response.txt") + if responsemean_flag + else rules.responsemean.output.gm_avg_rf + ), + csf_rf=( + join(config["responsemean_dir"], "desc-csf_response.txt") + if responsemean_flag + else rules.responsemean.output.csf_avg_rf + ), params: shells=f"-shells {shells}" if shells else "", output: From 0ddb5d254ed34153587c5c90a39742b1c4a3d41a Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Thu, 3 Nov 2022 14:01:45 -0400 Subject: [PATCH 13/20] Make change back to config dict - Revert usage back to config["input_path"] - Set mrtrix_dir to make use of the output_dir rather than bids_dir --- dbsc/workflow/Snakefile | 16 +++++++++------- dbsc/workflow/rules/mrtpipelines.smk | 16 +++++----------- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/dbsc/workflow/Snakefile b/dbsc/workflow/Snakefile index 351b616d..a00097fa 100644 --- a/dbsc/workflow/Snakefile +++ b/dbsc/workflow/Snakefile @@ -6,13 +6,15 @@ from snakebids import bids configfile: 'config/snakebids.yml' # writes inputs_config.yml and updates config dict -inputs = snakebids.generate_inputs( - bids_dir=config["bids_dir"], - pybids_inputs=config["pybids_inputs"], - derivatives=config["derivatives"], - participant_label=config["participant_label"], - exclude_participant_label=config["exclude_participant_label"], - use_bids_inputs=True, +config.update( + snakebids.generate_inputs( + bids_dir=config["bids_dir"], + pybids_inputs=config["pybids_inputs"], + derivatives=config["derivatives"], + participant_label=config["participant_label"], + exclude_participant_label=config["exclude_participant_label"], + use_bids_inputs=True, + ) ) # this adds constraints to the bids naming diff --git 
a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 822f9adb..b7a0d3eb 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -6,8 +6,7 @@ include: "freesurfer.smk" # Directories -prepdwi_dir = join(config["bids_dir"], "derivatives", "prepdwi") -mrtrix_dir = join(config["bids_dir"], "derivatives", "mrtrix") +mrtrix_dir = join(config["output_dir"], "mrtrix") # Parameters responsemean_flag = config.get("responsemean_dir", None) @@ -21,15 +20,10 @@ lmax = config.get("lmax", None) # ------------ MRTRIX PREPROC BEGIN ----------# rule nii2mif: input: - dwi=bids( - root=prepdwi_dir, - datatype="dwi", - suffix="dwi.nii.gz", - **config["subj_wildcards"] - ), - bval=re.sub(".nii.gz", ".bval", inputs["dwi"].input_path), - bvec=re.sub(".nii.gz", ".bvec", inputs["dwi"].input_path), - mask=inputs["mask"].input_path, + dwi=config["input_path"]["dwi"], + bval=re.sub(".nii.gz", ".bval", config["input_path"]["dwi"]), + bvec=re.sub(".nii.gz", ".bvec", config["input_path"]["dwi"]), + mask=config["input_path"]["mask"], output: dwi=bids( root=mrtrix_dir, From 3df24f4126dee498cde517ebe6b170eae3910e4f Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Fri, 4 Nov 2022 13:31:48 -0400 Subject: [PATCH 14/20] Multiple fixes per tkuehn's comments - Changes `use_bids_inputs=False` in config - Removed group analysis level in `snakebids.yml` - Added `desc` as a wildcard in `snakebids.yml` - Added missing '&&' in mrtpipelines.smk --- dbsc/config/snakebids.yml | 3 ++- dbsc/workflow/Snakefile | 2 +- dbsc/workflow/rules/mrtpipelines.smk | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/dbsc/config/snakebids.yml b/dbsc/config/snakebids.yml index 2088288f..d3c52163 100644 --- a/dbsc/config/snakebids.yml +++ b/dbsc/config/snakebids.yml @@ -10,7 +10,6 @@ derivatives: False # Search in bids/derivatives if True; can also be path(s) to # List of analysis levels in bids app analysis_levels: &analysis_levels - participant - - group # Mapping from analysis_level to set of target rules or files targets_by_analysis_level: @@ -30,6 +29,7 @@ pybids_inputs: wildcards: - subject - session + - desc mask: filters: suffix: "mask" @@ -39,6 +39,7 @@ pybids_inputs: wildcards: - subject - session + - desc # Configuration for the command-line parameters to make available # passed on the argparse add_argument() diff --git a/dbsc/workflow/Snakefile b/dbsc/workflow/Snakefile index a00097fa..bb1361a5 100644 --- a/dbsc/workflow/Snakefile +++ b/dbsc/workflow/Snakefile @@ -13,7 +13,7 @@ config.update( derivatives=config["derivatives"], participant_label=config["participant_label"], exclude_participant_label=config["exclude_participant_label"], - use_bids_inputs=True, + use_bids_inputs=False, ) ) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index b7a0d3eb..59cbddae 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -475,7 +475,7 @@ rule filter_tck: container: config["singularity"]["mrtrix"] shell: - "mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} " + "mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} &&" "tckedit -nthreads {params.therads} -exclude {output.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck} " @@ 
-512,7 +512,7 @@ rule combine_filtered: container: config["singularity"]["mrtrix"] shell: - "tckedit {input.tck} {output.combined_tck} " + "tckedit {input.tck} {output.combined_tck} &&" "cat {input.weights} >> {output.combined_weights} " From d80dbe2be4ccbe82589020cda0440dfc25d309fd Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Fri, 4 Nov 2022 17:17:14 -0400 Subject: [PATCH 15/20] Replace expand with functools.partial - Make use of functools.partial to iterate over different wildcards rather than expand --- dbsc/workflow/rules/mrtpipelines.smk | 277 ++++++++++++--------------- 1 file changed, 123 insertions(+), 154 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 59cbddae..256cf324 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -1,4 +1,5 @@ from os.path import join +from functools import partial include: "zona_bb_subcortex.smk" @@ -13,6 +14,36 @@ responsemean_flag = config.get("responsemean_dir", None) shells = config.get("shells", None) lmax = config.get("lmax", None) +# BIDS partials +bids_response_out = partial( + bids, + root=mrtrix_dir, + datatype="response", + suffix="response.txt", +) + +bids_dti_out = partial( + bids, + root=mrtrix_dir, + datatype="dti", + model="dti", + **config["subj_wildcards"], +) + +bids_tractography_out = partial( + bids, + root=mrtrix_dir, + datatype="tractography", + **config["subj_wildcards"], +) + +bids_anat_out = partial( + bids, + root=mrtrix_dir, + datatype="anat", + **config["subj_wildcards"], +) + # Mrtrix3 citation (additional citations are included per rule as necessary): # Tournier, J.-D.; Smith, R. E.; Raffelt, D.; Tabbara, R.; Dhollander, T.; Pietsch, M.; Christiaens, D.; Jeurissen, B.; Yeh, C.-H. & Connelly, A. MRtrix3: A fast, flexible and open software framework for medical image processing and visualisation. 
NeuroImage, 2019, 202, 116137 @@ -47,15 +78,6 @@ rule nii2mif: "mrconvert -nthreads {threads} {input.mask} {output.mask}" -dwi2response_out = bids( - root=mrtrix_dir, - datatype="response", - desc="{tissue}", - suffix="response.txt", - **config["subj_wildcards"], -) - - rule dwi2response: """ Estimate response functions using Dhollander algorithm @@ -69,9 +91,21 @@ rule dwi2response: shells=f"-shells {shells}" if shells else "", lmax=f"-lmax {lmax}" if lmax else "", output: - wm_rf=expand(dwi2response_out, tissue="wm", allow_missing=True), - gm_rf=expand(dwi2response_out, tissue="gm", allow_missing=True), - csf_rf=expand(dwi2response_out, tissue="csf", allow_missing=True), + wm_rf=bids_response_out( + desc="wm", + suffix="response.txt", + **config["subj_wildcards"], + ), + gm_rf=bids_response_out( + desc="gm", + suffix="response.txt", + **config["subj_wildcards"], + ), + csf_rf=bids_response_out( + desc="csf", + suffix="response.txt", + **config["subj_wildcards"], + ), threads: workflow.cores group: "subject_1" @@ -81,14 +115,6 @@ rule dwi2response: "dwi2response dhollander {input.dwi} {output.wm_rf} {output.gm_rf} {output.csf_rf} -nthreads {threads} -mask {input.mask} {params.shells} {params.lmax} " -responsemean_out = bids( - root=join(mrtrix_dir, "avg"), - datatype="response", - desc="{tissue}", - suffix="response.txt", -) - - rule responsemean: """Compute average response function""" input: @@ -96,9 +122,18 @@ rule responsemean: gm_rf=expand(rules.dwi2response.output.gm_rf, subject=config["subjects"]), csf_rf=expand(rules.dwi2response.output.csf_rf, subject=config["subjects"]), output: - wm_avg_rf=expand(responsemean_out, tissue="wm", allow_missing=True), - gm_avg_rf=expand(responsemean_out, tissue="gm", allow_missing=True), - csf_avg_rf=expand(responsemean_out, tissue="csf", allow_missing=True), + wm_avg_rf=bids_response_out( + root=join(mrtrix_dir, "avg"), + desc="wm", + ), + gm_avg_rf=bids_response_out( + root=join(mrtrix_dir, "avg"), + desc="gm", + ), + csf_avg_rf=bids_response_out( + root=join(mrtrix_dir, "avg"), + desc="csf", + ), threads: workflow.cores group: "group" @@ -110,16 +145,6 @@ rule responsemean: "responsemean {input.csf_rf} {output.csf_avg_rg} -nthreads {threads}" -dwi2fod_out = bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="{tissue}", - suffix="fod.mif", - **config["subj_wildcards"], -) - - rule dwi2fod: """Jeurissen, B; Tournier, J-D; Dhollander, T; Connelly, A & Sijbers, J. Multi-tissue constrained spherical deconvolution for improved analysis of multi-shell diffusion MRI data. 
NeuroImage, 2014, 103, 411-426""" input: @@ -143,9 +168,24 @@ rule dwi2fod: params: shells=f"-shells {shells}" if shells else "", output: - wm_fod=expand(dwi2fod_out, tissue="wm", allow_missing=True), - gm_fod=expand(dwi2fod_out, tissue="gm", allow_missing=True), - csf_fod=expand(dwi2fod_out, tissue="csf", allow_missing=True), + wm_fod=bids_response_out( + model="csd", + desc="wm", + suffix="fod.mif", + **config["subj_wildcards"], + ), + gm_fod=bids_response_out( + model="csd", + desc="gm", + suffix="fod.mif", + **config["subj_wildcards"], + ), + csf_fod=bids_response_out( + model="csd", + desc="csf", + suffix="fod.mif", + **config["subj_wildcards"], + ), threads: workflow.cores group: "subject_2" @@ -155,15 +195,6 @@ rule dwi2fod: "dwi2fod -nthreads {threads} {params.shells} -mask {input.mask} msmt_csd {input.dwi} {input.avg_sfwm} {output.wm_fod} {input.avg_gm} {output.gm_fod} {input.avg_csf} {output.csf}" -mtnormalise_out = bids( - root=mrtrix_dir, - datatype="response", - model="csd", - desc="normalized", - suffix="{tissue}_fod.mif", -) - - rule mtnormalise: """ Raffelt, D.; Dhollander, T.; Tournier, J.-D.; Tabbara, R.; Smith, R. E.; Pierre, E. & Connelly, A. Bias Field Correction and Intensity Normalisation for Quantitative Analysis of Apparent Fibre Density. In Proc. ISMRM, 2017, 26, 3541 @@ -176,9 +207,24 @@ rule mtnormalise: csf_fod=rules.dwi2fod.output.csf_fod, mask=rules.nii2mif.output.mask, output: - wm_fod=expand(mtnormalise_out, tissue="wm", allow_missing=True), - gm_fod=expand(mtnormalise_out, tissue="gm", allow_missing=True), - csf_fod=expand(mtnormalise_out, tissue="csf", allow_missing=True), + wm_fod=bids_response_out( + model="csd", + desc="wm", + suffix="fodNormalized.mif", + **config["subj_wildcards"], + ), + gm_fod=bids_response_out( + model="csd", + desc="gm", + suffix="fodNormalized.mif", + **config["subj_wildcards"], + ), + csf_fod=bids_response_out( + model="csd", + desc="csf", + suffix="fodNormalized.mif", + **config["subj_wildcards"], + ), threads: workflow.cores group: "subject_2" @@ -210,30 +256,16 @@ rule dwinormalise: "subject_1" -dwi2tensor_out = bids( - root=mrtrix_dir, - datatype="dti", - model="dti", - suffix="{dti}.mif", - **config["subj_wildcards"], -) - - rule dwi2tensor: input: dwi=rules.dwinoramlise.dwi, mask=rules.nii2mif.output.mask, output: - dti=bids( - root=mrtrix_dir, - datatype="dti", - suffix="dti.mif", - **config["subj_wildcards"], - ), - fa=expand(dwi2tensor_out, dti="fa", allow_missing=True), - ad=expand(dwi2tensor_out, dti="ad", allow_missing=True), - rd=expand(dwi2tensor_out, dti="rd", allow_missing=True), - md=expand(dwi2tensor_out, dti="md", allow_missing=True), + dti=bids_dti_out(suffix="tensor.mif"), + fa=bids_dti_out(suffix="fa.mif"), + ad=bids_dti_out(suffix="ad.mif"), + rd=bids_dti_out(suffix="rd.mif"), + md=bids_dti_out(suffix="md.mif"), threads: workflow.cores group: "subject_1" @@ -263,12 +295,9 @@ rule tckgen: step=config["step"], sl=config["sl_count"], output: - tck=bids( - root=mrtrix_dir, - datatype="tractography", + tck=bids_tractography_out( desc="iFOD2", suffix="tractography.tck", - **config["subj_wildcards"], ), threads: workflow.cores group: @@ -286,19 +315,10 @@ rule tcksift2: fod=rules.mtnormalise.output.wm_fod, output: weights=bids( - root=mrtrix_dir, - datatype="tractography", - desc="iFOD2", - suffix="tckweights.txt", - **config["subj_wildcards"], - ), - mu=bids( - root=mrtrix_dir, - datatype="tractography", desc="iFOD2", - suffix="mucoefficient.txt", - **config["subj_wildcards"], + suffix="tckWeights.txt", ), + 
mu=bids(desc="iFOD2", suffix="muCoefficient.txt"), threads: workflow.cores group: "subject_2" @@ -324,19 +344,13 @@ rule tck2connectome: params: radius=config["radial_search"], output: - sl_assignment=bids( - root=mrtix_dir, - datatype="tractography", + sl_assignment=bids_tractography_out( desc="subcortical", - suffix="nodeassignment.txt", - **config["subj_wildcards"], + suffix="nodeAssignment.txt", ), - node_weights=bids( - root=mrtrix_dir, - datatype="tractography", + node_weights=bids_tractography_out( desc="subcortical", - suffix="nodeweights.csv", - **config["subj_wildcards"], + suffix="nodeWeights.csv", ), threads: workflow.cores group: @@ -356,21 +370,15 @@ rule connectome2tck: nodes=",".join(str(num) for num in range(2, 73)), output: edge_weight=temp( - bids( - root=mrtrix_dir, - datatype="tractography", + bids_tractography_out( desc="subcortical", - suffix="tckweights", - **config["subj_wildcards"], + suffix="tckWeights", ) ), edge_tck=temp( bids( - root=mrtix_dir, - datatype="tractography", desc="subcortical", suffix="from", - **config["subj_wildcards"], ) ), threads: workflow.cores @@ -389,12 +397,9 @@ rule create_roi_mask: subcortical_seg=rules.add_brainstem_new_seg.output.seg, output: roi_mask=temp( - bids( - root=mrtrix_dir, - datatype="anat", + bids_anat_out( desc="{node1}", suffix="mask.mif", - **config["subj_wildcards"] ) ), threads: workflow.cores @@ -409,64 +414,40 @@ rule create_roi_mask: rule filter_tck: # Node1 should be same as prev rule, need to iterate over node2 input: - roi1=bids( - root=mrtrix_dir, - datatype="anat", + roi1=bids_anat_out( desc="{node1}", suffix="mask.mif", - **config["subj_wildcards"] ), - roi2=bids( - root=mrtrix_dir, - datatype="anat", + roi2=bids_anat_out( desc="{node2}", suffix="mask.mif", - **config["subj_wildcards"] ), # ZI rois (do these need to be separately defined in output?) 
- lZI=bids( - root=mrtrix_dir, - datatype="anat", + lZI=bids_anat_out( desc="21", suffix="mask.mif", - **config["subj_wildcards"] - ), - rZI=bids( - root=mrtrix_dir, - datatype="anat", - desc="22", - suffix="mask.mif", - **config["subj_wildcards"] ), + rZI=bids_anat_out(desc="22", suffix="mask.mif"), tck=rules.connectome2tck.output.edge_tck, weights=rules.tck2connectome.outputs.node_weights, subcortical_seg=rules.add_brainstem_new_seg.output.seg, output: filter_mask=temp( - bids( - root=mrtrix_dir, - datatype="anat", + bids_anat_out( desc="exclude", suffix="mask.mif", - **config["subj_wildcards"] ) ), filtered_tck=temp( - bids( - root=mrtrix_dir, - datatype="tractography", + bids_tractography_out( desc="from{node1}to{node2}", suffix="tractography.tck", - **config["subj_wildcards"] ) ), filtered_weights=temp( - bids( - root=mrtrix_dir, - datatype="tractography", + bids_tractography_out( desc="from{node1}to{node2}", suffix="weights.csv", - **config["subj_wildcards"] ) ), threads: workflow.cores @@ -492,19 +473,13 @@ rule combine_filtered: **config["input_zip_lists"]["dwi"] ), output: - combined_tck=bids( - root=mrtrix_dir, - datatype="tractography", + combined_tck=bids_tractography_out( desc="subcortical", suffix="tractography.tck", - **config["subj_wildcards"] ), - combined_weights=bids( - root=mrtrix_dir, - datatype="tractography", + combined_weights=bids_tractography_out( desc="subcortical", - suffix="tckweights.txt", - **config["subj_wildcards"] + suffix="tckWeights.txt", ), threads: workflow.cores group: @@ -524,19 +499,13 @@ rule filtered_tck2connectome: params: radius=config["radial_search"], output: - sl_assignment=bids( - root=mrtrix_dir, - datatype="tractography", + sl_assignment=bids_tractography_out( desc="subcortical", - suffix="nodeassignment.txt", - **config["subj_wildcards"], + suffix="nodeAssignment.txt", ), - node_weights=bids( - root=mrtrix_dir, - datatype="tractography", + node_weights=bids_tractography_out( desc="subcortical", - suffix="nodeweights.csv", - **config["subj_wildcards"], + suffix="nodeWeights.csv", ), threads: workflow.core group: From 0ff97da417332908a91ad696bb432b1840a78607 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Fri, 4 Nov 2022 17:49:23 -0400 Subject: [PATCH 16/20] rm desc from wildcards --- dbsc/config/snakebids.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/dbsc/config/snakebids.yml b/dbsc/config/snakebids.yml index d3c52163..c339875e 100644 --- a/dbsc/config/snakebids.yml +++ b/dbsc/config/snakebids.yml @@ -29,7 +29,6 @@ pybids_inputs: wildcards: - subject - session - - desc mask: filters: suffix: "mask" @@ -39,7 +38,6 @@ pybids_inputs: wildcards: - subject - session - - desc # Configuration for the command-line parameters to make available # passed on the argparse add_argument() From d59cb044ed4352f84343697fe6a306e01820592c Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Sat, 5 Nov 2022 14:46:06 -0400 Subject: [PATCH 17/20] replace os.path.join with pathlib.Path --- dbsc/workflow/rules/mrtpipelines.smk | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 256cf324..1a82a9c8 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -1,4 +1,4 @@ -from os.path import join +from pathlib import Path from functools import partial @@ -7,7 +7,10 @@ include: "freesurfer.smk" # Directories -mrtrix_dir = join(config["output_dir"], "mrtrix") +mrtrix_dir = str(Path(config["output_dir"]) / "mrtrix") + 
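# Note on the directory creation below: Path.mkdir(parents=True) creates any missing
# parent directories, but with the default exist_ok=False it raises FileExistsError if
# the target directory already exists (e.g. on a re-run). A more forgiving sketch:
# Path(mrtrix_dir).mkdir(parents=True, exist_ok=True)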
+# Make directory if it doesn't exist +Path(mrtrix_dir).mkdir(parents=True) # Parameters responsemean_flag = config.get("responsemean_dir", None) @@ -123,15 +126,15 @@ rule responsemean: csf_rf=expand(rules.dwi2response.output.csf_rf, subject=config["subjects"]), output: wm_avg_rf=bids_response_out( - root=join(mrtrix_dir, "avg"), + root=str(Path(mrtrix_dir) / "avg"), desc="wm", ), gm_avg_rf=bids_response_out( - root=join(mrtrix_dir, "avg"), + root=str(Path(mrtrix_dir) / "avg"), desc="gm", ), csf_avg_rf=bids_response_out( - root=join(mrtrix_dir, "avg"), + root=str(Path(mrtrix_dir) / "avg"), desc="csf", ), threads: workflow.cores @@ -151,17 +154,17 @@ rule dwi2fod: dwi=rules.nii2mif.output.dwi, mask=rules.nii2mif.output.mask, wm_rf=( - join(config["responsemean_dir"], "desc-wm_response.txt") + str(Path(config["responsemean_dir"]) / "desc-wm_response.txt") if responsemean_flag else rules.responsemean.output.wm_avg_rf ), gm_rf=( - join(config["responsemean_dir"], "desc-gm_response.txt") + str(Path(config["responsemean_dir"]) / "desc-gm_response.txt") if responsemean_flag else rules.responsemean.output.gm_avg_rf ), csf_rf=( - join(config["responsemean_dir"], "desc-csf_response.txt") + str(Path(config["responsemean_dir"]) / "desc-csf_response.txt") if responsemean_flag else rules.responsemean.output.csf_avg_rf ), From 5df79d531674b7b33195c9da447751579ef9fb86 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Mon, 7 Nov 2022 17:18:23 -0500 Subject: [PATCH 18/20] Remove unneeded suffix entity in bids_response_out call --- dbsc/workflow/rules/mrtpipelines.smk | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 1a82a9c8..0fd8b64a 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -96,17 +96,14 @@ rule dwi2response: output: wm_rf=bids_response_out( desc="wm", - suffix="response.txt", **config["subj_wildcards"], ), gm_rf=bids_response_out( desc="gm", - suffix="response.txt", **config["subj_wildcards"], ), csf_rf=bids_response_out( desc="csf", - suffix="response.txt", **config["subj_wildcards"], ), threads: workflow.cores @@ -426,10 +423,7 @@ rule filter_tck: suffix="mask.mif", ), # ZI rois (do these need to be separately defined in output?) 
- lZI=bids_anat_out( - desc="21", - suffix="mask.mif", - ), + lZI=bids_anat_out(desc="21", suffix="mask.mif"), rZI=bids_anat_out(desc="22", suffix="mask.mif"), tck=rules.connectome2tck.output.edge_tck, weights=rules.tck2connectome.outputs.node_weights, From 06a6ee921609bceeeb8df9969876d9735f69088b Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Mon, 7 Nov 2022 17:51:02 -0500 Subject: [PATCH 19/20] fix nodes params -> range(1,73) --- dbsc/workflow/rules/mrtpipelines.smk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 0fd8b64a..744307bd 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -367,7 +367,7 @@ rule connectome2tck: sl_assignment=rules.tck2connectome.output.sl_assignment, tck=rules.tckgen.output.tck, params: - nodes=",".join(str(num) for num in range(2, 73)), + nodes=",".join(str(num) for num in range(1, 73)), output: edge_weight=temp( bids_tractography_out( From 2f1850776443344f5a40625890b0601dd5e55d16 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Thu, 10 Nov 2022 13:52:45 -0500 Subject: [PATCH 20/20] fix mrtpipelines, add to Snakefile - Fixes pattern for and expands over node1 and node2 - Snakefmt fixes --- dbsc/workflow/Snakefile | 3 +- dbsc/workflow/rules/mrtpipelines.smk | 75 +++++++++++++++------------- 2 files changed, 43 insertions(+), 35 deletions(-) diff --git a/dbsc/workflow/Snakefile b/dbsc/workflow/Snakefile index bb1361a5..db973afc 100644 --- a/dbsc/workflow/Snakefile +++ b/dbsc/workflow/Snakefile @@ -25,4 +25,5 @@ wildcard_constraints: # Rules include: "rules/freesurfer.smk" -include: "rules/zona_bb_subcortex" +include: "rules/zona_bb_subcortex.smk" +include: "rules/mrtpipelines.smk" diff --git a/dbsc/workflow/rules/mrtpipelines.smk b/dbsc/workflow/rules/mrtpipelines.smk index 744307bd..66969eed 100644 --- a/dbsc/workflow/rules/mrtpipelines.smk +++ b/dbsc/workflow/rules/mrtpipelines.smk @@ -118,21 +118,14 @@ rule dwi2response: rule responsemean: """Compute average response function""" input: - wm_rf=expand(rules.dwi2response.output.wm_rf, subject=config["subjects"]), - gm_rf=expand(rules.dwi2response.output.gm_rf, subject=config["subjects"]), - csf_rf=expand(rules.dwi2response.output.csf_rf, subject=config["subjects"]), - output: - wm_avg_rf=bids_response_out( - root=str(Path(mrtrix_dir) / "avg"), - desc="wm", - ), - gm_avg_rf=bids_response_out( - root=str(Path(mrtrix_dir) / "avg"), - desc="gm", + subject_rf=bids_response_out( + desc="{tissue}", + **config["subj_wildcards"], ), - csf_avg_rf=bids_response_out( + output: + avg_rf=bids_response_out( root=str(Path(mrtrix_dir) / "avg"), - desc="csf", + desc="{tissue}", ), threads: workflow.cores group: @@ -140,9 +133,7 @@ rule responsemean: container: config["singularity"]["mrtrix"] shell: - "responsemean {input.wm_rf} {output.wm_avg_rg} -nthreads {threads} &&" - "responsemean {input.gm_rf} {output.gm_avg_rg} -nthreads {threads} &&" - "responsemean {input.csf_rf} {output.csf_avg_rg} -nthreads {threads}" + "responsemean {input.subject_rf} {output.avg_rf} -nthreads {threads}" rule dwi2fod: @@ -153,17 +144,17 @@ rule dwi2fod: wm_rf=( str(Path(config["responsemean_dir"]) / "desc-wm_response.txt") if responsemean_flag - else rules.responsemean.output.wm_avg_rf + else expand(rules.responsemean.output.avg_rf, tissue="wm") ), gm_rf=( str(Path(config["responsemean_dir"]) / "desc-gm_response.txt") if responsemean_flag - else rules.responsemean.output.gm_avg_rf + else 
expand(rules.responsemean.output.avg_rf, tissue="gm") ), csf_rf=( str(Path(config["responsemean_dir"]) / "desc-csf_response.txt") if responsemean_flag - else rules.responsemean.output.csf_avg_rf + else expand(rules.responsemean.output.csf_avg_rf, tissue="csf") ), params: shells=f"-shells {shells}" if shells else "", @@ -250,10 +241,10 @@ rule dwinormalise: threads: workflow.cores container: config["singularity"]["mrtrix"] - shell: - "dwinormalise individual -nthreads {threads} {input.dwi} {input.mask} {output.dwi}" group: "subject_1" + shell: + "dwinormalise individual -nthreads {threads} {input.dwi} {input.mask} {output.dwi}" rule dwi2tensor: @@ -376,7 +367,7 @@ rule connectome2tck: ) ), edge_tck=temp( - bids( + bids_tractography_out( desc="subcortical", suffix="from", ) @@ -411,8 +402,7 @@ rule create_roi_mask: "mrcalc -nthreads {threads} {input.subcortical_seg} {wildcards.node1} -eq {output.roi_mask}" -rule filter_tck: - # Node1 should be same as prev rule, need to iterate over node2 +rule create_exclude_mask: input: roi1=bids_anat_out( desc="{node1}", @@ -422,30 +412,43 @@ rule filter_tck: desc="{node2}", suffix="mask.mif", ), - # ZI rois (do these need to be separately defined in output?) lZI=bids_anat_out(desc="21", suffix="mask.mif"), rZI=bids_anat_out(desc="22", suffix="mask.mif"), - tck=rules.connectome2tck.output.edge_tck, - weights=rules.tck2connectome.outputs.node_weights, subcortical_seg=rules.add_brainstem_new_seg.output.seg, output: filter_mask=temp( bids_anat_out( - desc="exclude", + desc="exclude{node1}AND{node2}", suffix="mask.mif", ) ), + threads: workflow.cores + group: + "subject_2" + container: + config["singularity"]["mrtrix"] + shell: + "mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask}" + + +rule filter_tck: + input: + filter_mask=rules.create_exclude_mask.outputs.filter_mask, + tck=rules.connectome2tck.output.edge_tck, + weights=rules.tck2connectome.outputs.node_weights, + subcortical_seg=rules.add_brainstem_new_seg.output.seg, + output: filtered_tck=temp( bids_tractography_out( desc="from{node1}to{node2}", suffix="tractography.tck", - ) + ), ), filtered_weights=temp( bids_tractography_out( desc="from{node1}to{node2}", suffix="weights.csv", - ) + ), ), threads: workflow.cores group: @@ -453,8 +456,10 @@ rule filter_tck: container: config["singularity"]["mrtrix"] shell: - "mrcalc -nthreads {threads} {input.subcortical_seg} 0 -neq {input.roi1} -sub {input.roi2} -sub {input.lZI} -sub {input.rZI} -sub {output.filter_mask} &&" - "tckedit -nthreads {params.therads} -exclude {output.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck} " + "tckedit -nthreads {threads} -exclude {input.filter_mask} -tck_weights_in {input.weights} -tck_weights_out {output.filtered_weights} {input.tck} {output.filtered_tck}" + + +idxes = np.triu_indices(72, k=1) rule combine_filtered: @@ -462,12 +467,14 @@ rule combine_filtered: tck=expand( rules.filter_tck.output.filtered_tck, zip, - **config["input_zip_lists"]["dwi"] + node1=list(idxes[0] + 1), + node2=list(idxes[1] + 1), ), weights=expand( rules.filter_tck.output.filtered_weights, zip, - **config["input_zip_lists"]["dwi"] + node1=list(idxes[0] + 1), + node2=list(idxes[1] + 1), ), output: combined_tck=bids_tractography_out(