Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Expand over sessions, update config call #70

Merged
merged 8 commits into from
Apr 26, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
2 changes: 2 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -91,3 +91,5 @@ jobs:
run: poetry run poe test_labelmerge
- name: Test using single atlas (skipping labelmerge)
run: poetry run poe test_skip_labelmerge
- name: Test sessions
run: poetry run poe test_sessions
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@
test/data/derivatives/.snakemake/
test/data/derivatives/config/
test/data/derivatives/.snakebids
test/data/derivatives_sessions/.snakemake/
test/data/derivatives_sessions/config/
test/data/derivatives_sessions/.snakebids

# Tar files
*.tar
Expand Down
166 changes: 83 additions & 83 deletions poetry.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,8 @@ test_responsemean = { shell = "python ./scattr/run.py ./test/data/bids test/data
test_dwi = { shell = "python ./scattr/run.py ./test/data/bids_nodwi test/data/derivatives/ participant --dwi_dir ./test/data/derivatives/prepdwi --fs-license ./test/.fs_license -np --force-output" }
test_labelmerge = { shell = "python ./scattr/run.py ./test/data/bids test/data/derivatives/ participant --labelmerge_base_dir ./base_dir --labelmerge_overlay_dir ./overlay_dir --skip_brainstem --skip_thal_seg --fs-license ./test/.fs_license -np --force-output" }
test_skip_labelmerge = { shell = "python ./scattr/run.py ./test/data/bids test/data/derivatives/ participant --skip_labelmerge --skip_brainstem --skip_thal_seg --fs-license ./test/.fs_license -np --force-output" }
test = ["test_base", "test_freesurfer", "test_responsemean", "test_dwi", "test_labelmerge", "test_skip_labelmerge"]
test_sessions = { shell = "python ./scattr/run.py ./test/data/bids_sessions test/data/derivatives_sessions/ participant --fs-license ./test/.fs_license -np --force-output" }
test = ["test_base", "test_freesurfer", "test_responsemean", "test_dwi", "test_labelmerge", "test_skip_labelmerge", "test_sessions"]

[tool.isort]
profile = "black"
Expand Down
33 changes: 16 additions & 17 deletions scattr/workflow/Snakefile
Original file line number Diff line number Diff line change
@@ -1,29 +1,26 @@
# ---- begin snakebids boilerplate ----------------------------------------------

import snakebids
from snakebids import bids
from snakebids import bids, generate_inputs, get_wildcard_constraints


configfile: "config/snakebids.yml"


# writes inputs_config.yml and updates config dict
config.update(
snakebids.generate_inputs(
bids_dir=config["bids_dir"],
pybids_inputs=config["pybids_inputs"],
pybids_config=["bids", "derivatives"],
derivatives=config["derivatives"],
participant_label=config["participant_label"],
exclude_participant_label=config["exclude_participant_label"],
use_bids_inputs=False,
)
inputs = generate_inputs(
bids_dir=config["bids_dir"],
pybids_inputs=config["pybids_inputs"],
pybids_config=["bids", "derivatives"],
derivatives=config["derivatives"],
participant_label=config["participant_label"],
exclude_participant_label=config["exclude_participant_label"],
use_bids_inputs=True,
)


# this adds constraints to the bids naming
wildcard_constraints:
**snakebids.get_wildcard_constraints(config["pybids_inputs"]),
**get_wildcard_constraints(config["pybids_inputs"]),


# ---- end snakebids boilerplate ------------------------------------------------
Expand Down Expand Up @@ -53,11 +50,11 @@ rule all:
input:
tck_files=expand(
rules.filtered_tck2connectome.output.sl_assignment,
subject=config["input_lists"]["T1w"]["subject"],
zip,
**inputs["T1w"].input_zip_lists
),
dti_files=expand(
rules.dwi2tensor.output.dti,
subject=config["input_lists"]["T1w"]["subject"],
rules.dwi2tensor.output.dti, zip, **inputs["T1w"].input_zip_lists
),
qc_files=rules.gather_qc.input,
params:
Expand All @@ -66,8 +63,10 @@ rule all:
root=str(Path(config["output_dir"]) / "mrtrix"),
subject="{subject}",
include_subject_dir=False,
include_session_dir=False,
),
subject=config["input_lists"]["T1w"]["subject"],
zip,
**inputs["T1w"].input_zip_lists
),
shell: # Clean up intermediate files that are left behind
"rm -r {params.mrtrix_dir}/exclude_mask {params.mrtrix_dir}/roi_masks {params.mrtrix_dir}/unfiltered"
27 changes: 19 additions & 8 deletions scattr/workflow/rules/freesurfer.smk
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ freesurfer_dir = str(Path(config["output_dir"]) / "freesurfer")
if config.get("freesurfer_dir"):
freesurfer_dir = config.get("freesurfer_dir")

log_dir = str(Path(config["output_dir"]) / "logs" / "freesurfer")

# Licenses
if config.get("fs_license"):
fs_license = config["fs_license"]
Expand All @@ -17,7 +19,13 @@ bids_fs_out = partial(
bids,
root=freesurfer_dir,
datatype="anat",
**config["subj_wildcards"],
**inputs.subj_wildcards,
)

bids_log = partial(
bids,
root=log_dir,
**inputs["T1w"].input_wildcards,
)

# Freesurfer references (with additional in rules as necessary)
Expand Down Expand Up @@ -59,15 +67,16 @@ rule thalamic_segmentation:
fs_license=fs_license,
output:
thal_seg=str(
Path(freesurfer_dir)
/ "sub-{subject}/mri/ThalamicNuclei.v12.T1.mgz"
Path(bids(root=freesurfer_dir, **inputs.subj_wildcards)).parent
/ "mri"
/ "ThalamicNuclei.v12.T1.mgz"
),
threads: 4
resources:
mem_mb=16000,
time=60,
log:
f"{config['output_dir']}/logs/freesurfer/sub-{{subject}}/thalamic_segmentation.log",
bids_log(suffix="thalamicSegmentation.log"),
group:
"freesurfer"
container:
Expand All @@ -91,7 +100,9 @@ rule mgz2nii:
if not config.get("skip_thal_seg")
else [],
aparcaseg=str(
Path(freesurfer_dir) / "sub-{subject}/mri/aparc+aseg.mgz"
Path(bids(root=freesurfer_dir, **inputs.subj_wildcards)).parent
/ "mri"
/ "aparc+aseg.mgz"
),
params:
freesurfer_dir=freesurfer_dir,
Expand All @@ -112,7 +123,7 @@ rule mgz2nii:
mem_mb=16000,
time=10,
log:
f"{config['output_dir']}/logs/freesurfer/sub-{{subject}}/mgz2nii.log",
bids_log(suffix="mgz2nii.log"),
group:
"freesurfer"
container:
Expand All @@ -130,7 +141,7 @@ rule fs_xfm_to_native:
input:
thal=rules.mgz2nii.output.thal,
aparcaseg=rules.mgz2nii.output.aparcaseg,
ref=config["input_path"]["T1w"],
ref=inputs["T1w"].path,
output:
thal=bids_fs_out(
space="T1w",
Expand All @@ -147,7 +158,7 @@ rule fs_xfm_to_native:
mem_mb=16000,
time=60,
log:
f"{config['output_dir']}/logs/freesurfer/sub-{{subject}}/fs_xfm_to_native.log",
bids_log(suffix="fsXfmToNative.log"),
group:
"freesurfer"
container:
Expand Down
Loading