Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Aligning robustICA with current Main + #5

Merged
merged 35 commits into main from adding-robustica
Aug 30, 2024
Merged
Changes from 1 commit
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
62e15ab
Limit current adaptive mask method to brain mask (#1060)
tsalo Apr 12, 2024
4e091fd
Update nilearn requirement from <=0.10.3,>=0.7 to >=0.7,<=0.10.4 (#1077)
dependabot[bot] Apr 16, 2024
ee714f3
Add adaptive mask plot to report (#1073)
tsalo Apr 16, 2024
49978ca
Update scikit-learn requirement (#1075)
dependabot[bot] Apr 16, 2024
953aa65
Update pandas requirement from <=2.2.1,>=2.0 to >=2.0,<=2.2.2 (#1076)
dependabot[bot] Apr 16, 2024
5835d3a
Update bokeh requirement from <=3.4.0,>=1.0.0 to >=1.0.0,<=3.4.1 (#1078)
dependabot[bot] Apr 16, 2024
461bc38
Load user-defined mask as expected by plot_adaptive_mask (#1079)
mvdoc Apr 17, 2024
35a26dd
DOC desc-optcomDenoised -> desc-denoised (#1080)
mvdoc Apr 18, 2024
1660435
docs: add mvdoc as a contributor for code, bug, and doc (#1082)
allcontributors[bot] Apr 18, 2024
af5e99a
Identify the last good echo in adaptive mask instead of sum of good echoes
tsalo Apr 18, 2024
0f6cbe1
Output RMSE map and time series for decay model fit (#1044)
tsalo Apr 29, 2024
12aea7f
minimum nilearn 0.10.3 (#1094)
handwerkerd May 7, 2024
2e9831b
Use nearest-neighbors interpolation in `plot_component` (#1098)
tsalo May 22, 2024
dd552dc
Update scipy requirement from <=1.13.0,>=1.2.0 to >=1.2.0,<=1.13.1 (#…
dependabot[bot] May 28, 2024
7ca9c27
Update scikit-learn requirement from <=1.4.2,>=0.21 to >=0.21,<=1.5.0…
dependabot[bot] May 28, 2024
daeaf28
Update numpy requirement from <=1.26.4,>=1.16 to >=1.16,<=2.0.0 (#1104)
dependabot[bot] Jul 2, 2024
323dd59
Filter out non-diagonal affine warning (#1103)
tsalo Jul 3, 2024
18081c6
Update bokeh requirement from <=3.4.1,>=1.0.0 to <=3.5.0,>=3.5.0 (#1109)
dependabot[bot] Jul 8, 2024
f3f4740
Update scikit-learn requirement from <=1.5.0,>=0.21 to <=1.5.1,>=1.5.…
dependabot[bot] Jul 10, 2024
8a7d282
Update scipy requirement from <=1.13.1,>=1.2.0 to <=1.14.0,>=1.14.0 (…
dependabot[bot] Jul 10, 2024
4b813e8
Update numpy requirement from <=2.0.0,>=1.16 to >=1.16,<=2.0.1 (#1112)
dependabot[bot] Jul 23, 2024
73542f5
Cleaning up installation instructions (#1113)
handwerkerd Jul 24, 2024
07611ec
Update bokeh requirement from <=3.5.0,>=1.0.0 to >=1.0.0,<=3.5.1 (#1116)
dependabot[bot] Jul 31, 2024
f3be8f9
Update list of multi-echo datasets (#1115)
tsalo Jul 31, 2024
c7df469
Generate metrics from external regressors using F stats (#1064)
handwerkerd Aug 5, 2024
18a408e
Link to the open-multi-echo-data website (#1117)
tsalo Aug 7, 2024
811b9fd
Refactor `metrics.dependence` module (#1088)
tsalo Aug 7, 2024
b215083
documentation and resource updates (#1114)
handwerkerd Aug 8, 2024
45c95ce
aligning adding-robustica with Main
handwerkerd Aug 13, 2024
8e6878f
Adding already requested changes
handwerkerd Aug 13, 2024
88fd148
fixed failing tests
handwerkerd Aug 16, 2024
a221e72
updated documentation in faq.rst
handwerkerd Aug 27, 2024
8622a9b
more documentation changes
handwerkerd Aug 29, 2024
419e9d4
Update docs/faq.rst
handwerkerd Aug 30, 2024
d29a91b
Update docs/faq.rst
handwerkerd Aug 30, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Refactor metrics.dependence module (ME-ICA#1088)
* Add type hints to metric functions.

* Use keyword arguments.

* Update tests.

* Update dependence.py

* Update collect.py

* Fix other stuff.
tsalo authored Aug 7, 2024
commit 811b9fd979b8d9bc228d10313d357b7ee232cb77
122 changes: 74 additions & 48 deletions tedana/metrics/collect.py
Original file line number Diff line number Diff line change
@@ -148,7 +148,10 @@ def generate_metrics(
metric_maps = {}
if "map weight" in required_metrics:
LGR.info("Calculating weight maps")
metric_maps["map weight"] = dependence.calculate_weights(data_optcom, mixing)
metric_maps["map weight"] = dependence.calculate_weights(
data_optcom=data_optcom,
mixing=mixing,
)
signs = determine_signs(metric_maps["map weight"], axis=0)
comptable["optimal sign"] = signs
metric_maps["map weight"], mixing = flip_components(
@@ -157,31 +160,42 @@ def generate_metrics(

if "map optcom betas" in required_metrics:
LGR.info("Calculating parameter estimate maps for optimally combined data")
metric_maps["map optcom betas"] = dependence.calculate_betas(data_optcom, mixing)
metric_maps["map optcom betas"] = dependence.calculate_betas(
data=data_optcom,
mixing=mixing,
)
if io_generator.verbose:
metric_maps["map echo betas"] = dependence.calculate_betas(data_cat, mixing)
metric_maps["map echo betas"] = dependence.calculate_betas(
data=data_cat,
mixing=mixing,
)

if "map percent signal change" in required_metrics:
LGR.info("Calculating percent signal change maps")
# used in kundu v3.2 tree
metric_maps["map percent signal change"] = dependence.calculate_psc(
data_optcom, metric_maps["map optcom betas"]
data_optcom=data_optcom,
optcom_betas=metric_maps["map optcom betas"],
)

if "map Z" in required_metrics:
LGR.info("Calculating z-statistic maps")
metric_maps["map Z"] = dependence.calculate_z_maps(metric_maps["map weight"])
metric_maps["map Z"] = dependence.calculate_z_maps(weights=metric_maps["map weight"])

if io_generator.verbose:
io_generator.save_file(
utils.unmask(metric_maps["map Z"] ** 2, mask),
label + " component weights img",
f"{label} component weights img",
)

if ("map FT2" in required_metrics) or ("map FS0" in required_metrics):
LGR.info("Calculating F-statistic maps")
m_t2, m_s0, p_m_t2, p_m_s0 = dependence.calculate_f_maps(
data_cat, metric_maps["map Z"], mixing, adaptive_mask, tes
data_cat=data_cat,
z_maps=metric_maps["map Z"],
mixing=mixing,
adaptive_mask=adaptive_mask,
tes=tes,
)
metric_maps["map FT2"] = m_t2
metric_maps["map FS0"] = m_s0
@@ -191,58 +205,73 @@ def generate_metrics(
if io_generator.verbose:
io_generator.save_file(
utils.unmask(metric_maps["map FT2"], mask),
label + " component F-T2 img",
f"{label} component F-T2 img",
)
io_generator.save_file(
utils.unmask(metric_maps["map FS0"], mask),
label + " component F-S0 img",
f"{label} component F-S0 img",
)

if "map Z clusterized" in required_metrics:
LGR.info("Thresholding z-statistic maps")
z_thresh = 1.95
metric_maps["map Z clusterized"] = dependence.threshold_map(
metric_maps["map Z"], mask, ref_img, z_thresh
maps=metric_maps["map Z"],
mask=mask,
ref_img=ref_img,
threshold=z_thresh,
)

if "map FT2 clusterized" in required_metrics:
LGR.info("Calculating T2* F-statistic maps")
f_thresh, _, _ = getfbounds(len(tes))
metric_maps["map FT2 clusterized"] = dependence.threshold_map(
metric_maps["map FT2"], mask, ref_img, f_thresh
maps=metric_maps["map FT2"],
mask=mask,
ref_img=ref_img,
threshold=f_thresh,
)

if "map FS0 clusterized" in required_metrics:
LGR.info("Calculating S0 F-statistic maps")
f_thresh, _, _ = getfbounds(len(tes))
metric_maps["map FS0 clusterized"] = dependence.threshold_map(
metric_maps["map FS0"], mask, ref_img, f_thresh
maps=metric_maps["map FS0"],
mask=mask,
ref_img=ref_img,
threshold=f_thresh,
)

# Intermediate metrics
if "countsigFT2" in required_metrics:
LGR.info("Counting significant voxels in T2* F-statistic maps")
comptable["countsigFT2"] = dependence.compute_countsignal(
metric_maps["map FT2 clusterized"]
stat_cl_maps=metric_maps["map FT2 clusterized"],
)

if "countsigFS0" in required_metrics:
LGR.info("Counting significant voxels in S0 F-statistic maps")
comptable["countsigFS0"] = dependence.compute_countsignal(
metric_maps["map FS0 clusterized"]
stat_cl_maps=metric_maps["map FS0 clusterized"],
)

# Back to maps
if "map beta T2 clusterized" in required_metrics:
LGR.info("Thresholding optimal combination beta maps to match T2* F-statistic maps")
metric_maps["map beta T2 clusterized"] = dependence.threshold_to_match(
metric_maps["map optcom betas"], comptable["countsigFT2"], mask, ref_img
maps=metric_maps["map optcom betas"],
n_sig_voxels=comptable["countsigFT2"],
mask=mask,
ref_img=ref_img,
)

if "map beta S0 clusterized" in required_metrics:
LGR.info("Thresholding optimal combination beta maps to match S0 F-statistic maps")
metric_maps["map beta S0 clusterized"] = dependence.threshold_to_match(
metric_maps["map optcom betas"], comptable["countsigFS0"], mask, ref_img
maps=metric_maps["map optcom betas"],
n_sig_voxels=comptable["countsigFS0"],
mask=mask,
ref_img=ref_img,
)

# Dependence metrics
@@ -258,24 +287,23 @@ def generate_metrics(
if "variance explained" in required_metrics:
LGR.info("Calculating variance explained")
comptable["variance explained"] = dependence.calculate_varex(
metric_maps["map optcom betas"]
optcom_betas=metric_maps["map optcom betas"],
)

if "normalized variance explained" in required_metrics:
LGR.info("Calculating normalized variance explained")
comptable["normalized variance explained"] = dependence.calculate_varex_norm(
metric_maps["map weight"]
weights=metric_maps["map weight"],
)

# Spatial metrics
if "dice_FT2" in required_metrics:
LGR.info(
"Calculating DSI between thresholded T2* F-statistic and "
"optimal combination beta maps"
"Calculating DSI between thresholded T2* F-statistic and optimal combination beta maps"
)
comptable["dice_FT2"] = dependence.compute_dice(
metric_maps["map beta T2 clusterized"],
metric_maps["map FT2 clusterized"],
clmaps1=metric_maps["map beta T2 clusterized"],
clmaps2=metric_maps["map FT2 clusterized"],
axis=0,
)

@@ -285,20 +313,18 @@ def generate_metrics(
"optimal combination beta maps"
)
comptable["dice_FS0"] = dependence.compute_dice(
metric_maps["map beta S0 clusterized"],
metric_maps["map FS0 clusterized"],
clmaps1=metric_maps["map beta S0 clusterized"],
clmaps2=metric_maps["map FS0 clusterized"],
axis=0,
)

if "signal-noise_t" in required_metrics:
LGR.info("Calculating signal-noise t-statistics")
RepLGR.info(
"A t-test was performed between the distributions of T2*-model "
"F-statistics associated with clusters (i.e., signal) and "
"non-cluster voxels (i.e., noise) to generate a t-statistic "
"(metric signal-noise_z) and p-value (metric signal-noise_p) "
"measuring relative association of the component to signal "
"over noise."
"A t-test was performed between the distributions of T2*-model F-statistics "
"associated with clusters (i.e., signal) and non-cluster voxels (i.e., noise) to "
"generate a t-statistic (metric signal-noise_z) and p-value (metric signal-noise_p) "
"measuring relative association of the component to signal over noise."
)
(
comptable["signal-noise_t"],
@@ -312,20 +338,18 @@ def generate_metrics(
if "signal-noise_z" in required_metrics:
LGR.info("Calculating signal-noise z-statistics")
RepLGR.info(
"A t-test was performed between the distributions of T2*-model "
"F-statistics associated with clusters (i.e., signal) and "
"non-cluster voxels (i.e., noise) to generate a z-statistic "
"(metric signal-noise_z) and p-value (metric signal-noise_p) "
"measuring relative association of the component to signal "
"over noise."
"A t-test was performed between the distributions of T2*-model F-statistics "
"associated with clusters (i.e., signal) and non-cluster voxels (i.e., noise) to "
"generate a z-statistic (metric signal-noise_z) and p-value (metric signal-noise_p) "
"measuring relative association of the component to signal over noise."
)
(
comptable["signal-noise_z"],
comptable["signal-noise_p"],
) = dependence.compute_signal_minus_noise_z(
Z_maps=metric_maps["map Z"],
Z_clmaps=metric_maps["map Z clusterized"],
F_T2_maps=metric_maps["map FT2"],
z_maps=metric_maps["map Z"],
z_clmaps=metric_maps["map Z clusterized"],
f_t2_maps=metric_maps["map FT2"],
)

if "countnoise" in required_metrics:
@@ -335,18 +359,19 @@ def generate_metrics(
"calculated for each component."
)
comptable["countnoise"] = dependence.compute_countnoise(
metric_maps["map Z"], metric_maps["map Z clusterized"]
stat_maps=metric_maps["map Z"],
stat_cl_maps=metric_maps["map Z clusterized"],
)

# Composite metrics
if "d_table_score" in required_metrics:
LGR.info("Calculating decision table score")
comptable["d_table_score"] = dependence.generate_decision_table_score(
comptable["kappa"],
comptable["dice_FT2"],
comptable["signal-noise_t"],
comptable["countnoise"],
comptable["countsigFT2"],
kappa=comptable["kappa"],
dice_ft2=comptable["dice_FT2"],
signal_minus_noise_t=comptable["signal-noise_t"],
countnoise=comptable["countnoise"],
countsig_ft2=comptable["countsigFT2"],
)

# External regressor-based metrics
@@ -368,6 +393,7 @@ def generate_metrics(
write_t2s0 = "map predicted T2" in metric_maps
if write_betas:
betas = metric_maps["map echo betas"]

if write_t2s0:
pred_t2_maps = metric_maps["map predicted T2"]
pred_s0_maps = metric_maps["map predicted S0"]
@@ -377,22 +403,22 @@ def generate_metrics(
echo_betas = betas[:, i_echo, :]
io_generator.save_file(
utils.unmask(echo_betas, mask),
"echo weight " + label + " map split img",
f"echo weight {label} map split img",
echo=(i_echo + 1),
)

if write_t2s0:
echo_pred_t2_maps = pred_t2_maps[:, i_echo, :]
io_generator.save_file(
utils.unmask(echo_pred_t2_maps, mask),
"echo T2 " + label + " split img",
f"echo T2 {label} split img",
echo=(i_echo + 1),
)

echo_pred_s0_maps = pred_s0_maps[:, i_echo, :]
io_generator.save_file(
utils.unmask(echo_pred_s0_maps, mask),
"echo S0 " + label + " split img",
f"echo S0 {label} split img",
echo=(i_echo + 1),
)

Loading