Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ENH] Only use good echoes for metric calculation/optimal combination #358

Merged
merged 32 commits into from
Apr 18, 2020
Merged
Show file tree
Hide file tree
Changes from 29 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
a9068b4
Only use good echoes for metric calculation and optimal combination.
tsalo Jul 10, 2019
b0a2966
Change variable name.
tsalo Jul 10, 2019
4eebf5f
A bit more memory management.
tsalo Jul 10, 2019
faaaedf
Fix.
tsalo Jul 10, 2019
4caa6ec
Fix tests.
tsalo Jul 10, 2019
fac5749
Fix style.
tsalo Jul 10, 2019
10cd675
Limit mask to adaptive_mask >= 3
tsalo Jul 11, 2019
a320d38
Fix make_adaptive_mask
tsalo Jul 11, 2019
e2da4d1
Fix test.
tsalo Jul 11, 2019
a37f861
Update outputs.
tsalo Jul 11, 2019
92a9224
Merge branch 'master' into use-adaptive-mask
tsalo Jul 15, 2019
bc31c78
Merge branch 'master' into use-adaptive-mask
tsalo Jul 17, 2019
e17ae0e
Merge remote-tracking branch 'ME-ICA/master' into use-adaptive-mask
tsalo Jul 23, 2019
a47c2d6
Merge remote-tracking branch 'ME-ICA/master' into use-adaptive-mask
tsalo Jul 23, 2019
509a224
Merge remote-tracking branch 'ME-ICA/master' into use-adaptive-mask
tsalo Nov 11, 2019
d852a6f
Remove old file.
tsalo Nov 11, 2019
2379fc1
Revert some variable name changes.
tsalo Nov 11, 2019
0062679
Revert some more.
tsalo Nov 11, 2019
e13ad17
Merge remote-tracking branch 'ME-ICA/master' into use-adaptive-mask
tsalo Dec 1, 2019
a565e68
Fix style issues with test.
tsalo Dec 1, 2019
9e00632
Merge branch 'master' into use-adaptive-mask
tsalo Dec 11, 2019
0b4ec14
Merge branch 'master' into use-adaptive-mask
tsalo Dec 28, 2019
261b110
Add constant when calculating betas.
tsalo Dec 28, 2019
c6ea7d3
Merge remote-tracking branch 'ME-ICA/master' into use-adaptive-mask
tsalo Jan 16, 2020
af60558
Merge remote-tracking branch 'ME-ICA/master' into use-adaptive-mask
tsalo Jan 23, 2020
c5417ad
Drop extra components from outputs file.
tsalo Feb 2, 2020
99129da
Improve tedpca docstring.
tsalo Apr 17, 2020
b249616
Improve description of adaptive mask.
tsalo Apr 17, 2020
47719d9
Change empty to unstable.
tsalo Apr 17, 2020
e435343
Update tedana/combine.py
tsalo Apr 18, 2020
e544bb3
Update tedana/combine.py
tsalo Apr 18, 2020
1b19c76
Update tedana/combine.py
tsalo Apr 18, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 28 additions & 16 deletions tedana/combine.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def _combine_paid(data, tes):
return combined


def make_optcom(data, tes, mask, t2s=None, combmode='t2s', verbose=True):
def make_optcom(data, tes, adaptive_mask, t2s=None, combmode='t2s', verbose=True):
"""
Optimally combine BOLD data across TEs.
tsalo marked this conversation as resolved.
Show resolved Hide resolved

Expand All @@ -107,8 +107,11 @@ def make_optcom(data, tes, mask, t2s=None, combmode='t2s', verbose=True):
Concatenated BOLD data.
tes : (E,) :obj:`numpy.ndarray`
Array of TEs, in seconds.
mask : (S,) :obj:`numpy.ndarray`
Brain mask in 3D array.
adaptive_mask : (S,) :obj:`numpy.ndarray`
Adaptive mask of the data indicating those voxels for which at least
tsalo marked this conversation as resolved.
Show resolved Hide resolved
three echos are available. If this is less than the total number of
collected echos, we assume that later echos do not provide
meaningful signal.
t2s : (S [x T]) :obj:`numpy.ndarray` or None, optional
Estimated T2* values. Only required if combmode = 't2s'.
Default is None.
Expand Down Expand Up @@ -142,12 +145,12 @@ def make_optcom(data, tes, mask, t2s=None, combmode='t2s', verbose=True):
'dimension of input data: {0} != '
'{1}'.format(len(tes), data.shape[1]))

if mask.ndim != 1:
if adaptive_mask.ndim != 1:
raise ValueError('Mask is not 1D')
elif mask.shape[0] != data.shape[0]:
elif adaptive_mask.shape[0] != data.shape[0]:
raise ValueError('Mask and data do not have same number of '
'voxels/samples: {0} != {1}'.format(mask.shape[0],
data.shape[0]))
'voxels/samples: {0} != {1}'.format(
adaptive_mask.shape[0], data.shape[0]))

if combmode not in ['t2s', 'paid']:
raise ValueError("Argument 'combmode' must be either 't2s' or 'paid'")
Expand All @@ -158,23 +161,32 @@ def make_optcom(data, tes, mask, t2s=None, combmode='t2s', verbose=True):
LGR.warning("Argument 't2s' is not required if 'combmode' is 'paid'. "
"'t2s' array will not be used.")

data = data[mask, :, :] # mask out empty voxels/samples
tes = np.array(tes)[np.newaxis, ...] # (1 x E) array_like

if combmode == 'paid':
LGR.info('Optimally combining data with parallel-acquired inhomogeneity '
'desensitized (PAID) method')
combined = _combine_paid(data, tes)
LGR.info('Optimally combining data with parallel-acquired '
'inhomogeneity desensitized (PAID) method')
else:
if t2s.ndim == 1:
msg = 'Optimally combining data with voxel-wise T2* estimates'
else:
msg = ('Optimally combining data with voxel- and volume-wise T2* '
'estimates')
t2s = t2s[mask, ..., np.newaxis] # mask out empty voxels/samples

LGR.info(msg)
combined = _combine_t2s(data, tes, t2s)

mask = adaptive_mask >= 3
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For what I understand from this, there must be a minimum of 3 good echoes in order to keep a voxel. What happens with datasets that only have two echos?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We generally don't support those, as the method of usage is quite different in e.g. the Bright and Murphy method (where the first echo is generally treated as a regressor). Maybe we should open a separate issue to confirm if that's mentioned in the documentation, and to highlight it as a condition of tedana ?

data = data[mask, :, :] # mask out unstable voxels/samples
tes = np.array(tes)[np.newaxis, ...] # (1 x E) array_like
combined = np.zeros((data.shape[0], data.shape[2]))
for echo in np.unique(adaptive_mask[mask]):
echo_idx = adaptive_mask[mask] == echo

if combmode == 'paid':
combined[echo_idx, :] = _combine_paid(data[echo_idx, :echo, :],
tes[:echo])
else:
t2s_ = t2s[mask, ..., np.newaxis] # mask out empty voxels/samples

combined[echo_idx, :] = _combine_t2s(
data[echo_idx, :echo, :], tes[:, :echo], t2s_[echo_idx, ...])

combined = unmask(combined, mask)
return combined
22 changes: 7 additions & 15 deletions tedana/decomposition/pca.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def low_mem_pca(data):
return u, s, v


def tedpca(data_cat, data_oc, combmode, mask, t2s, t2sG,
def tedpca(data_cat, data_oc, combmode, mask, adaptive_mask, t2sG,
ref_img, tes, algorithm='mdl', kdaw=10., rdaw=1.,
out_dir='.', verbose=False, low_mem=False):
"""
Expand All @@ -65,8 +65,8 @@ def tedpca(data_cat, data_oc, combmode, mask, t2s, t2sG,
Poser 2006
mask : (S,) array_like
Boolean mask array
t2s : (S,) array_like
Map of voxel-wise T2* estimates.
adaptive_mask : (S,) array_like
Adaptive mask of the data indicating the number of echos with good signal at each voxel.
t2sG : (S,) array_like
Map of voxel-wise T2* estimates.
ref_img : :obj:`str` or img_like
Expand Down Expand Up @@ -218,18 +218,10 @@ def tedpca(data_cat, data_oc, combmode, mask, t2s, t2sG,

# Normalize each component's time series
vTmixN = stats.zscore(comp_ts, axis=0)
comptable, _, _, _ = metrics.dependence_metrics(data_cat,
data_oc,
comp_ts,
t2s,
tes,
ref_img,
reindex=False,
mmixN=vTmixN,
algorithm=None,
label='mepca_',
out_dir=out_dir,
verbose=verbose)
comptable, _, _, _ = metrics.dependence_metrics(
data_cat, data_oc, comp_ts, adaptive_mask, tes, ref_img,
reindex=False, mmixN=vTmixN, algorithm=None,
label='mepca_', out_dir=out_dir, verbose=verbose)

# varex_norm from PCA overrides varex_norm from dependence_metrics,
# but we retain the original
Expand Down
94 changes: 52 additions & 42 deletions tedana/metrics/kundu_fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
Z_MAX = 8


def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
def dependence_metrics(catd, tsoc, mmix, adaptive_mask, tes, ref_img,
reindex=False, mmixN=None, algorithm=None, label=None,
out_dir='.', verbose=False):
"""
Expand All @@ -35,8 +35,9 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
mmix : (T x C) array_like
Mixing matrix for converting input data to component space, where `C`
is components and `T` is the same as in `catd`
t2s : (S [x T]) array_like
Limited T2* map or timeseries.
adaptive_mask : (S) array_like
Adaptive mask, where each voxel's value is the number of echoes with
"good signal".
tes : list
List of echo times associated with `catd`, in milliseconds
ref_img : str or img_like
Expand Down Expand Up @@ -69,26 +70,23 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
mmix_corrected : :obj:`numpy.ndarray`
Mixing matrix after sign correction and resorting (if reindex is True).
"""
# Use t2s as mask
mask = t2s != 0
if not (catd.shape[0] == t2s.shape[0] == mask.shape[0] == tsoc.shape[0]):
# Use adaptive_mask as mask
mask = adaptive_mask >= 3
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same comment as above, what if the number of echos is 2?


if not (catd.shape[0] == adaptive_mask.shape[0] == tsoc.shape[0]):
raise ValueError('First dimensions (number of samples) of catd ({0}), '
'tsoc ({1}), and t2s ({2}) do not '
'tsoc ({1}), and adaptive_mask ({2}) do not '
'match'.format(catd.shape[0], tsoc.shape[0],
t2s.shape[0]))
adaptive_mask.shape[0]))
elif catd.shape[1] != len(tes):
raise ValueError('Second dimension of catd ({0}) does not match '
'number of echoes provided (tes; '
'{1})'.format(catd.shape[1], len(tes)))
elif not (catd.shape[2] == tsoc.shape[1] == mmix.shape[0]):
raise ValueError('Number of volumes in catd ({0}), '
'tsoc ({1}), and mmix ({2}) do not '
'match.'.format(catd.shape[2], tsoc.shape[1], mmix.shape[0]))
elif t2s.ndim == 2:
if catd.shape[2] != t2s.shape[1]:
raise ValueError('Number of volumes in catd '
'({0}) does not match number of volumes in '
't2s ({1})'.format(catd.shape[2], t2s.shape[1]))
'match.'.format(catd.shape[2], tsoc.shape[1],
mmix.shape[0]))

RepLGR.info("A series of TE-dependence metrics were calculated for "
"each component, including Kappa, Rho, and variance "
Expand All @@ -97,7 +95,6 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
# mask everything we can
tsoc = tsoc[mask, :]
catd = catd[mask, ...]
t2s = t2s[mask]

# demean optimal combination
tsoc_dm = tsoc - tsoc.mean(axis=-1, keepdims=True)
Expand Down Expand Up @@ -126,16 +123,17 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
# compute Betas and means over TEs for TE-dependence analysis
betas = get_coeffs(utils.unmask(catd, mask),
mmix_corrected,
np.repeat(mask[:, np.newaxis], len(tes), axis=1))
np.repeat(mask[:, np.newaxis], len(tes), axis=1),
handwerkerd marked this conversation as resolved.
Show resolved Hide resolved
add_const=True)
betas = betas[mask, ...]
n_voxels, n_echos, n_components = betas.shape
mu = catd.mean(axis=-1, dtype=float)
tes = np.reshape(tes, (n_echos, 1))
fmin, _, _ = getfbounds(n_echos)

# set up Xmats
X1 = mu.T # Model 1
X2 = np.tile(tes, (1, n_voxels)) * mu.T / t2s.T # Model 2
# set up design matrices
X1 = mu.T # Model 1: TE-independence model
X2 = np.tile(tes, (1, n_voxels)) * mu.T # Model 2: TE-dependence model
handwerkerd marked this conversation as resolved.
Show resolved Hide resolved

# tables for component selection
kappas = np.zeros([n_components])
Expand All @@ -145,8 +143,9 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
Z_maps = np.zeros([n_voxels, n_components])
F_R2_maps = np.zeros([n_voxels, n_components])
F_S0_maps = np.zeros([n_voxels, n_components])
pred_R2_maps = np.zeros([n_voxels, n_echos, n_components])
pred_S0_maps = np.zeros([n_voxels, n_echos, n_components])
if verbose:
pred_R2_maps = np.zeros([n_voxels, n_echos, n_components])
pred_S0_maps = np.zeros([n_voxels, n_echos, n_components])

LGR.info('Fitting TE- and S0-dependent models to components')
for i_comp in range(n_components):
Expand All @@ -156,24 +155,32 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
varex[i_comp] = (tsoc_B[:, i_comp]**2).sum() / totvar * 100.
varex_norm[i_comp] = (WTS[:, i_comp]**2).sum() / totvar_norm

# S0 Model
# (S,) model coefficient map
coeffs_S0 = (comp_betas * X1).sum(axis=0) / (X1**2).sum(axis=0)
pred_S0 = X1 * np.tile(coeffs_S0, (n_echos, 1))
pred_S0_maps[:, :, i_comp] = pred_S0.T
SSE_S0 = (comp_betas - pred_S0)**2
SSE_S0 = SSE_S0.sum(axis=0) # (S,) prediction error map
F_S0 = (alpha - SSE_S0) * (n_echos - 1) / (SSE_S0)
F_S0_maps[:, i_comp] = F_S0

# R2 Model
coeffs_R2 = (comp_betas * X2).sum(axis=0) / (X2**2).sum(axis=0)
pred_R2 = X2 * np.tile(coeffs_R2, (n_echos, 1))
pred_R2_maps[:, :, i_comp] = pred_R2.T
SSE_R2 = (comp_betas - pred_R2)**2
SSE_R2 = SSE_R2.sum(axis=0)
F_R2 = (alpha - SSE_R2) * (n_echos - 1) / (SSE_R2)
F_R2_maps[:, i_comp] = F_R2
for j_echo in np.unique(adaptive_mask[adaptive_mask >= 3]):
mask_idx = adaptive_mask == j_echo
alpha = (np.abs(comp_betas[:j_echo])**2).sum(axis=0)

# S0 Model
# (S,) model coefficient map
coeffs_S0 = (comp_betas[:j_echo] * X1[:j_echo, :]).sum(axis=0) /\
(X1[:j_echo, :]**2).sum(axis=0)
pred_S0 = X1[:j_echo, :] * np.tile(coeffs_S0, (j_echo, 1))
SSE_S0 = (comp_betas[:j_echo] - pred_S0)**2
SSE_S0 = SSE_S0.sum(axis=0) # (S,) prediction error map
F_S0 = (alpha - SSE_S0) * (j_echo - 1) / (SSE_S0)
F_S0_maps[mask_idx[mask], i_comp] = F_S0[mask_idx[mask]]

# R2 Model
coeffs_R2 = (comp_betas[:j_echo] * X2[:j_echo, :]).sum(axis=0) /\
(X2[:j_echo, :]**2).sum(axis=0)
pred_R2 = X2[:j_echo] * np.tile(coeffs_R2, (j_echo, 1))
SSE_R2 = (comp_betas[:j_echo] - pred_R2)**2
SSE_R2 = SSE_R2.sum(axis=0)
F_R2 = (alpha - SSE_R2) * (j_echo - 1) / (SSE_R2)
F_R2_maps[mask_idx[mask], i_comp] = F_R2[mask_idx[mask]]

if verbose:
pred_S0_maps[mask_idx[mask], :j_echo, i_comp] = pred_S0.T[mask_idx[mask], :]
pred_R2_maps[mask_idx[mask], :j_echo, i_comp] = pred_R2.T[mask_idx[mask], :]

# compute weights as Z-values
wtsZ = (WTS[:, i_comp] - WTS[:, i_comp].mean()) / WTS[:, i_comp].std()
Expand All @@ -199,12 +206,15 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
comptable = comptable[sort_idx, :]
mmix_corrected = mmix_corrected[:, sort_idx]
betas = betas[..., sort_idx]
pred_R2_maps = pred_R2_maps[:, :, sort_idx]
pred_S0_maps = pred_S0_maps[:, :, sort_idx]
F_R2_maps = F_R2_maps[:, sort_idx]
F_S0_maps = F_S0_maps[:, sort_idx]
Z_maps = Z_maps[:, sort_idx]
tsoc_Babs = tsoc_Babs[:, sort_idx]

if verbose:
pred_R2_maps = pred_R2_maps[:, :, sort_idx]
pred_S0_maps = pred_S0_maps[:, :, sort_idx]

if algorithm == 'kundu_v3':
WTS = WTS[:, sort_idx]
PSC = PSC[:, sort_idx]
Expand All @@ -226,7 +236,7 @@ def dependence_metrics(catd, tsoc, mmix, t2s, tes, ref_img,
io.filewrite(utils.unmask(Z_maps ** 2., mask),
op.join(out_dir, '{0}metric_weights.nii'.format(label)),
ref_img)
del pred_R2_maps, pred_S0_maps
del pred_R2_maps, pred_S0_maps

comptable = pd.DataFrame(comptable,
columns=['kappa', 'rho',
Expand Down
4 changes: 0 additions & 4 deletions tedana/tests/data/cornell_three_echo_outputs.txt
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,6 @@ figures/comp_064.png
figures/comp_065.png
figures/comp_066.png
figures/comp_067.png
figures/comp_068.png
figures/comp_069.png
figures/comp_070.png
figures/comp_071.png
dn_ts_OC.nii.gz
feats_OC2.nii.gz
figures
Expand Down
57 changes: 24 additions & 33 deletions tedana/tests/test_model_fit_dependence_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,58 +17,49 @@ def test_break_dependence_metrics():
catd = np.empty((n_samples, n_echos, n_vols))
tsoc = np.empty((n_samples, n_vols))
mmix = np.empty((n_vols, n_comps))
t2s = np.empty((n_samples, n_vols))
adaptive_mask = np.empty((n_samples))
tes = np.empty((n_echos))
ref_img = ''

# Shape of catd is wrong
catd = np.empty((n_samples + 1, n_echos, n_vols))
with pytest.raises(ValueError):
kundu_fit.dependence_metrics(catd=catd, tsoc=tsoc, mmix=mmix,
t2s=t2s, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None,
algorithm='kundu_v3')
kundu_fit.dependence_metrics(
catd=catd, tsoc=tsoc, mmix=mmix,
adaptive_mask=adaptive_mask, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None, algorithm='kundu_v3')

# Shape of t2s is wrong
# Shape of adaptive_mask is wrong
catd = np.empty((n_samples, n_echos, n_vols))
t2s = np.empty((n_samples + 1, n_vols))
adaptive_mask = np.empty((n_samples + 1))
with pytest.raises(ValueError):
kundu_fit.dependence_metrics(catd=catd, tsoc=tsoc, mmix=mmix,
t2s=t2s, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None,
algorithm='kundu_v3')
kundu_fit.dependence_metrics(
catd=catd, tsoc=tsoc, mmix=mmix,
adaptive_mask=adaptive_mask, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None, algorithm='kundu_v3')

# Shape of tsoc is wrong
t2s = np.empty((n_samples, n_vols))
adaptive_mask = np.empty((n_samples))
tsoc = np.empty((n_samples + 1, n_vols))
with pytest.raises(ValueError):
kundu_fit.dependence_metrics(catd=catd, tsoc=tsoc, mmix=mmix,
t2s=t2s, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None,
algorithm='kundu_v3')
kundu_fit.dependence_metrics(
catd=catd, tsoc=tsoc, mmix=mmix,
adaptive_mask=adaptive_mask, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None, algorithm='kundu_v3')

# Shape of catd is wrong
catd = np.empty((n_samples, n_echos + 1, n_vols))
tsoc = np.empty((n_samples, n_vols))
with pytest.raises(ValueError):
kundu_fit.dependence_metrics(catd=catd, tsoc=tsoc, mmix=mmix,
t2s=t2s, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None,
algorithm='kundu_v3')
kundu_fit.dependence_metrics(
catd=catd, tsoc=tsoc, mmix=mmix,
adaptive_mask=adaptive_mask, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None, algorithm='kundu_v3')

# Shape of catd is wrong
catd = np.empty((n_samples, n_echos, n_vols + 1))
with pytest.raises(ValueError):
kundu_fit.dependence_metrics(catd=catd, tsoc=tsoc, mmix=mmix,
t2s=t2s, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None,
algorithm='kundu_v3')

# Shape of t2s is wrong
catd = np.empty((n_samples, n_echos, n_vols))
t2s = np.empty((n_samples, n_vols + 1))
with pytest.raises(ValueError):
kundu_fit.dependence_metrics(catd=catd, tsoc=tsoc, mmix=mmix,
t2s=t2s, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None,
algorithm='kundu_v3')
kundu_fit.dependence_metrics(
catd=catd, tsoc=tsoc, mmix=mmix,
adaptive_mask=adaptive_mask, tes=tes, ref_img=ref_img,
reindex=False, mmixN=None, algorithm='kundu_v3')
Loading