[FIX] Drop use of eimask (#555)
This is a quick bug fix; ultimately, we will probably incorporate the eimask
into the initial adaptive masking at the beginning of the workflow.
tsalo authored Apr 1, 2020
1 parent 574b908 commit 518842e
Showing 1 changed file with 4 additions and 16 deletions.
tedana/decomposition/pca.py: 20 changes (4 additions & 16 deletions)
@@ -10,7 +10,7 @@
 from sklearn.decomposition import PCA

 from tedana import metrics, utils, io
-from tedana.decomposition import (ma_pca, _utils)
+from tedana.decomposition import ma_pca
 from tedana.stats import computefeats2
 from tedana.selection import kundu_tedpca

@@ -181,19 +181,14 @@ def tedpca(data_cat, data_oc, combmode, mask, t2s, t2sG,
     n_samp, n_echos, n_vols = data_cat.shape

     LGR.info('Computing PCA of optimally combined multi-echo data')
-    data = data_oc[mask, :][:, np.newaxis, :]
-
-    eim = np.squeeze(_utils.eimask(data))
-    data = np.squeeze(data[eim])
+    data = data_oc[mask, :]

     data_z = ((data.T - data.T.mean(axis=0)) / data.T.std(axis=0)).T  # var normalize ts
     data_z = (data_z - data_z.mean()) / data_z.std()  # var normalize everything

     if algorithm in ['mdl', 'aic', 'kic']:
-        data_img = io.new_nii_like(
-            ref_img, utils.unmask(utils.unmask(data, eim), mask))
-        mask_img = io.new_nii_like(ref_img,
-                                   utils.unmask(eim, mask).astype(int))
+        data_img = io.new_nii_like(ref_img, utils.unmask(data, mask))
+        mask_img = io.new_nii_like(ref_img, mask.astype(int))
         voxel_comp_weights, varex, varex_norm, comp_ts = ma_pca.ma_pca(
             data_img, mask_img, algorithm)
     elif low_mem:
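Net effect of this hunk: data is now masked by mask alone, so it can be restored to a full-size array with a single utils.unmask(data, mask) call instead of unmasking through the intermediate eim index first. A minimal NumPy sketch of that equivalence, using a toy unmask helper and synthetic arrays (both hypothetical, not tedana's actual implementation):

import numpy as np

def unmask(data, mask):
    """Toy stand-in for tedana's utils.unmask: scatter masked rows back to full size."""
    out = np.zeros((mask.shape[0],) + data.shape[1:], dtype=data.dtype)
    out[mask] = data
    return out

rng = np.random.default_rng(0)
n_samp, n_vols = 10, 5
data_oc = rng.random((n_samp, n_vols))     # optimally combined data (synthetic)
mask = rng.random(n_samp) > 0.3            # brain mask (synthetic)
eim = rng.random(mask.sum()) > 0.2         # hypothetical eimask over in-mask voxels

# Old path: mask by `mask`, then by `eim`, then unmask through both.
old = unmask(unmask(data_oc[mask][eim], eim), mask)
# New path: mask and unmask by `mask` alone.
new = unmask(data_oc[mask], mask)

# Wherever eim kept a voxel the two paths agree; the new path simply
# keeps the eim-rejected in-mask voxels instead of zeroing them.
assert np.allclose(old[mask][eim], new[mask][eim])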
@@ -209,13 +204,6 @@ def tedpca(data_cat, data_oc, combmode, mask, t2s, t2sG,
     varex_norm = varex / varex.sum()

     # Compute Kappa and Rho for PCA comps
-    eimum = np.atleast_2d(eim)
-    eimum = np.transpose(eimum, np.argsort(eimum.shape)[::-1])
-    eimum = eimum.prod(axis=1)
-    o = np.zeros((mask.shape[0], *eimum.shape[1:]))
-    o[mask, ...] = eimum
-    eimum = np.squeeze(o).astype(bool)
-
     # Normalize each component's time series
     vTmixN = stats.zscore(comp_ts, axis=0)
     comptable, _, _, _ = metrics.dependence_metrics(data_cat,
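For context, the deleted block lifted eim back into full-voxel space to build eimum; with eim dropped, mask plays that role directly. A small sketch of what the removed lines effectively computed, on synthetic mask and eim arrays (toy sizes, not the workflow's real data):

import numpy as np

rng = np.random.default_rng(0)
n_samp = 10
mask = rng.random(n_samp) > 0.3       # brain mask (synthetic)
eim = rng.random(mask.sum()) > 0.2    # hypothetical eimask over in-mask voxels

# What the removed block boiled down to: project `eim` back to full length.
eimum = np.zeros(n_samp, dtype=bool)
eimum[np.flatnonzero(mask)[eim]] = True

# The result is just a stricter subset of `mask`, so once eim is gone,
# `mask` is the only voxel selection needed at this point.
assert eimum.sum() == eim.sum()
assert mask[eimum].all()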
