MRG, ENH: Extend usability of stc_near_sensors #9396

Merged 4 commits on May 15, 2021
2 changes: 2 additions & 0 deletions doc/changes/latest.inc
@@ -39,6 +39,8 @@ Enhancements

- New function :func:`mne.label.find_pos_in_annot` to get atlas label for MRI coordinates. (:gh:`9376` **by new contributor** |Marian Dovgialo|_)

- Add support for ``picks`` in :func:`mne.stc_near_sensors` (:gh:`9396` by `Eric Larson`_)

Bugs
~~~~
- Fix bug with :meth:`mne.Epochs.crop` and :meth:`mne.Evoked.crop` when ``include_tmax=False``, where the last sample was always cut off, even when ``tmax > epo.times[-1]`` (:gh:`9378` **by new contributor** |Jan Sosulski|_)
15 changes: 7 additions & 8 deletions mne/io/nirx/nirx.py
@@ -297,18 +297,17 @@ def prepend(li, str):
for ch_idx2 in range(requested_channels.shape[0]):
# Find source and store location
src = int(requested_channels[ch_idx2, 0]) - 1
info['chs'][ch_idx2 * 2]['loc'][3:6] = src_locs[src, :]
info['chs'][ch_idx2 * 2 + 1]['loc'][3:6] = src_locs[src, :]
# Find detector and store location
det = int(requested_channels[ch_idx2, 1]) - 1
info['chs'][ch_idx2 * 2]['loc'][6:9] = det_locs[det, :]
info['chs'][ch_idx2 * 2 + 1]['loc'][6:9] = det_locs[det, :]
# Store channel location as midpoint between source and detector.
midpoint = (src_locs[src, :] + det_locs[det, :]) / 2
info['chs'][ch_idx2 * 2]['loc'][:3] = midpoint
info['chs'][ch_idx2 * 2 + 1]['loc'][:3] = midpoint
info['chs'][ch_idx2 * 2]['loc'][9] = fnirs_wavelengths[0]
info['chs'][ch_idx2 * 2 + 1]['loc'][9] = fnirs_wavelengths[1]
for ii in range(2):
ch_idx3 = ch_idx2 * 2 + ii
info['chs'][ch_idx3]['loc'][3:6] = src_locs[src, :]
info['chs'][ch_idx3]['loc'][6:9] = det_locs[det, :]
info['chs'][ch_idx3]['loc'][:3] = midpoint
info['chs'][ch_idx3]['loc'][9] = fnirs_wavelengths[ii]
info['chs'][ch_idx3]['coord_frame'] = FIFF.FIFFV_COORD_HEAD

# Extract the start/stop numbers for samples in the CSV. In theory the
# sample bounds should just be 10 * the number of channels, but some
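For context on the nirx.py change above: it collapses the duplicated per-wavelength assignments into a loop and stamps each channel with the head coordinate frame. A minimal sketch (the recording path is hypothetical) of how the ``loc`` layout written above can be read back:

import mne

raw = mne.io.read_raw_nirx('/path/to/nirx_recording')  # hypothetical path
ch = raw.info['chs'][0]
midpoint = ch['loc'][:3]    # channel position (source-detector midpoint)
src_pos = ch['loc'][3:6]    # source optode position
det_pos = ch['loc'][6:9]    # detector optode position
wavelength = ch['loc'][9]   # wavelength of this channel's source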
10 changes: 8 additions & 2 deletions mne/morph.py
@@ -1272,9 +1272,15 @@ def _surf_upsampling_mat(idx_from, e, smooth, warn=True):
_validate_type(smooth, ('int-like', str, None), 'smoothing steps')
if smooth is not None: # number of steps
smooth = _ensure_int(smooth, 'smoothing steps')
if smooth < 1:
if smooth == 0:
return sparse.csc_matrix(
(np.ones(len(idx_from)), # data, indices, indptr
idx_from,
np.arange(len(idx_from) + 1)),
shape=(e.shape[0], len(idx_from))).tocsr()
elif smooth < 0:
raise ValueError(
'The number of smoothing operations has to be at least 1, got '
'The number of smoothing operations has to be at least 0, got '
f'{smooth}')
smooth = smooth - 1
# idx will gradually expand from idx_from -> np.arange(n_tot)
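For context on the ``smooth == 0`` branch added above: the ``(data, indices, indptr)`` CSC construction builds a selection matrix with a single 1 per column, so data defined on the ``idx_from`` vertices is scattered onto the full surface unchanged. A minimal sketch with toy values (not taken from the PR):

import numpy as np
from scipy import sparse

n_vertices = 5                      # toy surface size
idx_from = np.array([1, 3])         # vertices that carry data

# Same construction as in _surf_upsampling_mat: column j holds a single 1
# at row idx_from[j], so the matrix maps per-vertex values onto the full
# surface without any smoothing.
mat = sparse.csc_matrix(
    (np.ones(len(idx_from)), idx_from, np.arange(len(idx_from) + 1)),
    shape=(n_vertices, len(idx_from))).tocsr()

values = np.array([10., 20.])       # data on idx_from vertices
print(mat @ values)                 # [ 0. 10.  0. 20.  0.]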
30 changes: 26 additions & 4 deletions mne/source_estimate.py
@@ -17,6 +17,7 @@
from .evoked import _get_peak
from .filter import resample
from .io.constants import FIFF
from .io.pick import pick_types
from .surface import (read_surface, _get_ico_surface, mesh_edges,
_project_onto_surface)
from .source_space import (_ensure_src, _get_morph_src_reordering,
@@ -3220,7 +3221,8 @@ def extract_label_time_course(stcs, labels, src, mode='auto',

@verbose
def stc_near_sensors(evoked, trans, subject, distance=0.01, mode='sum',
project=True, subjects_dir=None, src=None, verbose=None):
project=True, subjects_dir=None, src=None, picks=None,
verbose=None):
"""Create a STC from ECoG, sEEG and DBS sensor data.

Parameters
@@ -3233,10 +3235,14 @@ def stc_near_sensors(evoked, trans, subject, distance=0.01, mode='sum',
distance : float
Distance (m) defining the activation "ball" of the sensor.
mode : str
Can be "sum" to do a linear sum of weights, "nearest" to
Can be "sum" to do a linear sum of weights, "weighted" to make this
a weighted sum, "nearest" to
use only the weight of the nearest sensor, or "single" to
do a distance-weight of the nearest sensor. Default is "sum".
See Notes.

.. versionchanged:: 0.24
Added "weighted" option.
project : bool
If True, project the electrodes to the nearest ``pial`` surface
vertex before computing distances. Only used when doing a
@@ -3247,6 +3253,9 @@ def stc_near_sensors(evoked, trans, subject, distance=0.01, mode='sum',

.. warning:: If a surface source space is used, make sure that
``surf='pial'`` was used during construction.
%(picks_base)s good sEEG, ECoG, and DBS channels.

.. versionadded:: 0.24
%(verbose)s

Returns
@@ -3281,6 +3290,9 @@ def stc_near_sensors(evoked, trans, subject, distance=0.01, mode='sum',
- ``'nearest'``
The value is given by the value of the nearest sensor, up to a
``distance`` (beyond which it is zero).
- ``'weighted'``
The value is computed as in ``'sum'``, but the weights for each vertex
are normalized to sum to 1 (i.e., a distance-weighted average).

If creating a Volume STC, ``src`` must be passed in, and this
function will project sEEG and DBS sensors to nearby surrounding vertices.
@@ -3294,10 +3306,16 @@ def stc_near_sensors(evoked, trans, subject, distance=0.01, mode='sum',
_validate_type(evoked, Evoked, 'evoked')
_validate_type(mode, str, 'mode')
_validate_type(src, (None, SourceSpaces), 'src')
_check_option('mode', mode, ('sum', 'single', 'nearest'))
_check_option('mode', mode, ('sum', 'single', 'nearest', 'weighted'))

# create a copy of Evoked using ecog, seeg and dbs
evoked = evoked.copy().pick_types(ecog=True, seeg=True, dbs=True)
if picks is None:
picks = pick_types(evoked.info, ecog=True, seeg=True, dbs=True)
evoked = evoked.copy().pick(picks)
frames = set(ch['coord_frame'] for ch in evoked.info['chs'])
if not frames == {FIFF.FIFFV_COORD_HEAD}:
raise RuntimeError('Channels must be in the head coordinate frame, '
f'got {sorted(frames)}')

# get channel positions that will be used to pinpoint where
# in the Source space we will use the evoked data
@@ -3371,6 +3389,10 @@ def stc_near_sensors(evoked, trans, subject, distance=0.01, mode='sum',
vals = w[range_, idx] if mode == 'single' else 1.
w.fill(0)
w[range_, idx] = vals
elif mode == 'weighted':
norms = w.sum(-1, keepdims=True)
norms[norms == 0] = 1.
w /= norms
missing = np.where(~np.any(w, axis=0))[0]
if len(missing):
warn(f'Channel{_pl(missing)} missing in STC: '
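Taken together, the source_estimate.py changes add a ``picks`` argument and a ``'weighted'`` mode to :func:`mne.stc_near_sensors`. A minimal usage sketch (the file path, channel names, and subjects directory are hypothetical, not from the PR):

import mne

evoked = mne.read_evokeds('ieeg-ave.fif', condition=0)
# Restrict the projection to specific sEEG/ECoG/DBS channels via ``picks``
# (indices or channel names); by default all such channels are used.
picks = ['LPT1', 'LPT2', 'LPT3']
stc = mne.stc_near_sensors(
    evoked, trans='fsaverage', subject='fsaverage',
    subjects_dir='/path/to/subjects_dir', mode='weighted', picks=picks)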
2 changes: 1 addition & 1 deletion mne/tests/test_morph.py
@@ -671,7 +671,7 @@ def test_morph_stc_dense():
spacing=6, subjects_dir=subjects_dir)
del stc_to1

with pytest.raises(ValueError, match='smooth.* has to be at least 1'):
with pytest.raises(ValueError, match='smooth.* has to be at least 0'):
compute_source_morph(
stc_from, subject_from, subject_to, spacing=5, smooth=-1,
subjects_dir=subjects_dir)
31 changes: 30 additions & 1 deletion mne/tests/test_source_estimate.py
@@ -79,6 +79,7 @@
'sample_audvis_trunc-meg-vol-7-fwd.fif')
fname_inv_vol = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-vol-7-meg-inv.fif')
fname_nirx = op.join(data_path, 'NIRx', 'nirscout', 'nirx_15_0_recording')
rng = np.random.RandomState(0)


@@ -1551,7 +1552,7 @@ def test_stc_near_sensors(tmpdir):
# here we use a distance smaller than the inter-sensor distance
kwargs = dict(subject='sample', trans=trans, subjects_dir=this_dir,
verbose=True, distance=0.005)
with pytest.raises(ValueError, match='No channels'):
with pytest.raises(ValueError, match='No appropriate channels'):
stc_near_sensors(evoked, **kwargs)
evoked.set_channel_types({ch_name: 'ecog' for ch_name in evoked.ch_names})
with catch_logging() as log:
@@ -1616,6 +1617,34 @@ def test_stc_near_sensors(tmpdir):
assert '4157 volume vertices' in log


@testing.requires_testing_data
def test_stc_near_sensors_picks():
"""Test using picks with stc_near_sensors."""
info = mne.io.read_raw_nirx(fname_nirx).info
evoked = mne.EvokedArray(np.ones((len(info['ch_names']), 1)), info)
src = mne.read_source_spaces(fname_src_fs)
kwargs = dict(
evoked=evoked, subject='fsaverage', trans='fsaverage',
subjects_dir=subjects_dir, src=src, project=True)
with pytest.raises(ValueError, match='No appropriate channels'):
stc_near_sensors(**kwargs)
picks = np.arange(len(info['ch_names']))
data = stc_near_sensors(picks=picks, **kwargs).data
assert len(data) == 20484
assert (data >= 0).all()
data = data[data > 0]
n_pts = len(data)
assert 500 < n_pts < 600
lo, hi = np.percentile(data, (5, 95))
assert 0.01 < lo < 0.1
assert 1.3 < hi < 1.7 # > 1
data = stc_near_sensors(picks=picks, mode='weighted', **kwargs).data
assert (data >= 0).all()
data = data[data > 0]
assert len(data) == n_pts
assert_array_equal(data, 1.) # values preserved


def _make_morph_map_hemi_same(subject_from, subject_to, subjects_dir,
reg_from, reg_to):
return _make_morph_map_hemi(subject_from, subject_from, subjects_dir,
6 changes: 3 additions & 3 deletions mne/viz/_brain/_brain.py
@@ -588,7 +588,7 @@ def setup_time_viewer(self, time_viewer=True, show_traces=True):
raise ValueError("No data to visualize. See ``add_data``.")
self.time_viewer = time_viewer
self.orientation = list(_lh_views_dict.keys())
self.default_smoothing_range = [0, 15]
self.default_smoothing_range = [-1, 15]

# Default configuration
self.playback = False
@@ -1907,7 +1907,7 @@ def add_data(self, array, fmin=None, fmid=None, fmax=None,
if smoothing_steps is None:
smoothing_steps = 7
elif smoothing_steps == 'nearest':
smoothing_steps = 0
smoothing_steps = -1
elif isinstance(smoothing_steps, int):
if smoothing_steps < 0:
raise ValueError('Expected value of `smoothing_steps` is'
@@ -2749,7 +2749,7 @@ def set_data_smoothing(self, n_steps):
'len(data) < nvtx (%s < %s): the vertices '
'parameter must not be None'
% (len(hemi_data), self.geo[hemi].x.shape[0]))
morph_n_steps = 'nearest' if n_steps == 0 else n_steps
morph_n_steps = 'nearest' if n_steps == -1 else n_steps
maps = sparse.eye(len(self.geo[hemi].coords), format='csr')
with use_log_level(False):
smooth_mat = _hemi_morph(
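With the viewer changes above, ``smoothing_steps='nearest'`` now maps to the internal sentinel ``-1``, which frees ``smoothing_steps=0`` to mean "no smoothing" via the new ``smooth == 0`` path in morph.py. A hedged sketch, assuming an existing ``stc`` and ``subjects_dir`` (not defined here):

# Show values only at the source vertices (no smoothing)
brain = stc.plot(hemi='lh', smoothing_steps=0, subjects_dir=subjects_dir)
# Nearest-neighbor upsampling, as before
brain_nn = stc.plot(hemi='lh', smoothing_steps='nearest',
                    subjects_dir=subjects_dir)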