From 162368a48429c848e083072a9ab39205928e9e22 Mon Sep 17 00:00:00 2001 From: Robert Luke <748691+rob-luke@users.noreply.github.com> Date: Sun, 23 May 2021 11:19:11 +1000 Subject: [PATCH 01/10] MRG: Tutorial improvements (#9416) * drammock tutorial improvements * MNE -> MNE-Python --- tutorials/io/30_reading_fnirs_data.py | 95 +++++++++++++-------------- 1 file changed, 47 insertions(+), 48 deletions(-) diff --git a/tutorials/io/30_reading_fnirs_data.py b/tutorials/io/30_reading_fnirs_data.py index 7661ba454ac..fd9ac07462c 100644 --- a/tutorials/io/30_reading_fnirs_data.py +++ b/tutorials/io/30_reading_fnirs_data.py @@ -6,25 +6,21 @@ Importing data from fNIRS devices ================================= -MNE includes various functions and utilities for reading fNIRS -data and optode locations. Regardless of the manufacturer and file format, -MNE stores both the measurement data and metadata in a consistent manner. - -fNIRS devices consist of light sources and light detectors, -often also termed emitter/transmitter and receiver respectively. -A channel is formed by source-detector pairs, and MNE represents the -channel location as the midpoint between source and detector. -MNE stores the location of the channels, sources, and -detectors. -There are a variety of fNIRS data types which can be represented in MNE. -For continuous wave fNIRS data this includes amplitude, optical density, -oxyhaemoglobin, and deoxyhemoglobin. -And for frequency domain fNIRS this additionally includes -AC amplitude and phase. -Different vendors save the data as different data types, and MNE will load -the data as the appropriate type. - -.. warning:: MNE stores metadata internally with a specific structure, +fNIRS devices consist of two kinds of optodes: light sources (AKA "emitters" or +"transmitters") and light detectors (AKA "receivers"). Channels are defined as +source-detector pairs, and channel locations are defined as the midpoint +between source and detector. + +MNE-Python provides functions for reading fNIRS data and optode locations from +several file formats. Regardless of the device manufacturer or file format, +MNE-Python's fNIRS functions will internally store the measurement data and its +metadata in the same way (e.g., data values are always converted into SI +units). Supported measurement types include amplitude, optical density, +oxyhaemoglobin concentration, and deoxyhemoglobin concentration (for continuous +wave fNIRS), and additionally AC amplitude and phase (for +frequency domain fNIRS). + +.. warning:: MNE-Python stores metadata internally with a specific structure, and internal functions expect specific naming conventions. Manual modification of channel names and metadata is not recommended. @@ -44,13 +40,13 @@ is designed by the fNIRS community in an effort to facilitate sharing and analysis of fNIRS data. And is the official format of the Society for functional near-infrared spectroscopy (SfNIRS). -SNIRF is the preferred format for reading data in to MNE. +SNIRF is the preferred format for reading data in to MNE-Python. Data stored in the SNIRF format can be read in using :func:`mne.io.read_raw_snirf`. .. note:: The SNIRF format has provisions for many different types of fNIRS - recordings. MNE currently only supports reading continuous wave data - stored in the .snirf format. + recordings. MNE-Python currently only supports reading continuous + wave data stored in the .snirf format. *********************** @@ -66,9 +62,10 @@ NIRx produce continuous wave fNIRS devices. 
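As a minimal sketch of how the readers discussed in this tutorial are used (the paths are hypothetical placeholders; ``read_raw_nirx`` is described just below)::

    import mne

    # SNIRF recordings are stored in a single file
    raw = mne.io.read_raw_snirf('recording.snirf', preload=True)

    # NIRx recordings are stored as a directory of files
    raw = mne.io.read_raw_nirx('nirx_recording/', preload=True)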
NIRx recordings can be read in using :func:`mne.io.read_raw_nirx`. The NIRx device stores data directly to a directory with multiple file types, -MNE extracts the appropriate information from each file. -MNE only supports NIRx files recorded with NIRStar version 15.0 and above. -MNE supports reading data from NIRScout and NIRSport 1 devices. +MNE-Python extracts the appropriate information from each file. +MNE-Python only supports NIRx files recorded with NIRStar +version 15.0 and above. +MNE-Python supports reading data from NIRScout and NIRSport 1 devices. .. _import-hitachi: @@ -94,7 +91,7 @@ BOXY recordings can be read in using :func:`mne.io.read_raw_boxy`. The BOXY software and ISS Imagent I and II devices are frequency domain systems that store data in a single ``.txt`` file containing what they call -(with MNE's name for that type of data in parens): +(with MNE-Python's name for that type of data in parens): - DC All light collected by the detector (``fnirs_cw_amplitude``) @@ -110,9 +107,10 @@ These raw data files can be saved by the acquisition devices as parsed or unparsed ``.txt`` files, which affects how the data in the file is organised. -MNE will read either file type and extract the raw DC, AC, and Phase data. -If triggers are sent using the ``digaux`` port of the recording hardware, MNE -will also read the ``digaux`` data and create annotations for any triggers. +MNE-Python will read either file type and extract the raw DC, AC, +and Phase data. If triggers are sent using the ``digaux`` port of the +recording hardware, MNE-Python will also read the ``digaux`` data and +create annotations for any triggers. ****************** @@ -128,10 +126,11 @@ provided by the Society for functional Near-Infrared Spectroscopy, and then load it using :func:`mne.io.read_raw_snirf`. -fNIRS measurements can have a non-standardised format that is not supported by -MNE and cannot be converted easily into SNIRF. This legacy data is often in CSV -or TSV format, we show here a way to load it even though it is not officially -supported by MNE due to the lack of standardisation of the file format (the +fNIRS measurements may be stored in a non-standardised format that is not +supported by MNE-Python and cannot be converted easily into SNIRF. +This legacy data is often in CSV or TSV format, +we show here a way to load it even though it is not officially supported by +MNE-Python due to the lack of standardisation of the file format (the naming and ordering of channels, the type and scaling of data, and specification of sensor positions varies between each vendor). You will likely have to adapt this depending on the system from which your CSV originated. @@ -144,10 +143,10 @@ # sphinx_gallery_thumbnail_number = 2 ############################################################################### -# First, we generate an example CSV file which will then be loaded in to MNE. -# This step would be skipped if you have actual data you wish to load. -# We simulate 16 channels with 100 samples of data and save this to a file -# called fnirs.csv. +# First, we generate an example CSV file which will then be loaded in to +# MNE-Python. This step would be skipped if you have actual data you wish to +# load. We simulate 16 channels with 100 samples of data and save this to a +# file called fnirs.csv. 
pd.DataFrame(np.random.normal(size=(16, 100))).to_csv("fnirs.csv") @@ -173,7 +172,7 @@ # Then, the metadata must be specified manually as the CSV file does not # contain information about channel names, types, sample rate etc. # -# .. warning:: In MNE the naming of channels MUST follow the structure of +# .. warning:: In MNE-Python the naming of channels MUST follow the structure # ``S#_D# type`` where # is replaced by the appropriate source and # detector numbers and type is either ``hbo``, ``hbr`` or the # wavelength. @@ -190,14 +189,14 @@ ############################################################################### -# Finally, the data can be converted in to an MNE data structure. +# Finally, the data can be converted in to an MNE-Python data structure. # The metadata above is used to create an :class:`mne.Info` data structure, -# and this is combined with the data to create an MNE :class:`~mne.io.Raw` -# object. For more details on the info structure see :ref:`tut-info-class`, and -# for additional details on how continuous data is stored in MNE see -# :ref:`tut-raw-class`. -# For a more extensive description of how to create MNE data structures from -# raw array data see :ref:`tut_creating_data_structures`. +# and this is combined with the data to create an MNE-Python +# :class:`~mne.io.Raw` object. For more details on the info structure +# see :ref:`tut-info-class`, and for additional details on how continuous data +# is stored in MNE-Python see :ref:`tut-raw-class`. +# For a more extensive description of how to create MNE-Python data structures +# from raw array data see :ref:`tut_creating_data_structures`. info = mne.create_info(ch_names=ch_names, ch_types=ch_types, sfreq=sfreq) raw = mne.io.RawArray(data, info, verbose=True) @@ -212,10 +211,10 @@ # etc), this is may be particularly important for fNIRS as information about # the optode locations is required to convert the optical density data in to an # estimate of the haemoglobin concentrations. -# MNE provides methods to load standard sensor configurations (montages) from -# some vendors, and this is demonstrated below. +# MNE-Python provides methods to load standard sensor configurations +# (montages) from some vendors, and this is demonstrated below. # Some handy tutorials for understanding sensor locations, coordinate systems, -# and how to store and view this information in MNE are: +# and how to store and view this information in MNE-Python are: # :ref:`tut-sensor-locations`, :ref:`plot_source_alignment`, and # :ref:`ex-eeg-on-scalp`. # From 65f74fa1c4609d7e5219bca2cc3b840650cf978f Mon Sep 17 00:00:00 2001 From: Guillaume Favelier Date: Mon, 24 May 2021 13:23:06 +0200 Subject: [PATCH 02/10] MNT: Reduce number of calls to _update (#9407) * Reduce number of calls to _update * Revert "Reduce number of calls to _update" This reverts commit 5bf39ff3454d15404304917193ea36841acbac55. 
* Improve overlay caching * Disable plot updates at init * Disable more * ENH: Speed up vertex_to_mni * FIX: Flake * FIX: Fix path * ENH: Cache mesh_tris * ENH: Defer updates and avoid reset * TST: Fix test * Fix tests Co-authored-by: Eric Larson --- mne/morph.py | 2 +- mne/source_space.py | 17 ++--- mne/surface.py | 49 +++++++++------ mne/tests/test_coreg.py | 1 + mne/tests/test_source_space.py | 7 ++- mne/utils/__init__.py | 2 +- mne/utils/numerics.py | 12 ++++ mne/viz/_brain/_brain.py | 94 ++++++++++++++++++---------- mne/viz/_brain/surface.py | 3 +- mne/viz/_brain/tests/test_brain.py | 27 ++++++-- mne/viz/backends/_abstract.py | 10 +-- mne/viz/backends/_pysurfer_mayavi.py | 9 +-- mne/viz/backends/_pyvista.py | 12 ++-- 13 files changed, 161 insertions(+), 84 deletions(-) diff --git a/mne/morph.py b/mne/morph.py index a1e9f031c95..80a373cdafe 100644 --- a/mne/morph.py +++ b/mne/morph.py @@ -1142,7 +1142,7 @@ def _hemi_morph(tris, vertices_to, vertices_from, smooth, maps, warn): e = mesh_edges(tris) e.data[e.data == 2] = 1 n_vertices = e.shape[0] - e = e + sparse.eye(n_vertices) + e += sparse.eye(n_vertices, format='csr') if isinstance(smooth, str): _check_option('smooth', smooth, ('nearest',), extra=' when used as a string.') diff --git a/mne/source_space.py b/mne/source_space.py index 0e1186e798b..b62ac4e432b 100644 --- a/mne/source_space.py +++ b/mne/source_space.py @@ -33,7 +33,7 @@ _CheckInside) from .utils import (get_subjects_dir, check_fname, logger, verbose, fill_doc, _ensure_int, check_version, _get_call_line, warn, - _check_fname, _check_path_like, has_nibabel, _check_sphere, + _check_fname, _check_path_like, _check_sphere, _validate_type, _check_option, _is_numeric, _pl, _suggest, object_size, sizeof_fmt) from .parallel import parallel_func, check_n_jobs @@ -1357,17 +1357,17 @@ def read_talxfm(subject, subjects_dir=None, verbose=None): return mri_mni_t -def _read_mri_info(path, units='m', return_img=False): - if has_nibabel(): +def _read_mri_info(path, units='m', return_img=False, use_nibabel=False): + # This is equivalent but 100x slower, so only use nibabel if we need to + # (later): + if use_nibabel: import nibabel - mgz = nibabel.load(path) - hdr = mgz.header + hdr = nibabel.load(path).header n_orig = hdr.get_vox2ras() t_orig = hdr.get_vox2ras_tkr() dims = hdr.get_data_shape() zooms = hdr.get_zooms()[:3] else: - mgz = None hdr = _get_mgz_header(path) n_orig = hdr['vox2ras'] t_orig = hdr['vox2ras_tkr'] @@ -1395,7 +1395,8 @@ def _read_mri_info(path, units='m', return_img=False): out = (vox_ras_t, vox_mri_t, mri_ras_t, dims, zooms) if return_img: - out += (mgz,) + nibabel = _import_nibabel() + out += (nibabel.load(path),) return out @@ -2250,6 +2251,8 @@ def _vol_vertex(width, height, jj, kk, pp): def _get_mgz_header(fname): """Adapted from nibabel to quickly extract header info.""" + fname = _check_fname(fname, overwrite='read', must_exist=True, + name='MRI image') if not fname.endswith('.mgz'): raise IOError('Filename must end with .mgz') header_dtd = [('version', '>i4'), ('dims', '>i4', (4,)), diff --git a/mne/surface.py b/mne/surface.py index 9dda2cf2080..7609c4009d4 100644 --- a/mne/surface.py +++ b/mne/surface.py @@ -10,7 +10,7 @@ from copy import deepcopy from distutils.version import LooseVersion -from functools import partial +from functools import partial, lru_cache from glob import glob from os import path as op from struct import pack @@ -28,7 +28,7 @@ _get_trans, apply_trans, Transform) from .utils import (logger, verbose, get_subjects_dir, warn, _check_fname, 
_check_option, _ensure_int, _TempDir, run_subprocess, - _check_freesurfer_home) + _check_freesurfer_home, _hashable_ndarray) ############################################################################### @@ -343,7 +343,7 @@ def _normal_orth(nn): @verbose def complete_surface_info(surf, do_neighbor_vert=False, copy=True, - verbose=None): + do_neighbor_tri=True, *, verbose=None): """Complete surface information. Parameters @@ -351,9 +351,11 @@ def complete_surface_info(surf, do_neighbor_vert=False, copy=True, surf : dict The surface. do_neighbor_vert : bool - If True, add neighbor vertex information. + If True (default False), add neighbor vertex information. copy : bool If True (default), make a copy. If False, operate in-place. + do_neighbor_tri : bool + If True (default), compute triangle neighbors. %(verbose)s Returns @@ -383,27 +385,28 @@ def complete_surface_info(surf, do_neighbor_vert=False, copy=True, # Find neighboring triangles, accumulate vertex normals, normalize logger.info(' Triangle neighbors and vertex normals...') - surf['neighbor_tri'] = _triangle_neighbors(surf['tris'], surf['np']) surf['nn'] = _accumulate_normals(surf['tris'].astype(int), surf['tri_nn'], surf['np']) _normalize_vectors(surf['nn']) # Check for topological defects - zero, fewer = list(), list() - for ni, n in enumerate(surf['neighbor_tri']): - if len(n) < 3: - if len(n) == 0: - zero.append(ni) - else: - fewer.append(ni) - surf['neighbor_tri'][ni] = np.array([], int) - if len(zero) > 0: - logger.info(' Vertices do not have any neighboring ' - 'triangles: [%s]' % ', '.join(str(z) for z in zero)) - if len(fewer) > 0: - logger.info(' Vertices have fewer than three neighboring ' - 'triangles, removing neighbors: [%s]' - % ', '.join(str(f) for f in fewer)) + if do_neighbor_tri: + surf['neighbor_tri'] = _triangle_neighbors(surf['tris'], surf['np']) + zero, fewer = list(), list() + for ni, n in enumerate(surf['neighbor_tri']): + if len(n) < 3: + if len(n) == 0: + zero.append(ni) + else: + fewer.append(ni) + surf['neighbor_tri'][ni] = np.array([], int) + if len(zero) > 0: + logger.info(' Vertices do not have any neighboring ' + 'triangles: [%s]' % ', '.join(str(z) for z in zero)) + if len(fewer) > 0: + logger.info(' Vertices have fewer than three neighboring ' + 'triangles, removing neighbors: [%s]' + % ', '.join(str(f) for f in fewer)) # Determine the neighboring vertices and fix errors if do_neighbor_vert is True: @@ -1451,6 +1454,12 @@ def mesh_edges(tris): edges : sparse matrix The adjacency matrix. 
""" + tris = _hashable_ndarray(tris) + return _mesh_edges(tris=tris) + + +@lru_cache(maxsize=10) +def _mesh_edges(tris=None): from scipy.sparse import coo_matrix if np.max(tris) > len(np.unique(tris)): raise ValueError( diff --git a/mne/tests/test_coreg.py b/mne/tests/test_coreg.py index a2e1485a36c..843df90de3a 100644 --- a/mne/tests/test_coreg.py +++ b/mne/tests/test_coreg.py @@ -56,6 +56,7 @@ def make_dig(coords, cf): assert_array_almost_equal(trans_est['trans'], trans['trans']) +@requires_nibabel() @pytest.mark.slowtest # can take forever on OSX Travis @testing.requires_testing_data @pytest.mark.parametrize('scale', (.9, [1, .2, .8])) diff --git a/mne/tests/test_source_space.py b/mne/tests/test_source_space.py index cbf66a30485..568570eb9f1 100644 --- a/mne/tests/test_source_space.py +++ b/mne/tests/test_source_space.py @@ -293,6 +293,7 @@ def test_discrete_source_space(tmpdir): assert _get_src_type(src_new, None) == 'discrete' +@requires_nibabel() @pytest.mark.slowtest @testing.requires_testing_data def test_volume_source_space(tmpdir): @@ -593,6 +594,7 @@ def test_head_to_mni(): assert_allclose(coords_MNI, coords_MNI_2, atol=10.0) +@requires_nibabel() @testing.requires_testing_data def test_vertex_to_mni_fs_nibabel(monkeypatch): """Test equivalence of vert_to_mni for nibabel and freesurfer.""" @@ -601,7 +603,10 @@ def test_vertex_to_mni_fs_nibabel(monkeypatch): vertices = rng.randint(0, 100000, n_check) hemis = rng.randint(0, 1, n_check) coords = vertex_to_mni(vertices, hemis, subject, subjects_dir) - monkeypatch.setattr(mne.source_space, 'has_nibabel', lambda: False) + read_mri = mne.source_space._read_mri_info + monkeypatch.setattr( + mne.source_space, '_read_mri_info', + lambda *args, **kwargs: read_mri(*args, use_nibabel=True, **kwargs)) coords_2 = vertex_to_mni(vertices, hemis, subject, subjects_dir) # less than 0.1 mm error assert_allclose(coords, coords_2, atol=0.1) diff --git a/mne/utils/__init__.py b/mne/utils/__init__.py index bb41ad9fdcf..628cdee714f 100644 --- a/mne/utils/__init__.py +++ b/mne/utils/__init__.py @@ -60,7 +60,7 @@ _mask_to_onsets_offsets, _array_equal_nan, _julian_to_cal, _cal_to_julian, _dt_to_julian, _julian_to_dt, _dt_to_stamp, _stamp_to_dt, - _check_dt, _ReuseCycle, _arange_div) + _check_dt, _ReuseCycle, _arange_div, _hashable_ndarray) from .mixin import (SizeMixin, GetEpochsMixin, _prepare_read_metadata, _prepare_write_metadata, _FakeNoPandas, ShiftTimeMixin) from .linalg import (_svd_lwork, _repeated_svd, _sym_mat_pow, sqrtm_sym, eigh, diff --git a/mne/utils/numerics.py b/mne/utils/numerics.py index 7c24daf44be..7f181ffcce5 100644 --- a/mne/utils/numerics.py +++ b/mne/utils/numerics.py @@ -605,6 +605,18 @@ def grand_average(all_inst, interpolate_bads=True, drop_bads=True): return grand_average +class _HashableNdarray(np.ndarray): + def __hash__(self): + return object_hash(self) + + def __eq__(self, other): + return NotImplementedError # defer to hash + + +def _hashable_ndarray(x): + return x.view(_HashableNdarray) + + def object_hash(x, h=None): """Hash a reasonable python object. 
diff --git a/mne/viz/_brain/_brain.py b/mne/viz/_brain/_brain.py index f9002a59b13..edb1901f228 100644 --- a/mne/viz/_brain/_brain.py +++ b/mne/viz/_brain/_brain.py @@ -105,7 +105,8 @@ def __init__(self, renderer, vertices, triangles, normals): self._actor = None self._is_mapped = False - self._cache = None + self._current_colors = None + self._cached_colors = None self._overlays = OrderedDict() self._default_scalars = np.ones(vertices.shape) @@ -142,14 +143,15 @@ def _compute_over(self, B, A): return np.clip(C, 0, 1, out=C) def _compose_overlays(self): - B = None + B = cache = None for overlay in self._overlays.values(): A = overlay.to_colors() if B is None: B = A else: - B = self._compute_over(B, A) - return B + cache = B + B = self._compute_over(cache, A) + return B, cache def add_overlay(self, scalars, colormap, rng, opacity, name): overlay = _Overlay( @@ -161,36 +163,45 @@ def add_overlay(self, scalars, colormap, rng, opacity, name): ) self._overlays[name] = overlay colors = overlay.to_colors() - - # save colors in cache - if self._cache is None: - self._cache = colors + if self._current_colors is None: + self._current_colors = colors else: - self._cache = self._compute_over(self._cache, colors) + # save previous colors to cache + self._cached_colors = self._current_colors + self._current_colors = self._compute_over( + self._cached_colors, colors) - # update the texture - self._update() + # apply the texture + self._apply() def remove_overlay(self, names): + to_update = False if not isinstance(names, list): names = [names] for name in names: if name in self._overlays: del self._overlays[name] - self.update() + to_update = True + if to_update: + self.update() - def _update(self): - if self._cache is None or self._renderer is None: + def _apply(self): + if self._current_colors is None or self._renderer is None: return self._renderer._set_mesh_scalars( mesh=self._polydata, - scalars=self._cache, + scalars=self._current_colors, name=self._default_scalars_name, ) - def update(self): - self._cache = self._compose_overlays() - self._update() + def update(self, colors=None): + if colors is not None and self._cached_colors is not None: + self._current_colors = self._compute_over( + self._cached_colors, colors) + else: + self._current_colors, self._cached_colors = \ + self._compose_overlays() + self._apply() def _clean(self): mapper = self._actor.GetMapper() @@ -213,7 +224,11 @@ def update_overlay(self, name, scalars=None, colormap=None, overlay._opacity = opacity if rng is not None: overlay._rng = rng - self.update() + # partial update: use cache if possible + if name == list(self._overlays.keys())[-1]: + self.update(colors=overlay.to_colors()) + else: # full update + self.update() @fill_doc @@ -525,7 +540,8 @@ def __init__(self, subject_id, hemi, surf, title=None, alpha=self._silhouette["alpha"], decimate=self._silhouette["decimate"], ) - self._renderer.set_camera(**views_dicts[h][v]) + self._renderer.set_camera(update=False, reset_camera=False, + **views_dicts[h][v]) self.interaction = interaction self._closed = False @@ -1177,7 +1193,7 @@ def _configure_vertex_time_course(self): alpha=0.5, ls=':') # now plot the time line - self.plot_time_line() + self.plot_time_line(update=False) # then the picked points for idx, hemi in enumerate(['lh', 'rh', 'vol']): @@ -1206,7 +1222,7 @@ def _configure_vertex_time_course(self): else: mesh = self._layered_meshes[hemi]._polydata vertex_id = vertices[ind[0]] - self._add_vertex_glyph(hemi, mesh, vertex_id) + self._add_vertex_glyph(hemi, mesh, vertex_id, 
update=False) def _configure_picking(self): # get data for each hemi @@ -1476,7 +1492,7 @@ def _remove_label_glyph(self, hemi, label_id): self._layered_meshes[hemi].remove_overlay(label.name) self.picked_patches[hemi].remove(label_id) - def _add_vertex_glyph(self, hemi, mesh, vertex_id): + def _add_vertex_glyph(self, hemi, mesh, vertex_id, update=True): if vertex_id in self.picked_points[hemi]: return @@ -1484,7 +1500,7 @@ def _add_vertex_glyph(self, hemi, mesh, vertex_id): if self.act_data_smooth[hemi][0] is None: return color = next(self.color_cycle) - line = self.plot_time_course(hemi, vertex_id, color) + line = self.plot_time_course(hemi, vertex_id, color, update=update) if hemi == 'vol': ijk = np.unravel_index( vertex_id, np.array(mesh.GetDimensions()) - 1, order='F') @@ -1584,7 +1600,7 @@ def clear_glyphs(self): self.rms = None self._renderer._update() - def plot_time_course(self, hemi, vertex_id, color): + def plot_time_course(self, hemi, vertex_id, color, update=True): """Plot the vertex time course. Parameters @@ -1595,6 +1611,8 @@ def plot_time_course(self, hemi, vertex_id, color): The vertex identifier in the mesh. color : matplotlib color The color of the time course. + update : bool + Force an update of the plot. Defaults to True. Returns ------- @@ -1643,11 +1661,18 @@ def plot_time_course(self, hemi, vertex_id, color): lw=1., color=color, zorder=4, + update=update, ) return line - def plot_time_line(self): - """Add the time line to the MPL widget.""" + def plot_time_line(self, update=True): + """Add the time line to the MPL widget. + + Parameters + ---------- + update : bool + Force an update of the plot. Defaults to True. + """ if self.mpl_canvas is None: return if isinstance(self.show_traces, bool) and self.show_traces: @@ -1659,9 +1684,11 @@ def plot_time_line(self): label='time', color=self._fg_color, lw=1, + update=update, ) self.time_line.set_xdata(current_time) - self.mpl_canvas.update_plot() + if update: + self.mpl_canvas.update_plot() def _configure_help(self): pairs = [ @@ -1983,7 +2010,8 @@ def add_data(self, array, fmin=None, fmid=None, fmax=None, bgcolor=self._brain_color[:3]) kwargs.update(colorbar_kwargs or {}) self._scalar_bar = self._renderer.scalarbar(**kwargs) - self._renderer.set_camera(**views_dicts[hemi][v]) + self._renderer.set_camera( + update=False, reset_camera=False, **views_dicts[hemi][v]) # 4) update the scalar bar and opacity self.update_lut(alpha=alpha) @@ -2280,7 +2308,7 @@ def add_label(self, label, color=None, alpha=1, scalar_thresh=None, name=label_name, ) if reset_camera: - self._renderer.set_camera(**views_dicts[hemi][v]) + self._renderer.set_camera(update=False, **views_dicts[hemi][v]) if self.time_viewer and self.show_traces \ and self.traces_mode == 'label': label._color = orig_color @@ -2394,7 +2422,7 @@ def _configure_label_time_course(self): self.add_annotation(self.annot, color="w", alpha=0.75) # now plot the time line - self.plot_time_line() + self.plot_time_line(update=False) self.mpl_canvas.update_plot() for hemi in self._hemis: @@ -2736,7 +2764,6 @@ def set_data_smoothing(self, n_steps): n_steps : int Number of smoothing steps. 
""" - from scipy import sparse from ...morph import _hemi_morph for hemi in ['lh', 'rh']: hemi_data = self._data.get(hemi) @@ -2750,12 +2777,11 @@ def set_data_smoothing(self, n_steps): 'parameter must not be None' % (len(hemi_data), self.geo[hemi].x.shape[0])) morph_n_steps = 'nearest' if n_steps == -1 else n_steps - maps = sparse.eye(len(self.geo[hemi].coords), format='csr') with use_log_level(False): smooth_mat = _hemi_morph( self.geo[hemi].orig_faces, np.arange(len(self.geo[hemi].coords)), - vertices, morph_n_steps, maps, warn=False) + vertices, morph_n_steps, maps=None, warn=False) self._data[hemi]['smooth_mat'] = smooth_mat self.set_time_point(self._data['time_idx']) self._data['smoothing_steps'] = n_steps diff --git a/mne/viz/_brain/surface.py b/mne/viz/_brain/surface.py index e2e1e512692..dc7ebea19ab 100644 --- a/mne/viz/_brain/surface.py +++ b/mne/viz/_brain/surface.py @@ -131,7 +131,8 @@ def load_geometry(self): else: coords -= (np.min(x_) + self.offset) * self.x_dir surf = dict(rr=coords, tris=faces) - complete_surface_info(surf, copy=False, verbose=False) + complete_surface_info( + surf, copy=False, verbose=False, do_neighbor_tri=False) nn = surf['nn'] self.coords = coords self.faces = faces diff --git a/mne/viz/_brain/tests/test_brain.py b/mne/viz/_brain/tests/test_brain.py index 8dc280baa60..b20da49c8c0 100644 --- a/mne/viz/_brain/tests/test_brain.py +++ b/mne/viz/_brain/tests/test_brain.py @@ -115,7 +115,8 @@ def test_layered_mesh(renderer_interactive_pyvista): assert not mesh._is_mapped mesh.map() assert mesh._is_mapped - assert mesh._cache is None + assert mesh._current_colors is None + assert mesh._cached_colors is None mesh.update() assert len(mesh._overlays) == 0 mesh.add_overlay( @@ -123,13 +124,27 @@ def test_layered_mesh(renderer_interactive_pyvista): colormap=np.array([(1, 1, 1, 1), (0, 0, 0, 0)]), rng=[0, 1], opacity=None, - name='test', + name='test1', ) - assert mesh._cache is not None + assert mesh._current_colors is not None + assert mesh._cached_colors is None + assert len(mesh._overlays) == 1 + assert 'test1' in mesh._overlays + mesh.add_overlay( + scalars=np.array([1, 0, 0, 1]), + colormap=np.array([(1, 1, 1, 1), (0, 0, 0, 0)]), + rng=[0, 1], + opacity=None, + name='test2', + ) + assert mesh._current_colors is not None + assert mesh._cached_colors is not None + assert len(mesh._overlays) == 2 + assert 'test2' in mesh._overlays + mesh.remove_overlay('test2') + assert 'test2' not in mesh._overlays + mesh.update() assert len(mesh._overlays) == 1 - assert 'test' in mesh._overlays - mesh.remove_overlay('test') - assert len(mesh._overlays) == 0 mesh._clean() diff --git a/mne/viz/backends/_abstract.py b/mne/viz/backends/_abstract.py index a101f50d62f..11f4c79ecf1 100644 --- a/mne/viz/backends/_abstract.py +++ b/mne/viz/backends/_abstract.py @@ -655,17 +655,19 @@ def _connect(self): self.canvas.mpl_connect( event + '_event', getattr(self, 'on_' + event)) - def plot(self, x, y, label, **kwargs): + def plot(self, x, y, label, update=True, **kwargs): """Plot a curve.""" line, = self.axes.plot( x, y, label=label, **kwargs) - self.update_plot() + if update: + self.update_plot() return line - def plot_time_line(self, x, label, **kwargs): + def plot_time_line(self, x, label, update=True, **kwargs): """Plot the vertical line.""" line = self.axes.axvline(x, label=label, **kwargs) - self.update_plot() + if update: + self.update_plot() return line def update_plot(self): diff --git a/mne/viz/backends/_pysurfer_mayavi.py b/mne/viz/backends/_pysurfer_mayavi.py index 
24aa8475da9..00dd0bbb7f2 100644 --- a/mne/viz/backends/_pysurfer_mayavi.py +++ b/mne/viz/backends/_pysurfer_mayavi.py @@ -316,10 +316,10 @@ def close(self): def set_camera(self, azimuth=None, elevation=None, distance=None, focalpoint=None, roll=None, reset_camera=None, - rigid=None): + rigid=None, update=True): _set_3d_view(figure=self.fig, azimuth=azimuth, elevation=elevation, distance=distance, - focalpoint=focalpoint, roll=roll) + focalpoint=focalpoint, roll=roll, update=update) def reset_camera(self): renderer = getattr(self.fig.scene, 'renderer', None) @@ -454,13 +454,14 @@ def _close_all(): def _set_3d_view(figure, azimuth, elevation, focalpoint, distance, roll=None, - reset_camera=True): + reset_camera=True, update=True): from mayavi import mlab with warnings.catch_warnings(record=True): # traits with SilenceStdout(): mlab.view(azimuth, elevation, distance, focalpoint=focalpoint, figure=figure, roll=roll) - mlab.draw(figure) + if update: + mlab.draw(figure) def _set_3d_title(figure, title, size=40): diff --git a/mne/viz/backends/_pyvista.py b/mne/viz/backends/_pyvista.py index cf942c48a0c..92e2b352a42 100644 --- a/mne/viz/backends/_pyvista.py +++ b/mne/viz/backends/_pyvista.py @@ -598,10 +598,10 @@ def close(self): def set_camera(self, azimuth=None, elevation=None, distance=None, focalpoint='auto', roll=None, reset_camera=True, - rigid=None): + rigid=None, update=True): _set_3d_view(self.figure, azimuth=azimuth, elevation=elevation, distance=distance, focalpoint=focalpoint, roll=roll, - reset_camera=reset_camera, rigid=rigid) + reset_camera=reset_camera, rigid=rigid, update=update) def reset_camera(self): self.plotter.reset_camera() @@ -930,7 +930,8 @@ def _get_camera_direction(focalpoint, position): def _set_3d_view(figure, azimuth=None, elevation=None, focalpoint='auto', - distance=None, roll=None, reset_camera=True, rigid=None): + distance=None, roll=None, reset_camera=True, rigid=None, + update=True): rigid = np.eye(4) if rigid is None else rigid position = np.array(figure.plotter.camera_position[0]) bounds = np.array(figure.plotter.renderer.ComputeVisiblePropBounds()) @@ -991,8 +992,9 @@ def _set_3d_view(figure, azimuth=None, elevation=None, focalpoint='auto', if roll is not None: figure.plotter.camera.SetRoll(figure.plotter.camera.GetRoll() + roll) - figure.plotter.update() - _process_events(figure.plotter) + if update: + figure.plotter.update() + _process_events(figure.plotter) def _set_3d_title(figure, title, size=16): From c75e81e8918b82f881560b9a0706ff0807dc5399 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 24 May 2021 08:03:18 -0400 Subject: [PATCH 03/10] MAINT: Update URL [ci skip] --- doc/overview/governance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/overview/governance.rst b/doc/overview/governance.rst index 563b1188136..ca606920daf 100644 --- a/doc/overview/governance.rst +++ b/doc/overview/governance.rst @@ -298,7 +298,7 @@ Acknowledgements Substantial portions of this document were adapted from the `SciPy project's governance document -`_, +`_, which in turn was adapted from `Jupyter/IPython project's governance document `_ and From 3384bbcc12595a89f2ceaa1f0c12abc4af920687 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 24 May 2021 09:46:50 -0400 Subject: [PATCH 04/10] MRG, ENH: Speed up brain test (#9422) * ENH: Speed up brain test * FIX: Object --- mne/tests/test_epochs.py | 3 ++- mne/viz/_brain/tests/test_notebook.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/mne/tests/test_epochs.py 
b/mne/tests/test_epochs.py index 003252172e1..b92de270c4c 100644 --- a/mne/tests/test_epochs.py +++ b/mne/tests/test_epochs.py @@ -2764,7 +2764,8 @@ def test_metadata(tmpdir): chs = ['a', 'b'] info = create_info(chs, 1000) meta = np.array([[1.] * 5 + [3.] * 5, - ['a'] * 2 + ['b'] * 3 + ['c'] * 3 + ['µ'] * 2]).T + ['a'] * 2 + ['b'] * 3 + ['c'] * 3 + ['µ'] * 2], + dtype='object').T meta = DataFrame(meta, columns=['num', 'letter']) meta['num'] = np.array(meta['num'], float) events = np.arange(meta.shape[0]) diff --git a/mne/viz/_brain/tests/test_notebook.py b/mne/viz/_brain/tests/test_notebook.py index 72c3f61e6af..f67c356d66a 100644 --- a/mne/viz/_brain/tests/test_notebook.py +++ b/mne/viz/_brain/tests/test_notebook.py @@ -51,6 +51,7 @@ def test_notebook_interactive(renderer_notebook, brain_gc, nbexec): subjects_dir = os.path.join(data_path, 'subjects') fname_stc = os.path.join(sample_dir, 'sample_audvis_trunc-meg') stc = mne.read_source_estimate(fname_stc, subject='sample') + stc.crop(0.1, 0.11) initial_time = 0.13 mne.viz.set_3d_backend('notebook') brain_class = mne.viz.get_brain_class() From 4ecca868906479eba9f90428b616e4408e781a35 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 24 May 2021 11:53:57 -0400 Subject: [PATCH 05/10] ENH: Test more on pre [skip circle] (#9423) --- tools/azure_dependencies.sh | 5 ++--- tools/github_actions_dependencies.sh | 7 ++++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tools/azure_dependencies.sh b/tools/azure_dependencies.sh index 42a59ce99bf..79931e8b885 100755 --- a/tools/azure_dependencies.sh +++ b/tools/azure_dependencies.sh @@ -6,10 +6,9 @@ if [ "${TEST_MODE}" == "pip" ]; then python -m pip install --upgrade --only-binary="numba,llvmlite" -r requirements.txt elif [ "${TEST_MODE}" == "pip-pre" ]; then python -m pip install --progress-bar off --upgrade pip setuptools - python -m pip install --progress-bar off --upgrade --pre --only-binary ":all:" -i "https://pypi.anaconda.org/scipy-wheels-nightly/simple" --extra-index-url https://www.riverbankcomputing.com/pypi/simple numpy scipy pandas scikit-learn PyQt5 - python -m pip install --progress-bar off --upgrade --pre --only-binary ":all:" -f "https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" h5py Pillow + python -m pip install --progress-bar off --upgrade --pre --only-binary ":all:" -i "https://pypi.anaconda.org/scipy-wheels-nightly/simple" --extra-index-url https://www.riverbankcomputing.com/pypi/simple numpy scipy pandas scikit-learn PyQt5 dipy statsmodels + python -m pip install --progress-bar off --upgrade --pre --only-binary ":all:" -f "https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" h5py Pillow matplotlib python -m pip install --progress-bar off --upgrade --pre --only-binary ":all" vtk - python -m pip install --progress-bar off --upgrade --only-binary ":all" matplotlib python -m pip install --progress-bar off https://github.com/pyvista/pyvista/zipball/master python -m pip install --progress-bar off https://github.com/pyvista/pyvistaqt/zipball/master python -m pip install --progress-bar off --upgrade --only-binary="numba,llvmlite" -r requirements.txt diff --git a/tools/github_actions_dependencies.sh b/tools/github_actions_dependencies.sh index 67f0edc73f8..1a301356f82 100755 --- a/tools/github_actions_dependencies.sh +++ b/tools/github_actions_dependencies.sh @@ -4,19 +4,20 @@ if [ ! -z "$CONDA_ENV" ]; then pip uninstall -yq mne elif [ ! 
-z "$CONDA_DEPENDENCIES" ]; then conda install -y $CONDA_DEPENDENCIES -else # pip --pre 3.9 (missing dipy in pre) +else # Changes here should also go in the interactive_test CircleCI job python -m pip install --progress-bar off --upgrade "pip!=20.3.0" setuptools wheel pip uninstall -yq numpy pip install --progress-bar off --upgrade --pre --only-binary ":all:" python-dateutil pytz joblib threadpoolctl - pip install --progress-bar off --upgrade --pre --only-binary ":all:" -i "https://pypi.anaconda.org/scipy-wheels-nightly/simple" --extra-index-url https://www.riverbankcomputing.com/pypi/simple numpy scipy pandas scikit-learn PyQt5 + pip install --progress-bar off --upgrade --pre --only-binary ":all:" -i "https://pypi.anaconda.org/scipy-wheels-nightly/simple" --extra-index-url https://www.riverbankcomputing.com/pypi/simple numpy scipy pandas scikit-learn PyQt5 dipy statsmodels pip install --progress-bar off --upgrade --pre --only-binary ":all:" -f "https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" h5py pillow matplotlib - pip install --progress-bar off --upgrade --pre --only-binary ":all:" numba llvmlite + pip install --progress-bar off --upgrade --pre --only-binary ":all:" numba llvmlite nilearn # built using vtk master branch on an Ubuntu 18.04.5 VM and uploaded to OSF: wget -q https://osf.io/kej3v/download -O vtk-9.0.20201117-cp39-cp39-linux_x86_64.whl pip install --progress-bar off vtk-9.0.20201117-cp39-cp39-linux_x86_64.whl pip install --progress-bar off https://github.com/pyvista/pyvista/zipball/master pip install --progress-bar off https://github.com/pyvista/pyvistaqt/zipball/master + pip install --progress-bar off --pre mayavi imageio-ffmpeg xlrd mffpy fi pip install --progress-bar off --upgrade -r requirements_testing.txt if [ "${DEPS}" != "minimal" ]; then From abfe37b2c046375bebb210cd711025c455068a3e Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Wed, 26 May 2021 18:03:40 -0400 Subject: [PATCH 06/10] ENH: Add mne.export (#9427) --- doc/changes/latest.inc | 2 + doc/export.rst | 17 ++++ doc/python_reference.rst | 1 + mne/__init__.py | 1 + mne/conftest.py | 3 + mne/epochs.py | 39 +------- mne/export/__init__.py | 1 + mne/export/_eeglab.py | 69 ++++++++++++++ mne/export/_export.py | 121 +++++++++++++++++++++++++ mne/export/tests/test_export.py | 64 +++++++++++++ mne/io/base.py | 43 +-------- mne/io/tests/test_raw.py | 24 +---- mne/io/utils.py | 21 ----- mne/tests/test_docstring_parameters.py | 3 +- mne/tests/test_epochs.py | 31 +------ mne/utils/__init__.py | 2 +- mne/utils/check.py | 43 --------- mne/utils/docs.py | 2 +- 18 files changed, 294 insertions(+), 193 deletions(-) create mode 100644 doc/export.rst create mode 100644 mne/export/__init__.py create mode 100644 mne/export/_eeglab.py create mode 100644 mne/export/_export.py create mode 100644 mne/export/tests/test_export.py diff --git a/doc/changes/latest.inc b/doc/changes/latest.inc index ee341d40815..9db709666dd 100644 --- a/doc/changes/latest.inc +++ b/doc/changes/latest.inc @@ -39,6 +39,8 @@ Enhancements - New function :func:`mne.label.find_pos_in_annot` to get atlas label for MRI coordinates. 
(:gh:`9376` by **by new contributor** |Marian Dovgialo|_) +- New namespace `mne.export` created to contain functions (such as `mne.export.export_raw` and `mne.export.export_epochs`) for exporting data to non-FIF formats (:gh:`9427` by `Eric Larson`_) + - Add support for Hitachi fNIRS devices in `mne.io.read_raw_hitachi` (:gh:`9391` by `Eric Larson`_) - Add support for ``picks`` in :func:`mne.stc_near_sensors` (:gh:`9396` by `Eric Larson`_) diff --git a/doc/export.rst b/doc/export.rst new file mode 100644 index 00000000000..a8349143ba8 --- /dev/null +++ b/doc/export.rst @@ -0,0 +1,17 @@ + +Exporting +================ + +:py:mod:`mne.export`: + +.. automodule:: mne.export + :no-members: + :no-inherited-members: + +.. currentmodule:: mne.export + +.. autosummary:: + :toctree: generated/ + + export_epochs + export_raw diff --git a/doc/python_reference.rst b/doc/python_reference.rst index a5d8ccdb7fd..76c306d4210 100644 --- a/doc/python_reference.rst +++ b/doc/python_reference.rst @@ -27,6 +27,7 @@ directly from a terminal, see :ref:`python_commands`. reading_raw_data file_io creating_from_arrays + export datasets visualization preprocessing diff --git a/mne/__init__.py b/mne/__init__.py index 5192d5f3f7d..66893ab4b60 100644 --- a/mne/__init__.py +++ b/mne/__init__.py @@ -121,6 +121,7 @@ from . import time_frequency from . import viz from . import decoding +from . import export # initialize logging set_log_level(None, False) diff --git a/mne/conftest.py b/mne/conftest.py index bb921494e45..8d1c655ee74 100644 --- a/mne/conftest.py +++ b/mne/conftest.py @@ -38,6 +38,9 @@ fname_trans = op.join(s_path, 'sample_audvis_trunc-trans.fif') +collect_ignore = ['export/_eeglab.py'] + + def pytest_configure(config): """Configure pytest options.""" # Markers diff --git a/mne/epochs.py b/mne/epochs.py index 5b35bf22e86..bc0dc7551cc 100644 --- a/mne/epochs.py +++ b/mne/epochs.py @@ -20,7 +20,6 @@ import numpy as np -from .io.utils import _get_als_coords_from_chs from .io.write import (start_file, start_block, end_file, end_block, write_int, write_float, write_float_matrix, write_double_matrix, write_complex_float_matrix, @@ -52,15 +51,14 @@ from .utils import (_check_fname, check_fname, logger, verbose, _time_mask, check_random_state, warn, _pl, sizeof_fmt, SizeMixin, copy_function_doc_to_method_doc, - _check_pandas_installed, _check_eeglabio_installed, + _check_pandas_installed, _check_preload, GetEpochsMixin, _prepare_read_metadata, _prepare_write_metadata, _check_event_id, _gen_events, _check_option, _check_combine, ShiftTimeMixin, _build_data_frame, _check_pandas_index_arguments, _convert_times, _scale_dataframe_data, _check_time_format, object_size, - _on_missing, _validate_type, _ensure_events, - _infer_check_export_fmt) + _on_missing, _validate_type, _ensure_events) from .utils.docs import fill_doc from .data.html_templates import epochs_template @@ -1836,37 +1834,8 @@ def export(self, fname, fmt='auto', verbose=None): ----- %(export_eeglab_note)s """ - supported_export_formats = { - 'eeglab': ('set',), - 'edf': ('edf',), - 'brainvision': ('eeg', 'vmrk', 'vhdr',) - } - fmt = _infer_check_export_fmt(fmt, fname, supported_export_formats) - - if fmt == 'eeglab': - _check_eeglabio_installed() - import eeglabio.epochs - # load data first - self.load_data() - - # remove extra epoc and STI channels - drop_chs = ['epoc', 'STI 014'] - ch_names = [ch for ch in self.ch_names if ch not in drop_chs] - cart_coords = _get_als_coords_from_chs(self.info['chs'], - drop_chs) - - eeglabio.epochs.export_set(fname, - 
data=self.get_data(picks=ch_names), - sfreq=self.info['sfreq'], - events=self.events, - tmin=self.tmin, tmax=self.tmax, - ch_names=ch_names, - event_id=self.event_id, - ch_locs=cart_coords) - elif fmt == 'edf': - raise NotImplementedError('Export to EDF format not implemented.') - elif fmt == 'brainvision': - raise NotImplementedError('Export to BrainVision not implemented.') + from .export import export_epochs + export_epochs(fname, self, fmt, verbose) def equalize_event_counts(self, event_ids=None, method='mintime'): """Equalize the number of trials in each condition. diff --git a/mne/export/__init__.py b/mne/export/__init__.py new file mode 100644 index 00000000000..8e4ab0dad9b --- /dev/null +++ b/mne/export/__init__.py @@ -0,0 +1 @@ +from ._export import export_raw, export_epochs diff --git a/mne/export/_eeglab.py b/mne/export/_eeglab.py new file mode 100644 index 00000000000..52ee06369d6 --- /dev/null +++ b/mne/export/_eeglab.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Authors: MNE Developers +# +# License: BSD (3-clause) + +import numpy as np + +from ..utils import _check_eeglabio_installed +_check_eeglabio_installed() +import eeglabio.raw # noqa: E402 +import eeglabio.epochs # noqa: E402 + + +def _export_raw(fname, raw): + # load data first + raw.load_data() + + # remove extra epoc and STI channels + drop_chs = ['epoc'] + if not (raw.filenames[0].endswith('.fif')): + drop_chs.append('STI 014') + + ch_names = [ch for ch in raw.ch_names if ch not in drop_chs] + cart_coords = _get_als_coords_from_chs(raw.info['chs'], drop_chs) + + annotations = [raw.annotations.description, + raw.annotations.onset, + raw.annotations.duration] + eeglabio.raw.export_set( + fname, data=raw.get_data(picks=ch_names), sfreq=raw.info['sfreq'], + ch_names=ch_names, ch_locs=cart_coords, annotations=annotations) + + +def _export_epochs(fname, epochs): + _check_eeglabio_installed() + # load data first + epochs.load_data() + + # remove extra epoc and STI channels + drop_chs = ['epoc', 'STI 014'] + ch_names = [ch for ch in epochs.ch_names if ch not in drop_chs] + cart_coords = _get_als_coords_from_chs(epochs.info['chs'], drop_chs) + + eeglabio.epochs.export_set( + fname, data=epochs.get_data(picks=ch_names), + sfreq=epochs.info['sfreq'], events=epochs.events, + tmin=epochs.tmin, tmax=epochs.tmax, ch_names=ch_names, + event_id=epochs.event_id, ch_locs=cart_coords) + + +def _get_als_coords_from_chs(chs, drop_chs=None): + """Extract channel locations in ALS format (x, y, z) from a chs instance. + + Returns + ------- + None if no valid coordinates are found (all zeros) + """ + if drop_chs is None: + drop_chs = [] + cart_coords = np.array([d['loc'][:3] for d in chs + if d['ch_name'] not in drop_chs]) + if cart_coords.any(): # has coordinates + # (-y x z) to (x y z) + cart_coords[:, 0] = -cart_coords[:, 0] # -y to y + # swap x (1) and y (0) + cart_coords[:, [0, 1]] = cart_coords[:, [1, 0]] + else: + cart_coords = None + return cart_coords diff --git a/mne/export/_export.py b/mne/export/_export.py new file mode 100644 index 00000000000..52afb4f8d4f --- /dev/null +++ b/mne/export/_export.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# Authors: MNE Developers +# +# License: BSD (3-clause) + +import os.path as op + +from ..utils import verbose, _validate_type + + +@verbose +def export_raw(fname, raw, fmt='auto', verbose=None): + """Export Raw to external formats. 
+ + Supported formats: EEGLAB (set, uses :mod:`eeglabio`) + %(export_warning)s + + Parameters + ---------- + %(export_params_fname)s + raw : instance of Raw + The raw instance to export. + %(export_params_fmt)s + %(verbose)s + + Notes + ----- + %(export_eeglab_note)s + """ + supported_export_formats = { # format : extensions + 'eeglab': ('set',), + 'edf': ('edf',), + 'brainvision': ('eeg', 'vmrk', 'vhdr',) + } + fmt = _infer_check_export_fmt(fmt, fname, supported_export_formats) + + if fmt == 'eeglab': + from ._eeglab import _export_raw + _export_raw(fname, raw) + elif fmt == 'edf': + raise NotImplementedError('Export to EDF format not implemented.') + elif fmt == 'brainvision': + raise NotImplementedError('Export to BrainVision not implemented.') + + +@verbose +def export_epochs(fname, epochs, fmt='auto', verbose=None): + """Export Epochs to external formats. + + Supported formats: EEGLAB (set, uses :mod:`eeglabio`) + %(export_warning)s + + Parameters + ---------- + %(export_params_fname)s + epochs : instance of Epochs + The epochs to export. + %(export_params_fmt)s + %(verbose)s + + Notes + ----- + %(export_eeglab_note)s + """ + supported_export_formats = { + 'eeglab': ('set',), + 'edf': ('edf',), + 'brainvision': ('eeg', 'vmrk', 'vhdr',) + } + fmt = _infer_check_export_fmt(fmt, fname, supported_export_formats) + + if fmt == 'eeglab': + from ._eeglab import _export_epochs + _export_epochs(fname, epochs) + elif fmt == 'edf': + raise NotImplementedError('Export to EDF format not implemented.') + elif fmt == 'brainvision': + raise NotImplementedError('Export to BrainVision not implemented.') + + +def _infer_check_export_fmt(fmt, fname, supported_formats): + """Infer export format from filename extension if auto. + + Raises error if fmt is auto and no file extension found, + then checks format against supported formats, raises error if format is not + supported. + + Parameters + ---------- + fmt : str + Format of the export, will only infer the format from filename if fmt + is auto. + fname : str + Name of the target export file, only used when fmt is auto. + supported_formats : dict of str : tuple/list + Dictionary containing supported formats (as keys) and each format's + corresponding file extensions in a tuple/list (e.g. 'eeglab': ('set',)) + """ + _validate_type(fmt, str, 'fmt') + fmt = fmt.lower() + if fmt == "auto": + fmt = op.splitext(fname)[1] + if fmt: + fmt = fmt[1:].lower() + # find fmt in supported formats dict's tuples + fmt = next((k for k, v in supported_formats.items() if fmt in v), + fmt) # default to original fmt for raising error later + else: + raise ValueError(f"Couldn't infer format from filename {fname}" + " (no extension found)") + + if fmt not in supported_formats: + supported = [] + for format, extensions in supported_formats.items(): + ext_str = ', '.join(f'*.{ext}' for ext in extensions) + supported.append(f'{format} ({ext_str})') + + supported_str = ', '.join(supported) + raise ValueError(f"Format '{fmt}' is not supported. 
" + f"Supported formats are {supported_str}.") + return fmt diff --git a/mne/export/tests/test_export.py b/mne/export/tests/test_export.py new file mode 100644 index 00000000000..ef7ce429384 --- /dev/null +++ b/mne/export/tests/test_export.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +"""Test exporting functions.""" +# Authors: MNE Developers +# +# License: BSD (3-clause) + +from pathlib import Path +import os.path as op + +import pytest +import numpy as np +from numpy.testing import assert_allclose, assert_array_equal + +from mne import read_epochs_eeglab, Epochs +from mne.tests.test_epochs import _get_data +from mne.io import read_raw_fif, read_raw_eeglab +from mne.utils import _check_eeglabio_installed + + +@pytest.mark.skipif(not _check_eeglabio_installed(strict=False), + reason='eeglabio not installed') +def test_export_raw_eeglab(tmpdir): + """Test saving a Raw instance to EEGLAB's set format.""" + fname = (Path(__file__).parent.parent.parent / + "io" / "tests" / "data" / "test_raw.fif") + raw = read_raw_fif(fname) + raw.load_data() + temp_fname = op.join(str(tmpdir), 'test.set') + raw.export(temp_fname) + raw.drop_channels([ch for ch in ['epoc'] + if ch in raw.ch_names]) + raw_read = read_raw_eeglab(temp_fname, preload=True) + assert raw.ch_names == raw_read.ch_names + cart_coords = np.array([d['loc'][:3] for d in raw.info['chs']]) # just xyz + cart_coords_read = np.array([d['loc'][:3] for d in raw_read.info['chs']]) + assert_allclose(cart_coords, cart_coords_read) + assert_allclose(raw.times, raw_read.times) + assert_allclose(raw.get_data(), raw_read.get_data()) + + +@pytest.mark.skipif(not _check_eeglabio_installed(strict=False), + reason='eeglabio not installed') +@pytest.mark.parametrize('preload', (True, False)) +def test_export_epochs_eeglab(tmpdir, preload): + """Test saving an Epochs instance to EEGLAB's set format.""" + raw, events = _get_data()[:2] + raw.load_data() + epochs = Epochs(raw, events, preload=preload) + temp_fname = op.join(str(tmpdir), 'test.set') + epochs.export(temp_fname) + epochs.drop_channels([ch for ch in ['epoc', 'STI 014'] + if ch in epochs.ch_names]) + epochs_read = read_epochs_eeglab(temp_fname) + assert epochs.ch_names == epochs_read.ch_names + cart_coords = np.array([d['loc'][:3] + for d in epochs.info['chs']]) # just xyz + cart_coords_read = np.array([d['loc'][:3] + for d in epochs_read.info['chs']]) + assert_allclose(cart_coords, cart_coords_read) + assert_array_equal(epochs.events[:, 0], + epochs_read.events[:, 0]) # latency + assert epochs.event_id.keys() == epochs_read.event_id.keys() # just keys + assert_allclose(epochs.times, epochs_read.times) + assert_allclose(epochs.get_data(), epochs_read.get_data()) diff --git a/mne/io/base.py b/mne/io/base.py index 397836440fd..61978daffc0 100644 --- a/mne/io/base.py +++ b/mne/io/base.py @@ -21,9 +21,7 @@ import numpy as np from .constants import FIFF -from .utils import _construct_bids_filename, _check_orig_units, \ - _get_als_coords_from_chs -from ..utils.check import _infer_check_export_fmt +from .utils import _construct_bids_filename, _check_orig_units from .pick import (pick_types, pick_channels, pick_info, _picks_to_idx, channel_type) from .meas_info import write_meas_info @@ -49,8 +47,7 @@ copy_function_doc_to_method_doc, _validate_type, _check_preload, _get_argvalues, _check_option, _build_data_frame, _convert_times, _scale_dataframe_data, - _check_time_format, _arange_div, - _check_eeglabio_installed) + _check_time_format, _arange_div) from ..defaults import _handle_default from ..viz import 
plot_raw, plot_raw_psd, plot_raw_psd_topo, _RAW_CLIP_DEF from ..event import find_events, concatenate_events @@ -1470,40 +1467,8 @@ def export(self, fname, fmt='auto', verbose=None): ----- %(export_eeglab_note)s """ - supported_export_formats = { # format : extensions - 'eeglab': ('set',), - 'edf': ('edf',), - 'brainvision': ('eeg', 'vmrk', 'vhdr',) - } - fmt = _infer_check_export_fmt(fmt, fname, supported_export_formats) - - if fmt == 'eeglab': - _check_eeglabio_installed() - import eeglabio.raw - # load data first - self.load_data() - - # remove extra epoc and STI channels - drop_chs = ['epoc'] - if not (self.filenames[0].endswith('.fif')): - drop_chs.append('STI 014') - - ch_names = [ch for ch in self.ch_names if ch not in drop_chs] - cart_coords = _get_als_coords_from_chs(self.info['chs'], - drop_chs) - - annotations = [self.annotations.description, - self.annotations.onset, - self.annotations.duration] - eeglabio.raw.export_set(fname, data=self.get_data(picks=ch_names), - sfreq=self.info['sfreq'], - ch_names=ch_names, - ch_locs=cart_coords, - annotations=annotations) - elif fmt == 'edf': - raise NotImplementedError('Export to EDF format not implemented.') - elif fmt == 'brainvision': - raise NotImplementedError('Export to BrainVision not implemented.') + from ..export import export_raw + export_raw(fname, self, fmt, verbose=verbose) def _tmin_tmax_to_start_stop(self, tmin, tmax): start = int(np.floor(tmin * self.info['sfreq'])) diff --git a/mne/io/tests/test_raw.py b/mne/io/tests/test_raw.py index a220c6cda4e..10936088ac9 100644 --- a/mne/io/tests/test_raw.py +++ b/mne/io/tests/test_raw.py @@ -24,13 +24,11 @@ from mne.io import read_raw_fif, RawArray, BaseRaw, Info, _writing_info_hdf5 from mne.io.base import _get_scaling from mne.utils import (_TempDir, catch_logging, _raw_annot, _stamp_to_dt, - object_diff, check_version, requires_pandas, - _check_eeglabio_installed) + object_diff, check_version, requires_pandas) from mne.io.meas_info import _get_valid_units from mne.io._digitization import DigPoint from mne.io.proj import Projection from mne.io.utils import _mult_cal_one -from mne.io import read_raw_eeglab def assert_named_constants(info): @@ -694,23 +692,3 @@ def test_get_data_units(): # not the good type with pytest.raises(TypeError, match='instance of None, str, or dict'): raw.get_data(units=['fT/cm', 'fT', 'uV']) - - -@pytest.mark.skipif(not _check_eeglabio_installed(strict=False), - reason='eeglabio not installed') -def test_export_eeglab(tmpdir): - """Test saving a Raw instance to EEGLAB's set format.""" - fname = Path(__file__).parent / "data" / "test_raw.fif" - raw = read_raw_fif(fname) - raw.load_data() - temp_fname = op.join(str(tmpdir), 'test.set') - raw.export(temp_fname) - raw.drop_channels([ch for ch in ['epoc'] - if ch in raw.ch_names]) - raw_read = read_raw_eeglab(temp_fname, preload=True) - assert raw.ch_names == raw_read.ch_names - cart_coords = np.array([d['loc'][:3] for d in raw.info['chs']]) # just xyz - cart_coords_read = np.array([d['loc'][:3] for d in raw_read.info['chs']]) - assert_allclose(cart_coords, cart_coords_read) - assert_allclose(raw.times, raw_read.times) - assert_allclose(raw.get_data(), raw_read.get_data()) diff --git a/mne/io/utils.py b/mne/io/utils.py index f0c700880f7..7c9ab92240b 100644 --- a/mne/io/utils.py +++ b/mne/io/utils.py @@ -312,24 +312,3 @@ def _construct_bids_filename(base, ext, part_idx): if dirname: use_fname = op.join(dirname, use_fname) return use_fname - - -def _get_als_coords_from_chs(chs, drop_chs=None): - """Extract channel 
locations in ALS format (x, y, z) from a chs instance. - - Returns - ------- - None if no valid coordinates are found (all zeros) - """ - if drop_chs is None: - drop_chs = [] - cart_coords = np.array([d['loc'][:3] for d in chs - if d['ch_name'] not in drop_chs]) - if cart_coords.any(): # has coordinates - # (-y x z) to (x y z) - cart_coords[:, 0] = -cart_coords[:, 0] # -y to y - # swap x (1) and y (0) - cart_coords[:, [0, 1]] = cart_coords[:, [1, 0]] - else: - cart_coords = None - return cart_coords diff --git a/mne/tests/test_docstring_parameters.py b/mne/tests/test_docstring_parameters.py index 8bcb6d6f8d4..2d4ba5d9394 100644 --- a/mne/tests/test_docstring_parameters.py +++ b/mne/tests/test_docstring_parameters.py @@ -27,6 +27,7 @@ 'mne.datasets.sample', 'mne.decoding', 'mne.dipole', + 'mne.export', 'mne.filter', 'mne.forward', 'mne.inverse_sparse', @@ -283,7 +284,7 @@ def test_documented(): 'decoding', 'events', 'file_io', 'forward', 'inverse', 'logging', 'most_used_classes', 'mri', 'preprocessing', 'reading_raw_data', 'realtime', 'report', 'sensor_space', 'simulation', 'source_space', - 'statistics', 'time_frequency', 'visualization') + 'statistics', 'time_frequency', 'visualization', 'export') known_names = list() for api_file in api_files: with open(op.join(doc_dir, f'{api_file}.rst'), 'rb') as fid: diff --git a/mne/tests/test_epochs.py b/mne/tests/test_epochs.py index b92de270c4c..e565251101a 100644 --- a/mne/tests/test_epochs.py +++ b/mne/tests/test_epochs.py @@ -29,7 +29,7 @@ from mne.datasets import testing from mne.chpi import read_head_pos, head_pos_to_trans_rot_t from mne.event import merge_events -from mne.io import RawArray, read_raw_fif, read_epochs_eeglab +from mne.io import RawArray, read_raw_fif from mne.io.constants import FIFF from mne.io.proj import _has_eeg_average_ref_proj from mne.io.write import write_int, INT32_MAX, _get_split_size @@ -40,8 +40,7 @@ _handle_event_repeated, make_metadata) from mne.utils import (requires_pandas, object_diff, catch_logging, _FakeNoPandas, - assert_meg_snr, check_version, _dt_to_stamp, - _check_eeglabio_installed) + assert_meg_snr, check_version, _dt_to_stamp) data_path = testing.data_path(download=False) fname_raw_testing = op.join(data_path, 'MEG', 'sample', @@ -3106,32 +3105,6 @@ def test_save_complex_data(tmpdir, preload, is_complex, fmt, rtol): assert_allclose(data_read, data, rtol=rtol) -@pytest.mark.skipif(not _check_eeglabio_installed(strict=False), - reason='eeglabio not installed') -@pytest.mark.parametrize('preload', (True, False)) -def test_export_eeglab(tmpdir, preload): - """Test saving an Epochs instance to EEGLAB's set format.""" - raw, events = _get_data()[:2] - raw.load_data() - epochs = Epochs(raw, events, preload=preload) - temp_fname = op.join(str(tmpdir), 'test.set') - epochs.export(temp_fname) - epochs.drop_channels([ch for ch in ['epoc', 'STI 014'] - if ch in epochs.ch_names]) - epochs_read = read_epochs_eeglab(temp_fname) - assert epochs.ch_names == epochs_read.ch_names - cart_coords = np.array([d['loc'][:3] - for d in epochs.info['chs']]) # just xyz - cart_coords_read = np.array([d['loc'][:3] - for d in epochs_read.info['chs']]) - assert_allclose(cart_coords, cart_coords_read) - assert_array_equal(epochs.events[:, 0], - epochs_read.events[:, 0]) # latency - assert epochs.event_id.keys() == epochs_read.event_id.keys() # just keys - assert_allclose(epochs.times, epochs_read.times) - assert_allclose(epochs.get_data(), epochs_read.get_data()) - - def test_no_epochs(tmpdir): """Test that having the first epoch 
     # a regression noticed in #5564
diff --git a/mne/utils/__init__.py b/mne/utils/__init__.py
index 628cdee714f..206b8db22ec 100644
--- a/mne/utils/__init__.py
+++ b/mne/utils/__init__.py
@@ -19,7 +19,7 @@
                     _check_freesurfer_home, _suggest, _require_version,
                     _on_missing, _check_on_missing, int_like, _safe_input,
                     _check_all_same_channel_names, path_like, _ensure_events,
-                    _check_eeglabio_installed, _infer_check_export_fmt)
+                    _check_eeglabio_installed)
 from .config import (set_config, get_config, get_config_path, set_cache_dir,
                      set_memmap_min_size, get_subjects_dir, _get_stim_channel,
                      sys_info, _get_extra_data_path, _get_root_dir,
diff --git a/mne/utils/check.py b/mne/utils/check.py
index d472c24819c..c66b7d13690 100644
--- a/mne/utils/check.py
+++ b/mne/utils/check.py
@@ -774,46 +774,3 @@ def _ensure_events(events):
         raise ValueError(
             f'events must be of shape (N, 3), got {events.shape}')
     return events
-
-
-def _infer_check_export_fmt(fmt, fname, supported_formats):
-    """Infer export format from filename extension if auto.
-
-    Raises error if fmt is auto and no file extension found,
-    then checks format against supported formats, raises error if format is
-    not supported.
-
-    Parameters
-    ----------
-    fmt : str
-        Format of the export, will only infer the format from filename if fmt
-        is auto.
-    fname : str
-        Name of the target export file, only used when fmt is auto.
-    supported_formats : dict of str : tuple/list
-        Dictionary containing supported formats (as keys) and each format's
-        corresponding file extensions in a tuple/list (e.g. 'eeglab': ('set',))
-    """
-    _validate_type(fmt, str, 'fmt')
-    fmt = fmt.lower()
-    if fmt == "auto":
-        fmt = op.splitext(fname)[1]
-        if fmt:
-            fmt = fmt[1:].lower()
-            # find fmt in supported formats dict's tuples
-            fmt = next((k for k, v in supported_formats.items() if fmt in v),
-                       fmt)  # default to original fmt for raising error later
-        else:
-            raise ValueError(f"Couldn't infer format from filename {fname}"
-                             " (no extension found)")
-
-    if fmt not in supported_formats:
-        supported = []
-        for format, extensions in supported_formats.items():
-            ext_str = ', '.join(f'*.{ext}' for ext in extensions)
-            supported.append(f'{format} ({ext_str})')
-
-        supported_str = ', '.join(supported)
-        raise ValueError(f"Format '{fmt}' is not supported. "
-                         f"Supported formats are {supported_str}.")
-    return fmt
diff --git a/mne/utils/docs.py b/mne/utils/docs.py
index 75990ab6e48..976c51548b5 100644
--- a/mne/utils/docs.py
+++ b/mne/utils/docs.py
@@ -2326,7 +2326,7 @@
 .. warning::
     Since we are exporting to external formats, there's no guarantee that all
     the info will be preserved in the external format. To save in native MNE
-    format (``.fif``) without information loss, use :func:`save` instead.
+    format (``.fif``) without information loss, use ``save`` instead.
""" docdict['export_params_fname'] = """ fname : str From 3b69e9b936ee01fcb357e732ee35cb75086a6bf8 Mon Sep 17 00:00:00 2001 From: Guillaume Favelier Date: Thu, 27 May 2021 16:01:32 +0200 Subject: [PATCH 07/10] FIX: brain save_movie (#9426) * Fix * Use filename * Fix docstring * Use ffprobe * Use imageio_ffmpeg --- mne/viz/_brain/_brain.py | 18 ++++++------------ mne/viz/_brain/tests/test_brain.py | 11 +++++++++-- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/mne/viz/_brain/_brain.py b/mne/viz/_brain/_brain.py index edb1901f228..fa95bda222e 100644 --- a/mne/viz/_brain/_brain.py +++ b/mne/viz/_brain/_brain.py @@ -1261,7 +1261,9 @@ def _configure_tool_bar(self): self._renderer._tool_bar_add_file_button( name="movie", desc="Save movie...", - func=self.save_movie, + func=lambda filename: self.save_movie( + filename=filename, + time_dilation=(1. / self.playback_speed)), shortcut="ctrl+shift+s", ) self._renderer._tool_bar_add_button( @@ -3081,12 +3083,9 @@ def frame_callback(frame, n_frames): self._renderer._window_new_cursor("WaitCursor")) try: - self._save_movie( - filename=filename, - time_dilation=(1. / self.playback_speed), - callback=frame_callback, - **kwargs - ) + self._save_movie(filename, time_dilation, tmin, tmax, + framerate, interpolation, codec, + bitrate, frame_callback, time_viewer, **kwargs) except (Exception, KeyboardInterrupt): warn('Movie saving aborted:\n' + traceback.format_exc()) finally: @@ -3139,11 +3138,6 @@ def save_movie(self, filename=None, time_dilation=4., tmin=None, tmax=None, %(brain_screenshot_time_viewer)s **kwargs : dict Specify additional options for :mod:`imageio`. - - Returns - ------- - dialog : object - The opened dialog is returned for testing purpose only. """ if filename is None: filename = _generate_default_filename(".mp4") diff --git a/mne/viz/_brain/tests/test_brain.py b/mne/viz/_brain/tests/test_brain.py index b20da49c8c0..55bd2ae3b85 100644 --- a/mne/viz/_brain/tests/test_brain.py +++ b/mne/viz/_brain/tests/test_brain.py @@ -391,6 +391,7 @@ def test_brain_save_movie(tmpdir, renderer, brain_gc): """Test saving a movie of a Brain instance.""" if renderer._get_3d_backend() == "mayavi": pytest.skip('Save movie only supported on PyVista') + from imageio_ffmpeg import count_frames_and_secs brain = _create_testing_brain(hemi='lh', time_viewer=False) filename = str(path.join(tmpdir, "brain_test.mov")) for interactive_state in (False, True): @@ -403,9 +404,15 @@ def test_brain_save_movie(tmpdir, renderer, brain_gc): brain.save_movie(filename, time_dilation=1, tmin=1, tmax=1.1, bad_name='blah') assert not path.isfile(filename) - brain.save_movie(filename, time_dilation=0.1, - interpolation='nearest') + tmin = 1 + tmax = 5 + duration = np.floor(tmax - tmin) + brain.save_movie(filename, time_dilation=1., tmin=tmin, + tmax=tmax, interpolation='nearest') assert path.isfile(filename) + _, nsecs = count_frames_and_secs(filename) + assert_allclose(duration, nsecs, atol=0.2) + os.remove(filename) brain.close() From f42f61284c280145c4f396aa3e9fa9ed504d7114 Mon Sep 17 00:00:00 2001 From: Robert Luke <748691+rob-luke@users.noreply.github.com> Date: Sat, 29 May 2021 18:47:55 +1000 Subject: [PATCH 08/10] MRG: Enable interpolation for all fNIRS types (#9431) * Enable interpolation for all fNIRS types * Update interpolation.py * Update latest.inc --- doc/changes/latest.inc | 2 ++ mne/channels/interpolation.py | 6 ++---- mne/channels/tests/test_interpolation.py | 8 +++++++- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git 
diff --git a/doc/changes/latest.inc b/doc/changes/latest.inc
index 9db709666dd..1509209aff2 100644
--- a/doc/changes/latest.inc
+++ b/doc/changes/latest.inc
@@ -47,6 +47,8 @@ Enhancements

 - Add projections when printing a :class:`mne.Info` in the notebook (:gh:`9403` by `Alex Gramfort`_)

+- Add support for interpolating oxy and deoxyhaemoglobin data types (:gh:`9431` by `Robert Luke`_)
+
 Bugs
 ~~~~
 - Fix bug with :meth:`mne.Epochs.crop` and :meth:`mne.Evoked.crop` when ``include_tmax=False``, where the last sample was always cut off, even when ``tmax > epo.times[-1]`` (:gh:`9378` **by new contributor** |Jan Sosulski|_)
diff --git a/mne/channels/interpolation.py b/mne/channels/interpolation.py
index 18a800d704f..ad10124d7be 100644
--- a/mne/channels/interpolation.py
+++ b/mne/channels/interpolation.py
@@ -205,12 +205,10 @@ def _interpolate_bads_meeg(inst, mode='accurate', origin=(0., 0., 0.04),
 @verbose
 def _interpolate_bads_nirs(inst, method='nearest', exclude=(), verbose=None):
     from scipy.spatial.distance import pdist, squareform
-    from mne.preprocessing.nirs import _channel_frequencies,\
-        _check_channels_ordered
+    from mne.preprocessing.nirs import _validate_nirs_info

     # Returns pick of all nirs and ensures channels are correctly ordered
-    freqs = np.unique(_channel_frequencies(inst.info))
-    picks_nirs = _check_channels_ordered(inst.info, freqs)
+    picks_nirs = _validate_nirs_info(inst.info)
     if len(picks_nirs) == 0:
         return
diff --git a/mne/channels/tests/test_interpolation.py b/mne/channels/tests/test_interpolation.py
index 9a81afdad08..076f1dffd12 100644
--- a/mne/channels/tests/test_interpolation.py
+++ b/mne/channels/tests/test_interpolation.py
@@ -8,7 +8,8 @@
 from mne import io, pick_types, pick_channels, read_events, Epochs
 from mne.channels.interpolation import _make_interpolation_matrix
 from mne.datasets import testing
-from mne.preprocessing.nirs import optical_density, scalp_coupling_index
+from mne.preprocessing.nirs import (optical_density, scalp_coupling_index,
+                                    beer_lambert_law)
 from mne.datasets.testing import data_path
 from mne.io import read_raw_nirx
 from mne.io.proj import _has_eeg_average_ref_proj
@@ -303,3 +304,8 @@ def test_interpolation_nirs():
     raw_od.interpolate_bads()
     assert raw_od.info['bads'] == []
     assert bad_0_std_pre_interp > np.std(raw_od._data[bad_0])
+    raw_haemo = beer_lambert_law(raw_od)
+    raw_haemo.info['bads'] = raw_haemo.ch_names[2:4]
+    assert raw_haemo.info['bads'] == ['S1_D2 hbo', 'S1_D2 hbr']
+    raw_haemo.interpolate_bads()
+    assert raw_haemo.info['bads'] == []

From f3fa1ca666419893def1635f16065f502739604b Mon Sep 17 00:00:00 2001
From: Alexandre Gramfort
Date: Tue, 1 Jun 2021 10:31:56 +0200
Subject: [PATCH 09/10] FIX: rank computation from info now uses SSS proc
 history if only grad or mag are present (#9435)

---
 doc/changes/latest.inc | 2 ++
 mne/rank.py            | 2 +-
 mne/tests/test_rank.py | 6 +++++-
 3 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/doc/changes/latest.inc b/doc/changes/latest.inc
index 1509209aff2..39efb7b87bd 100644
--- a/doc/changes/latest.inc
+++ b/doc/changes/latest.inc
@@ -59,6 +59,8 @@ Bugs

 - Fix bug when printing a :class:`mne.io.RawArray` in the notebook (:gh:`9404` by `Alex Gramfort`_)

+- Fix bug when computing rank from info for SSS data with only gradiometers or magnetometers (:gh:`9435` by `Alex Gramfort`_)
+
 API changes
 ~~~~~~~~~~~
 - Nothing yet
diff --git a/mne/rank.py b/mne/rank.py
index 04695603f04..05ddf413743 100644
--- a/mne/rank.py
+++ b/mne/rank.py
@@ -256,7 +256,7 @@ def _get_rank_sss(inst, msg='You should use data-based rank estimate instead',

 def _info_rank(info, ch_type, picks, rank):
-    if ch_type == 'meg' and rank != 'full':
+    if ch_type in ['meg', 'mag', 'grad'] and rank != 'full':
         try:
             return _get_rank_sss(info)
         except ValueError:
diff --git a/mne/tests/test_rank.py b/mne/tests/test_rank.py
index e3aa0e760db..7ac9b28cb65 100644
--- a/mne/tests/test_rank.py
+++ b/mne/tests/test_rank.py
@@ -204,7 +204,11 @@ def test_maxfilter_get_rank(n_proj, fname, rank_orig, meg, tol_kind, tol):
     assert raw.info['projs'] == []
     mf = raw.info['proc_history'][0]['max_info']
     assert mf['sss_info']['nfree'] == rank_orig
-    assert _get_rank_sss(raw) == rank_orig
+
+    assert compute_rank(raw, 'info')['meg'] == rank_orig
+    assert compute_rank(raw.copy().pick('grad'), 'info')['grad'] == rank_orig
+    assert compute_rank(raw.copy().pick('mag'), 'info')['mag'] == rank_orig
+
     mult = 1 + (meg == 'separate')
     rank = rank_orig - mult * n_proj
     if n_proj > 0:

From e6a029802bb2174eaee21f0ab544b4b64c71c1da Mon Sep 17 00:00:00 2001
From: Stefan Appelhoff
Date: Wed, 2 Jun 2021 11:09:47 +0200
Subject: [PATCH 10/10] [MRG] change utils.logger.warning -> utils.warn (#9434)

* change utils.logger.warning -> utils.warn

* fix flake

* revert fixes for mne/fixes.py, due to circular import

* FIX: Revert

* DOC: Give reason

Co-authored-by: Eric Larson
---
 mne/gui/_coreg_gui.py             |  4 ++--
 mne/io/brainvision/brainvision.py |  4 ++--
 mne/io/kit/kit.py                 |  2 +-
 mne/utils/misc.py                 | 10 +++++++---
 4 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/mne/gui/_coreg_gui.py b/mne/gui/_coreg_gui.py
index cb5ce0f29c3..81dcb36fc33 100644
--- a/mne/gui/_coreg_gui.py
+++ b/mne/gui/_coreg_gui.py
@@ -90,7 +90,7 @@
 from ..coreg import fit_matched_points, scale_mri, _find_fiducials_files
 from ..viz.backends._pysurfer_mayavi import _toggle_mlab_render
 from ..viz._3d import _get_3d_option
-from ..utils import logger, set_config, _pl
+from ..utils import logger, set_config, _pl, warn
 from ._fiducials_gui import MRIHeadWithFiducialsModel, FiducialsPanel
 from ._file_traits import trans_wildcard, DigSource, SubjectSelectorPanel
 from ._viewer import (HeadViewController, PointObject, SurfaceObject,
@@ -895,7 +895,7 @@ def close(self, info, is_ok):
         try:
             info.object.save_config(size=size)
         except Exception as exc:
-            warnings.warn("Error saving GUI configuration:\n%s" % (exc,))
+            warn("Error saving GUI configuration:\n%s" % (exc,))
     return True
diff --git a/mne/io/brainvision/brainvision.py b/mne/io/brainvision/brainvision.py
index d37e9c2126e..ed075e3d4e6 100644
--- a/mne/io/brainvision/brainvision.py
+++ b/mne/io/brainvision/brainvision.py
@@ -484,8 +484,8 @@ def _get_vhdr_info(vhdr_fname, eog, misc, scale):
     try:
         n_samples = cfg.getint(cinfostr, 'DataPoints')
     except configparser.NoOptionError:
-        logger.warning('No info on DataPoints found. Inferring number of '
-                       'samples from the data file size.')
+        warn('No info on DataPoints found. Inferring number of '
+             'samples from the data file size.')
     with open(data_fname, 'rb') as fid:
         fid.seek(0, 2)
         n_bytes = fid.tell()
diff --git a/mne/io/kit/kit.py b/mne/io/kit/kit.py
index 0f4e0d639c0..6501635bbda 100644
--- a/mne/io/kit/kit.py
+++ b/mne/io/kit/kit.py
@@ -512,7 +512,7 @@ def get_kit_info(rawfile, allow_unknown_format, standardize_names=None,
             version_string = "V%iR%03i" % (version, revision)
             if allow_unknown_format:
                 unsupported_format = True
-                logger.warning("Force loading KIT format %s", version_string)
+                warn("Force loading KIT format %s" % version_string)
             else:
                 raise UnsupportedKITFormat(
                     version_string,
diff --git a/mne/utils/misc.py b/mne/utils/misc.py
index caceae5abe2..76d7303c704 100644
--- a/mne/utils/misc.py
+++ b/mne/utils/misc.py
@@ -123,7 +123,7 @@ def run_subprocess(command, return_code=False, verbose=None, *args, **kwargs):
     # non-blocking adapted from https://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python#4896288  # noqa: E501
     out_q = Queue()
     err_q = Queue()
-    with running_subprocess(command, *args, **kwargs) as p:
+    with running_subprocess(command, *args, **kwargs) as p, p.stdout, p.stderr:
         out_t = Thread(target=_enqueue_output, args=(p.stdout, out_q))
         err_t = Thread(target=_enqueue_output, args=(p.stderr, err_q))
         out_t.daemon = True
@@ -149,12 +149,16 @@ def run_subprocess(command, return_code=False, verbose=None, *args, **kwargs):
                 break
         else:
             err = err.decode('utf-8')
+            # Leave this as logger.warning rather than warn(...) to
+            # mirror the logger.info above for stdout. This function
+            # is basically just a version of subprocess.call, and
+            # shouldn't emit Python warnings due to stderr outputs
+            # (the calling function can check for stderr output and
+            # emit a warning if it wants).
             logger.warning(err)
             all_err += err
         if do_break:
             break
-    p.stdout.close()
-    p.stderr.close()
     output = (all_out, all_err)
     if return_code: