Skip to content

Commit

Permalink
FIX: Many fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
larsoner committed Feb 2, 2024
1 parent 02a1e22 commit 9c120fa
Show file tree
Hide file tree
Showing 9 changed files with 126 additions and 76 deletions.
78 changes: 77 additions & 1 deletion doc/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,9 @@
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ["mne_nirs."]

# TODO: Enable this and fix links
# default_role = "py:obj"

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

Expand Down Expand Up @@ -151,13 +154,85 @@
# copied from sklearn
r"mne\.utils\.deprecated",
}

# Render parameter types in docstrings as cross-reference links.
numpydoc_xref_param_type = True
# Map short type names used in docstrings to their canonical intersphinx
# targets.  Note the various Raw reader classes (RawNIRX, RawSNIRF, ...)
# all resolve to mne.io.Raw, since the subclasses are undocumented.
numpydoc_xref_aliases = {
    "BaseRaw": "mne.io.Raw",
    "dataframe": "pandas.DataFrame",
    "DataFrame": "pandas.DataFrame",
    "ConductorModel": "mne.bem.ConductorModel",
    "Forward": "mne.Forward",
    "Info": "mne.Info",
    "Raw": "mne.io.Raw",
    "RawBOXY": "mne.io.Raw",
    "RawHitachi": "mne.io.Raw",
    "RawNIRX": "mne.io.Raw",
    "RawSNIRF": "mne.io.Raw",
    "SourceSpaces": "mne.SourceSpaces",
    "Transform": "mne.transforms.Transform",
}
# Tokens that numpydoc should NOT try to turn into cross-references —
# mostly plain English words that appear inside type specifications
# (e.g. "array of shape"), plus a few names with no documented target.
numpydoc_xref_ignore = {
    # words (some of these should be fixed)
    "instance",
    "of",
    "or",
    "None",
    "array",
    "like",
    "shape",
    "dtype",
    "str",
    "figure",
    "pairs",
    "mayavi.mlab.Figure",
    "mayavi.core.api.Scene",
    "color",
    "Figure",
    "String",
    "default",
    "colormap",
    "length",
    "Bool",
    "Axes",
    "Pandas",
    "model",
    "optional",
    "output",
    "Nilearn",
    "in",
    "specified",
    "as",
    "matplotlib.Figure",
    "MNE",
    "As",
    "two",
    "values",
    "ResultsGLM",
    "Contrast",
    "p",
    "q",
    "Number",
    "Array",
    "number",
    "n_windows",
    "n_nirs",
    "n_add_reg",
    "n_frames",
    "n_onsets",
    "derivative",
    "integers",
    "lists",
}

# sphinxcontrib-bibtex
bibtex_bibfiles = ["./references.bib", "./references-nirs.bib"]
bibtex_style = "unsrt"
bibtex_footbibliography_header = ""

# Suppress Sphinx nitpicky warnings for references matching these
# (domain, target) regex pairs — the Raw reader subclasses have no
# documented target of their own.
nitpick_ignore_regex = [
    # Type hints for undocumented types
    ("py:.*", r"mne\.io\..*\.Raw.*"),  # RawEDF etc.
]


# -- Options for HTML output ----------------------------------------------

Expand Down Expand Up @@ -281,6 +356,7 @@
"numpy": ("https://numpy.org/devdocs", None),
"scipy": ("https://scipy.github.io/devdocs", None),
"matplotlib": ("https://matplotlib.org/stable", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
"mne": ("https://mne.tools/stable", None),
"nilearn": ("http://nilearn.github.io/stable", None),
"sklearn": ("https://scikit-learn.org/stable", None),
Expand Down
43 changes: 3 additions & 40 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,44 +4,7 @@ channels:
dependencies:
- python>=3.8
- pip
- numpy
- scipy
- matplotlib
- tqdm
- pooch>=1.5
- decorator
- h5io
- packaging
- numba
- pandas
- xlrd
- scikit-learn
- h5py
- jinja2
- pillow
- statsmodels
- jupyter
- joblib
- psutil
- numexpr
- imageio
- spyder-kernels>=1.10.0
- imageio-ffmpeg>=0.4.1
- vtk>=9.0.1
- traitlets
- pyvista>=0.32,!=0.35.2,!=0.38.0,!=0.38.1,!=0.38.2,!=0.38.3,!=0.38.4,!=0.38.5,!=0.38.6
- pyvistaqt>=0.4
- qdarkstyle
- darkdetect
- dipy
- nibabel
- mne
- nilearn
- python-picard
- pyqt!=5.15.3
- mffpy>=0.5.7
- ipywidgets
- ipyvtklink
- mne-qt-browser
- pymatreader
- pip:
- git+https://github.com/mne-tools/mne-python.git@main
- statsmodels
- pytables
13 changes: 6 additions & 7 deletions examples/general/plot_11_hrf_measured.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,11 @@
viewed in the
:ref:`waveform analysis example <tut-fnirs-processing>`.
See
`Luke et al (2021) <https://www.spiedigitallibrary.org/journals/neurophotonics/volume-8/issue-2/025008/Analysis-methods-for-measuring-passive-auditory-fNIRS-responses-generated-by/10.1117/1.NPh.8.2.025008.short>`_
`Luke et al (2021) <https://www.spiedigitallibrary.org/journals/neurophotonics/volume-8/issue-2/025008/Analysis-methods-for-measuring-passive-auditory-fNIRS-responses-generated-by/10.1117/1.NPh.8.2.025008.short>`__
for a comparison of the epoching and GLM approaches.
This GLM analysis is a wrapper over the excellent
`Nilearn GLM <http://nilearn.github.io/modules/reference.html#module-nilearn.glm>`_.
`Nilearn GLM <http://nilearn.github.io/modules/reference.html#module-nilearn.glm>`__.
"""
# sphinx_gallery_thumbnail_number = 9

Expand Down Expand Up @@ -58,8 +58,7 @@
#
# Optodes were placed over the motor cortex using the standard NIRX motor
# montage, but with 8 short channels added (see their web page for details).
# To view the sensor locations run
# `raw_intensity.plot_sensors()`.
# To view the sensor locations run ``raw_intensity.plot_sensors()``.
# A sound was presented to indicate which hand the participant should tap.
# Participants tapped their thumb to their fingers for 5s.
# Conditions were presented in a random order with a randomised inter
Expand Down Expand Up @@ -149,7 +148,7 @@
#
# For further discussion on design matrices see
# the Nilearn examples. Specifically the
# `first level model example <http://nilearn.github.io/auto_examples/04_glm_first_level/plot_first_level_details.html>`_.
# `first level model example <http://nilearn.github.io/auto_examples/04_glm_first_level/plot_first_level_details.html>`__.
#
# Next we create a model to fit our data to.
# The model consists of various components to model different things we assume
Expand All @@ -160,7 +159,7 @@
# (as described above).
# We also include a cosine drift model with components up to the high pass
# parameter value. See the nilearn documentation for recommendations on setting
# these values. In short, they suggest `"The cutoff period (1/high_pass) should be
# these values. In short, they suggest "The cutoff period (1/high_pass) should be
# set as the longest period between two trials of the same condition multiplied by 2.
# For instance, if the longest period is 32s, the high_pass frequency shall be
# 1/64 Hz ~ 0.016 Hz".
Expand Down Expand Up @@ -425,7 +424,7 @@
# -----------------
#
# We can also define a contrast as described in
# `Nilearn docs <http://nilearn.github.io/auto_examples/04_glm_first_level/plot_localizer_surface_analysis.html>`_
# `Nilearn docs <http://nilearn.github.io/auto_examples/04_glm_first_level/plot_localizer_surface_analysis.html>`__
# and plot it.
# Here we contrast the response to tapping on the left hand with the response
# from tapping on the right hand.
Expand Down
4 changes: 2 additions & 2 deletions examples/general/plot_15_waveform.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,10 +162,10 @@
# remove this. A high pass filter is also included to remove slow drifts
# in the data.

fig = raw_haemo.plot_psd(average=True, amplitude=False)
fig = raw_haemo.compute_psd().plot(average=True, amplitude=False)
fig.suptitle("Before filtering", weight="bold", size="x-large")
raw_haemo = raw_haemo.filter(0.05, 0.7, h_trans_bandwidth=0.2, l_trans_bandwidth=0.02)
fig = raw_haemo.plot_psd(average=True, amplitude=False)
fig = raw_haemo.compute_psd().plot(average=True, amplitude=False)
fig.suptitle("After filtering", weight="bold", size="x-large")

# %%
Expand Down
39 changes: 20 additions & 19 deletions examples/general/plot_30_frequency.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,8 @@
# Rescale to be in expected units of uM.
hrf = raw_haemo.copy().pick(picks=[0])
hrf._data[0] = 1e-6 * (design_matrix["Tapping_Left"] + design_matrix["Tapping_Right"]).T
hrf.pick(picks="hbo").plot_psd(
average=True, fmax=2, xscale="log", color="r", show=False, amplitude=False
hrf.pick(picks="hbo").compute_psd(fmax=2).plot(
average=True, xscale="log", color="r", show=False, amplitude=False
)


Expand All @@ -111,8 +111,8 @@

# rescale data to fit in plot. TODO: fix this
raw_haemo._data = raw_haemo._data * 1e-2
raw_haemo.pick(picks="hbo").plot_psd(
average=True, fmax=2, xscale="log", amplitude=False
raw_haemo.pick(picks="hbo").compute_psd(fmax=2).plot(
average=True, xscale="log", amplitude=False
)


Expand Down Expand Up @@ -145,8 +145,8 @@
verbose=True,
)

epochs.pick(picks="hbo").plot_psd(
average=True, fmax=2, color="g", xscale="log", amplitude=False
epochs.pick(picks="hbo").compute_psd(fmax=2).plot(
average=True, color="g", xscale="log", amplitude=False
)


Expand Down Expand Up @@ -198,14 +198,16 @@
# removes these unwanted components and they are not visible in the
# epoched data.

fig = hrf.pick(picks="hbo").plot_psd(
average=True, fmax=2, color="r", show=False, amplitude=False
fig = (
hrf.pick(picks="hbo")
.compute_psd(fmax=2)
.plot(average=True, color="r", show=False, amplitude=False)
)
raw_haemo.pick(picks="hbo").plot_psd(
average=True, fmax=2, ax=fig.axes, show=False, amplitude=False
raw_haemo.pick(picks="hbo").compute_psd(fmax=2).plot(
average=True, axes=fig.axes, show=False, amplitude=False
)
epochs.pick(picks="hbo").plot_psd(
average=True, fmax=2, ax=fig.axes, show=False, color="g", amplitude=False
epochs.pick(picks="hbo").compute_psd(fmax=2).plot(
average=True, axes=fig.axes, show=False, color="g", amplitude=False
)
mne.viz.plot_filter(
filter_params,
Expand Down Expand Up @@ -237,12 +239,12 @@
# The green line illustrates the signal before filtering, and the red line
# shows the signal after filtering.

fig = raw_haemo.plot_psd(
average=True, fmax=2, xscale="log", color="r", show=False, amplitude=False
fig = raw_haemo.compute_psd(fmax=2).plot(
average=True, xscale="log", color="r", show=False, amplitude=False
)
raw_haemo = raw_haemo.filter(l_freq=None, h_freq=0.4, h_trans_bandwidth=0.2)
raw_haemo.plot_psd(
average=True, fmax=2, xscale="log", ax=fig.axes, color="g", amplitude=False
raw_haemo.compute_psd(fmax=2).plot(
average=True, xscale="log", axes=fig.axes, color="g", amplitude=False
)


Expand Down Expand Up @@ -273,10 +275,9 @@
isi_max=max_isi,
)
raw._data[0] = raw._data[0] - np.mean(raw._data[0])
raw.pick(picks="hbo").plot_psd(
raw.pick(picks="hbo").compute_psd(fmax=2).plot(
average=True,
fmax=2,
ax=axes[rep, column],
axes=axes[rep, column],
show=False,
color=sm.cmap(sm.norm(max_isi)),
xscale="log",
Expand Down
4 changes: 2 additions & 2 deletions examples/general/plot_99_bad.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,10 +115,10 @@
# Do not do this!!
#

fig = raw_haemo.plot_psd(average=True, amplitude=False)
fig = raw_haemo.compute_psd().plot(average=True, amplitude=False)
fig.suptitle("Before filtering", weight="bold", size="x-large")
raw_haemo = raw_haemo.filter(0.05, 0.1, h_trans_bandwidth=0.2, l_trans_bandwidth=0.02)
fig = raw_haemo.plot_psd(average=True, amplitude=False)
fig = raw_haemo.compute_psd().plot(average=True, amplitude=False)
fig.suptitle("After filtering", weight="bold", size="x-large")


Expand Down
8 changes: 7 additions & 1 deletion mne_nirs/statistics/_glm_level_first.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import warnings
from copy import deepcopy
from inspect import getfullargspec
from pathlib import PosixPath

import numpy as np
Expand Down Expand Up @@ -784,8 +785,13 @@ def run_glm(raw, design_matrix, noise_model="ar1", bins=0, n_jobs=1, verbose=0):
def _compute_contrast(glm_est, contrast, contrast_type=None):
    """Compute a statistical contrast over GLM estimates via nilearn.

    Parameters
    ----------
    glm_est : dict
        Mapping (presumably channel name -> nilearn regression result;
        only its keys and the mapping itself are passed through).
    contrast : array-like
        Contrast specification forwarded to nilearn's ``compute_contrast``.
    contrast_type : str | None
        The contrast/statistic type; ``None`` lets nilearn infer it.

    Returns
    -------
    contrast : object
        Whatever ``nilearn.glm.contrasts.compute_contrast`` returns.
    """
    from nilearn.glm.contrasts import compute_contrast as _cc

    # nilearn renamed the ``contrast_type`` keyword to ``stat_type``.
    # Inspect the (possibly decorator-wrapped) callable's signature to
    # find which keyword the installed nilearn version accepts.
    _inner = _cc.__wrapped__ if hasattr(_cc, "__wrapped__") else _cc
    key = "stat_type" if "stat_type" in getfullargspec(_inner).args else "contrast_type"
    return _cc(
        np.array(list(glm_est.keys())), glm_est, contrast, **{key: contrast_type}
    )


Expand Down
12 changes: 8 additions & 4 deletions mne_nirs/utils/_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,14 +66,18 @@ def glm_to_tidy(info, statistic, design_matrix, wide=True, order=None):

def _tidy_Contrast(data, glm_est, design_matrix):
df = pd.DataFrame()
if hasattr(glm_est, "stat_type"):
contrast_type = glm_est.stat_type
else: # nilearn < 0.10
contrast_type = glm_est.contrast_type
for idx, ch in enumerate(data.ch_names):
df = pd.concat(
[
df,
pd.DataFrame(
{
"ch_name": ch,
"ContrastType": glm_est.contrast_type,
"ContrastType": contrast_type,
"variable": "effect",
"value": glm_est.effect[0][idx],
},
Expand All @@ -82,7 +86,7 @@ def _tidy_Contrast(data, glm_est, design_matrix):
pd.DataFrame(
{
"ch_name": ch,
"ContrastType": glm_est.contrast_type,
"ContrastType": contrast_type,
"variable": "p_value",
"value": glm_est.p_value()[idx],
},
Expand All @@ -91,7 +95,7 @@ def _tidy_Contrast(data, glm_est, design_matrix):
pd.DataFrame(
{
"ch_name": ch,
"ContrastType": glm_est.contrast_type,
"ContrastType": contrast_type,
"variable": "stat",
"value": glm_est.stat()[idx],
},
Expand All @@ -100,7 +104,7 @@ def _tidy_Contrast(data, glm_est, design_matrix):
pd.DataFrame(
{
"ch_name": ch,
"ContrastType": glm_est.contrast_type,
"ContrastType": contrast_type,
"variable": "z_score",
"value": glm_est.z_score()[idx],
},
Expand Down
1 change: 1 addition & 0 deletions requirements_testing.txt
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,4 @@ seaborn
fooof
snirf
statsmodels
tables

0 comments on commit 9c120fa

Please sign in to comment.