From ca66da6888ea6ec0a0b00b37616e9131029cf5ee Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 7 Aug 2023 11:22:43 -0400 Subject: [PATCH 1/6] Add github action to codespell main on push and PRs --- .github/workflows/codespell.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/workflows/codespell.yml diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 00000000..3ebbf550 --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,22 @@ +--- +name: Codespell + +on: + push: + branches: [main] + pull_request: + branches: [main] + +permissions: + contents: read + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Codespell + uses: codespell-project/actions-codespell@v2 From 2e5356942f5a10a5c46561bf28f2bc8d49e03a09 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 7 Aug 2023 11:22:43 -0400 Subject: [PATCH 2/6] Add rudimentary codespell config --- pyproject.toml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 45a65531..f637d6ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,4 +90,8 @@ omit = [ "pynapple/io/ephys_gui.py", "pynapple/io/ophys_gui.py", "pynapple/core/jitted_functions.py" # coverage doesn't work with numba -] \ No newline at end of file +] +[tool.codespell] +skip = '.git,*.pdf,*.svg' +# +# ignore-words-list = '' From a349ebda0a98f3d7c3b9fb18a5ed0e59aa121756 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 7 Aug 2023 11:27:04 -0400 Subject: [PATCH 3/6] Skip images in notebooks and ADN abbreviation --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f637d6ce..8ca3e71a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,5 +93,5 @@ omit = [ ] [tool.codespell] skip = '.git,*.pdf,*.svg' -# -# 
ignore-words-list = '' +ignore-regex = '^\s*"image/\S+": ".*' +ignore-words-list = 'adn,astroid,trough' From 9cd44a9304e7737517b624718c54afc793d37a23 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 7 Aug 2023 11:27:16 -0400 Subject: [PATCH 4/6] [DATALAD RUNCMD] run codespell throughout but ignore fail === Do not change lines below === { "chain": [], "cmd": "codespell -w || :", "exit": 0, "extra_inputs": [], "inputs": [], "outputs": [], "pwd": "." } ^^^ Do not change lines above ^^^ --- README.md | 2 +- docs/index.md | 2 +- docs/notebooks/pynapple-core-notebook.ipynb | 4 ++-- docs/notebooks/pynapple-io-notebook.ipynb | 4 ++-- docs/notebooks/pynapple-old-io-notebook.ipynb | 8 ++++---- docs/notebooks/pynapple-quick-start.ipynb | 10 +++++----- docs/tutorials/tutorial_pynapple_core.py | 12 +++++------ .../tutorial_pynapple_quick_start.py | 10 +++++----- environment.yml | 2 +- pynapple/core/interval_set.py | 4 ++-- pynapple/core/time_series.py | 4 ++-- pynapple/core/ts_group.py | 20 +++++++++---------- pynapple/io/cnmfe.py | 6 +++--- pynapple/io/folder.py | 6 +++--- pynapple/io/interface_npz.py | 4 ++-- pynapple/io/interface_nwb.py | 6 +++--- pynapple/io/loader.py | 2 +- pynapple/io/misc.py | 2 +- pynapple/io/neurosuite.py | 4 ++-- pynapple/io/phy.py | 2 +- pynapple/io/suite2p.py | 2 +- pynapple/process/decoding.py | 2 +- pynapple/process/perievent.py | 6 +++--- pynapple/process/tuning_curves.py | 16 +++++++-------- template_loader.py | 2 +- tests/test_jitted.py | 2 +- 26 files changed, 72 insertions(+), 72 deletions(-) diff --git a/README.md b/README.md index 14fdc720..5fd22d0c 100644 --- a/README.md +++ b/README.md @@ -155,7 +155,7 @@ Special thanks to Francesco P. Battaglia *neuroseries* () packages, the latter constituting the core of *pynapple*. -This package was developped by Guillaume Viejo +This package was developed by Guillaume Viejo () and other members of the Peyrache Lab. Logo: Sofia Skromne Carrasco, 2021. 
diff --git a/docs/index.md b/docs/index.md index 3d7963ed..221f2eb1 100644 --- a/docs/index.md +++ b/docs/index.md @@ -148,7 +148,7 @@ Special thanks to Francesco P. Battaglia *neuroseries* () packages, the latter constituting the core of *pynapple*. -This package was developped by Guillaume Viejo +This package was developed by Guillaume Viejo () and other members of the Peyrache Lab. Logo: Sofia Skromne Carrasco, 2021. diff --git a/docs/notebooks/pynapple-core-notebook.ipynb b/docs/notebooks/pynapple-core-notebook.ipynb index 1a38db71..05ca5bf2 100644 --- a/docs/notebooks/pynapple-core-notebook.ipynb +++ b/docs/notebooks/pynapple-core-notebook.ipynb @@ -449,7 +449,7 @@ "source": [ "## Time support\n", "\n", - "A key feature of how pynapple manipulates time series is an inherent time support object defined for Ts, Tsd, TsdFrame and TsGroup objects. The time support object is defined as an IntervalSet that provides the time serie with a context. For example, the restrict operation will automatically update the time support object for the new time series. Ideally, the time support object should be defined for all time series when instantiating them. If no time series is given, the time support is inferred from the start and end of the time series. \n", + "A key feature of how pynapple manipulates time series is an inherent time support object defined for Ts, Tsd, TsdFrame and TsGroup objects. The time support object is defined as an IntervalSet that provides the time series with a context. For example, the restrict operation will automatically update the time support object for the new time series. Ideally, the time support object should be defined for all time series when instantiating them. If no time series is given, the time support is inferred from the start and end of the time series. \n", "\n", "In this example, a TsGroup is instantiated with and without a time support. 
Notice how the frequency of each Ts element is changed when the time support is defined explicitly." ] @@ -484,7 +484,7 @@ "source": [ "time_support = nap.IntervalSet(start = 0, end = 200, time_units = 's')\n", "\n", - "my_ts = {0:nap.Ts(t = np.sort(np.random.uniform(0, 100, 10)), time_units = 's'), # here a simple dictionnary\n", + "my_ts = {0:nap.Ts(t = np.sort(np.random.uniform(0, 100, 10)), time_units = 's'), # here a simple dictionary\n", " 1:nap.Ts(t = np.sort(np.random.uniform(0, 100, 20)), time_units = 's'),\n", " 2:nap.Ts(t = np.sort(np.random.uniform(0, 100, 30)), time_units = 's')}\n", "\n", diff --git a/docs/notebooks/pynapple-io-notebook.ipynb b/docs/notebooks/pynapple-io-notebook.ipynb index 3b231881..b81707c8 100644 --- a/docs/notebooks/pynapple-io-notebook.ipynb +++ b/docs/notebooks/pynapple-io-notebook.ipynb @@ -134,7 +134,7 @@ "source": [ "Here it shows all the subjects (in this case only A2929), all the sessions and all of the derivatives folders. It shows as well all the NPZ files that contains a pynapple object and the NWB files.\n", "\n", - "The object project behaves like a nested dictionnary. It is then easy to loop and navigate through a hierarchy of folders when doing analyses. In this case, we are gonna take only the session A2929-200711." + "The object project behaves like a nested dictionary. It is then easy to loop and navigate through a hierarchy of folders when doing analyses. In this case, we are gonna take only the session A2929-200711." ] }, { @@ -293,7 +293,7 @@ "source": [ "## 3. Metadata\n", "\n", - "A good practice for sharing datasets is to write as many metainformation as possible. Following BIDS specifications, any data files should be accompagned by a JSON sidecar file. " + "A good practice for sharing datasets is to write as many metainformation as possible. Following BIDS specifications, any data files should be accompanied by a JSON sidecar file. 
" ] }, { diff --git a/docs/notebooks/pynapple-old-io-notebook.ipynb b/docs/notebooks/pynapple-old-io-notebook.ipynb index c91bd122..7d5c97b7 100644 --- a/docs/notebooks/pynapple-old-io-notebook.ipynb +++ b/docs/notebooks/pynapple-old-io-notebook.ipynb @@ -9,7 +9,7 @@ "\n", "This script demonstrates how to load data in pynapple.\n", "\n", - "The general workflow of loading a session is described by the infographic below. As it is challenging to accomodate all possible types of format, we aimed to keep the IO of pynapple minimal while allowing the user to inherit the base loader and import their own custom io functions. The base loader is thus responsible for initializing the NWB file containing the tracking data, the epochs and the session informations. An example of a custom IO class is shown at the end of this tutorial.\n", + "The general workflow of loading a session is described by the infographic below. As it is challenging to accommodate all possible types of format, we aimed to keep the IO of pynapple minimal while allowing the user to inherit the base loader and import their own custom io functions. The base loader is thus responsible for initializing the NWB file containing the tracking data, the epochs and the session information. An example of a custom IO class is shown at the end of this tutorial.\n", "\n", "\n", "\n", @@ -81,10 +81,10 @@ "\n", "The tracking tab allows to load tracking data saved with a CSV file. Reading a CSV file is always a challenge when the header is unknown. The default csv file should contains only one row for the header with the column names. The first column should be the time index in seconds. Other formats are DeepLabCut and Optitrack.\n", "\n", - "Frame alignement can vary as well. Pynapple offers three ways to align the tracking frames :\n", + "Frame alignment can vary as well. 
Pynapple offers three ways to align the tracking frames :\n", "- Global timestamps: the time column of the CSV file contains the timestamps aligned to the global timeframe of the session.\n", "- Local timestamps: the time column of the CSV file contains the timestamps aligned to one epoch. In this case, the user should select which epoch.\n", - "- TTL detection: a binary file containing TTL pulses for each tracking frame is located within the folder and can be loaded. Alignement is made with TTL detection.\n", + "- TTL detection: a binary file containing TTL pulses for each tracking frame is located within the folder and can be loaded. Alignment is made with TTL detection.\n", "\n", "In this example session, Tracking was made with Optitrack and TTL pulses were written to an analogin file recorded by an Intan RHD2000 recording system. The parameters for the tracking tab are shown below.\n", "\n", @@ -244,7 +244,7 @@ " success = self.load_my_nwb(path)\n", " if success: loading_my_data = False\n", "\n", - " # Bypass if data have already been transfered to nwb\n", + " # Bypass if data have already been transferred to nwb\n", " if loading_my_data:\n", " self.load_my_data(path)\n", "\n", diff --git a/docs/notebooks/pynapple-quick-start.ipynb b/docs/notebooks/pynapple-quick-start.ipynb index 2627d624..ce30f5ac 100644 --- a/docs/notebooks/pynapple-quick-start.ipynb +++ b/docs/notebooks/pynapple-quick-start.ipynb @@ -18,8 +18,8 @@ "***\n", "\n", "This notebook is meant to provide an overview of pynapple by going through:\n", - "1. **Input output (IO)**. In this case, pynapple will load a session containing data processed with NeuroSuite and automatically create a [NWB file](https://pynwb.readthedocs.io/en/stable/). See this [notebook](https://peyrachelab.github.io/pynapple/notebooks/pynapple-io-notebook/) for more informations about IO and how to make a custom IO.\n", - "2. **Core functions** that handle time series, interval sets and groups of time series. 
See this [notebook](https://peyrachelab.github.io/pynapple/notebooks/pynapple-core-notebook/) for a detailled usage of the core functions.\n", + "1. **Input output (IO)**. In this case, pynapple will load a session containing data processed with NeuroSuite and automatically create a [NWB file](https://pynwb.readthedocs.io/en/stable/). See this [notebook](https://peyrachelab.github.io/pynapple/notebooks/pynapple-io-notebook/) for more information about IO and how to make a custom IO.\n", + "2. **Core functions** that handle time series, interval sets and groups of time series. See this [notebook](https://peyrachelab.github.io/pynapple/notebooks/pynapple-core-notebook/) for a detailed usage of the core functions.\n", "3. **Process functions**. A small collection of high-level functions widely used in system neuroscience. This [notebook](https://peyrachelab.github.io/pynapple/notebooks/pynapple-process-notebook/) details those functions. Examples of higher analysis can be found in the collaborative repository [pynacollada](https://github.com/PeyracheLab/pynacollada).\n", "\n" ] @@ -125,7 +125,7 @@ "source": [ "*spikes* is a [TsGroup](https://peyrachelab.github.io/pynapple/core.ts_group/) object. It allows to group together time series with different timestamps and couple metainformation to each neuron. In this case, the location of where the neuron was recorded has been added when loading the session for the first time.\n", "\n", - "In this case, the TsGroup holds 15 neurons and it is possible to access, similar to a dictionnary, the spike times of a single neuron: " + "In this case, the TsGroup holds 15 neurons and it is possible to access, similar to a dictionary, the spike times of a single neuron: " ] }, { @@ -340,7 +340,7 @@ "id": "2caf07b6", "metadata": {}, "source": [ - "The epochs above the threshold can be accessed through the time support of the Tsd object. The time support is an important concept in the pynapple package. 
It helps the user to define the epochs for which the time serie should be defined. By default, Ts, Tsd and TsGroup objects possess a time support (defined as an IntervalSet). It is recommended to pass the time support when instantiating one of those objects." + "The epochs above the threshold can be accessed through the time support of the Tsd object. The time support is an important concept in the pynapple package. It helps the user to define the epochs for which the time series should be defined. By default, Ts, Tsd and TsGroup objects possess a time support (defined as an IntervalSet). It is recommended to pass the time support when instantiating one of those objects." ] }, { @@ -811,7 +811,7 @@ "id": "fcdb8fa1", "metadata": {}, "source": [ - "From the previous figure, we can see that neurons 0 and 1 fires for opposite directions during wake. Therefore we expect their cross-correlograms to show a trough around 0 time lag, meaning those two neurons do not fire spikes together. A similar trough during sleep for the same pair thus indicates a persistence of their coordination even if the animal is not moving its head." + "From the previous figure, we can see that neurons 0 and 1 fires for opposite directions during wake. Therefore we expect their cross-correlograms to show a trough around 0 time lag, meaning those two neurons do not fire spikes together. A similar trough during sleep for the same pair thus indicates a persistence of their coordination even if the animal is not moving its head." ] }, { diff --git a/docs/tutorials/tutorial_pynapple_core.py b/docs/tutorials/tutorial_pynapple_core.py index 27db7a03..328e1d5f 100644 --- a/docs/tutorials/tutorial_pynapple_core.py +++ b/docs/tutorials/tutorial_pynapple_core.py @@ -79,17 +79,17 @@ # Multiple time series with different time stamps # (.i.e. a group of neurons with different spike times from one session) # can be grouped with the TsGroup object. 
-# The TsGroup behaves like a dictionnary but it is also possible to slice with a list of indexes +# The TsGroup behaves like a dictionary but it is also possible to slice with a list of indexes -my_ts = {0:nap.Ts(t = np.sort(np.random.uniform(0, 100, 1000)), time_units = 's'), # here a simple dictionnary +my_ts = {0:nap.Ts(t = np.sort(np.random.uniform(0, 100, 1000)), time_units = 's'), # here a simple dictionary 1:nap.Ts(t = np.sort(np.random.uniform(0, 100, 2000)), time_units = 's'), 2:nap.Ts(t = np.sort(np.random.uniform(0, 100, 3000)), time_units = 's')} tsgroup = nap.TsGroup(my_ts) print(tsgroup, '\n') -print(tsgroup[0], '\n') # dictionnary like indexing returns directly the Ts object +print(tsgroup[0], '\n') # dictionary like indexing returns directly the Ts object print(tsgroup[[0,2]]) # list like indexing # Operations such as restrict can thus be directly applied to the TsGroup as well as other operations. @@ -118,15 +118,15 @@ # ## Time support # A key element of the manipulation of time series by pynapple is the inherent time support defined for Ts, Tsd, TsdFrame and TsGroup objects. -# The time support is defined as an IntervalSet that provides the time serie with a context. +# The time support is defined as an IntervalSet that provides the time series with a context. # For example,, the restrict operation will update automatically the time support to the new time series. # Ideally the time support should be defined for all time series when instantiating them. # If no time series is given, the time support is inferred from the start and end of the time series. -# In this example, a TsGroup is instantiated with and without a time support. Notice how the frequency of each Ts element is changed when the time support is defined explicitely. +# In this example, a TsGroup is instantiated with and without a time support. Notice how the frequency of each Ts element is changed when the time support is defined explicitly. 
time_support = nap.IntervalSet(start = 0, end = 100, time_units = 's') -my_ts = {0:nap.Ts(t = np.sort(np.random.uniform(0, 100, 10)), time_units = 's'), # here a simple dictionnary +my_ts = {0:nap.Ts(t = np.sort(np.random.uniform(0, 100, 10)), time_units = 's'), # here a simple dictionary 1:nap.Ts(t = np.sort(np.random.uniform(0, 100, 20)), time_units = 's'), 2:nap.Ts(t = np.sort(np.random.uniform(0, 100, 30)), time_units = 's')} diff --git a/docs/tutorials/tutorial_pynapple_quick_start.py b/docs/tutorials/tutorial_pynapple_quick_start.py index 5b288f49..00e298d0 100644 --- a/docs/tutorials/tutorial_pynapple_quick_start.py +++ b/docs/tutorials/tutorial_pynapple_quick_start.py @@ -42,7 +42,7 @@ # The first step is to load the session with the function *load_session*. # When loading a session for the first time, pynapple will show a GUI # in order for the user to provide the information about the session, the subject, the tracking, the epochs and the neuronal data. -# When informations has been entered, a [NWB file](https://pynwb.readthedocs.io/en/stable/) is created. +# When information has been entered, a [NWB file](https://pynwb.readthedocs.io/en/stable/) is created. # In this example dataset, the NWB file already exists. data = nap.load_session(data_directory, 'neurosuite') @@ -57,10 +57,10 @@ # *spikes* is a TsGroup object. # It allows to group together time series with different timestamps and associate metainformation about each neuron. -# Under the hood, it wraps a dictionnary. +# Under the hood, it wraps a dictionary. # In this case, the location of where the neuron was recorded has been added when loading the session for the first time. 
# -
# In this case it holds 15 neurons and it is possible to access, similar to a dictionnary, the spike times of a single neuron: +# In this case it holds 15 neurons and it is possible to access, similar to a dictionary, the spike times of a single neuron: neuron_0 = spikes[0] neuron_0 @@ -112,7 +112,7 @@ # The epochs above the threshold can be accessed through the time support of the Tsd object. # The time support is an important concept in the pynapple package. -# It helps the user to define the epochs for which the time serie should be defined. +# It helps the user to define the epochs for which the time series should be defined. # By default, Ts, Tsd and TsGroup objects possess a time support (defined as an IntervalSet). # It is recommended to pass the time support when instantiating one of those objects. @@ -277,7 +277,7 @@ plt.figure(figsize=(16,5)) plt.subplot(211) # create a raster plot -# To create a raster plot, we can convert the tsgroup to a tsd and assing to each spikes the order of the peaks +# To create a raster plot, we can convert the tsgroup to a tsd and assign to each spikes the order of the peaks spikes_adn_tsd = spikes_adn.to_tsd("order") # And plot it plt.plot(spikes_adn_tsd.restrict(subep), '|', markersize = 10) diff --git a/environment.yml b/environment.yml index b9a7aec4..0fbc08c2 100644 --- a/environment.yml +++ b/environment.yml @@ -148,7 +148,7 @@ dependencies: - pip: - alabaster==0.7.12 - arrow==1.2.1 - - astroid==2.6.6 + - astroid==2.6.6 - astunparse==1.6.3 - atomicwrites==1.4.0 - autopep8==1.5.7 diff --git a/pynapple/core/interval_set.py b/pynapple/core/interval_set.py index 1a67b1ee..82d98af4 100755 --- a/pynapple/core/interval_set.py +++ b/pynapple/core/interval_set.py @@ -324,7 +324,7 @@ def drop_short_intervals(self, threshold, time_units="s"): threshold : numeric Time threshold for "short" intervals time_units : None, optional - The time units for the treshold ('us', 'ms', 's' [default]) + The time units for the threshold ('us', 
'ms', 's' [default]) Returns ------- @@ -347,7 +347,7 @@ def drop_long_intervals(self, threshold, time_units="s"): threshold : numeric Time threshold for "long" intervals time_units : None, optional - The time units for the treshold ('us', 'ms', 's' [default]) + The time units for the threshold ('us', 'ms', 's' [default]) Returns ------- diff --git a/pynapple/core/time_series.py b/pynapple/core/time_series.py index bf572f8d..468ba2c6 100755 --- a/pynapple/core/time_series.py +++ b/pynapple/core/time_series.py @@ -330,11 +330,11 @@ def restrict(self, ep): def count(self, *args, **kwargs): """ - Count occurences of events within bin_size or within a set of bins defined as an IntervalSet. + Count occurrences of events within bin_size or within a set of bins defined as an IntervalSet. You can call this function in multiple ways : 1. *tsd.count(bin_size=1, time_units = 'ms')* - -> Count occurence of events within a 1 ms bin defined on the time support of the object. + -> Count occurrence of events within a 1 ms bin defined on the time support of the object. 2. *tsd.count(1, ep=my_epochs)* -> Count occurent of events within a 1 second bin defined on the IntervalSet my_epochs. diff --git a/pynapple/core/ts_group.py b/pynapple/core/ts_group.py index c1861afa..9ab9a9d6 100644 --- a/pynapple/core/ts_group.py +++ b/pynapple/core/ts_group.py @@ -77,7 +77,7 @@ def __init__( Parameters ---------- data : dict - Dictionnary containing Ts/Tsd objects + Dictionary containing Ts/Tsd objects time_support : IntervalSet, optional The time support of the TsGroup. Ts/Tsd objects will be restricted to the time support if passed. If no time support is specified, TsGroup will merge time supports from all the Ts/Tsd objects in data. @@ -88,7 +88,7 @@ def __init__( Useful to speed up initialization of TsGroup when Ts/Tsd objects have already been restricted beforehand **kwargs Meta-info about the Ts/Tsd objects. Can be either pandas.Series or numpy.ndarray. 
- Note that the index should match the index of the input dictionnary. + Note that the index should match the index of the input dictionary. Raises ------ @@ -244,7 +244,7 @@ def metadata_columns(self): def set_info(self, *args, **kwargs): """ - Add metadata informations about the TsGroup. + Add metadata information about the TsGroup. Metadata are saved as a DataFrame. Parameters @@ -432,11 +432,11 @@ def value_from(self, tsd, ep=None): def count(self, *args, **kwargs): """ - Count occurences of events within bin_size or within a set of bins defined as an IntervalSet. + Count occurrences of events within bin_size or within a set of bins defined as an IntervalSet. You can call this function in multiple ways : 1. *tsgroup.count(bin_size=1, time_units = 'ms')* - -> Count occurence of events within a 1 ms bin defined on the time support of the object. + -> Count occurrence of events within a 1 ms bin defined on the time support of the object. 2. *tsgroup.count(1, ep=my_epochs)* -> Count occurent of events within a 1 second bin defined on the IntervalSet my_epochs. @@ -706,7 +706,7 @@ def getby_threshold(self, key, thr, op=">"): Raises ------ RuntimeError - Raise eror is operation is not recognized. + Raise error is operation is not recognized. Examples -------- @@ -723,7 +723,7 @@ def getby_threshold(self, key, thr, op=">"): 1 2 2 4 - This exemple shows how to get a new TsGroup with all elements for which the metainfo frequency is above 1. + This example shows how to get a new TsGroup with all elements for which the metainfo frequency is above 1. >>> newtsgroup = tsgroup.getby_threshold('freq', 1, op = '>') Index Freq. (Hz) ------- ------------ @@ -777,7 +777,7 @@ def getby_intervals(self, key, bins): 1 2 1 2 4 2 - This exemple shows how to bin the TsGroup according to one metainfo key. + This example shows how to bin the TsGroup according to one metainfo key. >>> newtsgroup, bincenter = tsgroup.getby_intervals('alpha', [0, 1, 2]) >>> newtsgroup [ Index Freq. 
(Hz) alpha ------- ------------ ------- 0 1 0 1 2 1 [ Index Freq. (Hz) alpha ------- ------------ ------- 2 4 2 @@ -812,7 +812,7 @@ Returns ------- dict - A dictionnary of TsGroup + A dictionary of TsGroup Examples -------- @@ -829,7 +829,7 @@ 1 2 1 2 4 1 - This exemple shows how to group the TsGroup according to one metainfo key. + This example shows how to group the TsGroup according to one metainfo key. >>> newtsgroup = tsgroup.getby_category('group') >>> newtsgroup {0: Index Freq. (Hz) group diff --git a/pynapple/io/cnmfe.py b/pynapple/io/cnmfe.py index 0a822a5b..e083f7c3 100644 --- a/pynapple/io/cnmfe.py +++ b/pynapple/io/cnmfe.py @@ -67,7 +67,7 @@ def __init__(self, path): if success: loading_my_data = False - # Bypass if data have already been transfered to nwb + # Bypass if data have already been transferred to nwb if loading_my_data: app = App() window = OphysGUI(app, path=path) @@ -274,7 +274,7 @@ def __init__(self, path): if success: loading_my_data = False - # Bypass if data have already been transfered to nwb + # Bypass if data have already been transferred to nwb if loading_my_data: app = App() window = OphysGUI(app, path=path) @@ -485,7 +485,7 @@ def __init__(self, path): if success: loading_my_data = False - # Bypass if data have already been transfered to nwb + # Bypass if data have already been transferred to nwb if loading_my_data: app = App() window = OphysGUI(app, path=path) diff --git a/pynapple/io/folder.py b/pynapple/io/folder.py index 499cd054..415d4d78 100644 --- a/pynapple/io/folder.py +++ b/pynapple/io/folder.py @@ -86,7 +86,7 @@ class Folder(UserDict): Attributes ---------- data : dict - Dictionnary holidng all the pynapple objects found in the folder. + Dictionary holding all the pynapple objects found in the folder. 
name : str Name of the folder npz_files : list @@ -96,7 +96,7 @@ class Folder(UserDict): path : str Absolute path of the folder subfolds : dict - Dictionnary of all the subfolders + Dictionary of all the subfolders """ @@ -169,7 +169,7 @@ def __getitem__(self, key): Raises ------ KeyError - If key is not in the dictionnary + If key is not in the dictionary """ if key.__hash__: if self.__contains__(key): diff --git a/pynapple/io/interface_npz.py b/pynapple/io/interface_npz.py index a94014a4..7e7c4a04 100644 --- a/pynapple/io/interface_npz.py +++ b/pynapple/io/interface_npz.py @@ -9,7 +9,7 @@ """ File classes help to validate and load pynapple objects or NWB files. Data are always lazy-loaded. -Both classes behaves like dictionnary. +Both classes behaves like dictionary. """ import os @@ -49,7 +49,7 @@ def __init__(self, path): self.file = np.load(self.path, allow_pickle=True) self.type = "" - # First check if type is explicitely defined + # First check if type is explicitly defined possible = ["Ts", "Tsd", "TsdFrame", "TsGroup", "IntervalSet"] if "type" in self.file.keys(): if len(self.file["type"]) == 1: diff --git a/pynapple/io/interface_nwb.py b/pynapple/io/interface_nwb.py index 9411b9af..3780fb76 100644 --- a/pynapple/io/interface_nwb.py +++ b/pynapple/io/interface_nwb.py @@ -7,7 +7,7 @@ """ Pynapple class to interface with NWB files. Data are always lazy-loaded. -Object behaves like dictionnary. +Object behaves like dictionary. """ import errno @@ -35,7 +35,7 @@ def _extract_compatible_data_from_nwbfile(nwbfile): Returns ------- dict - Dictionnary containing all the object found and their type in pynapple. + Dictionary containing all the object found and their type in pynapple. 
""" data = {} @@ -359,7 +359,7 @@ def __getitem__(self, key): Raises ------ KeyError - If key is not in the dictionnary + If key is not in the dictionary """ if key.__hash__: if self.__contains__(key): diff --git a/pynapple/io/loader.py b/pynapple/io/loader.py index 5915efc3..1d121524 100644 --- a/pynapple/io/loader.py +++ b/pynapple/io/loader.py @@ -157,7 +157,7 @@ def load_optitrack_csv(self, csv_file): cols.append(n) if len(order) == 0: raise RuntimeError( - "Unknow tracking format for csv file {}".format(csv_file) + "Unknown tracking format for csv file {}".format(csv_file) ) position = position[cols] position.columns = order diff --git a/pynapple/io/misc.py b/pynapple/io/misc.py index 1bec674f..14186a8a 100644 --- a/pynapple/io/misc.py +++ b/pynapple/io/misc.py @@ -180,7 +180,7 @@ def load_eeg( Deleted Parameters ------------------ extension : str, optional - The file extenstion (.eeg, .dat, .lfp). Make sure the frequency match + The file extension (.eeg, .dat, .lfp). Make sure the frequency match """ # Need to check if a xml file exists diff --git a/pynapple/io/neurosuite.py b/pynapple/io/neurosuite.py index 392cef53..e2e550b8 100755 --- a/pynapple/io/neurosuite.py +++ b/pynapple/io/neurosuite.py @@ -54,7 +54,7 @@ def __init__(self, path): if success: loading_neurosuite = False - # Bypass if data have already been transfered to nwb + # Bypass if data have already been transferred to nwb if loading_neurosuite: self.load_neurosuite_xml(path) # print("XML loaded") @@ -352,7 +352,7 @@ def load_lfp( channel : int or list of int, optional The channel(s) to load. If None return a memory map of the dat file to avoid memory error extension : str, optional - The file extenstion (.eeg, .dat, .lfp). Make sure the frequency match + The file extension (.eeg, .dat, .lfp). 
Make sure the frequency match frequency : float, optional Default 1250 Hz for the eeg file precision : str, optional diff --git a/pynapple/io/phy.py b/pynapple/io/phy.py index 7e12219a..4fecf679 100644 --- a/pynapple/io/phy.py +++ b/pynapple/io/phy.py @@ -346,7 +346,7 @@ def load_lfp( channel : int or list of int, optional The channel(s) to load. If None return a memory map of the dat file to avoid memory error extension : str, optional - The file extenstion (.eeg, .dat, .lfp). Make sure the frequency match + The file extension (.eeg, .dat, .lfp). Make sure the frequency match frequency : float, optional Default 1250 Hz for the eeg file precision : str, optional diff --git a/pynapple/io/suite2p.py b/pynapple/io/suite2p.py index 904c8415..7c840586 100644 --- a/pynapple/io/suite2p.py +++ b/pynapple/io/suite2p.py @@ -84,7 +84,7 @@ def __init__(self, path): if success: loading_my_data = False - # Bypass if data have already been transfered to nwb + # Bypass if data have already been transferred to nwb if loading_my_data: app = App() window = OphysGUI(app, path=path) diff --git a/pynapple/process/decoding.py b/pynapple/process/decoding.py index c217f3e9..b47aff37 100644 --- a/pynapple/process/decoding.py +++ b/pynapple/process/decoding.py @@ -128,7 +128,7 @@ def decode_2d(tuning_curves, group, ep, bin_size, xy, time_units="s", features=N tuning_curves : dict Dictionnay of 2d tuning curves (one for each neuron). group : TsGroup or dict of Ts/Tsd object. - A group of neurons with the same keys as tuning_curves dictionnary. + A group of neurons with the same keys as tuning_curves dictionary. ep : IntervalSet The epoch on which decoding is computed bin_size : float diff --git a/pynapple/process/perievent.py b/pynapple/process/perievent.py index 37e7e98c..c46ea05b 100644 --- a/pynapple/process/perievent.py +++ b/pynapple/process/perievent.py @@ -62,7 +62,7 @@ def compute_perievent(data, tref, minmax, time_unit="s"): data : Ts/Tsd/TsGroup The data to align to tref. 
If Ts/Tsd, returns a TsGroup. - If TsGroup, returns a dictionnary of TsGroup + If TsGroup, returns a dictionary of TsGroup tref : Ts/Tsd The timestamps of the event to align to minmax : tuple or int or float @@ -74,7 +74,7 @@ def compute_perievent(data, tref, minmax, time_unit="s"): ------- dict A TsGroup if data is a Ts/Tsd or - a dictionnary of TsGroup if data is a TsGroup. + a dictionary of TsGroup if data is a TsGroup. Raises ------ @@ -112,7 +112,7 @@ def compute_event_trigger_average( """ Bin the spike train in binsize and compute the Spike Trigger Average (STA) within windowsize. If C is the spike count matrix and feature is a Tsd array, the function computes - the Hankel matrix H from windowsize=(-t1,+t2) by offseting the Tsd array. + the Hankel matrix H from windowsize=(-t1,+t2) by offsetting the Tsd array. The STA is then defined as the dot product between H and C divided by the number of spikes. diff --git a/pynapple/process/tuning_curves.py b/pynapple/process/tuning_curves.py index 431ef646..64bdd981 100644 --- a/pynapple/process/tuning_curves.py +++ b/pynapple/process/tuning_curves.py @@ -17,12 +17,12 @@ def compute_discrete_tuning_curves(group, dict_ep): """ - Compute discrete tuning curves of a TsGroup using a dictionnary of epochs. - The function returns a pandas DataFrame with each row being a key of the dictionnary of epochs + Compute discrete tuning curves of a TsGroup using a dictionary of epochs. + The function returns a pandas DataFrame with each row being a key of the dictionary of epochs and each column being a neurons. This function can typically being used for a set of stimulus being presented for multiple epochs. 
- An example of the dictionnary is : + An example of the dictionary is : >>> dict_ep = { "stim0": nap.IntervalSet(start=0, end=1), @@ -154,7 +154,7 @@ def compute_2d_tuning_curves(group, feature, nb_bins, ep=None, minmax=None): ------- tuple A tuple containing: \n - tc (dict): Dictionnary of the tuning curves with dimensions (nb_bins, nb_bins).\n + tc (dict): Dictionary of the tuning curves with dimensions (nb_bins, nb_bins).\n xy (list): List of bins center in the two dimensions Raises @@ -356,7 +356,7 @@ def compute_1d_tuning_curves_continous( tsdframe, feature, nb_bins, ep=None, minmax=None ): """ - Computes 1-dimensional tuning curves relative to a feature with continous data. + Computes 1-dimensional tuning curves relative to a feature with continuous data. Parameters ---------- @@ -414,7 +414,7 @@ def compute_2d_tuning_curves_continuous( tsdframe, features, nb_bins, ep=None, minmax=None ): """ - Computes 2-dimensional tuning curves relative to a 2d feature with continous data. + Computes 2-dimensional tuning curves relative to a 2d feature with continuous data. 
Parameters ---------- @@ -437,7 +437,7 @@ def compute_2d_tuning_curves_continuous( ------- tuple A tuple containing: \n - tc (dict): Dictionnary of the tuning curves with dimensions (nb_bins, nb_bins).\n + tc (dict): Dictionary of the tuning curves with dimensions (nb_bins, nb_bins).\n xy (list): List of bins center in the two dimensions Raises @@ -517,7 +517,7 @@ def compute_1d_poisson_glm( windowsize : Float The window for offsetting the regressors ep : IntervalSet, optional - On which epoch to perfom the GLM + On which epoch to perform the GLM time_units : str, optional Time units of binsize and windowsize niter : int, optional diff --git a/template_loader.py b/template_loader.py index 4b2e56f1..4a58aba9 100644 --- a/template_loader.py +++ b/template_loader.py @@ -38,7 +38,7 @@ def __init__(self, path): if success: loading_my_data = False - # Bypass if data have already been transfered to nwb + # Bypass if data have already been transferred to nwb if loading_my_data: self.load_my_data(path) diff --git a/tests/test_jitted.py b/tests/test_jitted.py index c52dc36c..ec5b3a5b 100644 --- a/tests/test_jitted.py +++ b/tests/test_jitted.py @@ -44,7 +44,7 @@ def get_example_isets(n=100): def restrict(ep, tsd): bins = ep.values.ravel() - # Because yes there is no funtion with both bounds closed as an option + # Because yes there is no function with both bounds closed as an option ix = np.array( pd.cut(tsd.index, bins, labels=np.arange(len(bins) - 1, dtype=np.float64)) ) From e2b831b447c1250617800283c2d843da9917d83a Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 7 Aug 2023 11:28:35 -0400 Subject: [PATCH 5/6] Add codespell into tox --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index f5924f52..57d34fd6 100755 --- a/tox.ini +++ b/tox.ini @@ -10,11 +10,13 @@ deps = isort pytest coverage + codespell commands = black --check pynapple isort --check pynapple flake8 pynapple --max-complexity 10 + codespell #pytest tests/ coverage run 
--source=pynapple --branch -m pytest tests/ coverage report -m From 4cc57834303ce958722382012aa737d9cc411928 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 7 Aug 2023 11:29:22 -0400 Subject: [PATCH 6/6] [DATALAD RUNCMD] Do interactive fixing of some ambigous typos === Do not change lines below === { "chain": [], "cmd": "codespell -w -i 3 -C 2", "exit": 0, "extra_inputs": [], "inputs": [], "outputs": [], "pwd": "." } ^^^ Do not change lines above ^^^ --- docs/CONTRIBUTING.md | 2 +- docs/HISTORY.md | 2 +- pynapple/core/interval_set.py | 2 +- pynapple/io/misc.py | 2 +- pynapple/process/tuning_curves.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index c854a4e5..dab0c2db 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -98,7 +98,7 @@ Before you submit a pull request, check that it meets these guidelines: 1. The pull request should include tests. 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add - the feature to the list in README.nd. + the feature to the list in README.md. 3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy.