Commit
Merge pull request #1365 from NeurodataWithoutBorders/rc/2.0.0
rly authored Aug 10, 2021
2 parents 441c64f + 0ef954f commit c0ad3e7
Showing 91 changed files with 4,797 additions and 481 deletions.
142 changes: 52 additions & 90 deletions .circleci/config.yml
@@ -38,13 +38,6 @@ references:
username: hdmf
password: $DOCKERHUB_PASSWORD

py36: &py36
docker:
- image: circleci/python:3.6.13-buster
auth:
username: hdmf
password: $DOCKERHUB_PASSWORD

conda-image: &conda-image
docker:
- image: continuumio/miniconda3:4.9.2
@@ -161,14 +154,6 @@ jobs:
- run:
<<: *run-style-check

python36:
<<: *py36
environment:
- TEST_TOX_ENV: "py36"
- BUILD_TOX_ENV: "build-py36"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
<<: *ci-steps

python37:
<<: *py37
environment:
Expand All @@ -183,7 +168,6 @@ jobs:
- TEST_TOX_ENV: "py38"
- BUILD_TOX_ENV: "build-py38"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
- UPLOAD_WHEELS: "true"
<<: *ci-steps

python39:
Expand All @@ -192,41 +176,33 @@ jobs:
- TEST_TOX_ENV: "py39"
- BUILD_TOX_ENV: "build-py39"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
- UPLOAD_WHEELS: "true" # upload distributions from only this job to pypi
<<: *ci-steps

python38-upgrade-dev:
<<: *py38
python39-upgrade-dev:
<<: *py39
environment:
- TEST_TOX_ENV: "py38-upgrade-dev"
- BUILD_TOX_ENV: "build-py38-upgrade-dev"
- TEST_TOX_ENV: "py39-upgrade-dev"
- BUILD_TOX_ENV: "build-py39-upgrade-dev"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
<<: *ci-steps

python38-upgrade-dev-pre:
<<: *py38
python39-upgrade-dev-pre:
<<: *py39
environment:
- TEST_TOX_ENV: "py38-upgrade-dev-pre"
- BUILD_TOX_ENV: "build-py38-upgrade-dev-pre"
- TEST_TOX_ENV: "py39-upgrade-dev-pre"
- BUILD_TOX_ENV: "build-py39-upgrade-dev-pre"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
<<: *ci-steps

python36-min-req:
<<: *py36
python37-min-req:
<<: *py37
environment:
- TEST_TOX_ENV: "py36-min-req"
- BUILD_TOX_ENV: "build-py36-min-req"
- TEST_TOX_ENV: "py37-min-req"
- BUILD_TOX_ENV: "build-py37-min-req"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
<<: *ci-steps

miniconda36:
<<: *conda-image
environment:
- CONDA_PYTHON_VER: "3.6.*=*_cpython" # avoid using pypy compiler
- TEST_TOX_ENV: "py36"
- BUILD_TOX_ENV: "build-py36"
- TEST_WHEELINSTALL_ENV: "wheelinstall"
<<: *conda-steps

miniconda37:
<<: *conda-image
environment:
@@ -254,12 +230,6 @@ jobs:
- TEST_WHEELINSTALL_ENV: "wheelinstall"
<<: *conda-steps

gallery36:
<<: *py36
environment:
- TEST_TOX_ENV: "gallery-py36"
<<: *gallery-steps

gallery37:
<<: *py37
environment:
Expand All @@ -278,26 +248,26 @@ jobs:
- TEST_TOX_ENV: "gallery-py39"
<<: *gallery-steps

gallery38-upgrade-dev:
<<: *py38
gallery39-upgrade-dev:
<<: *py39
environment:
- TEST_TOX_ENV: "gallery-py38-upgrade-dev"
- TEST_TOX_ENV: "gallery-py39-upgrade-dev"
<<: *gallery-steps

gallery38-upgrade-dev-pre:
<<: *py38
gallery39-upgrade-dev-pre:
<<: *py39
environment:
- TEST_TOX_ENV: "gallery-py38-upgrade-dev-pre"
- TEST_TOX_ENV: "gallery-py39-upgrade-dev-pre"
<<: *gallery-steps

gallery36-min-req:
<<: *py36
gallery37-min-req:
<<: *py37
environment:
- TEST_TOX_ENV: "gallery-py36-min-req"
- TEST_TOX_ENV: "gallery-py37-min-req"
<<: *gallery-steps

test-validation:
<<: *py38
<<: *py39
steps:
- checkout
- run: git submodule sync
Expand All @@ -309,13 +279,13 @@ jobs:
command: |
. ../venv/bin/activate
pip install tox
tox -e validation-py38
tox -e validation-py39
# Install is expected to be quick. Increase timeout in case there are some network issues.
# pip install of tox produces no output by default, and Circle thinks the task is dead after 10 min of silence.
no_output_timeout: 30m

deploy-dev:
<<: *py38
<<: *py39
steps:
- checkout
- attach_workspace:
Expand All @@ -336,7 +306,7 @@ jobs:
--exit-success-if-missing-token
deploy-release:
<<: *py38
<<: *py39
steps:
- attach_workspace:
at: ./
@@ -366,27 +336,27 @@ workflows:
jobs:
- flake8:
<<: *no_filters
- python38:
- python37-min-req:
<<: *no_filters
- python36-min-req:
- python39:
<<: *no_filters
- miniconda36:
- miniconda37:
<<: *no_filters
- miniconda38:
- miniconda39:
<<: *no_filters
- gallery38:
- gallery37-min-req:
<<: *no_filters
- gallery36-min-req:
- gallery38: # TODO replace with gallery39 after allensdk supports py39
<<: *no_filters
- deploy-dev:
requires:
- flake8
- python38
- python36-min-req
- miniconda36
- miniconda38
- gallery38
- gallery36-min-req
- python37-min-req
- python39
- miniconda37
- miniconda39
- gallery37-min-req
- gallery38 # gallery39
filters:
tags:
ignore:
Expand All @@ -400,12 +370,12 @@ workflows:
- deploy-release:
requires:
- flake8
- python38
- python36-min-req
- miniconda36
- miniconda38
- gallery38
- gallery36-min-req
- python37-min-req
- python39
- miniconda37
- miniconda39
- gallery37-min-req
- gallery38 # gallery39
filters:
tags:
only: /^[0-9]+(\.[0-9]+)*(\.post[0-9]+)?$/
Expand All @@ -424,41 +394,33 @@ workflows:
jobs:
- flake8:
<<: *no_filters
- python36:
<<: *no_filters
- python37:
<<: *no_filters
- python37-min-req:
<<: *no_filters
- python38:
<<: *no_filters
- python39:
<<: *no_filters
- python38-upgrade-dev:
<<: *no_filters
- python36-min-req:
- python39-upgrade-dev:
<<: *no_filters
- miniconda36:
- python39-upgrade-dev-pre:
<<: *no_filters
- miniconda37:
<<: *no_filters
- miniconda38:
<<: *no_filters
- miniconda39:
<<: *no_filters
- gallery36:
<<: *no_filters
- gallery37:
<<: *no_filters
- gallery37-min-req:
<<: *no_filters
- gallery38:
<<: *no_filters
- gallery39:
<<: *no_filters
- gallery38-upgrade-dev:
<<: *no_filters
- gallery36-min-req:
<<: *no_filters
- python38-upgrade-dev-pre:
<<: *no_filters
- gallery38-upgrade-dev-pre:
- gallery39-upgrade-dev:
<<: *no_filters
- test-validation:
- gallery39-upgrade-dev-pre:
<<: *no_filters
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/bug_report.md
@@ -20,7 +20,7 @@ assignees: ''
<!--Please describe your environment according to the following bullet points.-->

Python Executable: Conda or Python
Python Version: Python 3.6, 3.7, or 3.8
Python Version: Python 3.7, 3.8, or 3.9
Operating System: Windows, macOS or Linux
HDMF Version:
PyNWB Version:
4 changes: 2 additions & 2 deletions .github/workflows/workflow.yml
@@ -15,7 +15,7 @@ jobs:
os: [ubuntu-latest, macos-latest, windows-latest]
env:
OS: ${{ matrix.os }}
PYTHON: '3.8'
PYTHON: '3.9'
steps:
- name: Cancel Workflow Action
uses: styfle/cancel-workflow-action@0.6.0
Expand All @@ -27,7 +27,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
56 changes: 48 additions & 8 deletions CHANGELOG.md
@@ -1,19 +1,59 @@
# PyNWB Changelog

## PyNWB 1.6.0 (TBD, 2021)
## PyNWB 2.0.0 (Upcoming)

### Breaking changes:
- ``SweepTable`` has been deprecated in favor of the new icephys metadata tables. Use of ``SweepTable``
is still possible but no longer recommended. @oruebel (#1349)
- ``TimeSeries.__init__`` now requires the ``data`` argument because the 'data' dataset is required by the schema.
If a ``TimeSeries`` is read without a value for ``data``, it will be set to a default value. For most
``TimeSeries``, this is a 1-dimensional empty array with dtype uint8. For ``ImageSeries`` and
``DecompositionSeries``, this is a 3-dimensional empty array with dtype uint8. @rly (#1274)
- ``TimeSeries.__init__`` now requires the ``unit`` argument because the 'unit' attribute is required by the schema.
If a ``TimeSeries`` is read without a value for ``unit``, it will be set to a default value. For most
``TimeSeries``, this is "unknown". For ``IndexSeries``, this is "N/A" according to the NWB 2.4.0 schema. @rly (#1274)
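
To make the new requirements concrete, here is a minimal construction sketch (not taken from this commit; names and values are illustrative) assuming PyNWB 2.0 is installed:

```python
# Minimal sketch: under PyNWB 2.0, `data` and `unit` must be passed explicitly
# when constructing a TimeSeries. Names and values here are illustrative only.
from pynwb import TimeSeries

ts = TimeSeries(
    name="example_timeseries",
    data=[1.0, 2.0, 3.0],   # the 'data' dataset is required by the schema
    unit="volts",           # the 'unit' attribute is required by the schema
    starting_time=0.0,
    rate=10.0,              # regularly sampled at 10 Hz
)
```

Files written before 2.0 that lack these values remain readable; the defaults described above are filled in on read.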

## Minor new features:
### New features:
- Added new intracellular electrophysiology hierarchical table structure from ndx-icephys-meta to NWB core.
This includes the new types ``TimeSeriesReferenceVectorData``, ``IntracellularRecordingsTable``,
``SimultaneousRecordingsTable``, ``SequentialRecordingsTable``, ``RepetitionsTable`` and
``ExperimentalConditionsTable`` as well as corresponding updates to ``NWBFile`` to support interaction
with the new tables. @oruebel (#1349)
- Added support for NWB 2.4.0. See [Release Notes](https://nwb-schema.readthedocs.io/en/latest/format_release_notes.html)
for more details. @oruebel, @rly (#1349)
- Dropped Python 3.6 support, added Python 3.9 support. @rly (#1377)
- Updated requirements to allow compatibility with HDMF 3 and h5py 3. @rly (#1377)
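
As a rough orientation to how the new tables fit together, the sketch below builds a single recording and groups it through each level. It assumes the ``NWBFile`` helper methods added for these tables in #1349; all names and values are illustrative:

```python
# Hedged sketch of the new icephys metadata table hierarchy (PyNWB >= 2.0 assumed).
from datetime import datetime, timezone
import numpy as np
from pynwb import NWBFile
from pynwb.icephys import VoltageClampStimulusSeries, VoltageClampSeries

nwbfile = NWBFile(
    session_description="icephys metadata table demo",
    identifier="demo-icephys",
    session_start_time=datetime(2021, 8, 10, tzinfo=timezone.utc),
)
device = nwbfile.create_device(name="demo_amplifier")
electrode = nwbfile.create_icephys_electrode(
    name="elec0", device=device, description="demo intracellular electrode"
)

stimulus = VoltageClampStimulusSeries(
    name="stim0", data=np.zeros(100), starting_time=0.0, rate=20000.0,
    electrode=electrode, gain=0.02,
)
response = VoltageClampSeries(
    name="resp0", data=np.zeros(100), starting_time=0.0, rate=20000.0,
    electrode=electrode, gain=0.02,
)

# Each call fills one of the new tables and returns a row index that the next
# level references, replacing the bookkeeping previously done with SweepTable.
ir_index = nwbfile.add_intracellular_recording(
    electrode=electrode, stimulus=stimulus, response=response
)
sim_index = nwbfile.add_icephys_simultaneous_recording(recordings=[ir_index])
seq_index = nwbfile.add_icephys_sequential_recording(
    simultaneous_recordings=[sim_index], stimulus_type="step"
)
rep_index = nwbfile.add_icephys_repetition(sequential_recordings=[seq_index])
nwbfile.add_icephys_experimental_condition(repetitions=[rep_index])
```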

### Tutorial enhancements:
- Added new tutorial for intracellular electrophysiology to describe the use of the new metadata tables
and declared the previous tutorial using ``SweepTable`` as deprecated. @oruebel (#1349)
- Added new tutorial for querying intracellular electrophysiology metadata
(``docs/gallery/domain/plot_icephys_pandas.py``). @oruebel (#1349, #1383)
- Added thumbnails for tutorials to improve presentation of online docs. @oruebel (#1349)
- Used `sphinx.ext.extlinks` extension in docs to simplify linking to common targets. @oruebel (#1349)
- Created new section for advanced I/O tutorials and moved parallel I/O tutorial to its own file. @oruebel (#1349)

### Minor new features:
- Add RRID for citing PyNWB to the docs. @oruebel (#1372)
- Update CI and tests to handle deprecations in libraries. @rly (#1377)
- Add test utilities for icephys (``pynwb.testing.icephys_testutils``) to ease creation of test data
for tests and tutorials. @oruebel (#1349, #1383)

## Bug fix:
- Enforce electrode ID uniqueness during insertion into table. @CodyCBakerPhD (#1344)
- Fix integration tests with invalid test data that will be caught by future hdmf validator version.
### Bug fixes:
- Updated behavior of ``make clean`` command for docs to ensure tutorial files are cleaned up. @oruebel (#1349)
- Enforced electrode ID uniqueness during insertion into table. @CodyCBakerPhD (#1344)
- Fixed integration tests with invalid test data that will be caught by future hdmf validator version.
@dsleiter, @rly (#1366, #1376)
- Fix build warnings in docs @oruebel (#1380)
- Fixed build warnings in docs. @oruebel (#1380)
- Fix intersphinx links in docs for numpy. @oruebel (#1386)
- Previously, the ``data`` argument was required in ``OpticalSeries.__init__`` even though ``external_file`` could
be provided in place of ``data``. ``OpticalSeries.__init__`` now makes ``data`` optional. However, this has the
side effect of moving the position of ``data`` to later in the argument list, which may break code that relies
on positional arguments for ``OpticalSeries.__init__``. @rly (#1274)
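
Since the position of ``data`` in ``OpticalSeries.__init__`` has moved, a sketch of the keyword-argument style that avoids this breakage (file name and values are illustrative) looks like:

```python
# Sketch: with `data` now optional, pass `external_file` instead and use keyword
# arguments so the changed position of `data` cannot affect the call.
from pynwb.image import OpticalSeries

stimulus = OpticalSeries(
    name="StimulusPresentation",
    external_file=["stimulus_movie.avi"],  # illustrative path
    format="external",
    starting_frame=[0],
    unit="n.a.",
    starting_time=0.0,
    rate=30.0,
    distance=0.7,                   # meters from screen to subject
    field_of_view=[0.2, 0.3, 0.7],
    orientation="lower left",
)
```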

## PyNWB 1.5.1 (May 24, 2021)

## Bug fix:
### Bug fixes:
- Raise minimum version of pandas from 0.23 to 1.0.5 to be compatible with numpy 1.20, and raise minimum version of
HDMF to use the corresponding change in HDMF. @rly (#1363)
- Update documentation and update structure of requirements files. @rly (#1363)
@@ -56,7 +96,7 @@
- Add capability to add a row to a column after IO.
- Add method `AbstractContainer.get_fields_conf`.
- Add functionality for storing external resource references.
- Add method `hdmf.utils.get_docval_macro` to get a tuple of the current values for a docval_macro, e.g., 'array_data'
- Add method `hdmf.utils.get_docval_macro` to get a tuple of the current values for a docval_macro, e.g., 'array_data'
and 'scalar_data'.
- `DynamicTable` can be automatically generated using `get_class`. Now the HDMF API can read files with extensions
that contain a DynamicTable without needing to import the extension first.