
Commit d378dec
Fix #1148: Add passthrough on non-DCI H5DataIO to support its use in pynwb TimeSeries. (#1149)

* Add passthrough on non-DCI H5DataIO to support its use in pynwb TimeSeries. Fixes #1148.

* CHANGELOG update for #1149

* Add another maxshape fallback (self.shape)

* Incorporated @stephprince's suggestions on #1149.

---------

Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com>
cboulay and stephprince authored Aug 28, 2024
1 parent abb6fe5 commit d378dec
Showing 3 changed files with 27 additions and 1 deletion.
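
For context, here is a minimal usage sketch of what this fix enables: wrapping an already-written h5py Dataset (i.e. non-DataChunkIterator data) in H5DataIO and reading maxshape from it. This mirrors the new unit test below; the file and dataset names are illustrative, not taken from the commit.

import h5py
import numpy as np
from hdmf.backends.hdf5.h5_utils import H5DataIO

# Write a plain dataset, then wrap the resulting h5py Dataset in H5DataIO.
with h5py.File("example.h5", "w") as f:
    f.create_dataset("test_dataset", data=np.arange(10))
    wrapped = H5DataIO(f["test_dataset"])
    # Before this fix, maxshape was not exposed for non-DataChunkIterator data;
    # it now passes through to the underlying h5py Dataset.
    print(wrapped.maxshape)  # -> (10,)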
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@

### Bug fixes
- Fixed issue where scalar datasets with a compound data type were being written as non-scalar datasets @stephprince [#1176](https://github.com/hdmf-dev/hdmf/pull/1176)
- Fixed H5DataIO not exposing `maxshape` on non-dci dsets. @cboulay [#1149](https://github.com/hdmf-dev/hdmf/pull/1149)

## HDMF 3.14.3 (July 29, 2024)

13 changes: 12 additions & 1 deletion src/hdmf/backends/hdf5/h5_utils.py
@@ -21,7 +21,7 @@
from ...query import HDMFDataset, ReferenceResolver, ContainerResolver, BuilderResolver
from ...region import RegionSlicer
from ...spec import SpecWriter, SpecReader
-from ...utils import docval, getargs, popargs, get_docval
+from ...utils import docval, getargs, popargs, get_docval, get_data_shape


class HDF5IODataChunkIteratorQueue(deque):
@@ -672,3 +672,14 @@ def valid(self):
        if isinstance(self.data, Dataset) and not self.data.id.valid:
            return False
        return super().valid

    @property
    def maxshape(self):
        if 'maxshape' in self.io_settings:
            return self.io_settings['maxshape']
        elif hasattr(self.data, 'maxshape'):
            return self.data.maxshape
        elif hasattr(self, "shape"):
            return self.shape
        else:
            return get_data_shape(self.data)
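
As a side note (not part of the diff), the property above resolves maxshape in order: an explicit maxshape in io_settings wins, then the wrapped data's own maxshape, then the H5DataIO shape, and finally get_data_shape(self.data). A small sketch of the two ends of that chain, mirroring the new tests added below:

import numpy as np
from hdmf.backends.hdf5.h5_utils import H5DataIO

H5DataIO(data=np.arange(10), maxshape=(None,)).maxshape  # explicit io_settings -> (None,)
H5DataIO(data=[1, 2, 3, 4, 5]).maxshape                  # plain list, falls back to its shape -> (5,)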
14 changes: 14 additions & 0 deletions tests/unit/test_io_hdf5_h5tools.py
@@ -607,6 +607,12 @@ def test_pass_through_of_chunk_shape_generic_data_chunk_iterator(self):
    #############################################
    # H5DataIO general
    #############################################
    def test_pass_through_of_maxshape_on_h5dataset(self):
        k = 10
        self.io.write_dataset(self.f, DatasetBuilder('test_dataset', np.arange(k), attributes={}))
        dset = H5DataIO(self.f['test_dataset'])
        self.assertEqual(dset.maxshape, (k,))

    def test_warning_on_non_gzip_compression(self):
        # Make sure no warning is issued when using gzip
        with warnings.catch_warnings(record=True) as w:
@@ -3763,6 +3769,14 @@ def test_dataio_shape_then_data(self):
        with self.assertRaisesRegex(ValueError, "Setting data when dtype and shape are not None is not supported"):
            dataio.data = list()

    def test_dataio_maxshape(self):
        dataio = H5DataIO(data=np.arange(10), maxshape=(None,))
        self.assertEqual(dataio.maxshape, (None,))

    def test_dataio_maxshape_from_data(self):
        dataio = H5DataIO(data=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
        self.assertEqual(dataio.maxshape, (10,))


def test_hdf5io_can_read():
    assert not HDF5IO.can_read("not_a_file")
