Skip to content

Commit

Permalink
Removed redundant shape check functions. Fix #204 (#205)
Browse files Browse the repository at this point in the history
* Removed redundant shape check functions. Fix #204
  • Loading branch information
oruebel authored Nov 18, 2019
1 parent ee1684a commit c93d595
Show file tree
Hide file tree
Showing 6 changed files with 16 additions and 42 deletions.
6 changes: 3 additions & 3 deletions src/hdmf/backends/hdf5/h5tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
import warnings
from ...container import Container

from ...utils import docval, getargs, popargs, call_docval_func
from ...data_utils import AbstractDataChunkIterator, get_shape
from ...utils import docval, getargs, popargs, call_docval_func, get_data_shape
from ...data_utils import AbstractDataChunkIterator
from ...build import Builder, GroupBuilder, DatasetBuilder, LinkBuilder, BuildManager,\
RegionBuilder, ReferenceBuilder, TypeMap, ObjectMapper
from ...spec import RefSpec, DtypeSpec, NamespaceCatalog, GroupSpec
Expand Down Expand Up @@ -1028,7 +1028,7 @@ def __list_fill__(cls, parent, name, data, options=None):
elif isinstance(dtype, np.dtype):
data_shape = (len(data),)
else:
data_shape = get_shape(data)
data_shape = get_data_shape(data)
# Create the dataset
try:
dset = parent.create_dataset(name, shape=data_shape, dtype=dtype, **io_settings)
Expand Down
6 changes: 3 additions & 3 deletions src/hdmf/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
from abc import abstractmethod
from uuid import uuid4
from six import with_metaclass
from .utils import docval, get_docval, call_docval_func, getargs, ExtenderMeta
from .data_utils import DataIO, get_shape
from .utils import docval, get_docval, call_docval_func, getargs, ExtenderMeta, get_data_shape
from .data_utils import DataIO
from warnings import warn
import h5py

Expand Down Expand Up @@ -397,7 +397,7 @@ def shape(self):
:return: Shape tuple
:rtype: tuple of ints
"""
return get_shape(self.__data)
return get_data_shape(self.__data)

@docval({'name': 'dataio', 'type': DataIO, 'doc': 'the DataIO to apply to the data held by this Data'})
def set_dataio(self, **kwargs):
Expand Down
29 changes: 1 addition & 28 deletions src/hdmf/data_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,39 +3,12 @@

import numpy as np
from warnings import warn
from six import with_metaclass, text_type, binary_type
from six import with_metaclass
import copy

from .utils import docval, getargs, popargs, docval_macro, get_data_shape


def __get_shape_helper(data):
"""Helper function used by get_shape"""
shape = list()
if hasattr(data, '__len__'):
shape.append(len(data))
if len(data) and not isinstance(data[0], (text_type, binary_type)):
shape.extend(__get_shape_helper(data[0]))
return tuple(shape)


def get_shape(data):
"""
Determine the data shape for the given data
:param data: Array for which the data should be determined
:type data: list, ndarray, dict
:return: None in case shape is unknown and shape tuple otherwise
"""
if isinstance(data, dict):
return None
elif hasattr(data, 'shape'):
return data.shape
elif hasattr(data, '__len__') and not isinstance(data, (text_type, binary_type)):
return __get_shape_helper(data)
else:
return None


@docval_macro('array_data')
class AbstractDataChunkIterator(with_metaclass(ABCMeta, object)):
"""
Expand Down
6 changes: 4 additions & 2 deletions src/hdmf/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -648,9 +648,11 @@ def __get_shape_helper(local_data):
return tuple(shape)
if hasattr(data, 'maxshape'):
return data.maxshape
if hasattr(data, 'shape'):
elif hasattr(data, 'shape'):
return data.shape
if hasattr(data, '__len__') and not isinstance(data, (text_type, binary_type)):
elif isinstance(data, dict):
return None
elif hasattr(data, '__len__') and not isinstance(data, (text_type, binary_type)):
if not strict_no_data_load or (isinstance(data, list) or isinstance(data, tuple) or isinstance(data, set)):
return __get_shape_helper(data)
else:
Expand Down
7 changes: 3 additions & 4 deletions src/hdmf/validate/validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@
import re
from itertools import chain

from ..utils import docval, getargs, call_docval_func, pystr
from ..data_utils import get_shape
from ..utils import docval, getargs, call_docval_func, pystr, get_data_shape

from ..spec import Spec, AttributeSpec, GroupSpec, DatasetSpec, RefSpec
from ..spec.spec import BaseStorageSpec, DtypeHelper
Expand Down Expand Up @@ -318,7 +317,7 @@ def validate(self, **kwargs):
dtype = get_type(value)
if not check_type(spec.dtype, dtype):
ret.append(DtypeError(self.get_spec_loc(spec), spec.dtype, dtype))
shape = get_shape(value)
shape = get_data_shape(value)
if not check_shape(spec.shape, shape):
ret.append(ShapeError(self.get_spec_loc(spec), spec.shape, shape))
return ret
Expand Down Expand Up @@ -374,7 +373,7 @@ def validate(self, **kwargs):
if not check_type(self.spec.dtype, dtype):
ret.append(DtypeError(self.get_spec_loc(self.spec), self.spec.dtype, dtype,
location=self.get_builder_loc(builder)))
shape = get_shape(data)
shape = get_data_shape(data)
if not check_shape(self.spec.shape, shape):
if shape is None:
ret.append(ExpectedArrayError(self.get_spec_loc(self.spec), self.spec.shape, str(data),
Expand Down
4 changes: 2 additions & 2 deletions tests/unit/test_io_hdf5.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from hdmf.backends.hdf5 import HDF5IO
from hdmf.build import GroupBuilder, DatasetBuilder, LinkBuilder
from hdmf.data_utils import get_shape
from hdmf.utils import get_data_shape

from numbers import Number

Expand Down Expand Up @@ -239,5 +239,5 @@ def test_dataset_shape(self):
io.write_builder(self.builder)
builder = io.read_builder()
dset = builder['test_bucket']['foo_holder']['foo1']['my_data'].data
self.assertEqual(get_shape(dset), (10,))
self.assertEqual(get_data_shape(dset), (10,))
io.close()

0 comments on commit c93d595

Please sign in to comment.