Skip to content

Commit

Permalink
Introduce a base store Class
Browse files Browse the repository at this point in the history
Unconditionally close store in tests.

All the tested stores should now have a `close()` method we can call, which
will be a no-op if the store does not need closing.

Turn UserWarnings into errors

And turn them back into warnings only in the relevant tests.
This ensures that we are not using deprecated functionality, except
when testing for it.

initially based on 318eddcd, and later 1249f35
  • Loading branch information
Carreau committed Nov 6, 2020
1 parent 06450d0 commit 0437842
Show file tree
Hide file tree
Showing 14 changed files with 640 additions and 425 deletions.
12 changes: 7 additions & 5 deletions docs/tutorial.rst
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ print some diagnostics, e.g.::
Read-only : False
Compressor : Blosc(cname='zstd', clevel=3, shuffle=BITSHUFFLE,
: blocksize=0)
Store type : builtins.dict
Store type : zarr.storage.KVStore
No. bytes : 400000000 (381.5M)
No. bytes stored : 3379344 (3.2M)
Storage ratio : 118.4
Expand Down Expand Up @@ -268,7 +268,7 @@ Here is an example using a delta filter with the Blosc compressor::
Read-only : False
Filter [0] : Delta(dtype='<i4')
Compressor : Blosc(cname='zstd', clevel=1, shuffle=SHUFFLE, blocksize=0)
Store type : builtins.dict
Store type : zarr.storage.KVStore
No. bytes : 400000000 (381.5M)
No. bytes stored : 1290562 (1.2M)
Storage ratio : 309.9
Expand Down Expand Up @@ -795,8 +795,10 @@ Here is an example using S3Map to read an array created previously::
Order : C
Read-only : False
Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0)
Store type : fsspec.mapping.FSMap
Store type : zarr.storage.KVStore
No. bytes : 21
No. bytes stored : 382
Storage ratio : 0.1
Chunks initialized : 3/3
>>> z[:]
array([b'H', b'e', b'l', b'l', b'o', b' ', b'f', b'r', b'o', b'm', b' ',
Expand Down Expand Up @@ -1262,7 +1264,7 @@ ratios, depending on the correlation structure within the data. E.g.::
Order : C
Read-only : False
Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0)
Store type : builtins.dict
Store type : zarr.storage.KVStore
No. bytes : 400000000 (381.5M)
No. bytes stored : 6696010 (6.4M)
Storage ratio : 59.7
Expand All @@ -1276,7 +1278,7 @@ ratios, depending on the correlation structure within the data. E.g.::
Order : F
Read-only : False
Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0)
Store type : builtins.dict
Store type : zarr.storage.KVStore
No. bytes : 400000000 (381.5M)
No. bytes stored : 4684636 (4.5M)
Storage ratio : 85.4
Expand Down
2 changes: 1 addition & 1 deletion mypy.ini
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
[mypy]
python_version = 3.6
python_version = 3.8
ignore_missing_imports = True
follow_imports = silent
2 changes: 2 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,6 @@ doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS IGNORE_EXCEPTION_DETAIL
addopts = --durations=10
filterwarnings =
error::DeprecationWarning:zarr.*
error::UserWarning:zarr.*
ignore:PY_SSIZE_T_CLEAN will be required.*:DeprecationWarning
ignore:The loop argument is deprecated since Python 3.8.*:DeprecationWarning
16 changes: 8 additions & 8 deletions zarr/convenience.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from zarr.hierarchy import group as _create_group
from zarr.hierarchy import open_group
from zarr.meta import json_dumps, json_loads
from zarr.storage import contains_array, contains_group
from zarr.storage import contains_array, contains_group, Store
from zarr.util import TreeViewer, buffer_size, normalize_storage_path


Expand Down Expand Up @@ -73,7 +73,7 @@ def open(store=None, mode='a', **kwargs):
path = kwargs.get('path', None)
# handle polymorphic store arg
clobber = mode == 'w'
store = normalize_store_arg(store, clobber=clobber)
store: Store = normalize_store_arg(store, clobber=clobber)
path = normalize_storage_path(path)

if mode in {'w', 'w-', 'x'}:
Expand All @@ -97,7 +97,7 @@ def open(store=None, mode='a', **kwargs):
raise PathNotFoundError(path)


def save_array(store, arr, **kwargs):
def save_array(store: Store, arr, **kwargs):
"""Convenience function to save a NumPy array to the local file system, following a
similar API to the NumPy save() function.
Expand Down Expand Up @@ -138,7 +138,7 @@ def save_array(store, arr, **kwargs):
store.close()


def save_group(store, *args, **kwargs):
def save_group(store: Store, *args, **kwargs):
"""Convenience function to save several NumPy arrays to the local file system, following a
similar API to the NumPy savez()/savez_compressed() functions.
Expand Down Expand Up @@ -214,7 +214,7 @@ def save_group(store, *args, **kwargs):
store.close()


def save(store, *args, **kwargs):
def save(store: Store, *args, **kwargs):
"""Convenience function to save an array or group of arrays to the local file system.
Parameters
Expand Down Expand Up @@ -324,7 +324,7 @@ def __repr__(self):
return r


def load(store):
def load(store: Store):
"""Load data from an array or group into memory.
Parameters
Expand Down Expand Up @@ -1069,7 +1069,7 @@ def copy_all(source, dest, shallow=False, without_attrs=False, log=None,
return n_copied, n_skipped, n_bytes_copied


def consolidate_metadata(store, metadata_key='.zmetadata'):
def consolidate_metadata(store: Store, metadata_key=".zmetadata"):
"""
Consolidate all metadata for groups and arrays within the given store
into a single resource and put it under the given key.
Expand Down Expand Up @@ -1120,7 +1120,7 @@ def is_zarr_key(key):
return open_consolidated(store, metadata_key=metadata_key)


def open_consolidated(store, metadata_key='.zmetadata', mode='r+', **kwargs):
def open_consolidated(store: Store, metadata_key=".zmetadata", mode="r+", **kwargs):
"""Open group using metadata previously consolidated into a single key.
This is an optimised method for opening a Zarr group, where instead of
Expand Down
50 changes: 36 additions & 14 deletions zarr/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,20 +9,39 @@
import numpy as np
from numcodecs.compat import ensure_bytes, ensure_ndarray

from collections.abc import MutableMapping

from zarr.attrs import Attributes
from zarr.codecs import AsType, get_codec
from zarr.errors import ArrayNotFoundError, ReadOnlyError
from zarr.indexing import (BasicIndexer, CoordinateIndexer, MaskIndexer,
OIndex, OrthogonalIndexer, VIndex, check_fields,
check_no_multi_fields, ensure_tuple,
err_too_many_indices, is_contiguous_selection,
is_scalar, pop_fields)
from zarr.indexing import (
BasicIndexer,
CoordinateIndexer,
MaskIndexer,
OIndex,
OrthogonalIndexer,
VIndex,
check_fields,
check_no_multi_fields,
ensure_tuple,
err_too_many_indices,
is_contiguous_selection,
is_scalar,
pop_fields,
)
from zarr.meta import decode_array_metadata, encode_array_metadata
from zarr.storage import array_meta_key, attrs_key, getsize, listdir
from zarr.util import (InfoReporter, check_array_shape, human_readable_size,
is_total_slice, nolock, normalize_chunks,
normalize_resize_args, normalize_shape,
normalize_storage_path)
from zarr.storage import array_meta_key, attrs_key, getsize, listdir, Store
from zarr.util import (
InfoReporter,
check_array_shape,
human_readable_size,
is_total_slice,
nolock,
normalize_chunks,
normalize_resize_args,
normalize_shape,
normalize_storage_path,
)


# noinspection PyUnresolvedReferences
Expand Down Expand Up @@ -107,6 +126,9 @@ def __init__(self, store, path=None, read_only=False, chunk_store=None,
# N.B., expect at this point store is fully initialized with all
# configuration metadata fully specified and normalized

store = Store._ensure_store(store)
chunk_store = Store._ensure_store(chunk_store)

self._store = store
self._chunk_store = chunk_store
self._path = normalize_storage_path(path)
Expand Down Expand Up @@ -1851,7 +1873,7 @@ def _encode_chunk(self, chunk):
cdata = chunk

# ensure in-memory data is immutable and easy to compare
if isinstance(self.chunk_store, dict):
if isinstance(self.chunk_store, MutableMapping):
cdata = ensure_bytes(cdata)

return cdata
Expand Down Expand Up @@ -1884,10 +1906,10 @@ def info(self):
Order : C
Read-only : False
Compressor : Blosc(cname='lz4', clevel=5, shuffle=SHUFFLE, blocksize=0)
Store type : builtins.dict
Store type : zarr.storage.KVStore
No. bytes : 4000000 (3.8M)
No. bytes stored : ...
Storage ratio : ...
No. bytes stored : 320
Storage ratio : 12500.0
Chunks initialized : 0/10
"""
Expand Down
24 changes: 19 additions & 5 deletions zarr/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import numpy as np
from numcodecs.registry import codec_registry
from collections.abc import MutableMapping

from zarr.core import Array
from zarr.errors import (
Expand All @@ -10,9 +11,18 @@
ContainsGroupError,
)
from zarr.n5 import N5Store
from zarr.storage import (DirectoryStore, ZipStore, contains_array,
contains_group, default_compressor, init_array,
normalize_storage_path, FSStore)
from zarr.storage import (
DirectoryStore,
ZipStore,
KVStore,
contains_array,
contains_group,
default_compressor,
init_array,
normalize_storage_path,
FSStore,
Store,
)


def create(shape, chunks=True, dtype=None, compressor='default',
Expand Down Expand Up @@ -129,9 +139,11 @@ def create(shape, chunks=True, dtype=None, compressor='default',
return z


def normalize_store_arg(store, clobber=False, default=dict, storage_options=None):
def normalize_store_arg(
store, clobber=False, default=dict, storage_options=None
) -> Store:
if store is None:
return default()
return Store._ensure_store(default())
elif isinstance(store, str):
mode = 'w' if clobber else 'r'
if "://" in store or "::" in store:
Expand All @@ -145,6 +157,8 @@ def normalize_store_arg(store, clobber=False, default=dict, storage_options=None
else:
return DirectoryStore(store)
else:
if not isinstance(store, Store) and isinstance(store, MutableMapping):
store = KVStore(store)
return store


Expand Down
36 changes: 25 additions & 11 deletions zarr/hierarchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,26 @@
ReadOnlyError,
)
from zarr.meta import decode_group_metadata
from zarr.storage import (MemoryStore, attrs_key, contains_array,
contains_group, group_meta_key, init_group, listdir,
rename, rmdir)
from zarr.util import (InfoReporter, TreeViewer, is_valid_python_name, nolock,
normalize_shape, normalize_storage_path)
from zarr.storage import (
MemoryStore,
attrs_key,
contains_array,
contains_group,
group_meta_key,
init_group,
listdir,
rename,
rmdir,
Store,
)
from zarr.util import (
InfoReporter,
TreeViewer,
is_valid_python_name,
nolock,
normalize_shape,
normalize_storage_path,
)


class Group(MutableMapping):
Expand Down Expand Up @@ -96,6 +111,8 @@ class Group(MutableMapping):

def __init__(self, store, path=None, read_only=False, chunk_store=None,
cache_attrs=True, synchronizer=None):
store = Store._ensure_store(store)
chunk_store = Store._ensure_store(chunk_store)
self._store = store
self._chunk_store = chunk_store
self._path = normalize_storage_path(path)
Expand Down Expand Up @@ -237,11 +254,8 @@ def __enter__(self):
return self

def __exit__(self, exc_type, exc_val, exc_tb):
"""If the underlying Store has a ``close`` method, call it."""
try:
self.store.close()
except AttributeError:
pass
"""If the underlying Store should always heave a ``close`` method, call it."""
self.store.close()

def info_items(self):

Expand Down Expand Up @@ -1036,7 +1050,7 @@ def move(self, source, dest):


def _normalize_store_arg(store, clobber=False, storage_options=None):
return normalize_store_arg(store, clobber=clobber, default=MemoryStore,
    return normalize_store_arg(store, clobber=clobber, default=MemoryStore,
                               storage_options=storage_options)


Expand Down
Loading

0 comments on commit 0437842

Please sign in to comment.