Commit b77c4be
fixed tests, used constants for format names
forman committed May 16, 2019
1 parent ac8b06d commit b77c4be
Showing 2 changed files with 18 additions and 10 deletions.
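
The commit replaces hard-coded format strings with named constants and a new guess_cube_format() helper. The constants and guess_dataset_format() are imported from xcube.util.dsio, whose definitions are not part of this diff; the sketch below is only an assumption of what the imported names plausibly stand for, with values inferred from the string literals they replace (the actual dsio implementation may differ):

# Sketch only -- assumed shape of the names imported from xcube.util.dsio;
# not taken from this commit.
FORMAT_NAME_ZARR = 'zarr'
FORMAT_NAME_NETCDF4 = 'netcdf4'


def guess_dataset_format(path: str) -> str:
    # Assumed behaviour: derive the format name from the file extension.
    if path.endswith('.zarr'):
        return FORMAT_NAME_ZARR
    if path.endswith('.nc'):
        return FORMAT_NAME_NETCDF4
    raise ValueError(f"cannot guess dataset format for {path!r}")  # illustrative fallback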
23 changes: 16 additions & 7 deletions xcube/webapi/context.py
@@ -40,8 +40,11 @@
 from .mldataset import FileStorageMultiLevelDataset, BaseMultiLevelDataset, MultiLevelDataset, \
     ComputedMultiLevelDataset, ObjectStorageMultiLevelDataset
 from .reqparams import RequestParams
+from ..util.dsio import guess_dataset_format, FORMAT_NAME_NETCDF4, FORMAT_NAME_ZARR
 from ..util.perf import measure_time
 
+FORMAT_NAME_LEVELS = 'levels'
+
 COMPUTE_DATASET = 'compute_dataset'
 ALL_PLACES = "all"

@@ -407,6 +410,12 @@ def find_dataset_descriptor(cls,
         return next((dsd for dsd in dataset_descriptors if dsd['Identifier'] == ds_name), None)
 
 
+def guess_cube_format(path: str) -> str:
+    if path.endswith('.levels'):
+        return FORMAT_NAME_LEVELS
+    return guess_dataset_format(path)
+
+
 # noinspection PyUnusedLocal
 def open_ml_dataset_from_object_storage(ctx: ServiceContext,
                                         dataset_descriptor: DatasetDescriptor) -> MultiLevelDataset:
@@ -416,7 +425,7 @@ def open_ml_dataset_from_object_storage(ctx: ServiceContext,
     if not path:
         raise ServiceConfigError(f"Missing 'path' entry in dataset descriptor {ds_id}")
 
-    data_format = dataset_descriptor.get('Format', 'zarr')
+    data_format = dataset_descriptor.get('Format', FORMAT_NAME_ZARR)
 
     s3_client_kwargs = {}
     if 'Endpoint' in dataset_descriptor:
@@ -425,14 +434,14 @@ def open_ml_dataset_from_object_storage(ctx: ServiceContext,
         s3_client_kwargs['region_name'] = dataset_descriptor['Region']
     obs_file_system = s3fs.S3FileSystem(anon=True, client_kwargs=s3_client_kwargs)
 
-    if data_format == 'zarr':
+    if data_format == FORMAT_NAME_ZARR:
         store = s3fs.S3Map(root=path, s3=obs_file_system, check=False)
         cached_store = zarr.LRUStoreCache(store, max_size=2 ** 28)
         with measure_time(tag=f"opened remote zarr dataset {path}"):
             ds = xr.open_zarr(cached_store)
         return BaseMultiLevelDataset(ds)
 
-    if data_format == 'levels':
+    if data_format == FORMAT_NAME_LEVELS:
         with measure_time(tag=f"opened remote levels dataset {path}"):
             return ObjectStorageMultiLevelDataset(ds_id, obs_file_system, path,
                                                   exception_type=ServiceConfigError)
@@ -448,19 +457,19 @@ def open_ml_dataset_from_local_fs(ctx: ServiceContext, dataset_descriptor: DatasetDescriptor) -> MultiLevelDataset:
     if not os.path.isabs(path):
         path = os.path.join(ctx.base_dir, path)
 
-    data_format = dataset_descriptor.get('Format', 'nc')
+    data_format = dataset_descriptor.get('Format', guess_cube_format(path))
 
-    if data_format == 'netcdf4':
+    if data_format == FORMAT_NAME_NETCDF4:
         with measure_time(tag=f"opened local NetCDF dataset {path}"):
             ds = xr.open_dataset(path)
         return BaseMultiLevelDataset(ds)
 
-    if data_format == 'zarr':
+    if data_format == FORMAT_NAME_ZARR:
         with measure_time(tag=f"opened local zarr dataset {path}"):
            ds = xr.open_zarr(path)
         return BaseMultiLevelDataset(ds)
 
-    if data_format == 'levels':
+    if data_format == FORMAT_NAME_LEVELS:
         with measure_time(tag=f"opened local levels dataset {path}"):
             return FileStorageMultiLevelDataset(path)

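For local datasets the default Format is no longer hard-coded: when a descriptor omits Format, open_ml_dataset_from_local_fs now calls guess_cube_format(path), which returns the new FORMAT_NAME_LEVELS for *.levels paths and otherwise defers to guess_dataset_format. A minimal configuration sketch that relies on this fallback; the identifier and path below are invented for illustration:

# Illustrative dataset descriptor without an explicit 'Format' entry.
# With this commit the format is guessed from the path: the '.levels'
# suffix selects FORMAT_NAME_LEVELS, while e.g. a '.zarr' path would be
# resolved by guess_dataset_format() instead.
dataset_descriptor = {
    'Identifier': 'demo_cube',     # invented name
    'Path': 'cubes/demo.levels',   # invented path
    'FileSystem': 'local',
    # 'Format' deliberately omitted -> guess_cube_format('cubes/demo.levels')
}
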
5 changes: 2 additions & 3 deletions xcube/webapi/service.py
@@ -38,12 +38,11 @@
 from tornado.log import enable_pretty_logging
 from tornado.web import RequestHandler, Application
 
-from .context import ServiceContext
+from .context import ServiceContext, guess_cube_format
 from .defaults import DEFAULT_ADDRESS, DEFAULT_PORT, DEFAULT_UPDATE_PERIOD, DEFAULT_LOG_PREFIX, \
     DEFAULT_TILE_CACHE_SIZE, DEFAULT_NAME, DEFAULT_TRACE_PERF, DEFAULT_TILE_COMP_MODE
 from .errors import ServiceBadRequestError
 from .reqparams import RequestParams
-from ..util.dsio import guess_dataset_format
 from ..util.undefined import UNDEFINED
 
 __author__ = "Norman Fomferra (Brockmann Consult GmbH)"
@@ -393,7 +392,7 @@ def new_default_config(cube_paths: List[str], styles: Dict[str, Tuple] = None):
     for cube_path in cube_paths:
         dataset_list.append(dict(Identifier=f"dataset_{index + 1}",
                                  Title=f"Dataset #{index + 1}",
-                                 Format=guess_dataset_format(cube_path),
+                                 Format=guess_cube_format(cube_path),
                                  Path=cube_path,
                                  FileSystem='local'))
         index += 1
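Because new_default_config now uses guess_cube_format, auto-generated configurations label *.levels cubes as 'levels' as well. A short usage sketch, assuming new_default_config can be imported from xcube.webapi.service and called as shown in the hunk header above, and that the generated entries are returned under a 'Datasets' key; the cube paths are invented:

# Assumed usage; the 'Datasets' key and the paths are illustrative.
from xcube.webapi.service import new_default_config

config = new_default_config(['cubes/demo.zarr', 'cubes/demo.levels'])
for entry in config.get('Datasets', []):
    # e.g. dataset_1 with a guessed 'zarr' format, dataset_2 with 'levels'
    print(entry['Identifier'], entry['Format'], entry['Path'])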
