 import os.path
 import unittest
+import warnings
 from abc import ABC, abstractmethod
-from typing import Type, Union
+from typing import Any, Dict, Optional, Type, Union
 
 import fsspec
 import xarray as xr
@@ -38,7 +39,7 @@ def prepare_fs(cls, fs: fsspec.AbstractFileSystem, root: str):
         # print(f'{fs.protocol}: making root {root}')
         fs.mkdirs(root)
 
-        # Write a text file into each subdirectory so
+        # Write a text file into each subdirectory, so
         # we also test that store.get_data_ids() scans
         # recursively.
         dir_path = root
@@ -54,6 +55,7 @@ def prepare_fs(cls, fs: fsspec.AbstractFileSystem, root: str):
     def test_mldataset_levels(self):
         data_store = self.create_data_store()
         self.assertMultiLevelDatasetFormatSupported(data_store)
+        self.assertMultiLevelDatasetFormatWithLinkSupported(data_store)
 
     def test_dataset_zarr(self):
         data_store = self.create_data_store()
@@ -73,6 +75,47 @@ def assertMultiLevelDatasetFormatSupported(self,
                                     MultiLevelDataset,
                                     MultiLevelDatasetDescriptor)
 
+        # Test that use_saved_levels works
+        self.assertDatasetSupported(data_store,
+                                    '.levels',
+                                    'mldataset',
+                                    MultiLevelDataset,
+                                    MultiLevelDatasetDescriptor,
+                                    write_params=dict(
+                                        use_saved_levels=True,
+                                    ))
+
+    def assertMultiLevelDatasetFormatWithLinkSupported(
+            self,
+            data_store: MutableDataStore
+    ):
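+        # Write a base dataset first; the .levels writes below refer
+        # to it through the base_dataset_id write parameter.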
+        base_dataset = self.new_cube_data()
+        base_dataset_id = f'{DATA_PATH}/base-ds.zarr'
+        data_store.write_data(base_dataset, base_dataset_id)
+
+        # Test that base_dataset_id works
+        self.assertDatasetSupported(data_store,
+                                    '.levels',
+                                    'mldataset',
+                                    MultiLevelDataset,
+                                    MultiLevelDatasetDescriptor,
+                                    write_params=dict(
+                                        base_dataset_id=base_dataset_id,
+                                    ))
+
+        # Test that base_dataset_id + use_saved_levels works
+        self.assertDatasetSupported(data_store,
+                                    '.levels',
+                                    'mldataset',
+                                    MultiLevelDataset,
+                                    MultiLevelDatasetDescriptor,
+                                    write_params=dict(
+                                        base_dataset_id=base_dataset_id,
+                                        use_saved_levels=True,
+                                    ))
+
+        data_store.delete_data(base_dataset_id)
+
     def assertDatasetFormatSupported(self,
                                      data_store: MutableDataStore,
                                      filename_ext: str):
@@ -89,8 +132,12 @@ def assertDatasetSupported(
             expected_data_type_alias: str,
             expected_type: Union[Type[xr.Dataset],
                                  Type[MultiLevelDataset]],
-            expected_descriptor_type: Union[Type[DatasetDescriptor],
-                                            Type[MultiLevelDatasetDescriptor]]
+            expected_descriptor_type: Union[
+                Type[DatasetDescriptor],
+                Type[MultiLevelDatasetDescriptor]
+            ],
+            write_params: Optional[Dict[str, Any]] = None,
+            open_params: Optional[Dict[str, Any]] = None,
     ):
         """
         Call all DataStore operations to ensure data of type
@@ -102,10 +149,15 @@ def assertDatasetSupported(
         :param expected_data_type_alias: The expected data type alias.
         :param expected_type: The expected data type.
         :param expected_descriptor_type: The expected data descriptor type.
+        :param write_params: Optional write parameters.
+        :param open_params: Optional open parameters.
         """
 
         data_id = f'{DATA_PATH}/ds{filename_ext}'
 
+        write_params = write_params or {}
+        open_params = open_params or {}
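+        # "x or {}" turns a None argument into an empty dict, so both
+        # can safely be expanded with ** further down.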
+
         self.assertIsInstance(data_store, MutableDataStore)
 
         self.assertEqual({'dataset', 'mldataset', 'geodataframe'},
@@ -114,36 +166,44 @@ def assertDatasetSupported(
         with self.assertRaises(DataStoreError):
             data_store.get_data_types_for_data(data_id)
         self.assertEqual(False, data_store.has_data(data_id))
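+        # Check membership only: other datasets (e.g. a base dataset)
+        # may coexist under the same store root.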
-        self.assertEqual([], list(data_store.get_data_ids()))
+        self.assertNotIn(data_id, set(data_store.get_data_ids()))
 
-        data = new_cube(variables=dict(A=8, B=9))
-        written_data_id = data_store.write_data(data, data_id)
+        data = self.new_cube_data()
+        written_data_id = data_store.write_data(data, data_id, **write_params)
         self.assertEqual(data_id, written_data_id)
+
         self.assertEqual({expected_data_type_alias},
                          set(data_store.get_data_types_for_data(data_id)))
         self.assertEqual(True, data_store.has_data(data_id))
-        self.assertEqual([data_id], list(data_store.get_data_ids()))
+        self.assertIn(data_id, set(data_store.get_data_ids()))
 
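+        # Constrain the search by data type so that only the dataset
+        # written above is expected to match.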
-        data_descriptors = list(data_store.search_data())
+        data_descriptors = list(data_store.search_data(
+            data_type=expected_type)
+        )
         self.assertEqual(1, len(data_descriptors))
         self.assertIsInstance(data_descriptors[0], DataDescriptor)
         self.assertIsInstance(data_descriptors[0], expected_descriptor_type)
 
-        data = data_store.open_data(data_id)
+        data = data_store.open_data(data_id, **open_params)
         self.assertIsInstance(data, expected_type)
 
         try:
             data_store.delete_data(data_id)
-        except PermissionError:  # Typically occurs on win32 due to fsspec
+        except PermissionError as e:  # May occur on win32 due to fsspec
+            warnings.warn(f'{e}')
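+            # Report the failed cleanup and skip the post-delete checks.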
             return
         with self.assertRaises(DataStoreError):
             data_store.get_data_types_for_data(data_id)
         self.assertEqual(False, data_store.has_data(data_id))
-        self.assertEqual([], list(data_store.get_data_ids()))
+        self.assertNotIn(data_id, set(data_store.get_data_ids()))
 
+    @staticmethod
+    def new_cube_data():
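+        # Two-variable test cube, chunked so that reads and writes
+        # touch several chunks per variable.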
+        cube = new_cube(variables=dict(A=8.5, B=9.5))
+        return cube.chunk(dict(time=1, lat=90, lon=180))
 
-class FileFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):
 
+class FileFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):
     def create_data_store(self) -> FsDataStore:
         root = os.path.join(new_temp_dir(prefix='xcube'), ROOT_DIR)
         self.prepare_fs(fsspec.filesystem('file'), root)