
Commit 1525fb0

remove autoclose in open_dataset and related warning test (#4725)
* remove autoclose in open_dataset and related warning test
* black
* remove autoclose from open_mfdataset
* update what's new
1 parent 03d8d56 commit 1525fb0

File tree

4 files changed (+4, -46 lines):

  doc/whats-new.rst
  xarray/backends/api.py
  xarray/backends/apiv2.py
  xarray/tests/test_backends.py

doc/whats-new.rst

Lines changed: 2 additions & 1 deletion
@@ -22,7 +22,8 @@ v0.16.3 (unreleased)
 
 Breaking changes
 ~~~~~~~~~~~~~~~~
-
+- remove deprecated ``autoclose`` kwargs from :py:func:`open_dataset` (:pull: `4725`).
+  By `Aureliana Barghini <https://github.com/aurghs>`_
 
 New Features
 ~~~~~~~~~~~~
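For code that still passes autoclose, the warning removed in the api.py diff below pointed to xarray.set_options(file_cache_maxsize=...) as the replacement. A minimal migration sketch, assuming a local netCDF file; the path and cache size are placeholders, not part of this commit:

import xarray as xr

# Before (kwarg now removed):
#   ds = xr.open_dataset("data.nc", autoclose=True)

# After: bound how many files xarray keeps open at once instead.
xr.set_options(file_cache_maxsize=128)  # placeholder limit
ds = xr.open_dataset("data.nc")
ds.close()

set_options also works as a context manager when the limit should only apply to one block of code.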

xarray/backends/api.py

Lines changed: 2 additions & 25 deletions
@@ -1,6 +1,4 @@
 import os
-import pathlib
-import warnings
 from glob import glob
 from io import BytesIO
 from numbers import Number
@@ -151,7 +149,7 @@ def _get_default_engine(path: str, allow_remote: bool = False):
 def _autodetect_engine(filename_or_obj):
     if isinstance(filename_or_obj, AbstractDataStore):
         engine = "store"
-    elif isinstance(filename_or_obj, (str, pathlib.Path)):
+    elif isinstance(filename_or_obj, (str, Path)):
         engine = _get_default_engine(str(filename_or_obj), allow_remote=True)
     else:
         engine = _get_engine_from_magic_number(filename_or_obj)
@@ -312,7 +310,6 @@ def open_dataset(
     decode_cf=True,
     mask_and_scale=None,
     decode_times=True,
-    autoclose=None,
     concat_characters=True,
     decode_coords=True,
     engine=None,
@@ -352,10 +349,6 @@ def open_dataset(
     decode_times : bool, optional
         If True, decode times encoded in the standard NetCDF datetime format
         into datetime objects. Otherwise, leave them encoded as numbers.
-    autoclose : bool, optional
-        If True, automatically close files to avoid OS Error of too many files
-        being open. However, this option doesn't work with streams, e.g.,
-        BytesIO.
     concat_characters : bool, optional
         If True, concatenate along the last dimension of character arrays to
         form string arrays. Dimensions will only be concatenated over (and
@@ -435,17 +428,6 @@ def open_dataset(
 
         return apiv2.open_dataset(**kwargs)
 
-    if autoclose is not None:
-        warnings.warn(
-            "The autoclose argument is no longer used by "
-            "xarray.open_dataset() and is now ignored; it will be removed in "
-            "a future version of xarray. If necessary, you can control the "
-            "maximum number of simultaneous open files with "
-            "xarray.set_options(file_cache_maxsize=...).",
-            FutureWarning,
-            stacklevel=2,
-        )
-
     if mask_and_scale is None:
         mask_and_scale = not engine == "pseudonetcdf"
 
@@ -583,7 +565,6 @@ def open_dataarray(
     decode_cf=True,
     mask_and_scale=None,
     decode_times=True,
-    autoclose=None,
     concat_characters=True,
     decode_coords=True,
     engine=None,
@@ -699,7 +680,6 @@ def open_dataarray(
         decode_cf=decode_cf,
         mask_and_scale=mask_and_scale,
         decode_times=decode_times,
-        autoclose=autoclose,
         concat_characters=concat_characters,
         decode_coords=decode_coords,
         engine=engine,
@@ -757,7 +737,6 @@ def open_mfdataset(
     data_vars="all",
     coords="different",
     combine="by_coords",
-    autoclose=None,
     parallel=False,
     join="outer",
     attrs_file=None,
@@ -924,9 +903,7 @@ def open_mfdataset(
     combined_ids_paths = _infer_concat_order_from_positions(paths)
     ids, paths = (list(combined_ids_paths.keys()), list(combined_ids_paths.values()))
 
-    open_kwargs = dict(
-        engine=engine, chunks=chunks or {}, lock=lock, autoclose=autoclose, **kwargs
-    )
+    open_kwargs = dict(engine=engine, chunks=chunks or {}, lock=lock, **kwargs)
 
     if parallel:
         import dask
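Besides dropping the deprecated keyword, open_mfdataset now builds its per-file open_kwargs from engine, chunks, lock and any remaining **kwargs only. A short usage sketch under that signature; the glob, engine and chunk sizes are illustrative placeholders:

import xarray as xr

# Combine many files into one dataset; per-file options travel via open_kwargs.
ds = xr.open_mfdataset(
    "data/*.nc",              # placeholder glob of input files
    combine="by_coords",      # default combine strategy shown in the diff above
    engine="netcdf4",         # assumed backend; any installed engine works
    chunks={"time": 100},     # placeholder per-file dask chunking
    parallel=False,           # open files serially
)
ds.close()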

xarray/backends/apiv2.py

Lines changed: 0 additions & 12 deletions
@@ -1,5 +1,4 @@
 import os
-import warnings
 
 from ..core import indexing
 from ..core.dataset import _get_chunk, _maybe_chunk
@@ -124,7 +123,6 @@ def open_dataset(
     concat_characters=None,
     decode_coords=None,
     drop_variables=None,
-    autoclose=None,
     backend_kwargs=None,
     **kwargs,
 ):
@@ -239,16 +237,6 @@ def open_dataset(
     --------
     open_mfdataset
     """
-    if autoclose is not None:
-        warnings.warn(
-            "The autoclose argument is no longer used by "
-            "xarray.open_dataset() and is now ignored; it will be removed in "
-            "a future version of xarray. If necessary, you can control the "
-            "maximum number of simultaneous open files with "
-            "xarray.set_options(file_cache_maxsize=...).",
-            FutureWarning,
-            stacklevel=2,
-        )
 
     if cache is None:
         cache = chunks is None

xarray/tests/test_backends.py

Lines changed: 0 additions & 8 deletions
@@ -1454,14 +1454,6 @@ def test_setncattr_string(self):
         assert_array_equal(one_element_list_of_strings, totest.attrs["bar"])
         assert one_string == totest.attrs["baz"]
 
-    def test_autoclose_future_warning(self):
-        data = create_test_data()
-        with create_tmp_file() as tmp_file:
-            self.save(data, tmp_file)
-            with pytest.warns(FutureWarning):
-                with self.open(tmp_file, autoclose=True) as actual:
-                    assert_identical(data, actual)
-
 
 @requires_netCDF4
 class TestNetCDF4AlreadyOpen:
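The deleted test only checked that passing autoclose=True emitted a FutureWarning, which is moot once the keyword is gone. If a regression guard were still wanted, a hypothetical replacement could assert that the keyword is now rejected outright; this sketch reuses the helpers from the removed test (create_test_data, create_tmp_file, self.save, self.open) and assumes the removal surfaces as a TypeError:

    def test_autoclose_removed(self):
        # Hypothetical guard: the kwarg no longer exists, so it should fail
        # loudly instead of warning (assumes unknown keywords are not swallowed).
        data = create_test_data()
        with create_tmp_file() as tmp_file:
            self.save(data, tmp_file)
            with pytest.raises(TypeError):
                self.open(tmp_file, autoclose=True)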
