diff --git a/xarray/backends/api.py b/xarray/backends/api.py
index e8c52e9f45b..81e171521f8 100644
--- a/xarray/backends/api.py
+++ b/xarray/backends/api.py
@@ -5,6 +5,7 @@
 from numbers import Number
 from pathlib import Path
 from typing import Callable, Dict, Hashable, Iterable, Mapping, Tuple, Union
+from textwrap import dedent
 
 import numpy as np
 
@@ -786,9 +787,18 @@ def open_mfdataset(paths, chunks=None, concat_dim='_not_supplied',
     if combine == '_old_auto':
         # Use the old auto_combine for now
         # Remove this after deprecation cycle from #2616 is complete
+        basic_msg = dedent("""\
+        In xarray version 0.13 the default behaviour of `open_mfdataset`
+        will change. To retain the existing behavior, pass
+        combine='nested'. To use future default behavior, pass
+        combine='by_coords'. See
+        http://xarray.pydata.org/en/stable/combining.html#combining-multi
+        """)
+        warnings.warn(basic_msg, FutureWarning, stacklevel=2)
+
         combined = auto_combine(datasets, concat_dim=concat_dim,
                                 compat=compat, data_vars=data_vars,
-                                coords=coords)
+                                coords=coords, from_openmfds=True)
     elif combine == 'nested':
         # Combined nested list by successive concat and merge operations
         # along each dimension, using structure given by "ids"
diff --git a/xarray/core/combine.py b/xarray/core/combine.py
index 96d13da2481..37ae903b6c3 100644
--- a/xarray/core/combine.py
+++ b/xarray/core/combine.py
@@ -523,7 +523,8 @@ def combine_by_coords(datasets, compat='no_conflicts', data_vars='all',
 
 
 def auto_combine(datasets, concat_dim='_not_supplied', compat='no_conflicts',
-                 data_vars='all', coords='different', fill_value=dtypes.NA):
+                 data_vars='all', coords='different', fill_value=dtypes.NA,
+                 from_openmfds=False):
     """
     Attempt to auto-magically combine the given datasets into one.
 
@@ -582,8 +583,11 @@ def auto_combine(datasets, concat_dim='_not_supplied', compat='no_conflicts',
     Dataset.merge
     """
 
-    basic_msg = """In xarray version 0.13 `auto_combine` will be deprecated."""
-    warnings.warn(basic_msg, FutureWarning, stacklevel=2)
+    if not from_openmfds:
+        basic_msg = dedent("""\
+        In xarray version 0.13 `auto_combine` will be deprecated. See
+        http://xarray.pydata.org/en/stable/combining.html#combining-multi""")
+        warnings.warn(basic_msg, FutureWarning, stacklevel=2)
 
     if concat_dim == '_not_supplied':
         concat_dim = _CONCAT_DIM_DEFAULT
@@ -599,10 +603,10 @@ def auto_combine(datasets, concat_dim='_not_supplied', compat='no_conflicts',
         message += dedent("""\
         The datasets supplied have global dimension coordinates. You may want
         to use the new `combine_by_coords` function (or the
-        `combine='by_coords'` option to `open_mfdataset` to order the datasets
+        `combine='by_coords'` option to `open_mfdataset`) to order the datasets
         before concatenation. Alternatively, to continue concatenating based
-        on the order the datasets are supplied in in future, please use the
-        new `combine_nested` function (or the `combine='nested'` option to
+        on the order the datasets are supplied in future, please use the new
+        `combine_nested` function (or the `combine='nested'` option to
         open_mfdataset).""")
     else:
         message += dedent("""\
@@ -615,7 +619,7 @@ def auto_combine(datasets, concat_dim='_not_supplied', compat='no_conflicts',
         manual_dims = [concat_dim].append(None)
         message += dedent("""\
         The datasets supplied require both concatenation and merging. From
-        xarray version 0.14 this will operation will require either using the
+        xarray version 0.13 this operation will require either using the
         new `combine_nested` function (or the `combine='nested'` option to
         open_mfdataset), with a nested list structure such that you can
         combine along the dimensions {}. Alternatively if your datasets have global
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 512bb2238ff..f5c27f2fb92 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -2884,6 +2884,17 @@ class TestOpenMFDataSetDeprecation:
     Set of tests to check that FutureWarnings are correctly raised until the
     deprecation cycle is complete. #2616
     """
+    def test_open_mfdataset_default(self):
+        ds1, ds2 = Dataset({'x': [0]}), Dataset({'x': [1]})
+        with create_tmp_file() as tmp1:
+            with create_tmp_file() as tmp2:
+                ds1.to_netcdf(tmp1)
+                ds2.to_netcdf(tmp2)
+
+                with pytest.warns(FutureWarning, match="default behaviour of"
+                                                       " `open_mfdataset`"):
+                    open_mfdataset([tmp1, tmp2])
+
     def test_open_mfdataset_with_concat_dim(self):
         ds1, ds2 = Dataset({'x': [0]}), Dataset({'x': [1]})
         with create_tmp_file() as tmp1:
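
For context (not part of the diff above): a minimal sketch of the two explicit modes that the new `open_mfdataset` warning points users towards. The NetCDF file names and the 'time' dimension are hypothetical placeholders.

    import xarray as xr

    # Old behaviour, made explicit: concatenate in the order the paths are supplied.
    ds_nested = xr.open_mfdataset(['air_2000.nc', 'air_2001.nc'],
                                  combine='nested', concat_dim='time')

    # Future default: order the datasets by their dimension coordinate values.
    ds_by_coords = xr.open_mfdataset(['air_2000.nc', 'air_2001.nc'],
                                     combine='by_coords')

Passing either value of `combine` skips the `_old_auto` branch entirely, so neither call emits the FutureWarning added here.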
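
The `from_openmfds` flag only silences the `auto_combine` deprecation message for calls routed through `open_mfdataset`; calling `auto_combine` directly still warns. A rough sketch of that behaviour with small in-memory datasets (again not part of the diff, and assuming a pre-0.13 xarray in which this deprecation cycle is active):

    import pytest
    import xarray as xr

    ds1 = xr.Dataset({'x': [0]})
    ds2 = xr.Dataset({'x': [1]})

    # A direct call to auto_combine still emits the deprecation FutureWarning...
    with pytest.warns(FutureWarning, match='`auto_combine` will be deprecated'):
        combined = xr.auto_combine([ds1, ds2])

    # ...but the result is unchanged: the datasets are concatenated along 'x'.
    assert combined.identical(xr.Dataset({'x': [0, 1]}))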