Skip to content

Commit 3cadf4e

Browse files
WeatherGod authored and shoyer committed
Change an == to an is. Fix tests so that this won't happen again. (#2648)
* Change an `==` to an `is`. Fix tests so that this won't happen again. Closes #2647 and re-affirms #1988. * Reuse the same _CONCAT_DIM_DEFAULT object
1 parent 624d78f commit 3cadf4e

File tree

3 files changed

+27
-6
lines changed

3 files changed

+27
-6
lines changed

xarray/backends/api.py

+3-5
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,8 @@
1010

1111
from .. import Dataset, backends, conventions
1212
from ..core import indexing
13-
from ..core.combine import _auto_combine, _infer_concat_order_from_positions
13+
from ..core.combine import (
14+
_CONCAT_DIM_DEFAULT, _auto_combine, _infer_concat_order_from_positions)
1415
from ..core.pycompat import basestring, path_type
1516
from ..core.utils import close_on_error, is_grib_path, is_remote_uri
1617
from .common import ArrayWriter
@@ -483,9 +484,6 @@ def close(self):
483484
f.close()
484485

485486

486-
_CONCAT_DIM_DEFAULT = '__infer_concat_dim__'
487-
488-
489487
def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
490488
compat='no_conflicts', preprocess=None, engine=None,
491489
lock=None, data_vars='all', coords='different',
@@ -606,7 +604,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
606604
# Coerce 1D input into ND to maintain backwards-compatible API until API
607605
# for N-D combine decided
608606
# (see https://github.com/pydata/xarray/pull/2553/#issuecomment-445892746)
609-
if concat_dim is None or concat_dim == _CONCAT_DIM_DEFAULT:
607+
if concat_dim is None or concat_dim is _CONCAT_DIM_DEFAULT:
610608
concat_dims = concat_dim
611609
elif not isinstance(concat_dim, list):
612610
concat_dims = [concat_dim]

xarray/core/combine.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -368,7 +368,7 @@ def _auto_concat(datasets, dim=None, data_vars='all', coords='different'):
368368
return concat(datasets, dim=dim, data_vars=data_vars, coords=coords)
369369

370370

371-
_CONCAT_DIM_DEFAULT = '__infer_concat_dim__'
371+
_CONCAT_DIM_DEFAULT = utils.ReprObject('<inferred>')
372372

373373

374374
def _infer_concat_order_from_positions(datasets, concat_dims):

xarray/tests/test_backends.py

+23
Original file line numberDiff line numberDiff line change
@@ -2367,6 +2367,29 @@ def test_open_single_dataset(self):
23672367
with open_mfdataset([tmp], concat_dim=dim) as actual:
23682368
assert_identical(expected, actual)
23692369

2370+
def test_open_multi_dataset(self):
2371+
# Test for issue GH #1988 and #2647. This makes sure that the
2372+
# concat_dim is utilized when specified in open_mfdataset().
2373+
# The additional wrinkle is to ensure that a length greater
2374+
# than one is tested as well due to numpy's implicit casting
2375+
# of 1-length arrays to booleans in tests, which allowed
2376+
# #2647 to still pass the test_open_single_dataset(),
2377+
# which is itself still needed as-is because the original
2378+
# bug caused one-length arrays to not be used correctly
2379+
# in concatenation.
2380+
rnddata = np.random.randn(10)
2381+
original = Dataset({'foo': ('x', rnddata)})
2382+
dim = DataArray([100, 150], name='baz', dims='baz')
2383+
expected = Dataset({'foo': (('baz', 'x'),
2384+
np.tile(rnddata[np.newaxis, :], (2, 1)))},
2385+
{'baz': [100, 150]})
2386+
with create_tmp_file() as tmp1, \
2387+
create_tmp_file() as tmp2:
2388+
original.to_netcdf(tmp1)
2389+
original.to_netcdf(tmp2)
2390+
with open_mfdataset([tmp1, tmp2], concat_dim=dim) as actual:
2391+
assert_identical(expected, actual)
2392+
23702393
def test_dask_roundtrip(self):
23712394
with create_tmp_file() as tmp:
23722395
data = create_test_data()

0 commit comments

Comments (0)