diff --git a/doc/source/whatsnew/v0.18.1.txt b/doc/source/whatsnew/v0.18.1.txt
index b7a0cf888f1a2..a2324a49ebb73 100644
--- a/doc/source/whatsnew/v0.18.1.txt
+++ b/doc/source/whatsnew/v0.18.1.txt
@@ -125,10 +125,13 @@ Bug Fixes

-
+- Bug in ``concat`` raises ``AttributeError`` when input data contains tz-aware datetime and timedelta (:issue:`12620`)
 - Bug in ``pivot_table`` when ``margins=True`` and ``dropna=True`` where nulls still contributed to margin count (:issue:`12577`)
+
+
+
diff --git a/pandas/core/common.py b/pandas/core/common.py
index 05bcb53d85dd0..379e59394b6f5 100644
--- a/pandas/core/common.py
+++ b/pandas/core/common.py
@@ -2713,7 +2713,7 @@ def is_nonempty(x):
     # these are mandated to handle empties as well
     if 'datetime' in typs or 'datetimetz' in typs or 'timedelta' in typs:
         from pandas.tseries.common import _concat_compat
-        return _concat_compat(to_concat, axis=axis)
+        return _concat_compat(to_concat, axis=axis, typs=typs)

     elif 'sparse' in typs:
         from pandas.sparse.array import _concat_compat
diff --git a/pandas/tools/tests/test_merge.py b/pandas/tools/tests/test_merge.py
index d5ddfe624e240..ddc4e7aaf1588 100644
--- a/pandas/tools/tests/test_merge.py
+++ b/pandas/tools/tests/test_merge.py
@@ -1161,6 +1161,20 @@ def test_concat_tz_series(self):
         result = pd.concat([first, second])
         self.assertEqual(result[0].dtype, 'datetime64[ns, Europe/London]')

+    def test_concat_tz_series_with_datetimelike(self):
+        # GH 12620
+        # tz and timedelta
+        x = [pd.Timestamp('2011-01-01', tz='US/Eastern'),
+             pd.Timestamp('2011-02-01', tz='US/Eastern')]
+        y = [pd.Timedelta('1 day'), pd.Timedelta('2 day')]
+        result = concat([pd.Series(x), pd.Series(y)], ignore_index=True)
+        tm.assert_series_equal(result, pd.Series(x + y, dtype='object'))
+
+        # tz and period
+        y = [pd.Period('2011-03', freq='M'), pd.Period('2011-04', freq='M')]
+        result = concat([pd.Series(x), pd.Series(y)], ignore_index=True)
+        tm.assert_series_equal(result, pd.Series(x + y, dtype='object'))
+
     def test_concat_period_series(self):
         x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D'))
         y = Series(pd.PeriodIndex(['2015-10-01', '2016-01-01'], freq='D'))
diff --git a/pandas/tseries/common.py b/pandas/tseries/common.py
index 5c31d79dc6780..87b5b1c895038 100644
--- a/pandas/tseries/common.py
+++ b/pandas/tseries/common.py
@@ -238,7 +238,7 @@ class CombinedDatetimelikeProperties(DatetimeProperties, TimedeltaProperties):
     __doc__ = DatetimeProperties.__doc__


-def _concat_compat(to_concat, axis=0):
+def _concat_compat(to_concat, axis=0, typs=None):
     """
     provide concatenation of an datetimelike array of arrays each of which is a
     single M8[ns], datetimet64[ns, tz] or m8[ns] dtype
@@ -272,38 +272,33 @@ def convert_to_pydatetime(x, axis):
         return x

-    typs = get_dtype_kinds(to_concat)
+    if typs is None:
+        typs = get_dtype_kinds(to_concat)

-    # datetimetz
-    if 'datetimetz' in typs:
-
-        # if to_concat have 'datetime' or 'object'
-        # then we need to coerce to object
-        if 'datetime' in typs or 'object' in typs:
-            to_concat = [convert_to_pydatetime(x, axis) for x in to_concat]
-            return np.concatenate(to_concat, axis=axis)
+    # must be single dtype
+    if len(typs) == 1:

-        # we require ALL of the same tz for datetimetz
-        tzs = set([getattr(x, 'tz', None) for x in to_concat]) - set([None])
-        if len(tzs) == 1:
-            return DatetimeIndex(np.concatenate([x.tz_localize(None).asi8
-                                                 for x in to_concat]),
-                                 tz=list(tzs)[0])
+        if 'datetimetz' in typs:
+            # datetime with no tz should be stored as "datetime" in typs,
+            # thus no need to care

-    # single dtype
-    if len(typs) == 1:
+            # we require ALL of the same tz for datetimetz
+            tzs = set([x.tz for x in to_concat])
+            if len(tzs) == 1:
+                return DatetimeIndex(np.concatenate([x.tz_localize(None).asi8
+                                                     for x in to_concat]),
+                                     tz=list(tzs)[0])

-        if not len(typs - set(['datetime'])):
+        elif 'datetime' in typs:
             new_values = np.concatenate([x.view(np.int64) for x in to_concat],
                                         axis=axis)
             return new_values.view(_NS_DTYPE)

-        elif not len(typs - set(['timedelta'])):
+        elif 'timedelta' in typs:
             new_values = np.concatenate([x.view(np.int64) for x in to_concat],
                                         axis=axis)
             return new_values.view(_TD_DTYPE)

     # need to coerce to object
     to_concat = [convert_to_pydatetime(x, axis) for x in to_concat]
-
     return np.concatenate(to_concat, axis=axis)
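A minimal usage sketch (not part of the patch) of the behaviour the new test exercises; it assumes a pandas build with this change applied. Concatenating a tz-aware datetime Series with a timedelta (or period) Series now falls through to the object-coercion path instead of raising AttributeError:

    import pandas as pd

    x = [pd.Timestamp('2011-01-01', tz='US/Eastern'),
         pd.Timestamp('2011-02-01', tz='US/Eastern')]
    y = [pd.Timedelta('1 day'), pd.Timedelta('2 day')]

    # Before this fix: AttributeError inside _concat_compat; after it, the
    # mixed dtypes are coerced to object, matching pd.Series(x + y, dtype='object').
    result = pd.concat([pd.Series(x), pd.Series(y)], ignore_index=True)
    print(result.dtype)  # object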