Commit 16d76b9

BUG: added series type to wrap_result for empty DataFrame
1 parent 1cb1d55 commit 16d76b9

File tree

3 files changed: +28 -13 lines changed

doc/source/whatsnew/v0.19.0.txt

+1

@@ -1563,3 +1563,4 @@ Bug Fixes
 - ``PeriodIndex`` can now accept ``list`` and ``array`` which contains ``pd.NaT`` (:issue:`13430`)
 - Bug in ``df.groupby`` where ``.median()`` returns arbitrary values if grouped dataframe contains empty bins (:issue:`13629`)
 - Bug in ``Index.copy()`` where ``name`` parameter was ignored (:issue:`14302`)
+- Bug in ``_downsample()``. Inconsistent return type on resample of empty DataFrame (:issue:`14962`)
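
For context, a minimal reproduction of the inconsistency this entry refers to (GH14962), adapted from the test that this commit replaces below; the pre-fix return type is as described in the issue report, so treat the comments as a sketch rather than a guarantee:

    import pandas as pd

    df = pd.DataFrame(dict(a=range(100)),
                      index=pd.date_range('1/1/2000', periods=100, freq='M'))
    print(type(df.resample('Q').size()))      # Series for a non-empty frame

    empty = df[df.a < 0]                      # empty slice, same index dtype
    print(type(empty.resample('Q').size()))   # not a Series before this fix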

pandas/tests/tseries/test_resample.py

+17 -8

@@ -757,8 +757,15 @@ def test_resample_empty_series(self):
                         freq in ['M', 'D']):
                     # GH12871 - TODO: name should propagate, but currently
                     # doesn't on lower / same frequency with PeriodIndex
+<<<<<<< HEAD
                     assert_series_equal(result, expected, check_dtype=False)
 
+=======
+                    assert_series_equal(result, expected, check_dtype=False,
+                                        check_names=False)
+                    # this assert will break when fixed
+                    # self.assertTrue(result.name is None)
+>>>>>>> BUG: added series type to wrap_result for empty DataFrame
                 else:
                     assert_series_equal(result, expected, check_dtype=False)
 
@@ -769,15 +776,15 @@ def test_resample_empty_dataframe(self):
 
         for freq in ['M', 'D', 'H']:
             # count retains dimensions too
-            methods = downsample_methods + ['count']
+            methods = downsample_methods + upsample_methods
             for method in methods:
                 result = getattr(f.resample(freq), method)()
 
-                expected = f.copy()
+                expected = pd.Series([])
                 expected.index = f.index._shallow_copy(freq=freq)
                 assert_index_equal(result.index, expected.index)
                 self.assertEqual(result.index.freq, expected.index.freq)
-                assert_frame_equal(result, expected, check_dtype=False)
+                assert_series_equal(result, expected, check_dtype=False)
 
             # test size for GH13212 (currently stays as df)
 
@@ -836,11 +843,13 @@ def test_resample_loffset_arg_type(self):
 
     def test_resample_empty_dataframe_with_size(self):
         # GH 14962
-        df1 = pd.DataFrame(dict(a=range(100)),
-                           index=pd.date_range('1/1/2000', periods=100, freq="M"))
-        df2 = df1[df1.a < 0]
-        result = df2.resample("Q").size()
-        assertIsInstance(result, pd.Series)
+        index = pd.DatetimeIndex([], freq='M')
+        df = pd.DataFrame([], index=index)
+
+        for freq in ['M', 'D', 'H']:
+            result = df.resample(freq).size()
+            expected = pd.Series([], index=index, dtype='int64')
+            assert_series_equal(result, expected)
 
 
 class TestDatetimeIndex(Base, tm.TestCase):
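
Taken together, the test changes encode the behaviour this patch is aiming for: any resample aggregation on an empty DataFrame should come back as an empty int64 Series on the resampled index. A standalone sketch of what the new test_resample_empty_dataframe_with_size asserts, assuming a build that includes this patch (only public API plus pandas.util.testing is used):

    import pandas as pd
    from pandas.util.testing import assert_series_equal

    index = pd.DatetimeIndex([], freq='M')
    df = pd.DataFrame([], index=index)

    for freq in ['M', 'D', 'H']:
        result = df.resample(freq).size()
        expected = pd.Series([], index=index, dtype='int64')
        assert_series_equal(result, expected)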

pandas/tseries/resample.py

+10 -5

@@ -700,12 +700,8 @@ def _downsample(self, how, **kwargs):
         if not len(ax):
             # reset to the new freq
             obj = obj.copy()
-            if how == "size" and isinstance(obj, pd.DataFrame):
-                obj = obj.groupby(
-                    self.grouper, axis=self.axis).aggregate(how, **kwargs)
-
             obj.index.freq = self.freq
-            return obj
+            return self._wrap_result(obj)
 
         # do we have a regular frequency
         if ax.freq is not None or ax.inferred_freq is not None:
@@ -776,6 +772,15 @@ def _wrap_result(self, result):
         # convert if needed
         if self.kind == 'period' and not isinstance(result.index, PeriodIndex):
             result.index = result.index.to_period(self.freq)
+
+        # Make consistent type of result. GH14962
+        if not len(self.ax):
+            grouper = BinGrouper([], result.index)
+            grouped = self._selected_obj.groupby(grouper)
+            result = pd.Series([],
+                               index=result.index,
+                               name=grouped.name,
+                               dtype='int64')
         return result
 
 
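
The two hunks work together: with an empty axis, _downsample now routes its early return through _wrap_result, and _wrap_result gains a branch that replaces whatever the aggregation produced with an empty int64 Series built on the already-converted result index. The following is a conceptual stand-in for that branch, not pandas internals; coerce_empty_result is an illustrative name, and the BinGrouper/_selected_obj details shown in the diff are deliberately omitted:

    import pandas as pd

    def coerce_empty_result(result, name=None):
        # Mirror of the idea behind the new branch: when the resampled axis
        # is empty, normalise the output to an empty int64 Series on the
        # result's (possibly period-converted) index.
        return pd.Series([], index=result.index, name=name, dtype='int64')

    # e.g. an empty DataFrame produced from an empty axis becomes a Series
    empty_frame = pd.DataFrame([], index=pd.DatetimeIndex([], freq='Q'))
    print(coerce_empty_result(empty_frame))   # Series([], dtype: int64)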