BUG: Fix #10355, std() groupby calculation #26229

Status: Closed (wants to merge 17 commits)

doc/source/whatsnew/v0.25.0.rst (1 addition, 0 deletions)

@@ -442,6 +442,7 @@ Groupby/Resample/Rolling
 - Bug in :meth:`pandas.core.groupby.GroupBy.idxmax` and :meth:`pandas.core.groupby.GroupBy.idxmin` with datetime column would return incorrect dtype (:issue:`25444`, :issue:`15306`)
 - Bug in :meth:`pandas.core.groupby.GroupBy.cumsum`, :meth:`pandas.core.groupby.GroupBy.cumprod`, :meth:`pandas.core.groupby.GroupBy.cummin` and :meth:`pandas.core.groupby.GroupBy.cummax` with categorical column having absent categories, would return incorrect result or segfault (:issue:`16771`)
 - Bug in :meth:`pandas.core.groupby.GroupBy.nth` where NA values in the grouping would return incorrect results (:issue:`26011`)
+- Bug in :meth:`pandas.core.groupby.GroupBy.std` that computed standard deviation without respecting groupby context when ``as_index=False`` (:issue:`10355`)
 - Bug in :meth:`pandas.core.groupby.SeriesGroupBy.transform` where transforming an empty group would raise error (:issue:`26208`)


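For orientation, a minimal sketch of the bug this entry records (assumed data; the original reproducer in GH 10355 may differ): the old implementation returned np.sqrt(self.var(...)), and with as_index=False the grouping column is part of var()'s output, so the square root was applied to the group labels as well.

import pandas as pd

# Assumed data, for illustration only (not the GH 10355 reproducer).
df = pd.DataFrame({'A': [1, 1, 4, 4], 'B': [1.0, 2.0, 4.0, 8.0]})

result = df.groupby('A', as_index=False).std()
# Before the fix: std() returned np.sqrt(self.var(...)), and with
# as_index=False the group keys are ordinary columns of var()'s
# result, so 'A' came back square-rooted (1 -> 1.0, 4 -> 2.0).
# After the fix: 'A' keeps the labels 1 and 4, and only 'B' holds
# per-group standard deviations.
print(result)
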
pandas/_libs/reduction.pyx (9 additions, 6 deletions)

@@ -238,12 +238,15 @@ cdef class SeriesBinGrouper:
         counts = np.zeros(self.ngroups, dtype=np.int64)

         if self.ngroups > 0:
-            counts[0] = self.bins[0]
-            for i in range(1, self.ngroups):
-                if i == self.ngroups - 1:
-                    counts[i] = len(self.arr) - self.bins[i - 1]
-                else:
-                    counts[i] = self.bins[i] - self.bins[i - 1]
+            if len(self.bins) == 0:
+                return np.empty(0, dtype='O'), counts
+            else:
+                counts[0] = self.bins[0]
+                for i in range(1, self.ngroups):
+                    if i == self.ngroups - 1:
+                        counts[i] = len(self.arr) - self.bins[i - 1]
+                    else:
+                        counts[i] = self.bins[i] - self.bins[i - 1]

         group_size = 0
         n = len(self.arr)
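
A pure-Python sketch of the counts logic above (names are illustrative, not the Cython class's API): bins holds the end offset of each group except the last, so group sizes are the successive differences, the final group runs to the end of the array, and the new guard avoids indexing into an empty bins array.

import numpy as np

def bin_counts(bins, n_values, ngroups):
    # bins[i] is the end offset of group i; sizes are successive
    # differences, and the last group extends to the end of the data.
    counts = np.zeros(ngroups, dtype=np.int64)
    if ngroups > 0:
        if len(bins) == 0:  # the newly guarded edge case
            return counts
        counts[0] = bins[0]
        for i in range(1, ngroups):
            if i == ngroups - 1:
                counts[i] = n_values - bins[i - 1]
            else:
                counts[i] = bins[i] - bins[i - 1]
    return counts

# e.g. 10 values split at offsets [3, 7] into 3 groups -> sizes [3, 4, 3]
print(bin_counts(np.array([3, 7]), 10, 3))
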
pandas/core/groupby/groupby.py (14 additions, 10 deletions)

@@ -1207,10 +1207,16 @@ def std(self, ddof=1, *args, **kwargs):
         Series or DataFrame
             Standard deviation of values within each group.
         """
-
-        # TODO: implement at Cython level?
         nv.validate_groupby_func('std', args, kwargs)
-        return np.sqrt(self.var(ddof=ddof, **kwargs))
+        if ddof == 1:
+            try:
+                return self._cython_agg_general('std', **kwargs)
+            except Exception:
+                pass
+
+        f = lambda x: x.std(ddof=ddof, **kwargs)
+        with _group_selection_context(self):
+            return self._python_agg_general(f)

     @Substitution(name='groupby')
     @Appender(_common_see_also)
@@ -1235,13 +1241,11 @@ def var(self, ddof=1, *args, **kwargs):
             try:
                 return self._cython_agg_general('var', **kwargs)
             except Exception:
-                f = lambda x: x.var(ddof=ddof, **kwargs)
-                with _group_selection_context(self):
-                    return self._python_agg_general(f)
-        else:
-            f = lambda x: x.var(ddof=ddof, **kwargs)
-            with _group_selection_context(self):
-                return self._python_agg_general(f)
+                pass
+
+        f = lambda x: x.var(ddof=ddof, **kwargs)
+        with _group_selection_context(self):
+            return self._python_agg_general(f)

     @Substitution(name='groupby')
     @Appender(_common_see_also)
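
Both methods now share one shape: try the Cython kernel for the default ddof=1, and otherwise (or if the kernel raises) fall back to applying the pandas method group by group in Python. A small usage sketch of the visible consequence, with assumed data:

import pandas as pd

df = pd.DataFrame({'key': ['a', 'a', 'b', 'b'],
                   'val': [1.0, 2.0, 3.0, 5.0]})
g = df.groupby('key')['val']

fast = g.std()        # ddof=1: eligible for the Cython 'std' kernel
slow = g.std(ddof=0)  # non-default ddof: per-group Python fallback

# The fallback is behaviorally equivalent to applying Series.std
# group by group:
assert slow.equals(g.apply(lambda x: x.std(ddof=0)))
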
pandas/core/groupby/ops.py (4 additions, 0 deletions)

@@ -345,6 +345,10 @@ def get_group_levels(self):
                 'name': 'group_median'
             },
             'var': 'group_var',
+            'std': {
+                'name': 'group_var_bin',
+                'f': lambda func, a: np.sqrt(func(a)),
+            },
             'first': {
                 'name': 'group_nth',
                 'f': lambda func, a, b, c, d, e: func(a, b, c, d, 1, -1)
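
For readers unfamiliar with this table: it maps each groupby op either to a Cython kernel name or to a dict carrying the kernel name plus an optional 'f' wrapper that adapts the kernel call, here taking the square root of the variance kernel's result. A simplified, self-contained sketch of the dispatch pattern (illustrative only; pandas' real kernels take more arguments and fill preallocated output buffers):

import numpy as np

# Toy kernels standing in for the Cython routines.
KERNELS = {'group_var': lambda values: np.var(values, ddof=1)}

# Same shape as the table above: a bare kernel name, or a dict with
# 'name' and an optional 'f' wrapper around the kernel call.
DISPATCH = {
    'var': 'group_var',
    'std': {'name': 'group_var', 'f': lambda func, a: np.sqrt(func(a))},
}

def aggregate(how, values):
    entry = DISPATCH[how]
    if isinstance(entry, dict):
        kernel = KERNELS[entry['name']]
        wrapper = entry.get('f')
        return wrapper(kernel, values) if wrapper else kernel(values)
    return KERNELS[entry](values)

vals = np.array([1.0, 2.0, 3.0, 5.0])
print(aggregate('var', vals))  # ~2.917
print(aggregate('std', vals))  # sqrt of the variance, ~1.708
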
pandas/tests/groupby/test_whitelist.py (25 additions, 13 deletions)

@@ -161,36 +161,48 @@ def raw_frame():
 @pytest.mark.parametrize('op', AGG_FUNCTIONS)
 @pytest.mark.parametrize('level', [0, 1])
 @pytest.mark.parametrize('axis', [0, 1])
 @pytest.mark.parametrize('skipna', [True, False])
 @pytest.mark.parametrize('sort', [True, False])
+@pytest.mark.parametrize('as_index', [True, False])
 def test_regression_whitelist_methods(
         raw_frame, op, level,
-        axis, skipna, sort):
+        axis, skipna, sort, as_index):
     # GH6944
     # GH 17537
     # explicitly test the whitelist methods

+    if not as_index and axis not in [0, 'index']:
+        pytest.skip('as_index=False only valid for axis=0')
+
-    if axis == 0:
+    if axis in [0, 'index']:
         frame = raw_frame
     else:
         frame = raw_frame.T

+    if not isinstance(frame.index, MultiIndex) and (level > 0 or level < -1):
+        pytest.skip('level > 0 or level < -1 only valid with MultiIndex')
+
+    grouped = frame.groupby(level=level, axis=axis, sort=sort,
+                            as_index=as_index)
+
     if op in AGG_FUNCTIONS_WITH_SKIPNA:
-        grouped = frame.groupby(level=level, axis=axis, sort=sort)
         result = getattr(grouped, op)(skipna=skipna)
-        expected = getattr(frame, op)(level=level, axis=axis,
-                                      skipna=skipna)
-        if sort:
-            expected = expected.sort_index(axis=axis, level=level)
-        tm.assert_frame_equal(result, expected)
+        expected = getattr(frame, op)(level=level, axis=axis, skipna=skipna)
     else:
-        grouped = frame.groupby(level=level, axis=axis, sort=sort)
         result = getattr(grouped, op)()
         expected = getattr(frame, op)(level=level, axis=axis)
-        if sort:
-            expected = expected.sort_index(axis=axis, level=level)
-        tm.assert_frame_equal(result, expected)
+
+    if sort:
+        expected = expected.sort_index(axis=axis, level=level)
+
+    if not as_index:
+        expected = expected.reset_index()
+        if level == 0:
+            expected = expected.drop(columns=['first'])
+        if level == 1:
+            expected = expected.drop(columns=['second'])
+
+    tm.assert_frame_equal(result, expected)


 def test_groupby_blacklist(df_letters):
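
As a reminder of what the new as_index leg of the test above checks (a sketch with assumed data, not the raw_frame fixture): with as_index=False the group labels come back as an ordinary column rather than as the index, which is why the test builds its expectation with reset_index() and then drops the unused grouping-level column.

import pandas as pd

df = pd.DataFrame({'key': ['a', 'a', 'b', 'b'],
                   'val': [1.0, 2.0, 3.0, 5.0]})

# as_index=True (default): group labels form the result's index.
indexed = df.groupby('key').std()

# as_index=False: labels become a regular 'key' column, so the same
# expectation can be derived from the indexed result:
flat = df.groupby('key', as_index=False).std()
pd.testing.assert_frame_equal(flat, indexed.reset_index())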