Skip to content

Commit 842d34b

Browse files
jbrockmendel, mathurk1, tkmz-n
committed
REF: use BlockManager.apply for Rolling.count (pandas-dev#35883)
* REF: remove unnecessary try/except * TST: add test for agg on ordered categorical cols (pandas-dev#35630) * TST: resample does not yield empty groups (pandas-dev#10603) (pandas-dev#35799) * revert accidental rebase * REF: use BlockManager.apply for Rolling.count Co-authored-by: Karthik Mathur <[email protected]> Co-authored-by: tkmz-n <[email protected]>
1 parent 5eface5 commit 842d34b

File tree

1 file changed

+17
-42
lines changed

1 file changed

+17
-42
lines changed

pandas/core/window/rolling.py

+17-42
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,7 @@
2222

2323
from pandas._libs.tslibs import BaseOffset, to_offset
2424
import pandas._libs.window.aggregations as window_aggregations
25-
from pandas._typing import ArrayLike, Axis, FrameOrSeries, FrameOrSeriesUnion, Label
25+
from pandas._typing import ArrayLike, Axis, FrameOrSeries, FrameOrSeriesUnion
2626
from pandas.compat._optional import import_optional_dependency
2727
from pandas.compat.numpy import function as nv
2828
from pandas.util._decorators import Appender, Substitution, cache_readonly, doc
@@ -44,6 +44,7 @@
4444
ABCSeries,
4545
ABCTimedeltaIndex,
4646
)
47+
from pandas.core.dtypes.missing import notna
4748

4849
from pandas.core.base import DataError, PandasObject, SelectionMixin, ShallowMixin
4950
import pandas.core.common as com
@@ -395,40 +396,6 @@ def _wrap_result(self, result, block=None, obj=None):
395396
return type(obj)(result, index=index, columns=block.columns)
396397
return result
397398

398-
def _wrap_results(self, results, obj, skipped: List[int]) -> FrameOrSeriesUnion:
    """
    Wrap the results.

    Parameters
    ----------
    results : list of ndarrays
        One per-block result array, in block order.
    obj : conformed data (may be resampled)
    skipped: List[int]
        Indices of blocks that are skipped.

    Returns
    -------
    Series or DataFrame
        Series when ``obj`` is 1-dimensional, otherwise a DataFrame with
        the skipped blocks' columns excluded.

    Raises
    ------
    DataError
        If no numeric types remained to aggregate.
    """
    # Local import to avoid a circular dependency at module import time.
    from pandas import Series, concat

    if obj.ndim == 1:
        # A Series has a single block; an empty result list means the
        # (single) dtype could not be aggregated.
        if not results:
            raise DataError("No numeric types to aggregate")
        assert len(results) == 1
        return Series(results[0], index=obj.index, name=obj.name)

    # Collect the column labels belonging to every skipped block so they
    # can be dropped from the output column order.
    exclude: List[Label] = []
    orig_blocks = list(obj._to_dict_of_blocks(copy=False).values())
    for i in skipped:
        exclude.extend(orig_blocks[i].columns)

    # Preserve the original (selected) column order, minus the excluded ones.
    columns = [c for c in self._selected_obj.columns if c not in exclude]
    if not columns and not len(results) and exclude:
        # Everything was skipped: nothing numeric to aggregate.
        raise DataError("No numeric types to aggregate")
    elif not len(results):
        # No results but nothing explicitly excluded — return a float64
        # copy of the input frame unchanged in shape.
        return obj.astype("float64")

    # Stitch the per-block results back together in the original column order.
    df = concat(results, axis=1).reindex(columns=columns, copy=False)
    self._insert_on_column(df, obj)
    return df
431-
432399
def _insert_on_column(self, result: "DataFrame", obj: "DataFrame"):
433400
# if we have an 'on' column we want to put it back into
434401
# the results in the same location
@@ -1325,21 +1292,29 @@ def count(self):
13251292
# implementations shouldn't end up here
13261293
assert not isinstance(self.window, BaseIndexer)
13271294

1328-
blocks, obj = self._create_blocks(self._selected_obj)
1329-
results = []
1330-
for b in blocks:
1331-
result = b.notna().astype(int)
1295+
_, obj = self._create_blocks(self._selected_obj)
1296+
1297+
def hfunc(values: np.ndarray) -> np.ndarray:
1298+
result = notna(values)
1299+
result = result.astype(int)
1300+
frame = type(obj)(result.T)
13321301
result = self._constructor(
1333-
result,
1302+
frame,
13341303
window=self._get_window(),
13351304
min_periods=self.min_periods or 0,
13361305
center=self.center,
13371306
axis=self.axis,
13381307
closed=self.closed,
13391308
).sum()
1340-
results.append(result)
1309+
return result.values.T
13411310

1342-
return self._wrap_results(results, obj, skipped=[])
1311+
new_mgr = obj._mgr.apply(hfunc)
1312+
out = obj._constructor(new_mgr)
1313+
if obj.ndim == 1:
1314+
out.name = obj.name
1315+
else:
1316+
self._insert_on_column(out, obj)
1317+
return out
13431318

13441319
_shared_docs["apply"] = dedent(
13451320
r"""

0 commit comments

Comments
 (0)