
Commit 9c37226

jbrockmendel authored and jorisvandenbossche committed
CLN: rename reduce --> compute_reduction (#27706)
1 parent d330416 commit 9c37226

14 files changed: +32 -45 lines

pandas/_libs/reduction.pyx  (+1, -1)

@@ -628,7 +628,7 @@ cdef class BlockSlider:
         arr.shape[1] = 0


-def reduce(arr, f, axis=0, dummy=None, labels=None):
+def compute_reduction(arr, f, axis=0, dummy=None, labels=None):
    """

    Parameters
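The Cython entry point keeps the same signature, so call sites only swap the name. A minimal sketch of a direct call (internal API, assuming a build that includes this commit), mirroring the updated tests further down:

```python
import numpy as np
from pandas import Index
from pandas._libs import reduction

arr = np.random.randn(100, 4)
# sum over axis 0, with an Index labelling the four resulting values
result = reduction.compute_reduction(arr, np.sum, labels=Index(np.arange(4)))
assert np.allclose(result, arr.sum(0))
```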

pandas/_libs/tslibs/timedeltas.pyx  (+2, -1)

@@ -1280,7 +1280,8 @@ class Timedelta(_Timedelta):
        else:
            raise ValueError(
                "Value must be Timedelta, string, integer, "
-               "float, timedelta or convertible")
+               "float, timedelta or convertible, not {typ}"
+               .format(typ=type(value).__name__))

        if is_timedelta64_object(value):
            value = value.view('i8')
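With the added `.format(...)`, the error now names the offending type. A hedged illustration of the resulting behaviour (using a list to trigger the branch is an assumption; the message text comes from the hunk above):

```python
import pandas as pd

try:
    pd.Timedelta([1, 2, 3])  # a list is not a convertible value
except ValueError as err:
    # Expected to read roughly:
    # "Value must be Timedelta, string, integer, float, timedelta or convertible, not list"
    print(err)
```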

pandas/core/apply.py  (+2, -2)

@@ -221,7 +221,7 @@ def apply_raw(self):
        """ apply to the values as a numpy array """

        try:
-           result = reduction.reduce(self.values, self.f, axis=self.axis)
+           result = reduction.compute_reduction(self.values, self.f, axis=self.axis)
        except Exception:
            result = np.apply_along_axis(self.f, self.axis, self.values)

@@ -281,7 +281,7 @@ def apply_standard(self):
        dummy = Series(empty_arr, index=index, dtype=values.dtype)

        try:
-           result = reduction.reduce(
+           result = reduction.compute_reduction(
                values, self.f, axis=self.axis, dummy=dummy, labels=labels
            )
            return self.obj._constructor_sliced(result, index=labels)
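Both call sites keep the existing structure: try the Cython reducer first and fall back to a plain NumPy apply if it raises. A simplified, self-contained sketch of that pattern (the helper name `apply_raw_sketch` is illustrative, not pandas API):

```python
import numpy as np
from pandas._libs import reduction

def apply_raw_sketch(values, func, axis=0):
    try:
        # fast path: the Cython reducer drives the loop
        return reduction.compute_reduction(values, func, axis=axis)
    except Exception:
        # fallback: apply the function row/column-wise in NumPy
        return np.apply_along_axis(func, axis, values)

print(apply_raw_sketch(np.arange(12.0).reshape(3, 4), np.sum, axis=1))  # [ 6. 22. 38.]
```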

pandas/core/arrays/categorical.py  (+1, -1)

@@ -2703,7 +2703,7 @@ def _convert_to_list_like(list_like):
    elif is_scalar(list_like):
        return [list_like]
    else:
-       # is this reached?
+       # TODO: is this reached?
        return [list_like]

pandas/core/arrays/datetimelike.py  (-11)

@@ -57,21 +57,10 @@
 class AttributesMixin:
     _data = None  # type: np.ndarray

-    @property
-    def _attributes(self):
-        # Inheriting subclass should implement _attributes as a list of strings
-        raise AbstractMethodError(self)
-
     @classmethod
     def _simple_new(cls, values, **kwargs):
         raise AbstractMethodError(cls)

-    def _get_attributes_dict(self):
-        """
-        return an attributes dict for my class
-        """
-        return {k: getattr(self, k, None) for k in self._attributes}
-
     @property
     def _scalar_type(self) -> Type[DatetimeLikeScalar]:
         """The scalar associated with this datelike

pandas/core/arrays/datetimes.py  (-1)

@@ -328,7 +328,6 @@ class DatetimeArray(dtl.DatetimeLikeArrayMixin, dtl.TimelikeOps, dtl.DatelikeOps
     # -----------------------------------------------------------------
     # Constructors

-    _attributes = ["freq", "tz"]
     _dtype = None  # type: Union[np.dtype, DatetimeTZDtype]
     _freq = None

pandas/core/arrays/period.py  (-1)

@@ -161,7 +161,6 @@ class PeriodArray(dtl.DatetimeLikeArrayMixin, dtl.DatelikeOps):

     # array priority higher than numpy scalars
     __array_priority__ = 1000
-    _attributes = ["freq"]
     _typ = "periodarray"  # ABCPeriodArray
     _scalar_type = Period

pandas/core/arrays/timedeltas.py  (-1)

@@ -199,7 +199,6 @@ def dtype(self):

     # ----------------------------------------------------------------
     # Constructors
-    _attributes = ["freq"]

     def __init__(self, values, dtype=_TD_DTYPE, freq=None, copy=False):
         if isinstance(values, (ABCSeries, ABCIndexClass)):

pandas/core/generic.py  (+2, -4)

@@ -3556,7 +3556,7 @@ def _iget_item_cache(self, item):
    def _box_item_values(self, key, values):
        raise AbstractMethodError(self)

-   def _slice(self, slobj, axis=0, kind=None):
+   def _slice(self, slobj: slice, axis=0, kind=None):
        """
        Construct a slice of this container.

@@ -6183,8 +6183,6 @@ def fillna(
            axis = 0
        axis = self._get_axis_number(axis)

-       from pandas import DataFrame
-
        if value is None:

            if self._is_mixed_type and axis == 1:

@@ -6247,7 +6245,7 @@ def fillna(
            new_data = self._data.fillna(
                value=value, limit=limit, inplace=inplace, downcast=downcast
            )
-       elif isinstance(value, DataFrame) and self.ndim == 2:
+       elif isinstance(value, ABCDataFrame) and self.ndim == 2:
            new_data = self.where(self.notna(), value)
        else:
            raise ValueError("invalid fill value with a %s" % type(value))
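Checking against `ABCDataFrame` removes the need for the local `from pandas import DataFrame` while keeping the behaviour of that branch: a frame's NaNs can be filled from another frame. A small example of that public behaviour:

```python
import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1.0, np.nan], "b": [np.nan, 4.0]})
other = pd.DataFrame({"a": [10.0, 20.0], "b": [30.0, 40.0]})

# NaNs in df are filled from the matching labels in `other`
print(df.fillna(other))
#       a     b
# 0   1.0  30.0
# 1  20.0   4.0
```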

pandas/core/groupby/groupby.py  (+6, -4)

@@ -29,14 +29,16 @@ class providing the base-class of operations.
 from pandas.core.dtypes.cast import maybe_downcast_to_dtype
 from pandas.core.dtypes.common import (
     ensure_float,
+    is_datetime64_dtype,
     is_datetime64tz_dtype,
     is_extension_array_dtype,
+    is_integer_dtype,
     is_numeric_dtype,
+    is_object_dtype,
     is_scalar,
 )
 from pandas.core.dtypes.missing import isna, notna

-from pandas.api.types import is_datetime64_dtype, is_integer_dtype, is_object_dtype
 import pandas.core.algorithms as algorithms
 from pandas.core.arrays import Categorical
 from pandas.core.base import (

@@ -343,7 +345,7 @@ class _GroupBy(PandasObject, SelectionMixin):

     def __init__(
         self,
-        obj,
+        obj: NDFrame,
         keys=None,
         axis=0,
         level=None,

@@ -360,8 +362,8 @@ def __init__(

         self._selection = selection

-        if isinstance(obj, NDFrame):
-            obj._consolidate_inplace()
+        assert isinstance(obj, NDFrame), type(obj)
+        obj._consolidate_inplace()

         self.level = level
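Replacing the `isinstance` guard with an `assert` turns a silent skip into an explicit invariant: `_GroupBy.__init__` should only ever receive an `NDFrame`, which the public entry points already guarantee, e.g.:

```python
import pandas as pd

df = pd.DataFrame({"key": ["a", "a", "b"], "val": [1, 2, 3]})

# df.groupby(...) hands the DataFrame itself (an NDFrame) to the
# groupby machinery, so the new assert holds on this path.
print(df.groupby("key")["val"].sum())
```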

pandas/core/groupby/grouper.py  (+2, -1)

@@ -25,6 +25,7 @@
 from pandas.core.arrays import Categorical, ExtensionArray
 import pandas.core.common as com
 from pandas.core.frame import DataFrame
+from pandas.core.generic import NDFrame
 from pandas.core.groupby.categorical import recode_for_groupby, recode_from_groupby
 from pandas.core.groupby.ops import BaseGrouper
 from pandas.core.index import CategoricalIndex, Index, MultiIndex

@@ -423,7 +424,7 @@ def groups(self):


 def _get_grouper(
-    obj,
+    obj: NDFrame,
     key=None,
     axis=0,
     level=None,

pandas/core/groupby/ops.py  (+2, -2)

@@ -906,7 +906,7 @@ def _get_sorted_data(self):
         return self.data.take(self.sort_idx, axis=self.axis)

     def _chop(self, sdata, slice_obj):
-        return sdata.iloc[slice_obj]
+        raise AbstractMethodError(self)

     def apply(self, f):
         raise AbstractMethodError(self)

@@ -933,7 +933,7 @@ def _chop(self, sdata, slice_obj):
         if self.axis == 0:
             return sdata.iloc[slice_obj]
         else:
-            return sdata._slice(slice_obj, axis=1)  # .loc[:, slice_obj]
+            return sdata._slice(slice_obj, axis=1)


 def get_splitter(data, *args, **kwargs):
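The base splitter's `_chop` no longer assumes positional slicing; each concrete splitter now spells out its own slicing and the base class just raises. A minimal sketch of the pattern (class names here are illustrative, not the ones in `ops.py`):

```python
from pandas.errors import AbstractMethodError

class BaseSplitterSketch:
    def _chop(self, sdata, slice_obj):
        # concrete splitters must define how to slice their data
        raise AbstractMethodError(self)

class SeriesSplitterSketch(BaseSplitterSketch):
    def _chop(self, sdata, slice_obj):
        return sdata.iloc[slice_obj]
```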

pandas/core/ops/__init__.py  (+4, -3)

@@ -37,6 +37,7 @@
 from pandas.core.dtypes.generic import (
     ABCDataFrame,
     ABCDatetimeArray,
+    ABCDatetimeIndex,
     ABCIndex,
     ABCIndexClass,
     ABCSeries,

@@ -47,7 +48,7 @@

 import pandas as pd
 from pandas._typing import ArrayLike
-import pandas.core.common as com
+from pandas.core.construction import extract_array

 from . import missing
 from .docstrings import (

@@ -1022,7 +1023,7 @@ def wrapper(left, right):
         # does inference in the case where `result` has object-dtype.
         return construct_result(left, result, index=left.index, name=res_name)

-    elif isinstance(right, (ABCDatetimeArray, pd.DatetimeIndex)):
+    elif isinstance(right, (ABCDatetimeArray, ABCDatetimeIndex)):
         result = op(left._values, right)
         return construct_result(left, result, index=left.index, name=res_name)

@@ -1194,7 +1195,7 @@ def wrapper(self, other, axis=None):
         )

         # always return a full value series here
-        res_values = com.values_from_object(res)
+        res_values = extract_array(res, extract_numpy=True)
         return self._constructor(
             res_values, index=self.index, name=res_name, dtype="bool"
         )
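`extract_array(..., extract_numpy=True)` unwraps a Series down to its backing NumPy array, which is what the comparison wrapper needs before rebuilding the boolean result. A short sketch of the helper's behaviour (internal API, so the import path is not guaranteed to be stable):

```python
import pandas as pd
from pandas.core.construction import extract_array

s = pd.Series([True, False, True], index=["x", "y", "z"])
values = extract_array(s, extract_numpy=True)

print(type(values))  # <class 'numpy.ndarray'> for a NumPy-backed Series
print(values)        # [ True False  True]
```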

pandas/tests/groupby/test_bin_groupby.py  (+10, -12)

@@ -6,15 +6,13 @@

 from pandas.core.dtypes.common import ensure_int64

-from pandas import Index, isna
+from pandas import Index, Series, isna
 from pandas.core.groupby.ops import generate_bins_generic
 import pandas.util.testing as tm
 from pandas.util.testing import assert_almost_equal


 def test_series_grouper():
-    from pandas import Series
-
     obj = Series(np.random.randn(10))
     dummy = obj[:0]

@@ -31,8 +29,6 @@ def test_series_grouper():


 def test_series_bin_grouper():
-    from pandas import Series
-
     obj = Series(np.random.randn(10))
     dummy = obj[:0]

@@ -123,30 +119,32 @@ class TestMoments:

 class TestReducer:
     def test_int_index(self):
-        from pandas.core.series import Series
-
         arr = np.random.randn(100, 4)
-        result = reduction.reduce(arr, np.sum, labels=Index(np.arange(4)))
+        result = reduction.compute_reduction(arr, np.sum, labels=Index(np.arange(4)))
         expected = arr.sum(0)
         assert_almost_equal(result, expected)

-        result = reduction.reduce(arr, np.sum, axis=1, labels=Index(np.arange(100)))
+        result = reduction.compute_reduction(
+            arr, np.sum, axis=1, labels=Index(np.arange(100))
+        )
         expected = arr.sum(1)
         assert_almost_equal(result, expected)

         dummy = Series(0.0, index=np.arange(100))
-        result = reduction.reduce(arr, np.sum, dummy=dummy, labels=Index(np.arange(4)))
+        result = reduction.compute_reduction(
+            arr, np.sum, dummy=dummy, labels=Index(np.arange(4))
+        )
         expected = arr.sum(0)
         assert_almost_equal(result, expected)

         dummy = Series(0.0, index=np.arange(4))
-        result = reduction.reduce(
+        result = reduction.compute_reduction(
             arr, np.sum, axis=1, dummy=dummy, labels=Index(np.arange(100))
         )
         expected = arr.sum(1)
         assert_almost_equal(result, expected)

-        result = reduction.reduce(
+        result = reduction.compute_reduction(
             arr, np.sum, axis=1, dummy=dummy, labels=Index(np.arange(100))
         )
         assert_almost_equal(result, expected)
