REF: define reductions non-dynamically #52428

Merged (1 commit, Apr 5, 2023)
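
Context for the diff below: before this change, DataFrame reductions such as any, all, sum, mean, min, and max were attached dynamically at class-definition time by the DataFrame._add_numeric_operations() call that this PR deletes at the bottom of frame.py. The PR replaces that mechanism with explicit method definitions that delegate to the shared NDFrame implementations and take their docstrings from make_doc via the @doc decorator, so each reduction now has a concrete, statically visible signature. The sketch below illustrates only the general before/after pattern; the class and helper names are hypothetical and the bodies are stand-ins, not pandas internals.

# Hypothetical sketch of the refactor pattern: dynamically attached reductions
# versus explicitly defined ones. None of these names are pandas code.

# Before: reduction methods are generated in a loop and bolted onto the class,
# so type checkers and IDEs never see a real signature for sum/mean/min/max.
class DynamicFrame:
    def _reduce(self, name: str, skipna: bool = True):
        ...  # stand-in for the shared reduction machinery

    @classmethod
    def _add_reductions(cls) -> None:
        for name in ("sum", "mean", "min", "max"):
            def method(self, skipna: bool = True, _name: str = name):
                return self._reduce(_name, skipna=skipna)
            method.__name__ = name
            setattr(cls, name, method)

DynamicFrame._add_reductions()  # analogous to DataFrame._add_numeric_operations()

# After: each reduction is an ordinary, explicitly defined method that simply
# delegates to the shared implementation, mirroring what this PR does.
class ExplicitFrame:
    def _reduce(self, name: str, skipna: bool = True):
        ...  # stand-in for the shared reduction machinery

    def sum(self, skipna: bool = True):
        return self._reduce("sum", skipna=skipna)

    def mean(self, skipna: bool = True):
        return self._reduce("mean", skipna=skipna)
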
209 changes: 168 additions & 41 deletions pandas/core/frame.py
@@ -140,7 +140,10 @@
sanitize_array,
sanitize_masked_array,
)
from pandas.core.generic import NDFrame
from pandas.core.generic import (
NDFrame,
make_doc,
)
from pandas.core.indexers import check_key_length
from pandas.core.indexes.api import (
DatetimeIndex,
@@ -9592,43 +9595,6 @@ def aggregate(self, func=None, axis: Axis = 0, *args, **kwargs):

agg = aggregate

# error: Signature of "any" incompatible with supertype "NDFrame" [override]
@overload # type: ignore[override]
def any(
self,
*,
axis: Axis = ...,
bool_only: bool | None = ...,
skipna: bool = ...,
level: None = ...,
**kwargs,
) -> Series:
...

@overload
def any(
self,
*,
axis: Axis = ...,
bool_only: bool | None = ...,
skipna: bool = ...,
level: Level,
**kwargs,
) -> DataFrame | Series:
...

# error: Missing return statement
@doc(NDFrame.any, **_shared_doc_kwargs)
def any( # type: ignore[empty-body]
self,
axis: Axis = 0,
bool_only: bool | None = None,
skipna: bool = True,
level: Level = None,
**kwargs,
) -> DataFrame | Series:
...

@doc(
_shared_docs["transform"],
klass=_shared_doc_kwargs["klass"],
@@ -10920,6 +10886,170 @@ def _reduce_axis1(self, name: str, func, skipna: bool) -> Series:
res_ser = self._constructor_sliced(result, index=self.index, copy=False)
return res_ser

@doc(make_doc("any", ndim=2))
# error: Signature of "any" incompatible with supertype "NDFrame"
def any( # type: ignore[override]
self,
*,
axis: Axis = 0,
bool_only=None,
skipna: bool = True,
**kwargs,
) -> Series:
# error: Incompatible return value type (got "Union[Series, bool]",
# expected "Series")
return self._logical_func( # type: ignore[return-value]
"any", nanops.nanany, axis, bool_only, skipna, **kwargs
)

@doc(make_doc("all", ndim=2))
def all(
self,
axis: Axis = 0,
bool_only=None,
skipna: bool = True,
**kwargs,
) -> Series:
# error: Incompatible return value type (got "Union[Series, bool]",
# expected "Series")
return self._logical_func( # type: ignore[return-value]
"all", nanops.nanall, axis, bool_only, skipna, **kwargs
)

@doc(make_doc("min", ndim=2))
def min(
self,
axis: Axis | None = 0,
skipna: bool = True,
numeric_only: bool = False,
**kwargs,
):
return super().min(axis, skipna, numeric_only, **kwargs)

@doc(make_doc("max", ndim=2))
def max(
self,
axis: Axis | None = 0,
skipna: bool = True,
numeric_only: bool = False,
**kwargs,
):
return super().max(axis, skipna, numeric_only, **kwargs)

@doc(make_doc("sum", ndim=2))
def sum(
self,
axis: Axis | None = None,
skipna: bool = True,
numeric_only: bool = False,
min_count: int = 0,
**kwargs,
):
return super().sum(axis, skipna, numeric_only, min_count, **kwargs)

@doc(make_doc("prod", ndim=2))
def prod(
self,
axis: Axis | None = None,
skipna: bool = True,
numeric_only: bool = False,
min_count: int = 0,
**kwargs,
):
return super().prod(axis, skipna, numeric_only, min_count, **kwargs)

@doc(make_doc("mean", ndim=2))
def mean(
self,
axis: Axis | None = 0,
skipna: bool = True,
numeric_only: bool = False,
**kwargs,
):
return super().mean(axis, skipna, numeric_only, **kwargs)

@doc(make_doc("median", ndim=2))
def median(
self,
axis: Axis | None = 0,
skipna: bool = True,
numeric_only: bool = False,
**kwargs,
):
return super().median(axis, skipna, numeric_only, **kwargs)

@doc(make_doc("sem", ndim=2))
def sem(
self,
axis: Axis | None = None,
skipna: bool = True,
ddof: int = 1,
numeric_only: bool = False,
**kwargs,
):
return super().sem(axis, skipna, ddof, numeric_only, **kwargs)

@doc(make_doc("var", ndim=2))
def var(
self,
axis: Axis | None = None,
skipna: bool = True,
ddof: int = 1,
numeric_only: bool = False,
**kwargs,
):
return super().var(axis, skipna, ddof, numeric_only, **kwargs)

@doc(make_doc("std", ndim=2))
def std(
self,
axis: Axis | None = None,
skipna: bool = True,
ddof: int = 1,
numeric_only: bool = False,
**kwargs,
):
return super().std(axis, skipna, ddof, numeric_only, **kwargs)

@doc(make_doc("skew", ndim=2))
def skew(
self,
axis: Axis | None = 0,
skipna: bool = True,
numeric_only: bool = False,
**kwargs,
):
return super().skew(axis, skipna, numeric_only, **kwargs)

@doc(make_doc("kurt", ndim=2))
def kurt(
self,
axis: Axis | None = 0,
skipna: bool = True,
numeric_only: bool = False,
**kwargs,
):
return super().kurt(axis, skipna, numeric_only, **kwargs)

kurtosis = kurt
product = prod

@doc(make_doc("cummin", ndim=2))
def cummin(self, axis: Axis | None = None, skipna: bool = True, *args, **kwargs):
return NDFrame.cummin(self, axis, skipna, *args, **kwargs)

@doc(make_doc("cummax", ndim=2))
def cummax(self, axis: Axis | None = None, skipna: bool = True, *args, **kwargs):
return NDFrame.cummax(self, axis, skipna, *args, **kwargs)

@doc(make_doc("cumsum", ndim=2))
def cumsum(self, axis: Axis | None = None, skipna: bool = True, *args, **kwargs):
return NDFrame.cumsum(self, axis, skipna, *args, **kwargs)

@doc(make_doc("cumprod", 2))
def cumprod(self, axis: Axis | None = None, skipna: bool = True, *args, **kwargs):
return NDFrame.cumprod(self, axis, skipna, *args, **kwargs)

def nunique(self, axis: Axis = 0, dropna: bool = True) -> Series:
"""
Count number of distinct elements in specified axis.
@@ -11724,9 +11854,6 @@ def values(self) -> np.ndarray:
return self._mgr.as_array()


DataFrame._add_numeric_operations()


def _from_nested_dict(data) -> collections.defaultdict:
new_data: collections.defaultdict = collections.defaultdict(dict)
for index, s in data.items():
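
A note on the decorator used throughout the new definitions: make_doc, now imported from pandas.core.generic, builds the reduction docstring for a given method name and dimensionality, and @doc attaches it to the explicit method, so the generated documentation survives the move away from dynamic definition. The sketch below is a simplified, hypothetical re-creation of that docstring-factory pattern, not the actual pandas implementation of make_doc or doc.

# Hypothetical, simplified docstring-factory pattern; the real pandas make_doc
# and doc helpers are considerably more elaborate.
_TEMPLATE = "Return the {name} of the values over the requested axis of a {ndim}D object."

def make_doc(name: str, ndim: int) -> str:
    # Build the docstring text for one reduction.
    return _TEMPLATE.format(name=name, ndim=ndim)

def doc(docstring: str):
    # Attach the generated docstring to the decorated function.
    def decorator(func):
        func.__doc__ = docstring
        return func
    return decorator

class Frame:
    @doc(make_doc("sum", ndim=2))
    def sum(self, skipna: bool = True):
        ...

print(Frame.sum.__doc__)
# -> Return the sum of the values over the requested axis of a 2D object.

The net effect of the PR is unchanged runtime behavior (the new methods still call the NDFrame implementations) with real, statically checkable definitions, which is why the trailing DataFrame._add_numeric_operations() call could be removed.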