
Remove convert_dtype from Series.apply #57369

Merged (1 commit) on Feb 12, 2024
2 changes: 2 additions & 0 deletions doc/source/whatsnew/v3.0.0.rst
@@ -117,6 +117,7 @@ Removal of prior version deprecations/changes
- Removed ``Series.view`` (:issue:`56054`)
- Removed ``axis`` argument from :meth:`DataFrame.groupby`, :meth:`Series.groupby`, :meth:`DataFrame.rolling`, :meth:`Series.rolling`, :meth:`DataFrame.resample`, and :meth:`Series.resample` (:issue:`51203`)
- Removed ``axis`` argument from all groupby operations (:issue:`50405`)
- Removed ``convert_dtype`` from :meth:`Series.apply` (:issue:`52257`)
- Removed ``pandas.api.types.is_interval`` and ``pandas.api.types.is_period``, use ``isinstance(obj, pd.Interval)`` and ``isinstance(obj, pd.Period)`` instead (:issue:`55264`)
- Removed ``pandas.io.sql.execute`` (:issue:`50185`)
- Removed ``pandas.value_counts``, use :meth:`Series.value_counts` instead (:issue:`53493`)
@@ -129,6 +130,7 @@ Removal of prior version deprecations/changes
- Removed unused arguments ``*args`` and ``**kwargs`` in :class:`Resampler` methods (:issue:`50977`)
- Unrecognized timezones when parsing strings to datetimes now raises a ``ValueError`` (:issue:`51477`)


.. ---------------------------------------------------------------------------
.. _whatsnew_300.performance:

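For readers migrating code, here is a minimal sketch (not part of this diff) of the replacement spelling the deprecation message pointed to; ``clip_small`` and the sample data are illustrative only:

```python
import numpy as np
import pandas as pd

ser = pd.Series([1.5, np.nan, 3.0])

def clip_small(x):
    # illustrative mapper: keep values above 2, send the rest to NaN
    return x if x > 2 else np.nan

# pandas < 3.0 (deprecated since 2.1):
#     ser.apply(clip_small, convert_dtype=False)
# pandas >= 3.0: cast to object first, as the deprecation message suggested
result = ser.astype(object).apply(clip_small)
```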
10 changes: 2 additions & 8 deletions pandas/core/algorithms.py
@@ -1635,7 +1635,6 @@ def map_array(
arr: ArrayLike,
mapper,
na_action: Literal["ignore"] | None = None,
convert: bool = True,
) -> np.ndarray | ExtensionArray | Index:
"""
Map values using an input mapping or function.
@@ -1647,9 +1646,6 @@
na_action : {None, 'ignore'}, default None
If 'ignore', propagate NA values, without passing them to the
mapping correspondence.
convert : bool, default True
Try to find better dtype for elementwise function results. If
False, leave as dtype=object.

Returns
-------
@@ -1707,8 +1703,6 @@
# we must convert to python types
values = arr.astype(object, copy=False)
if na_action is None:
return lib.map_infer(values, mapper, convert=convert)
Member: Can convert be removed from map_infer now?

Member Author: No, we are setting it to False in other use cases.

return lib.map_infer(values, mapper)
else:
return lib.map_infer_mask(
values, mapper, mask=isna(values).view(np.uint8), convert=convert
)
return lib.map_infer_mask(values, mapper, mask=isna(values).view(np.uint8))
Member: Can convert be removed from map_infer_mask now?

Member Author: Same as above.

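For context (not part of the diff), the two remaining branches in ``map_array`` correspond to the public ``na_action`` behavior of ``Series.map``; a small sketch with illustrative data and a made-up ``loud_double`` helper:

```python
import numpy as np
import pandas as pd

ser = pd.Series([1.0, np.nan, 3.0])

def loud_double(x):
    print("mapper called with", x)
    return x * 2

# na_action=None: every element, including NaN, reaches the mapper
# (the lib.map_infer branch)
ser.map(loud_double)

# na_action="ignore": NaN is propagated without calling the mapper
# (the lib.map_infer_mask branch)
ser.map(loud_double, na_action="ignore")
```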
18 changes: 1 addition & 17 deletions pandas/core/apply.py
@@ -16,7 +16,6 @@

import numpy as np

from pandas._libs import lib
from pandas._libs.internals import BlockValuesRefs
from pandas._typing import (
AggFuncType,
@@ -1376,23 +1375,10 @@ def __init__(
obj: Series,
func: AggFuncType,
*,
convert_dtype: bool | lib.NoDefault = lib.no_default,
by_row: Literal[False, "compat", "_compat"] = "compat",
args,
kwargs,
) -> None:
if convert_dtype is lib.no_default:
convert_dtype = True
else:
warnings.warn(
"the convert_dtype parameter is deprecated and will be removed in a "
"future version. Do ``ser.astype(object).apply()`` "
"instead if you want ``convert_dtype=False``.",
FutureWarning,
stacklevel=find_stack_level(),
)
self.convert_dtype = convert_dtype

super().__init__(
obj,
func,
@@ -1486,9 +1472,7 @@ def curried(x):
# TODO: remove the `na_action="ignore"` when that default has been changed in
# Categorical (GH51645).
action = "ignore" if isinstance(obj.dtype, CategoricalDtype) else None
mapped = obj._map_values(
mapper=curried, na_action=action, convert=self.convert_dtype
)
mapped = obj._map_values(mapper=curried, na_action=action)

if len(mapped) and isinstance(mapped[0], ABCSeries):
# GH#43986 Need to do list(mapped) in order to get treated as nested
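A brief illustration (not part of the diff) of the user-visible behavior that the ``na_action="ignore"`` special case above preserves for categorical input; ``label`` and the data are made up:

```python
import pandas as pd

cat_ser = pd.Series(["a", None, "b"], dtype="category")

def label(value):
    # because apply uses na_action="ignore" for categorical dtype,
    # this mapper never receives the missing entry
    return f"<{value}>"

result = cat_ser.apply(label)
```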
8 changes: 2 additions & 6 deletions pandas/core/base.py
@@ -896,7 +896,7 @@ def hasnans(self) -> bool:
return bool(isna(self).any()) # type: ignore[union-attr]

@final
def _map_values(self, mapper, na_action=None, convert: bool = True):
def _map_values(self, mapper, na_action=None):
"""
An internal function that maps values using the input
correspondence (which can be a dict, Series, or function).
@@ -908,10 +908,6 @@ def _map_values(self, mapper, na_action=None, convert: bool = True):
na_action : {None, 'ignore'}
If 'ignore', propagate NA values, without passing them to the
mapping function
convert : bool, default True
Try to find better dtype for elementwise function results. If
False, leave as dtype=object. Note that the dtype is always
preserved for some extension array dtypes, such as Categorical.

Returns
-------
@@ -925,7 +921,7 @@ def _map_values(self, mapper, na_action=None, convert: bool = True):
if isinstance(arr, ExtensionArray):
return arr.map(mapper, na_action=na_action)

return algorithms.map_array(arr, mapper, na_action=na_action, convert=convert)
return algorithms.map_array(arr, mapper, na_action=na_action)

@final
def value_counts(
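For context (not part of the diff), ``_map_values`` is the shared helper behind ``Series.map`` and ``Series.apply``; a sketch of the two dispatch paths it chooses between, using illustrative data:

```python
import pandas as pd

# NumPy-backed data falls through to algorithms.map_array
pd.Series([1, 2, 3]).map(lambda x: x + 1)

# extension dtypes are handled by the array's own ExtensionArray.map
pd.Series([1, 2, 3], dtype="Int64").map(lambda x: x + 1)
```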
10 changes: 0 additions & 10 deletions pandas/core/series.py
@@ -4481,7 +4481,6 @@ def transform(
def apply(
self,
func: AggFuncType,
convert_dtype: bool | lib.NoDefault = lib.no_default,
args: tuple[Any, ...] = (),
*,
by_row: Literal[False, "compat"] = "compat",
@@ -4497,14 +4496,6 @@ def apply(
----------
func : function
Python function or NumPy ufunc to apply.
convert_dtype : bool, default True
Try to find better dtype for elementwise function results. If
False, leave as dtype=object. Note that the dtype is always
preserved for some extension array dtypes, such as Categorical.

.. deprecated:: 2.1.0
``convert_dtype`` has been deprecated. Do ``ser.astype(object).apply()``
instead if you want ``convert_dtype=False``.
args : tuple
Positional arguments passed to func after the series value.
by_row : False or "compat", default "compat"
@@ -4608,7 +4599,6 @@ def apply(
return SeriesApply(
self,
func,
convert_dtype=convert_dtype,
by_row=by_row,
args=args,
kwargs=kwargs,
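After this change ``Series.apply`` keeps ``func``, ``args``, ``by_row``, and ``**kwargs``; a short usage sketch (not from the diff), with ``subtract_custom_value`` and the data being illustrative:

```python
import pandas as pd

ser = pd.Series([1, 2, 3])

def subtract_custom_value(x, custom_value):
    return x - custom_value

# extra positional arguments for func go through ``args``
ser.apply(subtract_custom_value, args=(5,))

# keyword arguments are forwarded to func as well
ser.apply(subtract_custom_value, custom_value=5)

# by_row=False hands the whole Series to func in one call
ser.apply(lambda s: s * 2, by_row=False)
```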
11 changes: 0 additions & 11 deletions pandas/tests/apply/test_series_apply.py
@@ -75,17 +75,6 @@ def f(x):
tm.assert_series_equal(result, expected)


@pytest.mark.parametrize("convert_dtype", [True, False])
def test_apply_convert_dtype_deprecated(convert_dtype):
ser = Series(np.random.default_rng(2).standard_normal(10))

def func(x):
return x if x > 0 else np.nan

with tm.assert_produces_warning(FutureWarning):
ser.apply(func, convert_dtype=convert_dtype, by_row="compat")


def test_apply_args():
s = Series(["foo,bar"])

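Since the deprecation test above is simply deleted, here is a hedged sketch of what a follow-up check could look like (hypothetical test, not in this PR): with the keyword gone from the signature, ``convert_dtype`` is forwarded to ``func`` via ``**kwargs`` and the call fails inside the mapper.

```python
import numpy as np
import pytest

from pandas import Series


def test_apply_convert_dtype_removed():
    # hypothetical check, not part of this PR: convert_dtype is no longer a
    # Series.apply parameter, so it is passed through to ``func`` and rejected there
    ser = Series(np.random.default_rng(2).standard_normal(10))

    def func(x):
        return x if x > 0 else np.nan

    with pytest.raises(TypeError, match="unexpected keyword argument"):
        ser.apply(func, convert_dtype=True)
```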