Skip to content

DEPR: Index.is_monotonic for Index.is_monotonic_increasing #45422

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
12 commits merged into the base branch
Jan 23, 2022
3 changes: 0 additions & 3 deletions asv_bench/benchmarks/index_cached_properties.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,6 @@ def time_values(self, index_type):
def time_shape(self, index_type):
self.idx.shape

def time_is_monotonic(self, index_type):
self.idx.is_monotonic

def time_is_monotonic_decreasing(self, index_type):
self.idx.is_monotonic_decreasing

Expand Down
2 changes: 1 addition & 1 deletion asv_bench/benchmarks/multiindex_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def time_get_indexer_and_pad(self):
self.mi_int.get_indexer(self.other_mi_many_mismatches, method="pad")

def time_is_monotonic(self):
self.mi_int.is_monotonic
self.mi_int.is_monotonic_increasing


class Duplicated:
Expand Down
1 change: 1 addition & 0 deletions doc/source/whatsnew/v1.5.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,7 @@ Other Deprecations
- Deprecated behavior of :meth:`SparseArray.astype`, :meth:`Series.astype`, and :meth:`DataFrame.astype` with :class:`SparseDtype` when passing a non-sparse ``dtype``. In a future version, this will cast to that non-sparse dtype instead of wrapping it in a :class:`SparseDtype` (:issue:`34457`)
- Deprecated behavior of :meth:`DatetimeIndex.intersection` and :meth:`DatetimeIndex.symmetric_difference` (``union`` behavior was already deprecated in version 1.3.0) with mixed timezones; in a future version both will be cast to UTC instead of object dtype (:issue:`39328`, :issue:`45357`)
- Deprecated :meth:`DataFrame.iteritems`, :meth:`Series.iteritems`, :meth:`HDFStore.iteritems` in favor of :meth:`DataFrame.items`, :meth:`Series.items`, :meth:`HDFStore.items` (:issue:`45321`)
- Deprecated :meth:`Series.is_monotonic` and :meth:`Index.is_monotonic` in favor of :meth:`Series.is_monotonic_increasing` and :meth:`Index.is_monotonic_increasing` (:issue:`45422`, :issue:`21335`)
- Deprecated the ``__array_wrap__`` method of DataFrame and Series, rely on standard numpy ufuncs instead (:issue:`45451`)
-

Expand Down
24 changes: 18 additions & 6 deletions pandas/core/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
final,
overload,
)
import warnings

import numpy as np

Expand All @@ -35,6 +36,7 @@
cache_readonly,
doc,
)
from pandas.util._exceptions import find_stack_level

from pandas.core.dtypes.common import (
is_categorical_dtype,
Expand Down Expand Up @@ -1050,17 +1052,27 @@ def is_monotonic(self) -> bool:
-------
bool
"""
from pandas import Index

return Index(self).is_monotonic
warnings.warn(
"is_monotonic is deprecated and will be removed in a future version. "
"Use is_monotonic_increasing instead.",
FutureWarning,
stacklevel=find_stack_level(),
)
return self.is_monotonic_increasing

@property
def is_monotonic_increasing(self) -> bool:
"""
Alias for is_monotonic.
Return boolean if values in the object are
monotonic_increasing.

Returns
-------
bool
"""
# mypy complains if we alias directly
return self.is_monotonic
from pandas import Index

return Index(self).is_monotonic_increasing

@property
def is_monotonic_decreasing(self) -> bool:
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -7161,7 +7161,7 @@ def asof(self, where, subset=None):
if isinstance(where, str):
where = Timestamp(where)

if not self.index.is_monotonic:
if not self.index.is_monotonic_increasing:
raise ValueError("asof requires a sorted index")

is_series = isinstance(self, ABCSeries)
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/groupby/grouper.py
Original file line number Diff line number Diff line change
Expand Up @@ -400,7 +400,7 @@ def _set_grouper(self, obj: NDFrame, sort: bool = False):
raise ValueError(f"The level {level} is not valid")

# possibly sort
if (self.sort or sort) and not ax.is_monotonic:
if (self.sort or sort) and not ax.is_monotonic_increasing:
# use stable sort to support first, last, nth
# TODO: why does putting na_position="first" fix datetimelike cases?
indexer = self.indexer = ax.array.argsort(
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/groupby/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -823,7 +823,7 @@ def groups(self) -> dict[Hashable, np.ndarray]:
@cache_readonly
def is_monotonic(self) -> bool:
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

deprecate here too?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'll try to address this one in another pass. This appears to be a private-ish version for groupby specifically.

# return if my group orderings are monotonic
return Index(self.group_info[0]).is_monotonic
return Index(self.group_info[0]).is_monotonic_increasing

@cache_readonly
def group_info(self) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp], int]:
Expand Down
24 changes: 17 additions & 7 deletions pandas/core/indexes/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -2172,6 +2172,12 @@ def is_monotonic(self) -> bool:
"""
Alias for is_monotonic_increasing.
"""
warnings.warn(
"is_monotonic is deprecated and will be removed in a future version. "
"Use is_monotonic_increasing instead.",
FutureWarning,
stacklevel=find_stack_level(),
)
return self.is_monotonic_increasing

@property
Expand Down Expand Up @@ -3235,8 +3241,8 @@ def _union(self, other: Index, sort):

if (
sort is None
and self.is_monotonic
and other.is_monotonic
and self.is_monotonic_increasing
and other.is_monotonic_increasing
and not (self.has_duplicates and other.has_duplicates)
and self._can_use_libjoin
):
Expand Down Expand Up @@ -3274,7 +3280,7 @@ def _union(self, other: Index, sort):
else:
result = lvals

if not self.is_monotonic or not other.is_monotonic:
if not self.is_monotonic_increasing or not other.is_monotonic_increasing:
# if both are monotonic then result should already be sorted
result = _maybe_try_sort(result, sort)

Expand Down Expand Up @@ -3379,7 +3385,11 @@ def _intersection(self, other: Index, sort=False):
"""
intersection specialized to the case with matching dtypes.
"""
if self.is_monotonic and other.is_monotonic and self._can_use_libjoin:
if (
self.is_monotonic_increasing
and other.is_monotonic_increasing
and self._can_use_libjoin
):
try:
result = self._inner_indexer(other)[0]
except TypeError:
Expand Down Expand Up @@ -4475,15 +4485,15 @@ def join(
if not self.is_unique and not other.is_unique:
return self._join_non_unique(other, how=how)
elif not self.is_unique or not other.is_unique:
if self.is_monotonic and other.is_monotonic:
if self.is_monotonic_increasing and other.is_monotonic_increasing:
if self._can_use_libjoin:
# otherwise we will fall through to _join_via_get_indexer
return self._join_monotonic(other, how=how)
else:
return self._join_non_unique(other, how=how)
elif (
self.is_monotonic
and other.is_monotonic
self.is_monotonic_increasing
and other.is_monotonic_increasing
and self._can_use_libjoin
and (
not isinstance(self, ABCMultiIndex)
Expand Down
10 changes: 5 additions & 5 deletions pandas/core/indexes/multi.py
Original file line number Diff line number Diff line change
Expand Up @@ -1557,7 +1557,7 @@ def is_monotonic_increasing(self) -> bool:
if any(-1 in code for code in self.codes):
return False

if all(level.is_monotonic for level in self.levels):
if all(level.is_monotonic_increasing for level in self.levels):
# If each level is sorted, we can operate on the codes directly. GH27495
return libalgos.is_lexsorted(
[x.astype("int64", copy=False) for x in self.codes]
Expand All @@ -1574,11 +1574,11 @@ def is_monotonic_increasing(self) -> bool:
# int, float, complex, str, bytes, _NestedSequence[Union[bool, int, float,
# complex, str, bytes]]]" [arg-type]
sort_order = np.lexsort(values) # type: ignore[arg-type]
return Index(sort_order).is_monotonic
return Index(sort_order).is_monotonic_increasing
except TypeError:

# we have mixed types and np.lexsort is not happy
return Index(self._values).is_monotonic
return Index(self._values).is_monotonic_increasing

@cache_readonly
def is_monotonic_decreasing(self) -> bool:
Expand Down Expand Up @@ -1946,15 +1946,15 @@ def _sort_levels_monotonic(self) -> MultiIndex:
('b', 'bb')],
)
"""
if self._is_lexsorted() and self.is_monotonic:
if self._is_lexsorted() and self.is_monotonic_increasing:
return self

new_levels = []
new_codes = []

for lev, level_codes in zip(self.levels, self.codes):

if not lev.is_monotonic:
if not lev.is_monotonic_increasing:
try:
# indexer to reorder the levels
indexer = lev.argsort()
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/missing.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ def clean_interp_method(method: str, index: Index, **kwargs) -> str:
raise ValueError(f"method must be one of {valid}. Got '{method}' instead.")

if method in ("krogh", "piecewise_polynomial", "pchip"):
if not index.is_monotonic:
if not index.is_monotonic_increasing:
raise ValueError(
f"{method} interpolation requires that the index be monotonic."
)
Expand Down
4 changes: 2 additions & 2 deletions pandas/core/reshape/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -1929,14 +1929,14 @@ def flip(xs) -> np.ndarray:
tolerance = self.tolerance

# we require sortedness and non-null values in the join keys
if not Index(left_values).is_monotonic:
if not Index(left_values).is_monotonic_increasing:
side = "left"
if isna(left_values).any():
raise ValueError(f"Merge keys contain null values on {side} side")
else:
raise ValueError(f"{side} keys must be sorted")

if not Index(right_values).is_monotonic:
if not Index(right_values).is_monotonic_increasing:
side = "right"
if isna(right_values).any():
raise ValueError(f"Merge keys contain null values on {side} side")
Expand Down
20 changes: 9 additions & 11 deletions pandas/tests/frame/methods/test_sort_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def test_sort_index_and_reconstruction_doc_example(self):
),
)
assert df.index._is_lexsorted()
assert not df.index.is_monotonic
assert not df.index.is_monotonic_increasing

# sort it
expected = DataFrame(
Expand All @@ -35,23 +35,21 @@ def test_sort_index_and_reconstruction_doc_example(self):
),
)
result = df.sort_index()
assert result.index.is_monotonic

assert result.index.is_monotonic_increasing
tm.assert_frame_equal(result, expected)

# reconstruct
result = df.sort_index().copy()
result.index = result.index._sort_levels_monotonic()
assert result.index.is_monotonic

assert result.index.is_monotonic_increasing
tm.assert_frame_equal(result, expected)

def test_sort_index_non_existent_label_multiindex(self):
# GH#12261
df = DataFrame(0, columns=[], index=MultiIndex.from_product([[], []]))
df.loc["b", "2"] = 1
df.loc["a", "3"] = 1
result = df.sort_index().index.is_monotonic
result = df.sort_index().index.is_monotonic_increasing
assert result is True

def test_sort_index_reorder_on_ops(self):
Expand Down Expand Up @@ -549,7 +547,7 @@ def test_sort_index_and_reconstruction(self):
index=MultiIndex.from_product([[0.5, 0.8], list("ab")]),
)
result = result.sort_index()
assert result.index.is_monotonic
assert result.index.is_monotonic_increasing

tm.assert_frame_equal(result, expected)

Expand All @@ -567,7 +565,7 @@ def test_sort_index_and_reconstruction(self):
concatted = pd.concat([df, df], keys=[0.8, 0.5])
result = concatted.sort_index()

assert result.index.is_monotonic
assert result.index.is_monotonic_increasing

tm.assert_frame_equal(result, expected)

Expand All @@ -583,11 +581,11 @@ def test_sort_index_and_reconstruction(self):
df.columns = df.columns.set_levels(
pd.to_datetime(df.columns.levels[1]), level=1
)
assert not df.columns.is_monotonic
assert not df.columns.is_monotonic_increasing
result = df.sort_index(axis=1)
assert result.columns.is_monotonic
assert result.columns.is_monotonic_increasing
result = df.sort_index(axis=1, level=1)
assert result.columns.is_monotonic
assert result.columns.is_monotonic_increasing

# TODO: better name, de-duplicate with test_sort_index_level above
def test_sort_index_level2(self, multiindex_dataframe_random_data):
Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/frame/test_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -1530,7 +1530,7 @@ def test_constructor_mixed_dict_and_Series(self):
data["B"] = Series([4, 3, 2, 1], index=["bar", "qux", "baz", "foo"])

result = DataFrame(data)
assert result.index.is_monotonic
assert result.index.is_monotonic_increasing

# ordering ambiguous, raise exception
with pytest.raises(ValueError, match="ambiguous ordering"):
Expand Down
8 changes: 4 additions & 4 deletions pandas/tests/indexes/datetimelike_/test_is_monotonic.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

def test_is_monotonic_with_nat():
# GH#31437
# PeriodIndex.is_monotonic should behave analogously to DatetimeIndex,
# PeriodIndex.is_monotonic_increasing should behave analogously to DatetimeIndex,
# in particular never be monotonic when we have NaT
dti = date_range("2016-01-01", periods=3)
pi = dti.to_period("D")
Expand All @@ -16,7 +16,7 @@ def test_is_monotonic_with_nat():
for obj in [pi, pi._engine, dti, dti._engine, tdi, tdi._engine]:
if isinstance(obj, Index):
# i.e. not Engines
assert obj.is_monotonic
assert obj.is_monotonic_increasing
assert obj.is_monotonic_increasing
assert not obj.is_monotonic_decreasing
assert obj.is_unique
Expand All @@ -28,7 +28,7 @@ def test_is_monotonic_with_nat():
for obj in [pi1, pi1._engine, dti1, dti1._engine, tdi1, tdi1._engine]:
if isinstance(obj, Index):
# i.e. not Engines
assert not obj.is_monotonic
assert not obj.is_monotonic_increasing
assert not obj.is_monotonic_increasing
assert not obj.is_monotonic_decreasing
assert obj.is_unique
Expand All @@ -40,7 +40,7 @@ def test_is_monotonic_with_nat():
for obj in [pi2, pi2._engine, dti2, dti2._engine, tdi2, tdi2._engine]:
if isinstance(obj, Index):
# i.e. not Engines
assert not obj.is_monotonic
assert not obj.is_monotonic_increasing
assert not obj.is_monotonic_increasing
assert not obj.is_monotonic_decreasing
assert obj.is_unique
9 changes: 4 additions & 5 deletions pandas/tests/indexes/datetimelike_/test_sort_values.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,17 +59,16 @@ def test_argmin_argmax(self, non_monotonic_idx):
def test_sort_values(self, non_monotonic_idx):
idx = non_monotonic_idx
ordered = idx.sort_values()
assert ordered.is_monotonic

assert ordered.is_monotonic_increasing
ordered = idx.sort_values(ascending=False)
assert ordered[::-1].is_monotonic
assert ordered[::-1].is_monotonic_increasing

ordered, dexer = idx.sort_values(return_indexer=True)
assert ordered.is_monotonic
assert ordered.is_monotonic_increasing
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0], dtype=np.intp))

ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
assert ordered[::-1].is_monotonic
assert ordered[::-1].is_monotonic_increasing
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1], dtype=np.intp))

def check_sort_values_with_freq(self, idx):
Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/indexes/datetimes/test_join.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def test_join_nonunique(self):
idx1 = to_datetime(["2012-11-06 16:00:11.477563", "2012-11-06 16:00:11.477563"])
idx2 = to_datetime(["2012-11-06 15:11:09.006507", "2012-11-06 15:11:09.006507"])
rs = idx1.join(idx2, how="outer")
assert rs.is_monotonic
assert rs.is_monotonic_increasing

@pytest.mark.parametrize("freq", ["B", "C"])
def test_outer_join(self, freq):
Expand Down
Loading