diff --git a/doc/source/whatsnew/v2.1.0.rst b/doc/source/whatsnew/v2.1.0.rst
index b83f317814ad9..ca3475c822581 100644
--- a/doc/source/whatsnew/v2.1.0.rst
+++ b/doc/source/whatsnew/v2.1.0.rst
@@ -84,7 +84,7 @@ See :ref:`install.dependencies` and :ref:`install.optional_dependencies` for mor
Other API changes
^^^^^^^^^^^^^^^^^
--
+- Many methods that previously raised :class:`AbstractMethodError` are now decorated with ``@abstractmethod`` or raise ``NotImplementedError`` instead (:issue:`48909`)
-
.. ---------------------------------------------------------------------------
@@ -93,6 +93,7 @@ Other API changes
Deprecations
~~~~~~~~~~~~
- Deprecated accepting slices in :meth:`DataFrame.take`, call ``obj[slicer]`` or pass a sequence of integers instead (:issue:`51539`)
+- Deprecated :class:`AbstractMethodError`, use the built-in ``NotImplementedError`` instead (:issue:`48909`)
-
.. ---------------------------------------------------------------------------
diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py
index 5d1bb04cfacbd..63af4dcd4e5b2 100644
--- a/pandas/core/arrays/_mixins.py
+++ b/pandas/core/arrays/_mixins.py
@@ -1,5 +1,9 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
from functools import wraps
from typing import (
TYPE_CHECKING,
@@ -29,7 +33,6 @@
npt,
type_t,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import doc
from pandas.util._validators import (
validate_bool_kwarg,
@@ -94,7 +97,7 @@ def method(self, *args, **kwargs):
return cast(F, method)
-class NDArrayBackedExtensionArray(NDArrayBacked, ExtensionArray):
+class NDArrayBackedExtensionArray(NDArrayBacked, ExtensionArray, ABC):
"""
ExtensionArray that is backed by a single NumPy ndarray.
"""
@@ -113,9 +116,10 @@ def _box_func(self, x):
"""
return x
+ @abstractmethod
def _validate_scalar(self, value):
# used by NDArrayBackedExtensionIndex.insert
- raise AbstractMethodError(self)
+ ...
# ------------------------------------------------------------------------
diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py
index db8c87f0654cd..71a93f95639b2 100644
--- a/pandas/core/arrays/base.py
+++ b/pandas/core/arrays/base.py
@@ -8,6 +8,10 @@
"""
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import operator
from typing import (
TYPE_CHECKING,
@@ -41,7 +45,6 @@
)
from pandas.compat import set_function_name
from pandas.compat.numpy import function as nv
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
Appender,
Substitution,
@@ -258,7 +261,7 @@ def _from_sequence(cls, scalars, *, dtype: Dtype | None = None, copy: bool = Fal
-------
ExtensionArray
"""
- raise AbstractMethodError(cls)
+ raise NotImplementedError
@classmethod
def _from_sequence_of_strings(
@@ -282,7 +285,7 @@ def _from_sequence_of_strings(
-------
ExtensionArray
"""
- raise AbstractMethodError(cls)
+ raise NotImplementedError
@classmethod
def _from_factorized(cls, values, original):
@@ -301,7 +304,7 @@ def _from_factorized(cls, values, original):
factorize : Top-level factorize method that dispatches here.
ExtensionArray.factorize : Encode the extension array as an enumerated type.
"""
- raise AbstractMethodError(cls)
+ raise NotImplementedError
# ------------------------------------------------------------------------
# Must be a Sequence
@@ -347,7 +350,7 @@ def __getitem__(
For a boolean mask, return an instance of ``ExtensionArray``, filtered
to the values where ``item`` is True.
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
def __setitem__(self, key, value) -> None:
"""
@@ -402,7 +405,7 @@ def __len__(self) -> int:
-------
length : int
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
def __iter__(self) -> Iterator[Any]:
"""
@@ -444,7 +447,7 @@ def __eq__(self, other: Any) -> ArrayLike: # type: ignore[override]
# return NotImplemented (to ensure that those objects are responsible for
# first unpacking the arrays, and then dispatch the operation to the
# underlying arrays)
- raise AbstractMethodError(self)
+ raise NotImplementedError
# error: Signature of "__ne__" incompatible with supertype "object"
def __ne__(self, other: Any) -> ArrayLike: # type: ignore[override]
@@ -498,7 +501,7 @@ def dtype(self) -> ExtensionDtype:
"""
An instance of 'ExtensionDtype'.
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
@property
def shape(self) -> Shape:
@@ -530,7 +533,7 @@ def nbytes(self) -> int:
"""
# If this is expensive to compute, return an approximate lower bound
# on the number of bytes needed.
- raise AbstractMethodError(self)
+ raise NotImplementedError
# ------------------------------------------------------------------------
# Additional Methods
@@ -611,7 +614,7 @@ def isna(self) -> np.ndarray | ExtensionArraySupportsAnyAll:
* `na_values` should implement :func:`ExtensionArray._reduce`
* ``na_values.any`` and ``na_values.all`` should be implemented
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
@property
def _hasna(self) -> bool:
@@ -1215,7 +1218,7 @@ def take(self, indices, allow_fill=False, fill_value=None):
# uses. In this case, your implementation is responsible for casting
# the user-facing type to the storage type, before using
# pandas.api.extensions.take
- raise AbstractMethodError(self)
+ raise NotImplementedError
def copy(self: ExtensionArrayT) -> ExtensionArrayT:
"""
@@ -1225,7 +1228,7 @@ def copy(self: ExtensionArrayT) -> ExtensionArrayT:
-------
ExtensionArray
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
def view(self, dtype: Dtype | None = None) -> ArrayLike:
"""
@@ -1368,7 +1371,7 @@ def _concat_same_type(
# should allow "easy" concatenation (no upcasting needed), and result
# in a new ExtensionArray of the same dtype.
# Note: this strict behaviour is only guaranteed starting with pandas 1.1
- raise AbstractMethodError(cls)
+ raise NotImplementedError
# The _can_hold_na attribute is set to True so that pandas internals
# will use the ExtensionDtype.na_value as the NA value in operations
@@ -1686,15 +1689,17 @@ def __array_ufunc__(self, ufunc: np.ufunc, method: str, *inputs, **kwargs):
return arraylike.default_array_ufunc(self, ufunc, method, *inputs, **kwargs)
-class ExtensionArraySupportsAnyAll(ExtensionArray):
+class ExtensionArraySupportsAnyAll(ExtensionArray, ABC):
+ @abstractmethod
def any(self, *, skipna: bool = True) -> bool:
- raise AbstractMethodError(self)
+ ...
+ @abstractmethod
def all(self, *, skipna: bool = True) -> bool:
- raise AbstractMethodError(self)
+ ...
-class ExtensionOpsMixin:
+class ExtensionOpsMixin(ABC):
"""
A base class for linking the operators to their dunder names.
@@ -1706,8 +1711,9 @@ class ExtensionOpsMixin:
"""
@classmethod
+ @abstractmethod
def _create_arithmetic_method(cls, op):
- raise AbstractMethodError(cls)
+ ...
@classmethod
def _add_arithmetic_ops(cls) -> None:
@@ -1731,8 +1737,9 @@ def _add_arithmetic_ops(cls) -> None:
setattr(cls, "__rdivmod__", cls._create_arithmetic_method(roperator.rdivmod))
@classmethod
+ @abstractmethod
def _create_comparison_method(cls, op):
- raise AbstractMethodError(cls)
+ ...
@classmethod
def _add_comparison_ops(cls) -> None:
@@ -1744,8 +1751,9 @@ def _add_comparison_ops(cls) -> None:
setattr(cls, "__ge__", cls._create_comparison_method(operator.ge))
@classmethod
+ @abstractmethod
def _create_logical_method(cls, op):
- raise AbstractMethodError(cls)
+ ...
@classmethod
def _add_logical_ops(cls) -> None:
diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py
index b8fca76115446..ed527aea87431 100644
--- a/pandas/core/arrays/datetimelike.py
+++ b/pandas/core/arrays/datetimelike.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from abc import abstractmethod
from datetime import (
datetime,
timedelta,
@@ -70,7 +71,6 @@
)
from pandas.compat.numpy import function as nv
from pandas.errors import (
- AbstractMethodError,
InvalidComparison,
PerformanceWarning,
)
@@ -205,12 +205,14 @@ class DatetimeLikeArrayMixin(OpsMixin, NDArrayBackedExtensionArray):
def _can_hold_na(self) -> bool:
return True
+ @abstractmethod
def __init__(
self, data, dtype: Dtype | None = None, freq=None, copy: bool = False
) -> None:
- raise AbstractMethodError(self)
+ ...
@property
+ @abstractmethod
def _scalar_type(self) -> type[DatetimeLikeScalar]:
"""
The scalar associated with this datelike
@@ -219,8 +221,8 @@ def _scalar_type(self) -> type[DatetimeLikeScalar]:
* DatetimeArray : Timestamp
* TimedeltaArray : Timedelta
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _scalar_from_string(self, value: str) -> DTScalarOrNaT:
"""
Construct a scalar type from a string.
@@ -239,8 +241,8 @@ def _scalar_from_string(self, value: str) -> DTScalarOrNaT:
This should call ``self._check_compatible_with`` before
unboxing the result.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _unbox_scalar(
self, value: DTScalarOrNaT
) -> np.int64 | np.datetime64 | np.timedelta64:
@@ -261,8 +263,8 @@ def _unbox_scalar(
>>> self._unbox_scalar(Timedelta("10s")) # doctest: +SKIP
10000000000
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _check_compatible_with(self, other: DTScalarOrNaT) -> None:
"""
Verify that `self` and `other` are compatible.
@@ -281,15 +283,14 @@ def _check_compatible_with(self, other: DTScalarOrNaT) -> None:
------
Exception
"""
- raise AbstractMethodError(self)
# ------------------------------------------------------------------
+ @abstractmethod
def _box_func(self, x):
"""
box function to get object from internal representation
"""
- raise AbstractMethodError(self)
def _box_values(self, values) -> np.ndarray:
"""
@@ -319,6 +320,7 @@ def asi8(self) -> npt.NDArray[np.int64]:
# ----------------------------------------------------------------
# Rendering Methods
+ @abstractmethod
def _format_native_types(
self, *, na_rep: str | float = "NaT", date_format=None
) -> npt.NDArray[np.object_]:
@@ -329,7 +331,6 @@ def _format_native_types(
-------
ndarray[str]
"""
- raise AbstractMethodError(self)
def _formatter(self, boxed: bool = False):
# TODO: Remove Datetime & DatetimeTZ formatters.
@@ -1135,8 +1136,9 @@ def _add_period(self, other: Period) -> PeriodArray:
parr = PeriodArray(i8vals, freq=other.freq)
return parr + self
+ @abstractmethod
def _add_offset(self, offset):
- raise AbstractMethodError(self)
+ ...
def _add_timedeltalike_scalar(self, other):
"""
@@ -1855,8 +1857,9 @@ def __init__(
type(self)._validate_frequency(self, freq)
@classmethod
+ @abstractmethod
def _validate_dtype(cls, values, dtype):
- raise AbstractMethodError(cls)
+ ...
@property
def freq(self):
@@ -1920,10 +1923,11 @@ def _validate_frequency(cls, index, freq, **kwargs):
) from err
@classmethod
+ @abstractmethod
def _generate_range(
cls: type[DatetimeLikeArrayT], start, end, periods, freq, *args, **kwargs
) -> DatetimeLikeArrayT:
- raise AbstractMethodError(cls)
+ ...
# --------------------------------------------------------------
diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py
index 9b9cb3e29810d..85918375dc245 100644
--- a/pandas/core/arrays/masked.py
+++ b/pandas/core/arrays/masked.py
@@ -1,5 +1,9 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
from typing import (
TYPE_CHECKING,
Any,
@@ -34,7 +38,6 @@
Shape,
npt,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import doc
from pandas.util._validators import validate_fillna_kwargs
@@ -97,7 +100,7 @@
BaseMaskedArrayT = TypeVar("BaseMaskedArrayT", bound="BaseMaskedArray")
-class BaseMaskedArray(OpsMixin, ExtensionArray):
+class BaseMaskedArray(OpsMixin, ExtensionArray, ABC):
"""
Base class for masked arrays (which use _data and _mask to store the data).
@@ -141,8 +144,9 @@ def _from_sequence(
return cls(values, mask)
@property
+ @abstractmethod
def dtype(self) -> BaseMaskedDtype:
- raise AbstractMethodError(self)
+ ...
@overload
def __getitem__(self, item: ScalarIndexer) -> Any:
@@ -198,10 +202,11 @@ def fillna(
return new_values
@classmethod
+ @abstractmethod
def _coerce_to_array(
cls, values, *, dtype: DtypeObj, copy: bool = False
) -> tuple[np.ndarray, np.ndarray]:
- raise AbstractMethodError(cls)
+ ...
def _validate_setitem_value(self, value):
"""
diff --git a/pandas/core/arrays/numeric.py b/pandas/core/arrays/numeric.py
index 2d9a3ae63259d..7045151e1c323 100644
--- a/pandas/core/arrays/numeric.py
+++ b/pandas/core/arrays/numeric.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from abc import abstractmethod
import numbers
from typing import (
TYPE_CHECKING,
@@ -20,7 +21,6 @@
DtypeObj,
npt,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.common import (
@@ -114,8 +114,9 @@ def __from_arrow__(
return array_class._concat_same_type(results)
@classmethod
+ @abstractmethod
def _str_to_dtype_mapping(cls) -> Mapping[str, NumericDtype]:
- raise AbstractMethodError(cls)
+ ...
@classmethod
def _standardize_dtype(cls, dtype: NumericDtype | str | np.dtype) -> NumericDtype:
@@ -136,13 +137,13 @@ def _standardize_dtype(cls, dtype: NumericDtype | str | np.dtype) -> NumericDtyp
return dtype
@classmethod
+ @abstractmethod
def _safe_cast(cls, values: np.ndarray, dtype: np.dtype, copy: bool) -> np.ndarray:
"""
Safely cast the values to the given dtype.
"safe" in this context means the casting is lossless.
"""
- raise AbstractMethodError(cls)
def _coerce_to_data_and_mask(values, mask, dtype, copy, dtype_cls, default_dtype):
diff --git a/pandas/core/base.py b/pandas/core/base.py
index 9f02e20b1605d..8a0047e94ae6a 100644
--- a/pandas/core/base.py
+++ b/pandas/core/base.py
@@ -4,6 +4,10 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import textwrap
from typing import (
TYPE_CHECKING,
@@ -32,7 +36,6 @@
)
from pandas.compat import PYPY
from pandas.compat.numpy import function as nv
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
cache_readonly,
doc,
@@ -176,7 +179,7 @@ def __setattr__(self, key: str, value) -> None:
object.__setattr__(self, key, value)
-class SelectionMixin(Generic[NDFrameT]):
+class SelectionMixin(ABC, Generic[NDFrameT]):
"""
mixin implementing the selection & aggregation interface on a group-like
object sub-classes need to define: obj, exclusions
@@ -256,15 +259,16 @@ def _gotitem(self, key, ndim: int, subset=None):
subset : object, default None
subset to act on
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
+ @abstractmethod
def aggregate(self, func, *args, **kwargs):
- raise AbstractMethodError(self)
+ ...
agg = aggregate
-class IndexOpsMixin(OpsMixin):
+class IndexOpsMixin(ABC, OpsMixin):
"""
Common ops mixin to support a unified interface / docs for Series / Index
"""
@@ -276,14 +280,16 @@ class IndexOpsMixin(OpsMixin):
)
@property
+ @abstractmethod
def dtype(self) -> DtypeObj:
# must be defined here as a property for mypy
- raise AbstractMethodError(self)
+ ...
@property
+ @abstractmethod
def _values(self) -> ExtensionArray | np.ndarray:
# must be defined here as a property for mypy
- raise AbstractMethodError(self)
+ ...
@final
def transpose(self: _T, *args, **kwargs) -> _T:
@@ -331,9 +337,10 @@ def shape(self) -> Shape:
"""
return self._values.shape
+ @abstractmethod
def __len__(self) -> int:
# We need this defined here for mypy
- raise AbstractMethodError(self)
+ ...
@property
def ndim(self) -> Literal[1]:
@@ -409,6 +416,7 @@ def size(self) -> int:
return len(self._values)
@property
+ @abstractmethod
def array(self) -> ExtensionArray:
"""
The ExtensionArray of the data backing this Series or Index.
@@ -471,7 +479,6 @@ def array(self) -> ExtensionArray:
['a', 'b', 'a']
Categories (2, object): ['a', 'b']
"""
- raise AbstractMethodError(self)
@final
def to_numpy(
@@ -1390,9 +1397,9 @@ def _arith_method(self, other, op):
return self._construct_result(result, name=res_name)
+ @abstractmethod
def _construct_result(self, result, name):
"""
Construct an appropriately-wrapped result from the ArrayLike result
of an arithmetic-like operation.
"""
- raise AbstractMethodError(self)
diff --git a/pandas/core/dtypes/base.py b/pandas/core/dtypes/base.py
index bce2a82f057f3..09e4471d70677 100644
--- a/pandas/core/dtypes/base.py
+++ b/pandas/core/dtypes/base.py
@@ -21,7 +21,6 @@
npt,
type_t,
)
-from pandas.errors import AbstractMethodError
from pandas.core.dtypes.generic import (
ABCDataFrame,
@@ -161,7 +160,7 @@ def type(self) -> type_t[Any]:
that value is valid (not NA). NA values do not need to be
instances of `type`.
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
@property
def kind(self) -> str:
@@ -186,7 +185,7 @@ def name(self) -> str:
Will be used for display in, e.g. ``Series.dtype``
"""
- raise AbstractMethodError(self)
+ raise NotImplementedError
@property
def names(self) -> list[str] | None:
@@ -207,7 +206,7 @@ def construct_array_type(cls) -> type_t[ExtensionArray]:
-------
type
"""
- raise AbstractMethodError(cls)
+ raise NotImplementedError
def empty(self, shape: Shape) -> type_t[ExtensionArray]:
"""
diff --git a/pandas/core/generic.py b/pandas/core/generic.py
index 0e050cdbdeea0..a21b5aab30748 100644
--- a/pandas/core/generic.py
+++ b/pandas/core/generic.py
@@ -1,6 +1,10 @@
# pyright: reportPropertyTypeMismatch=false
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import collections
import datetime as dt
from functools import partial
@@ -85,7 +89,6 @@
from pandas.compat._optional import import_optional_dependency
from pandas.compat.numpy import function as nv
from pandas.errors import (
- AbstractMethodError,
InvalidIndexError,
SettingWithCopyError,
SettingWithCopyWarning,
@@ -220,7 +223,7 @@
bool_t = bool # Need alias because NDFrame has def bool:
-class NDFrame(PandasObject, indexing.IndexingMixin):
+class NDFrame(ABC, PandasObject, indexing.IndexingMixin):
"""
N-dimensional analogue of DataFrame. Store multi-dimensional in a
size-mutable, labeled data structure
@@ -467,12 +470,12 @@ def _validate_dtype(cls, dtype) -> DtypeObj | None:
# Construction
@property
+ @abstractmethod
def _constructor(self: NDFrameT) -> Callable[..., NDFrameT]:
"""
Used when a manipulation result has the same dimensions as the
original.
"""
- raise AbstractMethodError(self)
# ----------------------------------------------------------------------
# Internals
@@ -3770,11 +3773,11 @@ def to_csv(
# ----------------------------------------------------------------------
# Lookup Caching
+ @abstractmethod
def _reset_cacher(self) -> None:
"""
Reset the cacher.
"""
- raise AbstractMethodError(self)
def _maybe_update_cacher(
self,
@@ -3802,8 +3805,9 @@ def _maybe_update_cacher(
if clear:
self._clear_item_cache()
+ @abstractmethod
def _clear_item_cache(self) -> None:
- raise AbstractMethodError(self)
+ ...
# ----------------------------------------------------------------------
# Indexing Methods
@@ -4118,8 +4122,9 @@ class animal locomotion
result._set_is_copy(self, copy=not result._is_view)
return result
+ @abstractmethod
def __getitem__(self, item):
- raise AbstractMethodError(self)
+ ...
def _slice(self: NDFrameT, slobj: slice, axis: Axis = 0) -> NDFrameT:
"""
@@ -4836,6 +4841,7 @@ def sort_values(
) -> NDFrameT | None:
...
+ @abstractmethod
def sort_values(
self: NDFrameT,
*,
@@ -4988,7 +4994,6 @@ def sort_values(
4 96hr 50
1 128hr 20
"""
- raise AbstractMethodError(self)
@overload
def sort_index(
@@ -5389,7 +5394,7 @@ def _needs_reindex_multi(self, axes, method, level) -> bool_t:
)
def _reindex_multi(self, axes, copy, fill_value):
- raise AbstractMethodError(self)
+ raise NotImplementedError
@final
def _reindex_with_indexers(
@@ -6101,13 +6106,14 @@ def _get_bool_data(self):
# Internal Interface Methods
@property
+ @abstractmethod
def values(self):
- raise AbstractMethodError(self)
+ ...
@property
+ @abstractmethod
def _values(self) -> ArrayLike:
"""internal implementation"""
- raise AbstractMethodError(self)
@property
def dtypes(self):
diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py
index 39ba102ab3782..ee153f332c444 100644
--- a/pandas/core/groupby/groupby.py
+++ b/pandas/core/groupby/groupby.py
@@ -8,6 +8,7 @@ class providing the base-class of operations.
"""
from __future__ import annotations
+from abc import abstractmethod
import datetime
from functools import (
partial,
@@ -58,10 +59,7 @@ class providing the base-class of operations.
npt,
)
from pandas.compat.numpy import function as nv
-from pandas.errors import (
- AbstractMethodError,
- DataError,
-)
+from pandas.errors import DataError
from pandas.util._decorators import (
Appender,
Substitution,
@@ -1099,11 +1097,6 @@ def _insert_inaxis_grouper(self, result: Series | DataFrame) -> DataFrame:
return result
- def _indexed_output_to_ndframe(
- self, result: Mapping[base.OutputKey, ArrayLike]
- ) -> Series | DataFrame:
- raise AbstractMethodError(self)
-
@final
def _maybe_transpose_result(self, result: NDFrameT) -> NDFrameT:
if self.axis == 1:
@@ -1157,6 +1150,7 @@ def _wrap_aggregated_output(
res = self._maybe_transpose_result(result) # type: ignore[arg-type]
return self._reindex_output(res, qs=qs)
+ @abstractmethod
def _wrap_applied_output(
self,
data,
@@ -1164,7 +1158,7 @@ def _wrap_applied_output(
not_indexed_same: bool = False,
is_transform: bool = False,
):
- raise AbstractMethodError(self)
+ ...
# -----------------------------------------------------------------
# numba
@@ -1503,10 +1497,11 @@ def array_func(values: ArrayLike) -> ArrayLike:
out = out.infer_objects(copy=False)
return out
+ @abstractmethod
def _cython_transform(
self, how: str, numeric_only: bool = False, axis: AxisInt = 0, **kwargs
):
- raise AbstractMethodError(self)
+ ...
@final
def _transform(self, func, *args, engine=None, engine_kwargs=None, **kwargs):
diff --git a/pandas/core/groupby/ops.py b/pandas/core/groupby/ops.py
index 726d75d705344..ba53c31e60878 100644
--- a/pandas/core/groupby/ops.py
+++ b/pandas/core/groupby/ops.py
@@ -7,6 +7,10 @@
"""
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import collections
import functools
from typing import (
@@ -35,7 +39,6 @@
Shape,
npt,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.cast import (
@@ -1200,7 +1203,7 @@ def _is_indexed_like(obj, axes, axis: AxisInt) -> bool:
# Splitting / application
-class DataSplitter(Generic[NDFrameT]):
+class DataSplitter(ABC, Generic[NDFrameT]):
def __init__(
self,
data: NDFrameT,
@@ -1242,8 +1245,9 @@ def __iter__(self) -> Iterator:
def _sorted_data(self) -> NDFrameT:
return self.data.take(self._sort_idx, axis=self.axis)
+ @abstractmethod
def _chop(self, sdata, slice_obj: slice) -> NDFrame:
- raise AbstractMethodError(self)
+ ...
class SeriesSplitter(DataSplitter):
diff --git a/pandas/core/indexing.py b/pandas/core/indexing.py
index c1435ebbe39ef..13cc79ae0427e 100644
--- a/pandas/core/indexing.py
+++ b/pandas/core/indexing.py
@@ -1,5 +1,9 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
from contextlib import suppress
import sys
from typing import (
@@ -23,7 +27,6 @@
)
from pandas.compat import PYPY
from pandas.errors import (
- AbstractMethodError,
ChainedAssignmentError,
IndexingError,
InvalidIndexError,
@@ -661,7 +664,7 @@ def iat(self) -> _iAtIndexer:
return _iAtIndexer("iat", self)
-class _LocationIndexer(NDFrameIndexerBase):
+class _LocationIndexer(ABC, NDFrameIndexerBase):
_valid_types: str
axis: AxisInt | None = None
@@ -852,6 +855,7 @@ def __setitem__(self, key, value) -> None:
iloc = self if self.name == "iloc" else self.obj.iloc
iloc._setitem_with_indexer(indexer, value, self.name)
+ @abstractmethod
def _validate_key(self, key, axis: AxisInt):
"""
Ensure that key is valid for current indexer.
@@ -872,7 +876,6 @@ def _validate_key(self, key, axis: AxisInt):
KeyError
If the key was not found.
"""
- raise AbstractMethodError(self)
@final
def _expand_ellipsis(self, tup: tuple) -> tuple:
@@ -1087,8 +1090,9 @@ def _getitem_nested_tuple(self, tup: tuple):
return obj
+ @abstractmethod
def _convert_to_indexer(self, key, axis: AxisInt):
- raise AbstractMethodError(self)
+ ...
@final
def __getitem__(self, key):
@@ -1109,14 +1113,16 @@ def __getitem__(self, key):
def _is_scalar_access(self, key: tuple):
raise NotImplementedError()
+ @abstractmethod
def _getitem_tuple(self, tup: tuple):
- raise AbstractMethodError(self)
+ ...
def _getitem_axis(self, key, axis: AxisInt):
raise NotImplementedError()
+ @abstractmethod
def _has_valid_setitem_indexer(self, indexer) -> bool:
- raise AbstractMethodError(self)
+ ...
@final
def _getbool_axis(self, key, axis: AxisInt):
@@ -2348,7 +2354,7 @@ def _align_frame(self, indexer, df: DataFrame) -> DataFrame:
raise ValueError("Incompatible indexer with DataFrame")
-class _ScalarAccessIndexer(NDFrameIndexerBase):
+class _ScalarAccessIndexer(ABC, NDFrameIndexerBase):
"""
Access scalars quickly.
"""
@@ -2356,8 +2362,9 @@ class _ScalarAccessIndexer(NDFrameIndexerBase):
# sub-classes need to set _takeable
_takeable: bool
+ @abstractmethod
def _convert_key(self, key):
- raise AbstractMethodError(self)
+ ...
def __getitem__(self, key):
if not isinstance(key, tuple):
diff --git a/pandas/core/internals/base.py b/pandas/core/internals/base.py
index bb5d7e839a98c..09bc97673688e 100644
--- a/pandas/core/internals/base.py
+++ b/pandas/core/internals/base.py
@@ -4,6 +4,10 @@
"""
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
from typing import (
Literal,
TypeVar,
@@ -18,7 +22,6 @@
DtypeObj,
Shape,
)
-from pandas.errors import AbstractMethodError
from pandas.core.dtypes.cast import (
find_common_type,
@@ -34,14 +37,15 @@
T = TypeVar("T", bound="DataManager")
-class DataManager(PandasObject):
+class DataManager(ABC, PandasObject):
# TODO share more methods/attributes
axes: list[Index]
@property
+ @abstractmethod
def items(self) -> Index:
- raise AbstractMethodError(self)
+ ...
@final
def __len__(self) -> int:
@@ -72,6 +76,7 @@ def _validate_set_axis(self, axis: AxisInt, new_labels: Index) -> None:
f"values have {new_len} elements"
)
+ @abstractmethod
def reindex_indexer(
self: T,
new_axis,
@@ -82,7 +87,7 @@ def reindex_indexer(
copy: bool = True,
only_slice: bool = False,
) -> T:
- raise AbstractMethodError(self)
+ ...
@final
def reindex_axis(
@@ -106,12 +111,12 @@ def reindex_axis(
only_slice=only_slice,
)
+ @abstractmethod
def _equal_values(self: T, other: T) -> bool:
"""
To be implemented by the subclasses. Only check the column values
assuming shape and indexes have already been checked.
"""
- raise AbstractMethodError(self)
@final
def equals(self, other: object) -> bool:
@@ -129,13 +134,14 @@ def equals(self, other: object) -> bool:
return self._equal_values(other)
+ @abstractmethod
def apply(
self: T,
f,
align_keys: list[str] | None = None,
**kwargs,
) -> T:
- raise AbstractMethodError(self)
+ ...
@final
def isna(self: T, func) -> T:
@@ -197,8 +203,9 @@ def grouped_reduce(self, func):
return mgr
@classmethod
+ @abstractmethod
def from_array(cls, arr: ArrayLike, index: Index):
- raise AbstractMethodError(cls)
+ ...
def interleaved_dtype(dtypes: list[DtypeObj]) -> DtypeObj | None:
diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py
index 1aba48371b430..ed47cef0433dc 100644
--- a/pandas/core/internals/blocks.py
+++ b/pandas/core/internals/blocks.py
@@ -1,5 +1,9 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
from functools import wraps
import re
from typing import (
@@ -36,7 +40,6 @@
Shape,
npt,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import cache_readonly
from pandas.util._validators import validate_bool_kwarg
@@ -139,7 +142,7 @@ def newfunc(self, *args, **kwargs) -> list[Block]:
return cast(F, newfunc)
-class Block(PandasObject):
+class Block(ABC, PandasObject):
"""
Canonical n-dimensional unit of homogeneous dtype contained in a pandas
data structure
@@ -1449,26 +1452,27 @@ def delete(self, loc) -> list[Block]:
return new_blocks
@property
+ @abstractmethod
def is_view(self) -> bool:
"""return a boolean if I am possibly a view"""
- raise AbstractMethodError(self)
@property
+ @abstractmethod
def array_values(self) -> ExtensionArray:
"""
The array that Series.array returns. Always an ExtensionArray.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def get_values(self, dtype: DtypeObj | None = None) -> np.ndarray:
"""
return an internal format, currently just the ndarray
this is often overridden to handle to_dense like operations
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def values_for_json(self) -> np.ndarray:
- raise AbstractMethodError(self)
+ ...
class EABackedBlock(Block):
diff --git a/pandas/core/resample.py b/pandas/core/resample.py
index f23256c64db2d..b0b0fd11c077d 100644
--- a/pandas/core/resample.py
+++ b/pandas/core/resample.py
@@ -1,5 +1,9 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import copy
from textwrap import dedent
from typing import (
@@ -40,7 +44,6 @@
npt,
)
from pandas.compat.numpy import function as nv
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
Appender,
Substitution,
@@ -102,7 +105,7 @@
_shared_docs_kwargs: dict[str, str] = {}
-class Resampler(BaseGroupBy, PandasObject):
+class Resampler(BaseGroupBy, PandasObject, ABC):
"""
Class for resampling datetimelike data, a groupby-like operation.
See aggregate, transform, and apply functions on this object.
@@ -219,8 +222,9 @@ def _convert_obj(self, obj: NDFrameT) -> NDFrameT:
"""
return obj._consolidate()
+ @abstractmethod
def _get_binner_for_time(self):
- raise AbstractMethodError(self)
+ ...
@final
def _get_binner(self):
@@ -372,11 +376,13 @@ def transform(self, arg, *args, **kwargs):
arg, *args, **kwargs
)
+ @abstractmethod
def _downsample(self, f, **kwargs):
- raise AbstractMethodError(self)
+ ...
+ @abstractmethod
def _upsample(self, f, limit=None, fill_value=None):
- raise AbstractMethodError(self)
+ ...
def _gotitem(self, key, ndim: int, subset=None):
"""
diff --git a/pandas/errors/__init__.py b/pandas/errors/__init__.py
index 3ecee50ffbaa7..7bbf368aa1ec1 100644
--- a/pandas/errors/__init__.py
+++ b/pandas/errors/__init__.py
@@ -4,6 +4,7 @@
from __future__ import annotations
import ctypes
+import warnings
from pandas._config.config import OptionError
@@ -188,6 +189,14 @@ class AbstractMethodError(NotImplementedError):
"""
def __init__(self, class_instance, methodtype: str = "method") -> None:
+ from pandas.util._exceptions import find_stack_level
+
+ warnings.warn(
+ "`AbstractMethodError` will be removed in a future version. Consider"
+ + " using `NotImplementedError`",
+ FutureWarning,
+ stacklevel=find_stack_level(),
+ )
types = {"method", "classmethod", "staticmethod", "property"}
if methodtype not in types:
raise ValueError(
diff --git a/pandas/io/formats/xml.py b/pandas/io/formats/xml.py
index cc258e0271031..cfdca7276316b 100644
--- a/pandas/io/formats/xml.py
+++ b/pandas/io/formats/xml.py
@@ -3,6 +3,10 @@
"""
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import codecs
import io
from typing import (
@@ -17,7 +21,6 @@
StorageOptions,
WriteBuffer,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import doc
from pandas.core.dtypes.common import is_list_like
@@ -39,7 +42,7 @@
storage_options=_shared_docs["storage_options"],
compression_options=_shared_docs["compression_options"] % "path_or_buffer",
)
-class BaseXMLFormatter:
+class BaseXMLFormatter(ABC):
"""
Subclass for formatting data in XML.
@@ -143,6 +146,7 @@ def __init__(
self.prefix_uri = self.get_prefix_uri()
self.handle_indexes()
+ @abstractmethod
def build_tree(self) -> bytes:
"""
Build tree from data.
@@ -150,7 +154,6 @@ def build_tree(self) -> bytes:
This method initializes the root and builds attributes and elements
with optional namespaces.
"""
- raise AbstractMethodError(self)
def validate_columns(self) -> None:
"""
@@ -226,6 +229,7 @@ def handle_indexes(self) -> None:
if self.elem_cols:
self.elem_cols = indexes + self.elem_cols
+ @abstractmethod
def get_prefix_uri(self) -> str:
"""
Get uri of namespace prefix.
@@ -238,8 +242,6 @@ def get_prefix_uri(self) -> str:
*If prefix is not included in namespace dict.
"""
- raise AbstractMethodError(self)
-
def other_namespaces(self) -> dict:
"""
Define other namespaces.
@@ -288,6 +290,7 @@ def _get_flat_col_name(self, col: str | tuple) -> str:
)
return f"{self.prefix_uri}{flat_col}"
+ @abstractmethod
def build_elems(self, d: dict[str, Any], elem_row: Any) -> None:
"""
Create child elements of row.
@@ -296,8 +299,6 @@ def build_elems(self, d: dict[str, Any], elem_row: Any) -> None:
works with tuples for multindex or hierarchical columns.
"""
- raise AbstractMethodError(self)
-
def _build_elems(self, sub_element_cls, d: dict[str, Any], elem_row: Any) -> None:
if not self.elem_cols:
return
diff --git a/pandas/io/html.py b/pandas/io/html.py
index d6d1c5651dd37..d3911feb62ea3 100644
--- a/pandas/io/html.py
+++ b/pandas/io/html.py
@@ -6,6 +6,10 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
from collections import abc
import numbers
import re
@@ -27,10 +31,7 @@
ReadBuffer,
)
from pandas.compat._optional import import_optional_dependency
-from pandas.errors import (
- AbstractMethodError,
- EmptyDataError,
-)
+from pandas.errors import EmptyDataError
from pandas.core.dtypes.common import is_list_like
@@ -134,7 +135,7 @@ def _read(obj: FilePath | BaseBuffer, encoding: str | None) -> str | bytes:
return text
-class _HtmlFrameParser:
+class _HtmlFrameParser(ABC):
"""
Base class for parsers that parse HTML into DataFrames.
@@ -250,6 +251,7 @@ def _attr_getter(self, obj, attr):
# Both lxml and BeautifulSoup have the same implementation:
return obj.get(attr)
+ @abstractmethod
def _href_getter(self, obj):
"""
Return a href if the DOM node contains a child <a> or None.
@@ -264,8 +266,8 @@ def _href_getter(self, obj):
href : str or unicode
The href from the <a> child of the DOM node.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _text_getter(self, obj):
"""
Return the text of an individual DOM node.
@@ -280,8 +282,8 @@ def _text_getter(self, obj):
text : str or unicode
The text from an individual DOM node.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _parse_td(self, obj):
"""
Return the td elements from a row element.
@@ -296,8 +298,8 @@ def _parse_td(self, obj):
list of node-like
These are the elements of each row, i.e., the columns.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _parse_thead_tr(self, table):
"""
Return the list of thead row elements from the parsed table element.
@@ -311,8 +313,8 @@ def _parse_thead_tr(self, table):
list of node-like
These are the row elements of a table.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _parse_tbody_tr(self, table):
"""
Return the list of tbody row elements from the parsed table element.
@@ -330,8 +332,8 @@ def _parse_tbody_tr(self, table):
list of node-like
These are the <tr> row elements of a table.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _parse_tfoot_tr(self, table):
"""
Return the list of tfoot row elements from the parsed table element.
@@ -345,8 +347,8 @@ def _parse_tfoot_tr(self, table):
list of node-like
These are the <tr> row elements of a table.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _parse_tables(self, doc, match, attrs):
"""
Return all tables from the parsed DOM.
@@ -371,8 +373,8 @@ def _parse_tables(self, doc, match, attrs):
list of node-like
HTML elements to be parsed into raw data.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _equals_tag(self, obj, tag):
"""
Return whether an individual DOM node matches a tag
@@ -390,8 +392,8 @@ def _equals_tag(self, obj, tag):
boolean
Whether `obj`'s tag name is `tag`
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _build_doc(self):
"""
Return a tree-like object that can be used to iterate over the DOM.
@@ -401,7 +403,6 @@ def _build_doc(self):
node-like
The DOM from which to parse the table element.
"""
- raise AbstractMethodError(self)
def _parse_thead_tbody_tfoot(self, table_html):
"""
@@ -859,7 +860,7 @@ def _data_to_frame(**kwargs):
return tp.read()
-_valid_parsers = {
+_valid_parsers: dict[str | None, type[_HtmlFrameParser]] = {
"lxml": _LxmlFrameParser,
None: _LxmlFrameParser,
"html5lib": _BeautifulSoupHtml5LibFrameParser,
diff --git a/pandas/io/json/_json.py b/pandas/io/json/_json.py
index 335d510666a1f..ef8e114b38baa 100644
--- a/pandas/io/json/_json.py
+++ b/pandas/io/json/_json.py
@@ -44,7 +44,6 @@
WriteBuffer,
)
from pandas.compat._optional import import_optional_dependency
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import doc
from pandas.core.dtypes.common import (
@@ -238,8 +237,9 @@ def __init__(
self.is_copy = None
self._format_axes()
+ @abstractmethod
def _format_axes(self):
- raise AbstractMethodError(self)
+ ...
def write(self) -> str:
iso_dates = self.date_format == "iso"
@@ -1085,7 +1085,7 @@ def __exit__(
self.close()
-class Parser:
+class Parser(ABC):
_split_keys: tuple[str, ...]
_default_orient: str
@@ -1153,8 +1153,9 @@ def parse(self):
self._try_convert_types()
return self.obj
+ @abstractmethod
def _parse(self):
- raise AbstractMethodError(self)
+ ...
def _convert_axes(self) -> None:
"""
@@ -1172,8 +1173,9 @@ def _convert_axes(self) -> None:
if result:
setattr(self.obj, axis_name, new_axis)
+ @abstractmethod
def _try_convert_types(self):
- raise AbstractMethodError(self)
+ ...
def _try_convert_data(
self,
@@ -1293,7 +1295,7 @@ def _try_convert_to_date(self, data):
return data, False
def _try_convert_dates(self):
- raise AbstractMethodError(self)
+ raise NotImplementedError
class SeriesParser(Parser):
diff --git a/pandas/io/parquet.py b/pandas/io/parquet.py
index aec31f40f8570..ee6a4edc5aa76 100644
--- a/pandas/io/parquet.py
+++ b/pandas/io/parquet.py
@@ -1,6 +1,10 @@
""" parquet compat """
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import io
import os
from typing import (
@@ -19,7 +23,6 @@
WriteBuffer,
)
from pandas.compat._optional import import_optional_dependency
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import doc
from pandas import (
@@ -114,7 +117,7 @@ def _get_path_or_handle(
return path_or_handle, handles, fs
-class BaseImpl:
+class BaseImpl(ABC):
@staticmethod
def validate_dataframe(df: DataFrame) -> None:
if not isinstance(df, DataFrame):
@@ -142,11 +145,13 @@ def validate_dataframe(df: DataFrame) -> None:
if not valid_names:
raise ValueError("Index level names must be strings")
+ @abstractmethod
def write(self, df: DataFrame, path, compression, **kwargs):
- raise AbstractMethodError(self)
+ ...
+ @abstractmethod
def read(self, path, columns=None, **kwargs) -> DataFrame:
- raise AbstractMethodError(self)
+ ...
class PyArrowImpl(BaseImpl):
diff --git a/pandas/io/parsers/readers.py b/pandas/io/parsers/readers.py
index 635c98e38da16..e2a15c7f99142 100644
--- a/pandas/io/parsers/readers.py
+++ b/pandas/io/parsers/readers.py
@@ -37,10 +37,7 @@
ReadCsvBuffer,
StorageOptions,
)
-from pandas.errors import (
- AbstractMethodError,
- ParserWarning,
-)
+from pandas.errors import ParserWarning
from pandas.util._decorators import Appender
from pandas.util._exceptions import find_stack_level
@@ -1700,7 +1697,7 @@ def _make_engine(
raise
def _failover_to_python(self) -> None:
- raise AbstractMethodError(self)
+ raise NotImplementedError
def read(self, nrows: int | None = None) -> DataFrame:
if self.engine == "pyarrow":
diff --git a/pandas/io/sql.py b/pandas/io/sql.py
index 4e86166ef512d..0693a63e49775 100644
--- a/pandas/io/sql.py
+++ b/pandas/io/sql.py
@@ -41,10 +41,7 @@
IndexLabel,
)
from pandas.compat._optional import import_optional_dependency
-from pandas.errors import (
- AbstractMethodError,
- DatabaseError,
-)
+from pandas.errors import DatabaseError
from pandas.util._exceptions import find_stack_level
from pandas.core.dtypes.common import (
@@ -1460,7 +1457,8 @@ def _create_sql_schema(
pass
-class BaseEngine:
+class BaseEngine(ABC):
+ @abstractmethod
def insert_records(
self,
table: SQLTable,
@@ -1476,7 +1474,6 @@ def insert_records(
"""
Inserts data into already-prepared table
"""
- raise AbstractMethodError(self)
class SQLAlchemyEngine(BaseEngine):
diff --git a/pandas/io/xml.py b/pandas/io/xml.py
index 90d67ac45d4fd..0ec8507332f29 100644
--- a/pandas/io/xml.py
+++ b/pandas/io/xml.py
@@ -4,6 +4,10 @@
from __future__ import annotations
+from abc import (
+ ABC,
+ abstractmethod,
+)
import io
from typing import (
Any,
@@ -26,10 +30,7 @@
XMLParsers,
)
from pandas.compat._optional import import_optional_dependency
-from pandas.errors import (
- AbstractMethodError,
- ParserError,
-)
+from pandas.errors import ParserError
from pandas.util._decorators import doc
from pandas.core.dtypes.common import is_list_like
@@ -58,7 +59,7 @@
storage_options=_shared_docs["storage_options"],
decompression_options=_shared_docs["decompression_options"] % "path_or_buffer",
)
-class _XMLFrameParser:
+class _XMLFrameParser(ABC):
"""
Internal subclass to parse XML into DataFrames.
@@ -175,6 +176,7 @@ def __init__(
self.compression = compression
self.storage_options = storage_options
+ @abstractmethod
def parse_data(self) -> list[dict[str, str | None]]:
"""
Parse xml data.
@@ -183,8 +185,6 @@ def parse_data(self) -> list[dict[str, str | None]]:
validate xpath, names, parse and return specific nodes.
"""
- raise AbstractMethodError(self)
-
def _parse_nodes(self, elems: list[Any]) -> list[dict[str, str | None]]:
"""
Parse xml nodes.
@@ -389,6 +389,7 @@ def _iterparse_nodes(self, iterparse: Callable) -> list[dict[str, str | None]]:
return dicts
+ @abstractmethod
def _validate_path(self) -> list[Any]:
"""
Validate xpath.
@@ -404,8 +405,7 @@ def _validate_path(self) -> list[Any]:
* If xpah does not return any nodes.
"""
- raise AbstractMethodError(self)
-
+ @abstractmethod
def _validate_names(self) -> None:
"""
Validate names.
@@ -418,8 +418,8 @@ def _validate_names(self) -> None:
ValueError
* If value is not a list and less then length of nodes.
"""
- raise AbstractMethodError(self)
+ @abstractmethod
def _parse_doc(
self, raw_doc: FilePath | ReadBuffer[bytes] | ReadBuffer[str]
) -> Element | etree._Element:
@@ -429,7 +429,6 @@ def _parse_doc(
This method will parse XML object into tree
either from string/bytes or file location.
"""
- raise AbstractMethodError(self)
class _EtreeFrameParser(_XMLFrameParser):
diff --git a/pandas/plotting/_matplotlib/core.py b/pandas/plotting/_matplotlib/core.py
index 754cc94b6ded6..1ad1e982b93f4 100644
--- a/pandas/plotting/_matplotlib/core.py
+++ b/pandas/plotting/_matplotlib/core.py
@@ -22,7 +22,6 @@
PlottingOrientation,
npt,
)
-from pandas.errors import AbstractMethodError
from pandas.util._decorators import cache_readonly
from pandas.util._exceptions import find_stack_level
@@ -633,8 +632,9 @@ def _compute_plot_data(self):
self.data = numeric_data.apply(self._convert_to_ndarray)
+ @abstractmethod
def _make_plot(self):
- raise AbstractMethodError(self)
+ ...
def _add_table(self) -> None:
if self.table is False:
diff --git a/pandas/tests/extension/decimal/array.py b/pandas/tests/extension/decimal/array.py
index afeca326a9fd4..5e4a4dca8560c 100644
--- a/pandas/tests/extension/decimal/array.py
+++ b/pandas/tests/extension/decimal/array.py
@@ -275,6 +275,10 @@ def value_counts(self, dropna: bool = True):
return value_counts(self.to_numpy(), dropna=dropna)
+ @classmethod
+ def _create_logical_method(cls, op):
+ raise NotImplementedError
+
def to_decimal(values, context=None):
return DecimalArray([decimal.Decimal(x) for x in values], context=context)
diff --git a/pandas/tests/test_errors.py b/pandas/tests/test_errors.py
index aeddc08e4b888..6b82201ca8b06 100644
--- a/pandas/tests/test_errors.py
+++ b/pandas/tests/test_errors.py
@@ -85,28 +85,10 @@ def test_catch_undefined_variable_error(is_local):
raise UndefinedVariableError(variable_name, is_local)
-class Foo:
- @classmethod
- def classmethod(cls):
- raise AbstractMethodError(cls, methodtype="classmethod")
-
- @property
- def property(self):
- raise AbstractMethodError(self, methodtype="property")
-
- def method(self):
- raise AbstractMethodError(self)
-
-
-def test_AbstractMethodError_classmethod():
- xpr = "This classmethod must be defined in the concrete class Foo"
- with pytest.raises(AbstractMethodError, match=xpr):
- Foo.classmethod()
-
- xpr = "This property must be defined in the concrete class Foo"
- with pytest.raises(AbstractMethodError, match=xpr):
- Foo().property
-
- xpr = "This method must be defined in the concrete class Foo"
- with pytest.raises(AbstractMethodError, match=xpr):
- Foo().method()
+def test_AbstractMethodError_deprecation():
+ msg = (
+ "`AbstractMethodError` will be removed in a future version."
+ + " Consider using `NotImplementedError`"
+ )
+ with pd._testing.assert_produces_warning(FutureWarning, match=msg):
+ AbstractMethodError(None) # pylint: disable=pointless-exception-statement
diff --git a/scripts/pandas_errors_documented.py b/scripts/pandas_errors_documented.py
index 52c1e2008b8a0..7851cbc7ea67f 100644
--- a/scripts/pandas_errors_documented.py
+++ b/scripts/pandas_errors_documented.py
@@ -19,7 +19,7 @@
def get_defined_errors(content: str) -> set[str]:
errors = set()
- for node in ast.walk(ast.parse(content)):
+ for node in ast.iter_child_nodes(ast.parse(content)):
if isinstance(node, ast.ClassDef):
errors.add(node.name)
elif isinstance(node, ast.ImportFrom) and node.module != "__future__":
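
The patch above only converts pandas internals. As an illustrative sketch (not part of the patch), the code below shows how a third-party ExtensionArray subclass that previously raised AbstractMethodError might migrate once the FutureWarning is emitted; MyArrayBase and _optional_hook are hypothetical names, and only the standard-library abc module plus the public pandas.api.extensions entry point are assumed.

    from abc import ABC, abstractmethod

    from pandas.api.extensions import ExtensionArray


    class MyArrayBase(ExtensionArray, ABC):
        # Required extension point: enforce it at class-definition time with
        # @abstractmethod instead of raising pandas.errors.AbstractMethodError
        # at call time.
        @classmethod
        @abstractmethod
        def _from_factorized(cls, values, original):
            ...

        # Optional hooks that concrete subclasses may or may not provide can
        # simply raise the builtin exception, mirroring the patch above.
        def _optional_hook(self):
            raise NotImplementedError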