Skip to content

Commit 3b96ada

Browse files
authored
Removed ABCs from pandas._typing (#27424)
1 parent d7eb306 commit 3b96ada

File tree

5 files changed

+55
-50
lines changed

5 files changed

+55
-50
lines changed

pandas/_typing.py

+17-22
Original file line number | Diff line number | Diff line change
@@ -1,34 +1,29 @@
11
from pathlib import Path
2-
from typing import IO, AnyStr, TypeVar, Union
2+
from typing import IO, TYPE_CHECKING, AnyStr, TypeVar, Union
33

44
import numpy as np
55

6-
from pandas._libs import Timestamp
7-
from pandas._libs.tslibs.period import Period
8-
from pandas._libs.tslibs.timedeltas import Timedelta
6+
# To prevent import cycles place any internal imports in the branch below
7+
# and use a string literal forward reference to it in subsequent types
8+
# https://mypy.readthedocs.io/en/latest/common_issues.html#import-cycles
9+
if TYPE_CHECKING:
10+
from pandas._libs import Period, Timedelta, Timestamp # noqa: F401
11+
from pandas.core.arrays.base import ExtensionArray # noqa: F401
12+
from pandas.core.dtypes.dtypes import ExtensionDtype # noqa: F401
13+
from pandas.core.indexes.base import Index # noqa: F401
14+
from pandas.core.frame import DataFrame # noqa: F401
15+
from pandas.core.series import Series # noqa: F401
16+
from pandas.core.sparse.series import SparseSeries # noqa: F401
917

10-
from pandas.core.dtypes.dtypes import ExtensionDtype
11-
from pandas.core.dtypes.generic import (
12-
ABCDataFrame,
13-
ABCExtensionArray,
14-
ABCIndexClass,
15-
ABCSeries,
16-
ABCSparseSeries,
17-
)
1818

1919
AnyArrayLike = TypeVar(
20-
"AnyArrayLike",
21-
ABCExtensionArray,
22-
ABCIndexClass,
23-
ABCSeries,
24-
ABCSparseSeries,
25-
np.ndarray,
20+
"AnyArrayLike", "ExtensionArray", "Index", "Series", "SparseSeries", np.ndarray
2621
)
27-
ArrayLike = TypeVar("ArrayLike", ABCExtensionArray, np.ndarray)
28-
DatetimeLikeScalar = TypeVar("DatetimeLikeScalar", Period, Timestamp, Timedelta)
29-
Dtype = Union[str, np.dtype, ExtensionDtype]
22+
ArrayLike = TypeVar("ArrayLike", "ExtensionArray", np.ndarray)
23+
DatetimeLikeScalar = TypeVar("DatetimeLikeScalar", "Period", "Timestamp", "Timedelta")
24+
Dtype = Union[str, np.dtype, "ExtensionDtype"]
3025
FilePathOrBuffer = Union[str, Path, IO[AnyStr]]
3126

32-
FrameOrSeries = TypeVar("FrameOrSeries", ABCSeries, ABCDataFrame)
27+
FrameOrSeries = TypeVar("FrameOrSeries", "Series", "DataFrame")
3328
Scalar = Union[str, int, float]
3429
Axis = Union[str, int]

pandas/core/dtypes/common.py

+3-2
Original file line number | Diff line number | Diff line change
@@ -167,12 +167,13 @@ def ensure_int_or_float(arr: ArrayLike, copy=False) -> np.array:
167167
If the array is explicitly of type uint64 the type
168168
will remain unchanged.
169169
"""
170+
# TODO: GH27506 potential bug with ExtensionArrays
170171
try:
171-
return arr.astype("int64", copy=copy, casting="safe")
172+
return arr.astype("int64", copy=copy, casting="safe") # type: ignore
172173
except TypeError:
173174
pass
174175
try:
175-
return arr.astype("uint64", copy=copy, casting="safe")
176+
return arr.astype("uint64", copy=copy, casting="safe") # type: ignore
176177
except TypeError:
177178
return arr.astype("float64", copy=copy)
178179

pandas/core/indexes/interval.py

+31-24
Original file line number | Diff line number | Diff line change
@@ -906,35 +906,35 @@ def get_indexer(
906906
)
907907
raise InvalidIndexError(msg)
908908

909-
target = ensure_index(target)
909+
target_as_index = ensure_index(target)
910910

911-
if isinstance(target, IntervalIndex):
911+
if isinstance(target_as_index, IntervalIndex):
912912
# equal indexes -> 1:1 positional match
913-
if self.equals(target):
913+
if self.equals(target_as_index):
914914
return np.arange(len(self), dtype="intp")
915915

916916
# different closed or incompatible subtype -> no matches
917917
common_subtype = find_common_type(
918-
[self.dtype.subtype, target.dtype.subtype]
918+
[self.dtype.subtype, target_as_index.dtype.subtype]
919919
)
920-
if self.closed != target.closed or is_object_dtype(common_subtype):
921-
return np.repeat(np.intp(-1), len(target))
920+
if self.closed != target_as_index.closed or is_object_dtype(common_subtype):
921+
return np.repeat(np.intp(-1), len(target_as_index))
922922

923-
# non-overlapping -> at most one match per interval in target
923+
# non-overlapping -> at most one match per interval in target_as_index
924924
# want exact matches -> need both left/right to match, so defer to
925925
# left/right get_indexer, compare elementwise, equality -> match
926-
left_indexer = self.left.get_indexer(target.left)
927-
right_indexer = self.right.get_indexer(target.right)
926+
left_indexer = self.left.get_indexer(target_as_index.left)
927+
right_indexer = self.right.get_indexer(target_as_index.right)
928928
indexer = np.where(left_indexer == right_indexer, left_indexer, -1)
929-
elif not is_object_dtype(target):
929+
elif not is_object_dtype(target_as_index):
930930
# homogeneous scalar index: use IntervalTree
931-
target = self._maybe_convert_i8(target)
932-
indexer = self._engine.get_indexer(target.values)
931+
target_as_index = self._maybe_convert_i8(target_as_index)
932+
indexer = self._engine.get_indexer(target_as_index.values)
933933
else:
934934
# heterogeneous scalar index: defer elementwise to get_loc
935935
# (non-overlapping so get_loc guarantees scalar of KeyError)
936936
indexer = []
937-
for key in target:
937+
for key in target_as_index:
938938
try:
939939
loc = self.get_loc(key)
940940
except KeyError:
@@ -947,21 +947,26 @@ def get_indexer(
947947
def get_indexer_non_unique(
948948
self, target: AnyArrayLike
949949
) -> Tuple[np.ndarray, np.ndarray]:
950-
target = ensure_index(target)
950+
target_as_index = ensure_index(target)
951951

952-
# check that target IntervalIndex is compatible
953-
if isinstance(target, IntervalIndex):
952+
# check that target_as_index IntervalIndex is compatible
953+
if isinstance(target_as_index, IntervalIndex):
954954
common_subtype = find_common_type(
955-
[self.dtype.subtype, target.dtype.subtype]
955+
[self.dtype.subtype, target_as_index.dtype.subtype]
956956
)
957-
if self.closed != target.closed or is_object_dtype(common_subtype):
957+
if self.closed != target_as_index.closed or is_object_dtype(common_subtype):
958958
# different closed or incompatible subtype -> no matches
959-
return np.repeat(-1, len(target)), np.arange(len(target))
959+
return (
960+
np.repeat(-1, len(target_as_index)),
961+
np.arange(len(target_as_index)),
962+
)
960963

961-
if is_object_dtype(target) or isinstance(target, IntervalIndex):
962-
# target might contain intervals: defer elementwise to get_loc
964+
if is_object_dtype(target_as_index) or isinstance(
965+
target_as_index, IntervalIndex
966+
):
967+
# target_as_index might contain intervals: defer elementwise to get_loc
963968
indexer, missing = [], []
964-
for i, key in enumerate(target):
969+
for i, key in enumerate(target_as_index):
965970
try:
966971
locs = self.get_loc(key)
967972
if isinstance(locs, slice):
@@ -973,8 +978,10 @@ def get_indexer_non_unique(
973978
indexer.append(locs)
974979
indexer = np.concatenate(indexer)
975980
else:
976-
target = self._maybe_convert_i8(target)
977-
indexer, missing = self._engine.get_indexer_non_unique(target.values)
981+
target_as_index = self._maybe_convert_i8(target_as_index)
982+
indexer, missing = self._engine.get_indexer_non_unique(
983+
target_as_index.values
984+
)
978985

979986
return ensure_platform_int(indexer), ensure_platform_int(missing)
980987

pandas/core/window.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -240,7 +240,7 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray:
240240

241241
return values
242242

243-
def _wrap_result(self, result, block=None, obj=None) -> FrameOrSeries:
243+
def _wrap_result(self, result, block=None, obj=None):
244244
"""
245245
Wrap a single result.
246246
"""

setup.cfg

+3-1
Original file line number | Diff line number | Diff line change
@@ -77,7 +77,9 @@ filterwarnings =
7777

7878
[coverage:run]
7979
branch = False
80-
omit = */tests/*
80+
omit =
81+
*/tests/*
82+
pandas/_typing.py
8183
plugins = Cython.Coverage
8284

8385
[coverage:report]

0 commit comments

Comments (0)