Skip to content

Commit 548f44f

Browse files
authored
TYP: fix ignores (#40412)
1 parent 4007513 commit 548f44f

File tree

11 files changed

+74
-186
lines changed

11 files changed

+74
-186
lines changed

pandas/_libs/parsers.pyx

+1-1
Original file line numberDiff line numberDiff line change
@@ -337,7 +337,7 @@ cdef class TextReader:
337337
object skiprows
338338
object dtype
339339
object usecols
340-
list dtype_cast_order
340+
list dtype_cast_order # list[np.dtype]
341341
set unnamed_cols
342342
set noconvert
343343

pandas/_testing/asserters.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -976,8 +976,8 @@ def assert_series_equal(
976976
left_values = left._values
977977
right_values = right._values
978978
# Only check exact if dtype is numeric
979-
if is_extension_array_dtype(left_values) and is_extension_array_dtype(
980-
right_values
979+
if isinstance(left_values, ExtensionArray) and isinstance(
980+
right_values, ExtensionArray
981981
):
982982
assert_extension_array_equal(
983983
left_values,

pandas/core/algorithms.py

+11-24
Original file line numberDiff line numberDiff line change
@@ -235,41 +235,26 @@ def _reconstruct_data(
235235
# Catch DatetimeArray/TimedeltaArray
236236
return values
237237

238-
if is_extension_array_dtype(dtype):
239-
# error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no
240-
# attribute "construct_array_type"
241-
cls = dtype.construct_array_type() # type: ignore[union-attr]
238+
if not isinstance(dtype, np.dtype):
239+
# i.e. ExtensionDtype
240+
cls = dtype.construct_array_type()
242241
if isinstance(values, cls) and values.dtype == dtype:
243242
return values
244243

245244
values = cls._from_sequence(values)
246245
elif is_bool_dtype(dtype):
247-
# error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has
248-
# incompatible type "Union[dtype, ExtensionDtype]"; expected
249-
# "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int],
250-
# Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict,
251-
# Tuple[Any, Any]]"
252-
values = values.astype(dtype, copy=False) # type: ignore[arg-type]
246+
values = values.astype(dtype, copy=False)
253247

254248
# we only support object dtypes bool Index
255249
if isinstance(original, ABCIndex):
256250
values = values.astype(object, copy=False)
257251
elif dtype is not None:
258252
if is_datetime64_dtype(dtype):
259-
# error: Incompatible types in assignment (expression has type
260-
# "str", variable has type "Union[dtype, ExtensionDtype]")
261-
dtype = "datetime64[ns]" # type: ignore[assignment]
253+
dtype = np.dtype("datetime64[ns]")
262254
elif is_timedelta64_dtype(dtype):
263-
# error: Incompatible types in assignment (expression has type
264-
# "str", variable has type "Union[dtype, ExtensionDtype]")
265-
dtype = "timedelta64[ns]" # type: ignore[assignment]
255+
dtype = np.dtype("timedelta64[ns]")
266256

267-
# error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has
268-
# incompatible type "Union[dtype, ExtensionDtype]"; expected
269-
# "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int],
270-
# Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict,
271-
# Tuple[Any, Any]]"
272-
values = values.astype(dtype, copy=False) # type: ignore[arg-type]
257+
values = values.astype(dtype, copy=False)
273258

274259
return values
275260

@@ -772,7 +757,8 @@ def factorize(
772757
uniques = Index(uniques)
773758
return codes, uniques
774759

775-
if is_extension_array_dtype(values.dtype):
760+
if not isinstance(values.dtype, np.dtype):
761+
# i.e. ExtensionDtype
776762
codes, uniques = values.factorize(na_sentinel=na_sentinel)
777763
dtype = original.dtype
778764
else:
@@ -1662,7 +1648,8 @@ def diff(arr, n: int, axis: int = 0, stacklevel=3):
16621648
arr = arr.to_numpy()
16631649
dtype = arr.dtype
16641650

1665-
if is_extension_array_dtype(dtype):
1651+
if not isinstance(dtype, np.dtype):
1652+
# i.e. ExtensionDtype
16661653
if hasattr(arr, f"__{op.__name__}__"):
16671654
if axis != 0:
16681655
raise ValueError(f"cannot diff {type(arr).__name__} on axis={axis}")

pandas/core/arrays/categorical.py

+7-17
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,10 @@
6666
needs_i8_conversion,
6767
pandas_dtype,
6868
)
69-
from pandas.core.dtypes.dtypes import CategoricalDtype
69+
from pandas.core.dtypes.dtypes import (
70+
CategoricalDtype,
71+
ExtensionDtype,
72+
)
7073
from pandas.core.dtypes.generic import (
7174
ABCIndex,
7275
ABCSeries,
@@ -504,7 +507,7 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike:
504507
result = self._set_dtype(dtype)
505508

506509
# TODO: consolidate with ndarray case?
507-
elif is_extension_array_dtype(dtype):
510+
elif isinstance(dtype, ExtensionDtype):
508511
result = pd_array(self, dtype=dtype, copy=copy)
509512

510513
elif is_integer_dtype(dtype) and self.isna().any():
@@ -515,28 +518,15 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike:
515518
# variable has type "Categorical")
516519
result = np.array( # type: ignore[assignment]
517520
self,
518-
# error: Argument "dtype" to "array" has incompatible type
519-
# "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float],
520-
# Type[int], Type[complex], Type[bool], Type[object]]"; expected
521-
# "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any,
522-
# int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict,
523-
# Tuple[Any, Any]]]"
524-
dtype=dtype, # type: ignore[arg-type]
521+
dtype=dtype,
525522
copy=copy,
526523
)
527524

528525
else:
529526
# GH8628 (PERF): astype category codes instead of astyping array
530527
try:
531528
new_cats = np.asarray(self.categories)
532-
# error: Argument "dtype" to "astype" of "_ArrayOrScalarCommon" has
533-
# incompatible type "Union[ExtensionDtype, dtype[Any]]"; expected
534-
# "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any,
535-
# int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict,
536-
# Tuple[Any, Any]]]"
537-
new_cats = new_cats.astype(
538-
dtype=dtype, copy=copy # type: ignore[arg-type]
539-
)
529+
new_cats = new_cats.astype(dtype=dtype, copy=copy)
540530
except (
541531
TypeError, # downstream error msg for CategoricalIndex is misleading
542532
ValueError,

pandas/core/arrays/datetimelike.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -878,12 +878,11 @@ def _isnan(self) -> np.ndarray:
878878
return self.asi8 == iNaT
879879

880880
@property # NB: override with cache_readonly in immutable subclasses
881-
def _hasnans(self) -> np.ndarray:
881+
def _hasnans(self) -> bool:
882882
"""
883883
return if I have any nans; enables various perf speedups
884884
"""
885-
# error: Incompatible return value type (got "bool", expected "ndarray")
886-
return bool(self._isnan.any()) # type: ignore[return-value]
885+
return bool(self._isnan.any())
887886

888887
def _maybe_mask_results(
889888
self, result: np.ndarray, fill_value=iNaT, convert=None

pandas/core/indexes/base.py

+29-85
Original file line numberDiff line numberDiff line change
@@ -191,8 +191,7 @@
191191
str_t = str
192192

193193

194-
# error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object"
195-
_o_dtype = np.dtype(object) # type: ignore[type-var]
194+
_o_dtype = np.dtype("object")
196195

197196

198197
_Identity = NewType("_Identity", object)
@@ -417,11 +416,7 @@ def __new__(
417416
# maybe coerce to a sub-class
418417
arr = data
419418
else:
420-
# error: Argument "dtype" to "asarray_tuplesafe" has incompatible type
421-
# "Type[object]"; expected "Union[str, dtype[Any], None]"
422-
arr = com.asarray_tuplesafe(
423-
data, dtype=object # type: ignore[arg-type]
424-
)
419+
arr = com.asarray_tuplesafe(data, dtype=np.dtype("object"))
425420

426421
if dtype is None:
427422
arr = _maybe_cast_data_without_dtype(arr)
@@ -456,9 +451,7 @@ def __new__(
456451
)
457452
# other iterable of some kind
458453

459-
# error: Argument "dtype" to "asarray_tuplesafe" has incompatible type
460-
# "Type[object]"; expected "Union[str, dtype[Any], None]"
461-
subarr = com.asarray_tuplesafe(data, dtype=object) # type: ignore[arg-type]
454+
subarr = com.asarray_tuplesafe(data, dtype=np.dtype("object"))
462455
return Index(subarr, dtype=dtype, copy=copy, name=name, **kwargs)
463456

464457
@classmethod
@@ -2902,16 +2895,10 @@ def union(self, other, sort=None):
29022895
# <T> | <T> -> T
29032896
# <T> | <U> -> object
29042897
if not (is_integer_dtype(self.dtype) and is_integer_dtype(other.dtype)):
2905-
# error: Incompatible types in assignment (expression has type
2906-
# "str", variable has type "Union[dtype[Any], ExtensionDtype]")
2907-
dtype = "float64" # type: ignore[assignment]
2898+
dtype = np.dtype("float64")
29082899
else:
29092900
# one is int64 other is uint64
2910-
2911-
# error: Incompatible types in assignment (expression has type
2912-
# "Type[object]", variable has type "Union[dtype[Any],
2913-
# ExtensionDtype]")
2914-
dtype = object # type: ignore[assignment]
2901+
dtype = np.dtype("object")
29152902

29162903
left = self.astype(dtype, copy=False)
29172904
right = other.astype(dtype, copy=False)
@@ -3906,6 +3893,9 @@ def join(
39063893
self_is_mi = isinstance(self, ABCMultiIndex)
39073894
other_is_mi = isinstance(other, ABCMultiIndex)
39083895

3896+
lindexer: Optional[np.ndarray]
3897+
rindexer: Optional[np.ndarray]
3898+
39093899
# try to figure out the join level
39103900
# GH3662
39113901
if level is None and (self_is_mi or other_is_mi):
@@ -4003,15 +3993,11 @@ def join(
40033993

40043994
if return_indexers:
40053995
if join_index is self:
4006-
# error: Incompatible types in assignment (expression has type "None",
4007-
# variable has type "ndarray")
4008-
lindexer = None # type: ignore[assignment]
3996+
lindexer = None
40093997
else:
40103998
lindexer = self.get_indexer(join_index)
40113999
if join_index is other:
4012-
# error: Incompatible types in assignment (expression has type "None",
4013-
# variable has type "ndarray")
4014-
rindexer = None # type: ignore[assignment]
4000+
rindexer = None
40154001
else:
40164002
rindexer = other.get_indexer(join_index)
40174003
return join_index, lindexer, rindexer
@@ -4114,15 +4100,11 @@ def _join_non_unique(self, other, how="left", return_indexers=False):
41144100
left_idx = ensure_platform_int(left_idx)
41154101
right_idx = ensure_platform_int(right_idx)
41164102

4117-
join_index = np.asarray(lvalues.take(left_idx))
4103+
join_array = np.asarray(lvalues.take(left_idx))
41184104
mask = left_idx == -1
4119-
np.putmask(join_index, mask, rvalues.take(right_idx))
4105+
np.putmask(join_array, mask, rvalues.take(right_idx))
41204106

4121-
# error: Incompatible types in assignment (expression has type "Index", variable
4122-
# has type "ndarray")
4123-
join_index = self._wrap_joined_index(
4124-
join_index, other # type: ignore[assignment]
4125-
)
4107+
join_index = self._wrap_joined_index(join_array, other)
41264108

41274109
if return_indexers:
41284110
return join_index, left_idx, right_idx
@@ -4286,6 +4268,9 @@ def _join_monotonic(self, other, how="left", return_indexers=False):
42864268
sv = self._get_engine_target()
42874269
ov = other._get_engine_target()
42884270

4271+
ridx: Optional[np.ndarray]
4272+
lidx: Optional[np.ndarray]
4273+
42894274
if self.is_unique and other.is_unique:
42904275
# We can perform much better than the general case
42914276
if how == "left":
@@ -4295,61 +4280,24 @@ def _join_monotonic(self, other, how="left", return_indexers=False):
42954280
elif how == "right":
42964281
join_index = other
42974282
lidx = self._left_indexer_unique(ov, sv)
4298-
# error: Incompatible types in assignment (expression has type "None",
4299-
# variable has type "ndarray")
4300-
ridx = None # type: ignore[assignment]
4283+
ridx = None
43014284
elif how == "inner":
4302-
# error: Incompatible types in assignment (expression has type
4303-
# "ndarray", variable has type "Index")
4304-
join_index, lidx, ridx = self._inner_indexer( # type:ignore[assignment]
4305-
sv, ov
4306-
)
4307-
# error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible
4308-
# type "Index"; expected "ndarray"
4309-
join_index = self._wrap_joined_index(
4310-
join_index, other # type: ignore[arg-type]
4311-
)
4285+
join_array, lidx, ridx = self._inner_indexer(sv, ov)
4286+
join_index = self._wrap_joined_index(join_array, other)
43124287
elif how == "outer":
4313-
# error: Incompatible types in assignment (expression has type
4314-
# "ndarray", variable has type "Index")
4315-
join_index, lidx, ridx = self._outer_indexer( # type:ignore[assignment]
4316-
sv, ov
4317-
)
4318-
# error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible
4319-
# type "Index"; expected "ndarray"
4320-
join_index = self._wrap_joined_index(
4321-
join_index, other # type: ignore[arg-type]
4322-
)
4288+
join_array, lidx, ridx = self._outer_indexer(sv, ov)
4289+
join_index = self._wrap_joined_index(join_array, other)
43234290
else:
43244291
if how == "left":
4325-
# error: Incompatible types in assignment (expression has type
4326-
# "ndarray", variable has type "Index")
4327-
join_index, lidx, ridx = self._left_indexer( # type: ignore[assignment]
4328-
sv, ov
4329-
)
4292+
join_array, lidx, ridx = self._left_indexer(sv, ov)
43304293
elif how == "right":
4331-
# error: Incompatible types in assignment (expression has type
4332-
# "ndarray", variable has type "Index")
4333-
join_index, ridx, lidx = self._left_indexer( # type: ignore[assignment]
4334-
ov, sv
4335-
)
4294+
join_array, ridx, lidx = self._left_indexer(ov, sv)
43364295
elif how == "inner":
4337-
# error: Incompatible types in assignment (expression has type
4338-
# "ndarray", variable has type "Index")
4339-
join_index, lidx, ridx = self._inner_indexer( # type:ignore[assignment]
4340-
sv, ov
4341-
)
4296+
join_array, lidx, ridx = self._inner_indexer(sv, ov)
43424297
elif how == "outer":
4343-
# error: Incompatible types in assignment (expression has type
4344-
# "ndarray", variable has type "Index")
4345-
join_index, lidx, ridx = self._outer_indexer( # type:ignore[assignment]
4346-
sv, ov
4347-
)
4348-
# error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible type
4349-
# "Index"; expected "ndarray"
4350-
join_index = self._wrap_joined_index(
4351-
join_index, other # type: ignore[arg-type]
4352-
)
4298+
join_array, lidx, ridx = self._outer_indexer(sv, ov)
4299+
4300+
join_index = self._wrap_joined_index(join_array, other)
43534301

43544302
if return_indexers:
43554303
lidx = None if lidx is None else ensure_platform_int(lidx)
@@ -6481,12 +6429,8 @@ def _maybe_cast_data_without_dtype(subarr):
64816429
pass
64826430

64836431
elif inferred.startswith("timedelta"):
6484-
# error: Incompatible types in assignment (expression has type
6485-
# "TimedeltaArray", variable has type "ndarray")
6486-
data = TimedeltaArray._from_sequence( # type: ignore[assignment]
6487-
subarr, copy=False
6488-
)
6489-
return data
6432+
tda = TimedeltaArray._from_sequence(subarr, copy=False)
6433+
return tda
64906434
elif inferred == "period":
64916435
try:
64926436
data = PeriodArray._from_sequence(subarr)

pandas/core/internals/blocks.py

+4-11
Original file line numberDiff line numberDiff line change
@@ -118,9 +118,7 @@
118118
from pandas.core.arrays._mixins import NDArrayBackedExtensionArray
119119

120120
# comparison is faster than is_object_dtype
121-
122-
# error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object"
123-
_dtype_obj = np.dtype(object) # type: ignore[type-var]
121+
_dtype_obj = np.dtype("object")
124122

125123

126124
class Block(PandasObject):
@@ -1598,14 +1596,9 @@ def to_native_types(self, na_rep="nan", quoting=None, **kwargs):
15981596
values = self.values
15991597
mask = isna(values)
16001598

1601-
# error: Incompatible types in assignment (expression has type "ndarray",
1602-
# variable has type "ExtensionArray")
1603-
values = np.asarray(values.astype(object)) # type: ignore[assignment]
1604-
values[mask] = na_rep
1605-
1606-
# TODO(EA2D): reshape not needed with 2D EAs
1607-
# we are expected to return a 2-d ndarray
1608-
return self.make_block(values)
1599+
new_values = np.asarray(values.astype(object))
1600+
new_values[mask] = na_rep
1601+
return self.make_block(new_values)
16091602

16101603
def take_nd(
16111604
self, indexer, axis: int = 0, new_mgr_locs=None, fill_value=lib.no_default

0 commit comments

Comments
 (0)