TYP: Fix typing errors on main #47469

Merged
merged 2 commits on Jun 23, 2022
pandas/core/algorithms.py (7 additions, 1 deletion)
@@ -1064,7 +1064,13 @@ def checked_add_with_arr(
elif arr_mask is not None:
not_nan = np.logical_not(arr_mask)
elif b_mask is not None:
not_nan = np.logical_not(b2_mask)
# error: Argument 1 to "__call__" of "_UFunc_Nin1_Nout1" has
# incompatible type "Optional[ndarray[Any, dtype[bool_]]]";
# expected "Union[_SupportsArray[dtype[Any]], _NestedSequence
# [_SupportsArray[dtype[Any]]], bool, int, float, complex, str
# , bytes, _NestedSequence[Union[bool, int, float, complex, str
# , bytes]]]"
not_nan = np.logical_not(b2_mask) # type: ignore[arg-type]
else:
not_nan = np.empty(arr.shape, dtype=bool)
not_nan.fill(True)
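
All of the additions in this PR follow the same pattern: paste the mypy error verbatim above the offending line and silence it with a narrowly scoped # type: ignore[...]. As a rough, self-contained sketch of this first case (the function name and types below are made up for illustration, not taken from pandas): the numpy stubs only accept array-likes, so a mask that mypy still sees as Optional triggers arg-type even when the surrounding branch guarantees it is not None.

    from typing import Optional

    import numpy as np


    def invert_optional_mask(mask: Optional[np.ndarray]) -> np.ndarray:
        # mypy does not know the caller only reaches this with a real array,
        # so Optional[ndarray] fails the ufunc's array-like parameter
        return np.logical_not(mask)  # type: ignore[arg-type]


    print(invert_optional_mask(np.array([True, False])))  # [False  True]
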
pandas/core/array_algos/quantile.py (8 additions, 2 deletions)
@@ -143,7 +143,10 @@ def _nanpercentile_1d(
return np.percentile(
values,
qs,
**{np_percentile_argname: interpolation},
# error: No overload variant of "percentile" matches argument
# types "ndarray[Any, Any]", "ndarray[Any, dtype[floating[_64Bit]]]"
# , "Dict[str, str]" [call-overload]
**{np_percentile_argname: interpolation}, # type: ignore[call-overload]
)


@@ -212,5 +215,8 @@ def _nanpercentile(
values,
qs,
axis=1,
**{np_percentile_argname: interpolation},
# error: No overload variant of "percentile" matches argument types
# "ndarray[Any, Any]", "ndarray[Any, dtype[floating[_64Bit]]]",
# "int", "Dict[str, str]" [call-overload]
**{np_percentile_argname: interpolation}, # type: ignore[call-overload]
)
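
Both quantile.py hunks silence the same call-overload error: unpacking a plain dict hides the keyword name from mypy, so no np.percentile overload can match. A runnable sketch, under the assumption that np_percentile_argname simply resolves numpy 1.22's rename of "interpolation" to "method" (in pandas it comes from the numpy compat shim; it is re-derived here only so the snippet stands alone):

    import numpy as np
    from numpy.lib import NumpyVersion

    # numpy 1.22 renamed the keyword from "interpolation" to "method"
    np_percentile_argname = (
        "method" if NumpyVersion(np.__version__) >= "1.22.0" else "interpolation"
    )

    values = np.arange(10.0)
    qs = np.array([25.0, 50.0, 75.0])
    result = np.percentile(
        values,
        qs,
        # a Dict[str, str] unpack cannot be matched against literal keyword names
        **{np_percentile_argname: "linear"},  # type: ignore[call-overload]
    )
    print(result)  # [2.25 4.5  6.75]
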
pandas/core/arraylike.py (1 addition, 2 deletions)
@@ -265,9 +265,8 @@ def array_ufunc(self, ufunc: np.ufunc, method: str, *inputs: Any, **kwargs: Any)
return result

# Determine if we should defer.
# error: "Type[ndarray[Any, Any]]" has no attribute "__array_ufunc__"
no_defer = (
np.ndarray.__array_ufunc__, # type: ignore[attr-defined]
np.ndarray.__array_ufunc__,
cls.__array_ufunc__,
)

pandas/core/arrays/arrow/array.py (9 additions, 1 deletion)
@@ -496,7 +496,15 @@ def _indexing_key_to_indices(
if isinstance(key, slice):
indices = np.arange(n)[key]
elif is_integer(key):
indices = np.arange(n)[[key]]
# error: Invalid index type "List[Union[int, ndarray[Any, Any]]]"
# for "ndarray[Any, dtype[signedinteger[Any]]]"; expected type
# "Union[SupportsIndex, _SupportsArray[dtype[Union[bool_,
# integer[Any]]]], _NestedSequence[_SupportsArray[dtype[Union
# [bool_, integer[Any]]]]], _NestedSequence[Union[bool, int]]
# , Tuple[Union[SupportsIndex, _SupportsArray[dtype[Union[bool_
# , integer[Any]]]], _NestedSequence[_SupportsArray[dtype[Union
# [bool_, integer[Any]]]]], _NestedSequence[Union[bool, int]]], ...]]"
indices = np.arange(n)[[key]] # type: ignore[index]
elif is_bool_dtype(key):
key = np.asarray(key)
if len(key) != n:
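
A sketch of the index error suppressed above, with an illustrative type rather than the real signature: because key keeps the broad union the helper accepts, the wrapping list is a List[Union[int, ndarray]], which none of the stubs' fancy-indexing overloads accept even though the is_integer() check guarantees a plain int at runtime.

    from __future__ import annotations

    import numpy as np

    key: int | np.ndarray = 2  # the is_integer() guard is opaque to mypy
    n = 5
    indices = np.arange(n)[[key]]  # type: ignore[index]
    print(indices)  # [2]
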
pandas/core/arrays/datetimes.py (1 addition, 4 deletions)
@@ -487,10 +487,7 @@ def _generate_range(
np.linspace(0, end.value - start.value, periods, dtype="int64")
+ start.value
)
# error: Non-overlapping equality check
# (left operand type: "dtype[signedinteger[Any]]",
# right operand type: "Literal['i8']")
if i8values.dtype != "i8": # type: ignore[comparison-overlap]
if i8values.dtype != "i8":
# 2022-01-09 I (brock) am not sure if it is possible for this
# to overflow and cast to e.g. f8, but if it does we need to cast
i8values = i8values.astype("i8")
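
This hunk, like the ones in arraylike.py above and in astype.py, common.py, buffer.py and ops.py below, goes the other way and deletes an ignore: the error it suppressed no longer fires on main, so the stale comment itself becomes a typing error, assuming (as pandas' mypy setup appears to) that warn_unused_ignores is enabled. A sketch of the now-clean comparison:

    import numpy as np

    i8values = np.linspace(0, 10, 5, dtype="int64")
    # current numpy stubs accept a dtype-vs-string comparison, so a leftover
    # "# type: ignore[comparison-overlap]" here would be flagged as unused
    if i8values.dtype != "i8":
        i8values = i8values.astype("i8")
    print(i8values.dtype)  # int64
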
pandas/core/arrays/interval.py (9 additions, 16 deletions)
@@ -687,21 +687,7 @@ def __getitem__(
if is_scalar(left) and isna(left):
return self._fill_value
return Interval(left, right, inclusive=self.inclusive)
# error: Argument 1 to "ndim" has incompatible type
# "Union[ndarray[Any, Any], ExtensionArray]"; expected
# "Union[Sequence[Sequence[Sequence[Sequence[Sequence[Any]]]]],
# Union[Union[_SupportsArray[dtype[Any]],
# Sequence[_SupportsArray[dtype[Any]]],
# Sequence[Sequence[_SupportsArray[dtype[Any]]]],
# Sequence[Sequence[Sequence[_SupportsArray[dtype[Any]]]]],
# Sequence[Sequence[Sequence[Sequence[_SupportsArray[dtype[Any]]]]]]],
# Union[bool, int, float, complex, str, bytes,
# Sequence[Union[bool, int, float, complex, str, bytes]],
# Sequence[Sequence[Union[bool, int, float, complex, str, bytes]]],
# Sequence[Sequence[Sequence[Union[bool, int, float, complex, str, bytes]]]],
# Sequence[Sequence[Sequence[Sequence[Union[bool, int, float,
# complex, str, bytes]]]]]]]]"
if np.ndim(left) > 1: # type: ignore[arg-type]
if np.ndim(left) > 1:
# GH#30588 multi-dimensional indexer disallowed
raise ValueError("multi-dimensional indexing not allowed")
return self._shallow_copy(left, right)
@@ -1679,7 +1665,14 @@ def isin(self, values) -> np.ndarray:
# complex128 ndarray is much more performant.
left = self._combined.view("complex128")
right = values._combined.view("complex128")
return np.in1d(left, right)
# error: Argument 1 to "in1d" has incompatible type
# "Union[ExtensionArray, ndarray[Any, Any],
# ndarray[Any, dtype[Any]]]"; expected
# "Union[_SupportsArray[dtype[Any]],
# _NestedSequence[_SupportsArray[dtype[Any]]], bool,
# int, float, complex, str, bytes, _NestedSequence[
# Union[bool, int, float, complex, str, bytes]]]"
return np.in1d(left, right) # type: ignore[arg-type]

elif needs_i8_conversion(self.left.dtype) ^ needs_i8_conversion(
values.left.dtype
pandas/core/arrays/masked.py (13 additions, 9 deletions)
@@ -110,13 +110,7 @@ def __init__(
self, values: np.ndarray, mask: npt.NDArray[np.bool_], copy: bool = False
) -> None:
# values is supposed to already be validated in the subclass
if not (
isinstance(mask, np.ndarray)
and
# error: Non-overlapping equality check
# (left operand type: "dtype[bool_]", right operand type: "Type[bool_]")
mask.dtype == np.bool_ # type: ignore[comparison-overlap]
):
if not (isinstance(mask, np.ndarray) and mask.dtype == np.bool_):
raise TypeError(
"mask should be boolean numpy array. Use "
"the 'pd.array' function instead"
@@ -1157,7 +1151,12 @@ def any(self, *, skipna: bool = True, **kwargs):
nv.validate_any((), kwargs)

values = self._data.copy()
np.putmask(values, self._mask, self._falsey_value)
# error: Argument 3 to "putmask" has incompatible type "object";
# expected "Union[_SupportsArray[dtype[Any]],
# _NestedSequence[_SupportsArray[dtype[Any]]],
# bool, int, float, complex, str, bytes,
# _NestedSequence[Union[bool, int, float, complex, str, bytes]]]"
np.putmask(values, self._mask, self._falsey_value) # type: ignore[arg-type]
result = values.any()
if skipna:
return result
@@ -1233,7 +1232,12 @@ def all(self, *, skipna: bool = True, **kwargs):
nv.validate_all((), kwargs)

values = self._data.copy()
np.putmask(values, self._mask, self._truthy_value)
# error: Argument 3 to "putmask" has incompatible type "object";
# expected "Union[_SupportsArray[dtype[Any]],
# _NestedSequence[_SupportsArray[dtype[Any]]],
# bool, int, float, complex, str, bytes,
# _NestedSequence[Union[bool, int, float, complex, str, bytes]]]"
np.putmask(values, self._mask, self._truthy_value) # type: ignore[arg-type]
result = values.all()

if skipna:
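
The any() and all() hunks hit the same arg-type complaint because the truthy and falsey fill values are declared as plain object on the masked-array classes. A standalone sketch with the attribute inlined (the variable here is illustrative, not the real class attribute):

    import numpy as np

    _falsey_value: object = False  # declared as "object" in pandas

    values = np.array([True, True, True])
    mask = np.array([False, True, False])
    # "object" is wider than the array-like union putmask's stubs accept
    np.putmask(values, mask, _falsey_value)  # type: ignore[arg-type]
    print(values)  # [ True False  True]
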
pandas/core/arrays/sparse/array.py (16 additions, 3 deletions)
@@ -944,7 +944,16 @@ def __getitem__(
if is_integer(key):
return self._get_val_at(key)
elif isinstance(key, tuple):
data_slice = self.to_dense()[key]
# error: Invalid index type "Tuple[Union[int, ellipsis], ...]"
# for "ndarray[Any, Any]"; expected type
# "Union[SupportsIndex, _SupportsArray[dtype[Union[bool_,
# integer[Any]]]], _NestedSequence[_SupportsArray[dtype[
# Union[bool_, integer[Any]]]]], _NestedSequence[Union[
# bool, int]], Tuple[Union[SupportsIndex, _SupportsArray[
# dtype[Union[bool_, integer[Any]]]], _NestedSequence[
# _SupportsArray[dtype[Union[bool_, integer[Any]]]]],
# _NestedSequence[Union[bool, int]]], ...]]"
data_slice = self.to_dense()[key] # type: ignore[index]
elif isinstance(key, slice):

# Avoid densifying when handling contiguous slices
@@ -1184,7 +1193,10 @@ def _concat_same_type(

data = np.concatenate(values)
indices_arr = np.concatenate(indices)
sp_index = IntIndex(length, indices_arr)
# error: Argument 2 to "IntIndex" has incompatible type
# "ndarray[Any, dtype[signedinteger[_32Bit]]]";
# expected "Sequence[int]"
sp_index = IntIndex(length, indices_arr) # type: ignore[arg-type]

else:
# when concatenating block indices, we don't claim that you'll
@@ -1374,7 +1386,8 @@ def __setstate__(self, state):
if isinstance(state, tuple):
# Compat for pandas < 0.24.0
nd_state, (fill_value, sp_index) = state
sparse_values = np.array([])
# error: Need type annotation for "sparse_values"
sparse_values = np.array([]) # type: ignore[var-annotated]
sparse_values.__setstate__(nd_state)

self._sparse_values = sparse_values
pandas/core/dtypes/astype.py (2 additions, 6 deletions)
@@ -113,9 +113,7 @@ def astype_nansafe(
).reshape(shape)

elif is_datetime64_dtype(arr.dtype):
# error: Non-overlapping equality check (left
# operand type: "dtype[Any]", right operand type: "Type[signedinteger[Any]]")
if dtype == np.int64: # type: ignore[comparison-overlap]
if dtype == np.int64:
if isna(arr).any():
raise ValueError("Cannot convert NaT values to integer")
return arr.view(dtype)
@@ -127,9 +125,7 @@
raise TypeError(f"cannot astype a datetimelike from [{arr.dtype}] to [{dtype}]")

elif is_timedelta64_dtype(arr.dtype):
# error: Non-overlapping equality check (left
# operand type: "dtype[Any]", right operand type: "Type[signedinteger[Any]]")
if dtype == np.int64: # type: ignore[comparison-overlap]
if dtype == np.int64:
if isna(arr).any():
raise ValueError("Cannot convert NaT values to integer")
return arr.view(dtype)
pandas/core/dtypes/common.py (1 addition, 3 deletions)
@@ -534,9 +534,7 @@ def is_string_or_object_np_dtype(dtype: np.dtype) -> bool:
"""
Faster alternative to is_string_dtype, assumes we have a np.dtype object.
"""
# error: Non-overlapping equality check (left operand type:
# "dtype[Any]", right operand type: "Type[object]")
return dtype == object or dtype.kind in "SU" # type: ignore[comparison-overlap]
return dtype == object or dtype.kind in "SU"


def is_string_dtype(arr_or_dtype) -> bool:
pandas/core/exchange/buffer.py (1 addition, 2 deletions)
@@ -57,8 +57,7 @@ def __dlpack__(self):
Represent this structure as DLPack interface.
"""
if _NUMPY_HAS_DLPACK:
# error: "ndarray[Any, Any]" has no attribute "__dlpack__"
return self._x.__dlpack__() # type: ignore[attr-defined]
return self._x.__dlpack__()
raise NotImplementedError("__dlpack__")

def __dlpack_device__(self) -> Tuple[DlpackDeviceType, Optional[int]]:
pandas/core/frame.py (3 additions, 1 deletion)
@@ -2481,7 +2481,9 @@ def to_records(
if dtype_mapping is None:
formats.append(v.dtype)
elif isinstance(dtype_mapping, (type, np.dtype, str)):
formats.append(dtype_mapping)
# error: Argument 1 to "append" of "list" has incompatible
# type "Union[type, dtype[Any], str]"; expected "dtype[Any]"
formats.append(dtype_mapping) # type: ignore[arg-type]
else:
element = "row" if i < index_len else "column"
msg = f"Invalid dtype {dtype_mapping} specified for {element} {name}"
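
A sketch of the list-invariance problem behind this ignore: formats is a list of numpy dtypes, so appending the wider union that dtype_mapping may hold is rejected, even though the record-array machinery downstream also accepts dtype strings and types (only the two names mirror the pandas code):

    from __future__ import annotations

    import numpy as np

    formats: list[np.dtype] = [np.dtype("int64")]
    dtype_mapping: type | np.dtype | str = "f8"
    formats.append(dtype_mapping)  # type: ignore[arg-type]
    print(formats)  # [dtype('int64'), 'f8']
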
pandas/core/indexes/base.py (14 additions, 3 deletions)
@@ -4834,7 +4834,13 @@ def _join_non_unique(
right = other._values.take(right_idx)

if isinstance(join_array, np.ndarray):
np.putmask(join_array, mask, right)
# error: Argument 3 to "putmask" has incompatible type
# "Union[ExtensionArray, ndarray[Any, Any]]"; expected
# "Union[_SupportsArray[dtype[Any]], _NestedSequence[
# _SupportsArray[dtype[Any]]], bool, int, float, complex,
# str, bytes, _NestedSequence[Union[bool, int, float,
# complex, str, bytes]]]"
np.putmask(join_array, mask, right) # type: ignore[arg-type]
else:
join_array._putmask(mask, right)

@@ -5348,7 +5354,10 @@ def __getitem__(self, key):
if hasattr(result, "_ndarray"):
# i.e. NDArrayBackedExtensionArray
# Unpack to ndarray for MPL compat
return result._ndarray
# error: Item "ndarray[Any, Any]" of
# "Union[ExtensionArray, ndarray[Any, Any]]"
# has no attribute "_ndarray"
return result._ndarray # type: ignore[union-attr]
return result

# NB: Using _constructor._simple_new would break if MultiIndex
@@ -6886,7 +6895,9 @@ def insert(self, loc: int, item) -> Index:
new_values = np.insert(arr, loc, casted)

else:
new_values = np.insert(arr, loc, None)
# error: No overload variant of "insert" matches argument types
# "ndarray[Any, Any]", "int", "None"
new_values = np.insert(arr, loc, None) # type: ignore[call-overload]
loc = loc if loc >= 0 else loc - 1
new_values[loc] = item

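
The putmask and union-attr hunks above repeat patterns already seen; the last hunk's call-overload error is specific to np.insert, whose stubs have no overload accepting None as the inserted value even though it is perfectly valid for an object-dtype array at runtime. A minimal sketch:

    import numpy as np

    arr = np.array([1.5, "a"], dtype=object)
    loc = 1
    new_values = np.insert(arr, loc, None)  # type: ignore[call-overload]
    print(new_values)  # [1.5 None 'a']
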
pandas/core/indexes/multi.py (11 additions, 2 deletions)
@@ -363,7 +363,10 @@ def _validate_codes(self, level: list, code: list):
"""
null_mask = isna(level)
if np.any(null_mask):
code = np.where(null_mask[code], -1, code)
# error: Incompatible types in assignment
# (expression has type "ndarray[Any, dtype[Any]]",
# variable has type "List[Any]")
code = np.where(null_mask[code], -1, code) # type: ignore[assignment]
return code

def _verify_integrity(self, codes: list | None = None, levels: list | None = None):
@@ -1577,7 +1580,13 @@ def is_monotonic_increasing(self) -> bool:
self._get_level_values(i)._values for i in reversed(range(len(self.levels)))
]
try:
sort_order = np.lexsort(values)
# error: Argument 1 to "lexsort" has incompatible type
# "List[Union[ExtensionArray, ndarray[Any, Any]]]";
# expected "Union[_SupportsArray[dtype[Any]],
# _NestedSequence[_SupportsArray[dtype[Any]]], bool,
# int, float, complex, str, bytes, _NestedSequence[Union
# [bool, int, float, complex, str, bytes]]]"
sort_order = np.lexsort(values) # type: ignore[arg-type]
return Index(sort_order).is_monotonic_increasing
except TypeError:

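
A sketch of the assignment error in _validate_codes: code comes in annotated as a list, so rebinding it to the ndarray that np.where returns is flagged even though downstream consumers handle either:

    import numpy as np

    null_mask = np.array([False, True, False])
    code = [0, 1, 2, 1]  # inferred as list[int]
    code = np.where(null_mask[code], -1, code)  # type: ignore[assignment]
    print(code)  # [ 0 -1  2 -1]
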
pandas/core/internals/ops.py (1 addition, 3 deletions)
@@ -125,9 +125,7 @@ def _get_same_shape_values(
# argument type "Tuple[Union[ndarray, slice], slice]"
lvals = lvals[rblk.mgr_locs.indexer, :] # type: ignore[call-overload]
assert lvals.shape[0] == 1, lvals.shape
# error: No overload variant of "__getitem__" of "ExtensionArray" matches
# argument type "Tuple[int, slice]"
lvals = lvals[0, :] # type: ignore[call-overload]
lvals = lvals[0, :]
else:
# lvals are 1D, rvals are 2D
assert rvals.shape[0] == 1, rvals.shape
pandas/core/missing.py (19 additions, 3 deletions)
@@ -333,7 +333,13 @@ def func(yvalues: np.ndarray) -> None:
**kwargs,
)

np.apply_along_axis(func, axis, data)
# error: Argument 1 to "apply_along_axis" has incompatible type
# "Callable[[ndarray[Any, Any]], None]"; expected "Callable[...,
# Union[_SupportsArray[dtype[<nothing>]], Sequence[_SupportsArray
# [dtype[<nothing>]]], Sequence[Sequence[_SupportsArray[dtype[<nothing>]]]],
# Sequence[Sequence[Sequence[_SupportsArray[dtype[<nothing>]]]]],
# Sequence[Sequence[Sequence[Sequence[_SupportsArray[dtype[<nothing>]]]]]]]]"
np.apply_along_axis(func, axis, data) # type: ignore[arg-type]
return


@@ -772,13 +778,23 @@ def interpolate_2d(
"""
if limit_area is not None:
np.apply_along_axis(
partial(
# error: Argument 1 to "apply_along_axis" has incompatible type
# "partial[None]"; expected
# "Callable[..., Union[_SupportsArray[dtype[<nothing>]],
# Sequence[_SupportsArray[dtype[<nothing>]]],
# Sequence[Sequence[_SupportsArray[dtype[<nothing>]]]],
# Sequence[Sequence[Sequence[_SupportsArray[dtype[<nothing>]]]]],
# Sequence[Sequence[Sequence[Sequence[_
# SupportsArray[dtype[<nothing>]]]]]]]]"
partial( # type: ignore[arg-type]
_interpolate_with_limit_area,
method=method,
limit=limit,
limit_area=limit_area,
),
axis,
# error: Argument 2 to "apply_along_axis" has incompatible type
# "Union[str, int]"; expected "SupportsIndex"
axis, # type: ignore[arg-type]
values,
)
return
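
Both apply_along_axis calls are rejected because the interpolation helpers mutate each 1-D slice in place and return None, while the stubs expect the callable to return an array-like (the second call additionally passes an axis mypy still sees as Union[str, int]). A reduced sketch of the in-place pattern, not the real interpolation logic:

    import numpy as np


    def fill_first(row: np.ndarray) -> None:
        # mutates the slice in place and deliberately returns nothing,
        # which is what the stubs object to
        row[0] = 0.0


    data = np.ones((2, 3))
    np.apply_along_axis(fill_first, 1, data)  # type: ignore[arg-type]
    print(data)  # first element of every row is now 0.0
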
pandas/core/reshape/melt.py (3 additions, 1 deletion)
@@ -133,7 +133,9 @@ def melt(
if is_extension_array_dtype(id_data):
id_data = concat([id_data] * K, ignore_index=True)
else:
id_data = np.tile(id_data._values, K)
# error: Incompatible types in assignment (expression has type
# "ndarray[Any, dtype[Any]]", variable has type "Series")
id_data = np.tile(id_data._values, K) # type: ignore[assignment]
mdata[col] = id_data

mcolumns = id_vars + var_name + [value_name]
pandas/core/series.py (3 additions, 1 deletion)
@@ -2024,7 +2024,9 @@ def count(self, level=None):
lev = lev.insert(cnt, lev._na_value)

obs = level_codes[notna(self._values)]
out = np.bincount(obs, minlength=len(lev) or None)
# error: Argument "minlength" to "bincount" has incompatible type
# "Optional[int]"; expected "SupportsIndex"
out = np.bincount(obs, minlength=len(lev) or None) # type: ignore[arg-type]
return self._constructor(out, index=lev, dtype="int64").__finalize__(
self, method="count"
)
Expand Down