TYP: enable disallow_untyped_decorators #43828

Merged: 13 commits, Oct 30, 2021
9 changes: 9 additions & 0 deletions pandas/_libs/properties.pyi
@@ -0,0 +1,9 @@
# pyright: reportIncompleteStub = false
from typing import Any

# note: this is a lie to make type checkers happy (they special
# case property). cache_readonly uses attribute names similar to
# property (fget) but it does not provide fset and fdel.
cache_readonly = property

def __getattr__(name: str) -> Any: ... # incomplete
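
As context for reviewers, a minimal usage sketch (the class and values are hypothetical, not part of this PR; assumes pandas is importable) of what the property-based stub buys: with cache_readonly declared as property, mypy resolves attribute access on a decorated method to that method's return type instead of treating the decorator as untyped.

# Hypothetical illustration only.
from pandas._libs.properties import cache_readonly


class Example:
    @cache_readonly
    def size(self) -> int:
        # computed once, then served from the instance's _cache dict
        return 42


obj = Example()
print(obj.size)  # 42; mypy sees int because the stub declares cache_readonly = property
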
12 changes: 6 additions & 6 deletions pandas/_libs/properties.pyx
@@ -10,12 +10,12 @@ from cpython.dict cimport (
cdef class CachedProperty:

cdef readonly:
object func, name, __doc__
object fget, name, __doc__

def __init__(self, func):
self.func = func
self.name = func.__name__
self.__doc__ = getattr(func, '__doc__', None)
def __init__(self, fget):
self.fget = fget
self.name = fget.__name__
self.__doc__ = getattr(fget, '__doc__', None)

def __get__(self, obj, typ):
if obj is None:
@@ -34,7 +34,7 @@ cdef class CachedProperty:
# not necessary to Py_INCREF
val = <object>PyDict_GetItem(cache, self.name)
else:
val = self.func(obj)
val = self.fget(obj)
PyDict_SetItem(cache, self.name, val)
return val

1 change: 1 addition & 0 deletions pandas/conftest.py
@@ -17,6 +17,7 @@
- Dtypes
- Misc
"""
# pyright: reportUntypedFunctionDecorator = false

from collections import abc
from datetime import (
3 changes: 2 additions & 1 deletion pandas/core/_numba/executor.py
@@ -44,7 +44,8 @@ def generate_shared_aggregator(

numba = import_optional_dependency("numba")

@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel)
# error: Untyped decorator makes function "column_looper" untyped
@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel) # type: ignore[misc]
def column_looper(
values: np.ndarray,
start: np.ndarray,
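
All of the numba-related changes in this PR follow the same shape, so here is one self-contained illustration (a toy function, not pandas code; assumes numba and numpy are installed) of the error that disallow_untyped_decorators surfaces and the targeted ignore used to silence it:

import numpy as np

import numba  # numba.jit ships without type stubs, so the decorator is untyped


# error: Untyped decorator makes function "toy_sum" untyped
@numba.jit(nopython=True, nogil=True, parallel=False)  # type: ignore[misc]
def toy_sum(values: np.ndarray) -> float:
    total = 0.0
    for i in range(len(values)):
        total += values[i]
    return total


print(toy_sum(np.arange(5.0)))  # 10.0
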
9 changes: 6 additions & 3 deletions pandas/core/_numba/kernels/mean_.py
@@ -14,7 +14,8 @@
from pandas.core._numba.kernels.shared import is_monotonic_increasing


@numba.jit(nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "add_mean" untyped
@numba.jit(nopython=True, nogil=True, parallel=False) # type: ignore[misc]
def add_mean(
val: float, nobs: int, sum_x: float, neg_ct: int, compensation: float
) -> tuple[int, float, int, float]:
@@ -29,7 +30,8 @@ def add_mean(
return nobs, sum_x, neg_ct, compensation


@numba.jit(nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "remove_mean" untyped
@numba.jit(nopython=True, nogil=True, parallel=False) # type: ignore[misc]
def remove_mean(
val: float, nobs: int, sum_x: float, neg_ct: int, compensation: float
) -> tuple[int, float, int, float]:
@@ -44,7 +46,8 @@ def remove_mean(
return nobs, sum_x, neg_ct, compensation


@numba.jit(nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "sliding_mean" untyped
@numba.jit(nopython=True, nogil=True, parallel=False) # type: ignore[misc]
def sliding_mean(
values: np.ndarray,
start: np.ndarray,
5 changes: 4 additions & 1 deletion pandas/core/_numba/kernels/shared.py
@@ -2,7 +2,10 @@
import numpy as np


@numba.jit(numba.boolean(numba.int64[:]), nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "is_monotonic_increasing" untyped
@numba.jit( # type: ignore[misc]
numba.boolean(numba.int64[:]), nopython=True, nogil=True, parallel=False
)
def is_monotonic_increasing(bounds: np.ndarray) -> bool:
"""Check if int64 values are monotonically increasing."""
n = len(bounds)
9 changes: 6 additions & 3 deletions pandas/core/_numba/kernels/sum_.py
@@ -14,7 +14,8 @@
from pandas.core._numba.kernels.shared import is_monotonic_increasing


@numba.jit(nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "add_sum" untyped
@numba.jit(nopython=True, nogil=True, parallel=False) # type: ignore[misc]
def add_sum(
val: float, nobs: int, sum_x: float, compensation: float
) -> tuple[int, float, float]:
@@ -27,7 +28,8 @@ def add_sum(
return nobs, sum_x, compensation


@numba.jit(nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "remove_sum" untyped
@numba.jit(nopython=True, nogil=True, parallel=False) # type: ignore[misc]
def remove_sum(
val: float, nobs: int, sum_x: float, compensation: float
) -> tuple[int, float, float]:
@@ -40,7 +42,8 @@ def remove_sum(
return nobs, sum_x, compensation


@numba.jit(nopython=True, nogil=True, parallel=False)
# error: Untyped decorator makes function "sliding_sum" untyped
@numba.jit(nopython=True, nogil=True, parallel=False) # type: ignore[misc]
def sliding_sum(
values: np.ndarray,
start: np.ndarray,
2 changes: 1 addition & 1 deletion pandas/core/arrays/categorical.py
@@ -2699,7 +2699,7 @@ def _get_codes_for_values(values, categories: Index) -> np.ndarray:
"""
dtype_equal = is_dtype_equal(values.dtype, categories.dtype)

if is_extension_array_dtype(categories.dtype) and is_object_dtype(values):
if isinstance(categories.dtype, ExtensionDtype) and is_object_dtype(values):
# Support inferring the correct extension dtype from an array of
# scalar objects. e.g.
# Categorical(array[Period, Period], categories=PeriodIndex(...))
4 changes: 3 additions & 1 deletion pandas/core/arrays/period.py
@@ -669,7 +669,9 @@ def fillna(self, value=None, method=None, limit=None) -> PeriodArray:
# view as dt64 so we get treated as timelike in core.missing
dta = self.view("M8[ns]")
result = dta.fillna(value=value, method=method, limit=limit)
return result.view(self.dtype)
# error: Incompatible return value type (got "Union[ExtensionArray,
# ndarray[Any, Any]]", expected "PeriodArray")
return result.view(self.dtype) # type: ignore[return-value]
Review comment (Member):

In pandas/core/arrays/datetimelike.py we overloaded view for DatetimeLikeArrayMixin; maybe we could do the same for PeriodArray. (Suggestion, not necessarily for this PR.)
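
A rough, self-contained sketch of that suggestion under toy names (this is not the real PeriodArray API): overloading view so that reconstructing the original array type is typed precisely, which would make the return-value ignore above unnecessary.

from __future__ import annotations

from typing import Union, overload

import numpy as np


class ToyPeriodArray:
    def __init__(self, values: np.ndarray) -> None:
        self._ndarray = values

    @overload
    def view(self, dtype: type[ToyPeriodArray]) -> ToyPeriodArray: ...
    @overload
    def view(self, dtype: str) -> np.ndarray: ...

    def view(self, dtype: Union[type[ToyPeriodArray], str]):
        # reconstruct the wrapper when asked for the array type itself,
        # otherwise defer to the underlying ndarray view
        if dtype is ToyPeriodArray:
            return ToyPeriodArray(self._ndarray)
        return self._ndarray.view(dtype)


arr = ToyPeriodArray(np.arange(3, dtype="i8"))
reconstructed = arr.view(ToyPeriodArray)  # typed as ToyPeriodArray
raw = arr.view("M8[ns]")                  # typed as np.ndarray
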

return super().fillna(value=value, method=method, limit=limit)

# ------------------------------------------------------------------
6 changes: 5 additions & 1 deletion pandas/core/groupby/generic.py
@@ -675,7 +675,11 @@ def apply_series_value_counts():
# multi-index components
codes = self.grouper.reconstructed_codes
codes = [rep(level_codes) for level_codes in codes] + [llab(lab, inc)]
levels = [ping.group_index for ping in self.grouper.groupings] + [lev]
# error: List item 0 has incompatible type "Union[ndarray[Any, Any], Index]";
# expected "Index"
levels = [ping.group_index for ping in self.grouper.groupings] + [
lev # type: ignore[list-item]
]
Review comment (Member):

Hmm, we have a Union return type from factorize.

Over time we should avoid Union return types, either by refactoring code or by using overloads.

From https://github.com/python/typeshed/blob/master/CONTRIBUTING.md#conventions: "avoid union return types: python/mypy#1693".

(comment, no action needed for this PR)
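
A self-contained sketch of that convention (a toy helper under an invented name, not the real factorize): dispatching via overloads lets each caller see a precise return type instead of Union[ndarray, Index].

from __future__ import annotations

from typing import Union, overload

import numpy as np
import pandas as pd


@overload
def unique_levels(values: pd.Index) -> pd.Index: ...
@overload
def unique_levels(values: np.ndarray) -> np.ndarray: ...


def unique_levels(values: Union[pd.Index, np.ndarray]):
    # identical runtime behaviour either way; the overloads only narrow
    # the static return type for each call site
    if isinstance(values, pd.Index):
        return values.unique()
    return np.unique(values)


lev = unique_levels(pd.Index(["a", "b", "a"]))  # mypy: Index
arr = unique_levels(np.array([2, 1, 2]))        # mypy: ndarray
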

names = self.grouper.names + [self.obj.name]

if dropna:
4 changes: 3 additions & 1 deletion pandas/core/groupby/groupby.py
@@ -3449,7 +3449,9 @@ def _reindex_output(
levels_list = [ping.group_index for ping in groupings]
names = self.grouper.names
if qs is not None:
levels_list.append(qs)
# error: Argument 1 to "append" of "list" has incompatible type
# "ndarray[Any, dtype[floating[_64Bit]]]"; expected "Index"
levels_list.append(qs) # type: ignore[arg-type]
names = names + [None]
index, _ = MultiIndex.from_product(levels_list, names=names).sortlevel()

6 changes: 4 additions & 2 deletions pandas/core/groupby/numba_.py
@@ -92,7 +92,8 @@ def generate_numba_agg_func(
numba_func = jit_user_function(func, nopython, nogil, parallel)
numba = import_optional_dependency("numba")

@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel)
# error: Untyped decorator makes function "group_agg" untyped
@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel) # type: ignore[misc]
def group_agg(
values: np.ndarray,
index: np.ndarray,
@@ -153,7 +154,8 @@ def generate_numba_transform_func(
numba_func = jit_user_function(func, nopython, nogil, parallel)
numba = import_optional_dependency("numba")

@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel)
# error: Untyped decorator makes function "group_transform" untyped
@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel) # type: ignore[misc]
def group_transform(
values: np.ndarray,
index: np.ndarray,
23 changes: 18 additions & 5 deletions pandas/core/indexes/base.py
@@ -3681,7 +3681,11 @@ def _get_indexer(
else:
tgt_values = target._get_engine_target()
if target._is_multi and self._is_multi:
tgt_values = self._engine._extract_level_codes(target)
engine = self._engine
# error: "IndexEngine" has no attribute "_extract_level_codes"
tgt_values = engine._extract_level_codes( # type: ignore[attr-defined]
target
)

indexer = self._engine.get_indexer(tgt_values)

@@ -3758,7 +3762,8 @@ def _get_fill_indexer(
if self._is_multi:
# TODO: get_indexer_with_fill docstring says values must be _sorted_
# but that doesn't appear to be enforced
return self._engine.get_indexer_with_fill(
# error: "IndexEngine" has no attribute "get_indexer_with_fill"
return self._engine.get_indexer_with_fill( # type: ignore[attr-defined]
target=target._values, values=self._values, method=method, limit=limit
)

@@ -4677,7 +4682,9 @@ def values(self) -> ArrayLike:
"""
return self._data

@cache_readonly
# error: Decorated property not supported
# https://github.com/python/mypy/issues/1362
@cache_readonly # type: ignore[misc]
@doc(IndexOpsMixin.array)
def array(self) -> ExtensionArray:
array = self._data
@@ -5596,7 +5603,11 @@ def get_indexer_non_unique(
# self and non-Multi target
tgt_values = target._get_engine_target()
if self._is_multi and target._is_multi:
tgt_values = self._engine._extract_level_codes(target)
engine = self._engine
# error: "IndexEngine" has no attribute "_extract_level_codes"
tgt_values = engine._extract_level_codes( # type: ignore[attr-defined]
target
)

indexer, missing = self._engine.get_indexer_non_unique(tgt_values)
return ensure_platform_int(indexer), ensure_platform_int(missing)
@@ -7049,7 +7060,9 @@ def unpack_nested_dtype(other: _IndexT) -> _IndexT:
if is_categorical_dtype(dtype):
# If there is ever a SparseIndex, this could get dispatched
# here too.
return dtype.categories
# error: Item "dtype[Any]"/"ExtensionDtype" of "Union[dtype[Any],
# ExtensionDtype]" has no attribute "categories"
return dtype.categories # type: ignore[union-attr]
return other


7 changes: 5 additions & 2 deletions pandas/core/indexes/datetimelike.py
@@ -95,8 +95,11 @@ class DatetimeIndexOpsMixin(NDArrayBackedExtensionIndex):
_field_ops: list[str] = []

# error: "Callable[[Any], Any]" has no attribute "fget"
hasnans = cache_readonly(
DatetimeLikeArrayMixin._hasnans.fget # type: ignore[attr-defined]
hasnans = cast(
bool,
cache_readonly(
DatetimeLikeArrayMixin._hasnans.fget # type: ignore[attr-defined]
),
)

@property
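
For readers unfamiliar with the cast above, a minimal self-contained sketch of the same pattern using plain property instead of cache_readonly (toy classes, not the pandas ones): the class attribute is a descriptor built from another property's fget, and typing.cast asserts the type that instance access will actually yield.

from typing import cast


class Base:
    @property
    def _hasnans(self) -> bool:
        return False


class Derived(Base):
    # rebuild the parent's getter under a new name; cast() is a runtime
    # no-op that tells the type checker instance access yields a bool
    hasnans = cast(bool, property(Base._hasnans.fget))


print(Derived().hasnans)  # False
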
24 changes: 17 additions & 7 deletions pandas/core/indexes/extension.py
@@ -3,7 +3,10 @@
"""
from __future__ import annotations

from typing import TypeVar
from typing import (
Callable,
TypeVar,
)

import numpy as np

@@ -23,9 +26,12 @@
from pandas.core.indexes.base import Index

_T = TypeVar("_T", bound="NDArrayBackedExtensionIndex")
_ExtensionIndexT = TypeVar("_ExtensionIndexT", bound="ExtensionIndex")


def _inherit_from_data(name: str, delegate, cache: bool = False, wrap: bool = False):
def _inherit_from_data(
name: str, delegate: type, cache: bool = False, wrap: bool = False
):
"""
Make an alias for a method of the underlying ExtensionArray.

@@ -81,8 +87,9 @@ def fset(self, value):
method = attr

else:

def method(self, *args, **kwargs):
# error: Incompatible redefinition (redefinition with type "Callable[[Any,
# VarArg(Any), KwArg(Any)], Any]", original type "property")
def method(self, *args, **kwargs): # type: ignore[misc]
if "inplace" in kwargs:
raise ValueError(f"cannot use inplace with {type(self).__name__}")
result = attr(self._data, *args, **kwargs)
@@ -94,12 +101,15 @@ def method(self, *args, **kwargs):
return Index(result, name=self.name)
return result

method.__name__ = name
# error: "property" has no attribute "__name__"
method.__name__ = name # type: ignore[attr-defined]
method.__doc__ = attr.__doc__
return method


def inherit_names(names: list[str], delegate, cache: bool = False, wrap: bool = False):
def inherit_names(
names: list[str], delegate: type, cache: bool = False, wrap: bool = False
) -> Callable[[type[_ExtensionIndexT]], type[_ExtensionIndexT]]:
"""
Class decorator to pin attributes from an ExtensionArray to a Index subclass.

@@ -112,7 +122,7 @@ def inherit_names(names: list[str], delegate, cache: bool = False, wrap: bool =
Whether to wrap the inherited result in an Index.
"""

def wrapper(cls):
def wrapper(cls: type[_ExtensionIndexT]) -> type[_ExtensionIndexT]:
for name in names:
meth = _inherit_from_data(name, delegate, cache=cache, wrap=wrap)
setattr(cls, name, meth)
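
The inherit_names signature above follows the usual typed class-decorator pattern. A self-contained sketch under invented names (not the pandas helpers): parametrizing the decorator with a TypeVar preserves the decorated class's exact type for type checkers.

from __future__ import annotations

from typing import Callable, TypeVar

_C = TypeVar("_C")


def add_tag(tag: str) -> Callable[[type[_C]], type[_C]]:
    def wrapper(cls: type[_C]) -> type[_C]:
        # attach a class attribute and return the same class object;
        # returning cls (not a new type) is what lets the TypeVar
        # preserve the original class for type checkers
        setattr(cls, "_tag", tag)
        return cls

    return wrapper


@add_tag("extension")
class MyIndex:
    def __init__(self, name: str) -> None:
        self.name = name


idx = MyIndex("a")  # still typed as MyIndex, so idx.name checks fine
print(idx.name, getattr(MyIndex, "_tag"))  # a extension
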
8 changes: 6 additions & 2 deletions pandas/core/indexes/interval.py
@@ -531,7 +531,9 @@ def _maybe_convert_i8(self, key):
key_i8 = key_i8.where(~key._isnan)

# ensure consistency with IntervalIndex subtype
subtype = self.dtype.subtype
# error: Item "ExtensionDtype"/"dtype[Any]" of "Union[dtype[Any],
# ExtensionDtype]" has no attribute "subtype"
subtype = self.dtype.subtype # type: ignore[union-attr]

if not is_dtype_equal(subtype, key_dtype):
raise ValueError(
@@ -766,7 +768,9 @@ def _convert_slice_indexer(self, key: slice, kind: str):
def _should_fallback_to_positional(self) -> bool:
# integer lookups in Series.__getitem__ are unambiguously
# positional in this case
return self.dtype.subtype.kind in ["m", "M"]
# error: Item "ExtensionDtype"/"dtype[Any]" of "Union[dtype[Any],
# ExtensionDtype]" has no attribute "subtype"
return self.dtype.subtype.kind in ["m", "M"] # type: ignore[union-attr]

def _maybe_cast_slice_bound(self, label, side: str, kind=lib.no_default):
self._deprecated_arg(kind, "kind", "_maybe_cast_slice_bound")
4 changes: 3 additions & 1 deletion pandas/core/indexes/multi.py
@@ -1095,8 +1095,10 @@ def _engine(self):
return MultiIndexPyIntEngine(self.levels, self.codes, offsets)
return MultiIndexUIntEngine(self.levels, self.codes, offsets)

# Return type "Callable[..., MultiIndex]" of "_constructor" incompatible with return
# type "Type[MultiIndex]" in supertype "Index"
@property
def _constructor(self) -> Callable[..., MultiIndex]:
def _constructor(self) -> Callable[..., MultiIndex]: # type: ignore[override]
return type(self).from_tuples

@doc(Index._shallow_copy)
10 changes: 7 additions & 3 deletions pandas/core/indexes/numeric.py
@@ -101,8 +101,9 @@ class NumericIndex(Index):
_can_hold_strings = False
_is_backward_compat_public_numeric_index: bool = True

# error: Signature of "_can_hold_na" incompatible with supertype "Index"
@cache_readonly
def _can_hold_na(self) -> bool:
def _can_hold_na(self) -> bool: # type: ignore[override]
if is_float_dtype(self.dtype):
return True
else:
@@ -123,7 +124,9 @@ def _can_hold_na(self) -> bool:

@property
def _engine_type(self):
return self._engine_types[self.dtype]
# error: Invalid index type "Union[dtype[Any], ExtensionDtype]" for
# "Dict[dtype[Any], Type[IndexEngine]]"; expected type "dtype[Any]"
return self._engine_types[self.dtype] # type: ignore[index]

@cache_readonly
def inferred_type(self) -> str:
@@ -264,7 +267,8 @@ def astype(self, dtype, copy=True):
# ----------------------------------------------------------------
# Indexing Methods

@cache_readonly
# error: Decorated property not supported
@cache_readonly # type: ignore[misc]
@doc(Index._should_fallback_to_positional)
def _should_fallback_to_positional(self) -> bool:
return False