Skip to content

Commit 2b9ca07

Browse files
authored
STY: Bump pyright, pyupgrade and mypy for new PEP-696 syntax (#60006)
* STY: Bump pyright, pyupgrade and mypy for new PEP-696 syntax
* Apply update
* Fix & ignore failures
* Another ignore
1 parent 97c4ce3 commit 2b9ca07

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

42 files changed

+78
-84
lines changed

.pre-commit-config.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ repos:
7474
hooks:
7575
- id: isort
7676
- repo: https://github.com/asottile/pyupgrade
77-
rev: v3.16.0
77+
rev: v3.17.0
7878
hooks:
7979
- id: pyupgrade
8080
args: [--py310-plus]
@@ -112,7 +112,7 @@ repos:
112112
types: [python]
113113
stages: [manual]
114114
additional_dependencies: &pyright_dependencies
115-
115+
116116
- id: pyright
117117
# note: assumes python env is setup and activated
118118
name: pyright reportGeneralTypeIssues

environment.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -76,10 +76,10 @@ dependencies:
7676
- cxx-compiler
7777

7878
# code checks
79-
- flake8=6.1.0 # run in subprocess over docstring examples
80-
- mypy=1.9.0 # pre-commit uses locally installed mypy
79+
- flake8=7.1.0 # run in subprocess over docstring examples
80+
- mypy=1.11.2 # pre-commit uses locally installed mypy
8181
- tokenize-rt # scripts/check_for_inconsistent_pandas_namespace.py
82-
- pre-commit>=3.6.0
82+
- pre-commit>=4.0.1
8383

8484
# documentation
8585
- gitpython # obtain contributors from git for whatsnew

pandas/_config/config.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -411,7 +411,7 @@ def __dir__(self) -> list[str]:
411411

412412

413413
@contextmanager
414-
def option_context(*args) -> Generator[None, None, None]:
414+
def option_context(*args) -> Generator[None]:
415415
"""
416416
Context manager to temporarily set options in a ``with`` statement.
417417
@@ -718,7 +718,7 @@ def _build_option_description(k: str) -> str:
718718

719719

720720
@contextmanager
721-
def config_prefix(prefix: str) -> Generator[None, None, None]:
721+
def config_prefix(prefix: str) -> Generator[None]:
722722
"""
723723
contextmanager for multiple invocations of API with a common prefix
724724

pandas/_config/localization.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525
@contextmanager
2626
def set_locale(
2727
new_locale: str | tuple[str, str], lc_var: int = locale.LC_ALL
28-
) -> Generator[str | tuple[str, str], None, None]:
28+
) -> Generator[str | tuple[str, str]]:
2929
"""
3030
Context manager for temporarily setting a locale.
3131

pandas/_testing/_warnings.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def assert_produces_warning(
3535
raise_on_extra_warnings: bool = True,
3636
match: str | tuple[str | None, ...] | None = None,
3737
must_find_all_warnings: bool = True,
38-
) -> Generator[list[warnings.WarningMessage], None, None]:
38+
) -> Generator[list[warnings.WarningMessage]]:
3939
"""
4040
Context manager for running code expected to either raise a specific warning,
4141
multiple specific warnings, or not raise any warnings. Verifies that the code

pandas/_testing/contexts.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
@contextmanager
3030
def decompress_file(
3131
path: FilePath | BaseBuffer, compression: CompressionOptions
32-
) -> Generator[IO[bytes], None, None]:
32+
) -> Generator[IO[bytes]]:
3333
"""
3434
Open a compressed file and return a file object.
3535
@@ -50,7 +50,7 @@ def decompress_file(
5050

5151

5252
@contextmanager
53-
def set_timezone(tz: str) -> Generator[None, None, None]:
53+
def set_timezone(tz: str) -> Generator[None]:
5454
"""
5555
Context manager for temporarily setting a timezone.
5656
@@ -92,7 +92,7 @@ def setTZ(tz) -> None:
9292

9393

9494
@contextmanager
95-
def ensure_clean(filename=None) -> Generator[Any, None, None]:
95+
def ensure_clean(filename=None) -> Generator[Any]:
9696
"""
9797
Gets a temporary path and agrees to remove on close.
9898
@@ -124,7 +124,7 @@ def ensure_clean(filename=None) -> Generator[Any, None, None]:
124124

125125

126126
@contextmanager
127-
def with_csv_dialect(name: str, **kwargs) -> Generator[None, None, None]:
127+
def with_csv_dialect(name: str, **kwargs) -> Generator[None]:
128128
"""
129129
Context manager to temporarily register a CSV dialect for parsing CSV.
130130

pandas/compat/pickle_compat.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ def loads(
131131

132132

133133
@contextlib.contextmanager
134-
def patch_pickle() -> Generator[None, None, None]:
134+
def patch_pickle() -> Generator[None]:
135135
"""
136136
Temporarily patch pickle to use our unpickler.
137137
"""

pandas/core/apply.py

+4-8
Original file line numberDiff line numberDiff line change
@@ -246,12 +246,8 @@ def transform(self) -> DataFrame | Series:
246246
and not obj.empty
247247
):
248248
raise ValueError("Transform function failed")
249-
# error: Argument 1 to "__get__" of "AxisProperty" has incompatible type
250-
# "Union[Series, DataFrame, GroupBy[Any], SeriesGroupBy,
251-
# DataFrameGroupBy, BaseWindow, Resampler]"; expected "Union[DataFrame,
252-
# Series]"
253249
if not isinstance(result, (ABCSeries, ABCDataFrame)) or not result.index.equals(
254-
obj.index # type: ignore[arg-type]
250+
obj.index
255251
):
256252
raise ValueError("Function did not transform")
257253

@@ -803,7 +799,7 @@ def result_columns(self) -> Index:
803799

804800
@property
805801
@abc.abstractmethod
806-
def series_generator(self) -> Generator[Series, None, None]:
802+
def series_generator(self) -> Generator[Series]:
807803
pass
808804

809805
@staticmethod
@@ -1128,7 +1124,7 @@ class FrameRowApply(FrameApply):
11281124
axis: AxisInt = 0
11291125

11301126
@property
1131-
def series_generator(self) -> Generator[Series, None, None]:
1127+
def series_generator(self) -> Generator[Series]:
11321128
return (self.obj._ixs(i, axis=1) for i in range(len(self.columns)))
11331129

11341130
@staticmethod
@@ -1235,7 +1231,7 @@ def apply_broadcast(self, target: DataFrame) -> DataFrame:
12351231
return result.T
12361232

12371233
@property
1238-
def series_generator(self) -> Generator[Series, None, None]:
1234+
def series_generator(self) -> Generator[Series]:
12391235
values = self.values
12401236
values = ensure_wrapped_if_datetimelike(values)
12411237
assert len(values) > 0

pandas/core/arraylike.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -403,12 +403,12 @@ def _reconstruct(result):
403403
# for np.<ufunc>(..) calls
404404
# kwargs cannot necessarily be handled block-by-block, so only
405405
# take this path if there are no kwargs
406-
mgr = inputs[0]._mgr
406+
mgr = inputs[0]._mgr # pyright: ignore[reportGeneralTypeIssues]
407407
result = mgr.apply(getattr(ufunc, method))
408408
else:
409409
# otherwise specific ufunc methods (eg np.<ufunc>.accumulate(..))
410410
# Those can have an axis keyword and thus can't be called block-by-block
411-
result = default_array_ufunc(inputs[0], ufunc, method, *inputs, **kwargs)
411+
result = default_array_ufunc(inputs[0], ufunc, method, *inputs, **kwargs) # pyright: ignore[reportGeneralTypeIssues]
412412
# e.g. np.negative (only one reached), with "where" and "out" in kwargs
413413

414414
result = reconstruct(result)

pandas/core/arrays/arrow/array.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2428,7 +2428,7 @@ def _str_rindex(self, sub: str, start: int = 0, end: int | None = None) -> Self:
24282428
result = self._apply_elementwise(predicate)
24292429
return type(self)(pa.chunked_array(result))
24302430

2431-
def _str_normalize(self, form: str) -> Self:
2431+
def _str_normalize(self, form: Literal["NFC", "NFD", "NFKC", "NFKD"]) -> Self:
24322432
predicate = lambda val: unicodedata.normalize(form, val)
24332433
result = self._apply_elementwise(predicate)
24342434
return type(self)(pa.chunked_array(result))

pandas/core/arrays/boolean.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -369,7 +369,7 @@ def _coerce_to_array(
369369
assert dtype == "boolean"
370370
return coerce_to_array(value, copy=copy)
371371

372-
def _logical_method(self, other, op):
372+
def _logical_method(self, other, op): # type: ignore[override]
373373
assert op.__name__ in {"or_", "ror_", "and_", "rand_", "xor", "rxor"}
374374
other_is_scalar = lib.is_scalar(other)
375375
mask = None

pandas/core/arrays/datetimes.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2918,7 +2918,7 @@ def _generate_range(
29182918
offset: BaseOffset,
29192919
*,
29202920
unit: str,
2921-
) -> Generator[Timestamp, None, None]:
2921+
) -> Generator[Timestamp]:
29222922
"""
29232923
Generates a sequence of dates corresponding to the specified time
29242924
offset. Similar to dateutil.rrule except uses pandas DateOffset

pandas/core/common.py

+1-3
Original file line numberDiff line numberDiff line change
@@ -560,9 +560,7 @@ def convert_to_list_like(
560560

561561

562562
@contextlib.contextmanager
563-
def temp_setattr(
564-
obj, attr: str, value, condition: bool = True
565-
) -> Generator[None, None, None]:
563+
def temp_setattr(obj, attr: str, value, condition: bool = True) -> Generator[None]:
566564
"""
567565
Temporarily set attribute on an object.
568566

pandas/core/computation/expr.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ def _preparse(
168168
the ``tokenize`` module and ``tokval`` is a string.
169169
"""
170170
assert callable(f), "f must be callable"
171-
return tokenize.untokenize(f(x) for x in tokenize_string(source))
171+
return tokenize.untokenize(f(x) for x in tokenize_string(source)) # pyright: ignore[reportArgumentType]
172172

173173

174174
def _is_type(t):

pandas/core/frame.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2306,7 +2306,7 @@ def maybe_reorder(
23062306

23072307
if any(exclude):
23082308
arr_exclude = (x for x in exclude if x in arr_columns)
2309-
to_remove = {arr_columns.get_loc(col) for col in arr_exclude}
2309+
to_remove = {arr_columns.get_loc(col) for col in arr_exclude} # pyright: ignore[reportUnhashable]
23102310
arrays = [v for i, v in enumerate(arrays) if i not in to_remove]
23112311

23122312
columns = columns.drop(exclude)

pandas/core/groupby/groupby.py

+5-3
Original file line numberDiff line numberDiff line change
@@ -3719,7 +3719,7 @@ def blk_func(values: ArrayLike) -> ArrayLike:
37193719
mask = isna(values)
37203720
if values.ndim == 1:
37213721
indexer = np.empty(values.shape, dtype=np.intp)
3722-
col_func(out=indexer, mask=mask)
3722+
col_func(out=indexer, mask=mask) # type: ignore[arg-type]
37233723
return algorithms.take_nd(values, indexer)
37243724

37253725
else:
@@ -4081,7 +4081,9 @@ def _nth(
40814081
def quantile(
40824082
self,
40834083
q: float | AnyArrayLike = 0.5,
4084-
interpolation: str = "linear",
4084+
interpolation: Literal[
4085+
"linear", "lower", "higher", "nearest", "midpoint"
4086+
] = "linear",
40854087
numeric_only: bool = False,
40864088
):
40874089
"""
@@ -4270,7 +4272,7 @@ def blk_func(values: ArrayLike) -> ArrayLike:
42704272
func(
42714273
out[0],
42724274
values=vals,
4273-
mask=mask,
4275+
mask=mask, # type: ignore[arg-type]
42744276
result_mask=result_mask,
42754277
is_datetimelike=is_datetimelike,
42764278
)

pandas/core/groupby/ops.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -898,7 +898,7 @@ def _unob_index_and_ids(
898898
return unob_index, unob_ids
899899

900900
@final
901-
def get_group_levels(self) -> Generator[Index, None, None]:
901+
def get_group_levels(self) -> Generator[Index]:
902902
# Note: only called from _insert_inaxis_grouper, which
903903
# is only called for BaseGrouper, never for BinGrouper
904904
result_index = self.result_index

pandas/core/indexes/interval.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -991,7 +991,7 @@ def length(self) -> Index:
991991
# --------------------------------------------------------------------
992992
# Set Operations
993993

994-
def _intersection(self, other, sort):
994+
def _intersection(self, other, sort: bool = False):
995995
"""
996996
intersection specialized to the case with matching dtypes.
997997
"""
@@ -1006,7 +1006,7 @@ def _intersection(self, other, sort):
10061006
# duplicates
10071007
taken = self._intersection_non_unique(other)
10081008

1009-
if sort is None:
1009+
if sort:
10101010
taken = taken.sort_values()
10111011

10121012
return taken

pandas/core/indexes/multi.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2664,7 +2664,7 @@ def _reorder_ilevels(self, order) -> MultiIndex:
26642664

26652665
def _recode_for_new_levels(
26662666
self, new_levels, copy: bool = True
2667-
) -> Generator[np.ndarray, None, None]:
2667+
) -> Generator[np.ndarray]:
26682668
if len(new_levels) > self.nlevels:
26692669
raise AssertionError(
26702670
f"Length of new_levels ({len(new_levels)}) "

pandas/core/internals/blocks.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -388,7 +388,7 @@ def _split_op_result(self, result: ArrayLike) -> list[Block]:
388388
return [nb]
389389

390390
@final
391-
def _split(self) -> Generator[Block, None, None]:
391+
def _split(self) -> Generator[Block]:
392392
"""
393393
Split a block into a list of single-column blocks.
394394
"""

pandas/core/internals/concat.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,7 @@ def _concat_homogeneous_fastpath(
250250

251251
def _get_combined_plan(
252252
mgrs: list[BlockManager],
253-
) -> Generator[tuple[BlockPlacement, list[JoinUnit]], None, None]:
253+
) -> Generator[tuple[BlockPlacement, list[JoinUnit]]]:
254254
max_len = mgrs[0].shape[0]
255255

256256
blknos_list = [mgr.blknos for mgr in mgrs]

pandas/core/internals/managers.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -856,7 +856,7 @@ def _slice_take_blocks_ax0(
856856
*,
857857
use_na_proxy: bool = False,
858858
ref_inplace_op: bool = False,
859-
) -> Generator[Block, None, None]:
859+
) -> Generator[Block]:
860860
"""
861861
Slice/take blocks along axis=0.
862862
@@ -1731,7 +1731,7 @@ def unstack(self, unstacker, fill_value) -> BlockManager:
17311731
bm = BlockManager(new_blocks, [new_columns, new_index], verify_integrity=False)
17321732
return bm
17331733

1734-
def to_iter_dict(self) -> Generator[tuple[str, Self], None, None]:
1734+
def to_iter_dict(self) -> Generator[tuple[str, Self]]:
17351735
"""
17361736
Yield a tuple of (str(dtype), BlockManager)
17371737

pandas/core/methods/to_dict.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333

3434
def create_data_for_split(
3535
df: DataFrame, are_all_object_dtype_cols: bool, object_dtype_indices: list[int]
36-
) -> Generator[list, None, None]:
36+
) -> Generator[list]:
3737
"""
3838
Simple helper method to create data for to ``to_dict(orient="split")``
3939
to create the main output data

pandas/core/resample.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -404,7 +404,7 @@ def transform(self, arg, *args, **kwargs):
404404
arg, *args, **kwargs
405405
)
406406

407-
def _downsample(self, f, **kwargs):
407+
def _downsample(self, how, **kwargs):
408408
raise AbstractMethodError(self)
409409

410410
def _upsample(self, f, limit: int | None = None, fill_value=None):

pandas/core/series.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -813,8 +813,7 @@ def _values(self):
813813
def _references(self) -> BlockValuesRefs:
814814
return self._mgr._block.refs
815815

816-
# error: Decorated property not supported
817-
@Appender(base.IndexOpsMixin.array.__doc__) # type: ignore[misc]
816+
@Appender(base.IndexOpsMixin.array.__doc__) # type: ignore[prop-decorator]
818817
@property
819818
def array(self) -> ExtensionArray:
820819
return self._mgr.array_values()

pandas/core/tools/datetimes.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1000,7 +1000,7 @@ def to_datetime(
10001000
dayfirst=dayfirst,
10011001
yearfirst=yearfirst,
10021002
errors=errors,
1003-
exact=exact,
1003+
exact=exact, # type: ignore[arg-type]
10041004
)
10051005
result: Timestamp | NaTType | Series | Index
10061006

pandas/core/window/rolling.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1507,7 +1507,7 @@ def _generate_cython_apply_func(
15071507
window_aggregations.roll_apply,
15081508
args=args,
15091509
kwargs=kwargs,
1510-
raw=raw,
1510+
raw=bool(raw),
15111511
function=function,
15121512
)
15131513

pandas/io/excel/_odswriter.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ class ODSWriter(ExcelWriter):
3434
_engine = "odf"
3535
_supported_extensions = (".ods",)
3636

37-
def __init__(
37+
def __init__( # pyright: ignore[reportInconsistentConstructor]
3838
self,
3939
path: FilePath | WriteExcelBuffer | ExcelWriter,
4040
engine: str | None = None,

pandas/io/excel/_openpyxl.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ class OpenpyxlWriter(ExcelWriter):
4242
_engine = "openpyxl"
4343
_supported_extensions = (".xlsx", ".xlsm")
4444

45-
def __init__(
45+
def __init__( # pyright: ignore[reportInconsistentConstructor]
4646
self,
4747
path: FilePath | WriteExcelBuffer | ExcelWriter,
4848
engine: str | None = None,

pandas/io/excel/_xlsxwriter.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ class XlsxWriter(ExcelWriter):
181181
_engine = "xlsxwriter"
182182
_supported_extensions = (".xlsx",)
183183

184-
def __init__(
184+
def __init__( # pyright: ignore[reportInconsistentConstructor]
185185
self,
186186
path: FilePath | WriteExcelBuffer | ExcelWriter,
187187
engine: str | None = None,

0 commit comments

Comments (0)