Skip to content

Commit 16eaa36

Browse files
committed
STY: remove --keep-runtime-typing from pyupgrade #40759 Part-1
1 parent ce34c1c commit 16eaa36

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

97 files changed

+1837
-2232
lines changed

.pre-commit-config.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ repos:
5454
rev: v2.11.0
5555
hooks:
5656
- id: pyupgrade
57-
args: [--py37-plus, --keep-runtime-typing]
57+
args: [--py37-plus]
5858
- repo: https://github.com/pre-commit/pygrep-hooks
5959
rev: v1.8.0
6060
hooks:

pandas/_testing/__init__.py

+16-18
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,6 @@
1414
ContextManager,
1515
Counter,
1616
Iterable,
17-
List,
18-
Type,
1917
)
2018
import warnings
2119

@@ -116,24 +114,24 @@
116114
_N = 30
117115
_K = 4
118116

119-
UNSIGNED_INT_DTYPES: List[Dtype] = ["uint8", "uint16", "uint32", "uint64"]
120-
UNSIGNED_EA_INT_DTYPES: List[Dtype] = ["UInt8", "UInt16", "UInt32", "UInt64"]
121-
SIGNED_INT_DTYPES: List[Dtype] = [int, "int8", "int16", "int32", "int64"]
122-
SIGNED_EA_INT_DTYPES: List[Dtype] = ["Int8", "Int16", "Int32", "Int64"]
117+
UNSIGNED_INT_DTYPES: list[Dtype] = ["uint8", "uint16", "uint32", "uint64"]
118+
UNSIGNED_EA_INT_DTYPES: list[Dtype] = ["UInt8", "UInt16", "UInt32", "UInt64"]
119+
SIGNED_INT_DTYPES: list[Dtype] = [int, "int8", "int16", "int32", "int64"]
120+
SIGNED_EA_INT_DTYPES: list[Dtype] = ["Int8", "Int16", "Int32", "Int64"]
123121
ALL_INT_DTYPES = UNSIGNED_INT_DTYPES + SIGNED_INT_DTYPES
124122
ALL_EA_INT_DTYPES = UNSIGNED_EA_INT_DTYPES + SIGNED_EA_INT_DTYPES
125123

126-
FLOAT_DTYPES: List[Dtype] = [float, "float32", "float64"]
127-
FLOAT_EA_DTYPES: List[Dtype] = ["Float32", "Float64"]
128-
COMPLEX_DTYPES: List[Dtype] = [complex, "complex64", "complex128"]
129-
STRING_DTYPES: List[Dtype] = [str, "str", "U"]
124+
FLOAT_DTYPES: list[Dtype] = [float, "float32", "float64"]
125+
FLOAT_EA_DTYPES: list[Dtype] = ["Float32", "Float64"]
126+
COMPLEX_DTYPES: list[Dtype] = [complex, "complex64", "complex128"]
127+
STRING_DTYPES: list[Dtype] = [str, "str", "U"]
130128

131-
DATETIME64_DTYPES: List[Dtype] = ["datetime64[ns]", "M8[ns]"]
132-
TIMEDELTA64_DTYPES: List[Dtype] = ["timedelta64[ns]", "m8[ns]"]
129+
DATETIME64_DTYPES: list[Dtype] = ["datetime64[ns]", "M8[ns]"]
130+
TIMEDELTA64_DTYPES: list[Dtype] = ["timedelta64[ns]", "m8[ns]"]
133131

134-
BOOL_DTYPES: List[Dtype] = [bool, "bool"]
135-
BYTES_DTYPES: List[Dtype] = [bytes, "bytes"]
136-
OBJECT_DTYPES: List[Dtype] = [object, "object"]
132+
BOOL_DTYPES: list[Dtype] = [bool, "bool"]
133+
BYTES_DTYPES: list[Dtype] = [bytes, "bytes"]
134+
OBJECT_DTYPES: list[Dtype] = [object, "object"]
137135

138136
ALL_REAL_DTYPES = FLOAT_DTYPES + ALL_INT_DTYPES
139137
ALL_NUMPY_DTYPES = (
@@ -428,7 +426,7 @@ def all_timeseries_index_generator(k: int = 10) -> Iterable[Index]:
428426
----------
429427
k: length of each of the index instances
430428
"""
431-
make_index_funcs: List[Callable[..., Index]] = [
429+
make_index_funcs: list[Callable[..., Index]] = [
432430
makeDateIndex,
433431
makePeriodIndex,
434432
makeTimedeltaIndex,
@@ -876,7 +874,7 @@ def skipna_wrapper(x):
876874
return skipna_wrapper
877875

878876

879-
def convert_rows_list_to_csv_str(rows_list: List[str]):
877+
def convert_rows_list_to_csv_str(rows_list: list[str]):
880878
"""
881879
Convert list of CSV rows to single CSV-formatted string for current OS.
882880
@@ -896,7 +894,7 @@ def convert_rows_list_to_csv_str(rows_list: List[str]):
896894
return sep.join(rows_list) + sep
897895

898896

899-
def external_error_raised(expected_exception: Type[Exception]) -> ContextManager:
897+
def external_error_raised(expected_exception: type[Exception]) -> ContextManager:
900898
"""
901899
Helper function to mark pytest.raises that have an external error message.
902900

pandas/_typing.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525
Optional,
2626
Sequence,
2727
Tuple,
28-
Type,
28+
Type as type_t,
2929
TypeVar,
3030
Union,
3131
)
@@ -119,7 +119,7 @@
119119
# dtypes
120120
NpDtype = Union[str, np.dtype]
121121
Dtype = Union[
122-
"ExtensionDtype", NpDtype, Type[Union[str, float, int, complex, bool, object]]
122+
"ExtensionDtype", NpDtype, type_t[Union[str, float, int, complex, bool, object]]
123123
]
124124
# DtypeArg specifies all allowable dtypes in a functions its dtype argument
125125
DtypeArg = Union[Dtype, Dict[Hashable, Dtype]]

pandas/compat/pickle_compat.py

+2-5
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,7 @@
77
import copy
88
import io
99
import pickle as pkl
10-
from typing import (
11-
TYPE_CHECKING,
12-
Optional,
13-
)
10+
from typing import TYPE_CHECKING
1411
import warnings
1512

1613
from pandas._libs.tslibs import BaseOffset
@@ -235,7 +232,7 @@ def load_newobj_ex(self):
235232
pass
236233

237234

238-
def load(fh, encoding: Optional[str] = None, is_verbose: bool = False):
235+
def load(fh, encoding: str | None = None, is_verbose: bool = False):
239236
"""
240237
Load a pickle, with a provided encoding,
241238

pandas/core/aggregation.py

+11-16
Original file line numberDiff line numberDiff line change
@@ -12,14 +12,9 @@
1212
Any,
1313
Callable,
1414
DefaultDict,
15-
Dict,
1615
Hashable,
1716
Iterable,
18-
List,
19-
Optional,
2017
Sequence,
21-
Tuple,
22-
Union,
2318
)
2419

2520
from pandas._typing import (
@@ -42,8 +37,8 @@
4237

4338

4439
def reconstruct_func(
45-
func: Optional[AggFuncType], **kwargs
46-
) -> Tuple[bool, Optional[AggFuncType], Optional[List[str]], Optional[List[int]]]:
40+
func: AggFuncType | None, **kwargs
41+
) -> tuple[bool, AggFuncType | None, list[str] | None, list[int] | None]:
4742
"""
4843
This is the internal function to reconstruct func given if there is relabeling
4944
or not and also normalize the keyword to get new order of columns.
@@ -81,8 +76,8 @@ def reconstruct_func(
8176
(False, 'min', None, None)
8277
"""
8378
relabeling = func is None and is_multi_agg_with_relabel(**kwargs)
84-
columns: Optional[List[str]] = None
85-
order: Optional[List[int]] = None
79+
columns: list[str] | None = None
80+
order: list[int] | None = None
8681

8782
if not relabeling:
8883
if isinstance(func, list) and len(func) > len(set(func)):
@@ -129,7 +124,7 @@ def is_multi_agg_with_relabel(**kwargs) -> bool:
129124
)
130125

131126

132-
def normalize_keyword_aggregation(kwargs: dict) -> Tuple[dict, List[str], List[int]]:
127+
def normalize_keyword_aggregation(kwargs: dict) -> tuple[dict, list[str], list[int]]:
133128
"""
134129
Normalize user-provided "named aggregation" kwargs.
135130
Transforms from the new ``Mapping[str, NamedAgg]`` style kwargs
@@ -187,8 +182,8 @@ def normalize_keyword_aggregation(kwargs: dict) -> Tuple[dict, List[str], List[i
187182

188183

189184
def _make_unique_kwarg_list(
190-
seq: Sequence[Tuple[Any, Any]]
191-
) -> Sequence[Tuple[Any, Any]]:
185+
seq: Sequence[tuple[Any, Any]]
186+
) -> Sequence[tuple[Any, Any]]:
192187
"""
193188
Uniquify aggfunc name of the pairs in the order list
194189
@@ -292,10 +287,10 @@ def maybe_mangle_lambdas(agg_spec: Any) -> Any:
292287

293288
def relabel_result(
294289
result: FrameOrSeries,
295-
func: Dict[str, List[Union[Callable, str]]],
290+
func: dict[str, list[Callable | str]],
296291
columns: Iterable[Hashable],
297292
order: Iterable[int],
298-
) -> Dict[Hashable, Series]:
293+
) -> dict[Hashable, Series]:
299294
"""
300295
Internal function to reorder result if relabelling is True for
301296
dataframe.agg, and return the reordered result in dict.
@@ -322,7 +317,7 @@ def relabel_result(
322317
reordered_indexes = [
323318
pair[0] for pair in sorted(zip(columns, order), key=lambda t: t[1])
324319
]
325-
reordered_result_in_dict: Dict[Hashable, Series] = {}
320+
reordered_result_in_dict: dict[Hashable, Series] = {}
326321
idx = 0
327322

328323
reorder_mask = not isinstance(result, ABCSeries) and len(result.columns) > 1
@@ -366,7 +361,7 @@ def relabel_result(
366361

367362
def validate_func_kwargs(
368363
kwargs: dict,
369-
) -> Tuple[List[str], List[Union[str, Callable[..., Any]]]]:
364+
) -> tuple[list[str], list[str | Callable[..., Any]]]:
370365
"""
371366
Validates types of user-provided "named aggregation" kwargs.
372367
`TypeError` is raised if aggfunc is not `str` or callable.

pandas/core/algorithms.py

+12-15
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,6 @@
88
from textwrap import dedent
99
from typing import (
1010
TYPE_CHECKING,
11-
Dict,
12-
Optional,
13-
Tuple,
1411
Union,
1512
cast,
1613
)
@@ -103,13 +100,13 @@
103100
TimedeltaArray,
104101
)
105102

106-
_shared_docs: Dict[str, str] = {}
103+
_shared_docs: dict[str, str] = {}
107104

108105

109106
# --------------- #
110107
# dtype access #
111108
# --------------- #
112-
def _ensure_data(values: ArrayLike) -> Tuple[np.ndarray, DtypeObj]:
109+
def _ensure_data(values: ArrayLike) -> tuple[np.ndarray, DtypeObj]:
113110
"""
114111
routine to ensure that our data is of the correct
115112
input dtype for lower-level routines
@@ -542,10 +539,10 @@ def f(c, v):
542539
def factorize_array(
543540
values: np.ndarray,
544541
na_sentinel: int = -1,
545-
size_hint: Optional[int] = None,
542+
size_hint: int | None = None,
546543
na_value=None,
547-
mask: Optional[np.ndarray] = None,
548-
) -> Tuple[np.ndarray, np.ndarray]:
544+
mask: np.ndarray | None = None,
545+
) -> tuple[np.ndarray, np.ndarray]:
549546
"""
550547
Factorize an array-like to codes and uniques.
551548
@@ -608,9 +605,9 @@ def factorize_array(
608605
def factorize(
609606
values,
610607
sort: bool = False,
611-
na_sentinel: Optional[int] = -1,
612-
size_hint: Optional[int] = None,
613-
) -> Tuple[np.ndarray, Union[np.ndarray, Index]]:
608+
na_sentinel: int | None = -1,
609+
size_hint: int | None = None,
610+
) -> tuple[np.ndarray, np.ndarray | Index]:
614611
"""
615612
Encode the object as an enumerated type or categorical variable.
616613
@@ -926,7 +923,7 @@ def value_counts_arraylike(values, dropna: bool):
926923
return keys, counts
927924

928925

929-
def duplicated(values: ArrayLike, keep: Union[str, bool] = "first") -> np.ndarray:
926+
def duplicated(values: ArrayLike, keep: str | bool = "first") -> np.ndarray:
930927
"""
931928
Return boolean ndarray denoting duplicate values.
932929
@@ -1062,8 +1059,8 @@ def rank(
10621059
def checked_add_with_arr(
10631060
arr: np.ndarray,
10641061
b,
1065-
arr_mask: Optional[np.ndarray] = None,
1066-
b_mask: Optional[np.ndarray] = None,
1062+
arr_mask: np.ndarray | None = None,
1063+
b_mask: np.ndarray | None = None,
10671064
) -> np.ndarray:
10681065
"""
10691066
Perform array addition that checks for underflow and overflow.
@@ -1741,7 +1738,7 @@ def safe_sort(
17411738
na_sentinel: int = -1,
17421739
assume_unique: bool = False,
17431740
verify: bool = True,
1744-
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]:
1741+
) -> np.ndarray | tuple[np.ndarray, np.ndarray]:
17451742
"""
17461743
Sort ``values`` and reorder corresponding ``codes``.
17471744

0 commit comments

Comments
 (0)