
Commit 71d3ece

jbrockmendel authored and noatamir committed
CLN: assorted follow-ups (pandas-dev#49489)
1 parent 5a3c973 commit 71d3ece

26 files changed: +48, -215 lines

doc/source/getting_started/install.rst  (+1, -1)

@@ -417,7 +417,7 @@ Dependency Minimum Version optional_extra Notes
 PyTables      3.6.1    hdf5               HDF5-based reading / writing
 blosc         1.21.0   hdf5               Compression for HDF5
 zlib                   hdf5               Compression for HDF5
-fastparquet   0.4.0    -                  Parquet reading / writing (pyarrow is default)
+fastparquet   0.6.3    -                  Parquet reading / writing (pyarrow is default)
 pyarrow       6.0.0    parquet, feather   Parquet, ORC, and feather reading / writing
 pyreadstat    1.1.2    spss               SPSS files (.sav) reading
 odfpy         1.4.1    excel              Open document format (.odf, .ods, .odt) reading / writing
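
Note: as context for the bumped fastparquet minimum, a small sketch of how the optional Parquet engine is exercised; the DataFrame contents and file name are illustrative and not part of the commit.

    import pandas as pd

    df = pd.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})

    # pyarrow remains the default engine; fastparquet must be requested explicitly
    df.to_parquet("example.parquet", engine="fastparquet")
    roundtrip = pd.read_parquet("example.parquet", engine="fastparquet")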

pandas/core/dtypes/astype.py  (+1)

@@ -215,6 +215,7 @@ def astype_array(values: ArrayLike, dtype: DtypeObj, copy: bool = False) -> ArrayLike:
         # Series.astype behavior pre-2.0 did
         #  values.tz_localize("UTC").tz_convert(dtype.tz)
         #  which did not match the DTA/DTI behavior.
+        # We special-case here to give a Series-specific exception message.
         raise TypeError(
             "Cannot use .astype to convert from timezone-naive dtype to "
             "timezone-aware dtype. Use ser.dt.tz_localize instead."

pandas/core/dtypes/cast.py  (+1, -1)

@@ -1414,7 +1414,7 @@ def _ensure_nanosecond_dtype(dtype: DtypeObj) -> DtypeObj:


 # TODO: other value-dependent functions to standardize here include
-#  dtypes.concat.cast_to_common_type and Index._find_common_type_compat
+#  Index._find_common_type_compat
 def find_result_type(left: ArrayLike, right: Any) -> DtypeObj:
     """
     Find the type/dtype for a the result of an operation between these objects.

pandas/core/dtypes/concat.py  (+6, -42)

@@ -3,30 +3,20 @@
 """
 from __future__ import annotations

-from typing import (
-    TYPE_CHECKING,
-    cast,
-)
+from typing import TYPE_CHECKING
 import warnings

 import numpy as np

-from pandas._typing import (
-    ArrayLike,
-    AxisInt,
-    DtypeObj,
-)
+from pandas._typing import AxisInt
 from pandas.util._exceptions import find_stack_level

 from pandas.core.dtypes.astype import astype_array
 from pandas.core.dtypes.cast import (
     common_dtype_categorical_compat,
     find_common_type,
 )
-from pandas.core.dtypes.common import (
-    is_dtype_equal,
-    is_sparse,
-)
+from pandas.core.dtypes.common import is_dtype_equal
 from pandas.core.dtypes.dtypes import (
     DatetimeTZDtype,
     ExtensionDtype,

@@ -39,34 +29,6 @@

 if TYPE_CHECKING:
     from pandas.core.arrays import Categorical
-    from pandas.core.arrays.sparse import SparseArray
-
-
-def cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike:
-    """
-    Helper function for `arr.astype(common_dtype)` but handling all special
-    cases.
-    """
-    if is_dtype_equal(arr.dtype, dtype):
-        return arr
-
-    if is_sparse(arr) and not is_sparse(dtype):
-        # TODO(2.0): remove special case once SparseArray.astype deprecation
-        #  is enforced.
-        # problem case: SparseArray.astype(dtype) doesn't follow the specified
-        #  dtype exactly, but converts this to Sparse[dtype] -> first manually
-        #  convert to dense array
-
-        # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible type
-        # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, _
-        # SupportsDType[dtype[Any]], str, Union[Tuple[Any, int], Tuple[Any,
-        # Union[SupportsIndex, Sequence[SupportsIndex]]], List[Any], _DTypeDict,
-        # Tuple[Any, Any]]]"  [arg-type]
-        arr = cast("SparseArray", arr)
-        return arr.to_dense().astype(dtype, copy=False)  # type: ignore[arg-type]
-
-    # astype_array includes ensure_wrapped_if_datetimelike
-    return astype_array(arr, dtype=dtype, copy=False)


 def concat_compat(to_concat, axis: AxisInt = 0, ea_compat_axis: bool = False):

@@ -126,7 +88,9 @@ def is_nonempty(x) -> bool:
     if not single_dtype:
         target_dtype = find_common_type([x.dtype for x in to_concat])
         target_dtype = common_dtype_categorical_compat(to_concat, target_dtype)
-        to_concat = [cast_to_common_type(arr, target_dtype) for arr in to_concat]
+        to_concat = [
+            astype_array(arr, target_dtype, copy=False) for arr in to_concat
+        ]

     if isinstance(to_concat[0], ABCExtensionArray):
         # TODO: what about EA-backed Index?
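
Note: a sketch of the internal pattern after this cleanup, with every array cast to the common dtype via astype_array instead of the deleted cast_to_common_type helper. The arrays are illustrative, and these are private pandas internals that may change.

    import numpy as np

    from pandas.core.dtypes.astype import astype_array
    from pandas.core.dtypes.cast import find_common_type

    to_concat = [np.array([1, 2, 3], dtype="int64"), np.array([1.5, 2.5], dtype="float64")]

    # find the common dtype (float64 here), then cast each piece before concatenating
    target_dtype = find_common_type([arr.dtype for arr in to_concat])
    to_concat = [astype_array(arr, target_dtype, copy=False) for arr in to_concat]
    result = np.concatenate(to_concat)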

pandas/core/frame.py  (-1)

@@ -5046,7 +5046,6 @@ def align(
             broadcast_axis=broadcast_axis,
         )

-    # error: Signature of "set_axis" incompatible with supertype "NDFrame"
     @Appender(
         """
         Examples

pandas/core/indexes/multi.py  (+1, -21)

@@ -14,7 +14,6 @@
     Sequence,
     Tuple,
     cast,
-    overload,
 )
 import warnings

@@ -3739,28 +3738,9 @@ def isin(self, values, level=None) -> npt.NDArray[np.bool_]:
             return np.zeros(len(levs), dtype=np.bool_)
         return levs.isin(values)

-    @overload
-    def set_names(
-        self, names, *, level=..., inplace: Literal[False] = ...
-    ) -> MultiIndex:
-        ...
-
-    @overload
-    def set_names(self, names, *, level=..., inplace: Literal[True]) -> None:
-        ...
-
-    @overload
-    def set_names(self, names, *, level=..., inplace: bool = ...) -> MultiIndex | None:
-        ...
-
-    def set_names(
-        self, names, *, level=None, inplace: bool = False
-    ) -> MultiIndex | None:
-        return super().set_names(names=names, level=level, inplace=inplace)
-
     # error: Incompatible types in assignment (expression has type overloaded function,
     # base class "Index" defined the type as "Callable[[Index, Any, bool], Any]")
-    rename = set_names  # type: ignore[assignment]
+    rename = Index.set_names  # type: ignore[assignment]

     # ---------------------------------------------------------------
     # Arithmetic/Numeric Methods - Disabled
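
Note: MultiIndex.rename now aliases Index.set_names directly instead of going through the deleted MultiIndex.set_names wrapper; the user-facing behaviour is unchanged. A small sketch with illustrative index values:

    import pandas as pd

    mi = pd.MultiIndex.from_tuples([("a", 1), ("b", 2)], names=["letter", "number"])

    renamed = mi.rename(["char", "num"])               # same result as mi.set_names(["char", "num"])
    one_level = mi.set_names("digit", level="number")  # rename a single level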

pandas/core/internals/concat.py  (+3, -5)

@@ -24,6 +24,7 @@
 )
 from pandas.util._decorators import cache_readonly

+from pandas.core.dtypes.astype import astype_array
 from pandas.core.dtypes.cast import (
     ensure_dtype_can_hold_na,
     find_common_type,

@@ -34,10 +35,7 @@
     is_scalar,
     needs_i8_conversion,
 )
-from pandas.core.dtypes.concat import (
-    cast_to_common_type,
-    concat_compat,
-)
+from pandas.core.dtypes.concat import concat_compat
 from pandas.core.dtypes.dtypes import (
     DatetimeTZDtype,
     ExtensionDtype,

@@ -153,7 +151,7 @@ def concat_arrays(to_concat: list) -> ArrayLike:
         to_concat = [
             arr.to_array(target_dtype)
             if isinstance(arr, NullArrayProxy)
-            else cast_to_common_type(arr, target_dtype)
+            else astype_array(arr, target_dtype, copy=False)
             for arr in to_concat
         ]

pandas/core/series.py  (-1)

@@ -4925,7 +4925,6 @@ def rename(
         else:
             return self._set_name(index, inplace=inplace)

-    # error: Signature of "set_axis" incompatible with supertype "NDFrame"
     @Appender(
         """
         Examples

pandas/core/sorting.py  (+1, -9)

@@ -11,7 +11,6 @@
     Sequence,
     cast,
 )
-import warnings

 import numpy as np

@@ -341,14 +340,7 @@ def lexsort_indexer(
     keys = [ensure_key_mapped(k, key) for k in keys]

     for k, order in zip(keys, orders):
-        with warnings.catch_warnings():
-            # TODO(2.0): unnecessary once deprecation is enforced
-            # GH#45618 don't issue warning user can't do anything about
-            warnings.filterwarnings(
-                "ignore", ".*(SparseArray|SparseDtype).*", category=FutureWarning
-            )
-
-            cat = Categorical(k, ordered=True)
+        cat = Categorical(k, ordered=True)

         if na_position not in ["last", "first"]:
             raise ValueError(f"invalid na_position: {na_position}")

pandas/io/formats/style.py  (+4, -10)

@@ -3604,15 +3604,11 @@ def _background_gradient(
         rng = smax - smin
         # extend lower / upper bounds, compresses color range
         norm = mpl.colors.Normalize(smin - (rng * low), smax + (rng * high))
-        from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0

-        if mpl_ge_3_6_0():
-            if cmap is None:
-                rgbas = mpl.colormaps[mpl.rcParams["image.cmap"]](norm(gmap))
-            else:
-                rgbas = mpl.colormaps.get_cmap(cmap)(norm(gmap))
+        if cmap is None:
+            rgbas = mpl.colormaps[mpl.rcParams["image.cmap"]](norm(gmap))
         else:
-            rgbas = plt.cm.get_cmap(cmap)(norm(gmap))
+            rgbas = mpl.colormaps.get_cmap(cmap)(norm(gmap))

     def relative_luminance(rgba) -> float:
         """

@@ -3891,10 +3887,8 @@ def css_calc(x, left: float, right: float, align: str, color: str | list | tuple
         if cmap is not None:
             # use the matplotlib colormap input
             with _mpl(Styler.bar) as (plt, mpl):
-                from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0
-
                 cmap = (
-                    (mpl.colormaps[cmap] if mpl_ge_3_6_0() else mpl.cm.get_cmap(cmap))
+                    mpl.colormaps[cmap]
                     if isinstance(cmap, str)
                     else cmap  # assumed to be a Colormap instance as documented
                 )
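
Note: with the minimum Matplotlib now at least 3.6, the Styler code uses the mpl.colormaps registry unconditionally instead of the deprecated plt.cm.get_cmap / mpl.cm.get_cmap. A short sketch of the two lookup styles used in the diff; the colormap names are illustrative.

    import matplotlib as mpl

    default_cmap = mpl.colormaps[mpl.rcParams["image.cmap"]]  # registry __getitem__, string keys only
    viridis = mpl.colormaps.get_cmap("viridis")               # also passes through an existing Colormap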

pandas/plotting/_matplotlib/compat.py  (-7)

@@ -1,8 +1,6 @@
 # being a bit too dynamic
 from __future__ import annotations

-import operator
-
 from pandas.util.version import Version


@@ -15,8 +13,3 @@ def inner():
         return op(Version(mpl.__version__), Version(version))

     return inner
-
-
-mpl_ge_3_4_0 = _mpl_version("3.4.0", operator.ge)
-mpl_ge_3_5_0 = _mpl_version("3.5.0", operator.ge)
-mpl_ge_3_6_0 = _mpl_version("3.6.0", operator.ge)
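
Note: the mpl_ge_3_* helpers are gone; only the _mpl_version factory remains. A sketch of the equivalent inline check the deleted mpl_ge_3_6_0 used to provide, in case such a gate is needed locally; the variable name is illustrative.

    import operator

    import matplotlib
    from pandas.util.version import Version

    # same comparison the deleted helper performed lazily
    mpl_is_at_least_3_6 = operator.ge(Version(matplotlib.__version__), Version("3.6.0"))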

pandas/plotting/_matplotlib/core.py  (+3, -13)

@@ -55,7 +55,6 @@
 from pandas.core.frame import DataFrame

 from pandas.io.formats.printing import pprint_thing
-from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0
 from pandas.plotting._matplotlib.converter import register_pandas_matplotlib_converters
 from pandas.plotting._matplotlib.groupby import reconstruct_data_with_by
 from pandas.plotting._matplotlib.misc import unpack_single_str_list

@@ -1229,19 +1228,13 @@ def _make_plot(self):
             c_values = c

         if self.colormap is not None:
-            if mpl_ge_3_6_0():
-                cmap = mpl.colormaps.get_cmap(self.colormap)
-            else:
-                cmap = self.plt.cm.get_cmap(self.colormap)
+            cmap = mpl.colormaps.get_cmap(self.colormap)
         else:
             # cmap is only used if c_values are integers, otherwise UserWarning
             if is_integer_dtype(c_values):
                 # pandas uses colormap, matplotlib uses cmap.
                 cmap = "Greys"
-                if mpl_ge_3_6_0():
-                    cmap = mpl.colormaps[cmap]
-                else:
-                    cmap = self.plt.cm.get_cmap(cmap)
+                cmap = mpl.colormaps[cmap]
             else:
                 cmap = None

@@ -1309,10 +1302,7 @@ def _make_plot(self) -> None:
         ax = self.axes[0]
         # pandas uses colormap, matplotlib uses cmap.
         cmap = self.colormap or "BuGn"
-        if mpl_ge_3_6_0():
-            cmap = mpl.colormaps.get_cmap(cmap)
-        else:
-            cmap = self.plt.cm.get_cmap(cmap)
+        cmap = mpl.colormaps.get_cmap(cmap)
         cb = self.kwds.pop("colorbar", True)

         if C is None:
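
Note: these _make_plot paths resolve colormaps through the registry directly; nothing changes at the user level. A sketch of the plotting call whose colormap resolution is simplified here; the data and colormap choice are illustrative.

    import pandas as pd

    df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 3, 2, 1], "c": [10, 20, 30, 40]})

    # colormap may be a name; internally it is now looked up via mpl.colormaps.get_cmap
    ax = df.plot.scatter(x="x", y="y", c="c", colormap="viridis")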

pandas/plotting/_matplotlib/style.py  (+1, -7)

@@ -10,7 +10,6 @@
 import warnings

 import matplotlib as mpl
-from matplotlib import cm
 import matplotlib.colors
 import numpy as np

@@ -21,8 +20,6 @@

 import pandas.core.common as com

-from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0
-
 if TYPE_CHECKING:
     from matplotlib.colors import Colormap

@@ -153,10 +150,7 @@ def _get_cmap_instance(colormap: str | Colormap) -> Colormap:
     """Get instance of matplotlib colormap."""
     if isinstance(colormap, str):
         cmap = colormap
-        if mpl_ge_3_6_0():
-            colormap = mpl.colormaps[colormap]
-        else:
-            colormap = cm.get_cmap(colormap)
+        colormap = mpl.colormaps[colormap]
     if colormap is None:
         raise ValueError(f"Colormap {cmap} is not recognized")
     return colormap

pandas/plotting/_matplotlib/tools.py  (+2, -10)

@@ -22,8 +22,6 @@
     ABCSeries,
 )

-from pandas.plotting._matplotlib import compat
-
 if TYPE_CHECKING:
     from matplotlib.axes import Axes
     from matplotlib.axis import Axis

@@ -396,10 +394,7 @@ def handle_shared_axes(
         row_num = lambda x: x.get_subplotspec().rowspan.start
         col_num = lambda x: x.get_subplotspec().colspan.start

-        if compat.mpl_ge_3_4_0():
-            is_first_col = lambda x: x.get_subplotspec().is_first_col()
-        else:
-            is_first_col = lambda x: x.is_first_col()
+        is_first_col = lambda x: x.get_subplotspec().is_first_col()

         if nrows > 1:
             try:

@@ -421,10 +416,7 @@ def handle_shared_axes(
         except IndexError:
             # if gridspec is used, ax.rowNum and ax.colNum may different
             # from layout shape. in this case, use last_row logic
-            if compat.mpl_ge_3_4_0():
-                is_last_row = lambda x: x.get_subplotspec().is_last_row()
-            else:
-                is_last_row = lambda x: x.is_last_row()
+            is_last_row = lambda x: x.get_subplotspec().is_last_row()
             for ax in axarr:
                 if is_last_row(ax):
                     continue
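
Note: handle_shared_axes now relies on the SubplotSpec accessors unconditionally, since the Matplotlib < 3.4 fallback to ax.is_first_col() is gone. A sketch of those accessors in ordinary user code; the figure layout is illustrative.

    import matplotlib.pyplot as plt

    fig, axes = plt.subplots(2, 2)
    for ax in axes.flat:
        spec = ax.get_subplotspec()
        if spec.is_first_col():
            ax.set_ylabel("left column")
        if spec.is_last_row():
            ax.set_xlabel("bottom row")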
