From c0a593cea4c6506c6f94fca0951bacfc68d6f76f Mon Sep 17 00:00:00 2001 From: Irv Lustig Date: Tue, 25 Jun 2024 16:15:57 -0400 Subject: [PATCH 1/2] change np.float_ to np.double --- pandas-stubs/core/resample.pyi | 2 +- pandas-stubs/core/reshape/tile.pyi | 6 +++--- pandas-stubs/plotting/_core.pyi | 4 ++-- tests/test_pandas.py | 20 ++++++++++---------- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/pandas-stubs/core/resample.pyi b/pandas-stubs/core/resample.pyi index 9b1d8ee96..dcc895f0e 100644 --- a/pandas-stubs/core/resample.pyi +++ b/pandas-stubs/core/resample.pyi @@ -163,7 +163,7 @@ class Resampler(BaseGroupBy[NDFrameT]): def count(self: Resampler[DataFrame]) -> DataFrame: ... def quantile( self, - q: float | list[float] | npt.NDArray[np.float_] | Series[float] = ..., + q: float | list[float] | npt.NDArray[np.double] | Series[float] = ..., **kwargs, ) -> NDFrameT: ... diff --git a/pandas-stubs/core/reshape/tile.pyi b/pandas-stubs/core/reshape/tile.pyi index 3cbaec8f3..60bdde5b3 100644 --- a/pandas-stubs/core/reshape/tile.pyi +++ b/pandas-stubs/core/reshape/tile.pyi @@ -250,7 +250,7 @@ def qcut( retbins: Literal[True], precision: int = ..., duplicates: Literal["raise", "drop"] = ..., -) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.float_]]: ... +) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.double]]: ... @overload def qcut( x: Series, @@ -260,7 +260,7 @@ def qcut( retbins: Literal[True], precision: int = ..., duplicates: Literal["raise", "drop"] = ..., -) -> tuple[Series, npt.NDArray[np.float_]]: ... +) -> tuple[Series, npt.NDArray[np.double]]: ... @overload def qcut( x: Index | npt.NDArray | Sequence[int] | Sequence[float], @@ -270,4 +270,4 @@ def qcut( retbins: Literal[True], precision: int = ..., duplicates: Literal["raise", "drop"] = ..., -) -> tuple[Categorical, npt.NDArray[np.float_]]: ... +) -> tuple[Categorical, npt.NDArray[np.double]]: ... 
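Background for the alias swap above, as a minimal sketch rather than anything in the patch itself: `np.float_` was removed in NumPy 2.0, while `np.double` names the same 64-bit float (`np.float64`) in both NumPy 1.x and 2.x, so annotations written against the updated stubs stay valid across versions. The `quantile_bins` helper below is hypothetical, purely for illustration.

```python
import numpy as np
import numpy.typing as npt

# np.double is the 64-bit C double (np.float64) on every platform NumPy supports,
# and it exists in both NumPy 1.x and 2.x; np.float_ was removed in NumPy 2.0.
def quantile_bins(q: npt.NDArray[np.double]) -> npt.NDArray[np.double]:
    # Hypothetical helper, not part of the stubs: pass quantile probabilities
    # through as a float64 array.
    return np.asarray(q, dtype=np.double)

bins = quantile_bins(np.array([0.25, 0.5, 0.75]))
```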
diff --git a/pandas-stubs/plotting/_core.pyi b/pandas-stubs/plotting/_core.pyi index 6861d65d7..d46bfc293 100644 --- a/pandas-stubs/plotting/_core.pyi +++ b/pandas-stubs/plotting/_core.pyi @@ -340,7 +340,7 @@ class PlotAccessor: | Callable[[gaussian_kde], float] | None ) = ..., - ind: npt.NDArray[np.float_] | int | None = ..., + ind: npt.NDArray[np.double] | int | None = ..., *, subplots: Literal[False] | None = ..., **kwargs, @@ -354,7 +354,7 @@ class PlotAccessor: | Callable[[gaussian_kde], float] | None ) = ..., - ind: npt.NDArray[np.float_] | int | None = ..., + ind: npt.NDArray[np.double] | int | None = ..., *, subplots: Literal[True], **kwargs, diff --git a/tests/test_pandas.py b/tests/test_pandas.py index c7d777be9..e72538673 100644 --- a/tests/test_pandas.py +++ b/tests/test_pandas.py @@ -1004,10 +1004,10 @@ def test_qcut() -> None: check(assert_type(c0, pd.Categorical), pd.Categorical) check(assert_type(d0, pd.Series), pd.Series) - check(assert_type(a1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(b1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(c1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(d1, npt.NDArray[np.float_]), np.ndarray) + check(assert_type(a1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(b1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(c1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(d1, npt.NDArray[np.double]), np.ndarray) e0, e1 = pd.qcut(val_list, [0.25, 0.5, 0.75], retbins=True) f0, f1 = pd.qcut(val_arr, np.array([0.25, 0.5, 0.75]), retbins=True) @@ -1023,12 +1023,12 @@ def test_qcut() -> None: check(assert_type(i0, npt.NDArray[np.intp]), np.ndarray) check(assert_type(j0, npt.NDArray[np.intp]), np.ndarray) - check(assert_type(e1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(f1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(g1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(h1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(i1, npt.NDArray[np.float_]), np.ndarray) - check(assert_type(j1, npt.NDArray[np.float_]), np.ndarray) + check(assert_type(e1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(f1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(g1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(h1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(i1, npt.NDArray[np.double]), np.ndarray) + check(assert_type(j1, npt.NDArray[np.double]), np.ndarray) def test_merge() -> None: From 12de93e016fb77325047c92a2796169bfbf7cf50 Mon Sep 17 00:00:00 2001 From: Irv Lustig Date: Tue, 25 Jun 2024 19:47:47 -0400 Subject: [PATCH 2/2] changes to support numpy 2.0 --- pandas-stubs/_typing.pyi | 8 +-- pandas-stubs/core/series.pyi | 10 ++++ pyproject.toml | 98 ++++++++++++++++++++++-------------- tests/__init__.py | 4 +- tests/test_io.py | 6 +++ tests/test_series.py | 63 ++++++++++------------- tests/test_utility.py | 16 +++++- 7 files changed, 124 insertions(+), 81 deletions(-) diff --git a/pandas-stubs/_typing.pyi b/pandas-stubs/_typing.pyi index 6023b98db..28dc09512 100644 --- a/pandas-stubs/_typing.pyi +++ b/pandas-stubs/_typing.pyi @@ -115,7 +115,7 @@ BooleanDtypeArg: TypeAlias = ( # Numpy bool type # https://numpy.org/doc/stable/reference/arrays.scalars.html#numpy.bool_ | type[np.bool_] - | Literal["?", "b1", "bool8", "bool_"] + | Literal["?", "b1", "bool_"] # PyArrow boolean type and its string alias | Literal["bool[pyarrow]", "boolean[pyarrow]"] ) @@ -147,7 +147,7 @@ IntDtypeArg: TypeAlias = ( | Literal["q", "longlong"] # NOTE: 
int128 not assigned # https://numpy.org/doc/stable/reference/arrays.scalars.html#numpy.intp | type[np.intp] # signed pointer (=`intptr_t`, platform dependent) - | Literal["p", "intp", "int0"] + | Literal["p", "intp"] # PyArrow integer types and their string aliases | Literal["int8[pyarrow]", "int16[pyarrow]", "int32[pyarrow]", "int64[pyarrow]"] ) @@ -176,7 +176,7 @@ UIntDtypeArg: TypeAlias = ( | Literal["Q", "ulonglong"] # NOTE: uint128 not assigned # https://numpy.org/doc/stable/reference/arrays.scalars.html#numpy.uintp | type[np.uintp] # unsigned pointer (=`uintptr_t`, platform dependent) - | Literal["P", "uintp", "uint0"] + | Literal["P", "uintp"] # PyArrow unsigned integer types and their string aliases | Literal["uint8[pyarrow]", "uint16[pyarrow]", "uint32[pyarrow]", "uint64[pyarrow]"] ) @@ -361,7 +361,7 @@ BytesDtypeArg: TypeAlias = ( # Numpy bytes type and its string alias # https://numpy.org/doc/stable/reference/arrays.scalars.html#numpy.bytes_ | type[np.bytes_] - | Literal["S", "a", "bytes_", "bytes0", "string_"] + | Literal["S", "bytes_", "bytes0", "string_"] # PyArrow binary type and its string alias | Literal["binary[pyarrow]"] ) diff --git a/pandas-stubs/core/series.pyi b/pandas-stubs/core/series.pyi index 900ca9b9c..27fd6c6bc 100644 --- a/pandas-stubs/core/series.pyi +++ b/pandas-stubs/core/series.pyi @@ -230,6 +230,16 @@ _ListLike: TypeAlias = ( class Series(IndexOpsMixin[S1], NDFrame): __hash__: ClassVar[None] + @overload + def __new__( # type: ignore[overload-overlap] + cls, + data: npt.NDArray[np.float64], + index: Axes | None = ..., + *, + dtype: Dtype = ..., + name: Hashable = ..., + copy: bool = ..., + ) -> Series[float]: ... @overload def __new__( # type: ignore[overload-overlap] cls, diff --git a/pyproject.toml b/pyproject.toml index 43e0ad69a..b6889888d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,22 +8,20 @@ readme = "README.md" homepage = "https://pandas.pydata.org" repository = "https://github.com/pandas-dev/pandas-stubs" classifiers = [ - "Development Status :: 5 - Production/Stable", - "Environment :: Console", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: BSD License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Topic :: Scientific/Engineering" -] -packages = [ - { "include" = "pandas-stubs"} + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Topic :: Scientific/Engineering", ] +packages = [{ "include" = "pandas-stubs" }] [tool.poetry.urls] "Bug Tracker" = "https://github.com/pandas-dev/pandas-stubs/issues" @@ -33,26 +31,26 @@ packages = [ python = ">=3.9" types-pytz = ">= 2022.1.1" numpy = [ - { version = ">=1.23.5", python = ">=3.9,<3.12" }, - { version = ">=1.26.0", python = ">=3.12,<3.13" } + { version = ">=1.23.5,<2.0.0", python = ">=3.9,<3.12" }, + { version = ">=2.0.0", python = ">=3.12,<3.13" }, ] [tool.poetry.group.dev.dependencies] -mypy 
= "1.10.0" +mypy = "1.10.1" pandas = "2.2.2" pyarrow = ">=10.0.1" pytest = ">=7.1.2" -pyright = ">=1.1.365" +pyright = ">=1.1.369" poethepoet = ">=0.16.5" loguru = ">=0.6.0" typing-extensions = ">=4.4.0" -matplotlib = ">=3.5.1,<3.9.0" # TODO https://github.com/pandas-dev/pandas/issues/58851 +matplotlib = ">=3.5.1,<3.9.0" # TODO https://github.com/pandas-dev/pandas/issues/58851 pre-commit = ">=2.19.0" black = ">=23.3.0" isort = ">=5.12.0" openpyxl = ">=3.0.10" # for tables, MacOS gives random CI failures on 3.9.2 -tables = { version = "==3.9.2", python = "<4"} # 3.8.0 depends on blosc2 which caps python to <4 +tables = { version = "==3.9.2", python = "<4" } # 3.8.0 depends on blosc2 which caps python to <4 lxml = ">=4.9.1" pyreadstat = ">=1.2.0" xlrd = ">=2.0.1" @@ -88,7 +86,9 @@ script = "scripts.test:test(dist=True)" [tool.poe.tasks.pytest] help = "Run pytest" script = "scripts.test:pytest(nightly)" -args = [{name = "nightly", positional = false, default = false, type = "boolean", required = false, help= "Use pandas nightly (off by default)"}] +args = [ + { name = "nightly", positional = false, default = false, type = "boolean", required = false, help = "Use pandas nightly (off by default)" }, +] [tool.poe.tasks.style] help = "Run pre-commit" @@ -96,7 +96,9 @@ script = "scripts.test.run:style" [tool.poe.tasks.mypy] help = "Run mypy on 'tests' (using the local stubs) and on the local stubs" -args = [{name = "mypy_nightly", positional = false, default = false, type = "boolean", required = false, help= "Use mypy nightly (off by default)"}] +args = [ + { name = "mypy_nightly", positional = false, default = false, type = "boolean", required = false, help = "Use mypy nightly (off by default)" }, +] script = "scripts.test:mypy_src(mypy_nightly)" [tool.poe.tasks.mypy_dist] @@ -114,7 +116,11 @@ script = "scripts.test:test(dist=True, type_checker='pyright')" [tool.poe.tasks.stubtest] script = "scripts.test:stubtest(allowlist, check_missing, nightly)" help = "Run stubtest to compare the installed stubs against pandas" -args = [{ name = "allowlist", positional = true, default = "", required = false, help= "Path to an allowlist (optional)" }, {name = "check_missing", positional = false, default = false, type = "boolean", required = false, help= "Report errors when the stubs are incomplete (off by default)"}, {name = "nightly", positional = false, default = false, type = "boolean", required = false, help= "Compare against pandas nightly (off by default)"}] +args = [ + { name = "allowlist", positional = true, default = "", required = false, help = "Path to an allowlist (optional)" }, + { name = "check_missing", positional = false, default = false, type = "boolean", required = false, help = "Report errors when the stubs are incomplete (off by default)" }, + { name = "nightly", positional = false, default = false, type = "boolean", required = false, help = "Compare against pandas nightly (off by default)" }, +] [tool.black] @@ -122,10 +128,26 @@ target-version = ['py39'] [tool.isort] known_pre_libs = "pandas._config" -known_pre_core = ["pandas._libs", "pandas._typing", "pandas.util._*", "pandas.compat", "pandas.errors"] +known_pre_core = [ + "pandas._libs", + "pandas._typing", + "pandas.util._*", + "pandas.compat", + "pandas.errors", +] known_dtypes = "pandas.core.dtypes" known_post_core = ["pandas.tseries", "pandas.io", "pandas.plotting"] -sections = ["FUTURE", "STDLIB", "THIRDPARTY" ,"PRE_LIBS" , "PRE_CORE", "DTYPES", "FIRSTPARTY", "POST_CORE", "LOCALFOLDER"] +sections = [ + 
"FUTURE", + "STDLIB", + "THIRDPARTY", + "PRE_LIBS", + "PRE_CORE", + "DTYPES", + "FIRSTPARTY", + "POST_CORE", + "LOCALFOLDER", +] profile = "black" combine_as_imports = true force_grid_wrap = 2 @@ -142,16 +164,16 @@ follow_imports_for_stubs = false no_site_packages = false no_silence_site_packages = false # Disallow dynamic typing -disallow_any_unimported = false # TODO -disallow_any_expr = false # TODO -disallow_any_decorated = false # TODO -disallow_any_explicit = false # TODO -disallow_any_generics = false # TODO +disallow_any_unimported = false # TODO +disallow_any_expr = false # TODO +disallow_any_decorated = false # TODO +disallow_any_explicit = false # TODO +disallow_any_generics = false # TODO disallow_subclassing_any = false # TODO # Untyped definitions and calls -disallow_untyped_calls = false # TODO -disallow_untyped_defs = false # TODO -disallow_incomplete_defs = false # TODO +disallow_untyped_calls = false # TODO +disallow_untyped_defs = false # TODO +disallow_incomplete_defs = false # TODO check_untyped_defs = true disallow_untyped_decorators = true # None and Optional handling @@ -161,8 +183,8 @@ strict_optional = true warn_redundant_casts = true warn_unused_ignores = true warn_no_return = true -warn_return_any = false # TODO -warn_unreachable = false # GH#27396 +warn_return_any = false # TODO +warn_unreachable = false # GH#27396 # Suppressing errors ignore_errors = false enable_error_code = "ignore-without-code" # same as in pandas @@ -170,7 +192,7 @@ enable_error_code = "ignore-without-code" # same as in pandas allow_untyped_globals = false allow_redefinition = false local_partial_types = false -implicit_reexport = false # pyright behaves the same +implicit_reexport = false # pyright behaves the same strict_equality = true # Configuring error messages show_error_context = false @@ -181,7 +203,7 @@ show_error_codes = true typeCheckingMode = "strict" stubPath = "." 
include = ["tests", "pandas-stubs"] -enableTypeIgnoreComments = false # use pyright-specific ignores +enableTypeIgnoreComments = false # use pyright-specific ignores # disable subset of strict reportMissingParameterType = false reportMissingTypeArgument = false diff --git a/tests/__init__.py b/tests/__init__.py index 4448af3c5..2dcce20af 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -13,6 +13,7 @@ Literal, ) +import numpy as np import pandas as pd from pandas.core.groupby.groupby import BaseGroupBy from pandas.util.version import Version @@ -23,6 +24,7 @@ TYPE_CHECKING_INVALID_USAGE: Final = TYPE_CHECKING WINDOWS = os.name == "nt" or "cygwin" in platform.system().lower() PD_LTE_22 = Version(pd.__version__) < Version("2.2.999") +NUMPY20 = np.lib.NumpyVersion(np.__version__) >= "2.0.0" def check( @@ -40,7 +42,7 @@ def check( if isinstance(actual, pd.Series): value = actual.iloc[index_to_check_for_type] elif isinstance(actual, pd.Index): - value = actual[index_to_check_for_type] # type: ignore[assignment] + value = actual[index_to_check_for_type] elif isinstance(actual, BaseGroupBy): value = actual.obj elif hasattr(actual, "__iter__"): diff --git a/tests/test_io.py b/tests/test_io.py index 54d450502..c93ac40f7 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -53,6 +53,7 @@ WINDOWS, check, ) +from tests import NUMPY20 # See https://github.com/PyTables/PyTables/issues/1172 from pandas.io.api import to_pickle from pandas.io.json._json import JsonReader @@ -347,12 +348,14 @@ def test_sas_xport() -> None: pass +@pytest.mark.skipif(NUMPY20, reason="numpy 2.0 not compatible with Pytables") def test_hdf(): with ensure_clean() as path: check(assert_type(DF.to_hdf(path, key="df"), None), type(None)) check(assert_type(read_hdf(path), Union[DataFrame, Series]), DataFrame) +@pytest.mark.skipif(NUMPY20, reason="numpy 2.0 not compatible with Pytables") def test_hdfstore() -> None: with ensure_clean() as path: store = HDFStore(path, model="w") @@ -396,6 +399,7 @@ def test_hdfstore() -> None: store.close() +@pytest.mark.skipif(NUMPY20, reason="numpy 2.0 not compatible with Pytables") def test_read_hdf_iterator() -> None: with ensure_clean() as path: check(assert_type(DF.to_hdf(path, key="df", format="table"), None), type(None)) @@ -410,6 +414,7 @@ def test_read_hdf_iterator() -> None: ti.close() +@pytest.mark.skipif(NUMPY20, reason="numpy 2.0 not compatible with Pytables") def test_hdf_context_manager() -> None: with ensure_clean() as path: check(assert_type(DF.to_hdf(path, key="df", format="table"), None), type(None)) @@ -418,6 +423,7 @@ def test_hdf_context_manager() -> None: check(assert_type(store.get("df"), Union[DataFrame, Series]), DataFrame) +@pytest.mark.skipif(NUMPY20, reason="numpy 2.0 not compatible with Pytables") def test_hdf_series(): s = DF["a"] with ensure_clean() as path: diff --git a/tests/test_series.py b/tests/test_series.py index 42de32de5..61e2b8bbb 100644 --- a/tests/test_series.py +++ b/tests/test_series.py @@ -1840,7 +1840,6 @@ def test_change_to_dict_return_type() -> None: # numpy boolean type (np.bool_, np.bool_), ("bool_", np.bool_), - ("bool8", np.bool_), ("?", np.bool_), ("b1", np.bool_), # pyarrow boolean type @@ -1885,8 +1884,6 @@ def test_change_to_dict_return_type() -> None: # numpy int64 (np.int_, np.int_), ("int_", np.int_), - ("long", np.int_), - ("l", np.int_), ("int64", np.int64), ("i8", np.int64), # numpy extended int @@ -1896,7 +1893,6 @@ def test_change_to_dict_return_type() -> None: # numpy signed pointer (platform dependent one of 
int[8,16,32,64]) (np.intp, np.intp), ("intp", np.intp), - ("int0", np.intp), ("p", np.intp), # pyarrow integer types ("int8[pyarrow]", int), @@ -1939,8 +1935,6 @@ def test_change_to_dict_return_type() -> None: # numpy uint64 (np.uint, np.uint), ("uint", np.uint), - ("ulong", np.uint), - ("L", np.uint), ("uint64", np.uint64), ("u8", np.uint64), # numpy extended uint @@ -1950,7 +1944,6 @@ def test_change_to_dict_return_type() -> None: # numpy unsigned pointer (platform dependent one of uint[8,16,32,64]) (np.uintp, np.uintp), ("uintp", np.uintp), - ("uint0", np.uintp), ("P", np.uintp), # pyarrow unsigned integer types ("uint8[pyarrow]", int), @@ -1984,14 +1977,12 @@ def test_change_to_dict_return_type() -> None: # numpy float64 (np.double, np.double), ("double", np.double), - ("float_", np.double), ("d", np.double), ("float64", np.float64), ("f8", np.float64), # numpy float128 (np.longdouble, np.longdouble), ("longdouble", np.longdouble), - ("longfloat", np.longdouble), ("g", np.longdouble), ("f16", np.longdouble), # ("float96", np.longdouble), # NOTE: unsupported @@ -2011,23 +2002,18 @@ def test_change_to_dict_return_type() -> None: # numpy complex64 (np.csingle, np.csingle), ("csingle", np.csingle), - ("singlecomplex", np.csingle), ("F", np.csingle), ("complex64", np.complex64), ("c8", np.complex64), # numpy complex128 (np.cdouble, np.cdouble), ("cdouble", np.cdouble), - ("cfloat", np.cdouble), - ("complex_", np.cdouble), ("D", np.cdouble), ("complex128", np.complex128), ("c16", np.complex128), # numpy complex256 (np.clongdouble, np.clongdouble), ("clongdouble", np.clongdouble), - ("clongfloat", np.clongdouble), - ("longcomplex", np.clongdouble), ("G", np.clongdouble), ("c32", np.clongdouble), # ("complex192", np.clongdouble), # NOTE: unsupported @@ -2156,9 +2142,7 @@ def test_change_to_dict_return_type() -> None: # numpy string (np.str_, str), ("str_", str), - ("str0", str), ("unicode", str), - ("unicode_", str), ("U", str), # pyarrow string ("string[pyarrow]", str), @@ -2171,10 +2155,7 @@ def test_change_to_dict_return_type() -> None: # numpy bytes (np.bytes_, np.bytes_), ("bytes_", np.bytes_), - ("string_", np.bytes_), - ("bytes0", np.bytes_), ("S", np.bytes_), - ("a", np.bytes_), # pyarrow bytes ("binary[pyarrow]", bytes), ] @@ -2203,7 +2184,6 @@ def test_change_to_dict_return_type() -> None: (np.void, np.void), ("void", np.void), ("V", np.void), - ("void0", np.void), ] @@ -2222,7 +2202,6 @@ def test_astype_bool(cast_arg: BooleanDtypeArg, target_type: type) -> None: # numpy boolean type assert_type(s.astype(np.bool_), "pd.Series[bool]") assert_type(s.astype("bool_"), "pd.Series[bool]") - assert_type(s.astype("bool8"), "pd.Series[bool]") assert_type(s.astype("?"), "pd.Series[bool]") # pyarrow boolean type assert_type(s.astype("bool[pyarrow]"), "pd.Series[bool]") @@ -2278,7 +2257,6 @@ def test_astype_int(cast_arg: IntDtypeArg, target_type: type) -> None: # numpy int64 assert_type(s.astype(np.int_), "pd.Series[int]") assert_type(s.astype("int_"), "pd.Series[int]") - assert_type(s.astype("int0"), "pd.Series[int]") assert_type(s.astype("int64"), "pd.Series[int]") assert_type(s.astype("long"), "pd.Series[int]") assert_type(s.astype("l"), "pd.Series[int]") @@ -2333,7 +2311,6 @@ def test_astype_uint(cast_arg: IntDtypeArg, target_type: type) -> None: # numpy uint64 assert_type(s.astype(np.uint), "pd.Series[int]") assert_type(s.astype("uint"), "pd.Series[int]") - assert_type(s.astype("uint0"), "pd.Series[int]") assert_type(s.astype("uint64"), "pd.Series[int]") assert_type(s.astype("ulong"), 
"pd.Series[int]") assert_type(s.astype("L"), "pd.Series[int]") @@ -2385,14 +2362,12 @@ def test_astype_float(cast_arg: FloatDtypeArg, target_type: type) -> None: # numpy float64 assert_type(s.astype(np.double), "pd.Series[float]") assert_type(s.astype("double"), "pd.Series[float]") - assert_type(s.astype("float_"), "pd.Series[float]") assert_type(s.astype("float64"), "pd.Series[float]") assert_type(s.astype("d"), "pd.Series[float]") assert_type(s.astype("f8"), "pd.Series[float]") # numpy float128 assert_type(s.astype(np.longdouble), "pd.Series[float]") assert_type(s.astype("longdouble"), "pd.Series[float]") - assert_type(s.astype("longfloat"), "pd.Series[float]") assert_type(s.astype("float128"), "pd.Series[float]") assert_type(s.astype("g"), "pd.Series[float]") assert_type(s.astype("f16"), "pd.Series[float]") @@ -2421,23 +2396,18 @@ def test_astype_complex(cast_arg: ComplexDtypeArg, target_type: type) -> None: # numpy complex64 assert_type(s.astype(np.csingle), "pd.Series[complex]") assert_type(s.astype("csingle"), "pd.Series[complex]") - assert_type(s.astype("singlecomplex"), "pd.Series[complex]") assert_type(s.astype("complex64"), "pd.Series[complex]") assert_type(s.astype("F"), "pd.Series[complex]") assert_type(s.astype("c8"), "pd.Series[complex]") # numpy complex128 assert_type(s.astype(np.cdouble), "pd.Series[complex]") assert_type(s.astype("cdouble"), "pd.Series[complex]") - assert_type(s.astype("cfloat"), "pd.Series[complex]") - assert_type(s.astype("complex_"), "pd.Series[complex]") assert_type(s.astype("complex128"), "pd.Series[complex]") assert_type(s.astype("D"), "pd.Series[complex]") assert_type(s.astype("c16"), "pd.Series[complex]") # numpy complex256 assert_type(s.astype(np.clongdouble), "pd.Series[complex]") assert_type(s.astype("clongdouble"), "pd.Series[complex]") - assert_type(s.astype("clongfloat"), "pd.Series[complex]") - assert_type(s.astype("longcomplex"), "pd.Series[complex]") assert_type(s.astype("complex256"), "pd.Series[complex]") assert_type(s.astype("G"), "pd.Series[complex]") assert_type(s.astype("c32"), "pd.Series[complex]") @@ -2581,9 +2551,7 @@ def test_astype_string(cast_arg: StrDtypeArg, target_type: type) -> None: # numpy string assert_type(s.astype(np.str_), "pd.Series[str]") assert_type(s.astype("str_"), "pd.Series[str]") - assert_type(s.astype("str0"), "pd.Series[str]") assert_type(s.astype("unicode"), "pd.Series[str]") - assert_type(s.astype("unicode_"), "pd.Series[str]") assert_type(s.astype("U"), "pd.Series[str]") # pyarrow string assert_type(s.astype("string[pyarrow]"), "pd.Series[str]") @@ -2601,8 +2569,6 @@ def test_astype_bytes(cast_arg: BytesDtypeArg, target_type: type) -> None: # numpy bytes assert_type(s.astype(np.bytes_), "pd.Series[bytes]") assert_type(s.astype("bytes_"), "pd.Series[bytes]") - assert_type(s.astype("bytes0"), "pd.Series[bytes]") - assert_type(s.astype("string_"), "pd.Series[bytes]") assert_type(s.astype("S"), "pd.Series[bytes]") # pyarrow bytes assert_type(s.astype("binary[pyarrow]"), "pd.Series[bytes]") @@ -2647,7 +2613,6 @@ def test_astype_void(cast_arg: VoidDtypeArg, target_type: type) -> None: assert_type(s.astype(np.void), "pd.Series[Any]") assert_type(s.astype("void"), "pd.Series[Any]") assert_type(s.astype("V"), "pd.Series[Any]") - assert_type(s.astype("void0"), "pd.Series[Any]") def test_astype_other() -> None: @@ -2689,6 +2654,30 @@ def test_all_astype_args_tested() -> None: "M8", "object_", "object0", + "a", # deprecated in numpy 2.0 + } + NON_NUMPY20_ALIASES = { + "complex_", + "unicode_", + "uint0", + 
"longfloat", + "string_", + "cfloat", + "int0", + "void0", + "bytes0", + "singlecomplex", + "longcomplex", + "bool8", + "clongfloat", + "str0", + "float_", + # Next 4 are excluded because results are incompatible between numpy 1.x + # and 2.0, and it's not possible to do numpy version specific typing + "long", + "l", + "ulong", + "L", } TESTED_ASTYPE_ARGS: list[tuple[Any, type]] = ( ASTYPE_BOOL_ARGS @@ -2708,7 +2697,9 @@ def test_all_astype_args_tested() -> None: TESTED_ALIASES: set[str] = { arg for arg, _ in TESTED_ASTYPE_ARGS if isinstance(arg, str) } - UNTESTED_ALIASES = (NUMPY_ALIASES - TESTED_ALIASES) - EXCLUDED_ALIASES + UNTESTED_ALIASES = ( + NUMPY_ALIASES - TESTED_ALIASES - NON_NUMPY20_ALIASES + ) - EXCLUDED_ALIASES assert not UNTESTED_ALIASES, f"{UNTESTED_ALIASES}" NUMPY_TYPES: set[type] = set(np.sctypeDict.values()) diff --git a/tests/test_utility.py b/tests/test_utility.py index 70ae19034..beb3d7592 100644 --- a/tests/test_utility.py +++ b/tests/test_utility.py @@ -1,9 +1,15 @@ +from contextlib import ( + AbstractContextManager, + nullcontext, +) import platform import pandas as pd +import pytest from typing_extensions import assert_type from tests import ( + NUMPY20, check, pytest_warns_bounded, ) @@ -16,8 +22,14 @@ def test_show_version(): upper="3.11.99", version_str=platform.python_version(), ): - check(assert_type(pd.show_versions(True), None), type(None)) - check(assert_type(pd.show_versions(False), None), type(None)) + context: AbstractContextManager + if NUMPY20: # https://github.com/PyTables/PyTables/issues/1172 + context = pytest.raises(ValueError) + else: + context = nullcontext() + with context: + check(assert_type(pd.show_versions(True), None), type(None)) + check(assert_type(pd.show_versions(False), None), type(None)) def test_dummies():