
Commit 2374726

Merge pull request #175 from pandas-dev/master
Sync Fork from Upstream Repo
2 parents 26494c1 + b4e9566 commit 2374726

27 files changed, +113 -152 lines changed

ci/azure/windows.yml (+1 -1)

@@ -8,7 +8,7 @@ jobs:
     vmImage: ${{ parameters.vmImage }}
   strategy:
     matrix:
-      py37_np16:
+      py37_np17:
        ENV_FILE: ci/deps/azure-windows-37.yaml
        CONDA_PY: "37"
        PATTERN: "not slow and not network"

ci/deps/actions-37-minimum_versions.yaml (+1 -1)

@@ -18,7 +18,7 @@ dependencies:
   - jinja2=2.10
   - numba=0.46.0
   - numexpr=2.6.8
-  - numpy=1.16.5
+  - numpy=1.17.3
   - openpyxl=3.0.0
   - pytables=3.5.1
   - python-dateutil=2.7.3

ci/deps/azure-macos-37.yaml (+1 -1)

@@ -19,7 +19,7 @@ dependencies:
   - matplotlib=2.2.3
   - nomkl
   - numexpr
-  - numpy=1.16.5
+  - numpy=1.17.3
   - openpyxl
   - pyarrow=0.15.1
   - pytables

ci/deps/azure-windows-37.yaml (+1 -1)

@@ -24,7 +24,7 @@ dependencies:
   - moto>=1.3.14
   - flask
   - numexpr
-  - numpy=1.16.*
+  - numpy=1.17.*
   - openpyxl
   - pyarrow=0.15
   - pytables

doc/source/getting_started/install.rst (+1 -1)

@@ -222,7 +222,7 @@ Dependencies
 Package                                                          Minimum supported version
 ================================================================ ==========================
 `setuptools <https://setuptools.readthedocs.io/en/latest/>`__    38.6.0
-`NumPy <https://numpy.org>`__                                    1.16.5
+`NumPy <https://numpy.org>`__                                    1.17.3
 `python-dateutil <https://dateutil.readthedocs.io/en/stable/>`__ 2.7.3
 `pytz <https://pypi.org/project/pytz/>`__                        2017.3
 ================================================================ ==========================

doc/source/whatsnew/v1.3.0.rst (+2 -2)

@@ -468,7 +468,7 @@ If installed, we now require:
 +-----------------+-----------------+----------+---------+
 | Package         | Minimum Version | Required | Changed |
 +=================+=================+==========+=========+
-| numpy           | 1.16.5          | X        |         |
+| numpy           | 1.17.3          | X        | X       |
 +-----------------+-----------------+----------+---------+
 | pytz            | 2017.3          | X        |         |
 +-----------------+-----------------+----------+---------+
@@ -714,7 +714,7 @@ Missing

 - Bug in :class:`Grouper` now correctly propagates ``dropna`` argument and :meth:`DataFrameGroupBy.transform` now correctly handles missing values for ``dropna=True`` (:issue:`35612`)
 - Bug in :func:`isna`, and :meth:`Series.isna`, :meth:`Index.isna`, :meth:`DataFrame.isna` (and the corresponding ``notna`` functions) not recognizing ``Decimal("NaN")`` objects (:issue:`39409`)
--
+- Bug in :meth:`DataFrame.fillna` not accepting dictionary for ``downcast`` keyword (:issue:`40809`)

 MultiIndex
 ^^^^^^^^^^

environment.yml (+1 -1)

@@ -3,7 +3,7 @@ channels:
   - conda-forge
 dependencies:
   # required
-  - numpy>=1.16.5
+  - numpy>=1.17.3
   - python=3
   - python-dateutil>=2.7.3
   - pytz

pandas/__init__.py (-1)

@@ -20,7 +20,6 @@

 # numpy compat
 from pandas.compat import (
-    np_version_under1p17 as _np_version_under1p17,
     np_version_under1p18 as _np_version_under1p18,
     is_numpy_dev as _is_numpy_dev,
 )

pandas/compat/__init__.py (-2)

@@ -16,7 +16,6 @@
     is_numpy_dev,
     np_array_datetime64_compat,
     np_datetime64_compat,
-    np_version_under1p17,
     np_version_under1p18,
     np_version_under1p19,
     np_version_under1p20,
@@ -133,7 +132,6 @@ def get_lzma_file(lzma):
     "is_numpy_dev",
     "np_array_datetime64_compat",
     "np_datetime64_compat",
-    "np_version_under1p17",
     "np_version_under1p18",
     "np_version_under1p19",
     "np_version_under1p20",

pandas/compat/numpy/__init__.py (+1 -3)

@@ -8,12 +8,11 @@
 # numpy versioning
 _np_version = np.__version__
 _nlv = LooseVersion(_np_version)
-np_version_under1p17 = _nlv < LooseVersion("1.17")
 np_version_under1p18 = _nlv < LooseVersion("1.18")
 np_version_under1p19 = _nlv < LooseVersion("1.19")
 np_version_under1p20 = _nlv < LooseVersion("1.20")
 is_numpy_dev = ".dev" in str(_nlv)
-_min_numpy_ver = "1.16.5"
+_min_numpy_ver = "1.17.3"


 if _nlv < _min_numpy_ver:
@@ -65,6 +64,5 @@ def np_array_datetime64_compat(arr, dtype="M8[ns]"):
 __all__ = [
     "np",
     "_np_version",
-    "np_version_under1p17",
     "is_numpy_dev",
 ]
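
With the minimum NumPy raised to 1.17.3, the ``np_version_under1p17`` flag is always ``False``, so it can be deleted; the remaining flags keep being consumed in the usual gating pattern. A minimal, hypothetical sketch of that pattern (the test itself is illustrative, not part of this patch):

import pytest

from pandas.compat import np_version_under1p18


@pytest.mark.skipif(
    np_version_under1p18, reason="requires NumPy 1.18+ behaviour under test"
)
def test_requires_modern_numpy():
    # Placeholder body: a real test would exercise 1.18-only behaviour here.
    assert not np_version_under1p18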

pandas/core/array_algos/masked_reductions.py (+1 -6)

@@ -8,7 +8,6 @@
 import numpy as np

 from pandas._libs import missing as libmissing
-from pandas.compat import np_version_under1p17

 from pandas.core.nanops import check_below_min_count

@@ -46,11 +45,7 @@ def _sumprod(
     else:
         if check_below_min_count(values.shape, mask, min_count):
             return libmissing.NA
-
-        if np_version_under1p17:
-            return func(values[~mask])
-        else:
-            return func(values, where=~mask)
+        return func(values, where=~mask)


 def sum(
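
The dropped branch existed only because the ``where=`` keyword on NumPy reduction functions was added in NumPy 1.17; with 1.17.3 as the floor it is always available. A small illustrative check (array values are made up):

import numpy as np

values = np.array([1.0, 2.0, 4.0, 8.0])
mask = np.array([False, True, False, False])  # True marks missing entries

# NumPy >= 1.17 can skip masked entries directly in the reduction ...
assert np.sum(values, where=~mask) == 13.0
# ... matching the old fallback that materialised the unmasked values first.
assert np.sum(values[~mask]) == 13.0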

pandas/core/generic.py (+3 -1)

@@ -6446,11 +6446,13 @@ def fillna(
                    )

                result = self if inplace else self.copy()
+               is_dict = isinstance(downcast, dict)
                for k, v in value.items():
                    if k not in result:
                        continue
                    obj = result[k]
-                   obj.fillna(v, limit=limit, inplace=True, downcast=downcast)
+                   downcast_k = downcast if not is_dict else downcast.get(k)
+                   obj.fillna(v, limit=limit, inplace=True, downcast=downcast_k)
                return result if not inplace else None

            elif not is_list_like(value):
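
This is the change referenced by the new whatsnew entry for :issue:`40809`: when ``value`` is a dict, ``downcast`` may now also be a dict and is looked up per column. A hedged usage sketch (column names and data are illustrative):

import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1.0, np.nan, 3.0], "b": [np.nan, 2.5, np.nan]})

# Fill per column and request downcasting only for column "a";
# with this change the per-column downcast value is passed through.
filled = df.fillna(value={"a": 0, "b": 0.5}, downcast={"a": "infer"})

print(filled.dtypes)  # "a" should infer down to int64, "b" stays float64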

pandas/core/groupby/generic.py (-6)

@@ -177,9 +177,6 @@ def pinner(cls):
 class SeriesGroupBy(GroupBy[Series]):
     _apply_allowlist = base.series_apply_allowlist

-    # Defined as a cache_readonly in SelectionMixin
-    _obj_with_exclusions: Series
-
     def _iterate_slices(self) -> Iterable[Series]:
         yield self._selected_obj

@@ -930,9 +927,6 @@ def pct_change(self, periods=1, fill_method="pad", limit=None, freq=None):
 @pin_allowlisted_properties(DataFrame, base.dataframe_apply_allowlist)
 class DataFrameGroupBy(GroupBy[DataFrame]):

-    # Defined as a cache_readonly in SelectionMixin
-    _obj_with_exclusions: DataFrame
-
     _apply_allowlist = base.dataframe_apply_allowlist

     _agg_examples_doc = dedent(

pandas/core/indexing.py (+5 -1)

@@ -1873,7 +1873,11 @@ def _setitem_single_column(self, loc: int, value, plane_indexer):
        if com.is_null_slice(pi) or com.is_full_slice(pi, len(self.obj)):
            ser = value
        elif is_array_like(value) and is_exact_shape_match(ser, value):
-           ser = value
+           if is_list_like(pi):
+               ser = value[np.argsort(pi)]
+           else:
+               # in case of slice
+               ser = value[pi]
        else:
            # set the item, possibly having a dtype change
            ser = ser.copy()
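
For context on the reordering above: when a full-length array is assigned through ``.loc`` with row labels given out of order, the positional indexer ``pi`` is unsorted, and ``np.argsort(pi)`` maps the supplied values back into the frame's row order. A hedged example of the user-facing behaviour this targets (data made up):

import numpy as np
import pandas as pd

df = pd.DataFrame({"x": [1, 2, 3]}, index=["a", "b", "c"])

# Rows are listed out of order, so the values must be realigned:
# "c" gets 30, "a" gets 10, "b" gets 20.
df.loc[["c", "a", "b"], "x"] = np.array([30, 10, 20])

print(df["x"].tolist())  # expected: [10, 20, 30]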

pandas/io/formats/style.py (+49 -46)

@@ -1558,55 +1558,10 @@ highlight_between(

         .. figure:: ../../_static/style/hbetw_props.png
         """
-
-        def f(
-            data: FrameOrSeries,
-            props: str,
-            left: Scalar | Sequence | np.ndarray | FrameOrSeries | None = None,
-            right: Scalar | Sequence | np.ndarray | FrameOrSeries | None = None,
-            inclusive: bool | str = True,
-        ) -> np.ndarray:
-            if np.iterable(left) and not isinstance(left, str):
-                left = _validate_apply_axis_arg(
-                    left, "left", None, data  # type: ignore[arg-type]
-                )
-
-            if np.iterable(right) and not isinstance(right, str):
-                right = _validate_apply_axis_arg(
-                    right, "right", None, data  # type: ignore[arg-type]
-                )
-
-            # get ops with correct boundary attribution
-            if inclusive == "both":
-                ops = (operator.ge, operator.le)
-            elif inclusive == "neither":
-                ops = (operator.gt, operator.lt)
-            elif inclusive == "left":
-                ops = (operator.ge, operator.lt)
-            elif inclusive == "right":
-                ops = (operator.gt, operator.le)
-            else:
-                raise ValueError(
-                    f"'inclusive' values can be 'both', 'left', 'right', or 'neither' "
-                    f"got {inclusive}"
-                )
-
-            g_left = (
-                ops[0](data, left)
-                if left is not None
-                else np.full(data.shape, True, dtype=bool)
-            )
-            l_right = (
-                ops[1](data, right)
-                if right is not None
-                else np.full(data.shape, True, dtype=bool)
-            )
-            return np.where(g_left & l_right, props, "")
-
         if props is None:
             props = f"background-color: {color};"
         return self.apply(
-            f,  # type: ignore[arg-type]
+            _highlight_between,  # type: ignore[arg-type]
             axis=axis,
             subset=subset,
             props=props,
@@ -1831,3 +1786,51 @@ def css(rgba) -> str:
         index=data.index,
         columns=data.columns,
     )
+
+
+def _highlight_between(
+    data: FrameOrSeries,
+    props: str,
+    left: Scalar | Sequence | np.ndarray | FrameOrSeries | None = None,
+    right: Scalar | Sequence | np.ndarray | FrameOrSeries | None = None,
+    inclusive: bool | str = True,
+) -> np.ndarray:
+    """
+    Return an array of css props based on condition of data values within given range.
+    """
+    if np.iterable(left) and not isinstance(left, str):
+        left = _validate_apply_axis_arg(
+            left, "left", None, data  # type: ignore[arg-type]
+        )
+
+    if np.iterable(right) and not isinstance(right, str):
+        right = _validate_apply_axis_arg(
+            right, "right", None, data  # type: ignore[arg-type]
+        )
+
+    # get ops with correct boundary attribution
+    if inclusive == "both":
+        ops = (operator.ge, operator.le)
+    elif inclusive == "neither":
+        ops = (operator.gt, operator.lt)
+    elif inclusive == "left":
+        ops = (operator.ge, operator.lt)
+    elif inclusive == "right":
+        ops = (operator.gt, operator.le)
+    else:
+        raise ValueError(
+            f"'inclusive' values can be 'both', 'left', 'right', or 'neither' "
+            f"got {inclusive}"
+        )
+
+    g_left = (
+        ops[0](data, left)
+        if left is not None
+        else np.full(data.shape, True, dtype=bool)
+    )
+    l_right = (
+        ops[1](data, right)
+        if right is not None
+        else np.full(data.shape, True, dtype=bool)
+    )
+    return np.where(g_left & l_right, props, "")
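
The refactor only moves the nested helper to module level as ``_highlight_between``; the public behaviour of :meth:`Styler.highlight_between` is unchanged. A brief, hedged usage sketch of that method (data and colour are illustrative):

import pandas as pd

df = pd.DataFrame({"score": [3, 7, 9], "weight": [0.5, 1.5, 2.5]})

# inclusive="left" keeps the lower bound and drops the upper one,
# i.e. the (operator.ge, operator.lt) pair from the helper above.
styler = df.style.highlight_between(
    subset=["score"], left=5, right=9, inclusive="left", color="yellow"
)

html = styler.render()  # produce the styled HTML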

pandas/tests/api/test_api.py (-1)

@@ -192,7 +192,6 @@ class TestPDApi(Base):
         "_hashtable",
         "_lib",
         "_libs",
-        "_np_version_under1p17",
         "_np_version_under1p18",
         "_is_numpy_dev",
         "_testing",

pandas/tests/computation/test_eval.py (-20)

@@ -12,10 +12,6 @@
 import numpy as np
 import pytest

-from pandas.compat import (
-    is_platform_windows,
-    np_version_under1p17,
-)
 from pandas.errors import PerformanceWarning
 import pandas.util._test_decorators as td

@@ -220,22 +216,6 @@ def test_simple_cmp_ops(self, cmp_op):

     @pytest.mark.parametrize("op", _good_arith_ops)
     def test_binary_arith_ops(self, op, lhs, rhs, request):
-
-        if (
-            op == "/"
-            and isinstance(lhs, DataFrame)
-            and isinstance(rhs, DataFrame)
-            and not lhs.isna().any().any()
-            and rhs.shape == (10, 5)
-            and np_version_under1p17
-            and is_platform_windows()
-            and compat.PY38
-        ):
-            mark = pytest.mark.xfail(
-                reason="GH#37328 floating point precision on Windows builds"
-            )
-            request.node.add_marker(mark)
-
         self.check_binary_arith_op(lhs, op, rhs)

     def test_modulus(self, lhs, rhs):

pandas/tests/extension/base/dim2.py (-19)

@@ -4,26 +4,10 @@
 import numpy as np
 import pytest

-from pandas.compat import np_version_under1p17
-
 import pandas as pd
-from pandas.core.arrays import (
-    FloatingArray,
-    IntegerArray,
-)
 from pandas.tests.extension.base.base import BaseExtensionTests


-def maybe_xfail_masked_reductions(arr, request):
-    if (
-        isinstance(arr, (FloatingArray, IntegerArray))
-        and np_version_under1p17
-        and arr.ndim == 2
-    ):
-        mark = pytest.mark.xfail(reason="masked_reductions does not implement")
-        request.node.add_marker(mark)
-
-
 class Dim2CompatTests(BaseExtensionTests):
     def test_swapaxes(self, data):
         arr2d = data.repeat(2).reshape(-1, 2)
@@ -148,7 +132,6 @@ def test_reductions_2d_axis_none(self, data, method, request):
             pytest.skip("test is not applicable for this type/dtype")

         arr2d = data.reshape(1, -1)
-        maybe_xfail_masked_reductions(arr2d, request)

         err_expected = None
         err_result = None
@@ -177,7 +160,6 @@ def test_reductions_2d_axis0(self, data, method, request):
             pytest.skip("test is not applicable for this type/dtype")

         arr2d = data.reshape(1, -1)
-        maybe_xfail_masked_reductions(arr2d, request)

         kwargs = {}
         if method == "std":
@@ -225,7 +207,6 @@ def test_reductions_2d_axis1(self, data, method, request):
             pytest.skip("test is not applicable for this type/dtype")

         arr2d = data.reshape(1, -1)
-        maybe_xfail_masked_reductions(arr2d, request)

         try:
             result = getattr(arr2d, method)(axis=1)
