
Commit 38a6942

Author: Matteo Felici (committed)
Merge branch 'master' into format - fix CI fail pandas-dev#34835
2 parents: cc2e1c1 + 16dfb61

78 files changed: +286 -285 lines

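The changes below are almost entirely mechanical: deprecated NumPy type aliases such as np.object, np.bool, np.int, np.float and np.complex are replaced with the Python builtins, or with the real NumPy scalar types (np.bool_, np.complex_) where a NumPy type is meant. The short sketch below is not part of the diff; it only illustrates, under the assumption that the CI failure came from NumPy deprecating these aliases, why the swap is behavior-preserving.

import numpy as np

# Alias -> replacement used throughout this commit (illustrative summary):
#   np.object  -> object   (or np.object_ for the NumPy scalar type)
#   np.bool    -> bool     (or np.bool_)
#   np.int     -> int
#   np.float   -> float
#   np.complex -> complex  (or np.complex_)

# Both spellings name the same dtype, so the replacement does not change behavior:
assert np.dtype(object) == np.dtype("object")
assert np.dtype(bool) == np.dtype("bool")
assert np.empty(3, dtype=object).dtype == np.dtype("object")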

asv_bench/benchmarks/pandas_vb_common.py (+1 -1)

@@ -33,7 +33,7 @@
     np.uint8,
 ]
 datetime_dtypes = [np.datetime64, np.timedelta64]
-string_dtypes = [np.object]
+string_dtypes = [object]
 try:
     extension_dtypes = [
         pd.Int8Dtype,

asv_bench/benchmarks/series_methods.py (+8 -10)

@@ -58,17 +58,15 @@ def time_isin_nan_values(self):

 class IsInForObjects:
     def setup(self):
-        self.s_nans = Series(np.full(10 ** 4, np.nan)).astype(np.object)
-        self.vals_nans = np.full(10 ** 4, np.nan).astype(np.object)
-        self.s_short = Series(np.arange(2)).astype(np.object)
-        self.s_long = Series(np.arange(10 ** 5)).astype(np.object)
-        self.vals_short = np.arange(2).astype(np.object)
-        self.vals_long = np.arange(10 ** 5).astype(np.object)
+        self.s_nans = Series(np.full(10 ** 4, np.nan)).astype(object)
+        self.vals_nans = np.full(10 ** 4, np.nan).astype(object)
+        self.s_short = Series(np.arange(2)).astype(object)
+        self.s_long = Series(np.arange(10 ** 5)).astype(object)
+        self.vals_short = np.arange(2).astype(object)
+        self.vals_long = np.arange(10 ** 5).astype(object)
         # because of nans floats are special:
-        self.s_long_floats = Series(np.arange(10 ** 5, dtype=np.float)).astype(
-            np.object
-        )
-        self.vals_long_floats = np.arange(10 ** 5, dtype=np.float).astype(np.object)
+        self.s_long_floats = Series(np.arange(10 ** 5, dtype=np.float)).astype(object)
+        self.vals_long_floats = np.arange(10 ** 5, dtype=np.float).astype(object)

     def time_isin_nans(self):
         # if nan-objects are different objects,

asv_bench/benchmarks/sparse.py (+1 -1)

@@ -32,7 +32,7 @@ def time_series_to_frame(self):

 class SparseArrayConstructor:

-    params = ([0.1, 0.01], [0, np.nan], [np.int64, np.float64, np.object])
+    params = ([0.1, 0.01], [0, np.nan], [np.int64, np.float64, object])
     param_names = ["dense_proportion", "fill_value", "dtype"]

     def setup(self, dense_proportion, fill_value, dtype):

doc/source/user_guide/io.rst (+1 -1)

@@ -1884,7 +1884,7 @@ Fallback behavior
 If the JSON serializer cannot handle the container contents directly it will
 fall back in the following manner:

-* if the dtype is unsupported (e.g. ``np.complex``) then the ``default_handler``, if provided, will be called
+* if the dtype is unsupported (e.g. ``np.complex_``) then the ``default_handler``, if provided, will be called
   for each value, otherwise an exception is raised.

 * if an object is unsupported it will attempt the following:
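
A minimal sketch of the fallback this hunk documents (not from the diff): ``default_handler`` is an existing ``to_json`` keyword, and ``str`` is just one possible handler for values whose dtype the serializer cannot handle.

import numpy as np
import pandas as pd

# complex128 is not natively serializable, so each value is routed through
# the handler instead of raising.
df = pd.DataFrame({"z": np.array([1 + 2j, 3 - 4j])})
print(df.to_json(default_handler=str))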

doc/source/whatsnew/v1.0.5.rst (+1 -1)

@@ -1,7 +1,7 @@

 .. _whatsnew_105:

-What's new in 1.0.5 (June XX, 2020)
+What's new in 1.0.5 (June 17, 2020)
 -----------------------------------

 These are the changes in pandas 1.0.5. See :ref:`release` for a full changelog

doc/source/whatsnew/v1.1.0.rst (+4 -1)

@@ -292,7 +292,7 @@ Other enhancements
 - :meth:`DataFrame.to_csv` and :meth:`Series.to_csv` now accept an ``errors`` argument (:issue:`22610`)
 - :meth:`groupby.transform` now allows ``func`` to be ``pad``, ``backfill`` and ``cumcount`` (:issue:`31269`).
 - :meth:`~pandas.io.json.read_json` now accepts `nrows` parameter. (:issue:`33916`).
-- :meth `~pandas.io.gbq.read_gbq` now allows to disable progress bar (:issue:`33360`).
+- :meth:`~pandas.io.gbq.read_gbq` now allows to disable progress bar (:issue:`33360`).
 - :meth:`~pandas.io.gbq.read_gbq` now supports the ``max_results`` kwarg from ``pandas-gbq`` (:issue:`34639`).

 .. ---------------------------------------------------------------------------
@@ -843,6 +843,9 @@ Datetimelike
 - Bug in :meth:`DatetimeIndex.intersection` and :meth:`TimedeltaIndex.intersection` with results not having the correct ``name`` attribute (:issue:`33904`)
 - Bug in :meth:`DatetimeArray.__setitem__`, :meth:`TimedeltaArray.__setitem__`, :meth:`PeriodArray.__setitem__` incorrectly allowing values with ``int64`` dtype to be silently cast (:issue:`33717`)
 - Bug in subtracting :class:`TimedeltaIndex` from :class:`Period` incorrectly raising ``TypeError`` in some cases where it should succeed and ``IncompatibleFrequency`` in some cases where it should raise ``TypeError`` (:issue:`33883`)
+- Bug in constructing a Series or Index from a read-only NumPy array with non-ns
+  resolution which converted to object dtype instead of coercing to ``datetime64[ns]``
+  dtype when within the timestamp bounds (:issue:`34843`).
 - The ``freq`` keyword in :class:`Period`, :func:`date_range`, :func:`period_range`, :func:`pd.tseries.frequencies.to_offset` no longer allows tuples, pass as string instead (:issue:`34703`)

 Timedelta
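
A small reproduction sketch of the :issue:`34843` entry added above (illustrative only, not part of the diff):

import numpy as np
import pandas as pd

# Read-only, non-nanosecond datetime64 input; values are within Timestamp bounds.
arr = np.array(["2020-01-01", "2020-06-17"], dtype="datetime64[us]")
arr.setflags(write=False)

s = pd.Series(arr)
print(s.dtype)  # expected after the fix: datetime64[ns], not object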

pandas/_libs/hashtable_class_helper.pxi.in (+1 -1)

@@ -178,7 +178,7 @@ cdef class StringVector:
             Py_ssize_t n
             object val

-        ao = np.empty(self.data.n, dtype=np.object)
+        ao = np.empty(self.data.n, dtype=object)
         for i in range(self.data.n):
             val = self.data.data[i]
             ao[i] = val

pandas/_libs/hashtable_func_helper.pxi.in (+1 -1)

@@ -94,7 +94,7 @@ cpdef value_count_{{dtype}}({{c_type}}[:] values, bint dropna):
     build_count_table_{{dtype}}(values, table, dropna)
     {{endif}}

-    result_keys = np.empty(table.n_occupied, dtype=np.{{dtype}})
+    result_keys = np.empty(table.n_occupied, '{{dtype}}')
     result_counts = np.zeros(table.n_occupied, dtype=np.int64)

     {{if dtype == 'object'}}

pandas/_libs/parsers.pyx (+1 -1)

@@ -2037,7 +2037,7 @@ def _concatenate_chunks(list chunks):
         numpy_dtypes = {x for x in dtypes if not is_categorical_dtype(x)}
         if len(numpy_dtypes) > 1:
             common_type = np.find_common_type(numpy_dtypes, [])
-            if common_type == np.object:
+            if common_type == object:
                 warning_columns.append(str(name))

         dtype = dtypes.pop()

pandas/_libs/sparse.pyx (+1 -1)

@@ -791,4 +791,4 @@ def make_mask_object_ndarray(ndarray[object, ndim=1] arr, object fill_value):
         if value == fill_value and type(value) == type(fill_value):
             mask[i] = 0

-    return mask.view(dtype=np.bool)
+    return mask.view(dtype=bool)

pandas/_libs/testing.pyx (+1 -1)

@@ -11,7 +11,7 @@ cdef NUMERIC_TYPES = (
     bool,
     int,
     float,
-    np.bool,
+    np.bool_,
     np.int8,
    np.int16,
    np.int32,
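
Context for the change above (not from the diff): ``np.bool`` was merely an alias for the Python builtin ``bool``, whereas ``np.bool_`` is NumPy's own boolean scalar type, so listing ``np.bool_`` covers a type not already named by ``bool`` earlier in the tuple.

import numpy as np

x = np.array([True, False])[0]               # a NumPy boolean scalar
assert isinstance(x, np.bool_)
assert not isinstance(x, bool)               # np.bool_ does not subclass the builtin
assert np.dtype(np.bool_) == np.dtype(bool)  # but both map to the same dtype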

pandas/_libs/tslibs/conversion.pyx (+2 -1)

@@ -167,7 +167,8 @@ def ensure_datetime64ns(arr: ndarray, copy: bool=True):
     """
     cdef:
         Py_ssize_t i, n = arr.size
-        int64_t[:] ivalues, iresult
+        const int64_t[:] ivalues
+        int64_t[:] iresult
         NPY_DATETIMEUNIT unit
         npy_datetimestruct dts


pandas/core/algorithms.py (+1 -1)

@@ -171,7 +171,7 @@ def _ensure_data(
        return values, dtype

    # we have failed, return object
-   values = np.asarray(values, dtype=np.object)
+   values = np.asarray(values, dtype=object)
    return ensure_object(values), np.dtype("object")


pandas/core/arrays/sparse/array.py (+2 -2)

@@ -150,7 +150,7 @@ def _sparse_array_op(
         # to make template simple, cast here
         left_sp_values = left.sp_values.view(np.uint8)
         right_sp_values = right.sp_values.view(np.uint8)
-        result_dtype = np.bool
+        result_dtype = bool
     else:
         opname = f"sparse_{name}_{dtype}"
         left_sp_values = left.sp_values
@@ -183,7 +183,7 @@ def _wrap_result(name, data, sparse_index, fill_value, dtype=None):
         name = name[2:-2]

     if name in ("eq", "ne", "lt", "gt", "le", "ge"):
-        dtype = np.bool
+        dtype = bool

     fill_value = lib.item_from_zerodim(fill_value)

pandas/core/base.py (+1 -1)

@@ -1520,7 +1520,7 @@ def drop_duplicates(self, keep="first"):
     def duplicated(self, keep="first"):
         if isinstance(self, ABCIndexClass):
             if self.is_unique:
-                return np.zeros(len(self), dtype=np.bool)
+                return np.zeros(len(self), dtype=bool)
             return duplicated(self, keep=keep)
         else:
             return self._constructor(

pandas/core/dtypes/cast.py (+5 -5)

@@ -225,7 +225,7 @@ def trans(x):
            # if we have any nulls, then we are done
            return result

-       elif not isinstance(r[0], (np.integer, np.floating, np.bool, int, float, bool)):
+       elif not isinstance(r[0], (np.integer, np.floating, int, float, bool)):
            # a comparable, e.g. a Decimal may slip in here
            return result

@@ -315,7 +315,7 @@ def maybe_cast_result_dtype(dtype: DtypeObj, how: str) -> DtypeObj:
     from pandas.core.arrays.boolean import BooleanDtype
     from pandas.core.arrays.integer import Int64Dtype

-    if how in ["add", "cumsum", "sum"] and (dtype == np.dtype(np.bool)):
+    if how in ["add", "cumsum", "sum"] and (dtype == np.dtype(bool)):
         return np.dtype(np.int64)
     elif how in ["add", "cumsum", "sum"] and isinstance(dtype, BooleanDtype):
         return Int64Dtype()
@@ -597,7 +597,7 @@ def _ensure_dtype_type(value, dtype):
     """
     Ensure that the given value is an instance of the given dtype.

-    e.g. if out dtype is np.complex64, we should have an instance of that
+    e.g. if out dtype is np.complex64_, we should have an instance of that
     as opposed to a python complex object.

     Parameters
@@ -1483,7 +1483,7 @@ def find_common_type(types: List[DtypeObj]) -> DtypeObj:
     if has_bools:
         for t in types:
             if is_integer_dtype(t) or is_float_dtype(t) or is_complex_dtype(t):
-                return np.object
+                return object

     return np.find_common_type(types, [])

@@ -1742,7 +1742,7 @@ def validate_numeric_casting(dtype: np.dtype, value):
     if is_float(value) and np.isnan(value):
         raise ValueError("Cannot assign nan to integer series")

-    if issubclass(dtype.type, (np.integer, np.floating, np.complex)) and not issubclass(
+    if issubclass(dtype.type, (np.integer, np.floating, complex)) and not issubclass(
         dtype.type, np.bool_
     ):
         if is_bool(value):
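
The ``find_common_type`` hunk above keeps the existing rule that mixing boolean with numeric dtypes has no common NumPy dtype other than ``object``; only the spelling changed. A rough illustration of the user-visible behavior (assumed, not taken from the diff):

import pandas as pd

mixed = pd.concat([pd.Series([True, False]), pd.Series([1, 2])])
print(mixed.dtype)  # expected: object, since bool and int64 share no common dtype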

pandas/core/dtypes/common.py (+2 -2)

@@ -1354,7 +1354,7 @@ def is_bool_dtype(arr_or_dtype) -> bool:
     False
     >>> is_bool_dtype(bool)
     True
-    >>> is_bool_dtype(np.bool)
+    >>> is_bool_dtype(np.bool_)
     True
     >>> is_bool_dtype(np.array(['a', 'b']))
     False
@@ -1526,7 +1526,7 @@ def is_complex_dtype(arr_or_dtype) -> bool:
     False
     >>> is_complex_dtype(int)
     False
-    >>> is_complex_dtype(np.complex)
+    >>> is_complex_dtype(np.complex_)
     True
     >>> is_complex_dtype(np.array(['a', 'b']))
     False

pandas/core/generic.py (+2 -2)

@@ -10024,7 +10024,7 @@ def describe(

         Including only string columns in a ``DataFrame`` description.

-        >>> df.describe(include=[np.object])  # doctest: +SKIP
+        >>> df.describe(include=[object])  # doctest: +SKIP
                object
         count       3
         unique      3
@@ -10051,7 +10051,7 @@ def describe(

         Excluding object columns from a ``DataFrame`` description.

-        >>> df.describe(exclude=[np.object])  # doctest: +SKIP
+        >>> df.describe(exclude=[object])  # doctest: +SKIP
               categorical  numeric
         count            3      3.0
         unique           3      NaN
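
The doctest lines above are marked ``+SKIP``; a self-contained version using the same toy frame as the ``describe`` docstring would look roughly like this:

import pandas as pd

df = pd.DataFrame({
    "categorical": pd.Categorical(["d", "e", "f"]),
    "numeric": [1, 2, 3],
    "object": ["a", "b", "c"],
})

print(df.describe(include=[object]))  # only the object column
print(df.describe(exclude=[object]))  # the categorical and numeric columns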

pandas/core/groupby/groupby.py (+3 -3)

@@ -1267,9 +1267,9 @@ def objs_to_bool(vals: np.ndarray) -> Tuple[np.ndarray, Type]:
            if is_object_dtype(vals):
                vals = np.array([bool(x) for x in vals])
            else:
-               vals = vals.astype(np.bool)
+               vals = vals.astype(bool)

-           return vals.view(np.uint8), np.bool
+           return vals.view(np.uint8), bool

        def result_to_bool(result: np.ndarray, inference: Type) -> np.ndarray:
            return result.astype(inference, copy=False)
@@ -2059,7 +2059,7 @@ def pre_processor(vals: np.ndarray) -> Tuple[np.ndarray, Optional[Type]]:
                vals = vals.to_numpy(dtype=float, na_value=np.nan)
            elif is_datetime64_dtype(vals.dtype):
                inference = "datetime64[ns]"
-               vals = np.asarray(vals).astype(np.float)
+               vals = np.asarray(vals).astype(float)

            return vals, inference


pandas/core/indexes/base.py (+1 -1)

@@ -374,7 +374,7 @@ def __new__(
                return UInt64Index(data, copy=copy, dtype=dtype, name=name)
            elif is_float_dtype(data.dtype):
                return Float64Index(data, copy=copy, dtype=dtype, name=name)
-           elif issubclass(data.dtype.type, np.bool) or is_bool_dtype(data):
+           elif issubclass(data.dtype.type, bool) or is_bool_dtype(data):
                subarr = data.astype("object")
            else:
                subarr = com.asarray_tuplesafe(data, dtype=object)

pandas/core/internals/managers.py (+1 -1)

@@ -1951,7 +1951,7 @@ def _check_comparison_types(
        if isinstance(result, np.ndarray):
            # The shape of the mask can differ to that of the result
            # since we may compare only a subset of a's or b's elements
-           tmp = np.zeros(mask.shape, dtype=np.bool)
+           tmp = np.zeros(mask.shape, dtype=np.bool_)
            tmp[mask] = result
            result = tmp


pandas/core/util/hashing.py (+1 -1)

@@ -264,7 +264,7 @@ def hash_array(

    # First, turn whatever array this is into unsigned 64-bit ints, if we can
    # manage it.
-   elif isinstance(dtype, np.bool):
+   elif isinstance(dtype, bool):
        vals = vals.astype("u8")
    elif issubclass(dtype.type, (np.datetime64, np.timedelta64)):
        vals = vals.view("i8").astype("u8", copy=False)

pandas/core/window/rolling.py (+8 -5)

@@ -922,16 +922,19 @@ class Window(_Window):
     * ``blackmanharris``
     * ``nuttall``
     * ``barthann``
-    * ``kaiser`` (needs beta)
-    * ``gaussian`` (needs std)
-    * ``general_gaussian`` (needs power, width)
-    * ``slepian`` (needs width)
-    * ``exponential`` (needs tau), center is set to None.
+    * ``kaiser`` (needs parameter: beta)
+    * ``gaussian`` (needs parameter: std)
+    * ``general_gaussian`` (needs parameters: power, width)
+    * ``slepian`` (needs parameter: width)
+    * ``exponential`` (needs parameter: tau), center is set to None.

     If ``win_type=None`` all points are evenly weighted. To learn more about
     different window types see `scipy.signal window functions
     <https://docs.scipy.org/doc/scipy/reference/signal.html#window-functions>`__.

+    Certain window types require additional parameters to be passed. Please see
+    the third example below on how to add the additional parameters.
+
     Examples
     --------
     >>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
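
The note added above refers to the docstring's third example; the pattern it describes is passing the window parameter to the aggregation call, roughly as follows (weighted windows require SciPy):

import numpy as np
import pandas as pd

df = pd.DataFrame({"B": [0, 1, 2, np.nan, 4]})

# ``gaussian`` needs ``std``, supplied to the aggregation rather than to rolling():
print(df.rolling(2, win_type="gaussian").sum(std=3))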

pandas/io/parsers.py (+3 -3)

@@ -3476,13 +3476,13 @@ def _get_empty_meta(columns, index_col, index_names, dtype=None):
    # This will enable us to write `dtype[col_name]`
    # without worrying about KeyError issues later on.
    if not isinstance(dtype, dict):
-       # if dtype == None, default will be np.object.
-       default_dtype = dtype or np.object
+       # if dtype == None, default will be object.
+       default_dtype = dtype or object
        dtype = defaultdict(lambda: default_dtype)
    else:
        # Save a copy of the dictionary.
        _dtype = dtype.copy()
-       dtype = defaultdict(lambda: np.object)
+       dtype = defaultdict(lambda: object)

        # Convert column indexes to column names.
        for k, v in _dtype.items():
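
The hunk above relies on ``defaultdict`` so that any column without an explicit dtype falls back to ``object``. A stand-alone sketch of that pattern (column names are made up for illustration):

from collections import defaultdict

import numpy as np

dtype = defaultdict(lambda: object)  # same fallback pattern as in _get_empty_meta
dtype["price"] = np.float64          # hypothetical per-column override

print(dtype["price"])    # <class 'numpy.float64'>
print(dtype["comment"])  # <class 'object'> for any column not listed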

pandas/io/sas/sas7bdat.py (+1 -1)

@@ -685,7 +685,7 @@ def read(self, nrows=None):
        nd = self._column_types.count(b"d")
        ns = self._column_types.count(b"s")

-       self._string_chunk = np.empty((ns, nrows), dtype=np.object)
+       self._string_chunk = np.empty((ns, nrows), dtype=object)
        self._byte_chunk = np.zeros((nd, 8 * nrows), dtype=np.uint8)

        self._current_row_in_chunk_index = 0

pandas/io/stata.py (+4 -4)

@@ -322,7 +322,7 @@ def convert_delta_safe(base, deltas, unit) -> Series:
    elif fmt.startswith(("%tC", "tC")):

        warnings.warn("Encountered %tC format. Leaving in Stata Internal Format.")
-       conv_dates = Series(dates, dtype=np.object)
+       conv_dates = Series(dates, dtype=object)
        if has_bad_values:
            conv_dates[bad_locs] = NaT
        return conv_dates
@@ -451,7 +451,7 @@ def g(x: datetime.datetime) -> int:
        conv_dates = 4 * (d.year - stata_epoch.year) + (d.month - 1) // 3
    elif fmt in ["%th", "th"]:
        d = parse_dates_safe(dates, year=True)
-       conv_dates = 2 * (d.year - stata_epoch.year) + (d.month > 6).astype(np.int)
+       conv_dates = 2 * (d.year - stata_epoch.year) + (d.month > 6).astype(int)
    elif fmt in ["%ty", "ty"]:
        d = parse_dates_safe(dates, year=True)
        conv_dates = d.year
@@ -553,7 +553,7 @@ def _cast_to_stata_types(data: DataFrame) -> DataFrame:
    ws = ""
    # original, if small, if large
    conversion_data = (
-       (np.bool, np.int8, np.int8),
+       (np.bool_, np.int8, np.int8),
        (np.uint8, np.int8, np.int16),
        (np.uint16, np.int16, np.int32),
        (np.uint32, np.int32, np.int64),
@@ -1725,7 +1725,7 @@ def _do_convert_missing(self, data: DataFrame, convert_missing: bool) -> DataFrame
        if convert_missing:  # Replacement follows Stata notation
            missing_loc = np.nonzero(np.asarray(missing))[0]
            umissing, umissing_loc = np.unique(series[missing], return_inverse=True)
-           replacement = Series(series, dtype=np.object)
+           replacement = Series(series, dtype=object)
            for j, um in enumerate(umissing):
                missing_value = StataMissingValue(um)


pandas/plotting/_matplotlib/tools.py (+1 -1)

@@ -301,7 +301,7 @@ def _handle_shared_axes(axarr, nplots, naxes, nrows, ncols, sharex, sharey):
        try:
            # first find out the ax layout,
            # so that we can correctly handle 'gaps"
-           layout = np.zeros((nrows + 1, ncols + 1), dtype=np.bool)
+           layout = np.zeros((nrows + 1, ncols + 1), dtype=np.bool_)
            for ax in axarr:
                layout[row_num(ax), col_num(ax)] = ax.get_visible()

