Skip to content

Commit 1f98682 — Merge branch 'master' into nui-regression (2 parents: fddbe55 + df90970)

File tree: 3 files changed (+35 additions, -48 deletions)

pandas/_testing.py

Lines changed: 25 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1294,6 +1294,8 @@ def assert_series_equal(
12941294
rtol=1.0e-5,
12951295
atol=1.0e-8,
12961296
obj="Series",
1297+
*,
1298+
check_index=True,
12971299
):
12981300
"""
12991301
Check that left and right Series are equal.
@@ -1353,6 +1355,10 @@ def assert_series_equal(
13531355
obj : str, default 'Series'
13541356
Specify object name being compared, internally used to show appropriate
13551357
assertion message.
1358+
check_index : bool, default True
1359+
Whether to check index equivalence. If False, then compare only values.
1360+
1361+
.. versionadded:: 1.3.0
13561362
13571363
Examples
13581364
--------
@@ -1388,18 +1394,20 @@ def assert_series_equal(
13881394
if check_flags:
13891395
assert left.flags == right.flags, f"{repr(left.flags)} != {repr(right.flags)}"
13901396

1391-
# index comparison
1392-
assert_index_equal(
1393-
left.index,
1394-
right.index,
1395-
exact=check_index_type,
1396-
check_names=check_names,
1397-
check_exact=check_exact,
1398-
check_categorical=check_categorical,
1399-
rtol=rtol,
1400-
atol=atol,
1401-
obj=f"{obj}.index",
1402-
)
1397+
if check_index:
1398+
# GH #38183
1399+
assert_index_equal(
1400+
left.index,
1401+
right.index,
1402+
exact=check_index_type,
1403+
check_names=check_names,
1404+
check_exact=check_exact,
1405+
check_categorical=check_categorical,
1406+
rtol=rtol,
1407+
atol=atol,
1408+
obj=f"{obj}.index",
1409+
)
1410+
14031411
if check_freq and isinstance(left.index, (pd.DatetimeIndex, pd.TimedeltaIndex)):
14041412
lidx = left.index
14051413
ridx = right.index
@@ -1704,6 +1712,10 @@ def assert_frame_equal(
17041712
assert col in right
17051713
lcol = left.iloc[:, i]
17061714
rcol = right.iloc[:, i]
1715+
# GH #38183
1716+
# use check_index=False, because we do not want to run
1717+
# assert_index_equal for each column,
1718+
# as we already checked it for the whole dataframe before.
17071719
assert_series_equal(
17081720
lcol,
17091721
rcol,
@@ -1717,6 +1729,7 @@ def assert_frame_equal(
17171729
obj=f'{obj}.iloc[:, {i}] (column name="{col}")',
17181730
rtol=rtol,
17191731
atol=atol,
1732+
check_index=False,
17201733
)
17211734

17221735

pandas/core/algorithms.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212
import numpy as np
1313

14-
from pandas._libs import Timestamp, algos, hashtable as htable, iNaT, lib
14+
from pandas._libs import algos, hashtable as htable, iNaT, lib
1515
from pandas._typing import AnyArrayLike, ArrayLike, DtypeObj, FrameOrSeriesUnion
1616
from pandas.util._decorators import doc
1717

@@ -59,7 +59,11 @@
5959
)
6060
from pandas.core.dtypes.missing import isna, na_value_for_dtype
6161

62-
from pandas.core.construction import array, extract_array
62+
from pandas.core.construction import (
63+
array,
64+
ensure_wrapped_if_datetimelike,
65+
extract_array,
66+
)
6367
from pandas.core.indexers import validate_indices
6468

6569
if TYPE_CHECKING:
@@ -1906,10 +1910,7 @@ def searchsorted(arr, value, side="left", sorter=None) -> np.ndarray:
19061910
):
19071911
# E.g. if `arr` is an array with dtype='datetime64[ns]'
19081912
# and `value` is a pd.Timestamp, we may need to convert value
1909-
value_ser = array([value]) if is_scalar(value) else array(value)
1910-
value = value_ser[0] if is_scalar(value) else value_ser
1911-
if isinstance(value, Timestamp) and value.tzinfo is None:
1912-
value = value.to_datetime64()
1913+
arr = ensure_wrapped_if_datetimelike(arr)
19131914

19141915
result = arr.searchsorted(value, side=side, sorter=sorter)
19151916
return result

pandas/core/internals/blocks.py

Lines changed: 3 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,7 @@
3737
from pandas.core.dtypes.common import (
3838
DT64NS_DTYPE,
3939
TD64NS_DTYPE,
40-
is_bool_dtype,
4140
is_categorical_dtype,
42-
is_datetime64_any_dtype,
4341
is_datetime64_dtype,
4442
is_datetime64tz_dtype,
4543
is_dtype_equal,
@@ -53,7 +51,6 @@
5351
is_re,
5452
is_re_compilable,
5553
is_sparse,
56-
is_timedelta64_dtype,
5754
pandas_dtype,
5855
)
5956
from pandas.core.dtypes.dtypes import CategoricalDtype, ExtensionDtype
@@ -927,7 +924,7 @@ def setitem(self, indexer, value):
927924

928925
else:
929926
# current dtype cannot store value, coerce to common dtype
930-
927+
# TODO: can we just use coerce_to_target_dtype for all this
931928
if hasattr(value, "dtype"):
932929
dtype = value.dtype
933930

@@ -1164,33 +1161,9 @@ def coerce_to_target_dtype(self, other):
11641161
# if we cannot then coerce to object
11651162
dtype, _ = infer_dtype_from(other, pandas_dtype=True)
11661163

1167-
if is_dtype_equal(self.dtype, dtype):
1168-
return self
1169-
1170-
if self.is_bool or is_object_dtype(dtype) or is_bool_dtype(dtype):
1171-
# we don't upcast to bool
1172-
return self.astype(object)
1173-
1174-
elif (self.is_float or self.is_complex) and (
1175-
is_integer_dtype(dtype) or is_float_dtype(dtype)
1176-
):
1177-
# don't coerce float/complex to int
1178-
return self
1164+
new_dtype = find_common_type([self.dtype, dtype])
11791165

1180-
elif self.is_datetime or is_datetime64_any_dtype(dtype):
1181-
# The is_dtype_equal check above ensures that at most one of
1182-
# these two conditions hold, so we must cast to object.
1183-
return self.astype(object)
1184-
1185-
elif self.is_timedelta or is_timedelta64_dtype(dtype):
1186-
# The is_dtype_equal check above ensures that at most one of
1187-
# these two conditions hold, so we must cast to object.
1188-
return self.astype(object)
1189-
1190-
try:
1191-
return self.astype(dtype)
1192-
except (ValueError, TypeError, OverflowError):
1193-
return self.astype(object)
1166+
return self.astype(new_dtype, copy=False)
11941167

11951168
def interpolate(
11961169
self,

Comments (0)