Skip to content

Commit 8e4424f

Browse files
jbrockmendel authored and jreback committed
TST: parametrize/de-duplicate test_datetime64 (#29559)
1 parent 5c36aa1 commit 8e4424f

File tree

1 file changed

+57
-133
lines changed

1 file changed

+57
-133
lines changed

pandas/tests/arithmetic/test_datetime64.py

+57-133
Original file line number | Diff line number | Diff line change
@@ -26,7 +26,9 @@
2626
Timestamp,
2727
date_range,
2828
)
29+
import pandas.core.arrays.datetimelike as dtl
2930
from pandas.core.indexes.datetimes import _to_M8
31+
from pandas.core.ops import roperator
3032
import pandas.util.testing as tm
3133

3234

@@ -102,19 +104,24 @@ def test_compare_zerodim(self, tz_naive_fixture, box_with_array):
102104
expected = tm.box_expected(expected, xbox)
103105
tm.assert_equal(result, expected)
104106

105-
def test_dt64arr_cmp_date_invalid(self, tz_naive_fixture, box_with_array):
106-
# GH#19800, GH#19301 datetime.date comparison raises to
107-
# match DatetimeIndex/Timestamp. This also matches the behavior
108-
# of stdlib datetime.datetime
109-
tz = tz_naive_fixture
110-
111-
dti = pd.date_range("20010101", periods=10, tz=tz)
112-
date = dti[0].to_pydatetime().date()
113-
114-
dtarr = tm.box_expected(dti, box_with_array)
115-
assert_invalid_comparison(dtarr, date, box_with_array)
116-
117-
@pytest.mark.parametrize("other", ["foo", -1, 99, 4.0, object(), timedelta(days=2)])
107+
@pytest.mark.parametrize(
108+
"other",
109+
[
110+
"foo",
111+
-1,
112+
99,
113+
4.0,
114+
object(),
115+
timedelta(days=2),
116+
# GH#19800, GH#19301 datetime.date comparison raises to
117+
# match DatetimeIndex/Timestamp. This also matches the behavior
118+
# of stdlib datetime.datetime
119+
datetime(2001, 1, 1).date(),
120+
# GH#19301 None and NaN are *not* cast to NaT for comparisons
121+
None,
122+
np.nan,
123+
],
124+
)
118125
def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture, box_with_array):
119126
# GH#22074, GH#15966
120127
tz = tz_naive_fixture
@@ -123,16 +130,6 @@ def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture, box_with_arra
123130
dtarr = tm.box_expected(rng, box_with_array)
124131
assert_invalid_comparison(dtarr, other, box_with_array)
125132

126-
@pytest.mark.parametrize("other", [None, np.nan])
127-
def test_dt64arr_cmp_na_scalar_invalid(
128-
self, other, tz_naive_fixture, box_with_array
129-
):
130-
# GH#19301
131-
tz = tz_naive_fixture
132-
dti = pd.date_range("2016-01-01", periods=2, tz=tz)
133-
dtarr = tm.box_expected(dti, box_with_array)
134-
assert_invalid_comparison(dtarr, other, box_with_array)
135-
136133
def test_dt64arr_nat_comparison(self, tz_naive_fixture, box_with_array):
137134
# GH#22242, GH#22163 DataFrame considered NaT == ts incorrectly
138135
tz = tz_naive_fixture
@@ -258,15 +255,10 @@ def test_nat_comparisons_scalar(self, dtype, data, box_with_array):
258255
tm.assert_equal(left >= NaT, expected)
259256
tm.assert_equal(NaT <= left, expected)
260257

261-
def test_series_comparison_scalars(self):
258+
@pytest.mark.parametrize("val", [datetime(2000, 1, 4), datetime(2000, 1, 5)])
259+
def test_series_comparison_scalars(self, val):
262260
series = Series(date_range("1/1/2000", periods=10))
263261

264-
val = datetime(2000, 1, 4)
265-
result = series > val
266-
expected = Series([x > val for x in series])
267-
tm.assert_series_equal(result, expected)
268-
269-
val = series[5]
270262
result = series > val
271263
expected = Series([x > val for x in series])
272264
tm.assert_series_equal(result, expected)
@@ -1020,9 +1012,18 @@ def test_dt64arr_add_timestamp_raises(self, box_with_array):
10201012
# -------------------------------------------------------------
10211013
# Other Invalid Addition/Subtraction
10221014

1023-
@pytest.mark.parametrize("other", [3.14, np.array([2.0, 3.0])])
1024-
def test_dt64arr_add_sub_float(self, other, box_with_array):
1025-
dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq="D")
1015+
@pytest.mark.parametrize(
1016+
"other",
1017+
[
1018+
3.14,
1019+
np.array([2.0, 3.0]),
1020+
# GH#13078 datetime +/- Period is invalid
1021+
pd.Period("2011-01-01", freq="D"),
1022+
],
1023+
)
1024+
@pytest.mark.parametrize("dti_freq", [None, "D"])
1025+
def test_dt64arr_add_sub_invalid(self, dti_freq, other, box_with_array):
1026+
dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
10261027
dtarr = tm.box_expected(dti, box_with_array)
10271028
msg = "|".join(
10281029
[
@@ -1068,24 +1069,6 @@ def test_dt64arr_add_sub_parr(
10681069
with pytest.raises(TypeError, match=msg):
10691070
parr - dtarr
10701071

1071-
@pytest.mark.parametrize("dti_freq", [None, "D"])
1072-
def test_dt64arr_add_sub_period_scalar(self, dti_freq, box_with_array):
1073-
# GH#13078
1074-
# not supported, check TypeError
1075-
per = pd.Period("2011-01-01", freq="D")
1076-
1077-
idx = pd.DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
1078-
dtarr = tm.box_expected(idx, box_with_array)
1079-
msg = "|".join(["unsupported operand type", "cannot (add|subtract)"])
1080-
with pytest.raises(TypeError, match=msg):
1081-
dtarr + per
1082-
with pytest.raises(TypeError, match=msg):
1083-
per + dtarr
1084-
with pytest.raises(TypeError, match=msg):
1085-
dtarr - per
1086-
with pytest.raises(TypeError, match=msg):
1087-
per - dtarr
1088-
10891072

10901073
class TestDatetime64DateOffsetArithmetic:
10911074

@@ -1406,7 +1389,7 @@ def test_dt64arr_add_mixed_offset_array(self, box_with_array):
14061389
s = tm.box_expected(s, box_with_array)
14071390

14081391
warn = None if box_with_array is pd.DataFrame else PerformanceWarning
1409-
with tm.assert_produces_warning(warn, clear=[pd.core.arrays.datetimelike]):
1392+
with tm.assert_produces_warning(warn, clear=[dtl]):
14101393
other = pd.Index([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()])
14111394
other = tm.box_expected(other, box_with_array)
14121395
result = s + other
@@ -1435,19 +1418,19 @@ def test_dt64arr_add_sub_offset_ndarray(self, tz_naive_fixture, box_with_array):
14351418
other = np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])
14361419

14371420
warn = None if box_with_array is pd.DataFrame else PerformanceWarning
1438-
with tm.assert_produces_warning(warn, clear=[pd.core.arrays.datetimelike]):
1421+
with tm.assert_produces_warning(warn, clear=[dtl]):
14391422
res = dtarr + other
14401423
expected = DatetimeIndex(
14411424
[dti[n] + other[n] for n in range(len(dti))], name=dti.name, freq="infer"
14421425
)
14431426
expected = tm.box_expected(expected, box_with_array)
14441427
tm.assert_equal(res, expected)
14451428

1446-
with tm.assert_produces_warning(warn, clear=[pd.core.arrays.datetimelike]):
1429+
with tm.assert_produces_warning(warn, clear=[dtl]):
14471430
res2 = other + dtarr
14481431
tm.assert_equal(res2, expected)
14491432

1450-
with tm.assert_produces_warning(warn, clear=[pd.core.arrays.datetimelike]):
1433+
with tm.assert_produces_warning(warn, clear=[dtl]):
14511434
res = dtarr - other
14521435
expected = DatetimeIndex(
14531436
[dti[n] - other[n] for n in range(len(dti))], name=dti.name, freq="infer"
@@ -2168,16 +2151,16 @@ def test_dti_isub_tdi(self, tz_naive_fixture):
21682151
ids=lambda x: type(x).__name__,
21692152
)
21702153
@pytest.mark.parametrize("tz", [None, "US/Eastern"])
2171-
def test_add_datetimelike_and_dti(self, addend, tz):
2154+
def test_add_datetimelike_and_dtarr(self, box_with_array, addend, tz):
21722155
# GH#9631
21732156
dti = DatetimeIndex(["2011-01-01", "2011-01-02"]).tz_localize(tz)
2174-
msg = (
2175-
"cannot add DatetimeArray and {0}".format(type(addend).__name__)
2176-
).replace("DatetimeIndex", "DatetimeArray")
2157+
dtarr = tm.box_expected(dti, box_with_array)
2158+
msg = "cannot add DatetimeArray and"
2159+
21772160
with pytest.raises(TypeError, match=msg):
2178-
dti + addend
2161+
dtarr + addend
21792162
with pytest.raises(TypeError, match=msg):
2180-
addend + dti
2163+
addend + dtarr
21812164

21822165
# -------------------------------------------------------------
21832166

@@ -2257,13 +2240,6 @@ def test_timedelta64_equal_timedelta_supported_ops(self, op):
22572240

22582241
intervals = ["D", "h", "m", "s", "us"]
22592242

2260-
# TODO: unused
2261-
# npy16_mappings = {'D': 24 * 60 * 60 * 1000000,
2262-
# 'h': 60 * 60 * 1000000,
2263-
# 'm': 60 * 1000000,
2264-
# 's': 1000000,
2265-
# 'us': 1}
2266-
22672243
def timedelta64(*args):
22682244
# see casting notes in NumPy gh-12927
22692245
return np.sum(list(starmap(np.timedelta64, zip(args, intervals))))
@@ -2406,82 +2382,30 @@ def test_dti_add_series(self, tz, names):
24062382
result4 = index + ser.values
24072383
tm.assert_index_equal(result4, expected)
24082384

2385+
@pytest.mark.parametrize("other_box", [pd.Index, Series])
2386+
@pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
24092387
@pytest.mark.parametrize(
24102388
"names", [(None, None, None), ("foo", "bar", None), ("foo", "foo", "foo")]
24112389
)
2412-
def test_dti_add_offset_index(self, tz_naive_fixture, names):
2390+
def test_dti_addsub_offset_arraylike(self, tz_naive_fixture, names, op, other_box):
24132391
# GH#18849, GH#19744
2414-
tz = tz_naive_fixture
2415-
dti = pd.date_range("2017-01-01", periods=2, tz=tz, name=names[0])
2416-
other = pd.Index([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
2417-
2418-
with tm.assert_produces_warning(
2419-
PerformanceWarning, clear=[pd.core.arrays.datetimelike]
2420-
):
2421-
res = dti + other
2422-
expected = DatetimeIndex(
2423-
[dti[n] + other[n] for n in range(len(dti))], name=names[2], freq="infer"
2424-
)
2425-
tm.assert_index_equal(res, expected)
2426-
2427-
with tm.assert_produces_warning(
2428-
PerformanceWarning, clear=[pd.core.arrays.datetimelike]
2429-
):
2430-
res2 = other + dti
2431-
tm.assert_index_equal(res2, expected)
2432-
2433-
@pytest.mark.parametrize(
2434-
"names", [(None, None, None), ("foo", "bar", None), ("foo", "foo", "foo")]
2435-
)
2436-
def test_dti_sub_offset_index(self, tz_naive_fixture, names):
2437-
# GH#18824, GH#19744
2438-
tz = tz_naive_fixture
2439-
dti = pd.date_range("2017-01-01", periods=2, tz=tz, name=names[0])
2440-
other = pd.Index([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
2441-
2442-
with tm.assert_produces_warning(
2443-
PerformanceWarning, clear=[pd.core.arrays.datetimelike]
2444-
):
2445-
res = dti - other
2446-
expected = DatetimeIndex(
2447-
[dti[n] - other[n] for n in range(len(dti))], name=names[2], freq="infer"
2448-
)
2449-
tm.assert_index_equal(res, expected)
2392+
box = pd.Index
2393+
from .test_timedelta64 import get_upcast_box
24502394

2451-
@pytest.mark.parametrize(
2452-
"names", [(None, None, None), ("foo", "bar", None), ("foo", "foo", "foo")]
2453-
)
2454-
def test_dti_with_offset_series(self, tz_naive_fixture, names):
2455-
# GH#18849
24562395
tz = tz_naive_fixture
24572396
dti = pd.date_range("2017-01-01", periods=2, tz=tz, name=names[0])
2458-
other = Series([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
2459-
2460-
expected_add = Series(
2461-
[dti[n] + other[n] for n in range(len(dti))], name=names[2]
2462-
)
2397+
other = other_box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
24632398

2464-
with tm.assert_produces_warning(
2465-
PerformanceWarning, clear=[pd.core.arrays.datetimelike]
2466-
):
2467-
res = dti + other
2468-
tm.assert_series_equal(res, expected_add)
2399+
xbox = get_upcast_box(box, other)
24692400

2470-
with tm.assert_produces_warning(
2471-
PerformanceWarning, clear=[pd.core.arrays.datetimelike]
2472-
):
2473-
res2 = other + dti
2474-
tm.assert_series_equal(res2, expected_add)
2401+
with tm.assert_produces_warning(PerformanceWarning, clear=[dtl]):
2402+
res = op(dti, other)
24752403

2476-
expected_sub = Series(
2477-
[dti[n] - other[n] for n in range(len(dti))], name=names[2]
2404+
expected = DatetimeIndex(
2405+
[op(dti[n], other[n]) for n in range(len(dti))], name=names[2], freq="infer"
24782406
)
2479-
2480-
with tm.assert_produces_warning(
2481-
PerformanceWarning, clear=[pd.core.arrays.datetimelike]
2482-
):
2483-
res3 = dti - other
2484-
tm.assert_series_equal(res3, expected_sub)
2407+
expected = tm.box_expected(expected, xbox)
2408+
tm.assert_equal(res, expected)
24852409

24862410

24872411
@pytest.mark.parametrize("years", [-1, 0, 1])

0 commit comments

Comments (0)