@@ -1,13 +1,17 @@
 """
 Tests for DatetimeArray
 """
+from datetime import timedelta
 import operator
 
 import numpy as np
 import pytest
 
 from pandas._libs.tslibs import tz_compare
-from pandas._libs.tslibs.dtypes import NpyDatetimeUnit
+from pandas._libs.tslibs.dtypes import (
+    NpyDatetimeUnit,
+    npy_unit_to_abbrev,
+)
 
 from pandas.core.dtypes.dtypes import DatetimeTZDtype
 
@@ -221,6 +225,35 @@ def test_add_mismatched_reso_doesnt_downcast(self):
         # (so we _could_ downcast to unit="s"), we do not.
         assert res._unit == "us"
 
+    @pytest.mark.parametrize(
+        "scalar",
+        [
+            timedelta(hours=2),
+            pd.Timedelta(hours=2),
+            np.timedelta64(2, "h"),
+            np.timedelta64(2 * 3600 * 1000, "ms"),
+            pd.offsets.Minute(120),
+            pd.offsets.Hour(2),
+        ],
+    )
+    def test_add_timedeltalike_scalar_mismatched_reso(self, dta_dti, scalar):
+        dta, dti = dta_dti
+
+        td = pd.Timedelta(scalar)
+        exp_reso = max(dta._reso, td._reso)
+        exp_unit = npy_unit_to_abbrev(exp_reso)
+
+        expected = (dti + td)._data._as_unit(exp_unit)
+        result = dta + scalar
+        tm.assert_extension_array_equal(result, expected)
+
+        result = scalar + dta
+        tm.assert_extension_array_equal(result, expected)
+
+        expected = (dti - td)._data._as_unit(exp_unit)
+        result = dta - scalar
+        tm.assert_extension_array_equal(result, expected)
+
     def test_sub_datetimelike_scalar_mismatch(self):
         dti = pd.date_range("2016-01-01", periods=3)
         dta = dti._data._as_unit("us")