From 7999af74970c77893e0f1f57a363999fabfed3a0 Mon Sep 17 00:00:00 2001 From: Brock Date: Tue, 27 Sep 2022 16:17:23 -0700 Subject: [PATCH 1/7] API: make Timestamp/Timedelta _as_unit public as_unit --- doc/source/reference/arrays.rst | 4 +++ pandas/_libs/tslibs/nattype.pyi | 1 + pandas/_libs/tslibs/nattype.pyx | 17 +++++++++++ pandas/_libs/tslibs/timedeltas.pyi | 4 ++- pandas/_libs/tslibs/timedeltas.pyx | 19 +++++++++++- pandas/_libs/tslibs/timestamps.pyi | 4 ++- pandas/_libs/tslibs/timestamps.pyx | 19 +++++++++++- pandas/core/arrays/datetimelike.py | 2 +- pandas/tests/arrays/test_timedeltas.py | 4 +-- pandas/tests/scalar/test_nat.py | 4 ++- .../tests/scalar/timedelta/test_timedelta.py | 28 ++++++++--------- .../tests/scalar/timestamp/test_timestamp.py | 30 +++++++++---------- .../tests/scalar/timestamp/test_timezones.py | 8 ++--- .../tests/scalar/timestamp/test_unary_ops.py | 14 ++++----- 14 files changed, 110 insertions(+), 48 deletions(-) diff --git a/doc/source/reference/arrays.rst b/doc/source/reference/arrays.rst index 61ee894f4b126..f7d08807276ec 100644 --- a/doc/source/reference/arrays.rst +++ b/doc/source/reference/arrays.rst @@ -139,6 +139,7 @@ Properties Timestamp.second Timestamp.tz Timestamp.tzinfo + Timestamp.unit Timestamp.value Timestamp.week Timestamp.weekofyear @@ -149,6 +150,7 @@ Methods .. autosummary:: :toctree: api/ + Timestamp.as_unit Timestamp.astimezone Timestamp.ceil Timestamp.combine @@ -247,6 +249,7 @@ Properties Timedelta.nanoseconds Timedelta.resolution Timedelta.seconds + Timedelta.unit Timedelta.value Timedelta.view @@ -255,6 +258,7 @@ Methods .. autosummary:: :toctree: api/ + Timedelta.as_unit Timedelta.ceil Timedelta.floor Timedelta.isoformat diff --git a/pandas/_libs/tslibs/nattype.pyi b/pandas/_libs/tslibs/nattype.pyi index e9ae46cee7aec..1332dcedd85d1 100644 --- a/pandas/_libs/tslibs/nattype.pyi +++ b/pandas/_libs/tslibs/nattype.pyi @@ -127,3 +127,4 @@ class NaTType: __le__: _NatComparison __gt__: _NatComparison __ge__: _NatComparison + def as_unit(self, round_ok: bool = ...) -> NaTType: ... diff --git a/pandas/_libs/tslibs/nattype.pyx b/pandas/_libs/tslibs/nattype.pyx index 55c5e478868cb..a454f73b55cec 100644 --- a/pandas/_libs/tslibs/nattype.pyx +++ b/pandas/_libs/tslibs/nattype.pyx @@ -1202,6 +1202,7 @@ default 'raise' NaT """, ) + @property def tz(self) -> None: return None @@ -1210,6 +1211,22 @@ default 'raise' def tzinfo(self) -> None: return None + def as_unit(self, str unit, bint round_ok=True) -> "NaTType": + """ + Convert the underlying int64 representaton to the given unit. + + Parameters + ---------- + unit : {"ns", "us", "ms", "s"} + round_ok : bool, default True + If False and the conversion requires rounding, raise. + + Returns + ------- + Timestamp + """ + return c_NaT + c_NaT = NaTType() # C-visible NaT = c_NaT # Python-visible diff --git a/pandas/_libs/tslibs/timedeltas.pyi b/pandas/_libs/tslibs/timedeltas.pyi index 1fb2bf1b45888..3a9997c2d282f 100644 --- a/pandas/_libs/tslibs/timedeltas.pyi +++ b/pandas/_libs/tslibs/timedeltas.pyi @@ -153,4 +153,6 @@ class Timedelta(timedelta): def freq(self) -> None: ... @property def is_populated(self) -> bool: ... - def _as_unit(self, unit: str, round_ok: bool = ...) -> Timedelta: ... + def as_unit(self, unit: str, round_ok: bool = ...) -> Timedelta: ... + @property + def unit(self) -> str: ... 
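For reference, the public API introduced above is meant to be used roughly as follows. This is an illustrative sketch against this branch, not part of the patch; the printed values assume the default nanosecond resolution of Timedelta and follow the `unit` property and `as_unit` semantics defined in this commit (and match the values asserted in the test updates later in the series).

>>> import pandas as pd
>>> td = pd.Timedelta(days=1)        # default nanosecond resolution
>>> td.unit
'ns'
>>> td_ms = td.as_unit("ms")         # re-express the underlying int64 in milliseconds
>>> td_ms.value
86400000
>>> td_ms.unit
'ms'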
diff --git a/pandas/_libs/tslibs/timedeltas.pyx b/pandas/_libs/tslibs/timedeltas.pyx index c9e997ffb405c..b77de23b3569c 100644 --- a/pandas/_libs/tslibs/timedeltas.pyx +++ b/pandas/_libs/tslibs/timedeltas.pyx @@ -1081,6 +1081,10 @@ cdef class _Timedelta(timedelta): ) return self._is_populated + @property + def unit(self) -> str: + return npy_unit_to_abbrev(self._reso) + def __hash__(_Timedelta self): if self._has_ns(): # Note: this does *not* satisfy the invariance @@ -1534,7 +1538,20 @@ cdef class _Timedelta(timedelta): # exposing as classmethod for testing return _timedelta_from_value_and_reso(value, reso) - def _as_unit(self, str unit, bint round_ok=True): + def as_unit(self, str unit, bint round_ok=True): + """ + Convert the underlying int64 representaton to the given unit. + + Parameters + ---------- + unit : {"ns", "us", "ms", "s"} + round_ok : bool, default True + If False and the conversion requires rounding, raise. + + Returns + ------- + Timedelta + """ dtype = np.dtype(f"m8[{unit}]") reso = get_unit_from_dtype(dtype) try: diff --git a/pandas/_libs/tslibs/timestamps.pyi b/pandas/_libs/tslibs/timestamps.pyi index e4be7fda43005..b718ba0bc4adb 100644 --- a/pandas/_libs/tslibs/timestamps.pyi +++ b/pandas/_libs/tslibs/timestamps.pyi @@ -221,4 +221,6 @@ class Timestamp(datetime): def days_in_month(self) -> int: ... @property def daysinmonth(self) -> int: ... - def _as_unit(self, unit: str, round_ok: bool = ...) -> Timestamp: ... + def as_unit(self, unit: str, round_ok: bool = ...) -> Timestamp: ... + @property + def unit(self) -> str: ... diff --git a/pandas/_libs/tslibs/timestamps.pyx b/pandas/_libs/tslibs/timestamps.pyx index 07c6e32028942..629e52ae7034a 100644 --- a/pandas/_libs/tslibs/timestamps.pyx +++ b/pandas/_libs/tslibs/timestamps.pyx @@ -257,6 +257,10 @@ cdef class _Timestamp(ABCTimestamp): ) return self._freq + @property + def unit(self) -> str: + return npy_unit_to_abbrev(self._reso) + # ----------------------------------------------------------------- # Constructors @@ -1113,7 +1117,20 @@ cdef class _Timestamp(ABCTimestamp): value = convert_reso(self.value, self._reso, reso, round_ok=round_ok) return type(self)._from_value_and_reso(value, reso=reso, tz=self.tzinfo) - def _as_unit(self, str unit, bint round_ok=True): + def as_unit(self, str unit, bint round_ok=True): + """ + Convert the underlying int64 representaton to the given unit. + + Parameters + ---------- + unit : {"ns", "us", "ms", "s"} + round_ok : bool, default True + If False and the conversion requires rounding, raise. + + Returns + ------- + Timestamp + """ dtype = np.dtype(f"M8[{unit}]") reso = get_unit_from_dtype(dtype) try: diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index 5a92cc3c8509c..32e7c239ae74f 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -1137,7 +1137,7 @@ def _add_datetimelike_scalar(self, other) -> DatetimeArray: # Just as with Timestamp/Timedelta, we cast to the lower resolution # so long as doing so is lossless. 
if self._reso < other._reso: - other = other._as_unit(self._unit, round_ok=False) + other = other.as_unit(self._unit, round_ok=False) else: unit = npy_unit_to_abbrev(other._reso) self = self._as_unit(unit) diff --git a/pandas/tests/arrays/test_timedeltas.py b/pandas/tests/arrays/test_timedeltas.py index de45d0b29889b..9fbea814ef346 100644 --- a/pandas/tests/arrays/test_timedeltas.py +++ b/pandas/tests/arrays/test_timedeltas.py @@ -104,7 +104,7 @@ def test_add_pdnat(self, tda): def test_add_datetimelike_scalar(self, tda, tz_naive_fixture): ts = pd.Timestamp("2016-01-01", tz=tz_naive_fixture) - expected = tda + ts._as_unit(tda._unit) + expected = tda + ts.as_unit(tda._unit) res = tda + ts tm.assert_extension_array_equal(res, expected) res = ts + tda @@ -119,7 +119,7 @@ def test_add_datetimelike_scalar(self, tda, tz_naive_fixture): # mismatched reso -> check that we don't give an incorrect result ts + tda - ts = ts._as_unit(tda._unit) + ts = ts.as_unit(tda._unit) exp_values = tda._ndarray + ts.asm8 expected = ( diff --git a/pandas/tests/scalar/test_nat.py b/pandas/tests/scalar/test_nat.py index 55577af7be9d9..4b2cae1b5cb98 100644 --- a/pandas/tests/scalar/test_nat.py +++ b/pandas/tests/scalar/test_nat.py @@ -190,7 +190,7 @@ def test_nat_iso_format(get_nat): @pytest.mark.parametrize( "klass,expected", [ - (Timestamp, ["freqstr", "normalize", "to_julian_date", "to_period"]), + (Timestamp, ["freqstr", "normalize", "to_julian_date", "to_period", "unit"]), ( Timedelta, [ @@ -200,6 +200,7 @@ def test_nat_iso_format(get_nat): "resolution_string", "to_pytimedelta", "to_timedelta64", + "unit", "view", ], ), @@ -262,6 +263,7 @@ def _get_overlap_public_nat_methods(klass, as_tuple=False): ( Timestamp, [ + "as_unit", "astimezone", "ceil", "combine", diff --git a/pandas/tests/scalar/timedelta/test_timedelta.py b/pandas/tests/scalar/timedelta/test_timedelta.py index 21f32cf2d2d1e..d601537aa6b0d 100644 --- a/pandas/tests/scalar/timedelta/test_timedelta.py +++ b/pandas/tests/scalar/timedelta/test_timedelta.py @@ -30,29 +30,29 @@ class TestAsUnit: def test_as_unit(self): td = Timedelta(days=1) - assert td._as_unit("ns") is td + assert td.as_unit("ns") is td - res = td._as_unit("us") + res = td.as_unit("us") assert res.value == td.value // 1000 assert res._reso == NpyDatetimeUnit.NPY_FR_us.value - rt = res._as_unit("ns") + rt = res.as_unit("ns") assert rt.value == td.value assert rt._reso == td._reso - res = td._as_unit("ms") + res = td.as_unit("ms") assert res.value == td.value // 1_000_000 assert res._reso == NpyDatetimeUnit.NPY_FR_ms.value - rt = res._as_unit("ns") + rt = res.as_unit("ns") assert rt.value == td.value assert rt._reso == td._reso - res = td._as_unit("s") + res = td.as_unit("s") assert res.value == td.value // 1_000_000_000 assert res._reso == NpyDatetimeUnit.NPY_FR_s.value - rt = res._as_unit("ns") + rt = res.as_unit("ns") assert rt.value == td.value assert rt._reso == td._reso @@ -63,15 +63,15 @@ def test_as_unit_overflows(self): msg = "Cannot cast 106752 days 00:00:00 to unit='ns' without overflow" with pytest.raises(OutOfBoundsTimedelta, match=msg): - td._as_unit("ns") + td.as_unit("ns") - res = td._as_unit("ms") + res = td.as_unit("ms") assert res.value == us // 1000 assert res._reso == NpyDatetimeUnit.NPY_FR_ms.value def test_as_unit_rounding(self): td = Timedelta(microseconds=1500) - res = td._as_unit("ms") + res = td.as_unit("ms") expected = Timedelta(milliseconds=1) assert res == expected @@ -80,18 +80,18 @@ def test_as_unit_rounding(self): assert res.value == 1 with 
pytest.raises(ValueError, match="Cannot losslessly convert units"): - td._as_unit("ms", round_ok=False) + td.as_unit("ms", round_ok=False) def test_as_unit_non_nano(self): # case where we are going neither to nor from nano - td = Timedelta(days=1)._as_unit("ms") + td = Timedelta(days=1).as_unit("ms") assert td.days == 1 assert td.value == 86_400_000 assert td.components.days == 1 assert td._d == 1 assert td.total_seconds() == 86400 - res = td._as_unit("us") + res = td.as_unit("us") assert res.value == 86_400_000_000 assert res.components.days == 1 assert res.components.hours == 0 @@ -733,7 +733,7 @@ def test_round_sanity(self, val, method): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_round_non_nano(self, unit): - td = Timedelta("1 days 02:34:57")._as_unit(unit) + td = Timedelta("1 days 02:34:57").as_unit(unit) res = td.round("min") assert res == Timedelta("1 days 02:35:00") diff --git a/pandas/tests/scalar/timestamp/test_timestamp.py b/pandas/tests/scalar/timestamp/test_timestamp.py index dc3ddc7361afd..2313164f370d1 100644 --- a/pandas/tests/scalar/timestamp/test_timestamp.py +++ b/pandas/tests/scalar/timestamp/test_timestamp.py @@ -938,7 +938,7 @@ def test_sub_datetimelike_mismatched_reso(self, ts_tz): NpyDatetimeUnit.NPY_FR_ms.value: "s", NpyDatetimeUnit.NPY_FR_s.value: "us", }[ts._reso] - other = ts._as_unit(unit) + other = ts.as_unit(unit) assert other._reso != ts._reso result = ts - other @@ -978,7 +978,7 @@ def test_sub_timedeltalike_mismatched_reso(self, ts_tz): NpyDatetimeUnit.NPY_FR_ms.value: "s", NpyDatetimeUnit.NPY_FR_s.value: "us", }[ts._reso] - other = Timedelta(0)._as_unit(unit) + other = Timedelta(0).as_unit(unit) assert other._reso != ts._reso result = ts + other @@ -1045,29 +1045,29 @@ class TestAsUnit: def test_as_unit(self): ts = Timestamp("1970-01-01") - assert ts._as_unit("ns") is ts + assert ts.as_unit("ns") is ts - res = ts._as_unit("us") + res = ts.as_unit("us") assert res.value == ts.value // 1000 assert res._reso == NpyDatetimeUnit.NPY_FR_us.value - rt = res._as_unit("ns") + rt = res.as_unit("ns") assert rt.value == ts.value assert rt._reso == ts._reso - res = ts._as_unit("ms") + res = ts.as_unit("ms") assert res.value == ts.value // 1_000_000 assert res._reso == NpyDatetimeUnit.NPY_FR_ms.value - rt = res._as_unit("ns") + rt = res.as_unit("ns") assert rt.value == ts.value assert rt._reso == ts._reso - res = ts._as_unit("s") + res = ts.as_unit("s") assert res.value == ts.value // 1_000_000_000 assert res._reso == NpyDatetimeUnit.NPY_FR_s.value - rt = res._as_unit("ns") + rt = res.as_unit("ns") assert rt.value == ts.value assert rt._reso == ts._reso @@ -1078,15 +1078,15 @@ def test_as_unit_overflows(self): msg = "Cannot cast 2262-04-12 00:00:00 to unit='ns' without overflow" with pytest.raises(OutOfBoundsDatetime, match=msg): - ts._as_unit("ns") + ts.as_unit("ns") - res = ts._as_unit("ms") + res = ts.as_unit("ms") assert res.value == us // 1000 assert res._reso == NpyDatetimeUnit.NPY_FR_ms.value def test_as_unit_rounding(self): ts = Timestamp(1_500_000) # i.e. 1500 microseconds - res = ts._as_unit("ms") + res = ts.as_unit("ms") expected = Timestamp(1_000_000) # i.e. 
1 millisecond assert res == expected @@ -1095,17 +1095,17 @@ def test_as_unit_rounding(self): assert res.value == 1 with pytest.raises(ValueError, match="Cannot losslessly convert units"): - ts._as_unit("ms", round_ok=False) + ts.as_unit("ms", round_ok=False) def test_as_unit_non_nano(self): # case where we are going neither to nor from nano - ts = Timestamp("1970-01-02")._as_unit("ms") + ts = Timestamp("1970-01-02").as_unit("ms") assert ts.year == 1970 assert ts.month == 1 assert ts.day == 2 assert ts.hour == ts.minute == ts.second == ts.microsecond == ts.nanosecond == 0 - res = ts._as_unit("s") + res = ts.as_unit("s") assert res.value == 24 * 3600 assert res.year == 1970 assert res.month == 1 diff --git a/pandas/tests/scalar/timestamp/test_timezones.py b/pandas/tests/scalar/timestamp/test_timezones.py index 874575fa9ad4c..fde89e6d83480 100644 --- a/pandas/tests/scalar/timestamp/test_timezones.py +++ b/pandas/tests/scalar/timestamp/test_timezones.py @@ -62,7 +62,7 @@ def test_tz_localize_pushes_out_of_bounds(self): def test_tz_localize_ambiguous_bool(self, unit): # make sure that we are correctly accepting bool values as ambiguous # GH#14402 - ts = Timestamp("2015-11-01 01:00:03")._as_unit(unit) + ts = Timestamp("2015-11-01 01:00:03").as_unit(unit) expected0 = Timestamp("2015-11-01 01:00:03-0500", tz="US/Central") expected1 = Timestamp("2015-11-01 01:00:03-0600", tz="US/Central") @@ -257,7 +257,7 @@ def test_timestamp_tz_localize_nonexistent_shift( tz = tz_type + tz if isinstance(shift, str): shift = "shift_" + shift - ts = Timestamp(start_ts)._as_unit(unit) + ts = Timestamp(start_ts).as_unit(unit) result = ts.tz_localize(tz, nonexistent=shift) expected = Timestamp(end_ts).tz_localize(tz) @@ -286,7 +286,7 @@ def test_timestamp_tz_localize_nonexistent_shift_invalid(self, offset, tz_type): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_timestamp_tz_localize_nonexistent_NaT(self, tz, unit): # GH 8917 - ts = Timestamp("2015-03-29 02:20:00")._as_unit(unit) + ts = Timestamp("2015-03-29 02:20:00").as_unit(unit) result = ts.tz_localize(tz, nonexistent="NaT") assert result is NaT @@ -294,7 +294,7 @@ def test_timestamp_tz_localize_nonexistent_NaT(self, tz, unit): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_timestamp_tz_localize_nonexistent_raise(self, tz, unit): # GH 8917 - ts = Timestamp("2015-03-29 02:20:00")._as_unit(unit) + ts = Timestamp("2015-03-29 02:20:00").as_unit(unit) msg = "2015-03-29 02:20:00" with pytest.raises(pytz.NonExistentTimeError, match=msg): ts.tz_localize(tz, nonexistent="raise") diff --git a/pandas/tests/scalar/timestamp/test_unary_ops.py b/pandas/tests/scalar/timestamp/test_unary_ops.py index 9c376c7a13efc..f9e25ab1db619 100644 --- a/pandas/tests/scalar/timestamp/test_unary_ops.py +++ b/pandas/tests/scalar/timestamp/test_unary_ops.py @@ -150,7 +150,7 @@ def test_round_minute_freq(self, test_input, freq, expected, rounder): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_ceil(self, unit): - dt = Timestamp("20130101 09:10:11")._as_unit(unit) + dt = Timestamp("20130101 09:10:11").as_unit(unit) result = dt.ceil("D") expected = Timestamp("20130102") assert result == expected @@ -158,7 +158,7 @@ def test_ceil(self, unit): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_floor(self, unit): - dt = Timestamp("20130101 09:10:11")._as_unit(unit) + dt = Timestamp("20130101 09:10:11").as_unit(unit) result = dt.floor("D") expected = Timestamp("20130101") assert result == expected @@ -172,7 +172,7 @@ def 
test_floor(self, unit): def test_round_dst_border_ambiguous(self, method, unit): # GH 18946 round near "fall back" DST ts = Timestamp("2017-10-29 00:00:00", tz="UTC").tz_convert("Europe/Madrid") - ts = ts._as_unit(unit) + ts = ts.as_unit(unit) # result = getattr(ts, method)("H", ambiguous=True) assert result == ts @@ -206,7 +206,7 @@ def test_round_dst_border_ambiguous(self, method, unit): ) def test_round_dst_border_nonexistent(self, method, ts_str, freq, unit): # GH 23324 round near "spring forward" DST - ts = Timestamp(ts_str, tz="America/Chicago")._as_unit(unit) + ts = Timestamp(ts_str, tz="America/Chicago").as_unit(unit) result = getattr(ts, method)(freq, nonexistent="shift_forward") expected = Timestamp("2018-03-11 03:00:00", tz="America/Chicago") assert result == expected @@ -486,7 +486,7 @@ def test_replace_across_dst(self, tz, normalize): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_replace_dst_border(self, unit): # Gh 7825 - t = Timestamp("2013-11-3", tz="America/Chicago")._as_unit(unit) + t = Timestamp("2013-11-3", tz="America/Chicago").as_unit(unit) result = t.replace(hour=3) expected = Timestamp("2013-11-3 03:00:00", tz="America/Chicago") assert result == expected @@ -498,7 +498,7 @@ def test_replace_dst_border(self, unit): def test_replace_dst_fold(self, fold, tz, unit): # GH 25017 d = datetime(2019, 10, 27, 2, 30) - ts = Timestamp(d, tz=tz)._as_unit(unit) + ts = Timestamp(d, tz=tz).as_unit(unit) result = ts.replace(hour=1, fold=fold) expected = Timestamp(datetime(2019, 10, 27, 1, 30)).tz_localize( tz, ambiguous=not fold @@ -513,7 +513,7 @@ def test_replace_dst_fold(self, fold, tz, unit): @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s"]) def test_normalize(self, tz_naive_fixture, arg, unit): tz = tz_naive_fixture - ts = Timestamp(arg, tz=tz)._as_unit(unit) + ts = Timestamp(arg, tz=tz).as_unit(unit) result = ts.normalize() expected = Timestamp("2013-11-30", tz=tz) assert result == expected From 5b7a570f8fff0fb79188445a3f70fadd259da527 Mon Sep 17 00:00:00 2001 From: Brock Date: Tue, 27 Sep 2022 18:32:00 -0700 Subject: [PATCH 2/7] update test --- pandas/tests/indexes/datetimes/test_indexing.py | 4 ++-- pandas/tests/indexes/timedeltas/test_indexing.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pandas/tests/indexes/datetimes/test_indexing.py b/pandas/tests/indexes/datetimes/test_indexing.py index 62fdff528bd84..6d2862de2c481 100644 --- a/pandas/tests/indexes/datetimes/test_indexing.py +++ b/pandas/tests/indexes/datetimes/test_indexing.py @@ -390,7 +390,7 @@ def test_take_fill_value_with_timezone(self): class TestGetLoc: def test_get_loc_key_unit_mismatch(self): idx = date_range("2000-01-01", periods=3) - key = idx[1]._as_unit("ms") + key = idx[1].as_unit("ms") loc = idx.get_loc(key) assert loc == 1 assert key in idx @@ -398,7 +398,7 @@ def test_get_loc_key_unit_mismatch(self): def test_get_loc_key_unit_mismatch_not_castable(self): dta = date_range("2000-01-01", periods=3)._data.astype("M8[s]") dti = DatetimeIndex(dta) - key = dta[0]._as_unit("ns") + pd.Timedelta(1) + key = dta[0].as_unit("ns") + pd.Timedelta(1) with pytest.raises( KeyError, match=r"Timestamp\('2000-01-01 00:00:00.000000001'\)" diff --git a/pandas/tests/indexes/timedeltas/test_indexing.py b/pandas/tests/indexes/timedeltas/test_indexing.py index bdf299f6dbbdf..5c753580a7d71 100644 --- a/pandas/tests/indexes/timedeltas/test_indexing.py +++ b/pandas/tests/indexes/timedeltas/test_indexing.py @@ -77,7 +77,7 @@ def test_timestamp_invalid_key(self, key): class 
TestGetLoc: def test_get_loc_key_unit_mismatch(self): idx = to_timedelta(["0 days", "1 days", "2 days"]) - key = idx[1]._as_unit("ms") + key = idx[1].as_unit("ms") loc = idx.get_loc(key) assert loc == 1 @@ -89,7 +89,7 @@ def test_get_loc_key_unit_mismatch_not_castable(self): tda2 = type(tda)._simple_new(arr, dtype=arr.dtype) tdi = TimedeltaIndex(tda2) assert tdi.dtype == "m8[s]" - key = tda[0]._as_unit("ns") + Timedelta(1) + key = tda[0].as_unit("ns") + Timedelta(1) with pytest.raises(KeyError, match=r"Timedelta\('0 days 00:00:00.000000001'\)"): tdi.get_loc(key) From c27c83ed956be5ec8a29d6ffa62d49d8b06289ce Mon Sep 17 00:00:00 2001 From: Brock Date: Tue, 27 Sep 2022 18:33:08 -0700 Subject: [PATCH 3/7] update test --- pandas/tests/scalar/timedelta/test_timedelta.py | 2 +- pandas/tests/scalar/timestamp/test_timestamp.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pandas/tests/scalar/timedelta/test_timedelta.py b/pandas/tests/scalar/timedelta/test_timedelta.py index b797ad15dac13..62ca608e6c7ce 100644 --- a/pandas/tests/scalar/timedelta/test_timedelta.py +++ b/pandas/tests/scalar/timedelta/test_timedelta.py @@ -239,7 +239,7 @@ def test_floordiv_numeric(self, td): def test_addsub_mismatched_reso(self, td): # need to cast to since td is out of bounds for ns, so # so we would raise OverflowError without casting - other = Timedelta(days=1)._as_unit("us") + other = Timedelta(days=1).as_unit("us") # td is out of bounds for ns result = td + other diff --git a/pandas/tests/scalar/timestamp/test_timestamp.py b/pandas/tests/scalar/timestamp/test_timestamp.py index 4fa97cd1e2b62..8227935592cba 100644 --- a/pandas/tests/scalar/timestamp/test_timestamp.py +++ b/pandas/tests/scalar/timestamp/test_timestamp.py @@ -964,7 +964,7 @@ def test_sub_datetimelike_mismatched_reso(self, ts_tz): if ts._reso < other._reso: # Case where rounding is lossy other2 = other + Timedelta._from_value_and_reso(1, other._reso) - exp = ts._as_unit(npy_unit_to_abbrev(other._reso)) - other2 + exp = ts.as_unit(npy_unit_to_abbrev(other._reso)) - other2 res = ts - other2 assert res == exp @@ -975,7 +975,7 @@ def test_sub_datetimelike_mismatched_reso(self, ts_tz): assert res._reso == max(ts._reso, other._reso) else: ts2 = ts + Timedelta._from_value_and_reso(1, ts._reso) - exp = ts2 - other._as_unit(npy_unit_to_abbrev(ts2._reso)) + exp = ts2 - other.as_unit(npy_unit_to_abbrev(ts2._reso)) res = ts2 - other assert res == exp @@ -1012,7 +1012,7 @@ def test_sub_timedeltalike_mismatched_reso(self, ts_tz): if ts._reso < other._reso: # Case where rounding is lossy other2 = other + Timedelta._from_value_and_reso(1, other._reso) - exp = ts._as_unit(npy_unit_to_abbrev(other._reso)) + other2 + exp = ts.as_unit(npy_unit_to_abbrev(other._reso)) + other2 res = ts + other2 assert res == exp assert res._reso == max(ts._reso, other._reso) @@ -1021,7 +1021,7 @@ def test_sub_timedeltalike_mismatched_reso(self, ts_tz): assert res._reso == max(ts._reso, other._reso) else: ts2 = ts + Timedelta._from_value_and_reso(1, ts._reso) - exp = ts2 + other._as_unit(npy_unit_to_abbrev(ts2._reso)) + exp = ts2 + other.as_unit(npy_unit_to_abbrev(ts2._reso)) res = ts2 + other assert res == exp @@ -1034,7 +1034,7 @@ def test_sub_timedelta64_mismatched_reso(self, ts_tz): ts = ts_tz res = ts + np.timedelta64(1, "ns") - exp = ts._as_unit("ns") + np.timedelta64(1, "ns") + exp = ts.as_unit("ns") + np.timedelta64(1, "ns") assert exp == res assert exp._reso == NpyDatetimeUnit.NPY_FR_ns.value From c4fab3d1e390a64d9b21f7236e6b76364c51de91 Mon Sep 17 
00:00:00 2001 From: Brock Date: Wed, 28 Sep 2022 14:14:34 -0700 Subject: [PATCH 4/7] update tests --- pandas/tests/arrays/test_datetimes.py | 2 +- pandas/tests/scalar/timestamp/test_timestamp.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pandas/tests/arrays/test_datetimes.py b/pandas/tests/arrays/test_datetimes.py index 83850c6cd2594..9ab642719cc8f 100644 --- a/pandas/tests/arrays/test_datetimes.py +++ b/pandas/tests/arrays/test_datetimes.py @@ -213,7 +213,7 @@ def test_add_mismatched_reso_doesnt_downcast(self): dti = pd.date_range("2016-01-01", periods=3) - td dta = dti._data._as_unit("us") - res = dta + td._as_unit("us") + res = dta + td.as_unit("us") # even though the result is an even number of days # (so we _could_ downcast to unit="s"), we do not. assert res._unit == "us" diff --git a/pandas/tests/scalar/timestamp/test_timestamp.py b/pandas/tests/scalar/timestamp/test_timestamp.py index 5076fcf837a02..07322b1931659 100644 --- a/pandas/tests/scalar/timestamp/test_timestamp.py +++ b/pandas/tests/scalar/timestamp/test_timestamp.py @@ -1032,8 +1032,8 @@ def test_sub_timedeltalike_mismatched_reso(self, ts_tz): def test_addition_doesnt_downcast_reso(self): # https://github.com/pandas-dev/pandas/pull/48748#pullrequestreview-1122635413 - ts = Timestamp(year=2022, month=1, day=1, microsecond=999999)._as_unit("us") - td = Timedelta(microseconds=1)._as_unit("us") + ts = Timestamp(year=2022, month=1, day=1, microsecond=999999).as_unit("us") + td = Timedelta(microseconds=1).as_unit("us") res = ts + td assert res._reso == ts._reso From 929afcc4381ec5988756152d65a202d2343219ce Mon Sep 17 00:00:00 2001 From: Brock Date: Wed, 28 Sep 2022 14:52:41 -0700 Subject: [PATCH 5/7] fix pyi typo --- pandas/_libs/tslibs/nattype.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pandas/_libs/tslibs/nattype.pyi b/pandas/_libs/tslibs/nattype.pyi index 1332dcedd85d1..72f55bb50895a 100644 --- a/pandas/_libs/tslibs/nattype.pyi +++ b/pandas/_libs/tslibs/nattype.pyi @@ -127,4 +127,4 @@ class NaTType: __le__: _NatComparison __gt__: _NatComparison __ge__: _NatComparison - def as_unit(self, round_ok: bool = ...) -> NaTType: ... + def as_unit(self, unit: str, round_ok: bool = ...) -> NaTType: ... 
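As exercised by the test_as_unit_rounding cases earlier in the series, the round_ok flag controls whether a lossy conversion is permitted. A minimal sketch of the intended behaviour on this branch (illustrative only; the value and error message are the ones asserted in those tests):

>>> import pandas as pd
>>> ts = pd.Timestamp(1_500_000)              # i.e. 1500 microseconds, nanosecond resolution
>>> ts.as_unit("ms").value                    # inexact conversion is allowed by default
1
>>> ts.as_unit("ms", round_ok=False)          # and refused when round_ok=False
Traceback (most recent call last):
    ...
ValueError: Cannot losslessly convert units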
From 4cafcdccf42593befc8f90bdc854152c20691242 Mon Sep 17 00:00:00 2001 From: Brock Date: Tue, 18 Oct 2022 15:05:16 -0700 Subject: [PATCH 6/7] fixup --- pandas/core/arrays/datetimes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index c95c4819c68be..672084cb67774 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -354,9 +354,9 @@ def _from_sequence_not_strict( data_unit = np.datetime_data(subarr.dtype)[0] data_dtype = tz_to_dtype(tz, data_unit) result = cls._simple_new(subarr, freq=freq, dtype=data_dtype) - if unit is not None and unit != result._unit: + if unit is not None and unit != result.unit: # If unit was specified in user-passed dtype, cast to it here - result = result._as_unit(unit) + result = result.as_unit(unit) if inferred_freq is None and freq is not None: # this condition precludes `freq_infer` From eb0bef59fc0541b99af497eae0eb820b474c127b Mon Sep 17 00:00:00 2001 From: Brock Date: Mon, 7 Nov 2022 12:58:53 -0800 Subject: [PATCH 7/7] fixup --- pandas/_libs/tslib.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pandas/_libs/tslib.pyx b/pandas/_libs/tslib.pyx index 756a6151cb191..d7c4c022a2556 100644 --- a/pandas/_libs/tslib.pyx +++ b/pandas/_libs/tslib.pyx @@ -906,7 +906,7 @@ def array_to_datetime_with_tz(ndarray values, tzinfo tz): else: # datetime64, tznaive pydatetime, int, float ts = ts.tz_localize(tz) - ts = ts._as_unit("ns") + ts = ts.as_unit("ns") ival = ts.value # Analogous to: result[i] = ival
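Taken together, the series also leaves arithmetic on non-nanosecond Timestamp/Timedelta operating at the operands' resolution rather than downcasting the result, as covered by test_addition_doesnt_downcast_reso above. A minimal sketch against the now-public API (illustrative only, assuming this branch):

>>> import pandas as pd
>>> ts = pd.Timestamp("2022-01-01 00:00:00.999999").as_unit("us")
>>> td = pd.Timedelta(microseconds=1).as_unit("us")
>>> res = ts + td
>>> res.unit                                  # the result keeps microsecond resolution
'us'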