diff --git a/doc/source/user_guide/advanced.rst b/doc/source/user_guide/advanced.rst index 682fa4c9b4fcc..453536098cfbb 100644 --- a/doc/source/user_guide/advanced.rst +++ b/doc/source/user_guide/advanced.rst @@ -976,7 +976,7 @@ of :ref:`frequency aliases ` with datetime-like inter pd.interval_range(start=pd.Timestamp("2017-01-01"), periods=4, freq="W") - pd.interval_range(start=pd.Timedelta("0 days"), periods=3, freq="9H") + pd.interval_range(start=pd.Timedelta("0 days"), periods=3, freq="9h") Additionally, the ``closed`` parameter can be used to specify which side(s) the intervals are closed on. Intervals are closed on the right side by default. diff --git a/doc/source/user_guide/timedeltas.rst b/doc/source/user_guide/timedeltas.rst index cd567f8442671..5daf204f39bcf 100644 --- a/doc/source/user_guide/timedeltas.rst +++ b/doc/source/user_guide/timedeltas.rst @@ -392,7 +392,7 @@ The ``freq`` parameter can passed a variety of :ref:`frequency aliases = 0.9.3. (:issue:`5945`) - ``pd.stats.moments.rolling_var`` now uses Welford's method for increased numerical stability (:issue:`6817`) diff --git a/doc/source/whatsnew/v0.15.0.rst b/doc/source/whatsnew/v0.15.0.rst index dffb4c7b9ff9e..8dafed1efee97 100644 --- a/doc/source/whatsnew/v0.15.0.rst +++ b/doc/source/whatsnew/v0.15.0.rst @@ -332,16 +332,37 @@ Timezone handling improvements - ``tz_localize(None)`` for tz-aware ``Timestamp`` and ``DatetimeIndex`` now removes timezone holding local time, previously this resulted in ``Exception`` or ``TypeError`` (:issue:`7812`) - .. ipython:: python + .. code-block:: ipython + + In [58]: ts = pd.Timestamp('2014-08-01 09:00', tz='US/Eastern') + + In[59]: ts + Out[59]: Timestamp('2014-08-01 09:00:00-0400', tz='US/Eastern') - ts = pd.Timestamp('2014-08-01 09:00', tz='US/Eastern') - ts - ts.tz_localize(None) + In [60]: ts.tz_localize(None) + Out[60]: Timestamp('2014-08-01 09:00:00') - didx = pd.date_range(start='2014-08-01 09:00', freq='H', - periods=10, tz='US/Eastern') - didx - didx.tz_localize(None) + In [61]: didx = pd.date_range(start='2014-08-01 09:00', freq='H', + ....: periods=10, tz='US/Eastern') + ....: + + In [62]: didx + Out[62]: + DatetimeIndex(['2014-08-01 09:00:00-04:00', '2014-08-01 10:00:00-04:00', + '2014-08-01 11:00:00-04:00', '2014-08-01 12:00:00-04:00', + '2014-08-01 13:00:00-04:00', '2014-08-01 14:00:00-04:00', + '2014-08-01 15:00:00-04:00', '2014-08-01 16:00:00-04:00', + '2014-08-01 17:00:00-04:00', '2014-08-01 18:00:00-04:00'], + dtype='datetime64[ns, US/Eastern]', freq='H') + + In [63]: didx.tz_localize(None) + Out[63]: + DatetimeIndex(['2014-08-01 09:00:00', '2014-08-01 10:00:00', + '2014-08-01 11:00:00', '2014-08-01 12:00:00', + '2014-08-01 13:00:00', '2014-08-01 14:00:00', + '2014-08-01 15:00:00', '2014-08-01 16:00:00', + '2014-08-01 17:00:00', '2014-08-01 18:00:00'], + dtype='datetime64[ns]', freq=None) - ``tz_localize`` now accepts the ``ambiguous`` keyword which allows for passing an array of bools indicating whether the date belongs in DST or not, 'NaT' for setting transition times to NaT, @@ -1050,16 +1071,35 @@ Other: If ``Period`` freq is ``D``, ``H``, ``T``, ``S``, ``L``, ``U``, ``N``, ``Timedelta``-like can be added if the result can have same freq. Otherwise, only the same ``offsets`` can be added. - .. ipython:: python + .. 
code-block:: ipython - idx = pd.period_range('2014-07-01 09:00', periods=5, freq='H') - idx - idx + pd.offsets.Hour(2) - idx + pd.Timedelta('120m') + In [104]: idx = pd.period_range('2014-07-01 09:00', periods=5, freq='H') - idx = pd.period_range('2014-07', periods=5, freq='M') - idx - idx + pd.offsets.MonthEnd(3) + In [105]: idx + Out[105]: + PeriodIndex(['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00', + '2014-07-01 12:00', '2014-07-01 13:00'], + dtype='period[H]') + + In [106]: idx + pd.offsets.Hour(2) + Out[106]: + PeriodIndex(['2014-07-01 11:00', '2014-07-01 12:00', '2014-07-01 13:00', + '2014-07-01 14:00', '2014-07-01 15:00'], + dtype='period[H]') + + In [107]: idx + pd.Timedelta('120m') + Out[107]: + PeriodIndex(['2014-07-01 11:00', '2014-07-01 12:00', '2014-07-01 13:00', + '2014-07-01 14:00', '2014-07-01 15:00'], + dtype='period[H]') + + In [108]: idx = pd.period_range('2014-07', periods=5, freq='M') + + In [109]: idx + Out[109]: PeriodIndex(['2014-07', '2014-08', '2014-09', '2014-10', '2014-11'], dtype='period[M]') + + In [110]: idx + pd.offsets.MonthEnd(3) + Out[110]: PeriodIndex(['2014-10', '2014-11', '2014-12', '2015-01', '2015-02'], dtype='period[M]') - Added experimental compatibility with ``openpyxl`` for versions >= 2.0. The ``DataFrame.to_excel`` method ``engine`` keyword now recognizes ``openpyxl1`` and ``openpyxl2`` diff --git a/doc/source/whatsnew/v0.18.0.rst b/doc/source/whatsnew/v0.18.0.rst index 02e47553cd184..8984109da2a43 100644 --- a/doc/source/whatsnew/v0.18.0.rst +++ b/doc/source/whatsnew/v0.18.0.rst @@ -1000,10 +1000,16 @@ Other API changes ^^^^^^^^^^^^^^^^^ - ``DataFrame.between_time`` and ``Series.between_time`` now only parse a fixed set of time strings. Parsing of date strings is no longer supported and raises a ``ValueError``. (:issue:`11818`) - .. ipython:: python + .. code-block:: ipython + + In [107]: s = pd.Series(range(10), pd.date_range('2015-01-01', freq='H', periods=10)) - s = pd.Series(range(10), pd.date_range('2015-01-01', freq='H', periods=10)) - s.between_time("7:00am", "9:00am") + In [108]: s.between_time("7:00am", "9:00am") + Out[108]: + 2015-01-01 07:00:00 7 + 2015-01-01 08:00:00 8 + 2015-01-01 09:00:00 9 + Freq: H, Length: 3, dtype: int64 This will now raise. diff --git a/doc/source/whatsnew/v0.18.1.rst b/doc/source/whatsnew/v0.18.1.rst index ee6a60144bc35..85e0e63016729 100644 --- a/doc/source/whatsnew/v0.18.1.rst +++ b/doc/source/whatsnew/v0.18.1.rst @@ -256,26 +256,78 @@ Partial string indexing on ``DatetimeIndex`` when part of a ``MultiIndex`` Partial string indexing now matches on ``DateTimeIndex`` when part of a ``MultiIndex`` (:issue:`10331`) -.. ipython:: python +.. code-block:: ipython - dft2 = pd.DataFrame( - np.random.randn(20, 1), - columns=["A"], - index=pd.MultiIndex.from_product( - [pd.date_range("20130101", periods=10, freq="12H"), ["a", "b"]] - ), - ) - dft2 - dft2.loc["2013-01-05"] + In [20]: dft2 = pd.DataFrame( + ....: np.random.randn(20, 1), + ....: columns=["A"], + ....: index=pd.MultiIndex.from_product( + ....: [pd.date_range("20130101", periods=10, freq="12H"), ["a", "b"]] + ....: ), + ....: ) + ....: + + In [21]: dft2 + Out[21]: + A + 2013-01-01 00:00:00 a 0.469112 + b -0.282863 + 2013-01-01 12:00:00 a -1.509059 + b -1.135632 + 2013-01-02 00:00:00 a 1.212112 + ... ... 
+ 2013-01-04 12:00:00 b 0.271860 + 2013-01-05 00:00:00 a -0.424972 + b 0.567020 + 2013-01-05 12:00:00 a 0.276232 + b -1.087401 + + [20 rows x 1 columns] + + In [22]: dft2.loc["2013-01-05"] + Out[22]: + A + 2013-01-05 00:00:00 a -0.424972 + b 0.567020 + 2013-01-05 12:00:00 a 0.276232 + b -1.087401 + + [4 rows x 1 columns] On other levels -.. ipython:: python +.. code-block:: ipython - idx = pd.IndexSlice - dft2 = dft2.swaplevel(0, 1).sort_index() - dft2 - dft2.loc[idx[:, "2013-01-05"], :] + In [26]: idx = pd.IndexSlice + + In [27]: dft2 = dft2.swaplevel(0, 1).sort_index() + + In [28]: dft2 + Out[28]: + A + a 2013-01-01 00:00:00 0.469112 + 2013-01-01 12:00:00 -1.509059 + 2013-01-02 00:00:00 1.212112 + 2013-01-02 12:00:00 0.119209 + 2013-01-03 00:00:00 -0.861849 + ... ... + b 2013-01-03 12:00:00 1.071804 + 2013-01-04 00:00:00 -0.706771 + 2013-01-04 12:00:00 0.271860 + 2013-01-05 00:00:00 0.567020 + 2013-01-05 12:00:00 -1.087401 + + [20 rows x 1 columns] + + In [29]: dft2.loc[idx[:, "2013-01-05"], :] + Out[29]: + A + a 2013-01-05 00:00:00 -0.424972 + 2013-01-05 12:00:00 0.276232 + b 2013-01-05 00:00:00 0.567020 + 2013-01-05 12:00:00 -1.087401 + + [4 rows x 1 columns] .. _whatsnew_0181.enhancements.assembling: diff --git a/doc/source/whatsnew/v0.20.0.rst b/doc/source/whatsnew/v0.20.0.rst index 9005fafaf18f7..ae70eb078f6d9 100644 --- a/doc/source/whatsnew/v0.20.0.rst +++ b/doc/source/whatsnew/v0.20.0.rst @@ -614,11 +614,18 @@ New behavior: ``map`` on a ``Series`` with ``datetime64`` values may return ``int64`` dtypes rather than ``int32`` -.. ipython:: python +.. code-block:: ipython + + In [64]: s = pd.Series(pd.date_range('2011-01-02T00:00', '2011-01-02T02:00', freq='H') + ....: .tz_localize('Asia/Tokyo')) + ....: - s = pd.Series(pd.date_range('2011-01-02T00:00', '2011-01-02T02:00', freq='H') - .tz_localize('Asia/Tokyo')) - s + In [65]: s + Out[65]: + 0 2011-01-02 00:00:00+09:00 + 1 2011-01-02 01:00:00+09:00 + 2 2011-01-02 02:00:00+09:00 + Length: 3, dtype: datetime64[ns, Asia/Tokyo] Previous behavior: @@ -633,9 +640,14 @@ Previous behavior: New behavior: -.. ipython:: python +.. code-block:: ipython - s.map(lambda x: x.hour) + In [66]: s.map(lambda x: x.hour) + Out[66]: + 0 0 + 1 1 + 2 2 + Length: 3, dtype: int64 .. _whatsnew_0200.api_breaking.index_dt_field: @@ -659,10 +671,12 @@ Previous behaviour: New behavior: -.. ipython:: python +.. code-block:: ipython + + In [67]: idx = pd.date_range("2015-01-01", periods=5, freq='10H') - idx = pd.date_range("2015-01-01", periods=5, freq='10H') - idx.hour + In [68]: idx.hour + Out[68]: Index([0, 10, 20, 6, 16], dtype='int32') This has the advantage that specific ``Index`` methods are still available on the result. On the other hand, this might have backward incompatibilities: e.g. diff --git a/doc/source/whatsnew/v0.21.0.rst b/doc/source/whatsnew/v0.21.0.rst index f8eacd28fa795..62296550a472b 100644 --- a/doc/source/whatsnew/v0.21.0.rst +++ b/doc/source/whatsnew/v0.21.0.rst @@ -671,15 +671,35 @@ Previous behavior: New behavior: -.. ipython:: python +.. code-block:: ipython + + In [56]: pi = pd.period_range(start='2000-01-01', freq='D', periods=10) + + In [57]: s = pd.Series(np.arange(10), index=pi) - pi = pd.period_range(start='2000-01-01', freq='D', periods=10) + In [58]: s.resample('H').ohlc() + Out[58]: + open high low close + 2000-01-01 00:00 0.0 0.0 0.0 0.0 + 2000-01-01 01:00 NaN NaN NaN NaN + 2000-01-01 02:00 NaN NaN NaN NaN + 2000-01-01 03:00 NaN NaN NaN NaN + 2000-01-01 04:00 NaN NaN NaN NaN + ... ... ... ... ... 
+ 2000-01-10 19:00 NaN NaN NaN NaN + 2000-01-10 20:00 NaN NaN NaN NaN + 2000-01-10 21:00 NaN NaN NaN NaN + 2000-01-10 22:00 NaN NaN NaN NaN + 2000-01-10 23:00 NaN NaN NaN NaN - s = pd.Series(np.arange(10), index=pi) + [240 rows x 4 columns] - s.resample('H').ohlc() + In [59]: s.resample('M').ohlc() + Out[59]: + open high low close + 2000-01 0 9 0 9 - s.resample('M').ohlc() + [1 rows x 4 columns] .. _whatsnew_0210.api_breaking.pandas_eval: diff --git a/doc/source/whatsnew/v0.22.0.rst b/doc/source/whatsnew/v0.22.0.rst index c494b4f286662..a33a8f7addeef 100644 --- a/doc/source/whatsnew/v0.22.0.rst +++ b/doc/source/whatsnew/v0.22.0.rst @@ -187,16 +187,27 @@ entirely valid. *pandas 0.22.0* -.. ipython:: python +.. code-block:: ipython - idx = pd.DatetimeIndex(["2017-01-01", "2017-01-02"]) - pd.Series([1, 2], index=idx).resample("12H").sum() + In [14]: idx = pd.DatetimeIndex(["2017-01-01", "2017-01-02"]) + In [15]: pd.Series([1, 2], index=idx).resample("12H").sum() + Out[15]: + 2017-01-01 00:00:00 1 + 2017-01-01 12:00:00 0 + 2017-01-02 00:00:00 2 + Freq: 12H, Length: 3, dtype: int64 Once again, the ``min_count`` keyword is available to restore the 0.21 behavior. -.. ipython:: python +.. code-block:: ipython + + In [16]: pd.Series([1, 2], index=idx).resample("12H").sum(min_count=1) + Out[16]: + 2017-01-01 00:00:00 1.0 + 2017-01-01 12:00:00 NaN + 2017-01-02 00:00:00 2.0 + Freq: 12H, Length: 3, dtype: float64 - pd.Series([1, 2], index=idx).resample("12H").sum(min_count=1) Rolling and expanding ^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/source/whatsnew/v1.5.0.rst b/doc/source/whatsnew/v1.5.0.rst index 44728e7e552ab..8fa1361cc30c1 100644 --- a/doc/source/whatsnew/v1.5.0.rst +++ b/doc/source/whatsnew/v1.5.0.rst @@ -112,14 +112,33 @@ to the index in the resample when :meth:`.Resampler.apply` is used. of pandas, not specifying ``group_keys`` will default to the same behavior as ``group_keys=False``. -.. ipython:: python +.. code-block:: ipython - df = pd.DataFrame( - {'a': range(6)}, - index=pd.date_range("2021-01-01", periods=6, freq="8H") - ) - df.resample("D", group_keys=True).apply(lambda x: x) - df.resample("D", group_keys=False).apply(lambda x: x) + In [11]: df = pd.DataFrame( + ....: {'a': range(6)}, + ....: index=pd.date_range("2021-01-01", periods=6, freq="8H") + ....: ) + ....: + + In [12]: df.resample("D", group_keys=True).apply(lambda x: x) + Out[12]: + a + 2021-01-01 2021-01-01 00:00:00 0 + 2021-01-01 08:00:00 1 + 2021-01-01 16:00:00 2 + 2021-01-02 2021-01-02 00:00:00 3 + 2021-01-02 08:00:00 4 + 2021-01-02 16:00:00 5 + + In [13]: df.resample("D", group_keys=False).apply(lambda x: x) + Out[13]: + a + 2021-01-01 00:00:00 0 + 2021-01-01 08:00:00 1 + 2021-01-01 16:00:00 2 + 2021-01-02 00:00:00 3 + 2021-01-02 08:00:00 4 + 2021-01-02 16:00:00 5 Previously, the resulting index would depend upon the values returned by ``apply``, as seen in the following example. @@ -461,19 +480,20 @@ upon serialization. (Related issue :issue:`12997`) *Old Behavior* -.. ipython:: python +.. code-block:: ipython - index = pd.date_range( - start='2020-12-28 00:00:00', - end='2020-12-28 02:00:00', - freq='1H', - ) - a = pd.Series( - data=range(3), - index=index, - ) + In [32]: index = pd.date_range( + ....: start='2020-12-28 00:00:00', + ....: end='2020-12-28 02:00:00', + ....: freq='1H', + ....: ) + ....: -.. code-block:: ipython + In [33]: a = pd.Series( + ....: data=range(3), + ....: index=index, + ....: ) + ....: In [4]: from io import StringIO @@ -485,12 +505,16 @@ upon serialization. 
(Related issue :issue:`12997`) *New Behavior* -.. ipython:: python +.. code-block:: ipython + + In [34]: from io import StringIO + + In [35]: a.to_json(date_format='iso') + Out[35]: '{"2020-12-28T00:00:00.000Z":0,"2020-12-28T01:00:00.000Z":1,"2020-12-28T02:00:00.000Z":2}' - from io import StringIO - a.to_json(date_format='iso') # Roundtripping now works - pd.read_json(StringIO(a.to_json(date_format='iso')), typ="series").index == a.index + In [36]: pd.read_json(StringIO(a.to_json(date_format='iso')), typ="series").index == a.index + Out[36]: array([ True, True, True]) .. _whatsnew_150.notable_bug_fixes.groupby_value_counts_categorical: diff --git a/doc/source/whatsnew/v2.2.0.rst b/doc/source/whatsnew/v2.2.0.rst index 017a28ffb573a..04a76b481bfc3 100644 --- a/doc/source/whatsnew/v2.2.0.rst +++ b/doc/source/whatsnew/v2.2.0.rst @@ -232,7 +232,7 @@ For example: Other Deprecations ^^^^^^^^^^^^^^^^^^ -- Changed :meth:`Timedelta.resolution_string` to return ``min``, ``s``, ``ms``, ``us``, and ``ns`` instead of ``T``, ``S``, ``L``, ``U``, and ``N``, for compatibility with respective deprecations in frequency aliases (:issue:`52536`) +- Changed :meth:`Timedelta.resolution_string` to return ``h``, ``min``, ``s``, ``ms``, ``us``, and ``ns`` instead of ``H``, ``T``, ``S``, ``L``, ``U``, and ``N``, for compatibility with respective deprecations in frequency aliases (:issue:`52536`) - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_clipboard`. (:issue:`54229`) - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_csv` except ``path_or_buf``. (:issue:`54229`) - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_dict`. (:issue:`54229`) @@ -252,9 +252,10 @@ Other Deprecations - Deprecated including the groups in computations when using :meth:`DataFrameGroupBy.apply` and :meth:`DataFrameGroupBy.resample`; pass ``include_groups=False`` to exclude the groups (:issue:`7155`) - Deprecated not passing a tuple to :class:`DataFrameGroupBy.get_group` or :class:`SeriesGroupBy.get_group` when grouping by a length-1 list-like (:issue:`25971`) - Deprecated string ``A`` denoting frequency in :class:`YearEnd` and strings ``A-DEC``, ``A-JAN``, etc. 
denoting annual frequencies with various fiscal year ends (:issue:`52536`) -- Deprecated strings ``S``, ``U``, and ``N`` denoting units in :func:`to_timedelta` (:issue:`52536`) +- Deprecated strings ``H``, ``BH``, and ``CBH`` denoting frequencies in :class:`Hour`, :class:`BusinessHour`, :class:`CustomBusinessHour` (:issue:`52536`) +- Deprecated strings ``H``, ``S``, ``U``, and ``N`` denoting units in :func:`to_timedelta` (:issue:`52536`) +- Deprecated strings ``H``, ``T``, ``S``, ``L``, ``U``, and ``N`` denoting units in :class:`Timedelta` (:issue:`52536`) - Deprecated strings ``T``, ``S``, ``L``, ``U``, and ``N`` denoting frequencies in :class:`Minute`, :class:`Second`, :class:`Milli`, :class:`Micro`, :class:`Nano` (:issue:`52536`) -- Deprecated strings ``T``, ``S``, ``L``, ``U``, and ``N`` denoting units in :class:`Timedelta` (:issue:`52536`) - Deprecated the extension test classes ``BaseNoReduceTests``, ``BaseBooleanReduceTests``, and ``BaseNumericReduceTests``, use ``BaseReduceTests`` instead (:issue:`54663`) - Deprecated the option ``mode.data_manager`` and the ``ArrayManager``; only the ``BlockManager`` will be available in future versions (:issue:`55043`) - Deprecating downcasting the results of :meth:`DataFrame.fillna`, :meth:`Series.fillna`, :meth:`DataFrame.ffill`, :meth:`Series.ffill`, :meth:`DataFrame.bfill`, :meth:`Series.bfill` in object-dtype cases. To opt in to the future version, use ``pd.set_option("future.no_silent_downcasting", True)`` (:issue:`54261`) diff --git a/pandas/_libs/tslibs/dtypes.pyx b/pandas/_libs/tslibs/dtypes.pyx index ebb6e3a240cbe..86f620beeec3b 100644 --- a/pandas/_libs/tslibs/dtypes.pyx +++ b/pandas/_libs/tslibs/dtypes.pyx @@ -142,7 +142,7 @@ _period_code_map = { "B": PeriodDtypeCode.B, # Business days "D": PeriodDtypeCode.D, # Daily - "H": PeriodDtypeCode.H, # Hourly + "h": PeriodDtypeCode.H, # Hourly "min": PeriodDtypeCode.T, # Minutely "s": PeriodDtypeCode.S, # Secondly "ms": PeriodDtypeCode.L, # Millisecondly @@ -175,7 +175,7 @@ _attrname_to_abbrevs = { "quarter": "Q", "month": "M", "day": "D", - "hour": "H", + "hour": "h", "minute": "min", "second": "s", "millisecond": "ms", @@ -203,7 +203,7 @@ OFFSET_TO_PERIOD_FREQSTR: dict = { "ms": "ms", "us": "us", "ns": "ns", - "H": "H", + "h": "h", "Q": "Q", "Y": "Y", "W": "W", @@ -244,6 +244,9 @@ DEPR_ABBREVS: dict[str, str]= { "A-SEP": "Y-SEP", "A-OCT": "Y-OCT", "A-NOV": "Y-NOV", + "H": "h", + "BH": "bh", + "CBH": "cbh", "T": "min", "t": "min", "S": "s", @@ -339,10 +342,10 @@ class Resolution(Enum): Examples -------- - >>> Resolution.get_reso_from_freqstr('H') + >>> Resolution.get_reso_from_freqstr('h') - >>> Resolution.get_reso_from_freqstr('H') == Resolution.RESO_HR + >>> Resolution.get_reso_from_freqstr('h') == Resolution.RESO_HR True """ try: diff --git a/pandas/_libs/tslibs/nattype.pyx b/pandas/_libs/tslibs/nattype.pyx index bb497f2e17b93..9f9549b93fefe 100644 --- a/pandas/_libs/tslibs/nattype.pyx +++ b/pandas/_libs/tslibs/nattype.pyx @@ -1000,7 +1000,7 @@ timedelta}, default 'raise' A timestamp can be rounded using multiple frequency units: - >>> ts.round(freq='H') # hour + >>> ts.round(freq='h') # hour Timestamp('2020-03-14 16:00:00') >>> ts.round(freq='min') # minute @@ -1017,9 +1017,9 @@ timedelta}, default 'raise' >>> ts.round(freq='5min') Timestamp('2020-03-14 15:35:00') - or a combination of multiple units, like '1H30min' (i.e. 1 hour and 30 minutes): + or a combination of multiple units, like '1h30min' (i.e. 
1 hour and 30 minutes): - >>> ts.round(freq='1H30min') + >>> ts.round(freq='1h30min') Timestamp('2020-03-14 15:00:00') Analogous for ``pd.NaT``: @@ -1032,10 +1032,10 @@ timedelta}, default 'raise' >>> ts_tz = pd.Timestamp("2021-10-31 01:30:00").tz_localize("Europe/Amsterdam") - >>> ts_tz.round("H", ambiguous=False) + >>> ts_tz.round("h", ambiguous=False) Timestamp('2021-10-31 02:00:00+0100', tz='Europe/Amsterdam') - >>> ts_tz.round("H", ambiguous=True) + >>> ts_tz.round("h", ambiguous=True) Timestamp('2021-10-31 02:00:00+0200', tz='Europe/Amsterdam') """, ) @@ -1089,7 +1089,7 @@ timedelta}, default 'raise' A timestamp can be floored using multiple frequency units: - >>> ts.floor(freq='H') # hour + >>> ts.floor(freq='h') # hour Timestamp('2020-03-14 15:00:00') >>> ts.floor(freq='min') # minute @@ -1106,9 +1106,9 @@ timedelta}, default 'raise' >>> ts.floor(freq='5min') Timestamp('2020-03-14 15:30:00') - or a combination of multiple units, like '1H30min' (i.e. 1 hour and 30 minutes): + or a combination of multiple units, like '1h30min' (i.e. 1 hour and 30 minutes): - >>> ts.floor(freq='1H30min') + >>> ts.floor(freq='1h30min') Timestamp('2020-03-14 15:00:00') Analogous for ``pd.NaT``: @@ -1121,10 +1121,10 @@ timedelta}, default 'raise' >>> ts_tz = pd.Timestamp("2021-10-31 03:30:00").tz_localize("Europe/Amsterdam") - >>> ts_tz.floor("2H", ambiguous=False) + >>> ts_tz.floor("2h", ambiguous=False) Timestamp('2021-10-31 02:00:00+0100', tz='Europe/Amsterdam') - >>> ts_tz.floor("2H", ambiguous=True) + >>> ts_tz.floor("2h", ambiguous=True) Timestamp('2021-10-31 02:00:00+0200', tz='Europe/Amsterdam') """, ) @@ -1178,7 +1178,7 @@ timedelta}, default 'raise' A timestamp can be ceiled using multiple frequency units: - >>> ts.ceil(freq='H') # hour + >>> ts.ceil(freq='h') # hour Timestamp('2020-03-14 16:00:00') >>> ts.ceil(freq='min') # minute @@ -1195,9 +1195,9 @@ timedelta}, default 'raise' >>> ts.ceil(freq='5min') Timestamp('2020-03-14 15:35:00') - or a combination of multiple units, like '1H30min' (i.e. 1 hour and 30 minutes): + or a combination of multiple units, like '1h30min' (i.e. 
1 hour and 30 minutes): - >>> ts.ceil(freq='1H30min') + >>> ts.ceil(freq='1h30min') Timestamp('2020-03-14 16:30:00') Analogous for ``pd.NaT``: @@ -1210,10 +1210,10 @@ timedelta}, default 'raise' >>> ts_tz = pd.Timestamp("2021-10-31 01:30:00").tz_localize("Europe/Amsterdam") - >>> ts_tz.ceil("H", ambiguous=False) + >>> ts_tz.ceil("h", ambiguous=False) Timestamp('2021-10-31 02:00:00+0100', tz='Europe/Amsterdam') - >>> ts_tz.ceil("H", ambiguous=True) + >>> ts_tz.ceil("h", ambiguous=True) Timestamp('2021-10-31 02:00:00+0200', tz='Europe/Amsterdam') """, ) diff --git a/pandas/_libs/tslibs/offsets.pyx b/pandas/_libs/tslibs/offsets.pyx index 534d8e9926b38..042d5dafe3046 100644 --- a/pandas/_libs/tslibs/offsets.pyx +++ b/pandas/_libs/tslibs/offsets.pyx @@ -603,10 +603,10 @@ cdef class BaseOffset: Examples -------- >>> pd.offsets.Hour().name - 'H' + 'h' >>> pd.offsets.Hour(5).name - 'H' + 'h' """ return self.rule_code @@ -629,7 +629,7 @@ cdef class BaseOffset: '<5 * DateOffsets>' >>> pd.offsets.BusinessHour(2).freqstr - '2BH' + '2bh' >>> pd.offsets.Nano().freqstr 'ns' @@ -1166,7 +1166,7 @@ cdef class Hour(Tick): Timestamp('2022-12-09 11:00:00') """ _nanos_inc = 3600 * 1_000_000_000 - _prefix = "H" + _prefix = "h" _period_dtype_code = PeriodDtypeCode.H _creso = NPY_DATETIMEUNIT.NPY_FR_h @@ -1630,7 +1630,7 @@ cdef class BusinessMixin(SingleConstructorOffset): # Older (<0.22.0) versions have offset attribute instead of _offset self._offset = state.pop("offset") - if self._prefix.startswith("C"): + if self._prefix.startswith(("C", "c")): # i.e. this is a Custom class weekmask = state.pop("weekmask") holidays = state.pop("holidays") @@ -1696,7 +1696,7 @@ cdef class BusinessDay(BusinessMixin): s = td.seconds hrs = int(s / 3600) if hrs != 0: - off_str += str(hrs) + "H" + off_str += str(hrs) + "h" s -= hrs * 3600 mts = int(s / 60) if mts != 0: @@ -1893,10 +1893,10 @@ cdef class BusinessHour(BusinessMixin): '2022-12-12 06:00:00', '2022-12-12 07:00:00', '2022-12-12 10:00:00', '2022-12-12 11:00:00', '2022-12-12 15:00:00', '2022-12-12 16:00:00'], - dtype='datetime64[ns]', freq='BH') + dtype='datetime64[ns]', freq='bh') """ - _prefix = "BH" + _prefix = "bh" _anchor = 0 _attributes = tuple(["n", "normalize", "start", "end", "offset"]) _adjust_dst = False @@ -2016,7 +2016,7 @@ cdef class BusinessHour(BusinessMixin): nb_offset = 1 else: nb_offset = -1 - if self._prefix.startswith("C"): + if self._prefix.startswith(("c")): # CustomBusinessHour return CustomBusinessDay( n=nb_offset, @@ -2176,7 +2176,7 @@ cdef class BusinessHour(BusinessMixin): # adjust by business days first if bd != 0: - if self._prefix.startswith("C"): + if self._prefix.startswith("c"): # GH#30593 this is a Custom offset skip_bd = CustomBusinessDay( n=bd, @@ -2242,7 +2242,7 @@ cdef class BusinessHour(BusinessMixin): dt = datetime( dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond ) - # Valid BH can be on the different BusinessDay during midnight + # Valid bh can be on the different BusinessDay during midnight # Distinguish by the time spent from previous opening time return self._is_on_offset(dt) @@ -2252,7 +2252,7 @@ cdef class BusinessHour(BusinessMixin): """ # if self.normalize and not _is_normalized(dt): # return False - # Valid BH can be on the different BusinessDay during midnight + # Valid bh can be on the different BusinessDay during midnight # Distinguish by the time spent from previous opening time if self.n >= 0: op = self._prev_opening_time(dt) @@ -4277,7 +4277,7 @@ cdef class CustomBusinessHour(BusinessHour): 
'2022-12-12 06:00:00', '2022-12-12 07:00:00', '2022-12-12 10:00:00', '2022-12-12 11:00:00', '2022-12-12 15:00:00', '2022-12-12 16:00:00'], - dtype='datetime64[ns]', freq='CBH') + dtype='datetime64[ns]', freq='cbh') Business days can be specified by ``weekmask`` parameter. To convert the returned datetime object to its string representation @@ -4306,10 +4306,10 @@ cdef class CustomBusinessHour(BusinessHour): '2022-12-15 11:00:00', '2022-12-15 12:00:00', '2022-12-16 10:00:00', '2022-12-16 11:00:00', '2022-12-16 12:00:00'], - dtype='datetime64[ns]', freq='CBH') + dtype='datetime64[ns]', freq='cbh') """ - _prefix = "CBH" + _prefix = "cbh" _anchor = 0 _attributes = tuple( ["n", "normalize", "weekmask", "holidays", "calendar", "start", "end", "offset"] @@ -4549,11 +4549,11 @@ prefix_mapping = { BusinessMonthEnd, # 'BM' BQuarterEnd, # 'BQ' BQuarterBegin, # 'BQS' - BusinessHour, # 'BH' + BusinessHour, # 'bh' CustomBusinessDay, # 'C' CustomBusinessMonthEnd, # 'CBM' CustomBusinessMonthBegin, # 'CBMS' - CustomBusinessHour, # 'CBH' + CustomBusinessHour, # 'cbh' MonthEnd, # 'ME' MonthBegin, # 'MS' Nano, # 'ns' @@ -4566,7 +4566,7 @@ prefix_mapping = { QuarterEnd, # 'Q' QuarterBegin, # 'QS' Milli, # 'ms' - Hour, # 'H' + Hour, # 'h' Day, # 'D' WeekOfMonth, # 'WOM' FY5253, @@ -4598,7 +4598,7 @@ _lite_rule_alias = { "ns": "ns", } -_dont_uppercase = {"MS", "ms", "s", "me"} +_dont_uppercase = {"h", "bh", "cbh", "MS", "ms", "s", "me"} INVALID_FREQ_ERR_MSG = "Invalid frequency: {0}" @@ -4667,7 +4667,7 @@ cpdef to_offset(freq, bint is_period=False): >>> to_offset("5min") <5 * Minutes> - >>> to_offset("1D1H") + >>> to_offset("1D1h") <25 * Hours> >>> to_offset("2W") @@ -4735,17 +4735,18 @@ cpdef to_offset(freq, bint is_period=False): if prefix in c_DEPR_ABBREVS: warnings.warn( - f"\'{prefix}\' is deprecated and will be removed in a " - f"future version. Please use \'{c_DEPR_ABBREVS.get(prefix)}\' " + f"\'{prefix}\' is deprecated and will be removed " + f"in a future version. Please use " + f"\'{c_DEPR_ABBREVS.get(prefix)}\' " f"instead of \'{prefix}\'.", FutureWarning, stacklevel=find_stack_level(), ) prefix = c_DEPR_ABBREVS[prefix] - if prefix in {"D", "H", "min", "s", "ms", "us", "ns"}: - # For these prefixes, we have something like "3H" or - # "2.5T", so we can construct a Timedelta with the + if prefix in {"D", "h", "min", "s", "ms", "us", "ns"}: + # For these prefixes, we have something like "3h" or + # "2.5min", so we can construct a Timedelta with the # matching unit and get our offset from delta_to_tick td = Timedelta(1, unit=prefix) off = delta_to_tick(td) diff --git a/pandas/_libs/tslibs/parsing.pyx b/pandas/_libs/tslibs/parsing.pyx index 71be1d213437a..b23611124ea7c 100644 --- a/pandas/_libs/tslibs/parsing.pyx +++ b/pandas/_libs/tslibs/parsing.pyx @@ -716,7 +716,7 @@ cdef datetime dateutil_parse( elif res.tzoffset: ret = ret.replace(tzinfo=tzoffset(res.tzname, res.tzoffset)) - # dateutil can return a datetime with a tzoffset outside of (-24H, 24H) + # dateutil can return a datetime with a tzoffset outside of (-24h, 24h) # bounds, which is invalid (can be constructed, but raises if we call # str(ret)). Check that and raise here if necessary. 
try: diff --git a/pandas/_libs/tslibs/period.pyx b/pandas/_libs/tslibs/period.pyx index 5fecc77044b4b..cacfe43b236d8 100644 --- a/pandas/_libs/tslibs/period.pyx +++ b/pandas/_libs/tslibs/period.pyx @@ -1942,8 +1942,8 @@ cdef class _Period(PeriodMixin): Examples -------- >>> period = pd.Period('2023-1-1', freq='D') - >>> period.asfreq('H') - Period('2023-01-01 23:00', 'H') + >>> period.asfreq('h') + Period('2023-01-01 23:00', 'h') """ freq = self._maybe_convert_freq(freq) how = validate_end_alias(how) @@ -2054,7 +2054,7 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> p = pd.Period("2018-03-11", freq='H') + >>> p = pd.Period("2018-03-11", freq='h') >>> p.day 11 """ @@ -2155,7 +2155,7 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> p = pd.Period("2018-03-11", "H") + >>> p = pd.Period("2018-03-11", "h") >>> p.weekofyear 10 @@ -2186,7 +2186,7 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> p = pd.Period("2018-03-11", "H") + >>> p = pd.Period("2018-03-11", "h") >>> p.week 10 @@ -2226,14 +2226,14 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> per = pd.Period('2017-12-31 22:00', 'H') + >>> per = pd.Period('2017-12-31 22:00', 'h') >>> per.day_of_week 6 For periods that span over multiple days, the day at the beginning of the period is returned. - >>> per = pd.Period('2017-12-31 22:00', '4H') + >>> per = pd.Period('2017-12-31 22:00', '4h') >>> per.day_of_week 6 >>> per.start_time.day_of_week @@ -2277,14 +2277,14 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> per = pd.Period('2017-12-31 22:00', 'H') + >>> per = pd.Period('2017-12-31 22:00', 'h') >>> per.dayofweek 6 For periods that span over multiple days, the day at the beginning of the period is returned. - >>> per = pd.Period('2017-12-31 22:00', '4H') + >>> per = pd.Period('2017-12-31 22:00', '4h') >>> per.dayofweek 6 >>> per.start_time.dayofweek @@ -2326,7 +2326,7 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> period = pd.Period("2015-10-23", freq='H') + >>> period = pd.Period("2015-10-23", freq='h') >>> period.day_of_year 296 >>> period = pd.Period("2012-12-31", freq='D') @@ -2447,7 +2447,7 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> p = pd.Period("2018-03-11", freq='H') + >>> p = pd.Period("2018-03-11", freq='h') >>> p.daysinmonth 31 """ @@ -2482,8 +2482,8 @@ cdef class _Period(PeriodMixin): Examples -------- - >>> pd.Period.now('H') # doctest: +SKIP - Period('2023-06-12 11:00', 'H') + >>> pd.Period.now('h') # doctest: +SKIP + Period('2023-06-12 11:00', 'h') """ return Period(datetime.now(), freq=freq) diff --git a/pandas/_libs/tslibs/timedeltas.pyx b/pandas/_libs/tslibs/timedeltas.pyx index 2f6fa35cae070..e5d81bd5928b9 100644 --- a/pandas/_libs/tslibs/timedeltas.pyx +++ b/pandas/_libs/tslibs/timedeltas.pyx @@ -898,8 +898,8 @@ cdef int64_t parse_iso_format_string(str ts) except? -1: elif c in ["W", "D", "H", "M"]: if c in ["H", "M"] and len(number) > 2: raise ValueError(err_msg) - if c == "M": - c = "min" + if c in ["M", "H"]: + c = c.replace("M", "min").replace("H", "h") unit.append(c) r = timedelta_from_spec(number, "0", unit) result += timedelta_as_neg(r, neg) @@ -1442,7 +1442,7 @@ cdef class _Timedelta(timedelta): Resolution: Return value * Days: 'D' - * Hours: 'H' + * Hours: 'h' * Minutes: 'min' * Seconds: 's' * Milliseconds: 'ms' @@ -1484,7 +1484,7 @@ cdef class _Timedelta(timedelta): elif self._m: return "min" elif self._h: - return "H" + return "h" else: return "D" @@ -1725,8 +1725,8 @@ class Timedelta(_Timedelta): .. 
deprecated:: 2.2.0 - Values `T`, `S`, `L`, `U`, and `N` are deprecated in favour of the values - `min`, `s`, `ms`, `us`, and `ns`. + Values `H`, `T`, `S`, `L`, `U`, and `N` are deprecated in favour + of the values `h`, `min`, `s`, `ms`, `us`, and `ns`. **kwargs Available kwargs: {days, seconds, microseconds, diff --git a/pandas/_libs/tslibs/timestamps.pyx b/pandas/_libs/tslibs/timestamps.pyx index 8bf1ebb9bf608..edd061fd8cdf1 100644 --- a/pandas/_libs/tslibs/timestamps.pyx +++ b/pandas/_libs/tslibs/timestamps.pyx @@ -1982,7 +1982,7 @@ timedelta}, default 'raise' A timestamp can be rounded using multiple frequency units: - >>> ts.round(freq='H') # hour + >>> ts.round(freq='h') # hour Timestamp('2020-03-14 16:00:00') >>> ts.round(freq='min') # minute @@ -1999,9 +1999,9 @@ timedelta}, default 'raise' >>> ts.round(freq='5min') Timestamp('2020-03-14 15:35:00') - or a combination of multiple units, like '1H30min' (i.e. 1 hour and 30 minutes): + or a combination of multiple units, like '1h30min' (i.e. 1 hour and 30 minutes): - >>> ts.round(freq='1H30min') + >>> ts.round(freq='1h30min') Timestamp('2020-03-14 15:00:00') Analogous for ``pd.NaT``: @@ -2014,10 +2014,10 @@ timedelta}, default 'raise' >>> ts_tz = pd.Timestamp("2021-10-31 01:30:00").tz_localize("Europe/Amsterdam") - >>> ts_tz.round("H", ambiguous=False) + >>> ts_tz.round("h", ambiguous=False) Timestamp('2021-10-31 02:00:00+0100', tz='Europe/Amsterdam') - >>> ts_tz.round("H", ambiguous=True) + >>> ts_tz.round("h", ambiguous=True) Timestamp('2021-10-31 02:00:00+0200', tz='Europe/Amsterdam') """ return self._round( @@ -2073,7 +2073,7 @@ timedelta}, default 'raise' A timestamp can be floored using multiple frequency units: - >>> ts.floor(freq='H') # hour + >>> ts.floor(freq='h') # hour Timestamp('2020-03-14 15:00:00') >>> ts.floor(freq='min') # minute @@ -2090,9 +2090,9 @@ timedelta}, default 'raise' >>> ts.floor(freq='5min') Timestamp('2020-03-14 15:30:00') - or a combination of multiple units, like '1H30min' (i.e. 1 hour and 30 minutes): + or a combination of multiple units, like '1h30min' (i.e. 1 hour and 30 minutes): - >>> ts.floor(freq='1H30min') + >>> ts.floor(freq='1h30min') Timestamp('2020-03-14 15:00:00') Analogous for ``pd.NaT``: @@ -2105,10 +2105,10 @@ timedelta}, default 'raise' >>> ts_tz = pd.Timestamp("2021-10-31 03:30:00").tz_localize("Europe/Amsterdam") - >>> ts_tz.floor("2H", ambiguous=False) + >>> ts_tz.floor("2h", ambiguous=False) Timestamp('2021-10-31 02:00:00+0100', tz='Europe/Amsterdam') - >>> ts_tz.floor("2H", ambiguous=True) + >>> ts_tz.floor("2h", ambiguous=True) Timestamp('2021-10-31 02:00:00+0200', tz='Europe/Amsterdam') """ return self._round(freq, RoundTo.MINUS_INFTY, ambiguous, nonexistent) @@ -2162,7 +2162,7 @@ timedelta}, default 'raise' A timestamp can be ceiled using multiple frequency units: - >>> ts.ceil(freq='H') # hour + >>> ts.ceil(freq='h') # hour Timestamp('2020-03-14 16:00:00') >>> ts.ceil(freq='min') # minute @@ -2179,9 +2179,9 @@ timedelta}, default 'raise' >>> ts.ceil(freq='5min') Timestamp('2020-03-14 15:35:00') - or a combination of multiple units, like '1H30min' (i.e. 1 hour and 30 minutes): + or a combination of multiple units, like '1h30min' (i.e. 
1 hour and 30 minutes): - >>> ts.ceil(freq='1H30min') + >>> ts.ceil(freq='1h30min') Timestamp('2020-03-14 16:30:00') Analogous for ``pd.NaT``: @@ -2194,10 +2194,10 @@ timedelta}, default 'raise' >>> ts_tz = pd.Timestamp("2021-10-31 01:30:00").tz_localize("Europe/Amsterdam") - >>> ts_tz.ceil("H", ambiguous=False) + >>> ts_tz.ceil("h", ambiguous=False) Timestamp('2021-10-31 02:00:00+0100', tz='Europe/Amsterdam') - >>> ts_tz.ceil("H", ambiguous=True) + >>> ts_tz.ceil("h", ambiguous=True) Timestamp('2021-10-31 02:00:00+0200', tz='Europe/Amsterdam') """ return self._round(freq, RoundTo.PLUS_INFTY, ambiguous, nonexistent) diff --git a/pandas/core/arrays/arrow/array.py b/pandas/core/arrays/arrow/array.py index e844c0542df69..60c42c01e9f6f 100644 --- a/pandas/core/arrays/arrow/array.py +++ b/pandas/core/arrays/arrow/array.py @@ -2533,7 +2533,7 @@ def _round_temporally( "MS": "month", "W": "week", "D": "day", - "H": "hour", + "h": "hour", "min": "minute", "s": "second", "ms": "millisecond", diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index a2960a2870882..fd51303ebd55f 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -1826,14 +1826,14 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]: dtype='datetime64[ns]', freq='min') """ -_round_example = """>>> rng.round('H') +_round_example = """>>> rng.round('h') DatetimeIndex(['2018-01-01 12:00:00', '2018-01-01 12:00:00', '2018-01-01 12:00:00'], dtype='datetime64[ns]', freq=None) **Series** - >>> pd.Series(rng).dt.round("H") + >>> pd.Series(rng).dt.round("h") 0 2018-01-01 12:00:00 1 2018-01-01 12:00:00 2 2018-01-01 12:00:00 @@ -1844,23 +1844,23 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]: >>> rng_tz = pd.DatetimeIndex(["2021-10-31 03:30:00"], tz="Europe/Amsterdam") - >>> rng_tz.floor("2H", ambiguous=False) + >>> rng_tz.floor("2h", ambiguous=False) DatetimeIndex(['2021-10-31 02:00:00+01:00'], dtype='datetime64[ns, Europe/Amsterdam]', freq=None) - >>> rng_tz.floor("2H", ambiguous=True) + >>> rng_tz.floor("2h", ambiguous=True) DatetimeIndex(['2021-10-31 02:00:00+02:00'], dtype='datetime64[ns, Europe/Amsterdam]', freq=None) """ -_floor_example = """>>> rng.floor('H') +_floor_example = """>>> rng.floor('h') DatetimeIndex(['2018-01-01 11:00:00', '2018-01-01 12:00:00', '2018-01-01 12:00:00'], dtype='datetime64[ns]', freq=None) **Series** - >>> pd.Series(rng).dt.floor("H") + >>> pd.Series(rng).dt.floor("h") 0 2018-01-01 11:00:00 1 2018-01-01 12:00:00 2 2018-01-01 12:00:00 @@ -1871,23 +1871,23 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]: >>> rng_tz = pd.DatetimeIndex(["2021-10-31 03:30:00"], tz="Europe/Amsterdam") - >>> rng_tz.floor("2H", ambiguous=False) + >>> rng_tz.floor("2h", ambiguous=False) DatetimeIndex(['2021-10-31 02:00:00+01:00'], dtype='datetime64[ns, Europe/Amsterdam]', freq=None) - >>> rng_tz.floor("2H", ambiguous=True) + >>> rng_tz.floor("2h", ambiguous=True) DatetimeIndex(['2021-10-31 02:00:00+02:00'], dtype='datetime64[ns, Europe/Amsterdam]', freq=None) """ -_ceil_example = """>>> rng.ceil('H') +_ceil_example = """>>> rng.ceil('h') DatetimeIndex(['2018-01-01 12:00:00', '2018-01-01 12:00:00', '2018-01-01 13:00:00'], dtype='datetime64[ns]', freq=None) **Series** - >>> pd.Series(rng).dt.ceil("H") + >>> pd.Series(rng).dt.ceil("h") 0 2018-01-01 12:00:00 1 2018-01-01 12:00:00 2 2018-01-01 13:00:00 @@ -1898,11 +1898,11 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]: >>> rng_tz = 
pd.DatetimeIndex(["2021-10-31 01:30:00"], tz="Europe/Amsterdam") - >>> rng_tz.ceil("H", ambiguous=False) + >>> rng_tz.ceil("h", ambiguous=False) DatetimeIndex(['2021-10-31 02:00:00+01:00'], dtype='datetime64[ns, Europe/Amsterdam]', freq=None) - >>> rng_tz.ceil("H", ambiguous=True) + >>> rng_tz.ceil("h", ambiguous=True) DatetimeIndex(['2021-10-31 02:00:00+02:00'], dtype='datetime64[ns, Europe/Amsterdam]', freq=None) """ diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index e0d4098dd1f33..a2742aed31e4c 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -857,37 +857,37 @@ def tz_convert(self, tz) -> Self: to other time zones: >>> dti = pd.date_range(start='2014-08-01 09:00', - ... freq='H', periods=3, tz='Europe/Berlin') + ... freq='h', periods=3, tz='Europe/Berlin') >>> dti DatetimeIndex(['2014-08-01 09:00:00+02:00', '2014-08-01 10:00:00+02:00', '2014-08-01 11:00:00+02:00'], - dtype='datetime64[ns, Europe/Berlin]', freq='H') + dtype='datetime64[ns, Europe/Berlin]', freq='h') >>> dti.tz_convert('US/Central') DatetimeIndex(['2014-08-01 02:00:00-05:00', '2014-08-01 03:00:00-05:00', '2014-08-01 04:00:00-05:00'], - dtype='datetime64[ns, US/Central]', freq='H') + dtype='datetime64[ns, US/Central]', freq='h') With the ``tz=None``, we can remove the timezone (after converting to UTC if necessary): - >>> dti = pd.date_range(start='2014-08-01 09:00', freq='H', + >>> dti = pd.date_range(start='2014-08-01 09:00', freq='h', ... periods=3, tz='Europe/Berlin') >>> dti DatetimeIndex(['2014-08-01 09:00:00+02:00', '2014-08-01 10:00:00+02:00', '2014-08-01 11:00:00+02:00'], - dtype='datetime64[ns, Europe/Berlin]', freq='H') + dtype='datetime64[ns, Europe/Berlin]', freq='h') >>> dti.tz_convert(None) DatetimeIndex(['2014-08-01 07:00:00', '2014-08-01 08:00:00', '2014-08-01 09:00:00'], - dtype='datetime64[ns]', freq='H') + dtype='datetime64[ns]', freq='h') """ tz = timezones.maybe_get_tz(tz) @@ -1042,7 +1042,7 @@ def tz_localize( 1 2015-03-29 03:30:00+02:00 dtype: datetime64[ns, Europe/Warsaw] - >>> s.dt.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H')) + >>> s.dt.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1h')) 0 2015-03-29 03:30:00+02:00 1 2015-03-29 03:30:00+02:00 dtype: datetime64[ns, Europe/Warsaw] @@ -1132,13 +1132,13 @@ def normalize(self) -> Self: Examples -------- - >>> idx = pd.date_range(start='2014-08-01 10:00', freq='H', + >>> idx = pd.date_range(start='2014-08-01 10:00', freq='h', ... 
periods=3, tz='Asia/Calcutta') >>> idx DatetimeIndex(['2014-08-01 10:00:00+05:30', '2014-08-01 11:00:00+05:30', '2014-08-01 12:00:00+05:30'], - dtype='datetime64[ns, Asia/Calcutta]', freq='H') + dtype='datetime64[ns, Asia/Calcutta]', freq='h') >>> idx.normalize() DatetimeIndex(['2014-08-01 00:00:00+05:30', '2014-08-01 00:00:00+05:30', diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 2468535397568..f188b73b4fc64 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -477,7 +477,7 @@ def __arrow_array__(self, type=None): Examples -------- - >>> idx = pd.PeriodIndex(["2023-01-01 10:00", "2023-01-01 11:00"], freq='H') + >>> idx = pd.PeriodIndex(["2023-01-01 10:00", "2023-01-01 11:00"], freq='h') >>> idx.hour Index([10, 11], dtype='int64') """, diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index 931a220d7ab29..ca908c11a97bb 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -136,7 +136,7 @@ class TimedeltaArray(dtl.TimelikeOps): Examples -------- - >>> pd.arrays.TimedeltaArray(pd.TimedeltaIndex(['1H', '2H'])) + >>> pd.arrays.TimedeltaArray(pd.TimedeltaIndex(['1h', '2h'])) ['0 days 01:00:00', '0 days 02:00:00'] Length: 2, dtype: timedelta64[ns] diff --git a/pandas/core/construction.py b/pandas/core/construction.py index e661d590ab330..b4b9a4176472d 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -203,7 +203,7 @@ def array( ['2015-01-01 00:00:00', '2016-01-01 00:00:00'] Length: 2, dtype: datetime64[ns] - >>> pd.array(["1H", "2H"], dtype='timedelta64[ns]') + >>> pd.array(["1h", "2h"], dtype='timedelta64[ns]') ['0 days 01:00:00', '0 days 02:00:00'] Length: 2, dtype: timedelta64[ns] diff --git a/pandas/core/dtypes/base.py b/pandas/core/dtypes/base.py index 6567ca7155b0d..6b00a5284ec5b 100644 --- a/pandas/core/dtypes/base.py +++ b/pandas/core/dtypes/base.py @@ -242,7 +242,7 @@ def construct_from_string(cls, string: str) -> Self: This is useful mainly for data types that accept parameters. For example, a period dtype accepts a frequency parameter that - can be set as ``period[H]`` (where H means hourly frequency). + can be set as ``period[h]`` (where H means hourly frequency). By default, in the abstract class, just the name of the type is expected. 
But subclasses can overwrite this method to accept diff --git a/pandas/core/generic.py b/pandas/core/generic.py index e131d689b6a40..b7b01a2097358 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -9039,7 +9039,7 @@ def at_time(self, time, asof: bool_t = False, axis: Axis | None = None) -> Self: Examples -------- - >>> i = pd.date_range('2018-04-09', periods=4, freq='12H') + >>> i = pd.date_range('2018-04-09', periods=4, freq='12h') >>> ts = pd.DataFrame({'A': [1, 2, 3, 4]}, index=i) >>> ts A @@ -11380,7 +11380,7 @@ def tz_localize( 2015-03-29 01:59:59.999999999+01:00 0 2015-03-29 03:30:00+02:00 1 dtype: int64 - >>> s.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H')) + >>> s.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1h')) 2015-03-29 03:30:00+02:00 0 2015-03-29 03:30:00+02:00 1 dtype: int64 diff --git a/pandas/core/indexes/accessors.py b/pandas/core/indexes/accessors.py index af8fa441f8b3f..d90de383adb48 100644 --- a/pandas/core/indexes/accessors.py +++ b/pandas/core/indexes/accessors.py @@ -544,7 +544,7 @@ class PeriodProperties(Properties): 1 2000-01-01 01:00 2 2000-01-01 02:00 3 2000-01-01 03:00 - dtype: period[H] + dtype: period[h] >>> hours_series.dt.hour 0 0 1 1 diff --git a/pandas/core/indexes/datetimes.py b/pandas/core/indexes/datetimes.py index 62cdce36ed5fb..f3b2a35f379f4 100644 --- a/pandas/core/indexes/datetimes.py +++ b/pandas/core/indexes/datetimes.py @@ -779,11 +779,11 @@ def indexer_between_time( Examples -------- - >>> idx = pd.date_range("2023-01-01", periods=4, freq="H") + >>> idx = pd.date_range("2023-01-01", periods=4, freq="h") >>> idx DatetimeIndex(['2023-01-01 00:00:00', '2023-01-01 01:00:00', '2023-01-01 02:00:00', '2023-01-01 03:00:00'], - dtype='datetime64[ns]', freq='H') + dtype='datetime64[ns]', freq='h') >>> idx.indexer_between_time("00:00", "2:00", include_end=False) array([0, 1]) """ @@ -848,7 +848,7 @@ def date_range( periods : int, optional Number of periods to generate. freq : str, Timedelta, datetime.timedelta, or DateOffset, default 'D' - Frequency strings can have multiples, e.g. '5H'. See + Frequency strings can have multiples, e.g. '5h'. See :ref:`here ` for a list of frequency aliases. tz : str or tzinfo, optional @@ -1040,7 +1040,7 @@ def bdate_range( periods : int, default None Number of periods to generate. freq : str, Timedelta, datetime.timedelta, or DateOffset, default 'B' - Frequency strings can have multiples, e.g. '5H'. The default is + Frequency strings can have multiples, e.g. '5h'. The default is business daily ('B'). tz : str or None Time zone name for returning localized DatetimeIndex, for example diff --git a/pandas/core/indexes/timedeltas.py b/pandas/core/indexes/timedeltas.py index 5ce3dd33eee48..498fe56a7ae7f 100644 --- a/pandas/core/indexes/timedeltas.py +++ b/pandas/core/indexes/timedeltas.py @@ -278,7 +278,7 @@ def timedelta_range( periods : int, default None Number of periods to generate. freq : str, Timedelta, datetime.timedelta, or DateOffset, default 'D' - Frequency strings can have multiples, e.g. '5H'. + Frequency strings can have multiples, e.g. '5h'. name : str, default None Name of the resulting TimedeltaIndex. closed : str, default None @@ -320,10 +320,10 @@ def timedelta_range( Only fixed frequencies can be passed, non-fixed frequencies such as 'M' (month end) will raise. 
- >>> pd.timedelta_range(start='1 day', end='2 days', freq='6H') + >>> pd.timedelta_range(start='1 day', end='2 days', freq='6h') TimedeltaIndex(['1 days 00:00:00', '1 days 06:00:00', '1 days 12:00:00', '1 days 18:00:00', '2 days 00:00:00'], - dtype='timedelta64[ns]', freq='6H') + dtype='timedelta64[ns]', freq='6h') Specify ``start``, ``end``, and ``periods``; the frequency is generated automatically (linearly spaced). diff --git a/pandas/core/resample.py b/pandas/core/resample.py index b669028ac60bd..bb5f3ce56b470 100644 --- a/pandas/core/resample.py +++ b/pandas/core/resample.py @@ -376,13 +376,13 @@ def transform(self, arg, *args, **kwargs): >>> s 2018-01-01 00:00:00 1 2018-01-01 01:00:00 2 - Freq: H, dtype: int64 + Freq: h, dtype: int64 >>> resampled = s.resample('15min') >>> resampled.transform(lambda x: (x - x.mean()) / x.std()) 2018-01-01 00:00:00 NaN 2018-01-01 01:00:00 NaN - Freq: H, dtype: float64 + Freq: h, dtype: float64 """ return self._selected_obj.groupby(self._timegrouper).transform( arg, *args, **kwargs @@ -612,7 +612,7 @@ def nearest(self, limit: int | None = None): >>> s 2018-01-01 00:00:00 1 2018-01-01 01:00:00 2 - Freq: H, dtype: int64 + Freq: h, dtype: int64 >>> s.resample('15min').nearest() 2018-01-01 00:00:00 1 @@ -681,7 +681,7 @@ def bfill(self, limit: int | None = None): 2018-01-01 00:00:00 1 2018-01-01 01:00:00 2 2018-01-01 02:00:00 3 - Freq: H, dtype: int64 + Freq: h, dtype: int64 >>> s.resample('30min').bfill() 2018-01-01 00:00:00 1 @@ -792,7 +792,7 @@ def fillna(self, method, limit: int | None = None): 2018-01-01 00:00:00 1 2018-01-01 01:00:00 2 2018-01-01 02:00:00 3 - Freq: H, dtype: int64 + Freq: h, dtype: int64 Without filling the missing values you get: @@ -848,7 +848,7 @@ def fillna(self, method, limit: int | None = None): 2018-01-01 00:00:00 1.0 2018-01-01 01:00:00 NaN 2018-01-01 02:00:00 3.0 - Freq: H, dtype: float64 + Freq: h, dtype: float64 >>> sm.resample('30min').fillna('backfill') 2018-01-01 00:00:00 1.0 @@ -2339,7 +2339,7 @@ def _get_time_delta_bins(self, ax: TimedeltaIndex): # GH#51896 raise ValueError( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - f"e.g. '24H' or '3D', not {self.freq}" + f"e.g. '24h' or '3D', not {self.freq}" ) if not len(ax): @@ -2544,8 +2544,8 @@ def _get_timestamp_range_edges( origin = Timestamp("1970-01-01", tz=index_tz) if isinstance(freq, Day): - # _adjust_dates_anchored assumes 'D' means 24H, but first/last - # might contain a DST transition (23H, 24H, or 25H). + # _adjust_dates_anchored assumes 'D' means 24h, but first/last + # might contain a DST transition (23h, 24h, or 25h). # So "pretend" the dates are naive when adjusting the endpoints first = first.tz_localize(None) last = last.tz_localize(None) diff --git a/pandas/core/tools/timedeltas.py b/pandas/core/tools/timedeltas.py index a9abc0714baa3..8db77725a1aa3 100644 --- a/pandas/core/tools/timedeltas.py +++ b/pandas/core/tools/timedeltas.py @@ -111,7 +111,7 @@ def to_timedelta( * 'W' * 'D' / 'days' / 'day' - * 'hours' / 'hour' / 'hr' / 'h' + * 'hours' / 'hour' / 'hr' / 'h' / 'H' * 'm' / 'minute' / 'min' / 'minutes' / 'T' * 's' / 'seconds' / 'sec' / 'second' / 'S' * 'ms' / 'milliseconds' / 'millisecond' / 'milli' / 'millis' / 'L' @@ -121,9 +121,9 @@ def to_timedelta( Must not be specified when `arg` context strings and ``errors="raise"``. .. deprecated:: 2.2.0 - Units 'T', 'S', 'L', 'U' and 'N' are deprecated and will be removed - in a future version. Please use 'min', 's', 'ms', 'us', and 'ns' instead of - 'T', 'S', 'L', 'U' and 'N'. 
+ Units 'H', 'T', 'S', 'L', 'U' and 'N' are deprecated and will be removed + in a future version. Please use 'h', 'min', 's', 'ms', 'us', and 'ns' + instead of 'H', 'T', 'S', 'L', 'U' and 'N'. errors : {'ignore', 'raise', 'coerce'}, default 'raise' - If 'raise', then invalid parsing will raise an exception. diff --git a/pandas/tests/apply/test_frame_apply.py b/pandas/tests/apply/test_frame_apply.py index 227b72573f979..be988594ebf58 100644 --- a/pandas/tests/apply/test_frame_apply.py +++ b/pandas/tests/apply/test_frame_apply.py @@ -839,7 +839,7 @@ def test_with_listlike_columns(): { "a": Series(np.random.default_rng(2).standard_normal(4)), "b": ["a", "list", "of", "words"], - "ts": date_range("2016-10-01", periods=4, freq="H"), + "ts": date_range("2016-10-01", periods=4, freq="h"), } ) diff --git a/pandas/tests/apply/test_series_apply.py b/pandas/tests/apply/test_series_apply.py index aeb6a01eb587a..643b9220999f7 100644 --- a/pandas/tests/apply/test_series_apply.py +++ b/pandas/tests/apply/test_series_apply.py @@ -188,13 +188,13 @@ def test_apply_box(): def test_apply_datetimetz(by_row): - values = pd.date_range("2011-01-01", "2011-01-02", freq="H").tz_localize( + values = pd.date_range("2011-01-01", "2011-01-02", freq="h").tz_localize( "Asia/Tokyo" ) s = Series(values, name="XX") result = s.apply(lambda x: x + pd.offsets.Day(), by_row=by_row) - exp_values = pd.date_range("2011-01-02", "2011-01-03", freq="H").tz_localize( + exp_values = pd.date_range("2011-01-02", "2011-01-03", freq="h").tz_localize( "Asia/Tokyo" ) exp = Series(exp_values, name="XX") diff --git a/pandas/tests/arithmetic/conftest.py b/pandas/tests/arithmetic/conftest.py index 7ec77e5b65b7e..f77b81574e1c1 100644 --- a/pandas/tests/arithmetic/conftest.py +++ b/pandas/tests/arithmetic/conftest.py @@ -21,13 +21,13 @@ def one(request): Examples -------- - dti = pd.date_range('2016-01-01', periods=2, freq='H') + dti = pd.date_range('2016-01-01', periods=2, freq='h') dti DatetimeIndex(['2016-01-01 00:00:00', '2016-01-01 01:00:00'], - dtype='datetime64[ns]', freq='H') + dtype='datetime64[ns]', freq='h') dti + one DatetimeIndex(['2016-01-01 01:00:00', '2016-01-01 02:00:00'], - dtype='datetime64[ns]', freq='H') + dtype='datetime64[ns]', freq='h') """ return request.param diff --git a/pandas/tests/arithmetic/test_datetime64.py b/pandas/tests/arithmetic/test_datetime64.py index 0c46d22ddcc2e..df6ccda27ab85 100644 --- a/pandas/tests/arithmetic/test_datetime64.py +++ b/pandas/tests/arithmetic/test_datetime64.py @@ -1076,7 +1076,7 @@ def test_dt64arr_add_dtlike_raises(self, tz_naive_fixture, box_with_array): # Note: freq here includes both Tick and non-Tick offsets; this is # relevant because historically integer-addition was allowed if we had # a freq. 
- @pytest.mark.parametrize("freq", ["H", "D", "W", "2ME", "MS", "Q", "B", None]) + @pytest.mark.parametrize("freq", ["h", "D", "W", "2ME", "MS", "Q", "B", None]) @pytest.mark.parametrize("dtype", [None, "uint8"]) def test_dt64arr_addsub_intlike( self, request, dtype, box_with_array, freq, tz_naive_fixture @@ -1144,7 +1144,7 @@ def test_dt64arr_add_sub_invalid(self, dti_freq, other, box_with_array): ) assert_invalid_addsub_type(dtarr, other, msg) - @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "H"]) + @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"]) @pytest.mark.parametrize("dti_freq", [None, "D"]) def test_dt64arr_add_sub_parr( self, dti_freq, pi_freq, box_with_array, box_with_array2 @@ -1282,10 +1282,10 @@ def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array): offset = dates + pd.offsets.Hour(5) assert dates[0] + pd.offsets.Hour(5) == offset[0] - dates = date_range("2010-11-01 00:00", periods=3, tz=tz, freq="H") + dates = date_range("2010-11-01 00:00", periods=3, tz=tz, freq="h") expected = DatetimeIndex( ["2010-11-01 05:00", "2010-11-01 06:00", "2010-11-01 07:00"], - freq="H", + freq="h", tz=tz, ) @@ -1953,7 +1953,7 @@ def test_operators_datetimelike_with_timezones(self): dt2 = dt1.copy() dt2.iloc[2] = np.nan - td1 = Series(pd.timedelta_range("1 days 1 min", periods=5, freq="H")) + td1 = Series(pd.timedelta_range("1 days 1 min", periods=5, freq="h")) td2 = td1.copy() td2.iloc[1] = np.nan assert td2._values.freq is None diff --git a/pandas/tests/arithmetic/test_period.py b/pandas/tests/arithmetic/test_period.py index bee1e1a385672..5af63258921ed 100644 --- a/pandas/tests/arithmetic/test_period.py +++ b/pandas/tests/arithmetic/test_period.py @@ -637,11 +637,11 @@ def test_pi_sub_pi_with_nat(self): def test_parr_sub_pi_mismatched_freq(self, box_with_array, box_with_array2): rng = period_range("1/1/2000", freq="D", periods=5) - other = period_range("1/6/2000", freq="H", periods=5) + other = period_range("1/6/2000", freq="h", periods=5) rng = tm.box_expected(rng, box_with_array) other = tm.box_expected(other, box_with_array2) - msg = r"Input has different freq=[HD] from PeriodArray\(freq=[DH]\)" + msg = r"Input has different freq=[hD] from PeriodArray\(freq=[Dh]\)" with pytest.raises(IncompatibleFrequency, match=msg): rng - other @@ -696,7 +696,7 @@ def test_sub_n_gt_1_offsets(self, offset, kwd_name, n): Timestamp("2016-01-01").to_pydatetime(), Timestamp("2016-01-01").to_datetime64(), # datetime-like arrays - pd.date_range("2016-01-01", periods=3, freq="H"), + pd.date_range("2016-01-01", periods=3, freq="h"), pd.date_range("2016-01-01", periods=3, tz="Europe/Brussels"), pd.date_range("2016-01-01", periods=3, freq="s")._data, pd.date_range("2016-01-01", periods=3, tz="Asia/Tokyo")._data, @@ -779,8 +779,8 @@ def test_pi_add_sub_td64_array_tick(self): with pytest.raises(TypeError, match=msg): tdi - rng - @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "H"]) - @pytest.mark.parametrize("tdi_freq", [None, "H"]) + @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"]) + @pytest.mark.parametrize("tdi_freq", [None, "h"]) def test_parr_sub_td64array(self, box_with_array, tdi_freq, pi_freq): box = box_with_array xbox = box if box not in [pd.array, tm.to_array] else pd.Index @@ -792,7 +792,7 @@ def test_parr_sub_td64array(self, box_with_array, tdi_freq, pi_freq): # TODO: parametrize over box for pi? 
td64obj = tm.box_expected(tdi, box) - if pi_freq == "H": + if pi_freq == "h": result = pi - td64obj expected = (pi.to_timestamp("s") - tdi).to_period(pi_freq) expected = tm.box_expected(expected, xbox) @@ -891,9 +891,9 @@ def test_pi_sub_offset_array(self, box): def test_pi_add_iadd_int(self, one): # Variants of `one` for #19012 - rng = period_range("2000-01-01 09:00", freq="H", periods=10) + rng = period_range("2000-01-01 09:00", freq="h", periods=10) result = rng + one - expected = period_range("2000-01-01 10:00", freq="H", periods=10) + expected = period_range("2000-01-01 10:00", freq="h", periods=10) tm.assert_index_equal(result, expected) rng += one tm.assert_index_equal(rng, expected) @@ -903,9 +903,9 @@ def test_pi_sub_isub_int(self, one): PeriodIndex.__sub__ and __isub__ with several representations of the integer 1, e.g. int, np.int64, np.uint8, ... """ - rng = period_range("2000-01-01 09:00", freq="H", periods=10) + rng = period_range("2000-01-01 09:00", freq="h", periods=10) result = rng - one - expected = period_range("2000-01-01 08:00", freq="H", periods=10) + expected = period_range("2000-01-01 08:00", freq="h", periods=10) tm.assert_index_equal(result, expected) rng -= one tm.assert_index_equal(rng, expected) @@ -1131,8 +1131,8 @@ def test_parr_add_sub_timedeltalike_freq_mismatch_daily( def test_pi_add_iadd_timedeltalike_hourly(self, two_hours): other = two_hours - rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="H") - expected = period_range("2014-01-01 12:00", "2014-01-05 12:00", freq="H") + rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h") + expected = period_range("2014-01-01 12:00", "2014-01-05 12:00", freq="h") result = rng + other tm.assert_index_equal(result, expected) @@ -1144,12 +1144,12 @@ def test_parr_add_timedeltalike_mismatched_freq_hourly( self, not_hourly, box_with_array ): other = not_hourly - rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="H") + rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h") rng = tm.box_expected(rng, box_with_array) msg = "|".join( [ # non-timedelta-like DateOffset - "Input has different freq(=.+)? from Period.*?\\(freq=H\\)", + "Input has different freq(=.+)? 
from Period.*?\\(freq=h\\)", # timedelta/td64/Timedelta but not a multiple of 24H "Cannot add/subtract timedelta-like from PeriodArray that is " "not an integer multiple of the PeriodArray's freq.", @@ -1164,8 +1164,8 @@ def test_parr_add_timedeltalike_mismatched_freq_hourly( def test_pi_sub_isub_timedeltalike_hourly(self, two_hours): other = two_hours - rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="H") - expected = period_range("2014-01-01 08:00", "2014-01-05 08:00", freq="H") + rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h") + expected = period_range("2014-01-01 08:00", "2014-01-05 08:00", freq="h") result = rng - other tm.assert_index_equal(result, expected) diff --git a/pandas/tests/arithmetic/test_timedelta64.py b/pandas/tests/arithmetic/test_timedelta64.py index 3d237b3ac4a31..205e6472aaecb 100644 --- a/pandas/tests/arithmetic/test_timedelta64.py +++ b/pandas/tests/arithmetic/test_timedelta64.py @@ -70,7 +70,7 @@ def test_compare_timedelta64_zerodim(self, box_with_array): box = box_with_array xbox = box_with_array if box_with_array not in [Index, pd.array] else np.ndarray - tdi = timedelta_range("2H", periods=4) + tdi = timedelta_range("2h", periods=4) other = np.array(tdi.to_numpy()[0]) tdi = tm.box_expected(tdi, box) @@ -276,32 +276,32 @@ class TestTimedelta64ArithmeticUnsorted: def test_ufunc_coercions(self): # normal ops are also tested in tseries/test_timedeltas.py - idx = TimedeltaIndex(["2H", "4H", "6H", "8H", "10H"], freq="2H", name="x") + idx = TimedeltaIndex(["2h", "4h", "6h", "8h", "10h"], freq="2h", name="x") for result in [idx * 2, np.multiply(idx, 2)]: assert isinstance(result, TimedeltaIndex) - exp = TimedeltaIndex(["4H", "8H", "12H", "16H", "20H"], freq="4H", name="x") + exp = TimedeltaIndex(["4h", "8h", "12h", "16h", "20h"], freq="4h", name="x") tm.assert_index_equal(result, exp) - assert result.freq == "4H" + assert result.freq == "4h" for result in [idx / 2, np.divide(idx, 2)]: assert isinstance(result, TimedeltaIndex) - exp = TimedeltaIndex(["1H", "2H", "3H", "4H", "5H"], freq="H", name="x") + exp = TimedeltaIndex(["1h", "2h", "3h", "4h", "5h"], freq="h", name="x") tm.assert_index_equal(result, exp) - assert result.freq == "H" + assert result.freq == "h" for result in [-idx, np.negative(idx)]: assert isinstance(result, TimedeltaIndex) exp = TimedeltaIndex( - ["-2H", "-4H", "-6H", "-8H", "-10H"], freq="-2H", name="x" + ["-2h", "-4h", "-6h", "-8h", "-10h"], freq="-2h", name="x" ) tm.assert_index_equal(result, exp) - assert result.freq == "-2H" + assert result.freq == "-2h" - idx = TimedeltaIndex(["-2H", "-1H", "0H", "1H", "2H"], freq="H", name="x") + idx = TimedeltaIndex(["-2h", "-1h", "0h", "1h", "2h"], freq="h", name="x") for result in [abs(idx), np.absolute(idx)]: assert isinstance(result, TimedeltaIndex) - exp = TimedeltaIndex(["2H", "1H", "0H", "1H", "2H"], freq=None, name="x") + exp = TimedeltaIndex(["2h", "1h", "0h", "1h", "2h"], freq=None, name="x") tm.assert_index_equal(result, exp) assert result.freq is None @@ -1073,8 +1073,8 @@ def test_td64arr_add_dt64_array(self, box_with_array): # ------------------------------------------------------------------ # Invalid __add__/__sub__ operations - @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "H"]) - @pytest.mark.parametrize("tdi_freq", [None, "H"]) + @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"]) + @pytest.mark.parametrize("tdi_freq", [None, "h"]) def test_td64arr_sub_periodlike( self, box_with_array, box_with_array2, tdi_freq, pi_freq ): @@ -1133,7 +1133,7 
@@ def test_td64arr_addsub_numeric_arr_invalid( def test_td64arr_add_sub_int(self, box_with_array, one): # Variants of `one` for #19012, deprecated GH#22535 - rng = timedelta_range("1 days 09:00:00", freq="H", periods=10) + rng = timedelta_range("1 days 09:00:00", freq="h", periods=10) tdarr = tm.box_expected(rng, box_with_array) msg = "Addition/subtraction of integers" @@ -1152,7 +1152,7 @@ def test_td64arr_add_sub_integer_array(self, box_with_array): box = box_with_array xbox = np.ndarray if box is pd.array else box - rng = timedelta_range("1 days 09:00:00", freq="H", periods=3) + rng = timedelta_range("1 days 09:00:00", freq="h", periods=3) tdarr = tm.box_expected(rng, box) other = tm.box_expected([4, 3, 2], xbox) @@ -2011,7 +2011,7 @@ def test_td64arr_div_numeric_array( tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]") vector = vector.astype(any_real_numpy_dtype) - expected = Series(["2.95D", "1D 23H 12m", "NaT"], dtype="timedelta64[ns]") + expected = Series(["2.95D", "1D 23h 12m", "NaT"], dtype="timedelta64[ns]") tdser = tm.box_expected(tdser, box_with_array) xbox = get_upcast_box(tdser, vector) diff --git a/pandas/tests/arrays/categorical/test_repr.py b/pandas/tests/arrays/categorical/test_repr.py index cdf5d967d9c3d..dca171bf81047 100644 --- a/pandas/tests/arrays/categorical/test_repr.py +++ b/pandas/tests/arrays/categorical/test_repr.py @@ -148,7 +148,7 @@ def test_categorical_repr_ordered(self): assert repr(c) == exp def test_categorical_repr_datetime(self): - idx = date_range("2011-01-01 09:00", freq="H", periods=5) + idx = date_range("2011-01-01 09:00", freq="h", periods=5) c = Categorical(idx) exp = ( @@ -176,7 +176,7 @@ def test_categorical_repr_datetime(self): assert repr(c) == exp - idx = date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + idx = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") c = Categorical(idx) exp = ( "[2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, " @@ -210,7 +210,7 @@ def test_categorical_repr_datetime(self): assert repr(c) == exp def test_categorical_repr_datetime_ordered(self): - idx = date_range("2011-01-01 09:00", freq="H", periods=5) + idx = date_range("2011-01-01 09:00", freq="h", periods=5) c = Categorical(idx, ordered=True) exp = """[2011-01-01 09:00:00, 2011-01-01 10:00:00, 2011-01-01 11:00:00, 2011-01-01 12:00:00, 2011-01-01 13:00:00] Categories (5, datetime64[ns]): [2011-01-01 09:00:00 < 2011-01-01 10:00:00 < 2011-01-01 11:00:00 < @@ -225,7 +225,7 @@ def test_categorical_repr_datetime_ordered(self): assert repr(c) == exp - idx = date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + idx = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") c = Categorical(idx, ordered=True) exp = """[2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00] Categories (5, datetime64[ns, US/Eastern]): [2011-01-01 09:00:00-05:00 < 2011-01-01 10:00:00-05:00 < @@ -254,17 +254,17 @@ def test_categorical_repr_int_with_nan(self): assert repr(s) == s_exp def test_categorical_repr_period(self): - idx = period_range("2011-01-01 09:00", freq="H", periods=5) + idx = period_range("2011-01-01 09:00", freq="h", periods=5) c = Categorical(idx) exp = """[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00] -Categories (5, period[H]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, +Categories (5, period[h]): [2011-01-01 09:00, 2011-01-01 10:00, 
2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00]""" # noqa: E501 assert repr(c) == exp c = Categorical(idx.append(idx), categories=idx) exp = """[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00, 2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00] -Categories (5, period[H]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, +Categories (5, period[h]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00]""" # noqa: E501 assert repr(c) == exp @@ -283,17 +283,17 @@ def test_categorical_repr_period(self): assert repr(c) == exp def test_categorical_repr_period_ordered(self): - idx = period_range("2011-01-01 09:00", freq="H", periods=5) + idx = period_range("2011-01-01 09:00", freq="h", periods=5) c = Categorical(idx, ordered=True) exp = """[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00] -Categories (5, period[H]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < +Categories (5, period[h]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < 2011-01-01 13:00]""" # noqa: E501 assert repr(c) == exp c = Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00, 2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00] -Categories (5, period[H]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < +Categories (5, period[h]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < 2011-01-01 13:00]""" # noqa: E501 assert repr(c) == exp @@ -396,7 +396,7 @@ def test_categorical_index_repr_ordered(self): assert repr(i) == exp def test_categorical_index_repr_datetime(self): - idx = date_range("2011-01-01 09:00", freq="H", periods=5) + idx = date_range("2011-01-01 09:00", freq="h", periods=5) i = CategoricalIndex(Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00:00', '2011-01-01 10:00:00', '2011-01-01 11:00:00', '2011-01-01 12:00:00', @@ -405,7 +405,7 @@ def test_categorical_index_repr_datetime(self): assert repr(i) == exp - idx = date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + idx = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") i = CategoricalIndex(Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00:00-05:00', '2011-01-01 10:00:00-05:00', '2011-01-01 11:00:00-05:00', '2011-01-01 12:00:00-05:00', @@ -415,7 +415,7 @@ def test_categorical_index_repr_datetime(self): assert repr(i) == exp def test_categorical_index_repr_datetime_ordered(self): - idx = date_range("2011-01-01 09:00", freq="H", periods=5) + idx = date_range("2011-01-01 09:00", freq="h", periods=5) i = CategoricalIndex(Categorical(idx, ordered=True)) exp = """CategoricalIndex(['2011-01-01 09:00:00', '2011-01-01 10:00:00', '2011-01-01 11:00:00', '2011-01-01 12:00:00', @@ -424,7 +424,7 @@ def test_categorical_index_repr_datetime_ordered(self): assert repr(i) == exp - idx = date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + idx = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") i = CategoricalIndex(Categorical(idx, ordered=True)) exp = """CategoricalIndex(['2011-01-01 09:00:00-05:00', '2011-01-01 10:00:00-05:00', '2011-01-01 11:00:00-05:00', '2011-01-01 12:00:00-05:00', @@ -445,22 +445,22 @@ def 
test_categorical_index_repr_datetime_ordered(self): def test_categorical_index_repr_period(self): # test all length - idx = period_range("2011-01-01 09:00", freq="H", periods=1) + idx = period_range("2011-01-01 09:00", freq="h", periods=1) i = CategoricalIndex(Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00'], categories=[2011-01-01 09:00], ordered=False, dtype='category')""" # noqa: E501 assert repr(i) == exp - idx = period_range("2011-01-01 09:00", freq="H", periods=2) + idx = period_range("2011-01-01 09:00", freq="h", periods=2) i = CategoricalIndex(Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00], ordered=False, dtype='category')""" # noqa: E501 assert repr(i) == exp - idx = period_range("2011-01-01 09:00", freq="H", periods=3) + idx = period_range("2011-01-01 09:00", freq="h", periods=3) i = CategoricalIndex(Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00', '2011-01-01 11:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00], ordered=False, dtype='category')""" # noqa: E501 assert repr(i) == exp - idx = period_range("2011-01-01 09:00", freq="H", periods=5) + idx = period_range("2011-01-01 09:00", freq="h", periods=5) i = CategoricalIndex(Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00', '2011-01-01 11:00', '2011-01-01 12:00', '2011-01-01 13:00'], @@ -483,7 +483,7 @@ def test_categorical_index_repr_period(self): assert repr(i) == exp def test_categorical_index_repr_period_ordered(self): - idx = period_range("2011-01-01 09:00", freq="H", periods=5) + idx = period_range("2011-01-01 09:00", freq="h", periods=5) i = CategoricalIndex(Categorical(idx, ordered=True)) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00', '2011-01-01 11:00', '2011-01-01 12:00', '2011-01-01 13:00'], diff --git a/pandas/tests/arrays/datetimes/test_constructors.py b/pandas/tests/arrays/datetimes/test_constructors.py index 30f47e37fedf5..e513457819eb5 100644 --- a/pandas/tests/arrays/datetimes/test_constructors.py +++ b/pandas/tests/arrays/datetimes/test_constructors.py @@ -34,7 +34,7 @@ def test_freq_validation(self): arr = np.arange(5, dtype=np.int64) * 3600 * 10**9 msg = ( - "Inferred frequency H from passed values does not " + "Inferred frequency h from passed values does not " "conform to passed frequency W-SUN" ) with pytest.raises(ValueError, match=msg): @@ -70,7 +70,7 @@ def test_from_pandas_array(self): result = DatetimeArray._from_sequence(arr)._with_freq("infer") - expected = pd.date_range("1970-01-01", periods=5, freq="H")._data + expected = pd.date_range("1970-01-01", periods=5, freq="h")._data tm.assert_datetime_array_equal(result, expected) def test_mismatched_timezone_raises(self): diff --git a/pandas/tests/arrays/period/test_arrow_compat.py b/pandas/tests/arrays/period/test_arrow_compat.py index 6c04d7c603d4c..a1e1e8efe6dee 100644 --- a/pandas/tests/arrays/period/test_arrow_compat.py +++ b/pandas/tests/arrays/period/test_arrow_compat.py @@ -110,7 +110,7 @@ def test_arrow_load_from_zero_chunks(): def test_arrow_table_roundtrip_without_metadata(): - arr = PeriodArray([1, 2, 3], dtype="period[H]") + arr = PeriodArray([1, 2, 3], dtype="period[h]") arr[1] = pd.NaT df = pd.DataFrame({"a": arr}) diff --git a/pandas/tests/arrays/test_array.py b/pandas/tests/arrays/test_array.py index 0aeedf4d03919..92536a222296e 100644 --- a/pandas/tests/arrays/test_array.py +++ b/pandas/tests/arrays/test_array.py @@ -135,14 
+135,14 @@ def test_dt64_array(dtype_unit): ), # Timedelta ( - ["1H", "2H"], + ["1h", "2h"], np.dtype("timedelta64[ns]"), - TimedeltaArray._from_sequence(["1H", "2H"]), + TimedeltaArray._from_sequence(["1h", "2h"]), ), ( - pd.TimedeltaIndex(["1H", "2H"]), + pd.TimedeltaIndex(["1h", "2h"]), np.dtype("timedelta64[ns]"), - TimedeltaArray._from_sequence(["1H", "2H"]), + TimedeltaArray._from_sequence(["1h", "2h"]), ), ( np.array([1, 2], dtype="m8[s]"), @@ -150,9 +150,9 @@ def test_dt64_array(dtype_unit): TimedeltaArray._from_sequence(np.array([1, 2], dtype="m8[s]")), ), ( - pd.TimedeltaIndex(["1H", "2H"]), + pd.TimedeltaIndex(["1h", "2h"]), None, - TimedeltaArray._from_sequence(["1H", "2H"]), + TimedeltaArray._from_sequence(["1h", "2h"]), ), ( # preserve non-nano, i.e. don't cast to NumpyExtensionArray @@ -298,8 +298,8 @@ def test_array_copy(): ), # timedelta ( - [pd.Timedelta("1H"), pd.Timedelta("2H")], - TimedeltaArray._from_sequence(["1H", "2H"]), + [pd.Timedelta("1h"), pd.Timedelta("2h")], + TimedeltaArray._from_sequence(["1h", "2h"]), ), ( np.array([1, 2], dtype="m8[ns]"), diff --git a/pandas/tests/arrays/test_datetimelike.py b/pandas/tests/arrays/test_datetimelike.py index 291c687d84125..3f91b9b03e1de 100644 --- a/pandas/tests/arrays/test_datetimelike.py +++ b/pandas/tests/arrays/test_datetimelike.py @@ -858,7 +858,7 @@ def test_concat_same_type_invalid(self, arr1d): def test_concat_same_type_different_freq(self): # we *can* concatenate DTI with different freqs. a = DatetimeArray(pd.date_range("2000", periods=2, freq="D", tz="US/Central")) - b = DatetimeArray(pd.date_range("2000", periods=2, freq="H", tz="US/Central")) + b = DatetimeArray(pd.date_range("2000", periods=2, freq="h", tz="US/Central")) result = DatetimeArray._concat_same_type([a, b]) expected = DatetimeArray( pd.to_datetime( diff --git a/pandas/tests/arrays/test_period.py b/pandas/tests/arrays/test_period.py index 43a80a92573c5..48453ba19e9a1 100644 --- a/pandas/tests/arrays/test_period.py +++ b/pandas/tests/arrays/test_period.py @@ -133,8 +133,8 @@ def test_sub_period_overflow(): @pytest.mark.parametrize( "other", [ - pd.Period("2000", freq="H"), - PeriodArray._from_sequence(["2000", "2001", "2000"], dtype="period[H]"), + pd.Period("2000", freq="h"), + PeriodArray._from_sequence(["2000", "2001", "2000"], dtype="period[h]"), ], ) def test_where_different_freq_raises(other): diff --git a/pandas/tests/arrays/test_timedeltas.py b/pandas/tests/arrays/test_timedeltas.py index 1043c2ee6c9b6..21bc85a4d070e 100644 --- a/pandas/tests/arrays/test_timedeltas.py +++ b/pandas/tests/arrays/test_timedeltas.py @@ -196,7 +196,7 @@ def test_add_timedeltaarraylike(self, tda): class TestTimedeltaArray: @pytest.mark.parametrize("dtype", [int, np.int32, np.int64, "uint32", "uint64"]) def test_astype_int(self, dtype): - arr = TimedeltaArray._from_sequence([Timedelta("1H"), Timedelta("2H")]) + arr = TimedeltaArray._from_sequence([Timedelta("1h"), Timedelta("2h")]) if np.dtype(dtype) != np.int64: with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"): @@ -208,8 +208,8 @@ def test_astype_int(self, dtype): tm.assert_numpy_array_equal(result, expected) def test_setitem_clears_freq(self): - a = TimedeltaArray(pd.timedelta_range("1H", periods=2, freq="H")) - a[0] = Timedelta("1H") + a = TimedeltaArray(pd.timedelta_range("1h", periods=2, freq="h")) + a[0] = Timedelta("1h") assert a.freq is None @pytest.mark.parametrize( @@ -222,7 +222,7 @@ def test_setitem_clears_freq(self): ) def test_setitem_objects(self, obj): # make sure we accept timedelta64 
and timedelta in addition to Timedelta - tdi = pd.timedelta_range("2 Days", periods=4, freq="H") + tdi = pd.timedelta_range("2 Days", periods=4, freq="h") arr = TimedeltaArray(tdi, freq=tdi.freq) arr[0] = obj @@ -299,7 +299,7 @@ def test_neg(self): tm.assert_timedelta_array_equal(result2, expected) def test_neg_freq(self): - tdi = pd.timedelta_range("2 Days", periods=4, freq="H") + tdi = pd.timedelta_range("2 Days", periods=4, freq="h") arr = TimedeltaArray(tdi, freq=tdi.freq) expected = TimedeltaArray(-tdi._data, freq=-tdi.freq) diff --git a/pandas/tests/arrays/timedeltas/test_reductions.py b/pandas/tests/arrays/timedeltas/test_reductions.py index 72d45f5b9a78c..3718e7e646ea9 100644 --- a/pandas/tests/arrays/timedeltas/test_reductions.py +++ b/pandas/tests/arrays/timedeltas/test_reductions.py @@ -35,14 +35,14 @@ def test_sum_empty(self, skipna): assert result == Timedelta(0) def test_min_max(self): - arr = TimedeltaArray._from_sequence(["3H", "3H", "NaT", "2H", "5H", "4H"]) + arr = TimedeltaArray._from_sequence(["3h", "3h", "NaT", "2h", "5h", "4h"]) result = arr.min() - expected = Timedelta("2H") + expected = Timedelta("2h") assert result == expected result = arr.max() - expected = Timedelta("5H") + expected = Timedelta("5h") assert result == expected result = arr.min(skipna=False) @@ -52,7 +52,7 @@ def test_min_max(self): assert result is pd.NaT def test_sum(self): - tdi = pd.TimedeltaIndex(["3H", "3H", "NaT", "2H", "5H", "4H"]) + tdi = pd.TimedeltaIndex(["3h", "3h", "NaT", "2h", "5h", "4h"]) arr = tdi.array result = arr.sum(skipna=True) @@ -86,7 +86,7 @@ def test_sum(self): def test_npsum(self): # GH#25282, GH#25335 np.sum should return a Timedelta, not timedelta64 - tdi = pd.TimedeltaIndex(["3H", "3H", "2H", "5H", "4H"]) + tdi = pd.TimedeltaIndex(["3h", "3h", "2h", "5h", "4h"]) arr = tdi.array result = np.sum(tdi) @@ -133,7 +133,7 @@ def test_sum_2d_skipna_false(self): ], ) def test_std(self, add): - tdi = pd.TimedeltaIndex(["0H", "4H", "NaT", "4H", "0H", "2H"]) + add + tdi = pd.TimedeltaIndex(["0h", "4h", "NaT", "4h", "0h", "2h"]) + add arr = tdi.array result = arr.std(skipna=True) @@ -162,7 +162,7 @@ def test_std(self, add): assert np.isnat(result) def test_median(self): - tdi = pd.TimedeltaIndex(["0H", "3H", "NaT", "5H06m", "0H", "2H"]) + tdi = pd.TimedeltaIndex(["0h", "3h", "NaT", "5h06m", "0h", "2h"]) arr = tdi.array result = arr.median(skipna=True) @@ -181,7 +181,7 @@ def test_median(self): assert result is pd.NaT def test_mean(self): - tdi = pd.TimedeltaIndex(["0H", "3H", "NaT", "5H06m", "0H", "2H"]) + tdi = pd.TimedeltaIndex(["0h", "3h", "NaT", "5h06m", "0h", "2h"]) arr = tdi._data # manually verified result diff --git a/pandas/tests/base/test_conversion.py b/pandas/tests/base/test_conversion.py index 20ee2d443f340..3e0b0dbeb5624 100644 --- a/pandas/tests/base/test_conversion.py +++ b/pandas/tests/base/test_conversion.py @@ -314,7 +314,7 @@ def test_array_multiindex_raises(): ), # Timedelta ( - TimedeltaArray(np.array([0, 3600000000000], dtype="i8"), freq="H"), + TimedeltaArray(np.array([0, 3600000000000], dtype="i8"), freq="h"), np.array([0, 3600000000000], dtype="m8[ns]"), ), # GH#26406 tz is preserved in Categorical[dt64tz] diff --git a/pandas/tests/copy_view/test_methods.py b/pandas/tests/copy_view/test_methods.py index 76b974330cbf1..45bfc6a6fcf9b 100644 --- a/pandas/tests/copy_view/test_methods.py +++ b/pandas/tests/copy_view/test_methods.py @@ -1311,7 +1311,7 @@ def test_rename_axis(using_copy_on_write, kwargs): def test_tz_convert_localize(using_copy_on_write, func, 
tz): # GH 49473 ser = Series( - [1, 2], index=date_range(start="2014-08-01 09:00", freq="H", periods=2, tz=tz) + [1, 2], index=date_range(start="2014-08-01 09:00", freq="h", periods=2, tz=tz) ) ser_orig = ser.copy() ser2 = getattr(ser, func)("US/Central") diff --git a/pandas/tests/dtypes/cast/test_find_common_type.py b/pandas/tests/dtypes/cast/test_find_common_type.py index 8ce05337be70b..83ef7382fbe8a 100644 --- a/pandas/tests/dtypes/cast/test_find_common_type.py +++ b/pandas/tests/dtypes/cast/test_find_common_type.py @@ -122,7 +122,7 @@ def test_period_dtype_match(): [ DatetimeTZDtype(unit="ns", tz="Asia/Tokyo"), PeriodDtype(freq="2D"), - PeriodDtype(freq="H"), + PeriodDtype(freq="h"), np.dtype("datetime64[ns]"), object, np.int64, diff --git a/pandas/tests/dtypes/test_dtypes.py b/pandas/tests/dtypes/test_dtypes.py index 6562074eee634..1f9c371c50ad4 100644 --- a/pandas/tests/dtypes/test_dtypes.py +++ b/pandas/tests/dtypes/test_dtypes.py @@ -432,12 +432,12 @@ def test_construction(self): assert dt.freq == pd.tseries.offsets.Day(3) for s in [ - "period[26H]", - "Period[26H]", - "26H", - "period[1D2H]", - "Period[1D2H]", - "1D2H", + "period[26h]", + "Period[26h]", + "26h", + "period[1D2h]", + "Period[1D2h]", + "1D2h", ]: dt = PeriodDtype(s) assert dt.freq == pd.tseries.offsets.Hour(26) @@ -533,7 +533,7 @@ def test_basic(self, dtype): with tm.assert_produces_warning(FutureWarning, match=msg): assert is_period_dtype(dtype) - pidx = pd.period_range("2013-01-01 09:00", periods=5, freq="H") + pidx = pd.period_range("2013-01-01 09:00", periods=5, freq="h") assert is_period_dtype(pidx.dtype) assert is_period_dtype(pidx) diff --git a/pandas/tests/extension/test_arrow.py b/pandas/tests/extension/test_arrow.py index 339e97e735f85..41312f45838a9 100644 --- a/pandas/tests/extension/test_arrow.py +++ b/pandas/tests/extension/test_arrow.py @@ -2484,10 +2484,10 @@ def test_dt_roundlike_tz_options_not_supported(method): dtype=ArrowDtype(pa.timestamp("ns")), ) with pytest.raises(NotImplementedError, match="ambiguous is not supported."): - getattr(ser.dt, method)("1H", ambiguous="NaT") + getattr(ser.dt, method)("1h", ambiguous="NaT") with pytest.raises(NotImplementedError, match="nonexistent is not supported."): - getattr(ser.dt, method)("1H", nonexistent="NaT") + getattr(ser.dt, method)("1h", nonexistent="NaT") @pytest.mark.parametrize("method", ["ceil", "floor", "round"]) @@ -2506,7 +2506,7 @@ def test_dt_roundlike_unsupported_freq(method): @pytest.mark.xfail( pa_version_under7p0, reason="Methods not supported for pyarrow < 7.0" ) -@pytest.mark.parametrize("freq", ["D", "H", "min", "s", "ms", "us", "ns"]) +@pytest.mark.parametrize("freq", ["D", "h", "min", "s", "ms", "us", "ns"]) @pytest.mark.parametrize("method", ["ceil", "floor", "round"]) def test_dt_ceil_year_floor(freq, method): ser = pd.Series( diff --git a/pandas/tests/frame/methods/test_align.py b/pandas/tests/frame/methods/test_align.py index 87a56c0736287..25eb2ccb18361 100644 --- a/pandas/tests/frame/methods/test_align.py +++ b/pandas/tests/frame/methods/test_align.py @@ -23,8 +23,8 @@ def test_align_asfreq_method_raises(self): df.align(df.iloc[::-1], method="asfreq") def test_frame_align_aware(self): - idx1 = date_range("2001", periods=5, freq="H", tz="US/Eastern") - idx2 = date_range("2001", periods=5, freq="2H", tz="US/Eastern") + idx1 = date_range("2001", periods=5, freq="h", tz="US/Eastern") + idx2 = date_range("2001", periods=5, freq="2h", tz="US/Eastern") df1 = DataFrame(np.random.default_rng(2).standard_normal((len(idx1), 3)), idx1) df2 = 
DataFrame(np.random.default_rng(2).standard_normal((len(idx2), 3)), idx2) new1, new2 = df1.align(df2) diff --git a/pandas/tests/frame/methods/test_asfreq.py b/pandas/tests/frame/methods/test_asfreq.py index 0527bfb16492c..b3ab11d07bd7e 100644 --- a/pandas/tests/frame/methods/test_asfreq.py +++ b/pandas/tests/frame/methods/test_asfreq.py @@ -63,8 +63,8 @@ def test_asfreq2(self, frame_or_series): def test_asfreq_datetimeindex_empty(self, frame_or_series): # GH#14320 index = DatetimeIndex(["2016-09-29 11:00"]) - expected = frame_or_series(index=index, dtype=object).asfreq("H") - result = frame_or_series([3], index=index.copy()).asfreq("H") + expected = frame_or_series(index=index, dtype=object).asfreq("h") + result = frame_or_series([3], index=index.copy()).asfreq("h") tm.assert_index_equal(expected.index, result.index) @pytest.mark.parametrize("tz", ["US/Eastern", "dateutil/US/Eastern"]) @@ -194,8 +194,8 @@ def test_asfreq_with_date_object_index(self, frame_or_series): ts2 = ts.copy() ts2.index = [x.date() for x in ts2.index] - result = ts2.asfreq("4H", method="ffill") - expected = ts.asfreq("4H", method="ffill") + result = ts2.asfreq("4h", method="ffill") + expected = ts.asfreq("4h", method="ffill") tm.assert_equal(result, expected) def test_asfreq_with_unsorted_index(self, frame_or_series): diff --git a/pandas/tests/frame/methods/test_asof.py b/pandas/tests/frame/methods/test_asof.py index 5683ec60b0d88..4a8adf89b3aef 100644 --- a/pandas/tests/frame/methods/test_asof.py +++ b/pandas/tests/frame/methods/test_asof.py @@ -178,7 +178,7 @@ def test_is_copy(self, date_range_frame): def test_asof_periodindex_mismatched_freq(self): N = 50 - rng = period_range("1/1/1990", periods=N, freq="H") + rng = period_range("1/1/1990", periods=N, freq="h") df = DataFrame(np.random.default_rng(2).standard_normal(N), index=rng) # Mismatched freq diff --git a/pandas/tests/frame/methods/test_at_time.py b/pandas/tests/frame/methods/test_at_time.py index 67200396f6375..4c1434bd66aff 100644 --- a/pandas/tests/frame/methods/test_at_time.py +++ b/pandas/tests/frame/methods/test_at_time.py @@ -18,7 +18,7 @@ class TestAtTime: def test_localized_at_time(self, tzstr, frame_or_series): tz = timezones.maybe_get_tz(tzstr) - rng = date_range("4/16/2012", "5/1/2012", freq="H") + rng = date_range("4/16/2012", "5/1/2012", freq="h") ts = frame_or_series( np.random.default_rng(2).standard_normal(len(rng)), index=rng ) @@ -69,7 +69,7 @@ def test_at_time_nonexistent(self, frame_or_series): ) def test_at_time_errors(self, hour): # GH#24043 - dti = date_range("2018", periods=3, freq="H") + dti = date_range("2018", periods=3, freq="h") df = DataFrame(list(range(len(dti))), index=dti) if getattr(hour, "tzinfo", None) is None: result = df.at_time(hour) @@ -81,7 +81,7 @@ def test_at_time_errors(self, hour): def test_at_time_tz(self): # GH#24043 - dti = date_range("2018", periods=3, freq="H", tz="US/Pacific") + dti = date_range("2018", periods=3, freq="h", tz="US/Pacific") df = DataFrame(list(range(len(dti))), index=dti) result = df.at_time(time(4, tzinfo=pytz.timezone("US/Eastern"))) expected = df.iloc[1:2] diff --git a/pandas/tests/frame/methods/test_between_time.py b/pandas/tests/frame/methods/test_between_time.py index 4c1e009b04639..74d6291707e19 100644 --- a/pandas/tests/frame/methods/test_between_time.py +++ b/pandas/tests/frame/methods/test_between_time.py @@ -46,7 +46,7 @@ def test_between_time_formats(self, frame_or_series): def test_localized_between_time(self, tzstr, frame_or_series): tz = timezones.maybe_get_tz(tzstr) - rng = 
date_range("4/16/2012", "5/1/2012", freq="H") + rng = date_range("4/16/2012", "5/1/2012", freq="h") ts = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng) if frame_or_series is DataFrame: ts = ts.to_frame() diff --git a/pandas/tests/frame/methods/test_describe.py b/pandas/tests/frame/methods/test_describe.py index f56a7896c753e..5beb09940acf3 100644 --- a/pandas/tests/frame/methods/test_describe.py +++ b/pandas/tests/frame/methods/test_describe.py @@ -204,7 +204,7 @@ def test_describe_datetime_columns(self): def test_describe_timedelta_values(self): # GH#6145 t1 = pd.timedelta_range("1 days", freq="D", periods=5) - t2 = pd.timedelta_range("1 hours", freq="H", periods=5) + t2 = pd.timedelta_range("1 hours", freq="h", periods=5) df = DataFrame({"t1": t1, "t2": t2}) expected = DataFrame( diff --git a/pandas/tests/frame/methods/test_drop.py b/pandas/tests/frame/methods/test_drop.py index 9a4882f11e961..f72c0594fa1f7 100644 --- a/pandas/tests/frame/methods/test_drop.py +++ b/pandas/tests/frame/methods/test_drop.py @@ -57,7 +57,7 @@ def test_drop_with_non_unique_datetime_index_and_invalid_keys(): df = DataFrame( np.random.default_rng(2).standard_normal((5, 3)), columns=["a", "b", "c"], - index=pd.date_range("2012", freq="H", periods=5), + index=pd.date_range("2012", freq="h", periods=5), ) # create dataframe with non-unique datetime index df = df.iloc[[0, 2, 2, 3]].copy() diff --git a/pandas/tests/frame/methods/test_reindex.py b/pandas/tests/frame/methods/test_reindex.py index bba86b481eadc..fb6e08cd52d97 100644 --- a/pandas/tests/frame/methods/test_reindex.py +++ b/pandas/tests/frame/methods/test_reindex.py @@ -46,7 +46,7 @@ def test_dti_set_index_reindex_datetimeindex(self): def test_dti_set_index_reindex_freq_with_tz(self): # GH#11314 with tz index = date_range( - datetime(2015, 10, 1), datetime(2015, 10, 1, 23), freq="H", tz="US/Eastern" + datetime(2015, 10, 1), datetime(2015, 10, 1, 23), freq="h", tz="US/Eastern" ) df = DataFrame( np.random.default_rng(2).standard_normal((24, 1)), @@ -54,7 +54,7 @@ def test_dti_set_index_reindex_freq_with_tz(self): index=index, ) new_index = date_range( - datetime(2015, 10, 2), datetime(2015, 10, 2, 23), freq="H", tz="US/Eastern" + datetime(2015, 10, 2), datetime(2015, 10, 2, 23), freq="h", tz="US/Eastern" ) result = df.set_index(new_index) @@ -389,9 +389,9 @@ def test_reindex_frame_tz_ffill_bfill(self, frame_or_series, method, exp_values) # GH#38566 obj = frame_or_series( [0, 1, 2, 3], - index=date_range("2020-01-01 00:00:00", periods=4, freq="H", tz="UTC"), + index=date_range("2020-01-01 00:00:00", periods=4, freq="h", tz="UTC"), ) - new_index = date_range("2020-01-01 00:01:00", periods=4, freq="H", tz="UTC") + new_index = date_range("2020-01-01 00:01:00", periods=4, freq="h", tz="UTC") result = obj.reindex(new_index, method=method, tolerance=pd.Timedelta("1 hour")) expected = frame_or_series(exp_values, index=new_index) tm.assert_equal(result, expected) @@ -1067,7 +1067,7 @@ def test_reindex_multi_categorical_time(self): midx = MultiIndex.from_product( [ Categorical(["a", "b", "c"]), - Categorical(date_range("2012-01-01", periods=3, freq="H")), + Categorical(date_range("2012-01-01", periods=3, freq="h")), ] ) df = DataFrame({"a": range(len(midx))}, index=midx) diff --git a/pandas/tests/frame/methods/test_set_index.py b/pandas/tests/frame/methods/test_set_index.py index f755ef0c2763d..9b87ffb0241ef 100644 --- a/pandas/tests/frame/methods/test_set_index.py +++ b/pandas/tests/frame/methods/test_set_index.py @@ -99,7 +99,7 @@ def 
test_set_index_cast_datetimeindex(self): assert isinstance(idf.index, DatetimeIndex) def test_set_index_dst(self): - di = date_range("2006-10-29 00:00:00", periods=3, freq="H", tz="US/Pacific") + di = date_range("2006-10-29 00:00:00", periods=3, freq="h", tz="US/Pacific") df = DataFrame(data={"a": [0, 1, 2], "b": [3, 4, 5]}, index=di).reset_index() # single level @@ -491,7 +491,7 @@ def test_set_index_period(self): df = DataFrame(np.random.default_rng(2).random(6)) idx1 = period_range("2011-01-01", periods=3, freq="M") idx1 = idx1.append(idx1) - idx2 = period_range("2013-01-01 09:00", periods=2, freq="H") + idx2 = period_range("2013-01-01 09:00", periods=2, freq="h") idx2 = idx2.append(idx2).append(idx2) idx3 = period_range("2005", periods=6, freq="Y") @@ -500,7 +500,7 @@ def test_set_index_period(self): df = df.set_index(idx3, append=True) expected1 = period_range("2011-01-01", periods=3, freq="M") - expected2 = period_range("2013-01-01 09:00", periods=2, freq="H") + expected2 = period_range("2013-01-01 09:00", periods=2, freq="h") tm.assert_index_equal(df.index.levels[0], expected1) tm.assert_index_equal(df.index.levels[1], expected2) diff --git a/pandas/tests/frame/methods/test_shift.py b/pandas/tests/frame/methods/test_shift.py index 6bd441400dc54..201046ebafc35 100644 --- a/pandas/tests/frame/methods/test_shift.py +++ b/pandas/tests/frame/methods/test_shift.py @@ -37,7 +37,7 @@ def test_shift_deprecate_freq_and_fill_value(self, frame_or_series): # Can't pass both! obj = frame_or_series( np.random.default_rng(2).standard_normal(5), - index=date_range("1/1/2000", periods=5, freq="H"), + index=date_range("1/1/2000", periods=5, freq="h"), ) msg = ( @@ -45,12 +45,12 @@ def test_shift_deprecate_freq_and_fill_value(self, frame_or_series): "fill_value" ) with tm.assert_produces_warning(FutureWarning, match=msg): - obj.shift(1, fill_value=1, freq="H") + obj.shift(1, fill_value=1, freq="h") if frame_or_series is DataFrame: - obj.columns = date_range("1/1/2000", periods=1, freq="H") + obj.columns = date_range("1/1/2000", periods=1, freq="h") with tm.assert_produces_warning(FutureWarning, match=msg): - obj.shift(1, axis=1, fill_value=1, freq="H") + obj.shift(1, axis=1, fill_value=1, freq="h") @pytest.mark.parametrize( "input_data, output_data", @@ -77,7 +77,7 @@ def test_shift_non_writable_array(self, input_data, output_data, frame_or_series def test_shift_mismatched_freq(self, frame_or_series): ts = frame_or_series( np.random.default_rng(2).standard_normal(5), - index=date_range("1/1/2000", periods=5, freq="H"), + index=date_range("1/1/2000", periods=5, freq="h"), ) result = ts.shift(1, freq="5min") @@ -85,7 +85,7 @@ def test_shift_mismatched_freq(self, frame_or_series): tm.assert_index_equal(result.index, exp_index) # GH#1063, multiple of same base - result = ts.shift(1, freq="4H") + result = ts.shift(1, freq="4h") exp_index = ts.index + offsets.Hour(4) tm.assert_index_equal(result.index, exp_index) @@ -93,7 +93,7 @@ def test_shift_mismatched_freq(self, frame_or_series): "obj", [ Series([np.arange(5)]), - date_range("1/1/2011", periods=24, freq="H"), + date_range("1/1/2011", periods=24, freq="h"), Series(range(5), index=date_range("2017", periods=5)), ], ) @@ -145,20 +145,20 @@ def test_shift_preserve_freqstr(self, periods, frame_or_series): # GH#21275 obj = frame_or_series( range(periods), - index=date_range("2016-1-1 00:00:00", periods=periods, freq="H"), + index=date_range("2016-1-1 00:00:00", periods=periods, freq="h"), ) - result = obj.shift(1, "2H") + result = obj.shift(1, "2h") expected 
= frame_or_series( range(periods), - index=date_range("2016-1-1 02:00:00", periods=periods, freq="H"), + index=date_range("2016-1-1 02:00:00", periods=periods, freq="h"), ) tm.assert_equal(result, expected) def test_shift_dst(self, frame_or_series): # GH#13926 - dates = date_range("2016-11-06", freq="H", periods=10, tz="US/Eastern") + dates = date_range("2016-11-06", freq="h", periods=10, tz="US/Eastern") obj = frame_or_series(dates) res = obj.shift(0) @@ -180,7 +180,7 @@ def test_shift_dst(self, frame_or_series): @pytest.mark.parametrize("ex", [10, -10, 20, -20]) def test_shift_dst_beyond(self, frame_or_series, ex): # GH#13926 - dates = date_range("2016-11-06", freq="H", periods=10, tz="US/Eastern") + dates = date_range("2016-11-06", freq="h", periods=10, tz="US/Eastern") obj = frame_or_series(dates) res = obj.shift(ex) exp = frame_or_series([NaT] * 10, dtype="datetime64[ns, US/Eastern]") @@ -367,7 +367,7 @@ def test_shift_categorical_fill_value(self, frame_or_series): def test_shift_fill_value(self, frame_or_series): # GH#24128 - dti = date_range("1/1/2000", periods=5, freq="H") + dti = date_range("1/1/2000", periods=5, freq="h") ts = frame_or_series([1.0, 2.0, 3.0, 4.0, 5.0], index=dti) exp = frame_or_series([0.0, 1.0, 2.0, 3.0, 4.0], index=dti) @@ -707,7 +707,7 @@ def test_shift_with_iterable_freq_and_fill_value(self): # GH#44424 df = DataFrame( np.random.default_rng(2).standard_normal(5), - index=date_range("1/1/2000", periods=5, freq="H"), + index=date_range("1/1/2000", periods=5, freq="h"), ) tm.assert_frame_equal( @@ -717,8 +717,8 @@ def test_shift_with_iterable_freq_and_fill_value(self): ) tm.assert_frame_equal( - df.shift([1], freq="H").rename(columns=lambda x: int(x[0])), - df.shift(1, freq="H"), + df.shift([1], freq="h").rename(columns=lambda x: int(x[0])), + df.shift(1, freq="h"), ) msg = ( @@ -726,7 +726,7 @@ def test_shift_with_iterable_freq_and_fill_value(self): "fill_value" ) with tm.assert_produces_warning(FutureWarning, match=msg): - df.shift([1, 2], fill_value=1, freq="H") + df.shift([1, 2], fill_value=1, freq="h") def test_shift_with_iterable_check_other_arguments(self): # GH#44424 diff --git a/pandas/tests/frame/methods/test_to_csv.py b/pandas/tests/frame/methods/test_to_csv.py index 9f45347c31165..94c98ad477cc1 100644 --- a/pandas/tests/frame/methods/test_to_csv.py +++ b/pandas/tests/frame/methods/test_to_csv.py @@ -420,7 +420,7 @@ def test_to_csv_from_csv_w_some_infs(self, float_frame): # test roundtrip with inf, -inf, nan, as full columns and mix float_frame["G"] = np.nan f = lambda x: [np.inf, np.nan][np.random.default_rng(2).random() < 0.5] - float_frame["H"] = float_frame.index.map(f) + float_frame["h"] = float_frame.index.map(f) with tm.ensure_clean() as path: float_frame.to_csv(path) @@ -1077,7 +1077,7 @@ def test_to_csv_with_dst_transitions(self, td): "2013-10-26 23:00", "2013-10-27 01:00", tz="Europe/London", - freq="H", + freq="h", ambiguous="infer", ) i = times + td @@ -1095,7 +1095,7 @@ def test_to_csv_with_dst_transitions(self, td): def test_to_csv_with_dst_transitions_with_pickle(self): # GH11619 - idx = date_range("2015-01-01", "2015-12-31", freq="H", tz="Europe/Paris") + idx = date_range("2015-01-01", "2015-12-31", freq="h", tz="Europe/Paris") idx = idx._with_freq(None) # freq does not round-trip idx._data._freq = None # otherwise there is trouble on unpickle df = DataFrame({"values": 1, "idx": idx}, index=idx) diff --git a/pandas/tests/frame/methods/test_to_timestamp.py b/pandas/tests/frame/methods/test_to_timestamp.py index 
478708ce90488..aeb65d98d8ab2 100644 --- a/pandas/tests/frame/methods/test_to_timestamp.py +++ b/pandas/tests/frame/methods/test_to_timestamp.py @@ -132,12 +132,12 @@ def test_to_timestamp_invalid_axis(self): obj.to_timestamp(axis=2) def test_to_timestamp_hourly(self, frame_or_series): - index = period_range(freq="H", start="1/1/2001", end="1/2/2001") + index = period_range(freq="h", start="1/1/2001", end="1/2/2001") obj = Series(1, index=index, name="foo") if frame_or_series is not Series: obj = obj.to_frame() - exp_index = date_range("1/1/2001 00:59:59", end="1/2/2001 00:59:59", freq="H") + exp_index = date_range("1/1/2001 00:59:59", end="1/2/2001 00:59:59", freq="h") result = obj.to_timestamp(how="end") exp_index = exp_index + Timedelta(1, "s") - Timedelta(1, "ns") tm.assert_index_equal(result.index, exp_index) diff --git a/pandas/tests/frame/methods/test_transpose.py b/pandas/tests/frame/methods/test_transpose.py index 8ff6ea37eae18..50fc6fe6984e7 100644 --- a/pandas/tests/frame/methods/test_transpose.py +++ b/pandas/tests/frame/methods/test_transpose.py @@ -70,7 +70,7 @@ def test_transpose_tzaware_2col_mixed_tz(self): @pytest.mark.parametrize("tz", [None, "America/New_York"]) def test_transpose_preserves_dtindex_equality_with_dst(self, tz): # GH#19970 - idx = date_range("20161101", "20161130", freq="4H", tz=tz) + idx = date_range("20161101", "20161130", freq="4h", tz=tz) df = DataFrame({"a": range(len(idx)), "b": range(len(idx))}, index=idx) result = df.T == df.T expected = DataFrame(True, index=list("ab"), columns=idx) diff --git a/pandas/tests/frame/methods/test_tz_convert.py b/pandas/tests/frame/methods/test_tz_convert.py index 8a484abaab54c..bcb8e423980fd 100644 --- a/pandas/tests/frame/methods/test_tz_convert.py +++ b/pandas/tests/frame/methods/test_tz_convert.py @@ -120,7 +120,7 @@ def test_tz_convert_copy_inplace_mutate(self, copy, frame_or_series): # GH#6326 obj = frame_or_series( np.arange(0, 5), - index=date_range("20131027", periods=5, freq="1H", tz="Europe/Berlin"), + index=date_range("20131027", periods=5, freq="h", tz="Europe/Berlin"), ) orig = obj.copy() result = obj.tz_convert("UTC", copy=copy) diff --git a/pandas/tests/frame/methods/test_tz_localize.py b/pandas/tests/frame/methods/test_tz_localize.py index ed2b0b247e62c..b167afc17f484 100644 --- a/pandas/tests/frame/methods/test_tz_localize.py +++ b/pandas/tests/frame/methods/test_tz_localize.py @@ -16,7 +16,7 @@ class TestTZLocalize: # test_tz_convert_and_localize in test_tz_convert def test_tz_localize(self, frame_or_series): - rng = date_range("1/1/2011", periods=100, freq="H") + rng = date_range("1/1/2011", periods=100, freq="h") obj = DataFrame({"a": 1}, index=rng) obj = tm.get_obj(obj, frame_or_series) @@ -29,7 +29,7 @@ def test_tz_localize(self, frame_or_series): tm.assert_equal(result, expected) def test_tz_localize_axis1(self): - rng = date_range("1/1/2011", periods=100, freq="H") + rng = date_range("1/1/2011", periods=100, freq="h") df = DataFrame({"a": 1}, index=rng) @@ -43,7 +43,7 @@ def test_tz_localize_axis1(self): def test_tz_localize_naive(self, frame_or_series): # Can't localize if already tz-aware - rng = date_range("1/1/2011", periods=100, freq="H", tz="utc") + rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") ts = Series(1, index=rng) ts = frame_or_series(ts) @@ -54,13 +54,13 @@ def test_tz_localize_naive(self, frame_or_series): def test_tz_localize_copy_inplace_mutate(self, copy, frame_or_series): # GH#6326 obj = frame_or_series( - np.arange(0, 5), index=date_range("20131027", 
periods=5, freq="1H", tz=None) + np.arange(0, 5), index=date_range("20131027", periods=5, freq="1h", tz=None) ) orig = obj.copy() result = obj.tz_localize("UTC", copy=copy) expected = frame_or_series( np.arange(0, 5), - index=date_range("20131027", periods=5, freq="1H", tz="UTC"), + index=date_range("20131027", periods=5, freq="1h", tz="UTC"), ) tm.assert_equal(result, expected) tm.assert_equal(obj, orig) diff --git a/pandas/tests/frame/test_arithmetic.py b/pandas/tests/frame/test_arithmetic.py index 2c3b732fe7196..09a5cda4b3458 100644 --- a/pandas/tests/frame/test_arithmetic.py +++ b/pandas/tests/frame/test_arithmetic.py @@ -1223,7 +1223,7 @@ def test_frame_single_columns_object_sum_axis_1(): class TestFrameArithmeticUnsorted: def test_frame_add_tz_mismatch_converts_to_utc(self): - rng = pd.date_range("1/1/2011", periods=10, freq="H", tz="US/Eastern") + rng = pd.date_range("1/1/2011", periods=10, freq="h", tz="US/Eastern") df = DataFrame( np.random.default_rng(2).standard_normal(len(rng)), index=rng, columns=["a"] ) diff --git a/pandas/tests/frame/test_constructors.py b/pandas/tests/frame/test_constructors.py index 543a5be9544cc..3d8053703e906 100644 --- a/pandas/tests/frame/test_constructors.py +++ b/pandas/tests/frame/test_constructors.py @@ -2471,8 +2471,8 @@ def test_dataframe_constructor_infer_multiindex(self): [ ([1, 2]), (["1", "2"]), - (list(date_range("1/1/2011", periods=2, freq="H"))), - (list(date_range("1/1/2011", periods=2, freq="H", tz="US/Eastern"))), + (list(date_range("1/1/2011", periods=2, freq="h"))), + (list(date_range("1/1/2011", periods=2, freq="h", tz="US/Eastern"))), ([Interval(left=0, right=5)]), ], ) diff --git a/pandas/tests/frame/test_repr_info.py b/pandas/tests/frame/test_repr_info.py index 55c239f7284c1..0634b8268c04c 100644 --- a/pandas/tests/frame/test_repr_info.py +++ b/pandas/tests/frame/test_repr_info.py @@ -313,7 +313,7 @@ def test_latex_repr(self): def test_repr_categorical_dates_periods(self): # normal DataFrame - dt = date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + dt = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") p = period_range("2011-01", freq="M", periods=5) df = DataFrame({"dt": dt, "p": p}) exp = """ dt p diff --git a/pandas/tests/generic/test_finalize.py b/pandas/tests/generic/test_finalize.py index 1522b83a4f5d0..0f7ae998a4b2b 100644 --- a/pandas/tests/generic/test_finalize.py +++ b/pandas/tests/generic/test_finalize.py @@ -281,12 +281,12 @@ ( pd.Series, (1, pd.date_range("2000", periods=4)), - operator.methodcaller("asfreq", "H"), + operator.methodcaller("asfreq", "h"), ), ( pd.DataFrame, ({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)), - operator.methodcaller("asfreq", "H"), + operator.methodcaller("asfreq", "h"), ), ( pd.Series, @@ -628,9 +628,9 @@ def test_string_method(method): operator.methodcaller("tz_localize", "CET"), operator.methodcaller("normalize"), operator.methodcaller("strftime", "%Y"), - operator.methodcaller("round", "H"), - operator.methodcaller("floor", "H"), - operator.methodcaller("ceil", "H"), + operator.methodcaller("round", "h"), + operator.methodcaller("floor", "h"), + operator.methodcaller("ceil", "h"), operator.methodcaller("month_name"), operator.methodcaller("day_name"), ], diff --git a/pandas/tests/groupby/aggregate/test_other.py b/pandas/tests/groupby/aggregate/test_other.py index 7ea107f254104..398e9b09693e6 100644 --- a/pandas/tests/groupby/aggregate/test_other.py +++ b/pandas/tests/groupby/aggregate/test_other.py @@ -106,7 +106,7 @@ def 
test_agg_dict_parameter_cast_result_dtypes(): df = DataFrame( { "class": ["A", "A", "B", "B", "C", "C", "D", "D"], - "time": date_range("1/1/2011", periods=8, freq="H"), + "time": date_range("1/1/2011", periods=8, freq="h"), } ) df.loc[[0, 1, 2, 5], "time"] = None diff --git a/pandas/tests/groupby/test_apply.py b/pandas/tests/groupby/test_apply.py index abcb9f68e0f5c..5331b2e2c5d81 100644 --- a/pandas/tests/groupby/test_apply.py +++ b/pandas/tests/groupby/test_apply.py @@ -1306,7 +1306,7 @@ def test_positional_slice_groups_datetimelike(): # GH 21651 expected = DataFrame( { - "date": pd.date_range("2010-01-01", freq="12H", periods=5), + "date": pd.date_range("2010-01-01", freq="12h", periods=5), "vals": range(5), "let": list("abcde"), } diff --git a/pandas/tests/groupby/test_groupby_shift_diff.py b/pandas/tests/groupby/test_groupby_shift_diff.py index bb4b9aa866ac9..f2d40867af03a 100644 --- a/pandas/tests/groupby/test_groupby_shift_diff.py +++ b/pandas/tests/groupby/test_groupby_shift_diff.py @@ -205,11 +205,11 @@ def test_group_shift_with_multiple_periods_and_freq(): # GH#44424 df = DataFrame( {"a": [1, 2, 3, 4, 5], "b": [True, True, False, False, True]}, - index=date_range("1/1/2000", periods=5, freq="H"), + index=date_range("1/1/2000", periods=5, freq="h"), ) shifted_df = df.groupby("b")[["a"]].shift( [0, 1], - freq="H", + freq="h", ) expected_df = DataFrame( { @@ -223,7 +223,7 @@ def test_group_shift_with_multiple_periods_and_freq(): 5.0, ], }, - index=date_range("1/1/2000", periods=6, freq="H"), + index=date_range("1/1/2000", periods=6, freq="h"), ) tm.assert_frame_equal(shifted_df, expected_df) @@ -244,11 +244,11 @@ def test_group_shift_with_multiple_periods_and_both_fill_and_freq_deprecated(): # GH#44424 df = DataFrame( {"a": [1, 2, 3, 4, 5], "b": [True, True, False, False, True]}, - index=date_range("1/1/2000", periods=5, freq="H"), + index=date_range("1/1/2000", periods=5, freq="h"), ) msg = ( "Passing a 'freq' together with a 'fill_value' silently ignores the " "fill_value" ) with tm.assert_produces_warning(FutureWarning, match=msg): - df.groupby("b")[["a"]].shift([1, 2], fill_value=1, freq="H") + df.groupby("b")[["a"]].shift([1, 2], fill_value=1, freq="h") diff --git a/pandas/tests/groupby/test_grouping.py b/pandas/tests/groupby/test_grouping.py index 5adf9ace255ea..88ee8a35e5c94 100644 --- a/pandas/tests/groupby/test_grouping.py +++ b/pandas/tests/groupby/test_grouping.py @@ -475,7 +475,7 @@ def test_groupby_with_datetime_key(self): df = DataFrame( { "id": ["a", "b"] * 3, - "b": date_range("2000-01-01", "2000-01-03", freq="9H"), + "b": date_range("2000-01-01", "2000-01-03", freq="9h"), } ) grouper = Grouper(key="b", freq="D") diff --git a/pandas/tests/groupby/test_quantile.py b/pandas/tests/groupby/test_quantile.py index 805fef2125fda..4e7c09b70feb0 100644 --- a/pandas/tests/groupby/test_quantile.py +++ b/pandas/tests/groupby/test_quantile.py @@ -421,7 +421,7 @@ def test_timestamp_groupby_quantile(): { "timestamp": pd.date_range( start="2020-04-19 00:00:00", freq="1min", periods=100, tz="UTC" - ).floor("1H"), + ).floor("1h"), "category": list(range(1, 101)), "value": list(range(101, 201)), } diff --git a/pandas/tests/groupby/test_timegrouper.py b/pandas/tests/groupby/test_timegrouper.py index a3dc9e3087c7b..31629ba697e33 100644 --- a/pandas/tests/groupby/test_timegrouper.py +++ b/pandas/tests/groupby/test_timegrouper.py @@ -655,7 +655,7 @@ def test_groupby_groups_periods(self): df = DataFrame( { "label": ["a", "a", "a", "b", "b", "b"], - "period": [pd.Period(d, freq="H") for d 
in dates], + "period": [pd.Period(d, freq="h") for d in dates], "value1": np.arange(6, dtype="int64"), "value2": [1, 2] * 3, } @@ -670,7 +670,7 @@ def test_groupby_groups_periods(self): "2011-07-19 09:00:00", "2011-07-19 09:00:00", ], - freq="H", + freq="h", name="period", ) exp_idx2 = Index(["a", "b"] * 3, name="label") @@ -685,7 +685,7 @@ def test_groupby_groups_periods(self): tm.assert_frame_equal(result, expected) # by level - didx = pd.PeriodIndex(dates, freq="H") + didx = pd.PeriodIndex(dates, freq="h") df = DataFrame( {"value1": np.arange(6, dtype="int64"), "value2": [1, 2, 3, 1, 2, 3]}, index=didx, @@ -693,7 +693,7 @@ def test_groupby_groups_periods(self): exp_idx = pd.PeriodIndex( ["2011-07-19 07:00:00", "2011-07-19 08:00:00", "2011-07-19 09:00:00"], - freq="H", + freq="h", ) expected = DataFrame( {"value1": [3, 5, 7], "value2": [2, 4, 6]}, diff --git a/pandas/tests/indexes/conftest.py b/pandas/tests/indexes/conftest.py index fe397e2c7c88e..808a1687390ff 100644 --- a/pandas/tests/indexes/conftest.py +++ b/pandas/tests/indexes/conftest.py @@ -25,7 +25,7 @@ def sort(request): return request.param -@pytest.fixture(params=["D", "3D", "-3D", "H", "2H", "-2H", "min", "2min", "s", "-3s"]) +@pytest.fixture(params=["D", "3D", "-3D", "h", "2h", "-2h", "min", "2min", "s", "-3s"]) def freq_sample(request): """ Valid values for 'freq' parameter used to create date_range and diff --git a/pandas/tests/indexes/datetimelike_/test_drop_duplicates.py b/pandas/tests/indexes/datetimelike_/test_drop_duplicates.py index c38e24232f181..61a79c4ceabf9 100644 --- a/pandas/tests/indexes/datetimelike_/test_drop_duplicates.py +++ b/pandas/tests/indexes/datetimelike_/test_drop_duplicates.py @@ -68,7 +68,7 @@ def test_drop_duplicates(self, keep, expected, index, idx): class TestDropDuplicatesPeriodIndex(DropDuplicates): - @pytest.fixture(params=["D", "3D", "H", "2H", "min", "2min", "s", "3s"]) + @pytest.fixture(params=["D", "3D", "h", "2h", "min", "2min", "s", "3s"]) def freq(self, request): return request.param diff --git a/pandas/tests/indexes/datetimelike_/test_equals.py b/pandas/tests/indexes/datetimelike_/test_equals.py index d85d7103fe381..7845d99614d34 100644 --- a/pandas/tests/indexes/datetimelike_/test_equals.py +++ b/pandas/tests/indexes/datetimelike_/test_equals.py @@ -65,7 +65,7 @@ def test_equals2(self, freq): assert not idx.equals(list(idx)) assert not idx.equals(pd.Series(idx)) - idx2 = PeriodIndex(["2011-01-01", "2011-01-02", "NaT"], freq="H") + idx2 = PeriodIndex(["2011-01-01", "2011-01-02", "NaT"], freq="h") assert not idx.equals(idx2) assert not idx.equals(idx2.copy()) assert not idx.equals(idx2.astype(object)) @@ -75,7 +75,7 @@ def test_equals2(self, freq): # same internal, different tz idx3 = PeriodIndex._simple_new( - idx._values._simple_new(idx._values.asi8, dtype=pd.PeriodDtype("H")) + idx._values._simple_new(idx._values.asi8, dtype=pd.PeriodDtype("h")) ) tm.assert_numpy_array_equal(idx.asi8, idx3.asi8) assert not idx.equals(idx3) diff --git a/pandas/tests/indexes/datetimelike_/test_is_monotonic.py b/pandas/tests/indexes/datetimelike_/test_is_monotonic.py index 088ccc406cb81..b0e42e660b751 100644 --- a/pandas/tests/indexes/datetimelike_/test_is_monotonic.py +++ b/pandas/tests/indexes/datetimelike_/test_is_monotonic.py @@ -34,7 +34,7 @@ def test_is_monotonic_with_nat(): assert obj.is_unique dti2 = dti.insert(3, NaT) - pi2 = dti2.to_period("H") + pi2 = dti2.to_period("h") tdi2 = Index(dti2.view("timedelta64[ns]")) for obj in [pi2, pi2._engine, dti2, dti2._engine, tdi2, tdi2._engine]: diff 
--git a/pandas/tests/indexes/datetimelike_/test_sort_values.py b/pandas/tests/indexes/datetimelike_/test_sort_values.py index cf919bfa29d10..a2c349c8b0ef6 100644 --- a/pandas/tests/indexes/datetimelike_/test_sort_values.py +++ b/pandas/tests/indexes/datetimelike_/test_sort_values.py @@ -92,7 +92,7 @@ def check_sort_values_with_freq(self, idx): tm.assert_numpy_array_equal(indexer, np.array([2, 1, 0], dtype=np.intp)) check_freq_ascending(ordered, idx, False) - @pytest.mark.parametrize("freq", ["D", "H"]) + @pytest.mark.parametrize("freq", ["D", "h"]) def test_sort_values_with_freq_timedeltaindex(self, freq): # GH#10295 idx = timedelta_range(start=f"1{freq}", periods=3, freq=freq).rename("idx") @@ -107,7 +107,7 @@ def test_sort_values_with_freq_timedeltaindex(self, freq): ), DatetimeIndex( ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], - freq="H", + freq="h", name="tzidx", tz="Asia/Tokyo", ), diff --git a/pandas/tests/indexes/datetimelike_/test_value_counts.py b/pandas/tests/indexes/datetimelike_/test_value_counts.py index a0f05a1a35d79..069e354a364c9 100644 --- a/pandas/tests/indexes/datetimelike_/test_value_counts.py +++ b/pandas/tests/indexes/datetimelike_/test_value_counts.py @@ -18,15 +18,15 @@ class TestValueCounts: def test_value_counts_unique_datetimeindex(self, tz_naive_fixture): tz = tz_naive_fixture - orig = date_range("2011-01-01 09:00", freq="H", periods=10, tz=tz) + orig = date_range("2011-01-01 09:00", freq="h", periods=10, tz=tz) self._check_value_counts_with_repeats(orig) def test_value_counts_unique_timedeltaindex(self): - orig = timedelta_range("1 days 09:00:00", freq="H", periods=10) + orig = timedelta_range("1 days 09:00:00", freq="h", periods=10) self._check_value_counts_with_repeats(orig) def test_value_counts_unique_periodindex(self): - orig = period_range("2011-01-01 09:00", freq="H", periods=10) + orig = period_range("2011-01-01 09:00", freq="h", periods=10) self._check_value_counts_with_repeats(orig) def _check_value_counts_with_repeats(self, orig): @@ -83,7 +83,7 @@ def test_value_counts_unique_periodindex2(self): "2013-01-01 08:00", NaT, ], - freq="H", + freq="h", ) self._check_value_counts_dropna(idx) diff --git a/pandas/tests/indexes/datetimes/methods/test_astype.py b/pandas/tests/indexes/datetimes/methods/test_astype.py index 08a2473f22556..94390acb35624 100644 --- a/pandas/tests/indexes/datetimes/methods/test_astype.py +++ b/pandas/tests/indexes/datetimes/methods/test_astype.py @@ -117,7 +117,7 @@ def test_astype_str_tz_and_name(self): def test_astype_str_freq_and_name(self): # test astype string with freqH and name - dti = date_range("1/1/2011", periods=3, freq="H", name="test_name") + dti = date_range("1/1/2011", periods=3, freq="h", name="test_name") result = dti.astype(str) expected = Index( ["2011-01-01 00:00:00", "2011-01-01 01:00:00", "2011-01-01 02:00:00"], @@ -129,7 +129,7 @@ def test_astype_str_freq_and_name(self): def test_astype_str_freq_and_tz(self): # test astype string with freqH and timezone dti = date_range( - "3/6/2012 00:00", periods=2, freq="H", tz="Europe/London", name="test_name" + "3/6/2012 00:00", periods=2, freq="h", tz="Europe/London", name="test_name" ) result = dti.astype(str) expected = Index( diff --git a/pandas/tests/indexes/datetimes/methods/test_factorize.py b/pandas/tests/indexes/datetimes/methods/test_factorize.py index 99a3bc910b9ca..41ecf9ee6b823 100644 --- a/pandas/tests/indexes/datetimes/methods/test_factorize.py +++ b/pandas/tests/indexes/datetimes/methods/test_factorize.py @@ -74,7 +74,7 @@ def 
test_factorize_preserves_freq(self): def test_factorize_tz(self, tz_naive_fixture, index_or_series): tz = tz_naive_fixture # GH#13750 - base = date_range("2016-11-05", freq="H", periods=100, tz=tz) + base = date_range("2016-11-05", freq="h", periods=100, tz=tz) idx = base.repeat(5) exp_arr = np.arange(100, dtype=np.intp).repeat(5) @@ -89,7 +89,7 @@ def test_factorize_tz(self, tz_naive_fixture, index_or_series): def test_factorize_dst(self, index_or_series): # GH#13750 - idx = date_range("2016-11-06", freq="H", periods=12, tz="US/Eastern") + idx = date_range("2016-11-06", freq="h", periods=12, tz="US/Eastern") obj = index_or_series(idx) arr, res = obj.factorize() @@ -98,7 +98,7 @@ def test_factorize_dst(self, index_or_series): if index_or_series is Index: assert res.freq == idx.freq - idx = date_range("2016-06-13", freq="H", periods=12, tz="US/Eastern") + idx = date_range("2016-06-13", freq="h", periods=12, tz="US/Eastern") obj = index_or_series(idx) arr, res = obj.factorize() @@ -112,7 +112,7 @@ def test_factorize_no_freq_non_nano(self, tz_naive_fixture, sort): # GH#51978 case that does not go through the fastpath based on # non-None freq tz = tz_naive_fixture - idx = date_range("2016-11-06", freq="H", periods=5, tz=tz)[[0, 4, 1, 3, 2]] + idx = date_range("2016-11-06", freq="h", periods=5, tz=tz)[[0, 4, 1, 3, 2]] exp_codes, exp_uniques = idx.factorize(sort=sort) res_codes, res_uniques = idx.as_unit("s").factorize(sort=sort) diff --git a/pandas/tests/indexes/datetimes/methods/test_insert.py b/pandas/tests/indexes/datetimes/methods/test_insert.py index 9ef43ace747e2..4ef162913b622 100644 --- a/pandas/tests/indexes/datetimes/methods/test_insert.py +++ b/pandas/tests/indexes/datetimes/methods/test_insert.py @@ -128,10 +128,10 @@ def test_insert(self): assert result.freq is None for tz in ["US/Pacific", "Asia/Singapore"]: - idx = date_range("1/1/2000 09:00", periods=6, freq="H", tz=tz, name="idx") + idx = date_range("1/1/2000 09:00", periods=6, freq="h", tz=tz, name="idx") # preserve freq expected = date_range( - "1/1/2000 09:00", periods=7, freq="H", tz=tz, name="idx" + "1/1/2000 09:00", periods=7, freq="h", tz=tz, name="idx" ) for d in [ Timestamp("2000-01-01 15:00", tz=tz), diff --git a/pandas/tests/indexes/datetimes/methods/test_shift.py b/pandas/tests/indexes/datetimes/methods/test_shift.py index e8661fafc3bb7..064f664a4de10 100644 --- a/pandas/tests/indexes/datetimes/methods/test_shift.py +++ b/pandas/tests/indexes/datetimes/methods/test_shift.py @@ -24,30 +24,30 @@ def test_dti_shift_tzaware(self, tz_naive_fixture): # GH#9903 tz = tz_naive_fixture idx = DatetimeIndex([], name="xxx", tz=tz) - tm.assert_index_equal(idx.shift(0, freq="H"), idx) - tm.assert_index_equal(idx.shift(3, freq="H"), idx) + tm.assert_index_equal(idx.shift(0, freq="h"), idx) + tm.assert_index_equal(idx.shift(3, freq="h"), idx) idx = DatetimeIndex( ["2011-01-01 10:00", "2011-01-01 11:00", "2011-01-01 12:00"], name="xxx", tz=tz, - freq="H", + freq="h", ) - tm.assert_index_equal(idx.shift(0, freq="H"), idx) + tm.assert_index_equal(idx.shift(0, freq="h"), idx) exp = DatetimeIndex( ["2011-01-01 13:00", "2011-01-01 14:00", "2011-01-01 15:00"], name="xxx", tz=tz, - freq="H", + freq="h", ) - tm.assert_index_equal(idx.shift(3, freq="H"), exp) + tm.assert_index_equal(idx.shift(3, freq="h"), exp) exp = DatetimeIndex( ["2011-01-01 07:00", "2011-01-01 08:00", "2011-01-01 09:00"], name="xxx", tz=tz, - freq="H", + freq="h", ) - tm.assert_index_equal(idx.shift(-3, freq="H"), exp) + tm.assert_index_equal(idx.shift(-3, freq="h"), exp) 
def test_dti_shift_freqs(self): # test shift for DatetimeIndex and non DatetimeIndex @@ -101,9 +101,9 @@ def test_dti_shift_localized(self, tzstr): def test_dti_shift_across_dst(self): # GH 8616 - idx = date_range("2013-11-03", tz="America/Chicago", periods=7, freq="H") + idx = date_range("2013-11-03", tz="America/Chicago", periods=7, freq="h") s = Series(index=idx[:-1], dtype=object) - result = s.shift(freq="H") + result = s.shift(freq="h") expected = Series(index=idx[1:], dtype=object) tm.assert_series_equal(result, expected) @@ -120,7 +120,7 @@ def test_dti_shift_near_midnight(self, shift, result_time): dt = datetime(2014, 11, 14, 0) dt_est = pytz.timezone("EST").localize(dt) s = Series(data=[1], index=[dt_est]) - result = s.shift(shift, freq="H") + result = s.shift(shift, freq="h") expected = Series(1, index=DatetimeIndex([result_time], tz="EST")) tm.assert_series_equal(result, expected) diff --git a/pandas/tests/indexes/datetimes/methods/test_to_period.py b/pandas/tests/indexes/datetimes/methods/test_to_period.py index 7712a4166329c..8900c5cdbca14 100644 --- a/pandas/tests/indexes/datetimes/methods/test_to_period.py +++ b/pandas/tests/indexes/datetimes/methods/test_to_period.py @@ -112,7 +112,7 @@ def test_period_dt64_round_trip(self): tm.assert_index_equal(pi.to_timestamp(), dti) dti = date_range("1/1/2000", "1/7/2002", freq="B") - pi = dti.to_period(freq="H") + pi = dti.to_period(freq="h") tm.assert_index_equal(pi.to_timestamp(), dti) def test_to_period_millisecond(self): diff --git a/pandas/tests/indexes/datetimes/test_constructors.py b/pandas/tests/indexes/datetimes/test_constructors.py index 7dee58e63fa88..6da215715482d 100644 --- a/pandas/tests/indexes/datetimes/test_constructors.py +++ b/pandas/tests/indexes/datetimes/test_constructors.py @@ -185,7 +185,7 @@ def test_construction_caching(self): ) def test_construction_with_alt(self, kwargs, tz_aware_fixture): tz = tz_aware_fixture - i = date_range("20130101", periods=5, freq="H", tz=tz) + i = date_range("20130101", periods=5, freq="h", tz=tz) kwargs = {key: attrgetter(val)(i) for key, val in kwargs.items()} result = DatetimeIndex(i, **kwargs) tm.assert_index_equal(i, result) @@ -196,7 +196,7 @@ def test_construction_with_alt(self, kwargs, tz_aware_fixture): ) def test_construction_with_alt_tz_localize(self, kwargs, tz_aware_fixture): tz = tz_aware_fixture - i = date_range("20130101", periods=5, freq="H", tz=tz) + i = date_range("20130101", periods=5, freq="h", tz=tz) i = i._with_freq(None) kwargs = {key: attrgetter(val)(i) for key, val in kwargs.items()} @@ -902,7 +902,7 @@ def test_constructor_with_nonexistent_keyword_arg(self, warsaw): start = Timestamp("2015-03-29 02:30:00").tz_localize( timezone, nonexistent="shift_forward" ) - result = date_range(start=start, periods=2, freq="H") + result = date_range(start=start, periods=2, freq="h") expected = DatetimeIndex( [ Timestamp("2015-03-29 03:00:00+02:00", tz=timezone), @@ -916,7 +916,7 @@ def test_constructor_with_nonexistent_keyword_arg(self, warsaw): end = Timestamp("2015-03-29 02:30:00").tz_localize( timezone, nonexistent="shift_forward" ) - result = date_range(end=end, periods=2, freq="H") + result = date_range(end=end, periods=2, freq="h") expected = DatetimeIndex( [ Timestamp("2015-03-29 01:00:00+01:00", tz=timezone), @@ -1036,7 +1036,7 @@ def test_constructor_int64_nocopy(self): assert (index.asi8[50:100] != -1).all() @pytest.mark.parametrize( - "freq", ["ME", "Q", "Y", "D", "B", "BH", "min", "s", "ms", "us", "H", "ns", "C"] + "freq", ["ME", "Q", "Y", "D", "B", "bh", 
"min", "s", "ms", "us", "h", "ns", "C"] ) def test_from_freq_recreate_from_data(self, freq): org = date_range(start="2001/02/01 09:00", freq=freq, periods=1) diff --git a/pandas/tests/indexes/datetimes/test_date_range.py b/pandas/tests/indexes/datetimes/test_date_range.py index f0996d7af917d..ededf78621699 100644 --- a/pandas/tests/indexes/datetimes/test_date_range.py +++ b/pandas/tests/indexes/datetimes/test_date_range.py @@ -124,7 +124,7 @@ def test_date_range_timestamp_equiv_preserve_frequency(self): class TestDateRanges: - @pytest.mark.parametrize("freq", ["ns", "us", "ms", "min", "s", "H", "D"]) + @pytest.mark.parametrize("freq", ["ns", "us", "ms", "min", "s", "h", "D"]) def test_date_range_edges(self, freq): # GH#13672 td = Timedelta(f"1{freq}") @@ -206,11 +206,11 @@ def test_date_range_int64_overflow_non_recoverable(self): # case with start later than 1970-01-01, overflow int64 but not uint64 msg = "Cannot generate range with" with pytest.raises(OutOfBoundsDatetime, match=msg): - date_range(start="1970-02-01", periods=106752 * 24, freq="H") + date_range(start="1970-02-01", periods=106752 * 24, freq="h") # case with end before 1970-01-01, overflow int64 but not uint64 with pytest.raises(OutOfBoundsDatetime, match=msg): - date_range(end="1969-11-14", periods=106752 * 24, freq="H") + date_range(end="1969-11-14", periods=106752 * 24, freq="h") @pytest.mark.slow @pytest.mark.parametrize( @@ -224,11 +224,11 @@ def test_date_range_int64_overflow_stride_endpoint_different_signs( start = Timestamp(s_ts) end = Timestamp(e_ts) - expected = date_range(start=start, end=end, freq="-1H") + expected = date_range(start=start, end=end, freq="-1h") assert expected[0] == start assert expected[-1] == end - dti = date_range(end=end, periods=len(expected), freq="-1H") + dti = date_range(end=end, periods=len(expected), freq="-1h") tm.assert_index_equal(dti, expected) def test_date_range_out_of_bounds(self): @@ -416,13 +416,13 @@ def test_date_range_businesshour(self): "2014-07-04 15:00", "2014-07-04 16:00", ], - freq="BH", + freq="bh", ) - rng = date_range("2014-07-04 09:00", "2014-07-04 16:00", freq="BH") + rng = date_range("2014-07-04 09:00", "2014-07-04 16:00", freq="bh") tm.assert_index_equal(idx, rng) - idx = DatetimeIndex(["2014-07-04 16:00", "2014-07-07 09:00"], freq="BH") - rng = date_range("2014-07-04 16:00", "2014-07-07 09:00", freq="BH") + idx = DatetimeIndex(["2014-07-04 16:00", "2014-07-07 09:00"], freq="bh") + rng = date_range("2014-07-04 16:00", "2014-07-07 09:00", freq="bh") tm.assert_index_equal(idx, rng) idx = DatetimeIndex( @@ -452,9 +452,9 @@ def test_date_range_businesshour(self): "2014-07-08 15:00", "2014-07-08 16:00", ], - freq="BH", + freq="bh", ) - rng = date_range("2014-07-04 09:00", "2014-07-08 16:00", freq="BH") + rng = date_range("2014-07-04 09:00", "2014-07-08 16:00", freq="bh") tm.assert_index_equal(idx, rng) def test_date_range_timedelta(self): @@ -481,13 +481,13 @@ def test_range_misspecified(self): date_range(periods=10) with pytest.raises(ValueError, match=msg): - date_range(start="1/1/2000", freq="H") + date_range(start="1/1/2000", freq="h") with pytest.raises(ValueError, match=msg): - date_range(end="1/1/2000", freq="H") + date_range(end="1/1/2000", freq="h") with pytest.raises(ValueError, match=msg): - date_range(periods=10, freq="H") + date_range(periods=10, freq="h") with pytest.raises(ValueError, match=msg): date_range() @@ -524,14 +524,14 @@ def test_construct_over_dst(self): pre_dst, pst_dst, ] - expected = DatetimeIndex(expect_data, freq="H") - result = 
date_range(start="2010-11-7", periods=3, freq="H", tz="US/Pacific") + expected = DatetimeIndex(expect_data, freq="h") + result = date_range(start="2010-11-7", periods=3, freq="h", tz="US/Pacific") tm.assert_index_equal(result, expected) def test_construct_with_different_start_end_string_format(self): # GH 12064 result = date_range( - "2013-01-01 00:00:00+09:00", "2013/01/01 02:00:00+09:00", freq="H" + "2013-01-01 00:00:00+09:00", "2013/01/01 02:00:00+09:00", freq="h" ) expected = DatetimeIndex( [ @@ -539,7 +539,7 @@ def test_construct_with_different_start_end_string_format(self): Timestamp("2013-01-01 01:00:00+09:00"), Timestamp("2013-01-01 02:00:00+09:00"), ], - freq="H", + freq="h", ) tm.assert_index_equal(result, expected) @@ -638,7 +638,7 @@ def test_range_tz_dateutil(self): assert dr[0] == start assert dr[2] == end - @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3H", "Y"]) + @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3h", "Y"]) def test_range_closed(self, freq, inclusive_endpoints_fixture): begin = datetime(2011, 1, 1) end = datetime(2014, 1, 1) @@ -653,7 +653,7 @@ def test_range_closed(self, freq, inclusive_endpoints_fixture): tm.assert_index_equal(expected_range, result_range) - @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3H", "Y"]) + @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3h", "Y"]) def test_range_closed_with_tz_aware_start_end( self, freq, inclusive_endpoints_fixture ): @@ -674,7 +674,7 @@ def test_range_closed_with_tz_aware_start_end( tm.assert_index_equal(expected_range, result_range) - @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3H", "Y"]) + @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3h", "Y"]) def test_range_with_tz_closed_with_tz_aware_start_end( self, freq, inclusive_endpoints_fixture ): @@ -841,6 +841,7 @@ def test_freq_dateoffset_with_relateivedelta_nanos(self): [ ("2Y", "2A"), ("200Y-MAY", "200A-MAY"), + ("h", "H"), ("2min", "2T"), ("1s", "1S"), ("2ms", "2L"), @@ -908,7 +909,7 @@ def test_date_range_with_tz(self, tzstr): stamp = Timestamp("3/11/2012 05:00", tz=tzstr) assert stamp.hour == 5 - rng = date_range("3/11/2012 04:00", periods=10, freq="H", tz=tzstr) + rng = date_range("3/11/2012 04:00", periods=10, freq="h", tz=tzstr) assert stamp == rng[1] diff --git a/pandas/tests/indexes/datetimes/test_datetime.py b/pandas/tests/indexes/datetimes/test_datetime.py index d08757a206e67..156075e3fafec 100644 --- a/pandas/tests/indexes/datetimes/test_datetime.py +++ b/pandas/tests/indexes/datetimes/test_datetime.py @@ -1,3 +1,4 @@ +import datetime as dt from datetime import date import dateutil @@ -96,7 +97,7 @@ def test_append_nondatetimeindex(self): def test_iteration_preserves_tz(self): # see gh-8890 - index = date_range("2012-01-01", periods=3, freq="H", tz="US/Eastern") + index = date_range("2012-01-01", periods=3, freq="h", tz="US/Eastern") for i, ts in enumerate(index): result = ts @@ -104,7 +105,7 @@ def test_iteration_preserves_tz(self): assert result == expected index = date_range( - "2012-01-01", periods=3, freq="H", tz=dateutil.tz.tzoffset(None, -28800) + "2012-01-01", periods=3, freq="h", tz=dateutil.tz.tzoffset(None, -28800) ) for i, ts in enumerate(index): @@ -201,3 +202,27 @@ def test_asarray_tz_aware(self): result = np.asarray(idx, dtype=object) tm.assert_numpy_array_equal(result, expected) + + def test_CBH_deprecated(self): + msg = "'CBH' is deprecated and will be removed in a future version." 
+ + with tm.assert_produces_warning(FutureWarning, match=msg): + expected = date_range( + dt.datetime(2022, 12, 11), dt.datetime(2022, 12, 13), freq="CBH" + ) + result = DatetimeIndex( + [ + "2022-12-12 09:00:00", + "2022-12-12 10:00:00", + "2022-12-12 11:00:00", + "2022-12-12 12:00:00", + "2022-12-12 13:00:00", + "2022-12-12 14:00:00", + "2022-12-12 15:00:00", + "2022-12-12 16:00:00", + ], + dtype="datetime64[ns]", + freq="cbh", + ) + + tm.assert_index_equal(result, expected) diff --git a/pandas/tests/indexes/datetimes/test_delete.py b/pandas/tests/indexes/datetimes/test_delete.py index 3565e516e69d5..90dfdb46cdfa5 100644 --- a/pandas/tests/indexes/datetimes/test_delete.py +++ b/pandas/tests/indexes/datetimes/test_delete.py @@ -42,25 +42,25 @@ def test_delete(self): for tz in [None, "Asia/Tokyo", "US/Pacific"]: idx = date_range( - start="2000-01-01 09:00", periods=10, freq="H", name="idx", tz=tz + start="2000-01-01 09:00", periods=10, freq="h", name="idx", tz=tz ) expected = date_range( - start="2000-01-01 10:00", periods=9, freq="H", name="idx", tz=tz + start="2000-01-01 10:00", periods=9, freq="h", name="idx", tz=tz ) result = idx.delete(0) tm.assert_index_equal(result, expected) assert result.name == expected.name - assert result.freqstr == "H" + assert result.freqstr == "h" assert result.tz == expected.tz expected = date_range( - start="2000-01-01 09:00", periods=9, freq="H", name="idx", tz=tz + start="2000-01-01 09:00", periods=9, freq="h", name="idx", tz=tz ) result = idx.delete(-1) tm.assert_index_equal(result, expected) assert result.name == expected.name - assert result.freqstr == "H" + assert result.freqstr == "h" assert result.tz == expected.tz def test_delete_slice(self): @@ -105,13 +105,13 @@ def test_delete_slice(self): ts = Series( 1, index=date_range( - "2000-01-01 09:00", periods=10, freq="H", name="idx", tz=tz + "2000-01-01 09:00", periods=10, freq="h", name="idx", tz=tz ), ) # preserve freq result = ts.drop(ts.index[:5]).index expected = date_range( - "2000-01-01 14:00", periods=5, freq="H", name="idx", tz=tz + "2000-01-01 14:00", periods=5, freq="h", name="idx", tz=tz ) tm.assert_index_equal(result, expected) assert result.name == expected.name diff --git a/pandas/tests/indexes/datetimes/test_formats.py b/pandas/tests/indexes/datetimes/test_formats.py index 502cb0407bfcd..9fb5db9e034ee 100644 --- a/pandas/tests/indexes/datetimes/test_formats.py +++ b/pandas/tests/indexes/datetimes/test_formats.py @@ -87,8 +87,8 @@ def test_dti_repr_short(self): ), ( ["2012-01-01"], - "24H", - "DatetimeIndex(['2012-01-01'], dtype='datetime64[ns]', freq='24H')", + "24h", + "DatetimeIndex(['2012-01-01'], dtype='datetime64[ns]', freq='24h')", ), ], ) @@ -108,7 +108,7 @@ def test_dti_representation(self, method): idxs.append( DatetimeIndex( ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], - freq="H", + freq="h", tz="Asia/Tokyo", ) ) @@ -135,7 +135,7 @@ def test_dti_representation(self, method): exp.append( "DatetimeIndex(['2011-01-01 09:00:00+09:00', " "'2011-01-01 10:00:00+09:00', '2011-01-01 11:00:00+09:00']" - ", dtype='datetime64[ns, Asia/Tokyo]', freq='H')" + ", dtype='datetime64[ns, Asia/Tokyo]', freq='h')" ) exp.append( "DatetimeIndex(['2011-01-01 09:00:00-05:00', " @@ -161,7 +161,7 @@ def test_dti_representation_to_series(self): idx4 = DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") idx5 = DatetimeIndex( ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], - freq="H", + freq="h", tz="Asia/Tokyo", ) idx6 = DatetimeIndex( @@ -218,7 +218,7 @@ 
def test_dti_summary(self): idx4 = DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") idx5 = DatetimeIndex( ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], - freq="H", + freq="h", tz="Asia/Tokyo", ) idx6 = DatetimeIndex( @@ -236,7 +236,7 @@ def test_dti_summary(self): exp5 = ( "DatetimeIndex: 3 entries, 2011-01-01 09:00:00+09:00 " "to 2011-01-01 11:00:00+09:00\n" - "Freq: H" + "Freq: h" ) exp6 = """DatetimeIndex: 3 entries, 2011-01-01 09:00:00-05:00 to NaT""" diff --git a/pandas/tests/indexes/datetimes/test_freq_attr.py b/pandas/tests/indexes/datetimes/test_freq_attr.py index f5821a316358d..5cddf56cd1c73 100644 --- a/pandas/tests/indexes/datetimes/test_freq_attr.py +++ b/pandas/tests/indexes/datetimes/test_freq_attr.py @@ -31,7 +31,7 @@ def test_freq_setter_errors(self): idx._data.freq = "foo" @pytest.mark.parametrize("values", [["20180101", "20180103", "20180105"], []]) - @pytest.mark.parametrize("freq", ["2D", Day(2), "2B", BDay(2), "48H", Hour(48)]) + @pytest.mark.parametrize("freq", ["2D", Day(2), "2B", BDay(2), "48h", Hour(48)]) @pytest.mark.parametrize("tz", [None, "US/Eastern"]) def test_freq_setter(self, values, freq, tz): # GH#20678 diff --git a/pandas/tests/indexes/datetimes/test_indexing.py b/pandas/tests/indexes/datetimes/test_indexing.py index d877110e72b26..37c580c98b139 100644 --- a/pandas/tests/indexes/datetimes/test_indexing.py +++ b/pandas/tests/indexes/datetimes/test_indexing.py @@ -31,7 +31,7 @@ def test_getitem_slice_keeps_name(self): # GH4226 st = Timestamp("2013-07-01 00:00:00", tz="America/Los_Angeles") et = Timestamp("2013-07-02 00:00:00", tz="America/Los_Angeles") - dr = date_range(st, et, freq="H", name="timebucket") + dr = date_range(st, et, freq="h", name="timebucket") assert dr[1:].name == dr.name def test_getitem(self): @@ -301,7 +301,7 @@ def test_take2(self, tz): idx = date_range( start="2010-01-01 09:00", end="2010-02-01 09:00", - freq="H", + freq="h", tz=tz, name="idx", ) @@ -407,7 +407,7 @@ def test_get_loc_key_unit_mismatch_not_castable(self): def test_get_loc_time_obj(self): # time indexing - idx = date_range("2000-01-01", periods=24, freq="H") + idx = date_range("2000-01-01", periods=24, freq="h") result = idx.get_loc(time(12)) expected = np.array([12]) @@ -603,7 +603,7 @@ def test_get_indexer_pad_requires_monotonicity(self): class TestMaybeCastSliceBound: def test_maybe_cast_slice_bounds_empty(self): # GH#14354 - empty_idx = date_range(freq="1H", periods=0, end="2015") + empty_idx = date_range(freq="1h", periods=0, end="2015") right = empty_idx._maybe_cast_slice_bound("2015-01-02", "right") exp = Timestamp("2015-01-02 23:59:59.999999999") diff --git a/pandas/tests/indexes/datetimes/test_join.py b/pandas/tests/indexes/datetimes/test_join.py index ccfdb55fc8119..959fbab0dcec6 100644 --- a/pandas/tests/indexes/datetimes/test_join.py +++ b/pandas/tests/indexes/datetimes/test_join.py @@ -65,7 +65,7 @@ def test_join_object_index(self): assert isinstance(result[0], Timestamp) def test_join_utc_convert(self, join_type): - rng = date_range("1/1/2011", periods=100, freq="H", tz="utc") + rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") left = rng.tz_convert("US/Eastern") right = rng.tz_convert("Europe/Berlin") diff --git a/pandas/tests/indexes/datetimes/test_ops.py b/pandas/tests/indexes/datetimes/test_ops.py index ac6d0a97956e4..7eea05c753b8a 100644 --- a/pandas/tests/indexes/datetimes/test_ops.py +++ b/pandas/tests/indexes/datetimes/test_ops.py @@ -24,7 +24,7 @@ class TestDatetimeIndexOps: ("Q", "day"), ("ME", 
"day"), ("D", "day"), - ("H", "hour"), + ("h", "hour"), ("min", "minute"), ("s", "second"), ("ms", "millisecond"), diff --git a/pandas/tests/indexes/datetimes/test_partial_slicing.py b/pandas/tests/indexes/datetimes/test_partial_slicing.py index 7ad0dfbaf6cb1..c4e23154b7ffc 100644 --- a/pandas/tests/indexes/datetimes/test_partial_slicing.py +++ b/pandas/tests/indexes/datetimes/test_partial_slicing.py @@ -194,7 +194,7 @@ def test_partial_slice(self): s["2004-12-31"] def test_partial_slice_daily(self): - rng = date_range(freq="H", start=datetime(2005, 1, 31), periods=500) + rng = date_range(freq="h", start=datetime(2005, 1, 31), periods=500) s = Series(np.arange(len(rng)), index=rng) result = s["2005-1-31"] diff --git a/pandas/tests/indexes/datetimes/test_scalar_compat.py b/pandas/tests/indexes/datetimes/test_scalar_compat.py index 49597ef885183..7c7e57b51ccc0 100644 --- a/pandas/tests/indexes/datetimes/test_scalar_compat.py +++ b/pandas/tests/indexes/datetimes/test_scalar_compat.py @@ -29,7 +29,7 @@ def test_dti_time(self): assert (result == expected).all() def test_dti_date(self): - rng = date_range("1/1/2000", freq="12H", periods=10) + rng = date_range("1/1/2000", freq="12h", periods=10) result = pd.Index(rng).date expected = [t.date() for t in rng] assert (result == expected).all() @@ -122,8 +122,8 @@ def test_round(self, tz_naive_fixture): ) expected_elt = expected_rng[1] - tm.assert_index_equal(rng.round(freq="H"), expected_rng) - assert elt.round(freq="H") == expected_elt + tm.assert_index_equal(rng.round(freq="h"), expected_rng) + assert elt.round(freq="h") == expected_elt msg = INVALID_FREQ_ERR_MSG with pytest.raises(ValueError, match=msg): @@ -198,8 +198,8 @@ def test_no_rounding_occurs(self, tz_naive_fixture): (["1823-01-01 00:00:01"], "ceil", "1s", ["1823-01-01 00:00:01"]), (["2018-01-01 00:15:00"], "ceil", "15min", ["2018-01-01 00:15:00"]), (["2018-01-01 00:15:00"], "floor", "15min", ["2018-01-01 00:15:00"]), - (["1823-01-01 03:00:00"], "ceil", "3H", ["1823-01-01 03:00:00"]), - (["1823-01-01 03:00:00"], "floor", "3H", ["1823-01-01 03:00:00"]), + (["1823-01-01 03:00:00"], "ceil", "3h", ["1823-01-01 03:00:00"]), + (["1823-01-01 03:00:00"], "floor", "3h", ["1823-01-01 03:00:00"]), ( ("NaT", "1823-01-01 00:00:01"), "floor", @@ -223,7 +223,7 @@ def test_ceil_floor_edge(self, test_input, rounder, freq, expected): @pytest.mark.parametrize( "start, index_freq, periods", - [("2018-01-01", "12H", 25), ("2018-01-01 0:0:0.124999", "1ns", 1000)], + [("2018-01-01", "12h", 25), ("2018-01-01 0:0:0.124999", "1ns", 1000)], ) @pytest.mark.parametrize( "round_freq", @@ -245,7 +245,7 @@ def test_ceil_floor_edge(self, test_input, rounder, freq, expected): "1s", "2s", "3s", - "12H", + "12h", "1D", ], ) @@ -326,7 +326,7 @@ def test_2000(self): tm.assert_index_equal(r1, r2) def test_hour(self): - dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="H") + dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="h") r1 = pd.Index([x.to_julian_date() for x in dr]) r2 = dr.to_julian_date() assert isinstance(r2, pd.Index) and r2.dtype == np.float64 diff --git a/pandas/tests/indexes/datetimes/test_setops.py b/pandas/tests/indexes/datetimes/test_setops.py index ca784948a5d29..6071c7fa8319b 100644 --- a/pandas/tests/indexes/datetimes/test_setops.py +++ b/pandas/tests/indexes/datetimes/test_setops.py @@ -98,8 +98,8 @@ def test_union_coverage(self, sort): assert result.freq == ordered.freq def test_union_bug_1730(self, sort): - rng_a = date_range("1/1/2012", periods=4, freq="3H") - rng_b = 
date_range("1/1/2012", periods=4, freq="4H") + rng_a = date_range("1/1/2012", periods=4, freq="3h") + rng_b = date_range("1/1/2012", periods=4, freq="4h") result = rng_a.union(rng_b, sort=sort) exp = list(rng_a) + list(rng_b[1:]) @@ -308,7 +308,7 @@ def test_intersection_empty(self, tz_aware_fixture, freq): def test_intersection_bug_1708(self): from pandas import DateOffset - index_1 = date_range("1/1/2012", periods=4, freq="12H") + index_1 = date_range("1/1/2012", periods=4, freq="12h") index_2 = index_1 + DateOffset(hours=1) result = index_1.intersection(index_2) diff --git a/pandas/tests/indexes/datetimes/test_timezones.py b/pandas/tests/indexes/datetimes/test_timezones.py index 756a72cf1849a..eb54ea8e4316f 100644 --- a/pandas/tests/indexes/datetimes/test_timezones.py +++ b/pandas/tests/indexes/datetimes/test_timezones.py @@ -192,7 +192,7 @@ def test_dti_tz_convert_hour_overflow_dst_timestamps(self, tz): expected = Index([9, 9, 9], dtype=np.int32) tm.assert_index_equal(ut.hour, expected) - @pytest.mark.parametrize("freq, n", [("H", 1), ("min", 60), ("s", 3600)]) + @pytest.mark.parametrize("freq, n", [("h", 1), ("min", 60), ("s", 3600)]) def test_dti_tz_convert_trans_pos_plus_1__bug(self, freq, n): # Regression test for tslib.tz_convert(vals, tz1, tz2). # See https://github.com/pandas-dev/pandas/issues/4496 for details. @@ -204,7 +204,7 @@ def test_dti_tz_convert_trans_pos_plus_1__bug(self, freq, n): tm.assert_index_equal(idx.hour, Index(expected, dtype=np.int32)) def test_dti_tz_convert_dst(self): - for freq, n in [("H", 1), ("min", 60), ("s", 3600)]: + for freq, n in [("h", 1), ("min", 60), ("s", 3600)]: # Start DST idx = date_range( "2014-03-08 23:00", "2014-03-09 09:00", freq=freq, tz="UTC" @@ -278,8 +278,8 @@ def test_tz_convert_roundtrip(self, tz_aware_fixture): idx2 = date_range(start="2014-01-01", end="2014-12-31", freq="D", tz="UTC") exp2 = date_range(start="2014-01-01", end="2014-12-31", freq="D") - idx3 = date_range(start="2014-01-01", end="2014-03-01", freq="H", tz="UTC") - exp3 = date_range(start="2014-01-01", end="2014-03-01", freq="H") + idx3 = date_range(start="2014-01-01", end="2014-03-01", freq="h", tz="UTC") + exp3 = date_range(start="2014-01-01", end="2014-03-01", freq="h") idx4 = date_range(start="2014-08-01", end="2014-10-31", freq="min", tz="UTC") exp4 = date_range(start="2014-08-01", end="2014-10-31", freq="min") @@ -314,7 +314,7 @@ def test_dti_tz_convert_tzlocal(self): ], ) def test_dti_tz_convert_utc_to_local_no_modify(self, tz): - rng = date_range("3/11/2012", "3/12/2012", freq="H", tz="utc") + rng = date_range("3/11/2012", "3/12/2012", freq="h", tz="utc") rng_eastern = rng.tz_convert(tz) # Values are unmodified @@ -324,7 +324,7 @@ def test_dti_tz_convert_utc_to_local_no_modify(self, tz): @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) def test_tz_convert_unsorted(self, tzstr): - dr = date_range("2012-03-09", freq="H", periods=100, tz="utc") + dr = date_range("2012-03-09", freq="h", periods=100, tz="utc") dr = dr.tz_convert(tzstr) result = dr[::-1].hour @@ -504,10 +504,10 @@ def test_dti_tz_localize_roundtrip(self, tz_aware_fixture): tm.assert_index_equal(reset, expected) def test_dti_tz_localize_naive(self): - rng = date_range("1/1/2011", periods=100, freq="H") + rng = date_range("1/1/2011", periods=100, freq="h") conv = rng.tz_localize("US/Pacific") - exp = date_range("1/1/2011", periods=100, freq="H", tz="US/Pacific") + exp = date_range("1/1/2011", periods=100, freq="h", tz="US/Pacific") tm.assert_index_equal(conv, 
exp._with_freq(None)) @@ -613,11 +613,11 @@ def test_dti_construction_ambiguous_endpoint(self, tz): with pytest.raises(pytz.AmbiguousTimeError, match="Cannot infer dst time"): date_range( - "2013-10-26 23:00", "2013-10-27 01:00", tz="Europe/London", freq="H" + "2013-10-26 23:00", "2013-10-27 01:00", tz="Europe/London", freq="h" ) times = date_range( - "2013-10-26 23:00", "2013-10-27 01:00", freq="H", tz=tz, ambiguous="infer" + "2013-10-26 23:00", "2013-10-27 01:00", freq="h", tz=tz, ambiguous="infer" ) assert times[0] == Timestamp("2013-10-26 23:00", tz=tz) assert times[-1] == Timestamp("2013-10-27 01:00:00+0000", tz=tz) @@ -637,11 +637,11 @@ def test_dti_construction_nonexistent_endpoint(self, tz, option, expected): with pytest.raises(pytz.NonExistentTimeError, match="2019-03-10 02:00:00"): date_range( - "2019-03-10 00:00", "2019-03-10 02:00", tz="US/Pacific", freq="H" + "2019-03-10 00:00", "2019-03-10 02:00", tz="US/Pacific", freq="h" ) times = date_range( - "2019-03-10 00:00", "2019-03-10 02:00", freq="H", tz=tz, nonexistent=option + "2019-03-10 00:00", "2019-03-10 02:00", freq="h", tz=tz, nonexistent=option ) assert times[-1] == Timestamp(expected, tz=tz) @@ -820,7 +820,7 @@ def test_dti_tz_constructors(self, tzstr): arr = ["11/10/2005 08:00:00", "11/10/2005 09:00:00"] idx1 = to_datetime(arr).tz_localize(tzstr) - idx2 = date_range(start="2005-11-10 08:00:00", freq="H", periods=2, tz=tzstr) + idx2 = date_range(start="2005-11-10 08:00:00", freq="h", periods=2, tz=tzstr) idx2 = idx2._with_freq(None) # the others all have freq=None idx3 = DatetimeIndex(arr, tz=tzstr) idx4 = DatetimeIndex(np.array(arr), tz=tzstr) @@ -877,7 +877,7 @@ def test_dti_drop_dont_lose_tz(self): def test_dti_tz_conversion_freq(self, tz_naive_fixture): # GH25241 - t3 = DatetimeIndex(["2019-01-01 10:00"], freq="H") + t3 = DatetimeIndex(["2019-01-01 10:00"], freq="h") assert t3.tz_localize(tz=tz_naive_fixture).freq == t3.freq t4 = DatetimeIndex(["2019-01-02 12:00"], tz="UTC", freq="min") assert t4.tz_convert(tz="UTC").freq == t4.freq @@ -927,9 +927,9 @@ def test_drop_dst_boundary(self): tm.assert_index_equal(result, expected) def test_date_range_localize(self): - rng = date_range("3/11/2012 03:00", periods=15, freq="H", tz="US/Eastern") + rng = date_range("3/11/2012 03:00", periods=15, freq="h", tz="US/Eastern") rng2 = DatetimeIndex(["3/11/2012 03:00", "3/11/2012 04:00"], tz="US/Eastern") - rng3 = date_range("3/11/2012 03:00", periods=15, freq="H") + rng3 = date_range("3/11/2012 03:00", periods=15, freq="h") rng3 = rng3.tz_localize("US/Eastern") tm.assert_index_equal(rng._with_freq(None), rng3) @@ -944,9 +944,9 @@ def test_date_range_localize(self): tm.assert_index_equal(rng[:2], rng2) # Right before the DST transition - rng = date_range("3/11/2012 00:00", periods=2, freq="H", tz="US/Eastern") + rng = date_range("3/11/2012 00:00", periods=2, freq="h", tz="US/Eastern") rng2 = DatetimeIndex( - ["3/11/2012 00:00", "3/11/2012 01:00"], tz="US/Eastern", freq="H" + ["3/11/2012 00:00", "3/11/2012 01:00"], tz="US/Eastern", freq="h" ) tm.assert_index_equal(rng, rng2) exp = Timestamp("3/11/2012 00:00", tz="US/Eastern") @@ -956,7 +956,7 @@ def test_date_range_localize(self): assert exp.hour == 1 assert rng[1] == exp - rng = date_range("3/11/2012 00:00", periods=10, freq="H", tz="US/Eastern") + rng = date_range("3/11/2012 00:00", periods=10, freq="h", tz="US/Eastern") assert rng[2].hour == 3 def test_timestamp_equality_different_timezones(self): @@ -974,7 +974,7 @@ def test_timestamp_equality_different_timezones(self): assert 
(berlin_range == eastern_range).all() def test_dti_intersection(self): - rng = date_range("1/1/2011", periods=100, freq="H", tz="utc") + rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") left = rng[10:90][::-1] right = rng[20:80][::-1] @@ -984,8 +984,8 @@ def test_dti_intersection(self): assert result.tz == left.tz def test_dti_equals_with_tz(self): - left = date_range("1/1/2011", periods=100, freq="H", tz="utc") - right = date_range("1/1/2011", periods=100, freq="H", tz="US/Eastern") + left = date_range("1/1/2011", periods=100, freq="h", tz="utc") + right = date_range("1/1/2011", periods=100, freq="h", tz="US/Eastern") assert not left.equals(right) @@ -1036,7 +1036,7 @@ def test_dti_take_dont_lose_meta(self, tzstr): def test_utc_box_timestamp_and_localize(self, tzstr): tz = timezones.maybe_get_tz(tzstr) - rng = date_range("3/11/2012", "3/12/2012", freq="H", tz="utc") + rng = date_range("3/11/2012", "3/12/2012", freq="h", tz="utc") rng_eastern = rng.tz_convert(tzstr) expected = rng[-1].astimezone(tz) @@ -1046,7 +1046,7 @@ def test_utc_box_timestamp_and_localize(self, tzstr): assert stamp.tzinfo == expected.tzinfo # right tzinfo - rng = date_range("3/13/2012", "3/14/2012", freq="H", tz="utc") + rng = date_range("3/13/2012", "3/14/2012", freq="h", tz="utc") rng_eastern = rng.tz_convert(tzstr) # test not valid for dateutil timezones. # assert 'EDT' in repr(rng_eastern[0].tzinfo) @@ -1148,9 +1148,9 @@ def test_dti_convert_tz_aware_datetime_datetime(self, tz): def test_dti_setop_aware(self, setop): # non-overlapping # GH#39328 as of 2.0 we cast these to UTC instead of object - rng = date_range("2012-11-15 00:00:00", periods=6, freq="H", tz="US/Central") + rng = date_range("2012-11-15 00:00:00", periods=6, freq="h", tz="US/Central") - rng2 = date_range("2012-11-15 12:00:00", periods=6, freq="H", tz="US/Eastern") + rng2 = date_range("2012-11-15 12:00:00", periods=6, freq="h", tz="US/Eastern") result = getattr(rng, setop)(rng2) @@ -1195,14 +1195,14 @@ def test_tz_localize_invalidates_freq(): # we only preserve freq in unambiguous cases # if localized to US/Eastern, this crosses a DST transition - dti = date_range("2014-03-08 23:00", "2014-03-09 09:00", freq="H") - assert dti.freq == "H" + dti = date_range("2014-03-08 23:00", "2014-03-09 09:00", freq="h") + assert dti.freq == "h" result = dti.tz_localize(None) # no-op - assert result.freq == "H" + assert result.freq == "h" result = dti.tz_localize("UTC") # unambiguous freq preservation - assert result.freq == "H" + assert result.freq == "h" result = dti.tz_localize("US/Eastern", nonexistent="shift_forward") assert result.freq is None @@ -1211,4 +1211,4 @@ def test_tz_localize_invalidates_freq(): # Case where we _can_ keep freq because we're length==1 dti2 = dti[:1] result = dti2.tz_localize("US/Eastern") - assert result.freq == "H" + assert result.freq == "h" diff --git a/pandas/tests/indexes/interval/test_interval.py b/pandas/tests/indexes/interval/test_interval.py index aff4944e7bd55..fcf297fd1b092 100644 --- a/pandas/tests/indexes/interval/test_interval.py +++ b/pandas/tests/indexes/interval/test_interval.py @@ -87,7 +87,7 @@ def test_properties(self, closed): [1, 1, 2, 5, 15, 53, 217, 1014, 5335, 31240, 201608], [-np.inf, -100, -10, 0.5, 1, 1.5, 3.8, 101, 202, np.inf], pd.to_datetime(["20170101", "20170202", "20170303", "20170404"]), - pd.to_timedelta(["1ns", "2ms", "3s", "4min", "5H", "6D"]), + pd.to_timedelta(["1ns", "2ms", "3s", "4min", "5h", "6D"]), ], ) def test_length(self, closed, breaks): @@ -689,13 +689,13 @@ def 
test_datetime(self, tz): # test get_indexer start = Timestamp("1999-12-31T12:00", tz=tz) - target = date_range(start=start, periods=7, freq="12H") + target = date_range(start=start, periods=7, freq="12h") actual = index.get_indexer(target) expected = np.array([-1, -1, 0, 0, 1, 1, 2], dtype="intp") tm.assert_numpy_array_equal(actual, expected) start = Timestamp("2000-01-08T18:00", tz=tz) - target = date_range(start=start, periods=7, freq="6H") + target = date_range(start=start, periods=7, freq="6h") actual = index.get_indexer(target) expected = np.array([7, 7, 8, 8, 8, 8, -1], dtype="intp") tm.assert_numpy_array_equal(actual, expected) diff --git a/pandas/tests/indexes/interval/test_interval_range.py b/pandas/tests/indexes/interval/test_interval_range.py index 499220b39279d..6c531fb0428a3 100644 --- a/pandas/tests/indexes/interval/test_interval_range.py +++ b/pandas/tests/indexes/interval/test_interval_range.py @@ -58,7 +58,7 @@ def test_constructor_numeric(self, closed, name, freq, periods): @pytest.mark.parametrize("tz", [None, "US/Eastern"]) @pytest.mark.parametrize( - "freq, periods", [("D", 364), ("2D", 182), ("22D18H", 16), ("ME", 11)] + "freq, periods", [("D", 364), ("2D", 182), ("22D18h", 16), ("ME", 11)] ) def test_constructor_timestamp(self, closed, name, freq, periods, tz): start, end = Timestamp("20180101", tz=tz), Timestamp("20181231", tz=tz) @@ -93,7 +93,7 @@ def test_constructor_timestamp(self, closed, name, freq, periods, tz): tm.assert_index_equal(result, expected) @pytest.mark.parametrize( - "freq, periods", [("D", 100), ("2D12H", 40), ("5D", 20), ("25D", 4)] + "freq, periods", [("D", 100), ("2D12h", 40), ("5D", 20), ("25D", 4)] ) def test_constructor_timedelta(self, closed, name, freq, periods): start, end = Timedelta("0 days"), Timedelta("100 days") @@ -130,7 +130,7 @@ def test_constructor_timedelta(self, closed, name, freq, periods): (0, 10, 3, 9), (0, 10, 1.5, 9), (0.5, 10, 3, 9.5), - (Timedelta("0D"), Timedelta("10D"), "2D4H", Timedelta("8D16H")), + (Timedelta("0D"), Timedelta("10D"), "2D4h", Timedelta("8D16h")), ( Timestamp("2018-01-01"), Timestamp("2018-02-09"), @@ -140,7 +140,7 @@ def test_constructor_timedelta(self, closed, name, freq, periods): ( Timestamp("2018-01-01", tz="US/Eastern"), Timestamp("2018-01-20", tz="US/Eastern"), - "5D12H", + "5D12h", Timestamp("2018-01-17 12:00:00", tz="US/Eastern"), ), ], diff --git a/pandas/tests/indexes/multi/test_compat.py b/pandas/tests/indexes/multi/test_compat.py index f91856c3948a0..27a8c6e9b7158 100644 --- a/pandas/tests/indexes/multi/test_compat.py +++ b/pandas/tests/indexes/multi/test_compat.py @@ -87,7 +87,7 @@ def test_inplace_mutation_resets_values(): def test_boxable_categorical_values(): - cat = pd.Categorical(pd.date_range("2012-01-01", periods=3, freq="H")) + cat = pd.Categorical(pd.date_range("2012-01-01", periods=3, freq="h")) result = MultiIndex.from_product([["a", "b", "c"], cat]).values expected = pd.Series( [ diff --git a/pandas/tests/indexes/multi/test_constructors.py b/pandas/tests/indexes/multi/test_constructors.py index cce6c98d71b47..c51dcb395c795 100644 --- a/pandas/tests/indexes/multi/test_constructors.py +++ b/pandas/tests/indexes/multi/test_constructors.py @@ -203,15 +203,15 @@ def test_from_arrays_tuples(idx): [ ( pd.period_range("2011-01-01", freq="D", periods=3), - pd.period_range("2015-01-01", freq="H", periods=3), + pd.period_range("2015-01-01", freq="h", periods=3), ), ( date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern"), - date_range("2015-01-01 10:00", freq="H", 
periods=3, tz="Asia/Tokyo"), + date_range("2015-01-01 10:00", freq="h", periods=3, tz="Asia/Tokyo"), ), ( pd.timedelta_range("1 days", freq="D", periods=3), - pd.timedelta_range("2 hours", freq="H", periods=3), + pd.timedelta_range("2 hours", freq="h", periods=3), ), ], ) @@ -229,7 +229,7 @@ def test_from_arrays_index_series_period_datetimetz_and_timedelta(idx1, idx2): def test_from_arrays_index_datetimelike_mixed(): idx1 = date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern") - idx2 = date_range("2015-01-01 10:00", freq="H", periods=3) + idx2 = date_range("2015-01-01 10:00", freq="h", periods=3) idx3 = pd.timedelta_range("1 days", freq="D", periods=3) idx4 = pd.period_range("2011-01-01", freq="D", periods=3) diff --git a/pandas/tests/indexes/multi/test_indexing.py b/pandas/tests/indexes/multi/test_indexing.py index d86692477f381..a65677bba35e4 100644 --- a/pandas/tests/indexes/multi/test_indexing.py +++ b/pandas/tests/indexes/multi/test_indexing.py @@ -261,7 +261,7 @@ def test_get_indexer_categorical_time(self): midx = MultiIndex.from_product( [ Categorical(["a", "b", "c"]), - Categorical(date_range("2012-01-01", periods=3, freq="H")), + Categorical(date_range("2012-01-01", periods=3, freq="h")), ] ) result = midx.get_indexer(midx) @@ -854,7 +854,7 @@ def test_timestamp_multiindex_indexer(): # https://github.com/pandas-dev/pandas/issues/26944 idx = MultiIndex.from_product( [ - date_range("2019-01-01T00:15:33", periods=100, freq="H", name="date"), + date_range("2019-01-01T00:15:33", periods=100, freq="h", name="date"), ["x"], [3], ] @@ -866,7 +866,7 @@ def test_timestamp_multiindex_indexer(): date_range( start="2019-01-02T00:15:33", end="2019-01-05T03:15:33", - freq="H", + freq="h", name="date", ), ["x"], diff --git a/pandas/tests/indexes/multi/test_partial_indexing.py b/pandas/tests/indexes/multi/test_partial_indexing.py index 66163dad3deae..64cc1fa621b31 100644 --- a/pandas/tests/indexes/multi/test_partial_indexing.py +++ b/pandas/tests/indexes/multi/test_partial_indexing.py @@ -28,7 +28,7 @@ def df(): # 2016-01-03 00:00:00 a 12 # b 13 # c 14 - dr = date_range("2016-01-01", "2016-01-03", freq="12H") + dr = date_range("2016-01-01", "2016-01-03", freq="12h") abc = ["a", "b", "c"] mi = MultiIndex.from_product([dr, abc]) frame = DataFrame({"c1": range(15)}, index=mi) diff --git a/pandas/tests/indexes/period/methods/test_asfreq.py b/pandas/tests/indexes/period/methods/test_asfreq.py index f9838ce272296..ed078a3e8fb8b 100644 --- a/pandas/tests/indexes/period/methods/test_asfreq.py +++ b/pandas/tests/indexes/period/methods/test_asfreq.py @@ -14,7 +14,7 @@ def test_asfreq(self): pi2 = period_range(freq="Q", start="1/1/2001", end="1/1/2001") pi3 = period_range(freq="M", start="1/1/2001", end="1/1/2001") pi4 = period_range(freq="D", start="1/1/2001", end="1/1/2001") - pi5 = period_range(freq="H", start="1/1/2001", end="1/1/2001 00:00") + pi5 = period_range(freq="h", start="1/1/2001", end="1/1/2001 00:00") pi6 = period_range(freq="Min", start="1/1/2001", end="1/1/2001 00:00") pi7 = period_range(freq="s", start="1/1/2001", end="1/1/2001 00:00:00") @@ -22,28 +22,28 @@ def test_asfreq(self): assert pi1.asfreq("Q", "s") == pi2 assert pi1.asfreq("M", "start") == pi3 assert pi1.asfreq("D", "StarT") == pi4 - assert pi1.asfreq("H", "beGIN") == pi5 + assert pi1.asfreq("h", "beGIN") == pi5 assert pi1.asfreq("Min", "s") == pi6 assert pi1.asfreq("s", "s") == pi7 assert pi2.asfreq("Y", "s") == pi1 assert pi2.asfreq("M", "s") == pi3 assert pi2.asfreq("D", "s") == pi4 - assert 
pi2.asfreq("H", "s") == pi5 + assert pi2.asfreq("h", "s") == pi5 assert pi2.asfreq("Min", "s") == pi6 assert pi2.asfreq("s", "s") == pi7 assert pi3.asfreq("Y", "s") == pi1 assert pi3.asfreq("Q", "s") == pi2 assert pi3.asfreq("D", "s") == pi4 - assert pi3.asfreq("H", "s") == pi5 + assert pi3.asfreq("h", "s") == pi5 assert pi3.asfreq("Min", "s") == pi6 assert pi3.asfreq("s", "s") == pi7 assert pi4.asfreq("Y", "s") == pi1 assert pi4.asfreq("Q", "s") == pi2 assert pi4.asfreq("M", "s") == pi3 - assert pi4.asfreq("H", "s") == pi5 + assert pi4.asfreq("h", "s") == pi5 assert pi4.asfreq("Min", "s") == pi6 assert pi4.asfreq("s", "s") == pi7 @@ -58,14 +58,14 @@ def test_asfreq(self): assert pi6.asfreq("Q", "s") == pi2 assert pi6.asfreq("M", "s") == pi3 assert pi6.asfreq("D", "s") == pi4 - assert pi6.asfreq("H", "s") == pi5 + assert pi6.asfreq("h", "s") == pi5 assert pi6.asfreq("s", "s") == pi7 assert pi7.asfreq("Y", "s") == pi1 assert pi7.asfreq("Q", "s") == pi2 assert pi7.asfreq("M", "s") == pi3 assert pi7.asfreq("D", "s") == pi4 - assert pi7.asfreq("H", "s") == pi5 + assert pi7.asfreq("h", "s") == pi5 assert pi7.asfreq("Min", "s") == pi6 msg = "How must be one of S or E" @@ -100,23 +100,23 @@ def test_asfreq_mult_pi(self, freq): assert result.freq == exp.freq def test_asfreq_combined_pi(self): - pi = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq="H") - exp = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq="25H") - for freq, how in zip(["1D1H", "1H1D"], ["S", "E"]): + pi = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq="h") + exp = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq="25h") + for freq, how in zip(["1D1h", "1h1D"], ["S", "E"]): result = pi.asfreq(freq, how=how) tm.assert_index_equal(result, exp) assert result.freq == exp.freq - for freq in ["1D1H", "1H1D"]: + for freq in ["1D1h", "1h1D"]: pi = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq=freq) - result = pi.asfreq("H") - exp = PeriodIndex(["2001-01-02 00:00", "2001-01-03 02:00", "NaT"], freq="H") + result = pi.asfreq("h") + exp = PeriodIndex(["2001-01-02 00:00", "2001-01-03 02:00", "NaT"], freq="h") tm.assert_index_equal(result, exp) assert result.freq == exp.freq pi = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq=freq) - result = pi.asfreq("H", how="S") - exp = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq="H") + result = pi.asfreq("h", how="S") + exp = PeriodIndex(["2001-01-01 00:00", "2001-01-02 02:00", "NaT"], freq="h") tm.assert_index_equal(result, exp) assert result.freq == exp.freq diff --git a/pandas/tests/indexes/period/methods/test_fillna.py b/pandas/tests/indexes/period/methods/test_fillna.py index 12a07bac25a59..ed6b4686a06de 100644 --- a/pandas/tests/indexes/period/methods/test_fillna.py +++ b/pandas/tests/indexes/period/methods/test_fillna.py @@ -10,19 +10,19 @@ class TestFillNA: def test_fillna_period(self): # GH#11343 - idx = PeriodIndex(["2011-01-01 09:00", NaT, "2011-01-01 11:00"], freq="H") + idx = PeriodIndex(["2011-01-01 09:00", NaT, "2011-01-01 11:00"], freq="h") exp = PeriodIndex( - ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], freq="H" + ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], freq="h" ) - result = idx.fillna(Period("2011-01-01 10:00", freq="H")) + result = idx.fillna(Period("2011-01-01 10:00", freq="h")) tm.assert_index_equal(result, exp) exp = Index( [ - Period("2011-01-01 09:00", freq="H"), + Period("2011-01-01 09:00", freq="h"), "x", - 
Period("2011-01-01 11:00", freq="H"), + Period("2011-01-01 11:00", freq="h"), ], dtype=object, ) @@ -31,9 +31,9 @@ def test_fillna_period(self): exp = Index( [ - Period("2011-01-01 09:00", freq="H"), + Period("2011-01-01 09:00", freq="h"), Period("2011-01-01", freq="D"), - Period("2011-01-01 11:00", freq="H"), + Period("2011-01-01 11:00", freq="h"), ], dtype=object, ) diff --git a/pandas/tests/indexes/period/methods/test_shift.py b/pandas/tests/indexes/period/methods/test_shift.py index d649dd3da0864..fca3e3a559e1f 100644 --- a/pandas/tests/indexes/period/methods/test_shift.py +++ b/pandas/tests/indexes/period/methods/test_shift.py @@ -64,12 +64,12 @@ def test_shift(self): def test_shift_corner_cases(self): # GH#9903 - idx = PeriodIndex([], name="xxx", freq="H") + idx = PeriodIndex([], name="xxx", freq="h") msg = "`freq` argument is not supported for PeriodIndex.shift" with pytest.raises(TypeError, match=msg): # period shift doesn't accept freq - idx.shift(1, freq="H") + idx.shift(1, freq="h") tm.assert_index_equal(idx.shift(0), idx) tm.assert_index_equal(idx.shift(3), idx) @@ -77,19 +77,19 @@ def test_shift_corner_cases(self): idx = PeriodIndex( ["2011-01-01 10:00", "2011-01-01 11:00", "2011-01-01 12:00"], name="xxx", - freq="H", + freq="h", ) tm.assert_index_equal(idx.shift(0), idx) exp = PeriodIndex( ["2011-01-01 13:00", "2011-01-01 14:00", "2011-01-01 15:00"], name="xxx", - freq="H", + freq="h", ) tm.assert_index_equal(idx.shift(3), exp) exp = PeriodIndex( ["2011-01-01 07:00", "2011-01-01 08:00", "2011-01-01 09:00"], name="xxx", - freq="H", + freq="h", ) tm.assert_index_equal(idx.shift(-3), exp) diff --git a/pandas/tests/indexes/period/methods/test_to_timestamp.py b/pandas/tests/indexes/period/methods/test_to_timestamp.py index 462f66eef7269..2394efb353ab6 100644 --- a/pandas/tests/indexes/period/methods/test_to_timestamp.py +++ b/pandas/tests/indexes/period/methods/test_to_timestamp.py @@ -107,7 +107,7 @@ def test_to_timestamp_pi_mult(self): tm.assert_index_equal(result, expected) def test_to_timestamp_pi_combined(self): - idx = period_range(start="2011", periods=2, freq="1D1H", name="idx") + idx = period_range(start="2011", periods=2, freq="1D1h", name="idx") result = idx.to_timestamp() expected = DatetimeIndex(["2011-01-01 00:00", "2011-01-02 01:00"], name="idx") @@ -120,7 +120,7 @@ def test_to_timestamp_pi_combined(self): expected = expected + Timedelta(1, "s") - Timedelta(1, "ns") tm.assert_index_equal(result, expected) - result = idx.to_timestamp(how="E", freq="H") + result = idx.to_timestamp(how="E", freq="h") expected = DatetimeIndex(["2011-01-02 00:00", "2011-01-03 01:00"], name="idx") expected = expected + Timedelta(1, "h") - Timedelta(1, "ns") tm.assert_index_equal(result, expected) diff --git a/pandas/tests/indexes/period/test_constructors.py b/pandas/tests/indexes/period/test_constructors.py index ac4edb10d9352..f1db5ab28be30 100644 --- a/pandas/tests/indexes/period/test_constructors.py +++ b/pandas/tests/indexes/period/test_constructors.py @@ -397,9 +397,9 @@ def test_constructor_freq_mult(self): ) tm.assert_index_equal(pidx, expected) - pidx = period_range(end="2014-01-01 17:00", freq="4H", periods=3) + pidx = period_range(end="2014-01-01 17:00", freq="4h", periods=3) expected = PeriodIndex( - ["2014-01-01 09:00", "2014-01-01 13:00", "2014-01-01 17:00"], freq="4H" + ["2014-01-01 09:00", "2014-01-01 13:00", "2014-01-01 17:00"], freq="4h" ) tm.assert_index_equal(pidx, expected) @@ -444,12 +444,12 @@ def test_constructor_freq_mult_dti_compat_month(self, mult): 
tm.assert_index_equal(pidx, expected) def test_constructor_freq_combined(self): - for freq in ["1D1H", "1H1D"]: + for freq in ["1D1h", "1h1D"]: pidx = PeriodIndex(["2016-01-01", "2016-01-02"], freq=freq) - expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 00:00"], freq="25H") - for freq in ["1D1H", "1H1D"]: + expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 00:00"], freq="25h") + for freq in ["1D1h", "1h1D"]: pidx = period_range(start="2016-01-01", periods=2, freq=freq) - expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 01:00"], freq="25H") + expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 01:00"], freq="25h") tm.assert_index_equal(pidx, expected) def test_constructor(self): @@ -470,7 +470,7 @@ def test_constructor(self): pi = period_range(freq="B", start="1/1/2001", end="12/31/2009") assert len(pi) == 261 * 9 - pi = period_range(freq="H", start="1/1/2001", end="12/31/2001 23:00") + pi = period_range(freq="h", start="1/1/2001", end="12/31/2001 23:00") assert len(pi) == 365 * 24 pi = period_range(freq="Min", start="1/1/2001", end="1/1/2001 23:59") @@ -526,7 +526,7 @@ def test_constructor(self): Period("2006-12-31", ("w", 1)) @pytest.mark.parametrize( - "freq", ["M", "Q", "Y", "D", "B", "min", "s", "ms", "us", "ns", "H"] + "freq", ["M", "Q", "Y", "D", "B", "min", "s", "ms", "us", "ns", "h"] ) @pytest.mark.filterwarnings( r"ignore:Period with BDay freq is deprecated:FutureWarning" diff --git a/pandas/tests/indexes/period/test_formats.py b/pandas/tests/indexes/period/test_formats.py index 67deeccff4e2a..9441f56a75f03 100644 --- a/pandas/tests/indexes/period/test_formats.py +++ b/pandas/tests/indexes/period/test_formats.py @@ -56,7 +56,7 @@ def test_representation(self, method): idx3 = PeriodIndex(["2011-01-01", "2011-01-02"], freq="D") idx4 = PeriodIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") idx5 = PeriodIndex(["2011", "2012", "2013"], freq="Y") - idx6 = PeriodIndex(["2011-01-01 09:00", "2012-02-01 10:00", "NaT"], freq="H") + idx6 = PeriodIndex(["2011-01-01 09:00", "2012-02-01 10:00", "NaT"], freq="h") idx7 = pd.period_range("2013Q1", periods=1, freq="Q") idx8 = pd.period_range("2013Q1", periods=2, freq="Q") idx9 = pd.period_range("2013Q1", periods=3, freq="Q") @@ -77,7 +77,7 @@ def test_representation(self, method): exp6 = ( "PeriodIndex(['2011-01-01 09:00', '2012-02-01 10:00', 'NaT'], " - "dtype='period[H]')" + "dtype='period[h]')" ) exp7 = "PeriodIndex(['2013Q1'], dtype='period[Q-DEC]')" @@ -102,7 +102,7 @@ def test_representation_to_series(self): idx3 = PeriodIndex(["2011-01-01", "2011-01-02"], freq="D") idx4 = PeriodIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") idx5 = PeriodIndex(["2011", "2012", "2013"], freq="Y") - idx6 = PeriodIndex(["2011-01-01 09:00", "2012-02-01 10:00", "NaT"], freq="H") + idx6 = PeriodIndex(["2011-01-01 09:00", "2012-02-01 10:00", "NaT"], freq="h") idx7 = pd.period_range("2013Q1", periods=1, freq="Q") idx8 = pd.period_range("2013Q1", periods=2, freq="Q") @@ -130,7 +130,7 @@ def test_representation_to_series(self): exp6 = """0 2011-01-01 09:00 1 2012-02-01 10:00 2 NaT -dtype: period[H]""" +dtype: period[h]""" exp7 = """0 2013Q1 dtype: period[Q-DEC]""" @@ -158,7 +158,7 @@ def test_summary(self): idx3 = PeriodIndex(["2011-01-01", "2011-01-02"], freq="D") idx4 = PeriodIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") idx5 = PeriodIndex(["2011", "2012", "2013"], freq="Y") - idx6 = PeriodIndex(["2011-01-01 09:00", "2012-02-01 10:00", "NaT"], freq="H") + idx6 = PeriodIndex(["2011-01-01 09:00", "2012-02-01 
10:00", "NaT"], freq="h") idx7 = pd.period_range("2013Q1", periods=1, freq="Q") idx8 = pd.period_range("2013Q1", periods=2, freq="Q") @@ -180,7 +180,7 @@ def test_summary(self): Freq: Y-DEC""" exp6 = """PeriodIndex: 3 entries, 2011-01-01 09:00 to NaT -Freq: H""" +Freq: h""" exp7 = """PeriodIndex: 1 entries, 2013Q1 to 2013Q1 Freq: Q-DEC""" diff --git a/pandas/tests/indexes/period/test_indexing.py b/pandas/tests/indexes/period/test_indexing.py index f5af550d94ab1..2683e25eda618 100644 --- a/pandas/tests/indexes/period/test_indexing.py +++ b/pandas/tests/indexes/period/test_indexing.py @@ -183,7 +183,7 @@ def test_getitem_seconds(self): "2014", "2013/02", "2013/01/02", - "2013/02/01 9H", + "2013/02/01 9h", "2013/02/01 09:00", ] for val in values: @@ -195,7 +195,7 @@ def test_getitem_seconds(self): ser = Series(np.random.default_rng(2).random(len(idx)), index=idx) tm.assert_series_equal(ser["2013/01/01 10:00"], ser[3600:3660]) - tm.assert_series_equal(ser["2013/01/01 9H"], ser[:3600]) + tm.assert_series_equal(ser["2013/01/01 9h"], ser[:3600]) for d in ["2013/01/01", "2013/01", "2013"]: tm.assert_series_equal(ser[d], ser) @@ -215,7 +215,7 @@ def test_getitem_day(self, idx_range): "2014", "2013/02", "2013/01/02", - "2013/02/01 9H", + "2013/02/01 9h", "2013/02/01 09:00", ] for val in values: @@ -230,7 +230,7 @@ def test_getitem_day(self, idx_range): tm.assert_series_equal(ser["2013/02"], ser[31:59]) tm.assert_series_equal(ser["2014"], ser[365:]) - invalid = ["2013/02/01 9H", "2013/02/01 09:00"] + invalid = ["2013/02/01 9h", "2013/02/01 09:00"] for val in invalid: with pytest.raises(KeyError, match=val): ser[val] @@ -479,13 +479,13 @@ def test_get_indexer_non_unique(self): # TODO: This method came from test_period; de-dup with version above def test_get_indexer2(self): - idx = period_range("2000-01-01", periods=3).asfreq("H", how="start") + idx = period_range("2000-01-01", periods=3).asfreq("h", how="start") tm.assert_numpy_array_equal( idx.get_indexer(idx), np.array([0, 1, 2], dtype=np.intp) ) target = PeriodIndex( - ["1999-12-31T23", "2000-01-01T12", "2000-01-02T01"], freq="H" + ["1999-12-31T23", "2000-01-01T12", "2000-01-02T01"], freq="h" ) tm.assert_numpy_array_equal( idx.get_indexer(target, "pad"), np.array([-1, 0, 1], dtype=np.intp) @@ -501,7 +501,7 @@ def test_get_indexer2(self): np.array([0, -1, 1], dtype=np.intp), ) - msg = "Input has different freq=None from PeriodArray\\(freq=H\\)" + msg = "Input has different freq=None from PeriodArray\\(freq=h\\)" with pytest.raises(ValueError, match=msg): idx.get_indexer(target, "nearest", tolerance="1 minute") @@ -714,7 +714,7 @@ def test_take_fill_value(self): class TestGetValue: - @pytest.mark.parametrize("freq", ["H", "D"]) + @pytest.mark.parametrize("freq", ["h", "D"]) def test_get_value_datetime_hourly(self, freq): # get_loc and get_value should treat datetime objects symmetrically # TODO: this test used to test get_value, which is removed in 2.0. 
@@ -730,7 +730,7 @@ def test_get_value_datetime_hourly(self, freq): assert ser.loc[ts] == 7 ts2 = ts + Timedelta(hours=3) - if freq == "H": + if freq == "h": with pytest.raises(KeyError, match="2016-01-01 03:00"): pi.get_loc(ts2) with pytest.raises(KeyError, match="2016-01-01 03:00"): @@ -795,7 +795,7 @@ class TestAsOfLocs: def test_asof_locs_mismatched_type(self): dti = date_range("2016-01-01", periods=3) pi = dti.to_period("D") - pi2 = dti.to_period("H") + pi2 = dti.to_period("h") mask = np.array([0, 1, 0], dtype=bool) @@ -810,6 +810,6 @@ def test_asof_locs_mismatched_type(self): # TimedeltaIndex pi.asof_locs(dti - dti, mask) - msg = "Input has different freq=H" + msg = "Input has different freq=h" with pytest.raises(libperiod.IncompatibleFrequency, match=msg): pi.asof_locs(pi2, mask) diff --git a/pandas/tests/indexes/period/test_period.py b/pandas/tests/indexes/period/test_period.py index 0c445b4cdf770..22bb63d67f57f 100644 --- a/pandas/tests/indexes/period/test_period.py +++ b/pandas/tests/indexes/period/test_period.py @@ -161,7 +161,7 @@ def test_period_index_length(self): period_range(freq="Q", start="1/1/2001", end="12/1/2002"), period_range(freq="M", start="1/1/2001", end="1/1/2002"), period_range(freq="D", start="12/1/2001", end="6/1/2001"), - period_range(freq="H", start="12/31/2001", end="1/1/2002 23:00"), + period_range(freq="h", start="12/31/2001", end="1/1/2002 23:00"), period_range(freq="Min", start="12/31/2001", end="1/1/2002 00:20"), period_range( freq="s", start="12/31/2001 00:00:00", end="12/31/2001 00:05:00" @@ -255,7 +255,7 @@ def test_iteration(self): def test_with_multi_index(self): # #1705 - index = date_range("1/1/2012", periods=4, freq="12H") + index = date_range("1/1/2012", periods=4, freq="12h") index_as_arrays = [index.to_period(freq="D"), index.hour] s = Series([0, 1, 2, 3], index_as_arrays) @@ -284,6 +284,15 @@ def test_period_index_frequency_ME_error_message(self): with pytest.raises(ValueError, match=msg): PeriodIndex(["2020-01-01", "2020-01-02"], freq="2ME") + def test_H_deprecated_from_time_series(self): + # GH#52536 + msg = "'H' is deprecated and will be removed in a future version." 
+ + with tm.assert_produces_warning(FutureWarning, match=msg): + index = period_range(freq="2H", start="1/1/2001", end="12/1/2009") + series = Series(1, index=index) + assert isinstance(series, Series) + @pytest.mark.parametrize("freq", ["2A", "A-DEC", "200A-AUG"]) def test_a_deprecated_from_time_series(self, freq): # GH#52536 diff --git a/pandas/tests/indexes/period/test_resolution.py b/pandas/tests/indexes/period/test_resolution.py index 98ccfe6569798..680bdaa2e2a44 100644 --- a/pandas/tests/indexes/period/test_resolution.py +++ b/pandas/tests/indexes/period/test_resolution.py @@ -11,7 +11,7 @@ class TestResolution: ("Q", "quarter"), ("M", "month"), ("D", "day"), - ("H", "hour"), + ("h", "hour"), ("min", "minute"), ("s", "second"), ("ms", "millisecond"), diff --git a/pandas/tests/indexes/period/test_searchsorted.py b/pandas/tests/indexes/period/test_searchsorted.py index b9863d1bb019a..9b02a2f35fd01 100644 --- a/pandas/tests/indexes/period/test_searchsorted.py +++ b/pandas/tests/indexes/period/test_searchsorted.py @@ -27,9 +27,9 @@ def test_searchsorted(self, freq): assert pidx.searchsorted(NaT) == 5 - msg = "Input has different freq=H from PeriodArray" + msg = "Input has different freq=h from PeriodArray" with pytest.raises(IncompatibleFrequency, match=msg): - pidx.searchsorted(Period("2014-01-01", freq="H")) + pidx.searchsorted(Period("2014-01-01", freq="h")) msg = "Input has different freq=5D from PeriodArray" with pytest.raises(IncompatibleFrequency, match=msg): diff --git a/pandas/tests/indexes/period/test_setops.py b/pandas/tests/indexes/period/test_setops.py index 9610db5f0336b..b9a5940795a5b 100644 --- a/pandas/tests/indexes/period/test_setops.py +++ b/pandas/tests/indexes/period/test_setops.py @@ -43,8 +43,8 @@ def test_union(self, sort): other3 = PeriodIndex([], freq="D") expected3 = period_range("1/1/2000", freq="D", periods=5) - rng4 = period_range("2000-01-01 09:00", freq="H", periods=5) - other4 = period_range("2000-01-02 09:00", freq="H", periods=5) + rng4 = period_range("2000-01-01 09:00", freq="h", periods=5) + other4 = period_range("2000-01-02 09:00", freq="h", periods=5) expected4 = PeriodIndex( [ "2000-01-01 09:00", @@ -58,7 +58,7 @@ def test_union(self, sort): "2000-01-02 12:00", "2000-01-02 13:00", ], - freq="H", + freq="h", ) rng5 = PeriodIndex( @@ -269,8 +269,8 @@ def test_difference(self, sort): "2000-01-01 11:00", "2000-01-01 13:00", ] - rng4 = PeriodIndex(period_rng, freq="H") - other4 = period_range("2000-01-02 09:00", freq="H", periods=5) + rng4 = PeriodIndex(period_rng, freq="h") + other4 = period_range("2000-01-02 09:00", freq="h", periods=5) expected4 = rng4 rng5 = PeriodIndex( diff --git a/pandas/tests/indexes/period/test_tools.py b/pandas/tests/indexes/period/test_tools.py index 2a9149844a353..f507e64d88b06 100644 --- a/pandas/tests/indexes/period/test_tools.py +++ b/pandas/tests/indexes/period/test_tools.py @@ -20,7 +20,7 @@ class TestPeriodRepresentation: ("W-THU", "1970-01-01"), ("D", "1970-01-01"), ("B", "1970-01-01"), - ("H", "1970-01-01"), + ("h", "1970-01-01"), ("min", "1970-01-01"), ("s", "1970-01-01"), ("ms", "1970-01-01"), diff --git a/pandas/tests/indexes/timedeltas/methods/test_astype.py b/pandas/tests/indexes/timedeltas/methods/test_astype.py index f69f0fd3d78e2..4f5ece61fc30c 100644 --- a/pandas/tests/indexes/timedeltas/methods/test_astype.py +++ b/pandas/tests/indexes/timedeltas/methods/test_astype.py @@ -69,7 +69,7 @@ def test_astype(self): tm.assert_numpy_array_equal(rng.asi8, result.values) def test_astype_uint(self): - arr = 
timedelta_range("1H", periods=2) + arr = timedelta_range("1h", periods=2) with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"): arr.astype("uint64") @@ -104,10 +104,10 @@ def test_astype_raises(self, dtype): idx.astype(dtype) def test_astype_category(self): - obj = timedelta_range("1H", periods=2, freq="H") + obj = timedelta_range("1h", periods=2, freq="h") result = obj.astype("category") - expected = pd.CategoricalIndex([Timedelta("1H"), Timedelta("2H")]) + expected = pd.CategoricalIndex([Timedelta("1h"), Timedelta("2h")]) tm.assert_index_equal(result, expected) result = obj._data.astype("category") @@ -115,7 +115,7 @@ def test_astype_category(self): tm.assert_categorical_equal(result, expected) def test_astype_array_fallback(self): - obj = timedelta_range("1H", periods=2) + obj = timedelta_range("1h", periods=2) result = obj.astype(bool) expected = Index(np.array([True, True])) tm.assert_index_equal(result, expected) diff --git a/pandas/tests/indexes/timedeltas/methods/test_shift.py b/pandas/tests/indexes/timedeltas/methods/test_shift.py index e33b8de3e6594..a0986d1496881 100644 --- a/pandas/tests/indexes/timedeltas/methods/test_shift.py +++ b/pandas/tests/indexes/timedeltas/methods/test_shift.py @@ -14,17 +14,17 @@ class TestTimedeltaIndexShift: def test_tdi_shift_empty(self): # GH#9903 idx = TimedeltaIndex([], name="xxx") - tm.assert_index_equal(idx.shift(0, freq="H"), idx) - tm.assert_index_equal(idx.shift(3, freq="H"), idx) + tm.assert_index_equal(idx.shift(0, freq="h"), idx) + tm.assert_index_equal(idx.shift(3, freq="h"), idx) def test_tdi_shift_hours(self): # GH#9903 idx = TimedeltaIndex(["5 hours", "6 hours", "9 hours"], name="xxx") - tm.assert_index_equal(idx.shift(0, freq="H"), idx) + tm.assert_index_equal(idx.shift(0, freq="h"), idx) exp = TimedeltaIndex(["8 hours", "9 hours", "12 hours"], name="xxx") - tm.assert_index_equal(idx.shift(3, freq="H"), exp) + tm.assert_index_equal(idx.shift(3, freq="h"), exp) exp = TimedeltaIndex(["2 hours", "3 hours", "6 hours"], name="xxx") - tm.assert_index_equal(idx.shift(-3, freq="H"), exp) + tm.assert_index_equal(idx.shift(-3, freq="h"), exp) def test_tdi_shift_minutes(self): # GH#9903 diff --git a/pandas/tests/indexes/timedeltas/test_freq_attr.py b/pandas/tests/indexes/timedeltas/test_freq_attr.py index 868da4329dccf..1912c49d3000f 100644 --- a/pandas/tests/indexes/timedeltas/test_freq_attr.py +++ b/pandas/tests/indexes/timedeltas/test_freq_attr.py @@ -12,7 +12,7 @@ class TestFreq: @pytest.mark.parametrize("values", [["0 days", "2 days", "4 days"], []]) - @pytest.mark.parametrize("freq", ["2D", Day(2), "48H", Hour(48)]) + @pytest.mark.parametrize("freq", ["2D", Day(2), "48h", Hour(48)]) def test_freq_setter(self, values, freq): # GH#20678 idx = TimedeltaIndex(values) diff --git a/pandas/tests/indexes/timedeltas/test_indexing.py b/pandas/tests/indexes/timedeltas/test_indexing.py index 31cc8e18f58ce..397f9d9e18331 100644 --- a/pandas/tests/indexes/timedeltas/test_indexing.py +++ b/pandas/tests/indexes/timedeltas/test_indexing.py @@ -21,7 +21,7 @@ class TestGetItem: def test_getitem_slice_keeps_name(self): # GH#4226 - tdi = timedelta_range("1d", "5d", freq="H", name="timebucket") + tdi = timedelta_range("1d", "5d", freq="h", name="timebucket") assert tdi[1:].name == tdi.name def test_getitem(self): @@ -230,7 +230,7 @@ def test_take_invalid_kwargs(self): def test_take_equiv_getitem(self): tds = ["1day 02:00:00", "1 day 04:00:00", "1 day 10:00:00"] - idx = timedelta_range(start="1d", end="2d", freq="H", name="idx") + idx = 
timedelta_range(start="1d", end="2d", freq="h", name="idx") expected = TimedeltaIndex(tds, freq=None, name="idx") taken1 = idx.take([2, 4, 10]) diff --git a/pandas/tests/indexes/timedeltas/test_scalar_compat.py b/pandas/tests/indexes/timedeltas/test_scalar_compat.py index fe3ff1799e763..63db5c1b9c91d 100644 --- a/pandas/tests/indexes/timedeltas/test_scalar_compat.py +++ b/pandas/tests/indexes/timedeltas/test_scalar_compat.py @@ -63,8 +63,8 @@ def test_tdi_round(self): ) expected_elt = expected_rng[1] - tm.assert_index_equal(td.round(freq="H"), expected_rng) - assert elt.round(freq="H") == expected_elt + tm.assert_index_equal(td.round(freq="h"), expected_rng) + assert elt.round(freq="h") == expected_elt msg = INVALID_FREQ_ERR_MSG with pytest.raises(ValueError, match=msg): @@ -121,7 +121,7 @@ def test_round(self): ), ), ("12min", t1c, TimedeltaIndex(["-1 days", "-1 days", "-1 days"])), - ("H", t1c, TimedeltaIndex(["-1 days", "-1 days", "-1 days"])), + ("h", t1c, TimedeltaIndex(["-1 days", "-1 days", "-1 days"])), ("d", t1c, TimedeltaIndex([-1, -1, -1], unit="D")), ]: r1 = t1.round(freq) diff --git a/pandas/tests/indexes/timedeltas/test_setops.py b/pandas/tests/indexes/timedeltas/test_setops.py index 6cdd6944e90ea..727b4eee00566 100644 --- a/pandas/tests/indexes/timedeltas/test_setops.py +++ b/pandas/tests/indexes/timedeltas/test_setops.py @@ -52,8 +52,8 @@ def test_union_coverage(self): assert result.freq == ordered.freq def test_union_bug_1730(self): - rng_a = timedelta_range("1 day", periods=4, freq="3H") - rng_b = timedelta_range("1 day", periods=4, freq="4H") + rng_a = timedelta_range("1 day", periods=4, freq="3h") + rng_b = timedelta_range("1 day", periods=4, freq="4h") result = rng_a.union(rng_b) exp = TimedeltaIndex(sorted(set(rng_a) | set(rng_b))) diff --git a/pandas/tests/indexes/timedeltas/test_timedelta_range.py b/pandas/tests/indexes/timedeltas/test_timedelta_range.py index 03531547ef042..f22bdb7a90516 100644 --- a/pandas/tests/indexes/timedeltas/test_timedelta_range.py +++ b/pandas/tests/indexes/timedeltas/test_timedelta_range.py @@ -46,6 +46,7 @@ def test_timedelta_range(self): @pytest.mark.parametrize( "depr_unit, unit", [ + ("H", "hour"), ("T", "minute"), ("t", "minute"), ("S", "second"), @@ -57,7 +58,8 @@ def test_timedelta_range(self): ("n", "nanosecond"), ], ) - def test_timedelta_units_T_S_L_U_N_deprecated(self, depr_unit, unit): + def test_timedelta_units_H_T_S_L_U_N_deprecated(self, depr_unit, unit): + # GH#52536 depr_msg = ( f"'{depr_unit}' is deprecated and will be removed in a future version." ) @@ -68,7 +70,7 @@ def test_timedelta_units_T_S_L_U_N_deprecated(self, depr_unit, unit): tm.assert_index_equal(result, expected) @pytest.mark.parametrize( - "periods, freq", [(3, "2D"), (5, "D"), (6, "19H12min"), (7, "16H"), (9, "12H")] + "periods, freq", [(3, "2D"), (5, "D"), (6, "19h12min"), (7, "16h"), (9, "12h")] ) def test_linspace_behavior(self, periods, freq): # GH 20976 @@ -76,6 +78,16 @@ def test_linspace_behavior(self, periods, freq): expected = timedelta_range(start="0 days", end="4 days", freq=freq) tm.assert_index_equal(result, expected) + @pytest.mark.parametrize("msg_freq, freq", [("H", "19H12min"), ("T", "19h12T")]) + def test_timedelta_range_H_T_deprecated(self, freq, msg_freq): + # GH#52536 + msg = f"'{msg_freq}' is deprecated and will be removed in a future version." 
+ + result = timedelta_range(start="0 days", end="4 days", periods=6) + with tm.assert_produces_warning(FutureWarning, match=msg): + expected = timedelta_range(start="0 days", end="4 days", freq=freq) + tm.assert_index_equal(result, expected) + def test_errors(self): # not enough params msg = ( @@ -96,7 +108,7 @@ def test_errors(self): # too many params with pytest.raises(ValueError, match=msg): - timedelta_range(start="0 days", end="5 days", periods=10, freq="H") + timedelta_range(start="0 days", end="5 days", periods=10, freq="h") @pytest.mark.parametrize( "start, end, freq, expected_periods", diff --git a/pandas/tests/indexing/test_loc.py b/pandas/tests/indexing/test_loc.py index 2eee506e1feb7..70eada188f3c8 100644 --- a/pandas/tests/indexing/test_loc.py +++ b/pandas/tests/indexing/test_loc.py @@ -1464,7 +1464,7 @@ def test_loc_setitem_datetime_coercion(self): def test_loc_setitem_datetimeindex_tz(self, idxer, tz_naive_fixture): # GH#11365 tz = tz_naive_fixture - idx = date_range(start="2015-07-12", periods=3, freq="H", tz=tz) + idx = date_range(start="2015-07-12", periods=3, freq="h", tz=tz) expected = DataFrame(1.2, index=idx, columns=["var"]) # if result started off with object dtype, then the .loc.__setitem__ # below would retain object dtype @@ -2057,7 +2057,7 @@ def test_loc_setitem_with_expansion_and_existing_dst(self): start = Timestamp("2017-10-29 00:00:00+0200", tz="Europe/Madrid") end = Timestamp("2017-10-29 03:00:00+0100", tz="Europe/Madrid") ts = Timestamp("2016-10-10 03:00:00", tz="Europe/Madrid") - idx = date_range(start, end, inclusive="left", freq="H") + idx = date_range(start, end, inclusive="left", freq="h") assert ts not in idx # i.e. result.loc setitem is with-expansion result = DataFrame(index=idx, columns=["value"]) @@ -2330,7 +2330,7 @@ def test_loc_getitem_partial_string_slicing_with_periodindex(self): tm.assert_series_equal(result, expected) def test_loc_getitem_partial_string_slicing_with_timedeltaindex(self): - ix = timedelta_range(start="1 day", end="2 days", freq="1H") + ix = timedelta_range(start="1 day", end="2 days", freq="1h") ser = ix.to_series() result = ser.loc[:"1 days"] expected = ser.iloc[:-1] @@ -2432,7 +2432,7 @@ def test_loc_getitem_label_slice_across_dst(self): "index", [ pd.period_range(start="2017-01-01", end="2018-01-01", freq="M"), - timedelta_range(start="1 day", end="2 days", freq="1H"), + timedelta_range(start="1 day", end="2 days", freq="1h"), ], ) def test_loc_getitem_label_slice_period_timedelta(self, index): @@ -2588,7 +2588,7 @@ def test_loc_setitem_mask_and_label_with_datetimeindex(self): df = DataFrame( np.arange(6.0).reshape(3, 2), columns=list("AB"), - index=date_range("1/1/2000", periods=3, freq="1H"), + index=date_range("1/1/2000", periods=3, freq="1h"), ) expected = df.copy() expected["C"] = [expected.index[0]] + [pd.NaT, pd.NaT] @@ -2887,7 +2887,7 @@ def test_loc_datetimelike_mismatched_dtypes(): df = DataFrame( np.random.default_rng(2).standard_normal((5, 3)), columns=["a", "b", "c"], - index=date_range("2012", freq="H", periods=5), + index=date_range("2012", freq="h", periods=5), ) # create dataframe with non-unique DatetimeIndex df = df.iloc[[0, 2, 2, 3]].copy() diff --git a/pandas/tests/io/formats/test_format.py b/pandas/tests/io/formats/test_format.py index 51965f753a0fb..585958765f0b6 100644 --- a/pandas/tests/io/formats/test_format.py +++ b/pandas/tests/io/formats/test_format.py @@ -2166,7 +2166,7 @@ def test_period(self): "B": [ pd.Period("2011-01", freq="M"), pd.Period("2011-02-01", freq="D"), - 
pd.Period("2011-03-01 09:00", freq="H"), + pd.Period("2011-03-01 09:00", freq="h"), pd.Period("2011-04", freq="M"), ], "C": list("abcd"), @@ -2703,7 +2703,7 @@ def test_period(self): [ pd.Period("2011-01", freq="M"), pd.Period("2011-02-01", freq="D"), - pd.Period("2011-03-01 09:00", freq="H"), + pd.Period("2011-03-01 09:00", freq="h"), ] ) exp = ( @@ -3330,7 +3330,7 @@ def test_str(self): class TestPeriodIndexFormat: def test_period_format_and_strftime_default(self): - per = pd.PeriodIndex([datetime(2003, 1, 1, 12), None], freq="H") + per = pd.PeriodIndex([datetime(2003, 1, 1, 12), None], freq="h") # Default formatting formatted = per.format() @@ -3377,13 +3377,13 @@ def test_period_tz(self): # Converting to a period looses the timezone information # Since tz is currently set as utc, we'll see 2012 with tm.assert_produces_warning(UserWarning, match="will drop timezone"): - per = dt.to_period(freq="H") + per = dt.to_period(freq="h") assert per.format()[0] == "2012-12-31 23:00" # If tz is currently set as paris before conversion, we'll see 2013 dt = dt.tz_convert("Europe/Paris") with tm.assert_produces_warning(UserWarning, match="will drop timezone"): - per = dt.to_period(freq="H") + per = dt.to_period(freq="h") assert per.format()[0] == "2013-01-01 00:00" @pytest.mark.parametrize( @@ -3406,7 +3406,7 @@ def test_period_non_ascii_fmt(self, locale_str): # Change locale temporarily for this test. with tm.set_locale(locale_str, locale.LC_ALL) if locale_str else nullcontext(): # Scalar - per = pd.Period("2018-03-11 13:00", freq="H") + per = pd.Period("2018-03-11 13:00", freq="h") assert per.strftime("%y é") == "18 é" # Index @@ -3438,7 +3438,7 @@ def test_period_custom_locale_directive(self, locale_str): am_local, pm_local = get_local_am_pm() # Scalar - per = pd.Period("2018-03-11 13:00", freq="H") + per = pd.Period("2018-03-11 13:00", freq="h") assert per.strftime("%p") == pm_local # Index diff --git a/pandas/tests/io/formats/test_to_csv.py b/pandas/tests/io/formats/test_to_csv.py index 822bd14610388..613f609320f31 100644 --- a/pandas/tests/io/formats/test_to_csv.py +++ b/pandas/tests/io/formats/test_to_csv.py @@ -285,7 +285,7 @@ def test_to_csv_different_datetime_formats(self): df = DataFrame( { "date": pd.to_datetime("1970-01-01"), - "datetime": pd.date_range("1970-01-01", periods=2, freq="H"), + "datetime": pd.date_range("1970-01-01", periods=2, freq="h"), } ) expected_rows = [ diff --git a/pandas/tests/io/json/test_json_table_schema.py b/pandas/tests/io/json/test_json_table_schema.py index d7a4dfca90b5e..943515acd33b5 100644 --- a/pandas/tests/io/json/test_json_table_schema.py +++ b/pandas/tests/io/json/test_json_table_schema.py @@ -32,7 +32,7 @@ def df_schema(): "A": [1, 2, 3, 4], "B": ["a", "b", "c", "c"], "C": pd.date_range("2016-01-01", freq="d", periods=4), - "D": pd.timedelta_range("1H", periods=4, freq="min"), + "D": pd.timedelta_range("1h", periods=4, freq="min"), }, index=pd.Index(range(4), name="idx"), ) @@ -45,7 +45,7 @@ def df_table(): "A": [1, 2, 3, 4], "B": ["a", "b", "c", "c"], "C": pd.date_range("2016-01-01", freq="d", periods=4), - "D": pd.timedelta_range("1H", periods=4, freq="min"), + "D": pd.timedelta_range("1h", periods=4, freq="min"), "E": pd.Series(pd.Categorical(["a", "b", "c", "c"])), "F": pd.Series(pd.Categorical(["a", "b", "c", "c"], ordered=True)), "G": [1.0, 2.0, 3, 4.0], @@ -695,7 +695,7 @@ def test_read_json_table_orient(self, index_nm, vals, recwarn): @pytest.mark.parametrize("index_nm", [None, "idx", "index"]) @pytest.mark.parametrize( "vals", - 
[{"timedeltas": pd.timedelta_range("1H", periods=4, freq="min")}], + [{"timedeltas": pd.timedelta_range("1h", periods=4, freq="min")}], ) def test_read_json_table_orient_raises(self, index_nm, vals, recwarn): df = DataFrame(vals, index=pd.Index(range(4), name=index_nm)) diff --git a/pandas/tests/io/json/test_pandas.py b/pandas/tests/io/json/test_pandas.py index b3c2e67f7c318..4280f8617517f 100644 --- a/pandas/tests/io/json/test_pandas.py +++ b/pandas/tests/io/json/test_pandas.py @@ -521,7 +521,7 @@ def test_v12_compat(self, datapath): tm.assert_frame_equal(df_iso, df_unser_iso) def test_blocks_compat_GH9037(self): - index = pd.date_range("20000101", periods=10, freq="H") + index = pd.date_range("20000101", periods=10, freq="h") # freq doesn't round-trip index = DatetimeIndex(list(index), freq=None) diff --git a/pandas/tests/io/parser/dtypes/test_categorical.py b/pandas/tests/io/parser/dtypes/test_categorical.py index 8671bccbc1bbd..8640c17a1349f 100644 --- a/pandas/tests/io/parser/dtypes/test_categorical.py +++ b/pandas/tests/io/parser/dtypes/test_categorical.py @@ -275,9 +275,9 @@ def test_categorical_coerces_timestamp(all_parsers): def test_categorical_coerces_timedelta(all_parsers): parser = all_parsers - dtype = {"b": CategoricalDtype(pd.to_timedelta(["1H", "2H", "3H"]))} + dtype = {"b": CategoricalDtype(pd.to_timedelta(["1h", "2h", "3h"]))} - data = "b\n1H\n2H\n3H" + data = "b\n1h\n2h\n3h" expected = DataFrame({"b": Categorical(dtype["b"].categories)}) result = parser.read_csv(StringIO(data), dtype=dtype) diff --git a/pandas/tests/io/pytables/test_retain_attributes.py b/pandas/tests/io/pytables/test_retain_attributes.py index 799c2f22a9dc6..aef6fc0460cd9 100644 --- a/pandas/tests/io/pytables/test_retain_attributes.py +++ b/pandas/tests/io/pytables/test_retain_attributes.py @@ -21,7 +21,7 @@ def test_retain_index_attributes(setup_path): # GH 3499, losing frequency info on index recreation df = DataFrame( - {"A": Series(range(3), index=date_range("2000-1-1", periods=3, freq="H"))} + {"A": Series(range(3), index=date_range("2000-1-1", periods=3, freq="h"))} ) with ensure_clean_store(setup_path) as store: @@ -76,7 +76,7 @@ def test_retain_index_attributes2(tmp_path, setup_path): with tm.assert_produces_warning(errors.AttributeConflictWarning): df = DataFrame( - {"A": Series(range(3), index=date_range("2000-1-1", periods=3, freq="H"))} + {"A": Series(range(3), index=date_range("2000-1-1", periods=3, freq="h"))} ) df.to_hdf(path, key="data", mode="w", append=True) df2 = DataFrame( @@ -85,7 +85,7 @@ def test_retain_index_attributes2(tmp_path, setup_path): df2.to_hdf(path, key="data", append=True) - idx = date_range("2000-1-1", periods=3, freq="H") + idx = date_range("2000-1-1", periods=3, freq="h") idx.name = "foo" df = DataFrame({"A": Series(range(3), index=idx)}) df.to_hdf(path, key="data", mode="w", append=True) @@ -93,7 +93,7 @@ def test_retain_index_attributes2(tmp_path, setup_path): assert read_hdf(path, key="data").index.name == "foo" with tm.assert_produces_warning(errors.AttributeConflictWarning): - idx2 = date_range("2001-1-1", periods=3, freq="H") + idx2 = date_range("2001-1-1", periods=3, freq="h") idx2.name = "bar" df2 = DataFrame({"A": Series(range(3), index=idx2)}) df2.to_hdf(path, key="data", append=True) diff --git a/pandas/tests/io/pytables/test_timezones.py b/pandas/tests/io/pytables/test_timezones.py index 1eb7a34bead56..676b9374514e8 100644 --- a/pandas/tests/io/pytables/test_timezones.py +++ b/pandas/tests/io/pytables/test_timezones.py @@ -131,7 +131,7 @@ def 
test_append_with_timezones(setup_path, gettz): def test_append_with_timezones_as_index(setup_path, gettz): # GH#4098 example - dti = date_range("2000-1-1", periods=3, freq="H", tz=gettz("US/Eastern")) + dti = date_range("2000-1-1", periods=3, freq="h", tz=gettz("US/Eastern")) dti = dti._with_freq(None) # freq doesn't round-trip df = DataFrame({"A": Series(range(3), index=dti)}) @@ -332,7 +332,7 @@ def test_dst_transitions(setup_path): "2013-10-26 23:00", "2013-10-27 01:00", tz="Europe/London", - freq="H", + freq="h", ambiguous="infer", ) times = times._with_freq(None) # freq doesn't round-trip diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py index e1839fc1b0a67..82fb98615100f 100644 --- a/pandas/tests/io/test_sql.py +++ b/pandas/tests/io/test_sql.py @@ -2411,7 +2411,7 @@ def test_naive_datetimeindex_roundtrip(conn, request): # GH 23510 # Ensure that a naive DatetimeIndex isn't converted to UTC conn = request.getfixturevalue(conn) - dates = date_range("2018-01-01", periods=5, freq="6H")._with_freq(None) + dates = date_range("2018-01-01", periods=5, freq="6h")._with_freq(None) expected = DataFrame({"nums": range(5)}, index=dates) assert expected.to_sql(name="foo_table", con=conn, index_label="info_date") == 5 result = sql.read_sql_table("foo_table", conn, index_col="info_date") diff --git a/pandas/tests/plotting/test_datetimelike.py b/pandas/tests/plotting/test_datetimelike.py index f488ee7da87ac..db7c0cec09e6c 100644 --- a/pandas/tests/plotting/test_datetimelike.py +++ b/pandas/tests/plotting/test_datetimelike.py @@ -49,7 +49,7 @@ class TestTSPlot: def test_ts_plot_with_tz(self, tz_aware_fixture): # GH2877, GH17173, GH31205, GH31580 tz = tz_aware_fixture - index = date_range("1/1/2011", periods=2, freq="H", tz=tz) + index = date_range("1/1/2011", periods=2, freq="h", tz=tz) ts = Series([188.5, 328.25], index=index) _check_plot_works(ts.plot) ax = ts.plot() @@ -117,7 +117,7 @@ def test_nonnumeric_exclude_error(self): with pytest.raises(TypeError, match=msg): df["A"].plot() - @pytest.mark.parametrize("freq", ["s", "min", "H", "D", "W", "M", "Q", "Y"]) + @pytest.mark.parametrize("freq", ["s", "min", "h", "D", "W", "M", "Q", "Y"]) def test_tsplot_period(self, freq): idx = period_range("12/31/1999", freq=freq, periods=100) ser = Series(np.random.default_rng(2).standard_normal(len(idx)), idx) @@ -125,7 +125,7 @@ def test_tsplot_period(self, freq): _check_plot_works(ser.plot, ax=ax) @pytest.mark.parametrize( - "freq", ["s", "min", "H", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] + "freq", ["s", "min", "h", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] ) def test_tsplot_datetime(self, freq): idx = date_range("12/31/1999", freq=freq, periods=100) @@ -187,14 +187,14 @@ def check_format_of_first_point(ax, expected_string): daily.plot(ax=ax) check_format_of_first_point(ax, "t = 2014-01-01 y = 1.000000") - @pytest.mark.parametrize("freq", ["s", "min", "H", "D", "W", "M", "Q", "Y"]) + @pytest.mark.parametrize("freq", ["s", "min", "h", "D", "W", "M", "Q", "Y"]) def test_line_plot_period_series(self, freq): idx = period_range("12/31/1999", freq=freq, periods=100) ser = Series(np.random.default_rng(2).standard_normal(len(idx)), idx) _check_plot_works(ser.plot, ser.index.freq) @pytest.mark.parametrize( - "frqncy", ["1s", "3s", "5min", "7H", "4D", "8W", "11M", "3Y"] + "frqncy", ["1s", "3s", "5min", "7h", "4D", "8W", "11M", "3Y"] ) def test_line_plot_period_mlt_series(self, frqncy): # test period index line plot for series with multiples (`mlt`) of the @@ -204,14 +204,14 @@ def 
test_line_plot_period_mlt_series(self, frqncy): _check_plot_works(s.plot, s.index.freq.rule_code) @pytest.mark.parametrize( - "freq", ["s", "min", "H", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] + "freq", ["s", "min", "h", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] ) def test_line_plot_datetime_series(self, freq): idx = date_range("12/31/1999", freq=freq, periods=100) ser = Series(np.random.default_rng(2).standard_normal(len(idx)), idx) _check_plot_works(ser.plot, ser.index.freq.rule_code) - @pytest.mark.parametrize("freq", ["s", "min", "H", "D", "W", "ME", "Q", "Y"]) + @pytest.mark.parametrize("freq", ["s", "min", "h", "D", "W", "ME", "Q", "Y"]) def test_line_plot_period_frame(self, freq): idx = date_range("12/31/1999", freq=freq, periods=100) df = DataFrame( @@ -222,7 +222,7 @@ def test_line_plot_period_frame(self, freq): _check_plot_works(df.plot, df.index.freq) @pytest.mark.parametrize( - "frqncy", ["1s", "3s", "5min", "7H", "4D", "8W", "11M", "3Y"] + "frqncy", ["1s", "3s", "5min", "7h", "4D", "8W", "11M", "3Y"] ) def test_line_plot_period_mlt_frame(self, frqncy): # test period index line plot for DataFrames with multiples (`mlt`) @@ -240,7 +240,7 @@ def test_line_plot_period_mlt_frame(self, frqncy): @pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning") @pytest.mark.parametrize( - "freq", ["s", "min", "H", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] + "freq", ["s", "min", "h", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] ) def test_line_plot_datetime_frame(self, freq): idx = date_range("12/31/1999", freq=freq, periods=100) @@ -254,7 +254,7 @@ def test_line_plot_datetime_frame(self, freq): _check_plot_works(df.plot, freq) @pytest.mark.parametrize( - "freq", ["s", "min", "H", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] + "freq", ["s", "min", "h", "D", "W", "ME", "Q-DEC", "Y", "1B30Min"] ) def test_line_plot_inferred_freq(self, freq): idx = date_range("12/31/1999", freq=freq, periods=100) @@ -382,7 +382,7 @@ def test_freq_with_no_period_alias(self): def test_nonzero_base(self): # GH2571 - idx = date_range("2012-12-20", periods=24, freq="H") + timedelta(minutes=30) + idx = date_range("2012-12-20", periods=24, freq="h") + timedelta(minutes=30) df = DataFrame(np.arange(24), index=idx) _, ax = mpl.pyplot.subplots() df.plot(ax=ax) @@ -551,13 +551,13 @@ def test_finder_minutely(self): def test_finder_hourly(self): nhours = 23 - rng = date_range("1/1/1999", freq="H", periods=nhours) + rng = date_range("1/1/1999", freq="h", periods=nhours) ser = Series(np.random.default_rng(2).standard_normal(len(rng)), rng) _, ax = mpl.pyplot.subplots() ser.plot(ax=ax) xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] - xp = Period("1/1/1999", freq="H").ordinal + xp = Period("1/1/1999", freq="h").ordinal assert rs == xp @@ -814,7 +814,7 @@ def test_mixed_freq_hf_first(self): assert PeriodIndex(data=line.get_xdata()).freq == "D" def test_mixed_freq_alignment(self): - ts_ind = date_range("2012-01-01 13:00", "2012-01-02", freq="H") + ts_ind = date_range("2012-01-01 13:00", "2012-01-02", freq="h") ts_data = np.random.default_rng(2).standard_normal(12) ts = Series(ts_data, index=ts_ind) @@ -842,7 +842,7 @@ def test_mixed_freq_lf_first(self): def test_mixed_freq_lf_first_hourly(self): idxh = date_range("1/1/1999", periods=240, freq="min") - idxl = date_range("1/1/1999", periods=4, freq="H") + idxl = date_range("1/1/1999", periods=4, freq="h") high = Series(np.random.default_rng(2).standard_normal(len(idxh)), idxh) low = Series(np.random.default_rng(2).standard_normal(len(idxl)), idxl) _, ax = 
mpl.pyplot.subplots() @@ -1517,7 +1517,7 @@ def test_timedelta_short_period(self): def test_hist(self): # https://github.com/matplotlib/matplotlib/issues/8459 - rng = date_range("1/1/2011", periods=10, freq="H") + rng = date_range("1/1/2011", periods=10, freq="h") x = rng w1 = np.arange(0, 1, 0.1) w2 = np.arange(0, 1, 0.1)[::-1] diff --git a/pandas/tests/reductions/test_stat_reductions.py b/pandas/tests/reductions/test_stat_reductions.py index 3cea39fa75ece..74e521ab71f41 100644 --- a/pandas/tests/reductions/test_stat_reductions.py +++ b/pandas/tests/reductions/test_stat_reductions.py @@ -41,7 +41,7 @@ def test_dt64_mean(self, tz_naive_fixture, box): assert obj.mean(skipna=False) is pd.NaT @pytest.mark.parametrize("box", [Series, pd.Index, PeriodArray]) - @pytest.mark.parametrize("freq", ["s", "H", "D", "W", "B"]) + @pytest.mark.parametrize("freq", ["s", "h", "D", "W", "B"]) def test_period_mean(self, box, freq): # GH#24757 dti = pd.date_range("2001-01-01", periods=11) diff --git a/pandas/tests/resample/test_base.py b/pandas/tests/resample/test_base.py index ad6ff70b14d25..42e741119b0a1 100644 --- a/pandas/tests/resample/test_base.py +++ b/pandas/tests/resample/test_base.py @@ -44,7 +44,7 @@ def _create_index(*args, **kwargs): return _create_index -@pytest.mark.parametrize("freq", ["2D", "1H"]) +@pytest.mark.parametrize("freq", ["2D", "1h"]) @pytest.mark.parametrize( "_index_factory,_series_name,_index_start,_index_end", [DATE_RANGE, TIMEDELTA_RANGE] ) @@ -65,16 +65,16 @@ def test_asfreq_fill_value(series, create_index): ser = series - result = ser.resample("1H").asfreq() - new_index = create_index(ser.index[0], ser.index[-1], freq="1H") + result = ser.resample("1h").asfreq() + new_index = create_index(ser.index[0], ser.index[-1], freq="1h") expected = ser.reindex(new_index) tm.assert_series_equal(result, expected) # Explicit cast to float to avoid implicit cast when setting None frame = ser.astype("float").to_frame("value") frame.iloc[1] = None - result = frame.resample("1H").asfreq(fill_value=4.0) - new_index = create_index(frame.index[0], frame.index[-1], freq="1H") + result = frame.resample("1h").asfreq(fill_value=4.0) + new_index = create_index(frame.index[0], frame.index[-1], freq="1h") expected = frame.reindex(new_index, fill_value=4.0) tm.assert_frame_equal(result, expected) @@ -100,7 +100,7 @@ def test_raises_on_non_datetimelike_index(): @all_ts -@pytest.mark.parametrize("freq", ["ME", "D", "H"]) +@pytest.mark.parametrize("freq", ["ME", "D", "h"]) def test_resample_empty_series(freq, empty_series_dti, resample_method): # GH12771 & GH12868 @@ -108,7 +108,7 @@ def test_resample_empty_series(freq, empty_series_dti, resample_method): if freq == "ME" and isinstance(ser.index, TimedeltaIndex): msg = ( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - "e.g. '24H' or '3D', not " + "e.g. 
'24h' or '3D', not " ) with pytest.raises(ValueError, match=msg): ser.resample(freq) @@ -140,7 +140,7 @@ def test_resample_empty_series(freq, empty_series_dti, resample_method): [ pytest.param("ME", marks=pytest.mark.xfail(reason="Don't know why this fails")), "D", - "H", + "h", ], ) def test_resample_nat_index_series(freq, series, resample_method): @@ -164,7 +164,7 @@ def test_resample_nat_index_series(freq, series, resample_method): @all_ts -@pytest.mark.parametrize("freq", ["ME", "D", "H"]) +@pytest.mark.parametrize("freq", ["ME", "D", "h"]) @pytest.mark.parametrize("resample_method", ["count", "size"]) def test_resample_count_empty_series(freq, empty_series_dti, resample_method): # GH28427 @@ -172,7 +172,7 @@ def test_resample_count_empty_series(freq, empty_series_dti, resample_method): if freq == "ME" and isinstance(ser.index, TimedeltaIndex): msg = ( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - "e.g. '24H' or '3D', not " + "e.g. '24h' or '3D', not " ) with pytest.raises(ValueError, match=msg): ser.resample(freq) @@ -192,7 +192,7 @@ def test_resample_count_empty_series(freq, empty_series_dti, resample_method): @all_ts -@pytest.mark.parametrize("freq", ["ME", "D", "H"]) +@pytest.mark.parametrize("freq", ["ME", "D", "h"]) def test_resample_empty_dataframe(empty_frame_dti, freq, resample_method): # GH13212 df = empty_frame_dti @@ -200,7 +200,7 @@ def test_resample_empty_dataframe(empty_frame_dti, freq, resample_method): if freq == "ME" and isinstance(df.index, TimedeltaIndex): msg = ( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - "e.g. '24H' or '3D', not " + "e.g. '24h' or '3D', not " ) with pytest.raises(ValueError, match=msg): df.resample(freq, group_keys=False) @@ -234,7 +234,7 @@ def test_resample_empty_dataframe(empty_frame_dti, freq, resample_method): @all_ts -@pytest.mark.parametrize("freq", ["ME", "D", "H"]) +@pytest.mark.parametrize("freq", ["ME", "D", "h"]) def test_resample_count_empty_dataframe(freq, empty_frame_dti): # GH28427 @@ -243,7 +243,7 @@ def test_resample_count_empty_dataframe(freq, empty_frame_dti): if freq == "ME" and isinstance(empty_frame_dti.index, TimedeltaIndex): msg = ( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - "e.g. '24H' or '3D', not " + "e.g. '24h' or '3D', not " ) with pytest.raises(ValueError, match=msg): empty_frame_dti.resample(freq) @@ -261,7 +261,7 @@ def test_resample_count_empty_dataframe(freq, empty_frame_dti): @all_ts -@pytest.mark.parametrize("freq", ["ME", "D", "H"]) +@pytest.mark.parametrize("freq", ["ME", "D", "h"]) def test_resample_size_empty_dataframe(freq, empty_frame_dti): # GH28427 @@ -270,7 +270,7 @@ def test_resample_size_empty_dataframe(freq, empty_frame_dti): if freq == "ME" and isinstance(empty_frame_dti.index, TimedeltaIndex): msg = ( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - "e.g. '24H' or '3D', not " + "e.g. '24h' or '3D', not " ) with pytest.raises(ValueError, match=msg): empty_frame_dti.resample(freq) @@ -308,7 +308,7 @@ def test_resample_empty_dtypes(index, dtype, resample_method): @all_ts -@pytest.mark.parametrize("freq", ["ME", "D", "H"]) +@pytest.mark.parametrize("freq", ["ME", "D", "h"]) def test_apply_to_empty_series(empty_series_dti, freq): # GH 14313 ser = empty_series_dti @@ -316,7 +316,7 @@ def test_apply_to_empty_series(empty_series_dti, freq): if freq == "ME" and isinstance(empty_series_dti.index, TimedeltaIndex): msg = ( "Resampling on a TimedeltaIndex requires fixed-duration `freq`, " - "e.g. 
'24H' or '3D', not " + "e.g. '24h' or '3D', not " ) with pytest.raises(ValueError, match=msg): empty_series_dti.resample(freq) @@ -333,7 +333,7 @@ def test_apply_to_empty_series(empty_series_dti, freq): @all_ts def test_resampler_is_iterable(series): # GH 15314 - freq = "H" + freq = "h" tg = Grouper(freq=freq, convention="start") grouped = series.groupby(tg) resampled = series.resample(freq) @@ -347,7 +347,7 @@ def test_resample_quantile(series): # GH 15023 ser = series q = 0.75 - freq = "H" + freq = "h" result = ser.resample(freq).quantile(q) expected = ser.resample(freq).agg(lambda x: x.quantile(q)).rename(ser.name) tm.assert_series_equal(result, expected) diff --git a/pandas/tests/resample/test_datetime_index.py b/pandas/tests/resample/test_datetime_index.py index b40ae7d2bd8c0..63f9de2da733d 100644 --- a/pandas/tests/resample/test_datetime_index.py +++ b/pandas/tests/resample/test_datetime_index.py @@ -516,8 +516,8 @@ def test_upsample_with_limit(unit): tm.assert_series_equal(result, expected) -@pytest.mark.parametrize("freq", ["5D", "10H", "5Min", "10s"]) -@pytest.mark.parametrize("rule", ["Y", "3ME", "15D", "30H", "15Min", "30s"]) +@pytest.mark.parametrize("freq", ["5D", "10h", "5Min", "10s"]) +@pytest.mark.parametrize("rule", ["Y", "3ME", "15D", "30h", "15Min", "30s"]) def test_nearest_upsample_with_limit(tz_aware_fixture, freq, rule, unit): # GH 33939 rng = date_range("1/1/2000", periods=3, freq=freq, tz=tz_aware_fixture).as_unit( @@ -604,9 +604,9 @@ def test_resample_ohlc_dataframe(unit): ).reindex(["VOLUME", "PRICE"], axis=1) df.index = df.index.as_unit(unit) df.columns.name = "Cols" - res = df.resample("H").ohlc() + res = df.resample("h").ohlc() exp = pd.concat( - [df["VOLUME"].resample("H").ohlc(), df["PRICE"].resample("H").ohlc()], + [df["VOLUME"].resample("h").ohlc(), df["PRICE"].resample("h").ohlc()], axis=1, keys=df.columns, ) @@ -614,7 +614,7 @@ def test_resample_ohlc_dataframe(unit): tm.assert_frame_equal(exp, res) df.columns = [["a", "b"], ["c", "d"]] - res = df.resample("H").ohlc() + res = df.resample("h").ohlc() exp.columns = pd.MultiIndex.from_tuples( [ ("a", "c", "open"), @@ -659,7 +659,7 @@ def test_resample_reresample(unit): ).as_unit(unit) s = Series(np.random.default_rng(2).random(len(dti)), dti) bs = s.resample("B", closed="right", label="right").mean() - result = bs.resample("8H").mean() + result = bs.resample("8h").mean() assert len(result) == 25 assert isinstance(result.index.freq, offsets.DateOffset) assert result.index.freq == offsets.Hour(8) @@ -954,13 +954,13 @@ def test_resample_origin_epoch_with_tz_day_vs_24h(unit): ts_1 = Series(random_values, index=rng) result_1 = ts_1.resample("D", origin="epoch").mean() - result_2 = ts_1.resample("24H", origin="epoch").mean() + result_2 = ts_1.resample("24h", origin="epoch").mean() tm.assert_series_equal(result_1, result_2) # check that we have the same behavior with epoch even if we are not timezone aware ts_no_tz = ts_1.tz_localize(None) result_3 = ts_no_tz.resample("D", origin="epoch").mean() - result_4 = ts_no_tz.resample("24H", origin="epoch").mean() + result_4 = ts_no_tz.resample("24h", origin="epoch").mean() tm.assert_series_equal(result_1, result_3.tz_localize(rng.tz), check_freq=False) tm.assert_series_equal(result_1, result_4.tz_localize(rng.tz), check_freq=False) @@ -969,7 +969,7 @@ def test_resample_origin_epoch_with_tz_day_vs_24h(unit): rng = date_range(start, end, freq="7min").as_unit(unit) ts_2 = Series(random_values, index=rng) result_5 = ts_2.resample("D", origin="epoch").mean() - result_6 = 
ts_2.resample("24H", origin="epoch").mean() + result_6 = ts_2.resample("24h", origin="epoch").mean() tm.assert_series_equal(result_1.tz_localize(None), result_5.tz_localize(None)) tm.assert_series_equal(result_1.tz_localize(None), result_6.tz_localize(None)) @@ -1005,27 +1005,27 @@ def _create_series(values, timestamps, freq="D"): expected_ts = ["2013-11-02 22:00-05:00", "2013-11-03 22:00-06:00"] expected = _create_series([23.0, 2.0], expected_ts) - result = ts.resample("D", origin="start", offset="-2H").sum() + result = ts.resample("D", origin="start", offset="-2h").sum() tm.assert_series_equal(result, expected) expected_ts = ["2013-11-02 22:00-05:00", "2013-11-03 21:00-06:00"] - expected = _create_series([22.0, 3.0], expected_ts, freq="24H") - result = ts.resample("24H", origin="start", offset="-2H").sum() + expected = _create_series([22.0, 3.0], expected_ts, freq="24h") + result = ts.resample("24h", origin="start", offset="-2h").sum() tm.assert_series_equal(result, expected) expected_ts = ["2013-11-02 02:00-05:00", "2013-11-03 02:00-06:00"] expected = _create_series([3.0, 22.0], expected_ts) - result = ts.resample("D", origin="start", offset="2H").sum() + result = ts.resample("D", origin="start", offset="2h").sum() tm.assert_series_equal(result, expected) expected_ts = ["2013-11-02 23:00-05:00", "2013-11-03 23:00-06:00"] expected = _create_series([24.0, 1.0], expected_ts) - result = ts.resample("D", origin="start", offset="-1H").sum() + result = ts.resample("D", origin="start", offset="-1h").sum() tm.assert_series_equal(result, expected) expected_ts = ["2013-11-02 01:00-05:00", "2013-11-03 01:00:00-0500"] expected = _create_series([1.0, 24.0], expected_ts) - result = ts.resample("D", origin="start", offset="1H").sum() + result = ts.resample("D", origin="start", offset="1h").sum() tm.assert_series_equal(result, expected) @@ -1054,7 +1054,7 @@ def test_period_with_agg(): # aggregate a period resampler with a lambda s2 = Series( np.random.default_rng(2).integers(0, 5, 50), - index=period_range("2012-01-01", freq="H", periods=50), + index=period_range("2012-01-01", freq="h", periods=50), dtype="float64", ) @@ -1114,12 +1114,12 @@ def test_resample_dtype_coercion(unit): df = {"a": [1, 3, 1, 4]} df = DataFrame(df, index=date_range("2017-01-01", "2017-01-04").as_unit(unit)) - expected = df.astype("float64").resample("H").mean()["a"].interpolate("cubic") + expected = df.astype("float64").resample("h").mean()["a"].interpolate("cubic") - result = df.resample("H")["a"].mean().interpolate("cubic") + result = df.resample("h")["a"].mean().interpolate("cubic") tm.assert_series_equal(result, expected) - result = df.resample("H").mean()["a"].interpolate("cubic") + result = df.resample("h").mean()["a"].interpolate("cubic") tm.assert_series_equal(result, expected) @@ -1564,11 +1564,11 @@ def test_resample_across_dst(): pd.to_datetime(df2.ts, unit="s") .dt.tz_localize("UTC") .dt.tz_convert("Europe/Madrid"), - freq="H", + freq="h", ) df = DataFrame([5, 5], index=dti1) - result = df.resample(rule="H").sum() + result = df.resample(rule="h").sum() expected = DataFrame([5, 5], index=dti2) tm.assert_frame_equal(result, expected) @@ -1694,11 +1694,11 @@ def test_downsample_across_dst(unit): # GH 8531 tz = pytz.timezone("Europe/Berlin") dt = datetime(2014, 10, 26) - dates = date_range(tz.localize(dt), periods=4, freq="2H").as_unit(unit) - result = Series(5, index=dates).resample("H").mean() + dates = date_range(tz.localize(dt), periods=4, freq="2h").as_unit(unit) + result = Series(5, 
index=dates).resample("h").mean() expected = Series( [5.0, np.nan] * 3 + [5.0], - index=date_range(tz.localize(dt), periods=7, freq="H").as_unit(unit), + index=date_range(tz.localize(dt), periods=7, freq="h").as_unit(unit), ) tm.assert_series_equal(result, expected) @@ -1724,7 +1724,7 @@ def test_downsample_across_dst_weekly(unit): def test_downsample_across_dst_weekly_2(unit): # GH 9119, GH 21459 - idx = date_range("2013-04-01", "2013-05-01", tz="Europe/London", freq="H").as_unit( + idx = date_range("2013-04-01", "2013-05-01", tz="Europe/London", freq="h").as_unit( unit ) s = Series(index=idx, dtype=np.float64) @@ -1742,7 +1742,7 @@ def test_downsample_dst_at_midnight(unit): # GH 25758 start = datetime(2018, 11, 3, 12) end = datetime(2018, 11, 5, 12) - index = date_range(start, end, freq="1H").as_unit(unit) + index = date_range(start, end, freq="1h").as_unit(unit) index = index.tz_localize("UTC").tz_convert("America/Havana") data = list(range(len(index))) dataframe = DataFrame(data, index=index) @@ -1844,14 +1844,14 @@ def f(data, add_arg): [ (30, "s", 0.5, "Min"), (60, "s", 1, "Min"), - (3600, "s", 1, "H"), - (60, "Min", 1, "H"), + (3600, "s", 1, "h"), + (60, "Min", 1, "h"), (21600, "s", 0.25, "D"), (86400, "s", 1, "D"), (43200, "s", 0.5, "D"), (1440, "Min", 1, "D"), - (12, "H", 0.5, "D"), - (24, "H", 1, "D"), + (12, "h", 0.5, "D"), + (24, "h", 1, "D"), ], ) def test_resample_equivalent_offsets(n1, freq1, n2, freq2, k, unit): @@ -1871,7 +1871,7 @@ def test_resample_equivalent_offsets(n1, freq1, n2, freq2, k, unit): [ ("19910905", "19920406", "D", "19910905", "19920407"), ("19910905 00:00", "19920406 06:00", "D", "19910905", "19920407"), - ("19910905 06:00", "19920406 06:00", "H", "19910905 06:00", "19920406 07:00"), + ("19910905 06:00", "19920406 06:00", "h", "19910905 06:00", "19920406 07:00"), ("19910906", "19920406", "ME", "19910831", "19920430"), ("19910831", "19920430", "ME", "19910831", "19920531"), ("1991-08", "1992-04", "ME", "19910831", "19920531"), @@ -1922,32 +1922,32 @@ def test_resample_apply_product(duplicates, unit): "2020-03-28", "2020-03-31", "D", - "24H", + "24h", "2020-03-30 01:00", ), # includes transition into DST ( "2020-03-28", "2020-10-27", "D", - "24H", + "24h", "2020-10-27 00:00", ), # includes transition into and out of DST ( "2020-10-25", "2020-10-27", "D", - "24H", + "24h", "2020-10-26 23:00", ), # includes transition out of DST ( "2020-03-28", "2020-03-31", - "24H", + "24h", "D", "2020-03-30 00:00", ), # same as above, but from 24H to D - ("2020-03-28", "2020-10-27", "24H", "D", "2020-10-27 00:00"), - ("2020-10-25", "2020-10-27", "24H", "D", "2020-10-26 00:00"), + ("2020-03-28", "2020-10-27", "24h", "D", "2020-10-27 00:00"), + ("2020-10-25", "2020-10-27", "24h", "D", "2020-10-26 00:00"), ], ) def test_resample_calendar_day_with_dst( @@ -1981,7 +1981,7 @@ def test_resample_aggregate_functions_min_count(func, unit): def test_resample_unsigned_int(any_unsigned_int_numpy_dtype, unit): # gh-43329 df = DataFrame( - index=date_range(start="2000-01-01", end="2000-01-03 23", freq="12H").as_unit( + index=date_range(start="2000-01-01", end="2000-01-03 23", freq="12h").as_unit( unit ), columns=["x"], @@ -2098,7 +2098,7 @@ def test_resample_c_b_closed_right(freq: str): def test_resample_b_55282(): # https://github.com/pandas-dev/pandas/issues/55282 s = Series( - [1, 2, 3, 4, 5, 6], index=date_range("2023-09-26", periods=6, freq="12H") + [1, 2, 3, 4, 5, 6], index=date_range("2023-09-26", periods=6, freq="12h") ) result = s.resample("B", closed="right", 
label="right").mean() expected = Series( diff --git a/pandas/tests/resample/test_period_index.py b/pandas/tests/resample/test_period_index.py index f3e2eecf63d6b..d214e1b4ae4ae 100644 --- a/pandas/tests/resample/test_period_index.py +++ b/pandas/tests/resample/test_period_index.py @@ -41,7 +41,7 @@ def _series_name(): class TestPeriodIndex: - @pytest.mark.parametrize("freq", ["2D", "1H", "2H"]) + @pytest.mark.parametrize("freq", ["2D", "1h", "2h"]) @pytest.mark.parametrize("kind", ["period", None, "timestamp"]) def test_asfreq(self, series_and_frame, freq, kind): # GH 12884, 15944 @@ -65,23 +65,23 @@ def test_asfreq_fill_value(self, series): new_index = date_range( s.index[0].to_timestamp(how="start"), (s.index[-1]).to_timestamp(how="start"), - freq="1H", + freq="1h", ) expected = s.to_timestamp().reindex(new_index, fill_value=4.0) - result = s.resample("1H", kind="timestamp").asfreq(fill_value=4.0) + result = s.resample("1h", kind="timestamp").asfreq(fill_value=4.0) tm.assert_series_equal(result, expected) frame = s.to_frame("value") new_index = date_range( frame.index[0].to_timestamp(how="start"), (frame.index[-1]).to_timestamp(how="start"), - freq="1H", + freq="1h", ) expected = frame.to_timestamp().reindex(new_index, fill_value=3.0) - result = frame.resample("1H", kind="timestamp").asfreq(fill_value=3.0) + result = frame.resample("1h", kind="timestamp").asfreq(fill_value=3.0) tm.assert_frame_equal(result, expected) - @pytest.mark.parametrize("freq", ["H", "12H", "2D", "W"]) + @pytest.mark.parametrize("freq", ["h", "12h", "2D", "W"]) @pytest.mark.parametrize("kind", [None, "period", "timestamp"]) @pytest.mark.parametrize("kwargs", [{"on": "date"}, {"level": "d"}]) def test_selection(self, index, freq, kind, kwargs): @@ -271,7 +271,7 @@ def test_with_local_timezone_pytz(self): # 1 day later end = datetime(year=2013, month=11, day=2, hour=0, minute=0, tzinfo=pytz.utc) - index = date_range(start, end, freq="H") + index = date_range(start, end, freq="h") series = Series(1, index=index) series = series.tz_convert(local_timezone) @@ -287,7 +287,7 @@ def test_with_local_timezone_pytz(self): def test_resample_with_pytz(self): # GH 13238 s = Series( - 2, index=date_range("2017-01-01", periods=48, freq="H", tz="US/Eastern") + 2, index=date_range("2017-01-01", periods=48, freq="h", tz="US/Eastern") ) result = s.resample("D").mean() expected = Series( @@ -312,7 +312,7 @@ def test_with_local_timezone_dateutil(self): year=2013, month=11, day=2, hour=0, minute=0, tzinfo=dateutil.tz.tzutc() ) - index = date_range(start, end, freq="H", name="idx") + index = date_range(start, end, freq="h", name="idx") series = Series(1, index=index) series = series.tz_convert(local_timezone) @@ -337,7 +337,7 @@ def test_resample_nonexistent_time_bin_edge(self): tm.assert_series_equal(result, expected) # GH 23742 - index = date_range(start="2017-10-10", end="2017-10-20", freq="1H") + index = date_range(start="2017-10-10", end="2017-10-20", freq="1h") index = index.tz_localize("UTC").tz_convert("America/Sao_Paulo") df = DataFrame(data=list(range(len(index))), index=index) result = df.groupby(pd.Grouper(freq="1D")).count() @@ -461,9 +461,9 @@ def test_upsample_daily_business_daily(self, simple_period_range_series): tm.assert_series_equal(result, expected) ts = simple_period_range_series("1/1/2000", "2/1/2000") - result = ts.resample("H", convention="s").asfreq() - exp_rng = period_range("1/1/2000", "2/1/2000 23:00", freq="H") - expected = ts.asfreq("H", how="s").reindex(exp_rng) + result = ts.resample("h", 
convention="s").asfreq() + exp_rng = period_range("1/1/2000", "2/1/2000 23:00", freq="h") + expected = ts.asfreq("h", how="s").reindex(exp_rng) tm.assert_series_equal(result, expected) def test_resample_irregular_sparse(self): @@ -530,7 +530,7 @@ def test_resample_tz_localized(self): # GH 6397 # comparing an offset that doesn't propagate tz's - rng = date_range("1/1/2011", periods=20000, freq="H") + rng = date_range("1/1/2011", periods=20000, freq="h") rng = rng.tz_localize("EST") ts = DataFrame(index=rng) ts["first"] = np.random.default_rng(2).standard_normal(len(rng)) @@ -660,7 +660,7 @@ def test_default_right_closed_label(self, from_freq, to_freq): @pytest.mark.parametrize( "from_freq, to_freq", - [("D", "MS"), ("Q", "AS"), ("ME", "QS"), ("H", "D"), ("min", "H")], + [("D", "MS"), ("Q", "AS"), ("ME", "QS"), ("h", "D"), ("min", "h")], ) def test_default_left_closed_label(self, from_freq, to_freq): idx = date_range(start="8/15/2012", periods=100, freq=from_freq) @@ -752,7 +752,7 @@ def test_evenly_divisible_with_no_extra_bins(self): result = df.resample("7D").sum() tm.assert_frame_equal(result, expected) - @pytest.mark.parametrize("freq, period_mult", [("H", 24), ("12H", 2)]) + @pytest.mark.parametrize("freq, period_mult", [("h", 24), ("12h", 2)]) @pytest.mark.parametrize("kind", [None, "period"]) def test_upsampling_ohlc(self, freq, period_mult, kind): # GH 13083 @@ -829,19 +829,19 @@ def test_resample_with_only_nat(self): @pytest.mark.parametrize( "start,end,start_freq,end_freq,offset", [ - ("19910905", "19910909 03:00", "H", "24H", "10H"), - ("19910905", "19910909 12:00", "H", "24H", "10H"), - ("19910905", "19910909 23:00", "H", "24H", "10H"), - ("19910905 10:00", "19910909", "H", "24H", "10H"), - ("19910905 10:00", "19910909 10:00", "H", "24H", "10H"), - ("19910905", "19910909 10:00", "H", "24H", "10H"), - ("19910905 12:00", "19910909", "H", "24H", "10H"), - ("19910905 12:00", "19910909 03:00", "H", "24H", "10H"), - ("19910905 12:00", "19910909 12:00", "H", "24H", "10H"), - ("19910905 12:00", "19910909 12:00", "H", "24H", "34H"), - ("19910905 12:00", "19910909 12:00", "H", "17H", "10H"), - ("19910905 12:00", "19910909 12:00", "H", "17H", "3H"), - ("19910905", "19910913 06:00", "2H", "24H", "10H"), + ("19910905", "19910909 03:00", "h", "24h", "10h"), + ("19910905", "19910909 12:00", "h", "24h", "10h"), + ("19910905", "19910909 23:00", "h", "24h", "10h"), + ("19910905 10:00", "19910909", "h", "24h", "10h"), + ("19910905 10:00", "19910909 10:00", "h", "24h", "10h"), + ("19910905", "19910909 10:00", "h", "24h", "10h"), + ("19910905 12:00", "19910909", "h", "24h", "10h"), + ("19910905 12:00", "19910909 03:00", "h", "24h", "10h"), + ("19910905 12:00", "19910909 12:00", "h", "24h", "10h"), + ("19910905 12:00", "19910909 12:00", "h", "24h", "34h"), + ("19910905 12:00", "19910909 12:00", "h", "17h", "10h"), + ("19910905 12:00", "19910909 12:00", "h", "17h", "3h"), + ("19910905", "19910913 06:00", "2h", "24h", "10h"), ("19910905", "19910905 01:39", "Min", "5Min", "3Min"), ("19910905", "19910905 03:18", "2Min", "5Min", "3Min"), ], @@ -858,11 +858,11 @@ def test_resample_with_offset(self, start, end, start_freq, end_freq, offset): def test_resample_with_offset_month(self): # GH 23882 & 31809 - pi = period_range("19910905 12:00", "19910909 1:00", freq="H") + pi = period_range("19910905 12:00", "19910909 1:00", freq="h") ser = Series(np.arange(len(pi)), index=pi) - result = ser.resample("M", offset="3H").mean() + result = ser.resample("M", offset="3h").mean() result = result.to_timestamp("M") - 
expected = ser.to_timestamp().resample("ME", offset="3H").mean() + expected = ser.to_timestamp().resample("ME", offset="3h").mean() # TODO: is non-tick the relevant characteristic? (GH 33815) expected.index = expected.index._with_freq(None) tm.assert_series_equal(result, expected) @@ -875,8 +875,8 @@ def test_resample_with_offset_month(self): ( "19910905 06:00", "19920406 06:00", - "H", - "H", + "h", + "h", "19910905 06:00", "19920406 06:00", ), @@ -912,7 +912,7 @@ def test_sum_min_count(self): tm.assert_series_equal(result, expected) def test_resample_t_l_deprecated(self): - # GH 52536 + # GH#52536 msg_t = "'T' is deprecated and will be removed in a future version." msg_l = "'L' is deprecated and will be removed in a future version." diff --git a/pandas/tests/resample/test_resample_api.py b/pandas/tests/resample/test_resample_api.py index 86a7439410d8b..ff7b129c52f71 100644 --- a/pandas/tests/resample/test_resample_api.py +++ b/pandas/tests/resample/test_resample_api.py @@ -33,13 +33,13 @@ def test_frame(dti, _test_series): def test_str(_test_series): - r = _test_series.resample("H") + r = _test_series.resample("h") assert ( "DatetimeIndexResampler [freq=, axis=0, closed=left, " "label=left, convention=start, origin=start_day]" in str(r) ) - r = _test_series.resample("H", origin="2000-01-01") + r = _test_series.resample("h", origin="2000-01-01") assert ( "DatetimeIndexResampler [freq=, axis=0, closed=left, " "label=left, convention=start, origin=2000-01-01 00:00:00]" in str(r) @@ -47,12 +47,12 @@ def test_str(_test_series): def test_api(_test_series): - r = _test_series.resample("H") + r = _test_series.resample("h") result = r.mean() assert isinstance(result, Series) assert len(result) == 217 - r = _test_series.to_frame().resample("H") + r = _test_series.to_frame().resample("h") result = r.mean() assert isinstance(result, DataFrame) assert len(result) == 217 @@ -127,36 +127,36 @@ def test_pipe(test_frame, _test_series): # GH17905 # series - r = _test_series.resample("H") + r = _test_series.resample("h") expected = r.max() - r.mean() result = r.pipe(lambda x: x.max() - x.mean()) tm.assert_series_equal(result, expected) # dataframe - r = test_frame.resample("H") + r = test_frame.resample("h") expected = r.max() - r.mean() result = r.pipe(lambda x: x.max() - x.mean()) tm.assert_frame_equal(result, expected) def test_getitem(test_frame): - r = test_frame.resample("H") + r = test_frame.resample("h") tm.assert_index_equal(r._selected_obj.columns, test_frame.columns) - r = test_frame.resample("H")["B"] + r = test_frame.resample("h")["B"] assert r._selected_obj.name == test_frame.columns[1] # technically this is allowed - r = test_frame.resample("H")["A", "B"] + r = test_frame.resample("h")["A", "B"] tm.assert_index_equal(r._selected_obj.columns, test_frame.columns[[0, 1]]) - r = test_frame.resample("H")["A", "B"] + r = test_frame.resample("h")["A", "B"] tm.assert_index_equal(r._selected_obj.columns, test_frame.columns[[0, 1]]) @pytest.mark.parametrize("key", [["D"], ["A", "D"]]) def test_select_bad_cols(key, test_frame): - g = test_frame.resample("H") + g = test_frame.resample("h") # 'A' should not be referenced as a bad column... # will have to rethink regex if you change message! 
msg = r"^\"Columns not found: 'D'\"$" @@ -165,7 +165,7 @@ def test_select_bad_cols(key, test_frame): def test_attribute_access(test_frame): - r = test_frame.resample("H") + r = test_frame.resample("h") tm.assert_series_equal(r.A.sum(), r["A"].sum()) @@ -188,7 +188,7 @@ def test_api_compat_before_use(attr): def tests_raises_on_nuisance(test_frame): df = test_frame df["D"] = "foo" - r = df.resample("H") + r = df.resample("h") result = r[["A", "B"]].mean() expected = pd.concat([r.A.mean(), r.B.mean()], axis=1) tm.assert_frame_equal(result, expected) @@ -1041,7 +1041,7 @@ def test_series_axis_param_depr(_test_series): "deprecated and will be removed in a future version." ) with tm.assert_produces_warning(FutureWarning, match=warning_msg): - _test_series.resample("H", axis=0) + _test_series.resample("h", axis=0) def test_resample_empty(): @@ -1061,5 +1061,5 @@ def test_resample_empty(): ] ) ) - result = df.resample("8H").mean() + result = df.resample("8h").mean() tm.assert_frame_equal(result, expected) diff --git a/pandas/tests/resample/test_resampler_grouper.py b/pandas/tests/resample/test_resampler_grouper.py index f394e3a25dc0f..55365d14a68ca 100644 --- a/pandas/tests/resample/test_resampler_grouper.py +++ b/pandas/tests/resample/test_resampler_grouper.py @@ -327,10 +327,10 @@ def test_apply_columns_multilevel(): ind = date_range(start="2017-01-01", freq="15Min", periods=8) df = DataFrame(np.array([0] * 16).reshape(8, 2), index=ind, columns=cols) agg_dict = {col: (np.sum if col[3] == "one" else np.mean) for col in df.columns} - result = df.resample("H").apply(lambda x: agg_dict[x.name](x)) + result = df.resample("h").apply(lambda x: agg_dict[x.name](x)) expected = DataFrame( 2 * [[0, 0.0]], - index=date_range(start="2017-01-01", freq="1H", periods=2), + index=date_range(start="2017-01-01", freq="1h", periods=2), columns=pd.MultiIndex.from_tuples( [("A", "a", "", "one"), ("B", "b", "i", "two")] ), @@ -418,14 +418,14 @@ def test_apply_to_one_column_of_df(): ) # access "col" via getattr -> make sure we handle AttributeError - result = df.resample("H").apply(lambda group: group.col.sum()) + result = df.resample("h").apply(lambda group: group.col.sum()) expected = Series( - [3, 12, 21, 9], index=date_range("2012-01-01", periods=4, freq="H") + [3, 12, 21, 9], index=date_range("2012-01-01", periods=4, freq="h") ) tm.assert_series_equal(result, expected) # access "col" via _getitem__ -> make sure we handle KeyErrpr - result = df.resample("H").apply(lambda group: group["col"].sum()) + result = df.resample("h").apply(lambda group: group["col"].sum()) tm.assert_series_equal(result, expected) @@ -608,7 +608,7 @@ def test_groupby_resample_size_all_index_same(): # GH 46826 df = DataFrame( {"A": [1] * 3 + [2] * 3 + [1] * 3 + [2] * 3, "B": np.arange(12)}, - index=date_range("31/12/2000 18:00", freq="H", periods=12), + index=date_range("31/12/2000 18:00", freq="h", periods=12), ) msg = "DataFrameGroupBy.resample operated on the grouping columns" with tm.assert_produces_warning(FutureWarning, match=msg): diff --git a/pandas/tests/resample/test_time_grouper.py b/pandas/tests/resample/test_time_grouper.py index c00366b2e28ce..e5593302625ec 100644 --- a/pandas/tests/resample/test_time_grouper.py +++ b/pandas/tests/resample/test_time_grouper.py @@ -273,7 +273,7 @@ def test_aggregate_with_nat_size(): def test_repr(): # GH18203 - result = repr(Grouper(key="A", freq="H")) + result = repr(Grouper(key="A", freq="h")) expected = ( "TimeGrouper(key='A', freq=, axis=0, sort=True, dropna=True, " "closed='left', 
label='left', how='mean', " @@ -281,7 +281,7 @@ def test_repr(): ) assert result == expected - result = repr(Grouper(key="A", freq="H", origin="2000-01-01")) + result = repr(Grouper(key="A", freq="h", origin="2000-01-01")) expected = ( "TimeGrouper(key='A', freq=, axis=0, sort=True, dropna=True, " "closed='left', label='left', how='mean', " @@ -304,7 +304,7 @@ def test_repr(): ], ) def test_upsample_sum(method, method_args, expected_values): - s = Series(1, index=date_range("2017", periods=2, freq="H")) + s = Series(1, index=date_range("2017", periods=2, freq="h")) resampled = s.resample("30min") index = pd.DatetimeIndex( ["2017-01-01T00:00:00", "2017-01-01T00:30:00", "2017-01-01T01:00:00"], diff --git a/pandas/tests/resample/test_timedelta.py b/pandas/tests/resample/test_timedelta.py index 79b13673e70c6..606403ba56494 100644 --- a/pandas/tests/resample/test_timedelta.py +++ b/pandas/tests/resample/test_timedelta.py @@ -128,13 +128,13 @@ def test_resample_timedelta_values(): @pytest.mark.parametrize( "start, end, freq, resample_freq", [ - ("8H", "21h59min50s", "10s", "3H"), # GH 30353 example - ("3H", "22H", "1H", "5H"), + ("8h", "21h59min50s", "10s", "3h"), # GH 30353 example + ("3h", "22h", "1h", "5h"), ("527D", "5006D", "3D", "10D"), ("1D", "10D", "1D", "2D"), # GH 13022 example # tests that worked before GH 33498: - ("8H", "21h59min50s", "10s", "2H"), - ("0H", "21h59min50s", "10s", "3H"), + ("8h", "21h59min50s", "10s", "2h"), + ("0h", "21h59min50s", "10s", "3h"), ("10D", "85D", "D", "2D"), ], ) diff --git a/pandas/tests/reshape/concat/test_concat.py b/pandas/tests/reshape/concat/test_concat.py index aa1a74aadae12..74c79d20a3fb3 100644 --- a/pandas/tests/reshape/concat/test_concat.py +++ b/pandas/tests/reshape/concat/test_concat.py @@ -272,7 +272,7 @@ def test_concat_mixed_objs(self): # G2385 # axis 1 - index = date_range("01-Jan-2013", periods=10, freq="H") + index = date_range("01-Jan-2013", periods=10, freq="h") arr = np.arange(10, dtype="int64") s1 = Series(arr, index=index) s2 = Series(arr, index=index) diff --git a/pandas/tests/reshape/concat/test_datetimes.py b/pandas/tests/reshape/concat/test_datetimes.py index 12d28c388d508..51398acd6ec57 100644 --- a/pandas/tests/reshape/concat/test_datetimes.py +++ b/pandas/tests/reshape/concat/test_datetimes.py @@ -46,8 +46,8 @@ def test_concat_datetime_datetime64_frame(self): def test_concat_datetime_timezone(self): # GH 18523 - idx1 = date_range("2011-01-01", periods=3, freq="H", tz="Europe/Paris") - idx2 = date_range(start=idx1[0], end=idx1[-1], freq="H") + idx1 = date_range("2011-01-01", periods=3, freq="h", tz="Europe/Paris") + idx2 = date_range(start=idx1[0], end=idx1[-1], freq="h") df1 = DataFrame({"a": [1, 2, 3]}, index=idx1) df2 = DataFrame({"b": [1, 2, 3]}, index=idx2) result = concat([df1, df2], axis=1) @@ -59,7 +59,7 @@ def test_concat_datetime_timezone(self): "2011-01-01 01:00:00+01:00", "2011-01-01 02:00:00+01:00", ], - freq="H", + freq="h", ) .tz_convert("UTC") .tz_convert("Europe/Paris") @@ -71,7 +71,7 @@ def test_concat_datetime_timezone(self): tm.assert_frame_equal(result, expected) - idx3 = date_range("2011-01-01", periods=3, freq="H", tz="Asia/Tokyo") + idx3 = date_range("2011-01-01", periods=3, freq="h", tz="Asia/Tokyo") df3 = DataFrame({"b": [1, 2, 3]}, index=idx3) result = concat([df1, df3], axis=1) @@ -102,7 +102,7 @@ def test_concat_datetime_timezone(self): tm.assert_frame_equal(result, expected) # GH 13783: Concat after resample - result = concat([df1.resample("H").mean(), df2.resample("H").mean()], sort=True) + 
result = concat([df1.resample("h").mean(), df2.resample("h").mean()], sort=True) expected = DataFrame( {"a": [1, 2, 3] + [np.nan] * 3, "b": [np.nan] * 3 + [1, 2, 3]}, index=idx1.append(idx1), diff --git a/pandas/tests/reshape/merge/test_merge.py b/pandas/tests/reshape/merge/test_merge.py index d203a04a7fffc..4d779349b5c14 100644 --- a/pandas/tests/reshape/merge/test_merge.py +++ b/pandas/tests/reshape/merge/test_merge.py @@ -901,7 +901,7 @@ def test_merge_on_datetime64tz_empty(self): def test_merge_datetime64tz_with_dst_transition(self): # GH 18885 df1 = DataFrame( - pd.date_range("2017-10-29 01:00", periods=4, freq="H", tz="Europe/Madrid"), + pd.date_range("2017-10-29 01:00", periods=4, freq="h", tz="Europe/Madrid"), columns=["date"], ) df1["value"] = 1 @@ -922,7 +922,7 @@ def test_merge_datetime64tz_with_dst_transition(self): expected = DataFrame( { "date": pd.date_range( - "2017-10-29 01:00", periods=7, freq="H", tz="Europe/Madrid" + "2017-10-29 01:00", periods=7, freq="h", tz="Europe/Madrid" ), "value_x": [1] * 4 + [np.nan] * 3, "value_y": [np.nan] * 4 + [2] * 3, diff --git a/pandas/tests/scalar/interval/test_interval.py b/pandas/tests/scalar/interval/test_interval.py index a02dbf0a0413f..4841c488a5768 100644 --- a/pandas/tests/scalar/interval/test_interval.py +++ b/pandas/tests/scalar/interval/test_interval.py @@ -80,8 +80,8 @@ def test_hash(self, interval): (-np.inf, np.inf, np.inf), (Timedelta("0 days"), Timedelta("5 days"), Timedelta("5 days")), (Timedelta("10 days"), Timedelta("10 days"), Timedelta("0 days")), - (Timedelta("1H10min"), Timedelta("5H5min"), Timedelta("3H55min")), - (Timedelta("5s"), Timedelta("1H"), Timedelta("59min55s")), + (Timedelta("1h10min"), Timedelta("5h5min"), Timedelta("3h55min")), + (Timedelta("5s"), Timedelta("1h"), Timedelta("59min55s")), ], ) def test_length(self, left, right, expected): diff --git a/pandas/tests/scalar/period/test_asfreq.py b/pandas/tests/scalar/period/test_asfreq.py index 4287a69823aef..597282e10052e 100644 --- a/pandas/tests/scalar/period/test_asfreq.py +++ b/pandas/tests/scalar/period/test_asfreq.py @@ -78,8 +78,8 @@ def test_conv_annual(self): ival_A_to_B_end = Period(freq="B", year=2007, month=12, day=31) ival_A_to_D_start = Period(freq="D", year=2007, month=1, day=1) ival_A_to_D_end = Period(freq="D", year=2007, month=12, day=31) - ival_A_to_H_start = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_A_to_H_end = Period(freq="H", year=2007, month=12, day=31, hour=23) + ival_A_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_A_to_H_end = Period(freq="h", year=2007, month=12, day=31, hour=23) ival_A_to_T_start = Period( freq="Min", year=2007, month=1, day=1, hour=0, minute=0 ) @@ -111,8 +111,10 @@ def test_conv_annual(self): assert ival_A.asfreq("B", "E") == ival_A_to_B_end assert ival_A.asfreq("D", "s") == ival_A_to_D_start assert ival_A.asfreq("D", "E") == ival_A_to_D_end - assert ival_A.asfreq("H", "s") == ival_A_to_H_start - assert ival_A.asfreq("H", "E") == ival_A_to_H_end + msg = "'H' is deprecated and will be removed in a future version." + with tm.assert_produces_warning(FutureWarning, match=msg): + assert ival_A.asfreq("H", "s") == ival_A_to_H_start + assert ival_A.asfreq("H", "E") == ival_A_to_H_end assert ival_A.asfreq("min", "s") == ival_A_to_T_start assert ival_A.asfreq("min", "E") == ival_A_to_T_end msg = "'T' is deprecated and will be removed in a future version." 
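The hunk above shows the migration pattern used throughout these test changes: conversions that keep the uppercase alias are wrapped in ``tm.assert_produces_warning(FutureWarning, ...)``, while the expected values switch to the lowercase spelling. A minimal sketch of what this means for user code, assuming a pandas build that includes the deprecation exercised here (illustrative session, not part of the patch):

    import warnings

    import pandas as pd

    ival_A = pd.Period(freq="Y", year=2007)

    # lowercase "h" is the supported spelling for an hourly Period
    assert ival_A.asfreq("h", "s") == pd.Period(freq="h", year=2007, month=1, day=1, hour=0)

    # the uppercase alias still resolves to the same Period, but emits
    # "'H' is deprecated and will be removed in a future version."
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        assert ival_A.asfreq("H", "s") == ival_A.asfreq("h", "s")
    assert any(issubclass(w.category, FutureWarning) for w in caught)
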
@@ -154,8 +156,8 @@ def test_conv_quarterly(self): ival_Q_to_B_end = Period(freq="B", year=2007, month=3, day=30) ival_Q_to_D_start = Period(freq="D", year=2007, month=1, day=1) ival_Q_to_D_end = Period(freq="D", year=2007, month=3, day=31) - ival_Q_to_H_start = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_Q_to_H_end = Period(freq="H", year=2007, month=3, day=31, hour=23) + ival_Q_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_Q_to_H_end = Period(freq="h", year=2007, month=3, day=31, hour=23) ival_Q_to_T_start = Period( freq="Min", year=2007, month=1, day=1, hour=0, minute=0 ) @@ -187,8 +189,8 @@ def test_conv_quarterly(self): assert ival_Q.asfreq("B", "E") == ival_Q_to_B_end assert ival_Q.asfreq("D", "s") == ival_Q_to_D_start assert ival_Q.asfreq("D", "E") == ival_Q_to_D_end - assert ival_Q.asfreq("H", "s") == ival_Q_to_H_start - assert ival_Q.asfreq("H", "E") == ival_Q_to_H_end + assert ival_Q.asfreq("h", "s") == ival_Q_to_H_start + assert ival_Q.asfreq("h", "E") == ival_Q_to_H_end assert ival_Q.asfreq("Min", "s") == ival_Q_to_T_start assert ival_Q.asfreq("Min", "E") == ival_Q_to_T_end assert ival_Q.asfreq("s", "s") == ival_Q_to_S_start @@ -216,8 +218,8 @@ def test_conv_monthly(self): ival_M_to_B_end = Period(freq="B", year=2007, month=1, day=31) ival_M_to_D_start = Period(freq="D", year=2007, month=1, day=1) ival_M_to_D_end = Period(freq="D", year=2007, month=1, day=31) - ival_M_to_H_start = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_M_to_H_end = Period(freq="H", year=2007, month=1, day=31, hour=23) + ival_M_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_M_to_H_end = Period(freq="h", year=2007, month=1, day=31, hour=23) ival_M_to_T_start = Period( freq="Min", year=2007, month=1, day=1, hour=0, minute=0 ) @@ -243,8 +245,8 @@ def test_conv_monthly(self): assert ival_M.asfreq("B", "E") == ival_M_to_B_end assert ival_M.asfreq("D", "s") == ival_M_to_D_start assert ival_M.asfreq("D", "E") == ival_M_to_D_end - assert ival_M.asfreq("H", "s") == ival_M_to_H_start - assert ival_M.asfreq("H", "E") == ival_M_to_H_end + assert ival_M.asfreq("h", "s") == ival_M_to_H_start + assert ival_M.asfreq("h", "E") == ival_M_to_H_end assert ival_M.asfreq("Min", "s") == ival_M_to_T_start assert ival_M.asfreq("Min", "E") == ival_M_to_T_end assert ival_M.asfreq("s", "s") == ival_M_to_S_start @@ -306,8 +308,8 @@ def test_conv_weekly(self): ival_W_to_B_end = Period(freq="B", year=2007, month=1, day=5) ival_W_to_D_start = Period(freq="D", year=2007, month=1, day=1) ival_W_to_D_end = Period(freq="D", year=2007, month=1, day=7) - ival_W_to_H_start = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_W_to_H_end = Period(freq="H", year=2007, month=1, day=7, hour=23) + ival_W_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_W_to_H_end = Period(freq="h", year=2007, month=1, day=7, hour=23) ival_W_to_T_start = Period( freq="Min", year=2007, month=1, day=1, hour=0, minute=0 ) @@ -352,8 +354,8 @@ def test_conv_weekly(self): assert ival_WMON.asfreq("D", "s") == ival_WMON_to_D_start assert ival_WMON.asfreq("D", "E") == ival_WMON_to_D_end - assert ival_W.asfreq("H", "s") == ival_W_to_H_start - assert ival_W.asfreq("H", "E") == ival_W_to_H_end + assert ival_W.asfreq("h", "s") == ival_W_to_H_start + assert ival_W.asfreq("h", "E") == ival_W_to_H_end assert ival_W.asfreq("Min", "s") == ival_W_to_T_start assert ival_W.asfreq("Min", "E") == ival_W_to_T_end assert ival_W.asfreq("s", "s") == ival_W_to_S_start @@ -399,8 +401,8 
@@ def test_conv_business(self): ival_B_to_M = Period(freq="M", year=2007, month=1) ival_B_to_W = Period(freq="W", year=2007, month=1, day=7) ival_B_to_D = Period(freq="D", year=2007, month=1, day=1) - ival_B_to_H_start = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_B_to_H_end = Period(freq="H", year=2007, month=1, day=1, hour=23) + ival_B_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_B_to_H_end = Period(freq="h", year=2007, month=1, day=1, hour=23) ival_B_to_T_start = Period( freq="Min", year=2007, month=1, day=1, hour=0, minute=0 ) @@ -425,8 +427,8 @@ def test_conv_business(self): assert ival_B.asfreq("D") == ival_B_to_D - assert ival_B.asfreq("H", "s") == ival_B_to_H_start - assert ival_B.asfreq("H", "E") == ival_B_to_H_end + assert ival_B.asfreq("h", "s") == ival_B_to_H_start + assert ival_B.asfreq("h", "E") == ival_B_to_H_end assert ival_B.asfreq("Min", "s") == ival_B_to_T_start assert ival_B.asfreq("Min", "E") == ival_B_to_T_end assert ival_B.asfreq("s", "s") == ival_B_to_S_start @@ -465,8 +467,8 @@ def test_conv_daily(self): ival_D_to_M = Period(freq="M", year=2007, month=1) ival_D_to_W = Period(freq="W", year=2007, month=1, day=7) - ival_D_to_H_start = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_D_to_H_end = Period(freq="H", year=2007, month=1, day=1, hour=23) + ival_D_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_D_to_H_end = Period(freq="h", year=2007, month=1, day=1, hour=23) ival_D_to_T_start = Period( freq="Min", year=2007, month=1, day=1, hour=0, minute=0 ) @@ -503,8 +505,8 @@ def test_conv_daily(self): assert ival_D_sunday.asfreq("B", "s") == ival_B_friday assert ival_D_sunday.asfreq("B", "E") == ival_B_monday - assert ival_D.asfreq("H", "s") == ival_D_to_H_start - assert ival_D.asfreq("H", "E") == ival_D_to_H_end + assert ival_D.asfreq("h", "s") == ival_D_to_H_start + assert ival_D.asfreq("h", "E") == ival_D_to_H_end assert ival_D.asfreq("Min", "s") == ival_D_to_T_start assert ival_D.asfreq("Min", "E") == ival_D_to_T_end assert ival_D.asfreq("s", "s") == ival_D_to_S_start @@ -515,13 +517,13 @@ def test_conv_daily(self): def test_conv_hourly(self): # frequency conversion tests: from Hourly Frequency" - ival_H = Period(freq="H", year=2007, month=1, day=1, hour=0) - ival_H_end_of_year = Period(freq="H", year=2007, month=12, day=31, hour=23) - ival_H_end_of_quarter = Period(freq="H", year=2007, month=3, day=31, hour=23) - ival_H_end_of_month = Period(freq="H", year=2007, month=1, day=31, hour=23) - ival_H_end_of_week = Period(freq="H", year=2007, month=1, day=7, hour=23) - ival_H_end_of_day = Period(freq="H", year=2007, month=1, day=1, hour=23) - ival_H_end_of_bus = Period(freq="H", year=2007, month=1, day=1, hour=23) + ival_H = Period(freq="h", year=2007, month=1, day=1, hour=0) + ival_H_end_of_year = Period(freq="h", year=2007, month=12, day=31, hour=23) + ival_H_end_of_quarter = Period(freq="h", year=2007, month=3, day=31, hour=23) + ival_H_end_of_month = Period(freq="h", year=2007, month=1, day=31, hour=23) + ival_H_end_of_week = Period(freq="h", year=2007, month=1, day=7, hour=23) + ival_H_end_of_day = Period(freq="h", year=2007, month=1, day=1, hour=23) + ival_H_end_of_bus = Period(freq="h", year=2007, month=1, day=1, hour=23) ival_H_to_A = Period(freq="Y", year=2007) ival_H_to_Q = Period(freq="Q", year=2007, quarter=1) @@ -563,7 +565,7 @@ def test_conv_hourly(self): assert ival_H.asfreq("s", "s") == ival_H_to_S_start assert ival_H.asfreq("s", "E") == ival_H_to_S_end - assert 
ival_H.asfreq("H") == ival_H + assert ival_H.asfreq("h") == ival_H def test_conv_minutely(self): # frequency conversion tests: from Minutely Frequency" @@ -598,7 +600,7 @@ def test_conv_minutely(self): ival_T_to_D = Period(freq="D", year=2007, month=1, day=1) with tm.assert_produces_warning(FutureWarning, match=bday_msg): ival_T_to_B = Period(freq="B", year=2007, month=1, day=1) - ival_T_to_H = Period(freq="H", year=2007, month=1, day=1, hour=0) + ival_T_to_H = Period(freq="h", year=2007, month=1, day=1, hour=0) ival_T_to_S_start = Period( freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0 @@ -620,8 +622,8 @@ def test_conv_minutely(self): with tm.assert_produces_warning(FutureWarning, match=bday_msg): assert ival_T.asfreq("B") == ival_T_to_B assert ival_T_end_of_bus.asfreq("B") == ival_T_to_B - assert ival_T.asfreq("H") == ival_T_to_H - assert ival_T_end_of_hour.asfreq("H") == ival_T_to_H + assert ival_T.asfreq("h") == ival_T_to_H + assert ival_T_end_of_hour.asfreq("h") == ival_T_to_H assert ival_T.asfreq("s", "s") == ival_T_to_S_start assert ival_T.asfreq("s", "E") == ival_T_to_S_end @@ -664,7 +666,7 @@ def test_conv_secondly(self): ival_S_to_D = Period(freq="D", year=2007, month=1, day=1) with tm.assert_produces_warning(FutureWarning, match=bday_msg): ival_S_to_B = Period(freq="B", year=2007, month=1, day=1) - ival_S_to_H = Period(freq="H", year=2007, month=1, day=1, hour=0) + ival_S_to_H = Period(freq="h", year=2007, month=1, day=1, hour=0) ival_S_to_T = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0) assert ival_S.asfreq("Y") == ival_S_to_A @@ -680,8 +682,8 @@ def test_conv_secondly(self): with tm.assert_produces_warning(FutureWarning, match=bday_msg): assert ival_S.asfreq("B") == ival_S_to_B assert ival_S_end_of_bus.asfreq("B") == ival_S_to_B - assert ival_S.asfreq("H") == ival_S_to_H - assert ival_S_end_of_hour.asfreq("H") == ival_S_to_H + assert ival_S.asfreq("h") == ival_S_to_H + assert ival_S_end_of_hour.asfreq("h") == ival_S_to_H assert ival_S.asfreq("Min") == ival_S_to_T assert ival_S_end_of_minute.asfreq("Min") == ival_S_to_T @@ -778,24 +780,24 @@ def test_asfreq_mult(self): def test_asfreq_combined(self): # normal freq to combined freq - p = Period("2007", freq="H") + p = Period("2007", freq="h") # ordinal will not change - expected = Period("2007", freq="25H") - for freq, how in zip(["1D1H", "1H1D"], ["E", "S"]): + expected = Period("2007", freq="25h") + for freq, how in zip(["1D1h", "1h1D"], ["E", "S"]): result = p.asfreq(freq, how=how) assert result == expected assert result.ordinal == expected.ordinal assert result.freq == expected.freq # combined freq to normal freq - p1 = Period(freq="1D1H", year=2007) - p2 = Period(freq="1H1D", year=2007) + p1 = Period(freq="1D1h", year=2007) + p2 = Period(freq="1h1D", year=2007) # ordinal will change because how=E is the default - result1 = p1.asfreq("H") - result2 = p2.asfreq("H") - expected = Period("2007-01-02", freq="H") + result1 = p1.asfreq("h") + result2 = p2.asfreq("h") + expected = Period("2007-01-02", freq="h") assert result1 == expected assert result1.ordinal == expected.ordinal assert result1.freq == expected.freq @@ -804,9 +806,9 @@ def test_asfreq_combined(self): assert result2.freq == expected.freq # ordinal will not change - result1 = p1.asfreq("H", how="S") - result2 = p2.asfreq("H", how="S") - expected = Period("2007-01-01", freq="H") + result1 = p1.asfreq("h", how="S") + result2 = p2.asfreq("h", how="S") + expected = Period("2007-01-01", freq="h") assert result1 == expected assert 
result1.ordinal == expected.ordinal assert result1.freq == expected.freq diff --git a/pandas/tests/scalar/period/test_period.py b/pandas/tests/scalar/period/test_period.py index c7d42a83e663c..6c27881e44b56 100644 --- a/pandas/tests/scalar/period/test_period.py +++ b/pandas/tests/scalar/period/test_period.py @@ -340,7 +340,7 @@ def test_constructor_infer_freq(self): assert p.freq == "D" p = Period("2007-01-01 07") - assert p.freq == "H" + assert p.freq == "h" p = Period("2007-01-01 07:10") assert p.freq == "min" @@ -428,7 +428,7 @@ def test_period_from_ordinal(self): assert p == res assert isinstance(res, Period) - @pytest.mark.parametrize("freq", ["Y", "M", "D", "H"]) + @pytest.mark.parametrize("freq", ["Y", "M", "D", "h"]) def test_construct_from_nat_string_and_freq(self, freq): per = Period("NaT", freq=freq) assert per is NaT @@ -449,7 +449,7 @@ def test_period_cons_nat(self): p = Period(iNaT, freq="3D") assert p is NaT - p = Period(iNaT, freq="1D1H") + p = Period(iNaT, freq="1D1h") assert p is NaT p = Period("NaT") @@ -491,14 +491,14 @@ def test_period_cons_mult(self): def test_period_cons_combined(self): p = [ ( - Period("2011-01", freq="1D1H"), - Period("2011-01", freq="1H1D"), - Period("2011-01", freq="H"), + Period("2011-01", freq="1D1h"), + Period("2011-01", freq="1h1D"), + Period("2011-01", freq="h"), ), ( - Period(ordinal=1, freq="1D1H"), - Period(ordinal=1, freq="1H1D"), - Period(ordinal=1, freq="H"), + Period(ordinal=1, freq="1D1h"), + Period(ordinal=1, freq="1h1D"), + Period(ordinal=1, freq="h"), ), ] @@ -507,49 +507,49 @@ def test_period_cons_combined(self): assert p2.ordinal == p3.ordinal assert p1.freq == offsets.Hour(25) - assert p1.freqstr == "25H" + assert p1.freqstr == "25h" assert p2.freq == offsets.Hour(25) - assert p2.freqstr == "25H" + assert p2.freqstr == "25h" assert p3.freq == offsets.Hour() - assert p3.freqstr == "H" + assert p3.freqstr == "h" result = p1 + 1 assert result.ordinal == (p3 + 25).ordinal assert result.freq == p1.freq - assert result.freqstr == "25H" + assert result.freqstr == "25h" result = p2 + 1 assert result.ordinal == (p3 + 25).ordinal assert result.freq == p2.freq - assert result.freqstr == "25H" + assert result.freqstr == "25h" result = p1 - 1 assert result.ordinal == (p3 - 25).ordinal assert result.freq == p1.freq - assert result.freqstr == "25H" + assert result.freqstr == "25h" result = p2 - 1 assert result.ordinal == (p3 - 25).ordinal assert result.freq == p2.freq - assert result.freqstr == "25H" + assert result.freqstr == "25h" - msg = "Frequency must be positive, because it represents span: -25H" + msg = "Frequency must be positive, because it represents span: -25h" with pytest.raises(ValueError, match=msg): - Period("2011-01", freq="-1D1H") + Period("2011-01", freq="-1D1h") with pytest.raises(ValueError, match=msg): - Period("2011-01", freq="-1H1D") + Period("2011-01", freq="-1h1D") with pytest.raises(ValueError, match=msg): - Period(ordinal=1, freq="-1D1H") + Period(ordinal=1, freq="-1D1h") with pytest.raises(ValueError, match=msg): - Period(ordinal=1, freq="-1H1D") + Period(ordinal=1, freq="-1h1D") msg = "Frequency must be positive, because it represents span: 0D" with pytest.raises(ValueError, match=msg): - Period("2011-01", freq="0D0H") + Period("2011-01", freq="0D0h") with pytest.raises(ValueError, match=msg): - Period(ordinal=1, freq="0D0H") + Period(ordinal=1, freq="0D0h") # You can only combine together day and intraday offsets msg = "Invalid frequency: 1W1D" @@ -584,7 +584,7 @@ def test_period_constructor_nanosecond(self, day, 
hour, sec_float, expected): def test_period_large_ordinal(self, hour): # Issue #36430 # Integer overflow for Period over the maximum timestamp - p = Period(ordinal=2562048 + hour, freq="1H") + p = Period(ordinal=2562048 + hour, freq="1h") assert p.hour == hour @@ -635,7 +635,7 @@ def test_to_timestamp(self): assert end_ts == p.to_timestamp("D", how=a) assert end_ts == p.to_timestamp("3D", how=a) - from_lst = ["Y", "Q", "M", "W", "B", "D", "H", "Min", "s"] + from_lst = ["Y", "Q", "M", "W", "B", "D", "h", "Min", "s"] def _ex(p): if p.freq == "B": @@ -655,10 +655,10 @@ def _ex(p): p = Period("1985", freq="Y") - result = p.to_timestamp("H", how="end") + result = p.to_timestamp("h", how="end") expected = Timestamp(1986, 1, 1) - Timedelta(1, "ns") assert result == expected - result = p.to_timestamp("3H", how="end") + result = p.to_timestamp("3h", how="end") assert result == expected result = p.to_timestamp("min", how="end") @@ -672,13 +672,13 @@ def _ex(p): assert result == expected expected = datetime(1985, 1, 1) - result = p.to_timestamp("H", how="start") + result = p.to_timestamp("h", how="start") assert result == expected result = p.to_timestamp("min", how="start") assert result == expected result = p.to_timestamp("s", how="start") assert result == expected - result = p.to_timestamp("3H", how="start") + result = p.to_timestamp("3h", how="start") assert result == expected result = p.to_timestamp("5s", how="start") assert result == expected @@ -731,7 +731,7 @@ def test_to_timestamp_microsecond(self, ts, expected, freq): ("2000-12-15 13:45:26.123", None, "2000-12-15 13:45:26.123", "ms"), ("2000-12-15 13:45:26", "s", "2000-12-15 13:45:26", "s"), ("2000-12-15 13:45:26", "min", "2000-12-15 13:45", "min"), - ("2000-12-15 13:45:26", "H", "2000-12-15 13:00", "H"), + ("2000-12-15 13:45:26", "h", "2000-12-15 13:00", "h"), ("2000-12-15", "Y", "2000", "Y-DEC"), ("2000-12-15", "Q", "2000Q4", "Q-DEC"), ("2000-12-15", "M", "2000-12", "M"), @@ -763,7 +763,7 @@ def test_strftime(self): class TestPeriodProperties: """Test properties such as year, month, weekday, etc....""" - @pytest.mark.parametrize("freq", ["Y", "M", "D", "H"]) + @pytest.mark.parametrize("freq", ["Y", "M", "D", "h"]) def test_is_leap_year(self, freq): # GH 13727 p = Period("2000-01-01 00:00:00", freq=freq) @@ -808,7 +808,7 @@ def test_period_deprecated_freq(self): "M": ["MTH", "MONTH", "MONTHLY", "Mth", "month", "monthly"], "B": ["BUS", "BUSINESS", "BUSINESSLY", "WEEKDAY", "bus"], "D": ["DAY", "DLY", "DAILY", "Day", "Dly", "Daily"], - "H": ["HR", "HOUR", "HRLY", "HOURLY", "hr", "Hour", "HRly"], + "h": ["HR", "HOUR", "HRLY", "HOURLY", "hr", "Hour", "HRly"], "min": ["minute", "MINUTE", "MINUTELY", "minutely"], "s": ["sec", "SEC", "SECOND", "SECONDLY", "second"], "ms": ["MILLISECOND", "MILLISECONDLY", "millisecond"], @@ -861,7 +861,7 @@ def test_inner_bounds_start_and_end_time(self, bound, offset, period_property): assert getattr(period, period_property).floor("s") == expected def test_start_time(self): - freq_lst = ["Y", "Q", "M", "D", "H", "min", "s"] + freq_lst = ["Y", "Q", "M", "D", "h", "min", "s"] xp = datetime(2012, 1, 1) for f in freq_lst: p = Period("2012", freq=f) @@ -891,7 +891,7 @@ def _ex(*args): xp = _ex(2012, 1, 2) assert xp == p.end_time - p = Period("2012", freq="H") + p = Period("2012", freq="h") xp = _ex(2012, 1, 1, 1) assert xp == p.end_time @@ -909,11 +909,11 @@ def _ex(*args): xp = _ex(2012, 1, 16) assert xp == p.end_time - p = Period("2012", freq="1D1H") + p = Period("2012", freq="1D1h") xp = _ex(2012, 1, 2, 1) assert xp 
== p.end_time - p = Period("2012", freq="1H1D") + p = Period("2012", freq="1h1D") xp = _ex(2012, 1, 2, 1) assert xp == p.end_time @@ -1023,8 +1023,8 @@ def test_properties_daily(self): def test_properties_hourly(self): # Test properties on Periods with hourly frequency. - h_date1 = Period(freq="H", year=2007, month=1, day=1, hour=0) - h_date2 = Period(freq="2H", year=2007, month=1, day=1, hour=0) + h_date1 = Period(freq="h", year=2007, month=1, day=1, hour=0) + h_date2 = Period(freq="2h", year=2007, month=1, day=1, hour=0) for h_date in [h_date1, h_date2]: assert h_date.year == 2007 @@ -1036,7 +1036,7 @@ def test_properties_hourly(self): assert h_date.hour == 0 assert h_date.days_in_month == 31 assert ( - Period(freq="H", year=2012, month=2, day=1, hour=0).days_in_month == 29 + Period(freq="h", year=2012, month=2, day=1, hour=0).days_in_month == 29 ) def test_properties_minutely(self): @@ -1411,7 +1411,7 @@ def test_add_offset(self): with pytest.raises(IncompatibleFrequency, match=msg): o + p - for freq in ["H", "2H", "3H"]: + for freq in ["h", "2h", "3h"]: p = Period("2011-04-01 09:00", freq=freq) exp = Period("2011-04-03 09:00", freq=freq) @@ -1516,7 +1516,7 @@ def test_sub_offset(self): with pytest.raises(IncompatibleFrequency, match=msg): p - o - for freq in ["H", "2H", "3H"]: + for freq in ["h", "2h", "3h"]: p = Period("2011-04-01 09:00", freq=freq) assert p - offsets.Day(2) == Period("2011-03-30 09:00", freq=freq) assert p - offsets.Hour(3) == Period("2011-04-01 06:00", freq=freq) @@ -1589,7 +1589,7 @@ def test_small_year_parsing(): def test_negone_ordinals(): - freqs = ["Y", "M", "Q", "D", "H", "min", "s"] + freqs = ["Y", "M", "Q", "D", "h", "min", "s"] period = Period(ordinal=-1, freq="D") for freq in freqs: diff --git a/pandas/tests/scalar/timedelta/test_constructors.py b/pandas/tests/scalar/timedelta/test_constructors.py index 7bd9e5fc5e293..0d876fbb9bde8 100644 --- a/pandas/tests/scalar/timedelta/test_constructors.py +++ b/pandas/tests/scalar/timedelta/test_constructors.py @@ -208,8 +208,8 @@ def test_construction(): assert Timedelta(offsets.Second(2)) == Timedelta(seconds=2) # GH#11995: unicode - expected = Timedelta("1H") - result = Timedelta("1H") + expected = Timedelta("1h") + result = Timedelta("1h") assert result == expected assert to_timedelta(offsets.Hour(2)) == Timedelta("0 days, 02:00:00") diff --git a/pandas/tests/scalar/timedelta/test_timedelta.py b/pandas/tests/scalar/timedelta/test_timedelta.py index b1483342eb6e4..ef780a090c12d 100644 --- a/pandas/tests/scalar/timedelta/test_timedelta.py +++ b/pandas/tests/scalar/timedelta/test_timedelta.py @@ -673,7 +673,7 @@ def test_to_numpy_alias(self): ("5s", Timedelta("1 days 02:34:55"), Timedelta("-1 days 02:34:55")), ("min", Timedelta("1 days 02:35:00"), Timedelta("-1 days 02:35:00")), ("12min", Timedelta("1 days 02:36:00"), Timedelta("-1 days 02:36:00")), - ("H", Timedelta("1 days 03:00:00"), Timedelta("-1 days 03:00:00")), + ("h", Timedelta("1 days 03:00:00"), Timedelta("-1 days 03:00:00")), ("d", Timedelta("1 days"), Timedelta("-1 days")), ], ) @@ -991,7 +991,7 @@ def test_total_seconds_precision(self): def test_resolution_string(self): assert Timedelta(days=1).resolution_string == "D" - assert Timedelta(days=1, hours=6).resolution_string == "H" + assert Timedelta(days=1, hours=6).resolution_string == "h" assert Timedelta(days=1, minutes=6).resolution_string == "min" assert Timedelta(days=1, seconds=6).resolution_string == "s" assert Timedelta(days=1, milliseconds=6).resolution_string == "ms" @@ -1043,6 +1043,7 @@ def 
test_timedelta_attribute_precision(): @pytest.mark.parametrize( "unit,unit_depr", [ + ("h", "H"), ("min", "T"), ("s", "S"), ("ms", "L"), @@ -1050,8 +1051,8 @@ def test_timedelta_attribute_precision(): ("us", "U"), ], ) -def test_units_t_l_u_n_deprecated(unit, unit_depr): - # GH 52536 +def test_units_H_T_S_L_N_U_deprecated(unit, unit_depr): + # GH#52536 msg = f"'{unit_depr}' is deprecated and will be removed in a future version." expected = Timedelta(1, unit=unit) diff --git a/pandas/tests/scalar/timestamp/test_unary_ops.py b/pandas/tests/scalar/timestamp/test_unary_ops.py index e501bd93bc1c6..b7d5bbe71269a 100644 --- a/pandas/tests/scalar/timestamp/test_unary_ops.py +++ b/pandas/tests/scalar/timestamp/test_unary_ops.py @@ -45,7 +45,7 @@ def test_round_division_by_zero_raises(self): ("20130201 12:00:00", "D", "20130202"), ("20130104 12:00:00", "D", "20130105"), ("2000-01-05 05:09:15.13", "D", "2000-01-05 00:00:00"), - ("2000-01-05 05:09:15.13", "H", "2000-01-05 05:00:00"), + ("2000-01-05 05:09:15.13", "h", "2000-01-05 05:00:00"), ("2000-01-05 05:09:15.13", "s", "2000-01-05 05:09:15"), ], ) @@ -141,7 +141,7 @@ def test_ceil_floor_edge(self, test_input, rounder, freq, expected): ("2018-01-01 00:04:00", "4min", "2018-01-01 00:04:00"), ("2018-01-01 00:15:00", "15min", "2018-01-01 00:15:00"), ("2018-01-01 00:20:00", "20min", "2018-01-01 00:20:00"), - ("2018-01-01 03:00:00", "3H", "2018-01-01 03:00:00"), + ("2018-01-01 03:00:00", "3h", "2018-01-01 03:00:00"), ], ) @pytest.mark.parametrize("rounder", ["ceil", "floor", "round"]) @@ -181,30 +181,30 @@ def test_round_dst_border_ambiguous(self, method, unit): ts = Timestamp("2017-10-29 00:00:00", tz="UTC").tz_convert("Europe/Madrid") ts = ts.as_unit(unit) # - result = getattr(ts, method)("H", ambiguous=True) + result = getattr(ts, method)("h", ambiguous=True) assert result == ts assert result._creso == getattr(NpyDatetimeUnit, f"NPY_FR_{unit}").value - result = getattr(ts, method)("H", ambiguous=False) + result = getattr(ts, method)("h", ambiguous=False) expected = Timestamp("2017-10-29 01:00:00", tz="UTC").tz_convert( "Europe/Madrid" ) assert result == expected assert result._creso == getattr(NpyDatetimeUnit, f"NPY_FR_{unit}").value - result = getattr(ts, method)("H", ambiguous="NaT") + result = getattr(ts, method)("h", ambiguous="NaT") assert result is NaT msg = "Cannot infer dst time" with pytest.raises(pytz.AmbiguousTimeError, match=msg): - getattr(ts, method)("H", ambiguous="raise") + getattr(ts, method)("h", ambiguous="raise") @pytest.mark.parametrize( "method, ts_str, freq", [ ["ceil", "2018-03-11 01:59:00-0600", "5min"], ["round", "2018-03-11 01:59:00-0600", "5min"], - ["floor", "2018-03-11 03:01:00-0500", "2H"], + ["floor", "2018-03-11 03:01:00-0500", "2h"], ], ) @pytest.mark.parametrize( diff --git a/pandas/tests/series/accessors/test_dt_accessor.py b/pandas/tests/series/accessors/test_dt_accessor.py index fba16749c026e..d7d33ae058af8 100644 --- a/pandas/tests/series/accessors/test_dt_accessor.py +++ b/pandas/tests/series/accessors/test_dt_accessor.py @@ -345,30 +345,30 @@ def test_dt_round_tz_ambiguous(self, method): ) df1["date"] = df1["date"].dt.tz_convert("Europe/Madrid") # infer - result = getattr(df1.date.dt, method)("H", ambiguous="infer") + result = getattr(df1.date.dt, method)("h", ambiguous="infer") expected = df1["date"] tm.assert_series_equal(result, expected) # bool-array - result = getattr(df1.date.dt, method)("H", ambiguous=[True, False, False]) + result = getattr(df1.date.dt, method)("h", ambiguous=[True, False, False]) 
tm.assert_series_equal(result, expected) # NaT - result = getattr(df1.date.dt, method)("H", ambiguous="NaT") + result = getattr(df1.date.dt, method)("h", ambiguous="NaT") expected = df1["date"].copy() expected.iloc[0:2] = pd.NaT tm.assert_series_equal(result, expected) # raise with tm.external_error_raised(pytz.AmbiguousTimeError): - getattr(df1.date.dt, method)("H", ambiguous="raise") + getattr(df1.date.dt, method)("h", ambiguous="raise") @pytest.mark.parametrize( "method, ts_str, freq", [ ["ceil", "2018-03-11 01:59:00-0600", "5min"], ["round", "2018-03-11 01:59:00-0600", "5min"], - ["floor", "2018-03-11 03:01:00-0500", "2H"], + ["floor", "2018-03-11 03:01:00-0500", "2h"], ], ) def test_dt_round_tz_nonexistent(self, method, ts_str, freq): @@ -598,7 +598,7 @@ def test_strftime_dt64_microsecond_resolution(self): tm.assert_series_equal(result, expected) def test_strftime_period_hours(self): - ser = Series(period_range("20130101", periods=4, freq="H")) + ser = Series(period_range("20130101", periods=4, freq="h")) result = ser.dt.strftime("%Y/%m/%d %H:%M:%S") expected = Series( [ @@ -776,8 +776,8 @@ class TestSeriesPeriodValuesDtAccessor: [Period("2016-01", freq="M"), Period("2016-02", freq="M")], [Period("2016-01-01", freq="D"), Period("2016-01-02", freq="D")], [ - Period("2016-01-01 00:00:00", freq="H"), - Period("2016-01-01 01:00:00", freq="H"), + Period("2016-01-01 00:00:00", freq="h"), + Period("2016-01-01 01:00:00", freq="h"), ], [ Period("2016-01-01 00:00:00", freq="M"), diff --git a/pandas/tests/series/indexing/test_datetime.py b/pandas/tests/series/indexing/test_datetime.py index 317967bcbb7ff..84cf80fa1ffce 100644 --- a/pandas/tests/series/indexing/test_datetime.py +++ b/pandas/tests/series/indexing/test_datetime.py @@ -76,7 +76,7 @@ def test_getitem_setitem_datetime_tz(tz_source): N = 50 # testing with timezone, GH #2785 - rng = date_range("1/1/1990", periods=N, freq="H", tz=tzget("US/Eastern")) + rng = date_range("1/1/1990", periods=N, freq="h", tz=tzget("US/Eastern")) ts = Series(np.random.default_rng(2).standard_normal(N), index=rng) # also test Timestamp tz handling, GH #2789 @@ -107,7 +107,7 @@ def test_getitem_setitem_datetime_tz(tz_source): def test_getitem_setitem_datetimeindex(): N = 50 # testing with timezone, GH #2785 - rng = date_range("1/1/1990", periods=N, freq="H", tz="US/Eastern") + rng = date_range("1/1/1990", periods=N, freq="h", tz="US/Eastern") ts = Series(np.random.default_rng(2).standard_normal(N), index=rng) result = ts["1990-01-01 04:00:00"] @@ -213,7 +213,7 @@ def test_getitem_setitem_datetimeindex(): def test_getitem_setitem_periodindex(): N = 50 - rng = period_range("1/1/1990", periods=N, freq="H") + rng = period_range("1/1/1990", periods=N, freq="h") ts = Series(np.random.default_rng(2).standard_normal(N), index=rng) result = ts["1990-01-01 04"] @@ -450,7 +450,7 @@ def test_indexing(): def test_getitem_str_month_with_datetimeindex(): # GH3546 (not including times on the last day) - idx = date_range(start="2013-05-31 00:00", end="2013-05-31 23:00", freq="H") + idx = date_range(start="2013-05-31 00:00", end="2013-05-31 23:00", freq="h") ts = Series(range(len(idx)), index=idx) expected = ts["2013-05"] tm.assert_series_equal(expected, ts) diff --git a/pandas/tests/series/indexing/test_getitem.py b/pandas/tests/series/indexing/test_getitem.py index 458988491aae8..479e74703bc0e 100644 --- a/pandas/tests/series/indexing/test_getitem.py +++ b/pandas/tests/series/indexing/test_getitem.py @@ -137,7 +137,7 @@ def test_getitem_pydatetime_tz(self, tzstr): tz = 
timezones.maybe_get_tz(tzstr) index = date_range( - start="2012-12-24 16:00", end="2012-12-24 18:00", freq="H", tz=tzstr + start="2012-12-24 16:00", end="2012-12-24 18:00", freq="h", tz=tzstr ) ts = Series(index=index, data=index.hour) time_pandas = Timestamp("2012-12-24 17:00", tz=tzstr) diff --git a/pandas/tests/series/indexing/test_indexing.py b/pandas/tests/series/indexing/test_indexing.py index 0fa28920d41bd..a52d87b1a0457 100644 --- a/pandas/tests/series/indexing/test_indexing.py +++ b/pandas/tests/series/indexing/test_indexing.py @@ -327,7 +327,7 @@ def test_multilevel_preserve_name(lexsorted_two_level_string_multiindex, indexer [ date_range("2014-01-01", periods=20, freq="MS"), period_range("2014-01", periods=20, freq="M"), - timedelta_range("0", periods=20, freq="H"), + timedelta_range("0", periods=20, freq="h"), ], ) def test_slice_with_negative_step(index): diff --git a/pandas/tests/series/indexing/test_setitem.py b/pandas/tests/series/indexing/test_setitem.py index f419ff9384042..5fcd3a19dcaa4 100644 --- a/pandas/tests/series/indexing/test_setitem.py +++ b/pandas/tests/series/indexing/test_setitem.py @@ -79,7 +79,7 @@ def test_setitem_tuple_with_datetimetz_values(self): @pytest.mark.parametrize("tz", ["US/Eastern", "UTC", "Asia/Tokyo"]) def test_setitem_with_tz(self, tz, indexer_sli): - orig = Series(date_range("2016-01-01", freq="H", periods=3, tz=tz)) + orig = Series(date_range("2016-01-01", freq="h", periods=3, tz=tz)) assert orig.dtype == f"datetime64[ns, {tz}]" exp = Series( @@ -117,7 +117,7 @@ def test_setitem_with_tz(self, tz, indexer_sli): def test_setitem_with_tz_dst(self, indexer_sli): # GH#14146 trouble setting values near DST boundary tz = "US/Eastern" - orig = Series(date_range("2016-11-06", freq="H", periods=3, tz=tz)) + orig = Series(date_range("2016-11-06", freq="h", periods=3, tz=tz)) assert orig.dtype == f"datetime64[ns, {tz}]" exp = Series( diff --git a/pandas/tests/series/methods/test_align.py b/pandas/tests/series/methods/test_align.py index eedd8b654f3d0..cb60cd2e5bcf3 100644 --- a/pandas/tests/series/methods/test_align.py +++ b/pandas/tests/series/methods/test_align.py @@ -193,7 +193,7 @@ def test_align_with_dataframe_method(method): def test_align_dt64tzindex_mismatched_tzs(): - idx1 = date_range("2001", periods=5, freq="H", tz="US/Eastern") + idx1 = date_range("2001", periods=5, freq="h", tz="US/Eastern") ser = Series(np.random.default_rng(2).standard_normal(len(idx1)), index=idx1) ser_central = ser.tz_convert("US/Central") # different timezones convert to UTC diff --git a/pandas/tests/series/methods/test_asof.py b/pandas/tests/series/methods/test_asof.py index 31c264d74d063..2acc2921e5efc 100644 --- a/pandas/tests/series/methods/test_asof.py +++ b/pandas/tests/series/methods/test_asof.py @@ -118,7 +118,7 @@ def test_with_nan(self): def test_periodindex(self): # array or list or dates N = 50 - rng = period_range("1/1/1990", periods=N, freq="H") + rng = period_range("1/1/1990", periods=N, freq="h") ts = Series(np.random.default_rng(2).standard_normal(N), index=rng) ts.iloc[15:30] = np.nan dates = date_range("1/1/1990", periods=N * 3, freq="37min") @@ -133,7 +133,7 @@ def test_periodindex(self): lb = ts.index[14] ub = ts.index[30] - pix = PeriodIndex(result.index.values, freq="H") + pix = PeriodIndex(result.index.values, freq="h") mask = (pix >= lb) & (pix < ub) rs = result[mask] assert (rs == ts[lb]).all() diff --git a/pandas/tests/series/methods/test_combine_first.py b/pandas/tests/series/methods/test_combine_first.py index aabed794ac557..89b6f9b01bc66 
100644 --- a/pandas/tests/series/methods/test_combine_first.py +++ b/pandas/tests/series/methods/test_combine_first.py @@ -107,7 +107,7 @@ def test_combine_first_timezone_series_with_empty_series(self): time_index = date_range( datetime(2021, 1, 1, 1), datetime(2021, 1, 1, 10), - freq="H", + freq="h", tz="Europe/Rome", ) s1 = Series(range(10), index=time_index) diff --git a/pandas/tests/series/methods/test_interpolate.py b/pandas/tests/series/methods/test_interpolate.py index 549f429f09d35..f8bbd4c25a4c0 100644 --- a/pandas/tests/series/methods/test_interpolate.py +++ b/pandas/tests/series/methods/test_interpolate.py @@ -780,7 +780,7 @@ def test_series_interpolate_intraday(self): exp = ts.reindex(new_index).interpolate(method="time") - index = date_range("1/1/2012", periods=4, freq="12H") + index = date_range("1/1/2012", periods=4, freq="12h") ts = Series([0, 12, 24, 36], index) new_index = index.append(index + pd.DateOffset(hours=1)).sort_values() result = ts.reindex(new_index).interpolate(method="time") diff --git a/pandas/tests/series/methods/test_map.py b/pandas/tests/series/methods/test_map.py index 783e18e541ad8..ae6c62e95f696 100644 --- a/pandas/tests/series/methods/test_map.py +++ b/pandas/tests/series/methods/test_map.py @@ -494,14 +494,14 @@ def test_map_categorical_na_action(na_action, expected): def test_map_datetimetz(): - values = pd.date_range("2011-01-01", "2011-01-02", freq="H").tz_localize( + values = pd.date_range("2011-01-01", "2011-01-02", freq="h").tz_localize( "Asia/Tokyo" ) s = Series(values, name="XX") # keep tz result = s.map(lambda x: x + pd.offsets.Day()) - exp_values = pd.date_range("2011-01-02", "2011-01-03", freq="H").tz_localize( + exp_values = pd.date_range("2011-01-02", "2011-01-03", freq="h").tz_localize( "Asia/Tokyo" ) exp = Series(exp_values, name="XX") diff --git a/pandas/tests/series/methods/test_reindex.py b/pandas/tests/series/methods/test_reindex.py index 9d6611cd53068..0923a2d42ce10 100644 --- a/pandas/tests/series/methods/test_reindex.py +++ b/pandas/tests/series/methods/test_reindex.py @@ -329,7 +329,7 @@ def test_reindex_fill_value_datetimelike_upcast(dtype, fill_value, using_array_m def test_reindex_datetimeindexes_tz_naive_and_aware(): # GH 8306 idx = date_range("20131101", tz="America/Chicago", periods=7) - newidx = date_range("20131103", periods=10, freq="H") + newidx = date_range("20131103", periods=10, freq="h") s = Series(range(7), index=idx) msg = ( r"Cannot compare dtypes datetime64\[ns, America/Chicago\] " diff --git a/pandas/tests/series/test_arithmetic.py b/pandas/tests/series/test_arithmetic.py index 522b86274a517..e9eb906a9cf10 100644 --- a/pandas/tests/series/test_arithmetic.py +++ b/pandas/tests/series/test_arithmetic.py @@ -457,7 +457,7 @@ def test_ser_flex_cmp_return_dtypes_empty(self, opname): def test_ser_cmp_result_names(self, names, comparison_op): # datetime64 dtype op = comparison_op - dti = date_range("1949-06-07 03:00:00", freq="H", periods=5, name=names[0]) + dti = date_range("1949-06-07 03:00:00", freq="h", periods=5, name=names[0]) ser = Series(dti).rename(names[1]) result = op(ser, dti) assert result.name == names[2] @@ -713,7 +713,7 @@ def test_compare_series_interval_keyword(self): class TestTimeSeriesArithmetic: def test_series_add_tz_mismatch_converts_to_utc(self): - rng = date_range("1/1/2011", periods=100, freq="H", tz="utc") + rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") perm = np.random.default_rng(2).permutation(100)[:90] ser1 = Series( @@ -737,7 +737,7 @@ def 
test_series_add_tz_mismatch_converts_to_utc(self): tm.assert_series_equal(result, expected) def test_series_add_aware_naive_raises(self): - rng = date_range("1/1/2011", periods=10, freq="H") + rng = date_range("1/1/2011", periods=10, freq="h") ser = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng) ser_utc = ser.tz_localize("utc") diff --git a/pandas/tests/series/test_constructors.py b/pandas/tests/series/test_constructors.py index 5a05a1840b644..4f9050be100ca 100644 --- a/pandas/tests/series/test_constructors.py +++ b/pandas/tests/series/test_constructors.py @@ -335,8 +335,8 @@ def test_constructor_index_dtype(self, dtype): [ ([1, 2]), (["1", "2"]), - (list(date_range("1/1/2011", periods=2, freq="H"))), - (list(date_range("1/1/2011", periods=2, freq="H", tz="US/Eastern"))), + (list(date_range("1/1/2011", periods=2, freq="h"))), + (list(date_range("1/1/2011", periods=2, freq="h", tz="US/Eastern"))), ([Interval(left=0, right=5)]), ], ) @@ -2158,7 +2158,7 @@ def test_constructor_no_pandas_array(self, using_array_manager): @td.skip_array_manager_invalid_test def test_from_array(self): - result = Series(pd.array(["1H", "2H"], dtype="timedelta64[ns]")) + result = Series(pd.array(["1h", "2h"], dtype="timedelta64[ns]")) assert result._mgr.blocks[0].is_extension is False result = Series(pd.array(["2015"], dtype="datetime64[ns]")) @@ -2166,7 +2166,7 @@ def test_from_array(self): @td.skip_array_manager_invalid_test def test_from_list_dtype(self): - result = Series(["1H", "2H"], dtype="timedelta64[ns]") + result = Series(["1h", "2h"], dtype="timedelta64[ns]") assert result._mgr.blocks[0].is_extension is False result = Series(["2015"], dtype="datetime64[ns]") diff --git a/pandas/tests/series/test_reductions.py b/pandas/tests/series/test_reductions.py index 1e1ac100b21bf..fbdf843a998bb 100644 --- a/pandas/tests/series/test_reductions.py +++ b/pandas/tests/series/test_reductions.py @@ -50,7 +50,7 @@ def test_td64_sum_empty(skipna): def test_td64_summation_overflow(): # GH#9442 - ser = Series(pd.date_range("20130101", periods=100000, freq="H")) + ser = Series(pd.date_range("20130101", periods=100000, freq="h")) ser[0] += pd.Timedelta("1s 1ms") # mean diff --git a/pandas/tests/series/test_repr.py b/pandas/tests/series/test_repr.py index 535a64f303ec2..86474a38d29fb 100644 --- a/pandas/tests/series/test_repr.py +++ b/pandas/tests/series/test_repr.py @@ -389,7 +389,7 @@ def test_categorical_series_repr_ordered(self): assert repr(s) == exp def test_categorical_series_repr_datetime(self): - idx = date_range("2011-01-01 09:00", freq="H", periods=5) + idx = date_range("2011-01-01 09:00", freq="h", periods=5) s = Series(Categorical(idx)) exp = """0 2011-01-01 09:00:00 1 2011-01-01 10:00:00 @@ -402,7 +402,7 @@ def test_categorical_series_repr_datetime(self): assert repr(s) == exp - idx = date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + idx = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") s = Series(Categorical(idx)) exp = """0 2011-01-01 09:00:00-05:00 1 2011-01-01 10:00:00-05:00 @@ -417,7 +417,7 @@ def test_categorical_series_repr_datetime(self): assert repr(s) == exp def test_categorical_series_repr_datetime_ordered(self): - idx = date_range("2011-01-01 09:00", freq="H", periods=5) + idx = date_range("2011-01-01 09:00", freq="h", periods=5) s = Series(Categorical(idx, ordered=True)) exp = """0 2011-01-01 09:00:00 1 2011-01-01 10:00:00 @@ -430,7 +430,7 @@ def test_categorical_series_repr_datetime_ordered(self): assert repr(s) == exp - idx = 
date_range("2011-01-01 09:00", freq="H", periods=5, tz="US/Eastern") + idx = date_range("2011-01-01 09:00", freq="h", periods=5, tz="US/Eastern") s = Series(Categorical(idx, ordered=True)) exp = """0 2011-01-01 09:00:00-05:00 1 2011-01-01 10:00:00-05:00 @@ -445,7 +445,7 @@ def test_categorical_series_repr_datetime_ordered(self): assert repr(s) == exp def test_categorical_series_repr_period(self): - idx = period_range("2011-01-01 09:00", freq="H", periods=5) + idx = period_range("2011-01-01 09:00", freq="h", periods=5) s = Series(Categorical(idx)) exp = """0 2011-01-01 09:00 1 2011-01-01 10:00 @@ -453,7 +453,7 @@ def test_categorical_series_repr_period(self): 3 2011-01-01 12:00 4 2011-01-01 13:00 dtype: category -Categories (5, period[H]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, +Categories (5, period[h]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00]""" # noqa: E501 assert repr(s) == exp @@ -471,7 +471,7 @@ def test_categorical_series_repr_period(self): assert repr(s) == exp def test_categorical_series_repr_period_ordered(self): - idx = period_range("2011-01-01 09:00", freq="H", periods=5) + idx = period_range("2011-01-01 09:00", freq="h", periods=5) s = Series(Categorical(idx, ordered=True)) exp = """0 2011-01-01 09:00 1 2011-01-01 10:00 @@ -479,7 +479,7 @@ def test_categorical_series_repr_period_ordered(self): 3 2011-01-01 12:00 4 2011-01-01 13:00 dtype: category -Categories (5, period[H]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < +Categories (5, period[h]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < 2011-01-01 13:00]""" # noqa: E501 assert repr(s) == exp diff --git a/pandas/tests/tools/test_to_datetime.py b/pandas/tests/tools/test_to_datetime.py index 93fe9b05adb4f..b0406dbfa3469 100644 --- a/pandas/tests/tools/test_to_datetime.py +++ b/pandas/tests/tools/test_to_datetime.py @@ -2817,7 +2817,7 @@ class TestToDatetimeInferFormat: def test_to_datetime_infer_datetime_format_consistent_format( self, cache, test_format ): - ser = Series(date_range("20000101", periods=50, freq="H")) + ser = Series(date_range("20000101", periods=50, freq="h")) s_as_dt_strings = ser.apply(lambda x: x.strftime(test_format)) diff --git a/pandas/tests/tseries/frequencies/test_freq_code.py b/pandas/tests/tseries/frequencies/test_freq_code.py index ca8db972e2185..fa92590fb5ec1 100644 --- a/pandas/tests/tseries/frequencies/test_freq_code.py +++ b/pandas/tests/tseries/frequencies/test_freq_code.py @@ -13,7 +13,7 @@ @pytest.mark.parametrize( "freqstr,exp_freqstr", - [("D", "D"), ("W", "D"), ("ME", "D"), ("s", "s"), ("min", "s"), ("H", "s")], + [("D", "D"), ("W", "D"), ("ME", "D"), ("s", "s"), ("min", "s"), ("h", "s")], ) def test_get_to_timestamp_base(freqstr, exp_freqstr): off = to_offset(freqstr) @@ -31,7 +31,7 @@ def test_get_to_timestamp_base(freqstr, exp_freqstr): ("Q", "quarter"), ("M", "month"), ("D", "day"), - ("H", "hour"), + ("h", "hour"), ("min", "minute"), ("s", "second"), ("ms", "millisecond"), @@ -43,7 +43,7 @@ def test_get_attrname_from_abbrev(freqstr, expected): assert Resolution.get_reso_from_freqstr(freqstr).attrname == expected -@pytest.mark.parametrize("freq", ["D", "H", "min", "s", "ms", "us", "ns"]) +@pytest.mark.parametrize("freq", ["D", "h", "min", "s", "ms", "us", "ns"]) def test_get_freq_roundtrip2(freq): obj = Resolution.get_reso_from_freqstr(freq) result = _attrname_to_abbrevs[obj.attrname] @@ -55,9 +55,9 @@ def test_get_freq_roundtrip2(freq): [ 
((1.5, "min"), (90, "s")), ((62.4, "min"), (3744, "s")), - ((1.04, "H"), (3744, "s")), + ((1.04, "h"), (3744, "s")), ((1, "D"), (1, "D")), - ((0.342931, "H"), (1234551600, "us")), + ((0.342931, "h"), (1234551600, "us")), ((1.2345, "D"), (106660800, "ms")), ], ) @@ -73,7 +73,7 @@ def test_resolution_bumping(args, expected): [ (0.5, "ns"), # Too much precision in the input can prevent. - (0.3429324798798269273987982, "H"), + (0.3429324798798269273987982, "h"), ], ) def test_cat(args): @@ -86,7 +86,7 @@ def test_cat(args): @pytest.mark.parametrize( "freqstr,expected", [ - ("1H", "2021-01-01T09:00:00"), + ("1h", "2021-01-01T09:00:00"), ("1D", "2021-01-02T08:00:00"), ("1W", "2021-01-03T08:00:00"), ("1ME", "2021-01-31T08:00:00"), @@ -99,8 +99,8 @@ def test_compatibility(freqstr, expected): assert ts_np + do == np.datetime64(expected) -@pytest.mark.parametrize("freq", ["A", "T", "S", "L", "U", "N"]) -def test_units_A_T_S_L_U_N_deprecated_from_attrname_to_abbrevs(freq): +@pytest.mark.parametrize("freq", ["A", "H", "T", "S", "L", "U", "N"]) +def test_units_A_H_T_S_L_U_N_deprecated_from_attrname_to_abbrevs(freq): # GH#52536 msg = f"'{freq}' is deprecated and will be removed in a future version." diff --git a/pandas/tests/tseries/frequencies/test_inference.py b/pandas/tests/tseries/frequencies/test_inference.py index 0b2978389ea88..51d0dd298f841 100644 --- a/pandas/tests/tseries/frequencies/test_inference.py +++ b/pandas/tests/tseries/frequencies/test_inference.py @@ -38,7 +38,7 @@ @pytest.fixture( params=[ (timedelta(1), "D"), - (timedelta(hours=1), "H"), + (timedelta(hours=1), "h"), (timedelta(minutes=1), "min"), (timedelta(seconds=1), "s"), (np.timedelta64(1, "ns"), "ns"), @@ -220,7 +220,7 @@ def test_infer_freq_index(freq, expected): "ME": ["2010-11-30", "2010-12-31", "2011-01-31", "2011-02-28"], "W-SAT": ["2010-12-25", "2011-01-01", "2011-01-08", "2011-01-15"], "D": ["2011-01-01", "2011-01-02", "2011-01-03", "2011-01-04"], - "H": [ + "h": [ "2011-12-31 22:00", "2011-12-31 23:00", "2012-01-01 00:00", @@ -255,7 +255,7 @@ def test_infer_freq_tz_series(tz_naive_fixture): ) @pytest.mark.parametrize( "freq", - ["H", "3H", "10min", "3601s", "3600001ms", "3600000001us", "3600000000001ns"], + ["h", "3h", "10min", "3601s", "3600001ms", "3600000001us", "3600000000001ns"], ) def test_infer_freq_tz_transition(tz_naive_fixture, date_pair, freq): # see gh-8772 @@ -265,7 +265,7 @@ def test_infer_freq_tz_transition(tz_naive_fixture, date_pair, freq): def test_infer_freq_tz_transition_custom(): - index = date_range("2013-11-03", periods=5, freq="3H").tz_localize( + index = date_range("2013-11-03", periods=5, freq="3h").tz_localize( "America/Chicago" ) assert index.inferred_freq is None @@ -274,7 +274,7 @@ def test_infer_freq_tz_transition_custom(): @pytest.mark.parametrize( "data,expected", [ - # Hourly freq in a day must result in "H" + # Hourly freq in a day must result in "h" ( [ "2014-07-01 09:00", @@ -284,7 +284,7 @@ def test_infer_freq_tz_transition_custom(): "2014-07-01 13:00", "2014-07-01 14:00", ], - "H", + "h", ), ( [ @@ -300,7 +300,7 @@ def test_infer_freq_tz_transition_custom(): "2014-07-02 10:00", "2014-07-02 11:00", ], - "BH", + "bh", ), ( [ @@ -316,7 +316,7 @@ def test_infer_freq_tz_transition_custom(): "2014-07-07 10:00", "2014-07-07 11:00", ], - "BH", + "bh", ), ( [ @@ -345,7 +345,7 @@ def test_infer_freq_tz_transition_custom(): "2014-07-08 15:00", "2014-07-08 16:00", ], - "BH", + "bh", ), ], ) diff --git a/pandas/tests/tseries/offsets/test_business_hour.py 
b/pandas/tests/tseries/offsets/test_business_hour.py index 319cc053d5d7d..f0d065f8bb7ef 100644 --- a/pandas/tests/tseries/offsets/test_business_hour.py +++ b/pandas/tests/tseries/offsets/test_business_hour.py @@ -148,17 +148,17 @@ def test_repr( offset9, offset10, ): - assert repr(offset1) == "" - assert repr(offset2) == "<3 * BusinessHours: BH=09:00-17:00>" - assert repr(offset3) == "<-1 * BusinessHour: BH=09:00-17:00>" - assert repr(offset4) == "<-4 * BusinessHours: BH=09:00-17:00>" - - assert repr(offset5) == "" - assert repr(offset6) == "" - assert repr(offset7) == "<-2 * BusinessHours: BH=21:30-06:30>" - assert repr(offset8) == "" - assert repr(offset9) == "<3 * BusinessHours: BH=09:00-13:00,22:00-03:00>" - assert repr(offset10) == "<-1 * BusinessHour: BH=13:00-17:00,23:00-02:00>" + assert repr(offset1) == "" + assert repr(offset2) == "<3 * BusinessHours: bh=09:00-17:00>" + assert repr(offset3) == "<-1 * BusinessHour: bh=09:00-17:00>" + assert repr(offset4) == "<-4 * BusinessHours: bh=09:00-17:00>" + + assert repr(offset5) == "" + assert repr(offset6) == "" + assert repr(offset7) == "<-2 * BusinessHours: bh=21:30-06:30>" + assert repr(offset8) == "" + assert repr(offset9) == "<3 * BusinessHours: bh=09:00-13:00,22:00-03:00>" + assert repr(offset10) == "<-1 * BusinessHour: bh=13:00-17:00,23:00-02:00>" def test_with_offset(self, dt): expected = Timestamp("2014-07-01 13:00") @@ -947,9 +947,9 @@ def test_apply_nanoseconds(self): assert_offset_equal(offset, base, expected) def test_datetimeindex(self): - idx1 = date_range(start="2014-07-04 15:00", end="2014-07-08 10:00", freq="BH") - idx2 = date_range(start="2014-07-04 15:00", periods=12, freq="BH") - idx3 = date_range(end="2014-07-08 10:00", periods=12, freq="BH") + idx1 = date_range(start="2014-07-04 15:00", end="2014-07-08 10:00", freq="bh") + idx2 = date_range(start="2014-07-04 15:00", periods=12, freq="bh") + idx3 = date_range(end="2014-07-08 10:00", periods=12, freq="bh") expected = DatetimeIndex( [ "2014-07-04 15:00", @@ -965,14 +965,14 @@ def test_datetimeindex(self): "2014-07-08 09:00", "2014-07-08 10:00", ], - freq="BH", + freq="bh", ) for idx in [idx1, idx2, idx3]: tm.assert_index_equal(idx, expected) - idx1 = date_range(start="2014-07-04 15:45", end="2014-07-08 10:45", freq="BH") - idx2 = date_range(start="2014-07-04 15:45", periods=12, freq="BH") - idx3 = date_range(end="2014-07-08 10:45", periods=12, freq="BH") + idx1 = date_range(start="2014-07-04 15:45", end="2014-07-08 10:45", freq="bh") + idx2 = date_range(start="2014-07-04 15:45", periods=12, freq="bh") + idx3 = date_range(end="2014-07-08 10:45", periods=12, freq="bh") expected = idx1 for idx in [idx1, idx2, idx3]: @@ -980,13 +980,13 @@ def test_datetimeindex(self): def test_short_datetimeindex_creation(self): # gh-49835 - idx4 = date_range(start="2014-07-01 10:00", freq="BH", periods=1) - expected4 = DatetimeIndex(["2014-07-01 10:00"], freq="BH") + idx4 = date_range(start="2014-07-01 10:00", freq="bh", periods=1) + expected4 = DatetimeIndex(["2014-07-01 10:00"], freq="bh") tm.assert_index_equal(idx4, expected4) def test_bday_ignores_timedeltas(self): - idx = date_range("2010/02/01", "2010/02/10", freq="12H") - t1 = idx + BDay(offset=Timedelta(3, unit="H")) + idx = date_range("2010/02/01", "2010/02/10", freq="12h") + t1 = idx + BDay(offset=Timedelta(3, unit="h")) expected = DatetimeIndex( [ diff --git a/pandas/tests/tseries/offsets/test_custom_business_hour.py b/pandas/tests/tseries/offsets/test_custom_business_hour.py index 38b5d74fe170f..55a184f95c2d8 100644 --- 
a/pandas/tests/tseries/offsets/test_custom_business_hour.py +++ b/pandas/tests/tseries/offsets/test_custom_business_hour.py @@ -69,8 +69,8 @@ def test_different_normalize_equals(self, _offset): assert offset != offset2 def test_repr(self, offset1, offset2): - assert repr(offset1) == "" - assert repr(offset2) == "" + assert repr(offset1) == "" + assert repr(offset2) == "" def test_with_offset(self, dt): expected = Timestamp("2014-07-01 13:00") diff --git a/pandas/tests/tseries/offsets/test_offsets.py b/pandas/tests/tseries/offsets/test_offsets.py index de44cf3f94d26..7f96ea98fa047 100644 --- a/pandas/tests/tseries/offsets/test_offsets.py +++ b/pandas/tests/tseries/offsets/test_offsets.py @@ -811,7 +811,7 @@ def test_alias_equality(self): assert k == v.copy() def test_rule_code(self): - lst = ["ME", "MS", "BM", "BMS", "D", "B", "H", "min", "s", "ms", "us"] + lst = ["ME", "MS", "BM", "BMS", "D", "B", "h", "min", "s", "ms", "us"] for k in lst: assert k == _get_offset(k).rule_code # should be cached - this is kind of an internals test... diff --git a/pandas/tests/tslibs/test_conversion.py b/pandas/tests/tslibs/test_conversion.py index d0f8923f3ad89..cefe449f3484d 100644 --- a/pandas/tests/tslibs/test_conversion.py +++ b/pandas/tests/tslibs/test_conversion.py @@ -66,8 +66,8 @@ def test_tz_localize_to_utc_copies(): def test_tz_convert_single_matches_tz_convert_hourly(tz_aware_fixture): tz = tz_aware_fixture - tz_didx = date_range("2014-03-01", "2015-01-10", freq="H", tz=tz) - naive_didx = date_range("2014-03-01", "2015-01-10", freq="H") + tz_didx = date_range("2014-03-01", "2015-01-10", freq="h", tz=tz) + naive_didx = date_range("2014-03-01", "2015-01-10", freq="h") _compare_utc_to_local(tz_didx) _compare_local_to_utc(tz_didx, naive_didx) diff --git a/pandas/tests/tslibs/test_period_asfreq.py b/pandas/tests/tslibs/test_period_asfreq.py index ca207e1031653..149817357fbd6 100644 --- a/pandas/tests/tslibs/test_period_asfreq.py +++ b/pandas/tests/tslibs/test_period_asfreq.py @@ -24,17 +24,17 @@ def get_freq_code(freqstr: str) -> int: @pytest.mark.parametrize( "freq1,freq2,expected", [ - ("D", "H", 24), + ("D", "h", 24), ("D", "min", 1440), ("D", "s", 86400), ("D", "ms", 86400000), ("D", "us", 86400000000), ("D", "ns", 86400000000000), - ("H", "min", 60), - ("H", "s", 3600), - ("H", "ms", 3600000), - ("H", "us", 3600000000), - ("H", "ns", 3600000000000), + ("h", "min", 60), + ("h", "s", 3600), + ("h", "ms", 3600000), + ("h", "us", 3600000000), + ("h", "ns", 3600000000000), ("min", "s", 60), ("min", "ms", 60000), ("min", "us", 60000000), diff --git a/pandas/tests/tslibs/test_to_offset.py b/pandas/tests/tslibs/test_to_offset.py index bc3e06646b235..82b0c78002972 100644 --- a/pandas/tests/tslibs/test_to_offset.py +++ b/pandas/tests/tslibs/test_to_offset.py @@ -53,7 +53,7 @@ def test_to_offset_negative(freqstr, expected): "-us", "3us1", "-2-3us", - "-2D:3H", + "-2D:3h", "1.5.0s", "2SMS-15-15", "2SMS-15D", @@ -105,12 +105,12 @@ def test_to_offset_tuple_unsupported(): @pytest.mark.parametrize( "freqstr,expected", [ - ("2D 3H", offsets.Hour(51)), - ("2 D3 H", offsets.Hour(51)), - ("2 D 3 H", offsets.Hour(51)), - (" 2 D 3 H ", offsets.Hour(51)), - (" H ", offsets.Hour()), - (" 3 H ", offsets.Hour(3)), + ("2D 3h", offsets.Hour(51)), + ("2 D3 h", offsets.Hour(51)), + ("2 D 3 h", offsets.Hour(51)), + (" 2 D 3 h ", offsets.Hour(51)), + (" h ", offsets.Hour()), + (" 3 h ", offsets.Hour(3)), ], ) def test_to_offset_whitespace(freqstr, expected): @@ -119,7 +119,7 @@ def test_to_offset_whitespace(freqstr, expected): 
 @pytest.mark.parametrize(
-    "freqstr,expected", [("00H 00min 01s", 1), ("-00H 03min 14s", -194)]
+    "freqstr,expected", [("00h 00min 01s", 1), ("-00h 03min 14s", -194)]
 )
 def test_to_offset_leading_zero(freqstr, expected):
     result = to_offset(freqstr)
diff --git a/pandas/tests/window/test_groupby.py b/pandas/tests/window/test_groupby.py
index b8e0173ee131f..a23c91df5eef6 100644
--- a/pandas/tests/window/test_groupby.py
+++ b/pandas/tests/window/test_groupby.py
@@ -1239,15 +1239,15 @@ def test_dont_mutate_obj_after_slicing(self):
         df = DataFrame(
             {
                 "id": ["a", "a", "b", "b", "b"],
-                "timestamp": date_range("2021-9-1", periods=5, freq="H"),
+                "timestamp": date_range("2021-9-1", periods=5, freq="h"),
                 "y": range(5),
             }
         )
-        grp = df.groupby("id").rolling("1H", on="timestamp")
+        grp = df.groupby("id").rolling("1h", on="timestamp")
         result = grp.count()
         expected_df = DataFrame(
             {
-                "timestamp": date_range("2021-9-1", periods=5, freq="H"),
+                "timestamp": date_range("2021-9-1", periods=5, freq="h"),
                 "y": [1.0] * 5,
             },
             index=MultiIndex.from_arrays(
@@ -1262,7 +1262,7 @@ def test_dont_mutate_obj_after_slicing(self):
             index=MultiIndex.from_arrays(
                 [
                     ["a", "a", "b", "b", "b"],
-                    date_range("2021-9-1", periods=5, freq="H"),
+                    date_range("2021-9-1", periods=5, freq="h"),
                 ],
                 names=["id", "timestamp"],
             ),
diff --git a/pandas/tests/window/test_pairwise.py b/pandas/tests/window/test_pairwise.py
index b6f2365afb457..2258a4106fe92 100644
--- a/pandas/tests/window/test_pairwise.py
+++ b/pandas/tests/window/test_pairwise.py
@@ -392,7 +392,7 @@ def test_pairwise_with_series(self, pairwise_frames, pairwise_target_frame, f):
     def test_corr_freq_memory_error(self):
         # GH 31789
         s = Series(range(5), index=date_range("2020", periods=5))
-        result = s.rolling("12H").corr(s)
+        result = s.rolling("12h").corr(s)
         expected = Series([np.nan] * 5, index=date_range("2020", periods=5))
         tm.assert_series_equal(result, expected)
diff --git a/pandas/tests/window/test_timeseries_window.py b/pandas/tests/window/test_timeseries_window.py
index caea3e98f262f..482c8992feb13 100644
--- a/pandas/tests/window/test_timeseries_window.py
+++ b/pandas/tests/window/test_timeseries_window.py
@@ -599,12 +599,12 @@ def test_all2(self, arithmetic_win_operators):
         # more sophisticated comparison of integer vs.
         # time-based windowing
         df = DataFrame(
-            {"B": np.arange(50)}, index=date_range("20130101", periods=50, freq="H")
+            {"B": np.arange(50)}, index=date_range("20130101", periods=50, freq="h")
         )
         # in-range data
         dft = df.between_time("09:00", "16:00")
-        r = dft.rolling(window="5H")
+        r = dft.rolling(window="5h")
         result = getattr(r, f)()
diff --git a/pandas/tseries/frequencies.py b/pandas/tseries/frequencies.py
index e77f56a9928ae..0ed0fe4b87576 100644
--- a/pandas/tseries/frequencies.py
+++ b/pandas/tseries/frequencies.py
@@ -56,7 +56,7 @@
         TimedeltaIndex,
     )
     from pandas.core.arrays.datetimelike import DatetimeLikeArrayMixin
-# ---------------------------------------------------------------------
+# --------------------------------------------------------------------
 # Offset related functions
 _need_suffix = ["QS", "BQ", "BQS", "YS", "AS", "BY", "BA", "BYS", "BAS"]
@@ -229,7 +229,7 @@ def get_freq(self) -> str | None:
         # Business hourly, maybe. 17: one day / 65: one weekend
         if self.hour_deltas in ([1, 17], [1, 65], [1, 17, 65]):
-            return "BH"
+            return "bh"
         # Possibly intraday frequency. Here we use the
         # original .asi8 values as the modified values
@@ -243,7 +243,7 @@ def get_freq(self) -> str | None:
         pps = ppm // 60
         if _is_multiple(delta, pph):
             # Hours
-            return _maybe_add_count("H", delta / pph)
+            return _maybe_add_count("h", delta / pph)
         elif _is_multiple(delta, ppm):
             # Minutes
             return _maybe_add_count("min", delta / ppm)
@@ -457,21 +457,21 @@ def is_subperiod(source, target) -> bool:
             return _quarter_months_conform(
                 get_rule_month(source), get_rule_month(target)
             )
-        return source in {"D", "C", "B", "M", "H", "min", "s", "ms", "us", "ns"}
+        return source in {"D", "C", "B", "M", "h", "min", "s", "ms", "us", "ns"}
     elif _is_quarterly(target):
-        return source in {"D", "C", "B", "M", "H", "min", "s", "ms", "us", "ns"}
+        return source in {"D", "C", "B", "M", "h", "min", "s", "ms", "us", "ns"}
     elif _is_monthly(target):
-        return source in {"D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
+        return source in {"D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
     elif _is_weekly(target):
-        return source in {target, "D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
+        return source in {target, "D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
     elif target == "B":
-        return source in {"B", "H", "min", "s", "ms", "us", "ns"}
+        return source in {"B", "h", "min", "s", "ms", "us", "ns"}
    elif target == "C":
-        return source in {"C", "H", "min", "s", "ms", "us", "ns"}
+        return source in {"C", "h", "min", "s", "ms", "us", "ns"}
     elif target == "D":
-        return source in {"D", "H", "min", "s", "ms", "us", "ns"}
-    elif target == "H":
-        return source in {"H", "min", "s", "ms", "us", "ns"}
+        return source in {"D", "h", "min", "s", "ms", "us", "ns"}
+    elif target == "h":
+        return source in {"h", "min", "s", "ms", "us", "ns"}
     elif target == "min":
         return source in {"min", "s", "ms", "us", "ns"}
     elif target == "s":
@@ -515,21 +515,21 @@ def is_superperiod(source, target) -> bool:
             smonth = get_rule_month(source)
             tmonth = get_rule_month(target)
             return _quarter_months_conform(smonth, tmonth)
-        return target in {"D", "C", "B", "M", "H", "min", "s", "ms", "us", "ns"}
+        return target in {"D", "C", "B", "M", "h", "min", "s", "ms", "us", "ns"}
     elif _is_quarterly(source):
-        return target in {"D", "C", "B", "M", "H", "min", "s", "ms", "us", "ns"}
+        return target in {"D", "C", "B", "M", "h", "min", "s", "ms", "us", "ns"}
     elif _is_monthly(source):
-        return target in {"D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
+        return target in {"D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
     elif _is_weekly(source):
-        return target in {source, "D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
+        return target in {source, "D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
     elif source == "B":
-        return target in {"D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
+        return target in {"D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
     elif source == "C":
-        return target in {"D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
+        return target in {"D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
     elif source == "D":
-        return target in {"D", "C", "B", "H", "min", "s", "ms", "us", "ns"}
-    elif source == "H":
-        return target in {"H", "min", "s", "ms", "us", "ns"}
+        return target in {"D", "C", "B", "h", "min", "s", "ms", "us", "ns"}
+    elif source == "h":
+        return target in {"h", "min", "s", "ms", "us", "ns"}
     elif source == "min":
         return target in {"min", "s", "ms", "us", "ns"}
     elif source == "s":
@@ -560,7 +560,7 @@ def _maybe_coerce_freq(code) -> str:
     assert code is not None
     if isinstance(code, DateOffset):
         code = freq_to_period_freqstr(1, code.name)
-    if code in {"min", "s", "ms", "us", "ns"}:
+    if code in {"h", "min", "s", "ms", "us", "ns"}:
         return code
     else:
         return code.upper()
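
The test and library hunks above all follow the same pattern: every place a frequency string, offset alias, or window size was spelled ``H``/``BH``/``CBH`` now uses the lowercase ``h``/``bh``/``cbh``. The sketch below is not part of the patch; it is a minimal illustration of the resulting user-facing spelling, assuming a pandas build that includes this change (the example data is invented for illustration):

.. code-block:: python

    import pandas as pd
    from pandas.tseries.frequencies import to_offset

    # Hourly date_range is now spelled with the lowercase alias "h"
    idx = pd.date_range("2021-09-01", periods=5, freq="h")

    # Combined offset strings use the same lowercase spelling:
    # 2 days + 3 hours parses to Hour(51), matching the to_offset tests above
    offset = to_offset("2D 3h")
    print(offset)

    # Time-based rolling windows accept the lowercase alias as well
    s = pd.Series(range(5), index=idx)
    print(s.rolling("2h").sum())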