diff --git a/pandas/tests/indexes/datetimes/test_astype.py b/pandas/tests/indexes/datetimes/test_astype.py
index ad9a2f112caac..2f22236d55ff3 100644
--- a/pandas/tests/indexes/datetimes/test_astype.py
+++ b/pandas/tests/indexes/datetimes/test_astype.py
@@ -172,7 +172,7 @@ def test_astype_object(self):

     @pytest.mark.parametrize("tz", [None, "Asia/Tokyo"])
     def test_astype_object_tz(self, tz):
-        idx = pd.date_range(start="2013-01-01", periods=4, freq="M", name="idx", tz=tz)
+        idx = date_range(start="2013-01-01", periods=4, freq="M", name="idx", tz=tz)
         expected_list = [
             Timestamp("2013-01-31", tz=tz),
             Timestamp("2013-02-28", tz=tz),
@@ -288,7 +288,7 @@ def test_dti_astype_period(self):
 class TestAstype:
     @pytest.mark.parametrize("tz", [None, "US/Central"])
     def test_astype_category(self, tz):
-        obj = pd.date_range("2000", periods=2, tz=tz, name="idx")
+        obj = date_range("2000", periods=2, tz=tz, name="idx")
         result = obj.astype("category")
         expected = pd.CategoricalIndex(
             [Timestamp("2000-01-01", tz=tz), Timestamp("2000-01-02", tz=tz)],
@@ -302,7 +302,7 @@ def test_astype_category(self, tz):

     @pytest.mark.parametrize("tz", [None, "US/Central"])
     def test_astype_array_fallback(self, tz):
-        obj = pd.date_range("2000", periods=2, tz=tz, name="idx")
+        obj = date_range("2000", periods=2, tz=tz, name="idx")
         result = obj.astype(bool)
         expected = Index(np.array([True, True]), name="idx")
         tm.assert_index_equal(result, expected)
diff --git a/pandas/tests/indexes/datetimes/test_constructors.py b/pandas/tests/indexes/datetimes/test_constructors.py
index fc59df29ef18f..3bb25adb0f36b 100644
--- a/pandas/tests/indexes/datetimes/test_constructors.py
+++ b/pandas/tests/indexes/datetimes/test_constructors.py
@@ -35,7 +35,7 @@ def test_freq_validation_with_nat(self, dt_cls):
     @pytest.mark.parametrize(
         "index",
         [
-            pd.date_range("2016-01-01", periods=5, tz="US/Pacific"),
+            date_range("2016-01-01", periods=5, tz="US/Pacific"),
             pd.timedelta_range("1 Day", periods=5),
         ],
     )
@@ -103,14 +103,14 @@ def test_construction_caching(self):

         df = pd.DataFrame(
             {
-                "dt": pd.date_range("20130101", periods=3),
-                "dttz": pd.date_range("20130101", periods=3, tz="US/Eastern"),
+                "dt": date_range("20130101", periods=3),
+                "dttz": date_range("20130101", periods=3, tz="US/Eastern"),
                 "dt_with_null": [
                     Timestamp("20130101"),
                     pd.NaT,
                     Timestamp("20130103"),
                 ],
-                "dtns": pd.date_range("20130101", periods=3, freq="ns"),
+                "dtns": date_range("20130101", periods=3, freq="ns"),
             }
         )
         assert df.dttz.dtype.tz.zone == "US/Eastern"
@@ -121,7 +121,7 @@
     )
     def test_construction_with_alt(self, kwargs, tz_aware_fixture):
         tz = tz_aware_fixture
-        i = pd.date_range("20130101", periods=5, freq="H", tz=tz)
+        i = date_range("20130101", periods=5, freq="H", tz=tz)
         kwargs = {key: attrgetter(val)(i) for key, val in kwargs.items()}
         result = DatetimeIndex(i, **kwargs)
         tm.assert_index_equal(i, result)
@@ -132,7 +132,7 @@
     )
     def test_construction_with_alt_tz_localize(self, kwargs, tz_aware_fixture):
         tz = tz_aware_fixture
-        i = pd.date_range("20130101", periods=5, freq="H", tz=tz)
+        i = date_range("20130101", periods=5, freq="H", tz=tz)
         i = i._with_freq(None)
         kwargs = {key: attrgetter(val)(i) for key, val in kwargs.items()}

@@ -754,7 +754,7 @@ def test_construction_int_rountrip(self, tz_naive_fixture):

     def test_construction_from_replaced_timestamps_with_dst(self):
         # GH 18785
-        index = pd.date_range(
+        index = date_range(
             Timestamp(2000, 1, 1),
             Timestamp(2005, 1, 1),
             freq="MS",
@@ -804,7 +804,7 @@ def test_constructor_with_ambiguous_keyword_arg(self):
         start = Timestamp(year=2020, month=11, day=1, hour=1).tz_localize(
             timezone, ambiguous=False
         )
-        result = pd.date_range(start=start, periods=2, ambiguous=False)
+        result = date_range(start=start, periods=2, ambiguous=False)
         tm.assert_index_equal(result, expected)

         # ambiguous keyword in end
@@ -812,7 +812,7 @@ def test_constructor_with_ambiguous_keyword_arg(self):
         end = Timestamp(year=2020, month=11, day=2, hour=1).tz_localize(
             timezone, ambiguous=False
         )
-        result = pd.date_range(end=end, periods=2, ambiguous=False)
+        result = date_range(end=end, periods=2, ambiguous=False)
         tm.assert_index_equal(result, expected)

     def test_constructor_with_nonexistent_keyword_arg(self):
@@ -824,7 +824,7 @@ def test_constructor_with_nonexistent_keyword_arg(self):
         start = Timestamp("2015-03-29 02:30:00").tz_localize(
             timezone, nonexistent="shift_forward"
         )
-        result = pd.date_range(start=start, periods=2, freq="H")
+        result = date_range(start=start, periods=2, freq="H")
         expected = DatetimeIndex(
             [
                 Timestamp("2015-03-29 03:00:00+02:00", tz=timezone),
@@ -838,7 +838,7 @@ def test_constructor_with_nonexistent_keyword_arg(self):
         end = Timestamp("2015-03-29 02:30:00").tz_localize(
             timezone, nonexistent="shift_forward"
         )
-        result = pd.date_range(end=end, periods=2, freq="H")
+        result = date_range(end=end, periods=2, freq="H")
         expected = DatetimeIndex(
             [
                 Timestamp("2015-03-29 01:00:00+01:00", tz=timezone),
diff --git a/pandas/tests/indexes/datetimes/test_date_range.py b/pandas/tests/indexes/datetimes/test_date_range.py
index 237c82436eb84..7c70b58318a11 100644
--- a/pandas/tests/indexes/datetimes/test_date_range.py
+++ b/pandas/tests/indexes/datetimes/test_date_range.py
@@ -650,10 +650,10 @@ def test_timezone_comparaison_assert(self):
     def test_negative_non_tick_frequency_descending_dates(self, tz_aware_fixture):
         # GH 23270
         tz = tz_aware_fixture
-        result = pd.date_range(start="2011-06-01", end="2011-01-01", freq="-1MS", tz=tz)
-        expected = pd.date_range(
-            end="2011-06-01", start="2011-01-01", freq="1MS", tz=tz
-        )[::-1]
+        result = date_range(start="2011-06-01", end="2011-01-01", freq="-1MS", tz=tz)
+        expected = date_range(end="2011-06-01", start="2011-01-01", freq="1MS", tz=tz)[
+            ::-1
+        ]

         tm.assert_index_equal(result, expected)

@@ -739,10 +739,10 @@ def test_3(self):

     def test_precision_finer_than_offset(self):
         # GH#9907
-        result1 = pd.date_range(
+        result1 = date_range(
             start="2015-04-15 00:00:03", end="2016-04-22 00:00:00", freq="Q"
         )
-        result2 = pd.date_range(
+        result2 = date_range(
             start="2015-04-15 00:00:03", end="2015-06-22 00:00:04", freq="W"
         )
         expected1_list = [
@@ -788,9 +788,9 @@ def test_mismatching_tz_raises_err(self, start, end):
         # issue 18488
         msg = "Start and end cannot both be tz-aware with different timezones"
         with pytest.raises(TypeError, match=msg):
-            pd.date_range(start, end)
+            date_range(start, end)
         with pytest.raises(TypeError, match=msg):
-            pd.date_range(start, end, freq=BDay())
+            date_range(start, end, freq=BDay())


 class TestBusinessDateRange:
@@ -849,18 +849,18 @@ def test_bdays_and_open_boundaries(self, closed):
         # GH 6673
         start = "2018-07-21"  # Saturday
         end = "2018-07-29"  # Sunday
-        result = pd.date_range(start, end, freq="B", closed=closed)
+        result = date_range(start, end, freq="B", closed=closed)
         bday_start = "2018-07-23"  # Monday
         bday_end = "2018-07-27"  # Friday
-        expected = pd.date_range(bday_start, bday_end, freq="D")
+        expected = date_range(bday_start, bday_end, freq="D")
         tm.assert_index_equal(result, expected)
         # Note: we do _not_ expect the freqs to match here

     def test_bday_near_overflow(self):
         # GH#24252 avoid doing unnecessary addition that _would_ overflow
         start = Timestamp.max.floor("D").to_pydatetime()
-        rng = pd.date_range(start, end=None, periods=1, freq="B")
+        rng = date_range(start, end=None, periods=1, freq="B")
         expected = DatetimeIndex([start], freq="B")
         tm.assert_index_equal(rng, expected)

@@ -869,7 +869,7 @@ def test_bday_overflow_error(self):
         msg = "Out of bounds nanosecond timestamp"
         start = Timestamp.max.floor("D").to_pydatetime()
         with pytest.raises(OutOfBoundsDatetime, match=msg):
-            pd.date_range(start, periods=2, freq="B")
+            date_range(start, periods=2, freq="B")


 class TestCustomDateRange:
@@ -995,7 +995,7 @@ def test_all_custom_freq(self, freq):
     def test_range_with_millisecond_resolution(self, start_end):
         # https://github.com/pandas-dev/pandas/issues/24110
         start, end = start_end
-        result = pd.date_range(start=start, end=end, periods=2, closed="left")
+        result = date_range(start=start, end=end, periods=2, closed="left")
         expected = DatetimeIndex([start])
         tm.assert_index_equal(result, expected)

@@ -1003,7 +1003,7 @@ def test_range_with_millisecond_resolution(self, start_end):
 def test_date_range_with_custom_holidays():
     # GH 30593
     freq = pd.offsets.CustomBusinessHour(start="15:00", holidays=["2020-11-26"])
-    result = pd.date_range(start="2020-11-25 15:00", periods=4, freq=freq)
+    result = date_range(start="2020-11-25 15:00", periods=4, freq=freq)
     expected = DatetimeIndex(
         [
             "2020-11-25 15:00:00",
diff --git a/pandas/tests/indexes/datetimes/test_datetime.py b/pandas/tests/indexes/datetimes/test_datetime.py
index b35aa28ffc40b..2657fc817ec3a 100644
--- a/pandas/tests/indexes/datetimes/test_datetime.py
+++ b/pandas/tests/indexes/datetimes/test_datetime.py
@@ -68,7 +68,7 @@ def test_time_loc(self):  # GH8667

         step = 24 * 3600
         for n in ns:
-            idx = pd.date_range("2014-11-26", periods=n, freq="S")
+            idx = date_range("2014-11-26", periods=n, freq="S")
             ts = pd.Series(np.random.randn(n), index=idx)
             i = np.arange(start, n, step)

@@ -89,10 +89,10 @@ def test_time_overflow_for_32bit_machines(self):
         # overflow.
         periods = np.int_(1000)

-        idx1 = pd.date_range(start="2000", periods=periods, freq="S")
+        idx1 = date_range(start="2000", periods=periods, freq="S")
         assert len(idx1) == periods

-        idx2 = pd.date_range(end="2000", periods=periods, freq="S")
+        idx2 = date_range(end="2000", periods=periods, freq="S")
         assert len(idx2) == periods

     def test_nat(self):
@@ -251,7 +251,7 @@ def test_ns_index(self):
         index = DatetimeIndex(dt, freq=freq, name="time")
         self.assert_index_parameters(index)

-        new_index = pd.date_range(start=index[0], end=index[-1], freq=index.freq)
+        new_index = date_range(start=index[0], end=index[-1], freq=index.freq)
         self.assert_index_parameters(new_index)

     def test_factorize(self):
@@ -304,7 +304,7 @@ def test_factorize(self):
     def test_factorize_tz(self, tz_naive_fixture):
         tz = tz_naive_fixture
         # GH#13750
-        base = pd.date_range("2016-11-05", freq="H", periods=100, tz=tz)
+        base = date_range("2016-11-05", freq="H", periods=100, tz=tz)
         idx = base.repeat(5)

         exp_arr = np.arange(100, dtype=np.intp).repeat(5)
@@ -317,14 +317,14 @@ def test_factorize_tz(self, tz_naive_fixture):

     def test_factorize_dst(self):
         # GH 13750
-        idx = pd.date_range("2016-11-06", freq="H", periods=12, tz="US/Eastern")
+        idx = date_range("2016-11-06", freq="H", periods=12, tz="US/Eastern")

         for obj in [idx, pd.Series(idx)]:
             arr, res = obj.factorize()
             tm.assert_numpy_array_equal(arr, np.arange(12, dtype=np.intp))
             tm.assert_index_equal(res, idx)

-        idx = pd.date_range("2016-06-13", freq="H", periods=12, tz="US/Eastern")
+        idx = date_range("2016-06-13", freq="H", periods=12, tz="US/Eastern")

         for obj in [idx, pd.Series(idx)]:
             arr, res = obj.factorize()
@@ -350,7 +350,7 @@ def test_unique(self, arr, expected):

     def test_asarray_tz_naive(self):
         # This shouldn't produce a warning.
-        idx = pd.date_range("2000", periods=2)
+        idx = date_range("2000", periods=2)
         # M8[ns] by default
         result = np.asarray(idx)

@@ -365,7 +365,7 @@ def test_asarray_tz_naive(self):

     def test_asarray_tz_aware(self):
         tz = "US/Central"
-        idx = pd.date_range("2000", periods=2, tz=tz)
+        idx = date_range("2000", periods=2, tz=tz)
         expected = np.array(["2000-01-01T06", "2000-01-02T06"], dtype="M8[ns]")

         result = np.asarray(idx, dtype="datetime64[ns]")
@@ -393,7 +393,7 @@ def test_to_frame_datetime_tz(self):

     def test_split_non_utc(self):
         # GH 14042
-        indices = pd.date_range("2016-01-01 00:00:00+0200", freq="S", periods=10)
+        indices = date_range("2016-01-01 00:00:00+0200", freq="S", periods=10)
         result = np.split(indices, indices_or_sections=[])[0]
         expected = indices._with_freq(None)
         tm.assert_index_equal(result, expected)
diff --git a/pandas/tests/indexes/datetimes/test_indexing.py b/pandas/tests/indexes/datetimes/test_indexing.py
index 59269b9b54ddc..232ebc608e465 100644
--- a/pandas/tests/indexes/datetimes/test_indexing.py
+++ b/pandas/tests/indexes/datetimes/test_indexing.py
@@ -17,7 +17,7 @@
 class TestGetItem:
     def test_ellipsis(self):
         # GH#21282
-        idx = pd.date_range(
+        idx = date_range(
             "2011-01-01", "2011-01-31", freq="D", tz="Asia/Tokyo", name="idx"
         )

@@ -29,12 +29,12 @@ def test_getitem_slice_keeps_name(self):
         # GH4226
         st = Timestamp("2013-07-01 00:00:00", tz="America/Los_Angeles")
         et = Timestamp("2013-07-02 00:00:00", tz="America/Los_Angeles")
-        dr = pd.date_range(st, et, freq="H", name="timebucket")
+        dr = date_range(st, et, freq="H", name="timebucket")
         assert dr[1:].name == dr.name

     def test_getitem(self):
-        idx1 = pd.date_range("2011-01-01", "2011-01-31", freq="D", name="idx")
-        idx2 = pd.date_range(
name="idx") + idx2 = date_range( "2011-01-01", "2011-01-31", freq="D", tz="Asia/Tokyo", name="idx" ) @@ -43,21 +43,21 @@ def test_getitem(self): assert result == Timestamp("2011-01-01", tz=idx.tz) result = idx[0:5] - expected = pd.date_range( + expected = date_range( "2011-01-01", "2011-01-05", freq="D", tz=idx.tz, name="idx" ) tm.assert_index_equal(result, expected) assert result.freq == expected.freq result = idx[0:10:2] - expected = pd.date_range( + expected = date_range( "2011-01-01", "2011-01-09", freq="2D", tz=idx.tz, name="idx" ) tm.assert_index_equal(result, expected) assert result.freq == expected.freq result = idx[-20:-5:3] - expected = pd.date_range( + expected = date_range( "2011-01-12", "2011-01-24", freq="3D", tz=idx.tz, name="idx" ) tm.assert_index_equal(result, expected) @@ -74,7 +74,7 @@ def test_getitem(self): assert result.freq == expected.freq def test_dti_business_getitem(self): - rng = pd.bdate_range(START, END) + rng = bdate_range(START, END) smaller = rng[:5] exp = DatetimeIndex(rng.view(np.ndarray)[:5], freq="B") tm.assert_index_equal(smaller, exp) @@ -94,7 +94,7 @@ def test_dti_business_getitem(self): assert rng[4] == rng[np.int_(4)] def test_dti_business_getitem_matplotlib_hackaround(self): - rng = pd.bdate_range(START, END) + rng = bdate_range(START, END) with tm.assert_produces_warning(FutureWarning): # GH#30588 multi-dimensional indexing deprecated values = rng[:, None] @@ -102,7 +102,7 @@ def test_dti_business_getitem_matplotlib_hackaround(self): tm.assert_numpy_array_equal(values, expected) def test_dti_custom_getitem(self): - rng = pd.bdate_range(START, END, freq="C") + rng = bdate_range(START, END, freq="C") smaller = rng[:5] exp = DatetimeIndex(rng.view(np.ndarray)[:5], freq="C") tm.assert_index_equal(smaller, exp) @@ -121,7 +121,7 @@ def test_dti_custom_getitem(self): assert rng[4] == rng[np.int_(4)] def test_dti_custom_getitem_matplotlib_hackaround(self): - rng = pd.bdate_range(START, END, freq="C") + rng = bdate_range(START, END, freq="C") with tm.assert_produces_warning(FutureWarning): # GH#30588 multi-dimensional indexing deprecated values = rng[:, None] @@ -155,7 +155,7 @@ def test_where_doesnt_retain_freq(self): def test_where_other(self): # other is ndarray or Index - i = pd.date_range("20130101", periods=3, tz="US/Eastern") + i = date_range("20130101", periods=3, tz="US/Eastern") for arr in [np.nan, pd.NaT]: result = i.where(notna(i), other=np.nan) @@ -173,7 +173,7 @@ def test_where_other(self): tm.assert_index_equal(result, i2) def test_where_invalid_dtypes(self): - dti = pd.date_range("20130101", periods=3, tz="US/Eastern") + dti = date_range("20130101", periods=3, tz="US/Eastern") i2 = Index([pd.NaT, pd.NaT] + dti[2:].tolist()) @@ -202,7 +202,7 @@ def test_where_invalid_dtypes(self): def test_where_mismatched_nat(self, tz_aware_fixture): tz = tz_aware_fixture - dti = pd.date_range("2013-01-01", periods=3, tz=tz) + dti = date_range("2013-01-01", periods=3, tz=tz) cond = np.array([True, False, True]) msg = "value should be a 'Timestamp', 'NaT', or array of those. 
Got" @@ -211,7 +211,7 @@ def test_where_mismatched_nat(self, tz_aware_fixture): dti.where(cond, np.timedelta64("NaT", "ns")) def test_where_tz(self): - i = pd.date_range("20130101", periods=3, tz="US/Eastern") + i = date_range("20130101", periods=3, tz="US/Eastern") result = i.where(notna(i)) expected = i tm.assert_index_equal(result, expected) @@ -226,8 +226,8 @@ def test_where_tz(self): class TestTake: def test_take(self): # GH#10295 - idx1 = pd.date_range("2011-01-01", "2011-01-31", freq="D", name="idx") - idx2 = pd.date_range( + idx1 = date_range("2011-01-01", "2011-01-31", freq="D", name="idx") + idx2 = date_range( "2011-01-01", "2011-01-31", freq="D", tz="Asia/Tokyo", name="idx" ) @@ -236,21 +236,21 @@ def test_take(self): assert result == Timestamp("2011-01-01", tz=idx.tz) result = idx.take([0, 1, 2]) - expected = pd.date_range( + expected = date_range( "2011-01-01", "2011-01-03", freq="D", tz=idx.tz, name="idx" ) tm.assert_index_equal(result, expected) assert result.freq == expected.freq result = idx.take([0, 2, 4]) - expected = pd.date_range( + expected = date_range( "2011-01-01", "2011-01-05", freq="2D", tz=idx.tz, name="idx" ) tm.assert_index_equal(result, expected) assert result.freq == expected.freq result = idx.take([7, 4, 1]) - expected = pd.date_range( + expected = date_range( "2011-01-08", "2011-01-02", freq="-3D", tz=idx.tz, name="idx" ) tm.assert_index_equal(result, expected) @@ -277,7 +277,7 @@ def test_take(self): assert result.freq is None def test_take_invalid_kwargs(self): - idx = pd.date_range("2011-01-01", "2011-01-31", freq="D", name="idx") + idx = date_range("2011-01-01", "2011-01-31", freq="D", name="idx") indices = [1, 6, 5, 9, 10, 13, 15, 3] msg = r"take\(\) got an unexpected keyword argument 'foo'" @@ -302,7 +302,7 @@ def test_take2(self, tz): datetime(2010, 1, 1, 21), ] - idx = pd.date_range( + idx = date_range( start="2010-01-01 09:00", end="2010-02-01 09:00", freq="H", @@ -392,7 +392,7 @@ def test_take_fill_value_with_timezone(self): class TestGetLoc: @pytest.mark.parametrize("method", [None, "pad", "backfill", "nearest"]) def test_get_loc_method_exact_match(self, method): - idx = pd.date_range("2000-01-01", periods=3) + idx = date_range("2000-01-01", periods=3) assert idx.get_loc(idx[1], method) == 1 assert idx.get_loc(idx[1].to_pydatetime(), method) == 1 assert idx.get_loc(str(idx[1]), method) == 1 @@ -401,7 +401,7 @@ def test_get_loc_method_exact_match(self, method): assert idx.get_loc(idx[1], method, tolerance=pd.Timedelta("0 days")) == 1 def test_get_loc(self): - idx = pd.date_range("2000-01-01", periods=3) + idx = date_range("2000-01-01", periods=3) assert idx.get_loc("2000-01-01", method="nearest") == 0 assert idx.get_loc("2000-01-01T12", method="nearest") == 1 @@ -458,7 +458,7 @@ def test_get_loc(self): assert idx.get_loc("2000-01", method="nearest") == slice(0, 2) # time indexing - idx = pd.date_range("2000-01-01", periods=24, freq="H") + idx = date_range("2000-01-01", periods=24, freq="H") tm.assert_numpy_array_equal( idx.get_loc(time(12)), np.array([12]), check_dtype=False ) @@ -481,7 +481,7 @@ def test_get_loc_time_nat(self): def test_get_loc_tz_aware(self): # https://github.com/pandas-dev/pandas/issues/32140 - dti = pd.date_range( + dti = date_range( Timestamp("2019-12-12 00:00:00", tz="US/Eastern"), Timestamp("2019-12-13 00:00:00", tz="US/Eastern"), freq="5s", @@ -509,7 +509,7 @@ def test_get_loc_nat(self): @pytest.mark.parametrize("key", [pd.Timedelta(0), pd.Timedelta(1), timedelta(0)]) def test_get_loc_timedelta_invalid_key(self, 
         # GH#20464
-        dti = pd.date_range("1970-01-01", periods=10)
+        dti = date_range("1970-01-01", periods=10)

         msg = "Cannot index DatetimeIndex with [Tt]imedelta"
         with pytest.raises(TypeError, match=msg):
             dti.get_loc(key)
@@ -552,7 +552,7 @@ def test_get_indexer_date_objs(self):
         tm.assert_numpy_array_equal(result, expected)

     def test_get_indexer(self):
-        idx = pd.date_range("2000-01-01", periods=3)
+        idx = date_range("2000-01-01", periods=3)
         exp = np.array([0, 1, 2], dtype=np.intp)
         tm.assert_numpy_array_equal(idx.get_indexer(idx), exp)

@@ -654,7 +654,7 @@ def test_maybe_cast_slice_duplicate_monotonic(self):
 class TestDatetimeIndex:
     def test_get_value(self):
         # specifically make sure we have test for np.datetime64 key
-        dti = pd.date_range("2016-01-01", periods=3)
+        dti = date_range("2016-01-01", periods=3)

         arr = np.arange(6, 9)
         ser = pd.Series(arr, index=dti)
diff --git a/pandas/tests/indexes/datetimes/test_misc.py b/pandas/tests/indexes/datetimes/test_misc.py
index 88c837e32d261..333a1ac169bb7 100644
--- a/pandas/tests/indexes/datetimes/test_misc.py
+++ b/pandas/tests/indexes/datetimes/test_misc.py
@@ -14,7 +14,7 @@
 class TestTimeSeries:
     def test_range_edges(self):
         # GH#13672
-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:00:00.000000001"),
             end=Timestamp("1970-01-01 00:00:00.000000004"),
             freq="N",
@@ -30,7 +30,7 @@ def test_range_edges(self):
         )
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:00:00.000000004"),
             end=Timestamp("1970-01-01 00:00:00.000000001"),
             freq="N",
@@ -38,7 +38,7 @@ def test_range_edges(self):
         exp = DatetimeIndex([], freq="N")
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:00:00.000000001"),
             end=Timestamp("1970-01-01 00:00:00.000000001"),
             freq="N",
@@ -46,7 +46,7 @@ def test_range_edges(self):
         exp = DatetimeIndex(["1970-01-01 00:00:00.000000001"], freq="N")
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:00:00.000001"),
             end=Timestamp("1970-01-01 00:00:00.000004"),
             freq="U",
@@ -62,7 +62,7 @@ def test_range_edges(self):
         )
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:00:00.001"),
             end=Timestamp("1970-01-01 00:00:00.004"),
             freq="L",
@@ -78,7 +78,7 @@ def test_range_edges(self):
         )
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:00:01"),
             end=Timestamp("1970-01-01 00:00:04"),
             freq="S",
@@ -94,7 +94,7 @@ def test_range_edges(self):
         )
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 00:01"),
             end=Timestamp("1970-01-01 00:04"),
             freq="T",
@@ -110,7 +110,7 @@ def test_range_edges(self):
         )
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01 01:00"),
             end=Timestamp("1970-01-01 04:00"),
             freq="H",
@@ -126,7 +126,7 @@ def test_range_edges(self):
         )
         tm.assert_index_equal(idx, exp)

-        idx = pd.date_range(
+        idx = date_range(
             start=Timestamp("1970-01-01"), end=Timestamp("1970-01-04"), freq="D"
         )
         exp = DatetimeIndex(
@@ -137,9 +137,9 @@ def test_range_edges(self):

 class TestDatetime64:
     def test_datetimeindex_accessors(self):
-        dti_naive = pd.date_range(freq="D", start=datetime(1998, 1, 1), periods=365)
+        dti_naive = date_range(freq="D", start=datetime(1998, 1, 1), periods=365)
         # GH#13303
-        dti_tz = pd.date_range(
+        dti_tz = date_range(
             freq="D", start=datetime(1998, 1, 1), periods=365, tz="US/Eastern"
         )
         for dti in [dti_naive, dti_tz]:
@@ -227,7 +227,7 @@ def test_datetimeindex_accessors(self):
             exp = DatetimeIndex([], freq="D", tz=dti.tz, name="name")
             tm.assert_index_equal(res, exp)

-        dti = pd.date_range(freq="BQ-FEB", start=datetime(1998, 1, 1), periods=4)
+        dti = date_range(freq="BQ-FEB", start=datetime(1998, 1, 1), periods=4)

         assert sum(dti.is_quarter_start) == 0
         assert sum(dti.is_quarter_end) == 4
@@ -329,7 +329,7 @@ def test_datetime_name_accessors(self, time_locale):
             expected_months = calendar.month_name[1:]

         # GH#11128
-        dti = pd.date_range(freq="D", start=datetime(1998, 1, 1), periods=365)
+        dti = date_range(freq="D", start=datetime(1998, 1, 1), periods=365)
         english_days = [
             "Monday",
             "Tuesday",
@@ -350,7 +350,7 @@ def test_datetime_name_accessors(self, time_locale):
             assert np.isnan(ts.day_name(locale=time_locale))

         # GH#12805
-        dti = pd.date_range(freq="M", start="2012", end="2013")
+        dti = date_range(freq="M", start="2012", end="2013")
         result = dti.month_name(locale=time_locale)
         expected = Index([month.capitalize() for month in expected_months])

@@ -388,7 +388,7 @@ def test_iter_readonly():

 def test_week_and_weekofyear_are_deprecated():
     # GH#33595 Deprecate week and weekofyear
-    idx = pd.date_range(start="2019-12-29", freq="D", periods=4)
+    idx = date_range(start="2019-12-29", freq="D", periods=4)
     with tm.assert_produces_warning(FutureWarning):
         idx.week
     with tm.assert_produces_warning(FutureWarning):
diff --git a/pandas/tests/indexes/datetimes/test_ops.py b/pandas/tests/indexes/datetimes/test_ops.py
index cbbe3aca9ccbe..faa135d649cd9 100644
--- a/pandas/tests/indexes/datetimes/test_ops.py
+++ b/pandas/tests/indexes/datetimes/test_ops.py
@@ -50,7 +50,7 @@ def test_repeat_range(self, tz_naive_fixture):
         assert result.freq is None
         assert len(result) == 5 * len(rng)

-        index = pd.date_range("2001-01-01", periods=2, freq="D", tz=tz)
+        index = date_range("2001-01-01", periods=2, freq="D", tz=tz)
         exp = DatetimeIndex(
             ["2001-01-01", "2001-01-01", "2001-01-02", "2001-01-02"], tz=tz
         )
@@ -58,7 +58,7 @@ def test_repeat_range(self, tz_naive_fixture):
         tm.assert_index_equal(res, exp)
         assert res.freq is None

-        index = pd.date_range("2001-01-01", periods=2, freq="2D", tz=tz)
+        index = date_range("2001-01-01", periods=2, freq="2D", tz=tz)
         exp = DatetimeIndex(
             ["2001-01-01", "2001-01-01", "2001-01-03", "2001-01-03"], tz=tz
         )
@@ -90,7 +90,7 @@ def test_repeat(self, tz_naive_fixture):
         reps = 2
         msg = "the 'axis' parameter is not supported"

-        rng = pd.date_range(start="2016-01-01", periods=2, freq="30Min", tz=tz)
+        rng = date_range(start="2016-01-01", periods=2, freq="30Min", tz=tz)

         expected_rng = DatetimeIndex(
             [
@@ -128,17 +128,17 @@ def test_resolution(self, tz_naive_fixture, freq, expected):
         if freq == "A" and not IS64 and isinstance(tz, tzlocal):
             pytest.xfail(reason="OverflowError inside tzlocal past 2038")

-        idx = pd.date_range(start="2013-04-01", periods=30, freq=freq, tz=tz)
+        idx = date_range(start="2013-04-01", periods=30, freq=freq, tz=tz)
         assert idx.resolution == expected

     def test_value_counts_unique(self, tz_naive_fixture):
         tz = tz_naive_fixture
         # GH 7735
-        idx = pd.date_range("2011-01-01 09:00", freq="H", periods=10)
+        idx = date_range("2011-01-01 09:00", freq="H", periods=10)
         # create repeated values, 'n'th element is repeated by n+1 times
         idx = DatetimeIndex(np.repeat(idx.values, range(1, len(idx) + 1)), tz=tz)

-        exp_idx = pd.date_range("2011-01-01 18:00", freq="-1H", periods=10, tz=tz)
+        exp_idx = date_range("2011-01-01 18:00", freq="-1H", periods=10, tz=tz)
         expected = Series(range(10, 0, -1), index=exp_idx, dtype="int64")
         expected.index = expected.index._with_freq(None)

@@ -146,7 +146,7 @@ def test_value_counts_unique(self, tz_naive_fixture):

             tm.assert_series_equal(obj.value_counts(), expected)

-        expected = pd.date_range("2011-01-01 09:00", freq="H", periods=10, tz=tz)
+        expected = date_range("2011-01-01 09:00", freq="H", periods=10, tz=tz)
         expected = expected._with_freq(None)
         tm.assert_index_equal(idx.unique(), expected)

@@ -261,7 +261,7 @@ def test_order_without_freq(self, index_dates, expected_dates, tz_naive_fixture)

     def test_drop_duplicates_metadata(self, freq_sample):
         # GH 10115
-        idx = pd.date_range("2011-01-01", freq=freq_sample, periods=10, name="idx")
+        idx = date_range("2011-01-01", freq=freq_sample, periods=10, name="idx")
         result = idx.drop_duplicates()
         tm.assert_index_equal(idx, result)
         assert idx.freq == result.freq
@@ -287,7 +287,7 @@ def test_drop_duplicates_metadata(self, freq_sample):
     )
     def test_drop_duplicates(self, freq_sample, keep, expected, index):
         # to check Index/Series compat
-        idx = pd.date_range("2011-01-01", freq=freq_sample, periods=10, name="idx")
+        idx = date_range("2011-01-01", freq=freq_sample, periods=10, name="idx")
         idx = idx.append(idx[:5])

         tm.assert_numpy_array_equal(idx.duplicated(keep=keep), expected)
@@ -301,7 +301,7 @@ def test_drop_duplicates(self, freq_sample, keep, expected, index):

     def test_infer_freq(self, freq_sample):
         # GH 11018
-        idx = pd.date_range("2011-01-01 09:00:00", freq=freq_sample, periods=10)
+        idx = date_range("2011-01-01 09:00:00", freq=freq_sample, periods=10)
         result = DatetimeIndex(idx.asi8, freq="infer")
         tm.assert_index_equal(idx, result)
         assert result.freq == freq_sample
@@ -361,7 +361,7 @@ def test_freq_view_safe(self):
         # Setting the freq for one DatetimeIndex shouldn't alter the freq
         # for another that views the same data

-        dti = pd.date_range("2016-01-01", periods=5)
+        dti = date_range("2016-01-01", periods=5)
         dta = dti._data

         dti2 = DatetimeIndex(dta)._with_freq(None)
diff --git a/pandas/tests/indexes/datetimes/test_setops.py b/pandas/tests/indexes/datetimes/test_setops.py
index 3dbfd8b64cbba..c8edd30e3f7aa 100644
--- a/pandas/tests/indexes/datetimes/test_setops.py
+++ b/pandas/tests/indexes/datetimes/test_setops.py
@@ -54,19 +54,19 @@ def test_union3(self, sort, box):

     @pytest.mark.parametrize("tz", tz)
     def test_union(self, tz, sort):
-        rng1 = pd.date_range("1/1/2000", freq="D", periods=5, tz=tz)
-        other1 = pd.date_range("1/6/2000", freq="D", periods=5, tz=tz)
-        expected1 = pd.date_range("1/1/2000", freq="D", periods=10, tz=tz)
+        rng1 = date_range("1/1/2000", freq="D", periods=5, tz=tz)
+        other1 = date_range("1/6/2000", freq="D", periods=5, tz=tz)
+        expected1 = date_range("1/1/2000", freq="D", periods=10, tz=tz)
         expected1_notsorted = DatetimeIndex(list(other1) + list(rng1))

-        rng2 = pd.date_range("1/1/2000", freq="D", periods=5, tz=tz)
-        other2 = pd.date_range("1/4/2000", freq="D", periods=5, tz=tz)
-        expected2 = pd.date_range("1/1/2000", freq="D", periods=8, tz=tz)
+        rng2 = date_range("1/1/2000", freq="D", periods=5, tz=tz)
+        other2 = date_range("1/4/2000", freq="D", periods=5, tz=tz)
+        expected2 = date_range("1/1/2000", freq="D", periods=8, tz=tz)
         expected2_notsorted = DatetimeIndex(list(other2) + list(rng2[:3]))

-        rng3 = pd.date_range("1/1/2000", freq="D", periods=5, tz=tz)
+        rng3 = date_range("1/1/2000", freq="D", periods=5, tz=tz)
         other3 = DatetimeIndex([], tz=tz)
-        expected3 = pd.date_range("1/1/2000", freq="D", periods=5, tz=tz)
+        expected3 = date_range("1/1/2000", freq="D", periods=5, tz=tz)
freq="D", periods=5, tz=tz) expected3_notsorted = rng3 for rng, other, exp, exp_notsorted in [ @@ -156,7 +156,7 @@ def test_union_freq_infer(self): # When taking the union of two DatetimeIndexes, we infer # a freq even if the arguments don't have freq. This matches # TimedeltaIndex behavior. - dti = pd.date_range("2016-01-01", periods=5) + dti = date_range("2016-01-01", periods=5) left = dti[[0, 1, 3, 4]] right = dti[[2, 3, 1]] @@ -175,7 +175,7 @@ def test_union_dataframe_index(self): s2 = Series(np.random.randn(len(rng2)), rng2) df = DataFrame({"s1": s1, "s2": s2}) - exp = pd.date_range("1/1/1980", "1/1/2012", freq="MS") + exp = date_range("1/1/1980", "1/1/2012", freq="MS") tm.assert_index_equal(df.index, exp) def test_union_with_DatetimeIndex(self, sort): @@ -309,11 +309,11 @@ def test_difference(self, tz, sort): rng_dates = ["1/2/2000", "1/3/2000", "1/1/2000", "1/4/2000", "1/5/2000"] rng1 = DatetimeIndex(rng_dates, tz=tz) - other1 = pd.date_range("1/6/2000", freq="D", periods=5, tz=tz) + other1 = date_range("1/6/2000", freq="D", periods=5, tz=tz) expected1 = DatetimeIndex(rng_dates, tz=tz) rng2 = DatetimeIndex(rng_dates, tz=tz) - other2 = pd.date_range("1/4/2000", freq="D", periods=5, tz=tz) + other2 = date_range("1/4/2000", freq="D", periods=5, tz=tz) expected2 = DatetimeIndex(rng_dates[:3], tz=tz) rng3 = DatetimeIndex(rng_dates, tz=tz) diff --git a/pandas/tests/indexes/datetimes/test_shift.py b/pandas/tests/indexes/datetimes/test_shift.py index 3c202005f7933..611df5d99cb9c 100644 --- a/pandas/tests/indexes/datetimes/test_shift.py +++ b/pandas/tests/indexes/datetimes/test_shift.py @@ -49,7 +49,7 @@ def test_dti_shift_tzaware(self, tz_naive_fixture): def test_dti_shift_freqs(self): # test shift for DatetimeIndex and non DatetimeIndex # GH#8083 - drange = pd.date_range("20130101", periods=5) + drange = date_range("20130101", periods=5) result = drange.shift(1) expected = DatetimeIndex( ["2013-01-02", "2013-01-03", "2013-01-04", "2013-01-05", "2013-01-06"], @@ -123,7 +123,7 @@ def test_dti_shift_near_midnight(self, shift, result_time): def test_shift_periods(self): # GH#22458 : argument 'n' was deprecated in favor of 'periods' - idx = pd.date_range(start=START, end=END, periods=3) + idx = date_range(start=START, end=END, periods=3) tm.assert_index_equal(idx.shift(periods=0), idx) tm.assert_index_equal(idx.shift(0), idx) diff --git a/pandas/tests/indexes/datetimes/test_timezones.py b/pandas/tests/indexes/datetimes/test_timezones.py index 8a73f564ef064..add1bd4bb3972 100644 --- a/pandas/tests/indexes/datetimes/test_timezones.py +++ b/pandas/tests/indexes/datetimes/test_timezones.py @@ -400,10 +400,10 @@ def test_dti_tz_localize_pass_dates_to_utc(self, tzstr): @pytest.mark.parametrize("prefix", ["", "dateutil/"]) def test_dti_tz_localize(self, prefix): tzstr = prefix + "US/Eastern" - dti = pd.date_range(start="1/1/2005", end="1/1/2005 0:00:30.256", freq="L") + dti = date_range(start="1/1/2005", end="1/1/2005 0:00:30.256", freq="L") dti2 = dti.tz_localize(tzstr) - dti_utc = pd.date_range( + dti_utc = date_range( start="1/1/2005 05:00", end="1/1/2005 5:00:30.256", freq="L", tz="utc" ) @@ -412,11 +412,11 @@ def test_dti_tz_localize(self, prefix): dti3 = dti2.tz_convert(prefix + "US/Pacific") tm.assert_numpy_array_equal(dti3.values, dti_utc.values) - dti = pd.date_range(start="11/6/2011 1:59", end="11/6/2011 2:00", freq="L") + dti = date_range(start="11/6/2011 1:59", end="11/6/2011 2:00", freq="L") with pytest.raises(pytz.AmbiguousTimeError, match="Cannot infer dst time"): dti.tz_localize(tzstr) 
- dti = pd.date_range(start="3/13/2011 1:59", end="3/13/2011 2:00", freq="L") + dti = date_range(start="3/13/2011 1:59", end="3/13/2011 2:00", freq="L") with pytest.raises(pytz.NonExistentTimeError, match="2011-03-13 02:00:00"): dti.tz_localize(tzstr) @@ -606,8 +606,8 @@ def test_dti_construction_nonexistent_endpoint(self, tz, option, expected): assert times[-1] == Timestamp(expected, tz=tz, freq="H") def test_dti_tz_localize_bdate_range(self): - dr = pd.bdate_range("1/1/2009", "1/1/2010") - dr_utc = pd.bdate_range("1/1/2009", "1/1/2010", tz=pytz.utc) + dr = bdate_range("1/1/2009", "1/1/2010") + dr_utc = bdate_range("1/1/2009", "1/1/2010", tz=pytz.utc) localized = dr.tz_localize(pytz.utc) tm.assert_index_equal(dr_utc, localized) @@ -805,7 +805,7 @@ def test_dti_tz_constructors(self, tzstr): arr = ["11/10/2005 08:00:00", "11/10/2005 09:00:00"] idx1 = to_datetime(arr).tz_localize(tzstr) - idx2 = pd.date_range(start="2005-11-10 08:00:00", freq="H", periods=2, tz=tzstr) + idx2 = date_range(start="2005-11-10 08:00:00", freq="H", periods=2, tz=tzstr) idx2 = idx2._with_freq(None) # the others all have freq=None idx3 = DatetimeIndex(arr, tz=tzstr) idx4 = DatetimeIndex(np.array(arr), tz=tzstr) @@ -874,7 +874,7 @@ def test_drop_dst_boundary(self): start = Timestamp("201710290100", tz=tz) end = Timestamp("201710290300", tz=tz) - index = pd.date_range(start=start, end=end, freq=freq) + index = date_range(start=start, end=end, freq=freq) expected = DatetimeIndex( [ diff --git a/pandas/tests/indexes/multi/test_constructors.py b/pandas/tests/indexes/multi/test_constructors.py index a2ca686d0412d..85f3d17fdd0d4 100644 --- a/pandas/tests/indexes/multi/test_constructors.py +++ b/pandas/tests/indexes/multi/test_constructors.py @@ -190,8 +190,8 @@ def test_from_arrays_tuples(idx): def test_from_arrays_index_series_datetimetz(): - idx1 = pd.date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern") - idx2 = pd.date_range("2015-01-01 10:00", freq="H", periods=3, tz="Asia/Tokyo") + idx1 = date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern") + idx2 = date_range("2015-01-01 10:00", freq="H", periods=3, tz="Asia/Tokyo") result = MultiIndex.from_arrays([idx1, idx2]) tm.assert_index_equal(result.get_level_values(0), idx1) tm.assert_index_equal(result.get_level_values(1), idx2) @@ -232,8 +232,8 @@ def test_from_arrays_index_series_period(): def test_from_arrays_index_datetimelike_mixed(): - idx1 = pd.date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern") - idx2 = pd.date_range("2015-01-01 10:00", freq="H", periods=3) + idx1 = date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern") + idx2 = date_range("2015-01-01 10:00", freq="H", periods=3) idx3 = pd.timedelta_range("1 days", freq="D", periods=3) idx4 = pd.period_range("2011-01-01", freq="D", periods=3) @@ -667,7 +667,7 @@ def test_from_frame_dtype_fidelity(): # GH 22420 df = pd.DataFrame( { - "dates": pd.date_range("19910905", periods=6, tz="US/Eastern"), + "dates": date_range("19910905", periods=6, tz="US/Eastern"), "a": [1, 1, 1, 2, 2, 2], "b": pd.Categorical(["a", "a", "b", "b", "c", "c"], ordered=True), "c": ["x", "x", "y", "z", "x", "y"], @@ -677,7 +677,7 @@ def test_from_frame_dtype_fidelity(): expected_mi = MultiIndex.from_arrays( [ - pd.date_range("19910905", periods=6, tz="US/Eastern"), + date_range("19910905", periods=6, tz="US/Eastern"), [1, 1, 1, 2, 2, 2], pd.Categorical(["a", "a", "b", "b", "c", "c"], ordered=True), ["x", "x", "y", "z", "x", "y"], @@ -754,7 +754,7 @@ def test_datetimeindex(): idx1 = 
     idx1 = pd.DatetimeIndex(
         ["2013-04-01 9:00", "2013-04-02 9:00", "2013-04-03 9:00"] * 2, tz="Asia/Tokyo"
     )
-    idx2 = pd.date_range("2010/01/01", periods=6, freq="M", tz="US/Eastern")
+    idx2 = date_range("2010/01/01", periods=6, freq="M", tz="US/Eastern")
     idx = MultiIndex.from_arrays([idx1, idx2])

     expected1 = pd.DatetimeIndex(
diff --git a/pandas/tests/indexes/multi/test_indexing.py b/pandas/tests/indexes/multi/test_indexing.py
index e0241c2c5eadd..6bce89c520ce6 100644
--- a/pandas/tests/indexes/multi/test_indexing.py
+++ b/pandas/tests/indexes/multi/test_indexing.py
@@ -696,7 +696,7 @@ def test_contains_top_level(self):
     def test_contains_with_nat(self):
         # MI with a NaT
         mi = MultiIndex(
-            levels=[["C"], pd.date_range("2012-01-01", periods=5)],
+            levels=[["C"], date_range("2012-01-01", periods=5)],
             codes=[[0, 0, 0, 0, 0, 0], [-1, 0, 1, 2, 3, 4]],
             names=[None, "B"],
         )
@@ -757,7 +757,7 @@ def test_timestamp_multiindex_indexer():
     # https://github.com/pandas-dev/pandas/issues/26944
     idx = MultiIndex.from_product(
         [
-            pd.date_range("2019-01-01T00:15:33", periods=100, freq="H", name="date"),
+            date_range("2019-01-01T00:15:33", periods=100, freq="H", name="date"),
             ["x"],
             [3],
         ]
     )
@@ -766,7 +766,7 @@ def test_timestamp_multiindex_indexer():
     result = df.loc[pd.IndexSlice["2019-1-2":, "x", :], "foo"]
     qidx = MultiIndex.from_product(
         [
-            pd.date_range(
+            date_range(
                 start="2019-01-02T00:15:33",
                 end="2019-01-05T03:15:33",
                 freq="H",
diff --git a/pandas/tests/indexes/period/test_indexing.py b/pandas/tests/indexes/period/test_indexing.py
index 19dfa9137cc5c..9b203e1b17517 100644
--- a/pandas/tests/indexes/period/test_indexing.py
+++ b/pandas/tests/indexes/period/test_indexing.py
@@ -362,7 +362,7 @@ def test_get_loc2(self):

     def test_get_loc_invalid_string_raises_keyerror(self):
         # GH#34240
-        pi = pd.period_range("2000", periods=3, name="A")
+        pi = period_range("2000", periods=3, name="A")
         with pytest.raises(KeyError, match="A"):
             pi.get_loc("A")

@@ -713,7 +713,7 @@ def test_get_value(self):

     def test_loc_str(self):
         # https://github.com/pandas-dev/pandas/issues/33964
-        index = pd.period_range(start="2000", periods=20, freq="B")
+        index = period_range(start="2000", periods=20, freq="B")
         series = Series(range(20), index=index)
         assert series.loc["2000-01-14"] == 9

@@ -821,7 +821,7 @@ def test_contains_nat(self):

 class TestAsOfLocs:
     def test_asof_locs_mismatched_type(self):
-        dti = pd.date_range("2016-01-01", periods=3)
+        dti = date_range("2016-01-01", periods=3)
         pi = dti.to_period("D")
         pi2 = dti.to_period("H")

diff --git a/pandas/tests/indexes/test_base.py b/pandas/tests/indexes/test_base.py
index 53467819c3ba0..a4f23741650ec 100644
--- a/pandas/tests/indexes/test_base.py
+++ b/pandas/tests/indexes/test_base.py
@@ -90,16 +90,16 @@ def test_constructor_copy(self, index):
     @pytest.mark.parametrize(
         "index",
         [
-            pd.date_range(
+            date_range(
                 "2015-01-01 10:00",
                 freq="D",
                 periods=3,
                 tz="US/Eastern",
                 name="Green Eggs & Ham",
             ),  # DTI with tz
-            pd.date_range("2015-01-01 10:00", freq="D", periods=3),  # DTI no tz
+            date_range("2015-01-01 10:00", freq="D", periods=3),  # DTI no tz
             pd.timedelta_range("1 days", freq="D", periods=3),  # td
-            pd.period_range("2015-01-01", freq="D", periods=3),  # period
+            period_range("2015-01-01", freq="D", periods=3),  # period
         ],
     )
     def test_constructor_from_index_dtlike(self, cast_as_obj, index):
@@ -125,11 +125,11 @@ def test_constructor_from_index_dtlike(self, cast_as_obj, index):
         "index,has_tz",
         [
             (
-                pd.date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern"),
date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern"), True, ), # datetimetz (pd.timedelta_range("1 days", freq="D", periods=3), False), # td - (pd.period_range("2015-01-01", freq="D", periods=3), False), # period + (period_range("2015-01-01", freq="D", periods=3), False), # period ], ) def test_constructor_from_series_dtlike(self, index, has_tz): @@ -341,7 +341,7 @@ def test_constructor_dtypes_datetime(self, tz_naive_fixture, attr, klass): # .asi8 produces integers, so these are considered epoch timestamps # ^the above will be true in a later version. Right now we `.view` # the i8 values as NS_DTYPE, effectively treating them as wall times. - index = pd.date_range("2011-01-01", periods=5) + index = date_range("2011-01-01", periods=5) arg = getattr(index, attr) index = index.tz_localize(tz_naive_fixture) dtype = index.dtype @@ -550,7 +550,7 @@ def test_asof(self, index): assert isinstance(index.asof(d), Timestamp) def test_asof_datetime_partial(self): - index = pd.date_range("2010-01-01", periods=2, freq="m") + index = date_range("2010-01-01", periods=2, freq="m") expected = Timestamp("2010-02-28") result = index.asof("2010-02") assert result == expected @@ -718,7 +718,7 @@ def test_union_identity(self, index, sort): def test_union_dt_as_obj(self, sort): # TODO: Replace with fixturesult index = self.create_index() - date_index = pd.date_range("2019-01-01", periods=10) + date_index = date_range("2019-01-01", periods=10) first_cat = index.union(date_index) second_cat = index.union(index) @@ -1639,7 +1639,7 @@ def test_isin_empty(self, empty): [1.0, 2.0, 3.0, 4.0], [True, True, True, True], ["foo", "bar", "baz", "qux"], - pd.date_range("2018-01-01", freq="D", periods=4), + date_range("2018-01-01", freq="D", periods=4), ], ) def test_boolean_cmp(self, values): @@ -1812,8 +1812,8 @@ def test_take_bad_bounds_raises(self): np.array(["A", "B", "C"]), np.array(["C", "B", "A"]), # Must preserve name even if dtype changes - pd.date_range("20130101", periods=3).values, - pd.date_range("20130101", periods=3).tolist(), + date_range("20130101", periods=3).values, + date_range("20130101", periods=3).tolist(), ], ) def test_reindex_preserves_name_if_target_is_list_or_ndarray(self, name, labels): diff --git a/pandas/tests/indexes/timedeltas/test_astype.py b/pandas/tests/indexes/timedeltas/test_astype.py index a908cada5b5dc..6f82e77faca7a 100644 --- a/pandas/tests/indexes/timedeltas/test_astype.py +++ b/pandas/tests/indexes/timedeltas/test_astype.py @@ -104,7 +104,7 @@ def test_astype_raises(self, dtype): idx.astype(dtype) def test_astype_category(self): - obj = pd.timedelta_range("1H", periods=2, freq="H") + obj = timedelta_range("1H", periods=2, freq="H") result = obj.astype("category") expected = pd.CategoricalIndex([Timedelta("1H"), Timedelta("2H")]) @@ -115,7 +115,7 @@ def test_astype_category(self): tm.assert_categorical_equal(result, expected) def test_astype_array_fallback(self): - obj = pd.timedelta_range("1H", periods=2) + obj = timedelta_range("1H", periods=2) result = obj.astype(bool) expected = Index(np.array([True, True])) tm.assert_index_equal(result, expected) diff --git a/pandas/tests/indexes/timedeltas/test_constructors.py b/pandas/tests/indexes/timedeltas/test_constructors.py index 1c0104f340f75..a07977702531e 100644 --- a/pandas/tests/indexes/timedeltas/test_constructors.py +++ b/pandas/tests/indexes/timedeltas/test_constructors.py @@ -27,7 +27,7 @@ def test_infer_from_tdi(self): # GH#23539 # fast-path for inferring a frequency if the passed data already # has one - tdi = 
pd.timedelta_range("1 second", periods=10 ** 7, freq="1s") + tdi = timedelta_range("1 second", periods=10 ** 7, freq="1s") result = TimedeltaIndex(tdi, freq="infer") assert result.freq == tdi.freq @@ -40,7 +40,7 @@ def test_infer_from_tdi_mismatch(self): # GH#23539 # fast-path for invalidating a frequency if the passed data already # has one and it does not match the `freq` input - tdi = pd.timedelta_range("1 second", periods=100, freq="1s") + tdi = timedelta_range("1 second", periods=100, freq="1s") msg = ( "Inferred frequency .* from passed values does " diff --git a/pandas/tests/indexes/timedeltas/test_indexing.py b/pandas/tests/indexes/timedeltas/test_indexing.py index 37aa9653550fb..d79865c1446db 100644 --- a/pandas/tests/indexes/timedeltas/test_indexing.py +++ b/pandas/tests/indexes/timedeltas/test_indexing.py @@ -65,7 +65,7 @@ def test_getitem(self): ) def test_timestamp_invalid_key(self, key): # GH#20464 - tdi = pd.timedelta_range(0, periods=10) + tdi = timedelta_range(0, periods=10) with pytest.raises(KeyError, match=re.escape(repr(key))): tdi.get_loc(key) diff --git a/pandas/tests/indexes/timedeltas/test_ops.py b/pandas/tests/indexes/timedeltas/test_ops.py index 52097dbe610ef..3578174e17141 100644 --- a/pandas/tests/indexes/timedeltas/test_ops.py +++ b/pandas/tests/indexes/timedeltas/test_ops.py @@ -69,7 +69,7 @@ def test_nonunique_contains(self): def test_unknown_attribute(self): # see gh-9680 - tdi = pd.timedelta_range(start=0, periods=10, freq="1s") + tdi = timedelta_range(start=0, periods=10, freq="1s") ts = Series(np.random.normal(size=10), index=tdi) assert "foo" not in ts.__dict__.keys() msg = "'Series' object has no attribute 'foo'" @@ -138,7 +138,7 @@ def test_order(self): def test_drop_duplicates_metadata(self, freq_sample): # GH 10115 - idx = pd.timedelta_range("1 day", periods=10, freq=freq_sample, name="idx") + idx = timedelta_range("1 day", periods=10, freq=freq_sample, name="idx") result = idx.drop_duplicates() tm.assert_index_equal(idx, result) assert idx.freq == result.freq @@ -164,7 +164,7 @@ def test_drop_duplicates_metadata(self, freq_sample): ) def test_drop_duplicates(self, freq_sample, keep, expected, index): # to check Index/Series compat - idx = pd.timedelta_range("1 day", periods=10, freq=freq_sample, name="idx") + idx = timedelta_range("1 day", periods=10, freq=freq_sample, name="idx") idx = idx.append(idx[:5]) tm.assert_numpy_array_equal(idx.duplicated(keep=keep), expected) @@ -178,13 +178,13 @@ def test_drop_duplicates(self, freq_sample, keep, expected, index): def test_infer_freq(self, freq_sample): # GH#11018 - idx = pd.timedelta_range("1", freq=freq_sample, periods=10) + idx = timedelta_range("1", freq=freq_sample, periods=10) result = TimedeltaIndex(idx.asi8, freq="infer") tm.assert_index_equal(idx, result) assert result.freq == freq_sample def test_repeat(self): - index = pd.timedelta_range("1 days", periods=2, freq="D") + index = timedelta_range("1 days", periods=2, freq="D") exp = TimedeltaIndex(["1 days", "1 days", "2 days", "2 days"]) for res in [index.repeat(2), np.repeat(index, 2)]: tm.assert_index_equal(res, exp) diff --git a/pandas/tests/indexes/timedeltas/test_scalar_compat.py b/pandas/tests/indexes/timedeltas/test_scalar_compat.py index 6a2238d90b590..2f9e1a88a04a8 100644 --- a/pandas/tests/indexes/timedeltas/test_scalar_compat.py +++ b/pandas/tests/indexes/timedeltas/test_scalar_compat.py @@ -7,7 +7,6 @@ from pandas._libs.tslibs.offsets import INVALID_FREQ_ERR_MSG -import pandas as pd from pandas import Index, Series, Timedelta, 
 from pandas import Index, Series, Timedelta, TimedeltaIndex, timedelta_range
 import pandas._testing as tm

@@ -43,7 +42,7 @@ def test_tdi_total_seconds(self):
         )

     def test_tdi_round(self):
-        td = pd.timedelta_range(start="16801 days", periods=5, freq="30Min")
+        td = timedelta_range(start="16801 days", periods=5, freq="30Min")
         elt = td[1]

         expected_rng = TimedeltaIndex(
diff --git a/pandas/tests/indexes/timedeltas/test_setops.py b/pandas/tests/indexes/timedeltas/test_setops.py
index 94fdfefa497a3..2e4e4bfde9202 100644
--- a/pandas/tests/indexes/timedeltas/test_setops.py
+++ b/pandas/tests/indexes/timedeltas/test_setops.py
@@ -82,7 +82,7 @@ def test_union_freq_infer(self):
         # When taking the union of two TimedeltaIndexes, we infer
         # a freq even if the arguments don't have freq. This matches
         # DatetimeIndex behavior.
-        tdi = pd.timedelta_range("1 Day", periods=5)
+        tdi = timedelta_range("1 Day", periods=5)
         left = tdi[[0, 1, 3, 4]]
         right = tdi[[2, 3, 1]]