diff --git a/pandas/conftest.py b/pandas/conftest.py
index 81a039e484cf1..e78f565b0a9af 100644
--- a/pandas/conftest.py
+++ b/pandas/conftest.py
@@ -87,3 +87,24 @@ def join_type(request):
     Fixture for trying all types of join operations
     """
     return request.param
+
+
+TIMEZONES = [None, 'UTC', 'US/Eastern', 'Asia/Tokyo', 'dateutil/US/Pacific']
+
+
+@td.parametrize_fixture_doc(str(TIMEZONES))
+@pytest.fixture(params=TIMEZONES)
+def tz_naive_fixture(request):
+    """
+    Fixture for trying timezones including default (None): {0}
+    """
+    return request.param
+
+
+@td.parametrize_fixture_doc(str(TIMEZONES[1:]))
+@pytest.fixture(params=TIMEZONES[1:])
+def tz_aware_fixture(request):
+    """
+    Fixture for trying explicit timezones: {0}
+    """
+    return request.param
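A quick illustration (not part of the patch) of how a test consumes these fixtures: pytest runs the test once per entry in TIMEZONES, so the test body only ever sees a single timezone value. The test name and assertion below are hypothetical.

import pandas as pd


def test_timestamp_tz_roundtrip_sketch(tz_aware_fixture):
    # tz_aware_fixture is injected by pytest with one timezone per run.
    tz = tz_aware_fixture
    ts = pd.Timestamp('2016-01-01', tz='UTC')
    # Converting to the fixture timezone and back should be lossless.
    assert ts.tz_convert(tz).tz_convert('UTC') == ts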
diff --git a/pandas/tests/dtypes/test_cast.py b/pandas/tests/dtypes/test_cast.py
index 96a9e3227b40b..590f28b275aec 100644
--- a/pandas/tests/dtypes/test_cast.py
+++ b/pandas/tests/dtypes/test_cast.py
@@ -144,16 +144,6 @@ def testinfer_dtype_from_scalar(self):
             dtype, val = infer_dtype_from_scalar(data)
             assert dtype == 'm8[ns]'
 
-        for tz in ['UTC', 'US/Eastern', 'Asia/Tokyo']:
-            dt = Timestamp(1, tz=tz)
-            dtype, val = infer_dtype_from_scalar(dt, pandas_dtype=True)
-            assert dtype == 'datetime64[ns, {0}]'.format(tz)
-            assert val == dt.value
-
-            dtype, val = infer_dtype_from_scalar(dt)
-            assert dtype == np.object_
-            assert val == dt
-
         for freq in ['M', 'D']:
             p = Period('2011-01-01', freq=freq)
             dtype, val = infer_dtype_from_scalar(p, pandas_dtype=True)
@@ -171,6 +161,17 @@ def testinfer_dtype_from_scalar(self):
             dtype, val = infer_dtype_from_scalar(data)
             assert dtype == np.object_
 
+    @pytest.mark.parametrize('tz', ['UTC', 'US/Eastern', 'Asia/Tokyo'])
+    def testinfer_from_scalar_tz(self, tz):
+        dt = Timestamp(1, tz=tz)
+        dtype, val = infer_dtype_from_scalar(dt, pandas_dtype=True)
+        assert dtype == 'datetime64[ns, {0}]'.format(tz)
+        assert val == dt.value
+
+        dtype, val = infer_dtype_from_scalar(dt)
+        assert dtype == np.object_
+        assert val == dt
+
     def testinfer_dtype_from_scalar_errors(self):
         with pytest.raises(ValueError):
             infer_dtype_from_scalar(np.array([1]))
diff --git a/pandas/tests/frame/test_alter_axes.py b/pandas/tests/frame/test_alter_axes.py
index 3e0ba26c20eb0..d38e3b2ad9c10 100644
--- a/pandas/tests/frame/test_alter_axes.py
+++ b/pandas/tests/frame/test_alter_axes.py
@@ -269,25 +269,26 @@ def test_set_index_cast_datetimeindex(self):
         df.pop('ts')
         assert_frame_equal(df, expected)
 
+    def test_reset_index_tz(self, tz_aware_fixture):
         # GH 3950
         # reset_index with single level
-        for tz in ['UTC', 'Asia/Tokyo', 'US/Eastern']:
-            idx = pd.date_range('1/1/2011', periods=5,
-                                freq='D', tz=tz, name='idx')
-            df = pd.DataFrame(
-                {'a': range(5), 'b': ['A', 'B', 'C', 'D', 'E']}, index=idx)
-
-            expected = pd.DataFrame({'idx': [datetime(2011, 1, 1),
-                                             datetime(2011, 1, 2),
-                                             datetime(2011, 1, 3),
-                                             datetime(2011, 1, 4),
-                                             datetime(2011, 1, 5)],
-                                     'a': range(5),
-                                     'b': ['A', 'B', 'C', 'D', 'E']},
-                                    columns=['idx', 'a', 'b'])
-            expected['idx'] = expected['idx'].apply(
-                lambda d: pd.Timestamp(d, tz=tz))
-            assert_frame_equal(df.reset_index(), expected)
+        tz = tz_aware_fixture
+        idx = pd.date_range('1/1/2011', periods=5,
+                            freq='D', tz=tz, name='idx')
+        df = pd.DataFrame(
+            {'a': range(5), 'b': ['A', 'B', 'C', 'D', 'E']}, index=idx)
+
+        expected = pd.DataFrame({'idx': [datetime(2011, 1, 1),
+                                         datetime(2011, 1, 2),
+                                         datetime(2011, 1, 3),
+                                         datetime(2011, 1, 4),
+                                         datetime(2011, 1, 5)],
+                                 'a': range(5),
+                                 'b': ['A', 'B', 'C', 'D', 'E']},
+                                columns=['idx', 'a', 'b'])
+        expected['idx'] = expected['idx'].apply(
+            lambda d: pd.Timestamp(d, tz=tz))
+        assert_frame_equal(df.reset_index(), expected)
 
     def test_set_index_timezone(self):
         # GH 12358
diff --git a/pandas/tests/indexes/datetimes/test_arithmetic.py b/pandas/tests/indexes/datetimes/test_arithmetic.py
index 8f259a7e78897..4ef7997a53b85 100644
--- a/pandas/tests/indexes/datetimes/test_arithmetic.py
+++ b/pandas/tests/indexes/datetimes/test_arithmetic.py
@@ -876,23 +876,24 @@ def test_dti_with_offset_series(self, tz, names):
         res3 = dti - other
         tm.assert_series_equal(res3, expected_sub)
 
-    def test_dti_add_offset_tzaware(self):
-        dates = date_range('2012-11-01', periods=3, tz='US/Pacific')
-        offset = dates + pd.offsets.Hour(5)
-        assert dates[0] + pd.offsets.Hour(5) == offset[0]
+    def test_dti_add_offset_tzaware(self, tz_aware_fixture):
+        timezone = tz_aware_fixture
+        if timezone in ('US/Pacific', 'dateutil/US/Pacific'):
+            dates = date_range('2012-11-01', periods=3, tz=timezone)
+            offset = dates + pd.offsets.Hour(5)
+            assert dates[0] + pd.offsets.Hour(5) == offset[0]
 
-        # GH#6818
-        for tz in ['UTC', 'US/Pacific', 'Asia/Tokyo']:
-            dates = date_range('2010-11-01 00:00', periods=3, tz=tz, freq='H')
-            expected = DatetimeIndex(['2010-11-01 05:00', '2010-11-01 06:00',
-                                      '2010-11-01 07:00'], freq='H', tz=tz)
+        dates = date_range('2010-11-01 00:00',
+                           periods=3, tz=timezone, freq='H')
+        expected = DatetimeIndex(['2010-11-01 05:00', '2010-11-01 06:00',
+                                  '2010-11-01 07:00'], freq='H', tz=timezone)
 
-            offset = dates + pd.offsets.Hour(5)
-            tm.assert_index_equal(offset, expected)
-            offset = dates + np.timedelta64(5, 'h')
-            tm.assert_index_equal(offset, expected)
-            offset = dates + timedelta(hours=5)
-            tm.assert_index_equal(offset, expected)
+        offset = dates + pd.offsets.Hour(5)
+        tm.assert_index_equal(offset, expected)
+        offset = dates + np.timedelta64(5, 'h')
+        tm.assert_index_equal(offset, expected)
+        offset = dates + timedelta(hours=5)
+        tm.assert_index_equal(offset, expected)
 
     @pytest.mark.parametrize('klass,assert_func', [
diff --git a/pandas/tests/indexes/datetimes/test_construction.py b/pandas/tests/indexes/datetimes/test_construction.py
index 176f5bd0c1a2a..86030a5605395 100644
--- a/pandas/tests/indexes/datetimes/test_construction.py
+++ b/pandas/tests/indexes/datetimes/test_construction.py
@@ -450,9 +450,8 @@ def test_dti_constructor_preserve_dti_freq(self):
         rng2 = DatetimeIndex(rng)
         assert rng.freq == rng2.freq
 
-    @pytest.mark.parametrize('tz', [None, 'UTC', 'Asia/Tokyo',
-                                    'dateutil/US/Pacific'])
-    def test_dti_constructor_years_only(self, tz):
+    def test_dti_constructor_years_only(self, tz_naive_fixture):
+        tz = tz_naive_fixture
         # GH 6961
         rng1 = date_range('2014', '2015', freq='M', tz=tz)
         expected1 = date_range('2014-01-31', '2014-12-31', freq='M', tz=tz)
diff --git a/pandas/tests/indexes/datetimes/test_datetime.py b/pandas/tests/indexes/datetimes/test_datetime.py
index 51788b3e25507..2d55dfff7a8f3 100644
--- a/pandas/tests/indexes/datetimes/test_datetime.py
+++ b/pandas/tests/indexes/datetimes/test_datetime.py
@@ -329,8 +329,8 @@ def test_factorize(self):
         tm.assert_numpy_array_equal(arr, exp_arr)
         tm.assert_index_equal(idx, idx3)
 
-    @pytest.mark.parametrize('tz', [None, 'UTC', 'US/Eastern', 'Asia/Tokyo'])
-    def test_factorize_tz(self, tz):
+    def test_factorize_tz(self, tz_naive_fixture):
+        tz = tz_naive_fixture
         # GH#13750
         base = pd.date_range('2016-11-05', freq='H', periods=100, tz=tz)
         idx = base.repeat(5)
diff --git a/pandas/tests/indexes/datetimes/test_ops.py b/pandas/tests/indexes/datetimes/test_ops.py
index ed7e425924097..8986828399a98 100644
--- a/pandas/tests/indexes/datetimes/test_ops.py
+++ b/pandas/tests/indexes/datetimes/test_ops.py
@@ -13,12 +13,17 @@
 from pandas.tests.test_base import Ops
 
 
+@pytest.fixture(params=[None, 'UTC', 'Asia/Tokyo', 'US/Eastern',
+                        'dateutil/Asia/Singapore',
+                        'dateutil/US/Pacific'])
+def tz_fixture(request):
+    return request.param
+
+
 START, END = datetime(2009, 1, 1), datetime(2010, 1, 1)
 
 
 class TestDatetimeIndexOps(Ops):
-    tz = [None, 'UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/Asia/Singapore',
-          'dateutil/US/Pacific']
 
     def setup_method(self, method):
         super(TestDatetimeIndexOps, self).setup_method(method)
@@ -47,34 +52,35 @@ def test_ops_properties_basic(self):
             assert s.day == 10
             pytest.raises(AttributeError, lambda: s.weekday)
 
-    def test_minmax(self):
-        for tz in self.tz:
-            # monotonic
-            idx1 = pd.DatetimeIndex(['2011-01-01', '2011-01-02',
-                                     '2011-01-03'], tz=tz)
-            assert idx1.is_monotonic
+    def test_minmax_tz(self, tz_fixture):
+        tz = tz_fixture
+        # monotonic
+        idx1 = pd.DatetimeIndex(['2011-01-01', '2011-01-02',
+                                 '2011-01-03'], tz=tz)
+        assert idx1.is_monotonic
 
-            # non-monotonic
-            idx2 = pd.DatetimeIndex(['2011-01-01', pd.NaT, '2011-01-03',
-                                     '2011-01-02', pd.NaT], tz=tz)
-            assert not idx2.is_monotonic
+        # non-monotonic
+        idx2 = pd.DatetimeIndex(['2011-01-01', pd.NaT, '2011-01-03',
+                                 '2011-01-02', pd.NaT], tz=tz)
+        assert not idx2.is_monotonic
 
-            for idx in [idx1, idx2]:
-                assert idx.min() == Timestamp('2011-01-01', tz=tz)
-                assert idx.max() == Timestamp('2011-01-03', tz=tz)
-                assert idx.argmin() == 0
-                assert idx.argmax() == 2
+        for idx in [idx1, idx2]:
+            assert idx.min() == Timestamp('2011-01-01', tz=tz)
+            assert idx.max() == Timestamp('2011-01-03', tz=tz)
+            assert idx.argmin() == 0
+            assert idx.argmax() == 2
 
-        for op in ['min', 'max']:
-            # Return NaT
-            obj = DatetimeIndex([])
-            assert pd.isna(getattr(obj, op)())
+    @pytest.mark.parametrize('op', ['min', 'max'])
+    def test_minmax_nat(self, op):
+        # Return NaT
+        obj = DatetimeIndex([])
+        assert pd.isna(getattr(obj, op)())
 
-            obj = DatetimeIndex([pd.NaT])
-            assert pd.isna(getattr(obj, op)())
+        obj = DatetimeIndex([pd.NaT])
+        assert pd.isna(getattr(obj, op)())
 
-            obj = DatetimeIndex([pd.NaT, pd.NaT, pd.NaT])
-            assert pd.isna(getattr(obj, op)())
+        obj = DatetimeIndex([pd.NaT, pd.NaT, pd.NaT])
+        assert pd.isna(getattr(obj, op)())
 
     def test_numpy_minmax(self):
         dr = pd.date_range(start='2016-01-15', end='2016-01-20')
@@ -96,8 +102,8 @@ def test_numpy_minmax(self):
         tm.assert_raises_regex(
             ValueError, errmsg, np.argmax, dr, out=0)
 
-    @pytest.mark.parametrize('tz', tz)
-    def test_repeat_range(self, tz):
+    def test_repeat_range(self, tz_fixture):
+        tz = tz_fixture
         rng = date_range('1/1/2000', '1/1/2001')
 
         result = rng.repeat(5)
@@ -128,8 +134,8 @@ def test_repeat_range(self, tz):
             tm.assert_index_equal(res, exp)
             assert res.freq is None
 
-    @pytest.mark.parametrize('tz', tz)
-    def test_repeat(self, tz):
+    def test_repeat(self, tz_fixture):
+        tz = tz_fixture
         reps = 2
         msg = "the 'axis' parameter is not supported"
 
@@ -151,8 +157,8 @@ def test_repeat(self, tz):
         tm.assert_raises_regex(ValueError, msg, np.repeat,
                                rng, reps, axis=1)
 
-    @pytest.mark.parametrize('tz', tz)
-    def test_resolution(self, tz):
+    def test_resolution(self, tz_fixture):
+        tz = tz_fixture
         for freq, expected in zip(['A', 'Q', 'M', 'D', 'H', 'T',
                                    'S', 'L', 'U'],
                                   ['day', 'day', 'day', 'day', 'hour',
@@ -162,8 +168,8 @@ def test_resolution(self, tz):
                                    tz=tz)
             assert idx.resolution == expected
 
-    @pytest.mark.parametrize('tz', tz)
-    def test_value_counts_unique(self, tz):
+    def test_value_counts_unique(self, tz_fixture):
+        tz = tz_fixture
         # GH 7735
         idx = pd.date_range('2011-01-01 09:00', freq='H', periods=10)
         # create repeated values, 'n'th element is repeated by n+1 times
@@ -209,86 +215,89 @@ def test_nonunique_contains(self):
                          ['2015', '2015', '2016'],
                          ['2015', '2015', '2014'])):
             assert idx[0] in idx
 
-    def test_order(self):
-        # with freq
-        idx1 = DatetimeIndex(['2011-01-01', '2011-01-02',
-                              '2011-01-03'], freq='D', name='idx')
-        idx2 = DatetimeIndex(['2011-01-01 09:00', '2011-01-01 10:00',
-                              '2011-01-01 11:00'], freq='H',
-                             tz='Asia/Tokyo', name='tzidx')
-
-        for idx in [idx1, idx2]:
-            ordered = idx.sort_values()
-            tm.assert_index_equal(ordered, idx)
-            assert ordered.freq == idx.freq
-
-            ordered = idx.sort_values(ascending=False)
-            expected = idx[::-1]
-            tm.assert_index_equal(ordered, expected)
-            assert ordered.freq == expected.freq
-            assert ordered.freq.n == -1
-
-            ordered, indexer = idx.sort_values(return_indexer=True)
-            tm.assert_index_equal(ordered, idx)
-            tm.assert_numpy_array_equal(indexer, np.array([0, 1, 2]),
-                                        check_dtype=False)
-            assert ordered.freq == idx.freq
-
-            ordered, indexer = idx.sort_values(return_indexer=True,
-                                               ascending=False)
-            expected = idx[::-1]
-            tm.assert_index_equal(ordered, expected)
-            tm.assert_numpy_array_equal(indexer,
-                                        np.array([2, 1, 0]),
-                                        check_dtype=False)
-            assert ordered.freq == expected.freq
-            assert ordered.freq.n == -1
+    @pytest.mark.parametrize('idx',
+                             [
+                                 DatetimeIndex(
+                                     ['2011-01-01',
+                                      '2011-01-02',
+                                      '2011-01-03'],
+                                     freq='D', name='idx'),
+                                 DatetimeIndex(
+                                     ['2011-01-01 09:00',
+                                      '2011-01-01 10:00',
+                                      '2011-01-01 11:00'],
+                                     freq='H', name='tzidx', tz='Asia/Tokyo')
+                             ])
+    def test_order_with_freq(self, idx):
+        ordered = idx.sort_values()
+        tm.assert_index_equal(ordered, idx)
+        assert ordered.freq == idx.freq
+
+        ordered = idx.sort_values(ascending=False)
+        expected = idx[::-1]
+        tm.assert_index_equal(ordered, expected)
+        assert ordered.freq == expected.freq
+        assert ordered.freq.n == -1
+
+        ordered, indexer = idx.sort_values(return_indexer=True)
+        tm.assert_index_equal(ordered, idx)
+        tm.assert_numpy_array_equal(indexer, np.array([0, 1, 2]),
+                                    check_dtype=False)
+        assert ordered.freq == idx.freq
+
+        ordered, indexer = idx.sort_values(return_indexer=True,
+                                           ascending=False)
+        expected = idx[::-1]
+        tm.assert_index_equal(ordered, expected)
+        tm.assert_numpy_array_equal(indexer,
+                                    np.array([2, 1, 0]),
+                                    check_dtype=False)
+        assert ordered.freq == expected.freq
+        assert ordered.freq.n == -1
+
+    @pytest.mark.parametrize('index_dates,expected_dates', [
+        (['2011-01-01', '2011-01-03', '2011-01-05',
+          '2011-01-02', '2011-01-01'],
+         ['2011-01-01', '2011-01-01', '2011-01-02',
+          '2011-01-03', '2011-01-05']),
+        (['2011-01-01', '2011-01-03', '2011-01-05',
+          '2011-01-02', '2011-01-01'],
+         ['2011-01-01', '2011-01-01', '2011-01-02',
+          '2011-01-03', '2011-01-05']),
+        ([pd.NaT, '2011-01-03', '2011-01-05',
+          '2011-01-02', pd.NaT],
+         [pd.NaT, pd.NaT, '2011-01-02', '2011-01-03',
+          '2011-01-05'])
+    ])
+    def test_order_without_freq(self, index_dates, expected_dates, tz_fixture):
+        tz = tz_fixture
 
         # without freq
-        for tz in self.tz:
-            idx1 = DatetimeIndex(['2011-01-01', '2011-01-03', '2011-01-05',
-                                  '2011-01-02', '2011-01-01'],
-                                 tz=tz, name='idx1')
-            exp1 = DatetimeIndex(['2011-01-01', '2011-01-01', '2011-01-02',
-                                  '2011-01-03', '2011-01-05'],
-                                 tz=tz, name='idx1')
-
-            idx2 = DatetimeIndex(['2011-01-01', '2011-01-03', '2011-01-05',
-                                  '2011-01-02', '2011-01-01'],
-                                 tz=tz, name='idx2')
-
-            exp2 = DatetimeIndex(['2011-01-01', '2011-01-01', '2011-01-02',
-                                  '2011-01-03', '2011-01-05'],
-                                 tz=tz, name='idx2')
-
-            idx3 = DatetimeIndex([pd.NaT, '2011-01-03', '2011-01-05',
-                                  '2011-01-02', pd.NaT], tz=tz, name='idx3')
-            exp3 = DatetimeIndex([pd.NaT, pd.NaT, '2011-01-02', '2011-01-03',
-                                  '2011-01-05'], tz=tz, name='idx3')
-
-            for idx, expected in [(idx1, exp1), (idx2, exp2), (idx3, exp3)]:
-                ordered = idx.sort_values()
-                tm.assert_index_equal(ordered, expected)
-                assert ordered.freq is None
-
-                ordered = idx.sort_values(ascending=False)
-                tm.assert_index_equal(ordered, expected[::-1])
-                assert ordered.freq is None
-
-                ordered, indexer = idx.sort_values(return_indexer=True)
-                tm.assert_index_equal(ordered, expected)
-
-                exp = np.array([0, 4, 3, 1, 2])
-                tm.assert_numpy_array_equal(indexer, exp, check_dtype=False)
-                assert ordered.freq is None
-
-                ordered, indexer = idx.sort_values(return_indexer=True,
-                                                   ascending=False)
-                tm.assert_index_equal(ordered, expected[::-1])
-
-                exp = np.array([2, 1, 3, 4, 0])
-                tm.assert_numpy_array_equal(indexer, exp, check_dtype=False)
-                assert ordered.freq is None
+        index = DatetimeIndex(index_dates, tz=tz, name='idx')
+        expected = DatetimeIndex(expected_dates, tz=tz, name='idx')
+
+        ordered = index.sort_values()
+        tm.assert_index_equal(ordered, expected)
+        assert ordered.freq is None
+
+        ordered = index.sort_values(ascending=False)
+        tm.assert_index_equal(ordered, expected[::-1])
+        assert ordered.freq is None
+
+        ordered, indexer = index.sort_values(return_indexer=True)
+        tm.assert_index_equal(ordered, expected)
+
+        exp = np.array([0, 4, 3, 1, 2])
+        tm.assert_numpy_array_equal(indexer, exp, check_dtype=False)
+        assert ordered.freq is None
+
+        ordered, indexer = index.sort_values(return_indexer=True,
+                                             ascending=False)
+        tm.assert_index_equal(ordered, expected[::-1])
+
+        exp = np.array([2, 1, 3, 4, 0])
+        tm.assert_numpy_array_equal(indexer, exp, check_dtype=False)
+        assert ordered.freq is None
 
     def test_drop_duplicates_metadata(self):
         # GH 10115
@@ -345,12 +354,12 @@ def test_nat_new(self):
         exp = np.array([tslib.iNaT] * 5, dtype=np.int64)
         tm.assert_numpy_array_equal(result, exp)
 
-    @pytest.mark.parametrize('tz', [None, 'US/Eastern', 'UTC'])
-    def test_nat(self, tz):
+    def test_nat(self, tz_naive_fixture):
+        timezone = tz_naive_fixture
         assert pd.DatetimeIndex._na_value is pd.NaT
         assert pd.DatetimeIndex([])._na_value is pd.NaT
 
-        idx = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], tz=tz)
+        idx = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], tz=timezone)
         assert idx._can_hold_na
 
         tm.assert_numpy_array_equal(idx._isnan, np.array([False, False]))
@@ -358,7 +367,7 @@ def test_nat(self, tz):
         tm.assert_numpy_array_equal(idx._nan_idxs,
                                     np.array([], dtype=np.intp))
 
-        idx = pd.DatetimeIndex(['2011-01-01', 'NaT'], tz=tz)
+        idx = pd.DatetimeIndex(['2011-01-01', 'NaT'], tz=timezone)
         assert idx._can_hold_na
 
         tm.assert_numpy_array_equal(idx._isnan, np.array([False, True]))
@@ -366,8 +375,7 @@ def test_nat(self, tz):
         tm.assert_numpy_array_equal(idx._nan_idxs,
                                     np.array([1], dtype=np.intp))
 
-    @pytest.mark.parametrize('tz', [None, 'UTC', 'US/Eastern', 'Asia/Tokyo'])
-    def test_equals(self, tz):
+    def test_equals(self):
         # GH 13107
         idx = pd.DatetimeIndex(['2011-01-01', '2011-01-02', 'NaT'])
         assert idx.equals(idx)
diff --git a/pandas/tests/indexes/datetimes/test_timezones.py b/pandas/tests/indexes/datetimes/test_timezones.py
index a8191816238b1..09210d8b64d1b 100644
--- a/pandas/tests/indexes/datetimes/test_timezones.py
+++ b/pandas/tests/indexes/datetimes/test_timezones.py
@@ -241,9 +241,8 @@ def test_dti_tz_convert_dst(self):
         idx = idx.tz_convert('UTC')
         tm.assert_index_equal(idx.hour, Index([4, 4]))
 
-    @pytest.mark.parametrize('tz', ['UTC', 'Asia/Tokyo', 'US/Eastern',
-                                    'dateutil/US/Pacific'])
-    def test_tz_convert_roundtrip(self, tz):
+    def test_tz_convert_roundtrip(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         idx1 = date_range(start='2014-01-01', end='2014-12-31', freq='M',
                           tz='UTC')
         exp1 = date_range(start='2014-01-01', end='2014-12-31', freq='M')
@@ -431,9 +430,9 @@ def test_dti_tz_localize_utc_conversion(self, tz):
         with pytest.raises(pytz.NonExistentTimeError):
             rng.tz_localize(tz)
 
-    @pytest.mark.parametrize('tz', ['UTC', 'Asia/Tokyo', 'US/Eastern',
-                                    'dateutil/US/Pacific'])
-    def test_dti_tz_localize_roundtrip(self, tz):
+    def test_dti_tz_localize_roundtrip(self, tz_aware_fixture):
+        tz = tz_aware_fixture
+
         idx1 = date_range(start='2014-01-01', end='2014-12-31', freq='M')
         idx2 = date_range(start='2014-01-01', end='2014-12-31', freq='D')
         idx3 = date_range(start='2014-01-01', end='2014-03-01', freq='H')
@@ -443,7 +442,6 @@ def test_dti_tz_localize_roundtrip(self, tz):
             expected = date_range(start=idx[0], end=idx[-1], freq=idx.freq,
                                   tz=tz)
             tm.assert_index_equal(localized, expected)
-
             with pytest.raises(TypeError):
                 localized.tz_localize(tz)
 
diff --git a/pandas/tests/indexes/test_base.py b/pandas/tests/indexes/test_base.py
index ff9c86fbfe384..7e19de4cca292 100644
--- a/pandas/tests/indexes/test_base.py
+++ b/pandas/tests/indexes/test_base.py
@@ -376,28 +376,27 @@ def test_constructor_dtypes(self):
             assert isinstance(idx, Index)
             assert idx.dtype == object
 
-    def test_constructor_dtypes_datetime(self):
-
-        for tz in [None, 'UTC', 'US/Eastern', 'Asia/Tokyo']:
-            idx = pd.date_range('2011-01-01', periods=5, tz=tz)
-            dtype = idx.dtype
-
-            # pass values without timezone, as DatetimeIndex localizes it
-            for values in [pd.date_range('2011-01-01', periods=5).values,
-                           pd.date_range('2011-01-01', periods=5).asi8]:
-
-                for res in [pd.Index(values, tz=tz),
-                            pd.Index(values, dtype=dtype),
-                            pd.Index(list(values), tz=tz),
-                            pd.Index(list(values), dtype=dtype)]:
-                    tm.assert_index_equal(res, idx)
-
-                # check compat with DatetimeIndex
-                for res in [pd.DatetimeIndex(values, tz=tz),
-                            pd.DatetimeIndex(values, dtype=dtype),
-                            pd.DatetimeIndex(list(values), tz=tz),
-                            pd.DatetimeIndex(list(values), dtype=dtype)]:
-                    tm.assert_index_equal(res, idx)
+    def test_constructor_dtypes_datetime(self, tz_naive_fixture):
+        tz = tz_naive_fixture
+        idx = pd.date_range('2011-01-01', periods=5, tz=tz)
+        dtype = idx.dtype
+
+        # pass values without timezone, as DatetimeIndex localizes it
+        for values in [pd.date_range('2011-01-01', periods=5).values,
+                       pd.date_range('2011-01-01', periods=5).asi8]:
+
+            for res in [pd.Index(values, tz=tz),
+                        pd.Index(values, dtype=dtype),
+                        pd.Index(list(values), tz=tz),
+                        pd.Index(list(values), dtype=dtype)]:
+                tm.assert_index_equal(res, idx)
+
+            # check compat with DatetimeIndex
+            for res in [pd.DatetimeIndex(values, tz=tz),
+                        pd.DatetimeIndex(values, dtype=dtype),
+                        pd.DatetimeIndex(list(values), tz=tz),
+                        pd.DatetimeIndex(list(values), dtype=dtype)]:
+                tm.assert_index_equal(res, idx)
 
     def test_constructor_dtypes_timedelta(self):
 
diff --git a/pandas/tests/reshape/test_concat.py b/pandas/tests/reshape/test_concat.py
index 437b4179c580a..ffd37dc4b2f59 100644
--- a/pandas/tests/reshape/test_concat.py
+++ b/pandas/tests/reshape/test_concat.py
@@ -293,88 +293,88 @@ def test_concatlike_common_coerce_to_pandas_object(self):
         assert isinstance(res.iloc[0], pd.Timestamp)
         assert isinstance(res.iloc[-1], pd.Timedelta)
 
-    def test_concatlike_datetimetz(self):
+    def test_concatlike_datetimetz(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH 7795
-        for tz in ['UTC', 'US/Eastern', 'Asia/Tokyo']:
-            dti1 = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], tz=tz)
-            dti2 = pd.DatetimeIndex(['2012-01-01', '2012-01-02'], tz=tz)
+        dti1 = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], tz=tz)
+        dti2 = pd.DatetimeIndex(['2012-01-01', '2012-01-02'], tz=tz)
 
-            exp = pd.DatetimeIndex(['2011-01-01', '2011-01-02',
-                                    '2012-01-01', '2012-01-02'], tz=tz)
+        exp = pd.DatetimeIndex(['2011-01-01', '2011-01-02',
+                                '2012-01-01', '2012-01-02'], tz=tz)
 
-            res = dti1.append(dti2)
-            tm.assert_index_equal(res, exp)
+        res = dti1.append(dti2)
+        tm.assert_index_equal(res, exp)
 
-            dts1 = pd.Series(dti1)
-            dts2 = pd.Series(dti2)
-            res = dts1.append(dts2)
-            tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
+        dts1 = pd.Series(dti1)
+        dts2 = pd.Series(dti2)
+        res = dts1.append(dts2)
+        tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
 
-            res = pd.concat([dts1, dts2])
-            tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
+        res = pd.concat([dts1, dts2])
+        tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
 
-    def test_concatlike_datetimetz_short(self):
+    @pytest.mark.parametrize('tz',
+                             ['UTC', 'US/Eastern', 'Asia/Tokyo', 'EST5EDT'])
+    def test_concatlike_datetimetz_short(self, tz):
         # GH 7795
-        for tz in ['UTC', 'US/Eastern', 'Asia/Tokyo', 'EST5EDT']:
-
-            ix1 = pd.DatetimeIndex(start='2014-07-15', end='2014-07-17',
-                                   freq='D', tz=tz)
-            ix2 = pd.DatetimeIndex(['2014-07-11', '2014-07-21'], tz=tz)
-            df1 = pd.DataFrame(0, index=ix1, columns=['A', 'B'])
-            df2 = pd.DataFrame(0, index=ix2, columns=['A', 'B'])
-
-            exp_idx = pd.DatetimeIndex(['2014-07-15', '2014-07-16',
-                                        '2014-07-17', '2014-07-11',
-                                        '2014-07-21'], tz=tz)
-            exp = pd.DataFrame(0, index=exp_idx, columns=['A', 'B'])
-
-            tm.assert_frame_equal(df1.append(df2), exp)
-            tm.assert_frame_equal(pd.concat([df1, df2]), exp)
-
-    def test_concatlike_datetimetz_to_object(self):
+        ix1 = pd.DatetimeIndex(start='2014-07-15', end='2014-07-17',
+                               freq='D', tz=tz)
+        ix2 = pd.DatetimeIndex(['2014-07-11', '2014-07-21'], tz=tz)
+        df1 = pd.DataFrame(0, index=ix1, columns=['A', 'B'])
+        df2 = pd.DataFrame(0, index=ix2, columns=['A', 'B'])
+
+        exp_idx = pd.DatetimeIndex(['2014-07-15', '2014-07-16',
+                                    '2014-07-17', '2014-07-11',
+                                    '2014-07-21'], tz=tz)
+        exp = pd.DataFrame(0, index=exp_idx, columns=['A', 'B'])
+
+        tm.assert_frame_equal(df1.append(df2), exp)
+        tm.assert_frame_equal(pd.concat([df1, df2]), exp)
+
+    def test_concatlike_datetimetz_to_object(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH 13660
         # different tz coerces to object
-        for tz in ['UTC', 'US/Eastern', 'Asia/Tokyo']:
-            dti1 = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], tz=tz)
-            dti2 = pd.DatetimeIndex(['2012-01-01', '2012-01-02'])
+        dti1 = pd.DatetimeIndex(['2011-01-01', '2011-01-02'], tz=tz)
+        dti2 = pd.DatetimeIndex(['2012-01-01', '2012-01-02'])
 
-            exp = pd.Index([pd.Timestamp('2011-01-01', tz=tz),
-                            pd.Timestamp('2011-01-02', tz=tz),
-                            pd.Timestamp('2012-01-01'),
-                            pd.Timestamp('2012-01-02')], dtype=object)
+        exp = pd.Index([pd.Timestamp('2011-01-01', tz=tz),
+                        pd.Timestamp('2011-01-02', tz=tz),
+                        pd.Timestamp('2012-01-01'),
+                        pd.Timestamp('2012-01-02')], dtype=object)
 
-            res = dti1.append(dti2)
-            tm.assert_index_equal(res, exp)
+        res = dti1.append(dti2)
+        tm.assert_index_equal(res, exp)
 
-            dts1 = pd.Series(dti1)
-            dts2 = pd.Series(dti2)
-            res = dts1.append(dts2)
-            tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
+        dts1 = pd.Series(dti1)
+        dts2 = pd.Series(dti2)
+        res = dts1.append(dts2)
+        tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
 
-            res = pd.concat([dts1, dts2])
-            tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
+        res = pd.concat([dts1, dts2])
+        tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
 
-            # different tz
-            dti3 = pd.DatetimeIndex(['2012-01-01', '2012-01-02'],
-                                    tz='US/Pacific')
+        # different tz
+        dti3 = pd.DatetimeIndex(['2012-01-01', '2012-01-02'],
+                                tz='US/Pacific')
 
-            exp = pd.Index([pd.Timestamp('2011-01-01', tz=tz),
-                            pd.Timestamp('2011-01-02', tz=tz),
-                            pd.Timestamp('2012-01-01', tz='US/Pacific'),
-                            pd.Timestamp('2012-01-02', tz='US/Pacific')],
-                           dtype=object)
+        exp = pd.Index([pd.Timestamp('2011-01-01', tz=tz),
+                        pd.Timestamp('2011-01-02', tz=tz),
+                        pd.Timestamp('2012-01-01', tz='US/Pacific'),
+                        pd.Timestamp('2012-01-02', tz='US/Pacific')],
+                       dtype=object)
 
-            res = dti1.append(dti3)
-            # tm.assert_index_equal(res, exp)
+        res = dti1.append(dti3)
+        # tm.assert_index_equal(res, exp)
 
-            dts1 = pd.Series(dti1)
-            dts3 = pd.Series(dti3)
-            res = dts1.append(dts3)
-            tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
+        dts1 = pd.Series(dti1)
+        dts3 = pd.Series(dti3)
+        res = dts1.append(dts3)
+        tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
 
-            res = pd.concat([dts1, dts3])
-            tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
+        res = pd.concat([dts1, dts3])
+        tm.assert_series_equal(res, pd.Series(exp, index=[0, 1, 0, 1]))
 
     def test_concatlike_common_period(self):
         # GH 13660
diff --git a/pandas/tests/scalar/timestamp/test_timestamp.py b/pandas/tests/scalar/timestamp/test_timestamp.py
index cde5baf47c18e..fb989b2f19307 100644
--- a/pandas/tests/scalar/timestamp/test_timestamp.py
+++ b/pandas/tests/scalar/timestamp/test_timestamp.py
@@ -124,8 +124,8 @@ def test_names(self, data, time_locale):
         assert np.isnan(nan_ts.day_name(time_locale))
         assert np.isnan(nan_ts.month_name(time_locale))
 
-    @pytest.mark.parametrize('tz', [None, 'UTC', 'US/Eastern', 'Asia/Tokyo'])
-    def test_is_leap_year(self, tz):
+    def test_is_leap_year(self, tz_naive_fixture):
+        tz = tz_naive_fixture
         # GH 13727
         dt = Timestamp('2000-01-01 00:00:00', tz=tz)
         assert dt.is_leap_year
diff --git a/pandas/tests/scalar/timestamp/test_timezones.py b/pandas/tests/scalar/timestamp/test_timezones.py
index f43651dc6f0db..cd0379e7af1a3 100644
--- a/pandas/tests/scalar/timestamp/test_timezones.py
+++ b/pandas/tests/scalar/timestamp/test_timezones.py
@@ -94,11 +94,10 @@ def test_tz_localize_errors_ambiguous(self):
         with pytest.raises(AmbiguousTimeError):
             ts.tz_localize('US/Pacific', errors='coerce')
 
-    @pytest.mark.parametrize('tz', ['UTC', 'Asia/Tokyo',
-                                    'US/Eastern', 'dateutil/US/Pacific'])
     @pytest.mark.parametrize('stamp', ['2014-02-01 09:00', '2014-07-08 09:00',
                                        '2014-11-01 17:00', '2014-11-05 00:00'])
-    def test_tz_localize_roundtrip(self, stamp, tz):
+    def test_tz_localize_roundtrip(self, stamp, tz_aware_fixture):
+        tz = tz_aware_fixture
         ts = Timestamp(stamp)
         localized = ts.tz_localize(tz)
         assert localized == Timestamp(stamp, tz=tz)
@@ -162,11 +161,11 @@ def test_timestamp_tz_localize(self, tz):
     # ------------------------------------------------------------------
     # Timestamp.tz_convert
 
-    @pytest.mark.parametrize('tz', ['UTC', 'Asia/Tokyo',
-                                    'US/Eastern', 'dateutil/US/Pacific'])
     @pytest.mark.parametrize('stamp', ['2014-02-01 09:00', '2014-07-08 09:00',
                                        '2014-11-01 17:00', '2014-11-05 00:00'])
-    def test_tz_convert_roundtrip(self, stamp, tz):
+    def test_tz_convert_roundtrip(self, stamp, tz_aware_fixture):
+        tz = tz_aware_fixture
+
         ts = Timestamp(stamp, tz='UTC')
         converted = ts.tz_convert(tz)
 
diff --git a/pandas/tests/scalar/timestamp/test_unary_ops.py b/pandas/tests/scalar/timestamp/test_unary_ops.py
index 994ff86e6fdf9..aecddab8477fc 100644
--- a/pandas/tests/scalar/timestamp/test_unary_ops.py
+++ b/pandas/tests/scalar/timestamp/test_unary_ops.py
@@ -132,7 +132,6 @@ def test_floor(self):
 
     # --------------------------------------------------------------
     # Timestamp.replace
-    timezones = ['UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/US/Pacific']
 
     def test_replace_naive(self):
         # GH#14621, GH#7825
@@ -141,8 +140,8 @@ def test_replace_naive(self):
         expected = Timestamp('2016-01-01 00:00:00')
         assert result == expected
 
-    @pytest.mark.parametrize('tz', timezones)
-    def test_replace_aware(self, tz):
+    def test_replace_aware(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH#14621, GH#7825
         # replacing datetime components with and w/o presence of a timezone
         ts = Timestamp('2016-01-01 09:00:00', tz=tz)
@@ -150,16 +149,16 @@ def test_replace_aware(self, tz):
         expected = Timestamp('2016-01-01 00:00:00', tz=tz)
         assert result == expected
 
-    @pytest.mark.parametrize('tz', timezones)
-    def test_replace_preserves_nanos(self, tz):
+    def test_replace_preserves_nanos(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH#14621, GH#7825
         ts = Timestamp('2016-01-01 09:00:00.000000123', tz=tz)
         result = ts.replace(hour=0)
         expected = Timestamp('2016-01-01 00:00:00.000000123', tz=tz)
         assert result == expected
 
-    @pytest.mark.parametrize('tz', timezones)
-    def test_replace_multiple(self, tz):
+    def test_replace_multiple(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH#14621, GH#7825
         # replacing datetime components with and w/o presence of a timezone
         # test all
@@ -169,15 +168,15 @@ def test_replace_multiple(self, tz):
         expected = Timestamp('2015-02-02 00:05:05.000005005', tz=tz)
         assert result == expected
 
-    @pytest.mark.parametrize('tz', timezones)
-    def test_replace_invalid_kwarg(self, tz):
+    def test_replace_invalid_kwarg(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH#14621, GH#7825
         ts = Timestamp('2016-01-01 09:00:00.000000123', tz=tz)
         with pytest.raises(TypeError):
             ts.replace(foo=5)
 
-    @pytest.mark.parametrize('tz', timezones)
-    def test_replace_integer_args(self, tz):
+    def test_replace_integer_args(self, tz_aware_fixture):
+        tz = tz_aware_fixture
         # GH#14621, GH#7825
         ts = Timestamp('2016-01-01 09:00:00.000000123', tz=tz)
         with pytest.raises(ValueError):
diff --git a/pandas/util/_test_decorators.py b/pandas/util/_test_decorators.py
index 8ad73538fbec1..6c23b186ef1c4 100644
--- a/pandas/util/_test_decorators.py
+++ b/pandas/util/_test_decorators.py
@@ -187,3 +187,28 @@ def decorated_func(func):
                                   "installed->{installed}".format(
                                       enabled=_USE_NUMEXPR,
                                       installed=_NUMEXPR_INSTALLED))
+
+
+def parametrize_fixture_doc(*args):
+    """
+    Intended for use as a decorator for parametrized fixtures,
+    this function formats the decorated fixture's docstring by
+    replacing the placeholders {0}, {1}, etc. with the positional
+    arguments passed to the decorator, so that the rendered
+    docstring lists the actual fixture parameters.
+
+    Parameters
+    ----------
+    args : iterable
+        Positional arguments for docstring formatting.
+
+    Returns
+    -------
+    documented_fixture : function
+        The decorated fixture function, with its docstring
+        formatted using the given positional arguments.
+    """
+    def documented_fixture(fixture):
+        fixture.__doc__ = fixture.__doc__.format(*args)
+        return fixture
+    return documented_fixture
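For reference, a minimal sketch (not part of the patch) of what parametrize_fixture_doc does: it only rewrites the decorated function's docstring, substituting {0}, {1}, etc. with the arguments given to the decorator; the actual parametrization still comes from pytest.fixture(params=...). The names below are illustrative only.

TIMEZONES = [None, 'UTC', 'US/Eastern', 'Asia/Tokyo', 'dateutil/US/Pacific']


def parametrize_fixture_doc(*args):
    # Substitute the docstring placeholders with the decorator arguments.
    def documented_fixture(fixture):
        fixture.__doc__ = fixture.__doc__.format(*args)
        return fixture
    return documented_fixture


@parametrize_fixture_doc(str(TIMEZONES))
def demo_fixture():
    """
    Fixture for trying timezones including default (None): {0}
    """


print(demo_fixture.__doc__)
# -> Fixture for trying timezones including default (None):
#    [None, 'UTC', 'US/Eastern', 'Asia/Tokyo', 'dateutil/US/Pacific']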