diff --git a/pandas/tests/test_groupby.py b/pandas/tests/test_groupby.py
index 89ef4decfda39..d301016aa1316 100644
--- a/pandas/tests/test_groupby.py
+++ b/pandas/tests/test_groupby.py
@@ -3993,11 +3993,13 @@ def test_groupby_groups_datetimeindex_tz(self):
         df['datetime'] = df['datetime'].apply(
             lambda d: Timestamp(d, tz='US/Pacific'))
 
-        exp_idx1 = pd.DatetimeIndex(
-            ['2011-07-19 07:00:00', '2011-07-19 07:00:00',
-             '2011-07-19 08:00:00', '2011-07-19 08:00:00',
-             '2011-07-19 09:00:00', '2011-07-19 09:00:00'],
-            tz='US/Pacific', name='datetime')
+        exp_idx1 = pd.DatetimeIndex(['2011-07-19 07:00:00',
+                                     '2011-07-19 07:00:00',
+                                     '2011-07-19 08:00:00',
+                                     '2011-07-19 08:00:00',
+                                     '2011-07-19 09:00:00',
+                                     '2011-07-19 09:00:00'],
+                                    tz='US/Pacific', name='datetime')
         exp_idx2 = Index(['a', 'b'] * 3, name='label')
         exp_idx = MultiIndex.from_arrays([exp_idx1, exp_idx2])
         expected = DataFrame({'value1': [0, 3, 1, 4, 2, 5],
@@ -4013,9 +4015,9 @@ def test_groupby_groups_datetimeindex_tz(self):
                         'value2': [1, 2, 3, 1, 2, 3]},
                        index=didx)
 
-        exp_idx = pd.DatetimeIndex(
-            ['2011-07-19 07:00:00', '2011-07-19 08:00:00',
-             '2011-07-19 09:00:00'], tz='Asia/Tokyo')
+        exp_idx = pd.DatetimeIndex(['2011-07-19 07:00:00',
+                                    '2011-07-19 08:00:00',
+                                    '2011-07-19 09:00:00'], tz='Asia/Tokyo')
         expected = DataFrame({'value1': [3, 5, 7], 'value2': [2, 4, 6]},
                              index=exp_idx, columns=['value1', 'value2'])
 
@@ -4032,8 +4034,8 @@ def test_groupby_multi_timezone(self):
         3,2000-01-31 16:50:00,America/Chicago
         4,2000-01-01 16:50:00,America/New_York"""
 
-        df = pd.read_csv(
-            StringIO(data), header=None, names=['value', 'date', 'tz'])
+        df = pd.read_csv(StringIO(data), header=None,
+                         names=['value', 'date', 'tz'])
         result = df.groupby('tz').date.apply(
             lambda x: pd.to_datetime(x).dt.tz_localize(x.name))
 
@@ -4051,14 +4053,54 @@ def test_groupby_multi_timezone(self):
         assert_series_equal(result, expected)
 
         tz = 'America/Chicago'
-        result = pd.to_datetime(df.groupby('tz').date.get_group(
-            tz)).dt.tz_localize(tz)
-        expected = pd.to_datetime(Series(
-            ['2000-01-28 16:47:00', '2000-01-29 16:48:00',
-             '2000-01-31 16:50:00'], index=[0, 1, 3
-                                            ], name='date')).dt.tz_localize(tz)
+        res_values = df.groupby('tz').date.get_group(tz)
+        result = pd.to_datetime(res_values).dt.tz_localize(tz)
+        exp_values = Series(['2000-01-28 16:47:00', '2000-01-29 16:48:00',
+                             '2000-01-31 16:50:00'],
+                            index=[0, 1, 3], name='date')
+        expected = pd.to_datetime(exp_values).dt.tz_localize(tz)
         assert_series_equal(result, expected)
 
+    def test_groupby_groups_periods(self):
+        dates = ['2011-07-19 07:00:00', '2011-07-19 08:00:00',
+                 '2011-07-19 09:00:00', '2011-07-19 07:00:00',
+                 '2011-07-19 08:00:00', '2011-07-19 09:00:00']
+        df = DataFrame({'label': ['a', 'a', 'a', 'b', 'b', 'b'],
+                        'period': [pd.Period(d, freq='H') for d in dates],
+                        'value1': np.arange(6, dtype='int64'),
+                        'value2': [1, 2] * 3})
+
+        exp_idx1 = pd.PeriodIndex(['2011-07-19 07:00:00',
+                                   '2011-07-19 07:00:00',
+                                   '2011-07-19 08:00:00',
+                                   '2011-07-19 08:00:00',
+                                   '2011-07-19 09:00:00',
+                                   '2011-07-19 09:00:00'],
+                                  freq='H', name='period')
+        exp_idx2 = Index(['a', 'b'] * 3, name='label')
+        exp_idx = MultiIndex.from_arrays([exp_idx1, exp_idx2])
+        expected = DataFrame({'value1': [0, 3, 1, 4, 2, 5],
+                              'value2': [1, 2, 2, 1, 1, 2]},
+                             index=exp_idx, columns=['value1', 'value2'])
+
+        result = df.groupby(['period', 'label']).sum()
+        assert_frame_equal(result, expected)
+
+        # by level
+        didx = pd.PeriodIndex(dates, freq='H')
+        df = DataFrame({'value1': np.arange(6, dtype='int64'),
+                        'value2': [1, 2, 3, 1, 2, 3]},
+                       index=didx)
+
+        exp_idx = pd.PeriodIndex(['2011-07-19 07:00:00',
+                                  '2011-07-19 08:00:00',
+                                  '2011-07-19 09:00:00'], freq='H')
+        expected = DataFrame({'value1': [3, 5, 7], 'value2': [2, 4, 6]},
+                             index=exp_idx, columns=['value1', 'value2'])
+
+        result = df.groupby(level=0).sum()
+        assert_frame_equal(result, expected)
+
     def test_groupby_reindex_inside_function(self):
         from pandas.tseries.api import DatetimeIndex
 
diff --git a/pandas/tools/tests/test_merge.py b/pandas/tools/tests/test_merge.py
index 47eea60b2b496..d5ddfe624e240 100644
--- a/pandas/tools/tests/test_merge.py
+++ b/pandas/tools/tests/test_merge.py
@@ -1031,6 +1031,36 @@ def test_merge_on_datetime64tz(self):
         result = pd.merge(left, right, on='key', how='outer')
         assert_frame_equal(result, expected)
 
+    def test_merge_on_periods(self):
+        left = pd.DataFrame({'key': pd.period_range('20151010', periods=2,
+                                                    freq='D'),
+                             'value': [1, 2]})
+        right = pd.DataFrame({'key': pd.period_range('20151011', periods=3,
+                                                     freq='D'),
+                              'value': [1, 2, 3]})
+
+        expected = DataFrame({'key': pd.period_range('20151010', periods=4,
+                                                     freq='D'),
+                              'value_x': [1, 2, np.nan, np.nan],
+                              'value_y': [np.nan, 1, 2, 3]})
+        result = pd.merge(left, right, on='key', how='outer')
+        assert_frame_equal(result, expected)
+
+        left = pd.DataFrame({'value': pd.period_range('20151010', periods=2,
+                                                      freq='D'),
+                             'key': [1, 2]})
+        right = pd.DataFrame({'value': pd.period_range('20151011', periods=2,
+                                                       freq='D'),
+                              'key': [2, 3]})
+
+        exp_x = pd.period_range('20151010', periods=2, freq='D')
+        exp_y = pd.period_range('20151011', periods=2, freq='D')
+        expected = DataFrame({'value_x': list(exp_x) + [pd.NaT],
+                              'value_y': [pd.NaT] + list(exp_y),
+                              'key': [1., 2, 3]})
+        result = pd.merge(left, right, on='key', how='outer')
+        assert_frame_equal(result, expected)
+
     def test_concat_NaT_series(self):
         # GH 11693
         # test for merging NaT series with datetime series.
@@ -1131,6 +1161,39 @@ def test_concat_tz_series(self):
         result = pd.concat([first, second])
         self.assertEqual(result[0].dtype, 'datetime64[ns, Europe/London]')
 
+    def test_concat_period_series(self):
+        x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D'))
+        y = Series(pd.PeriodIndex(['2015-10-01', '2016-01-01'], freq='D'))
+        expected = Series([x[0], x[1], y[0], y[1]], dtype='object')
+        result = concat([x, y], ignore_index=True)
+        tm.assert_series_equal(result, expected)
+
+        # different freq
+        x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D'))
+        y = Series(pd.PeriodIndex(['2015-10-01', '2016-01-01'], freq='M'))
+        expected = Series([x[0], x[1], y[0], y[1]], dtype='object')
+        result = concat([x, y], ignore_index=True)
+        tm.assert_series_equal(result, expected)
+
+        x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D'))
+        y = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='M'))
+        expected = Series([x[0], x[1], y[0], y[1]], dtype='object')
+        result = concat([x, y], ignore_index=True)
+        tm.assert_series_equal(result, expected)
+
+        # non-period
+        x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D'))
+        y = Series(pd.DatetimeIndex(['2015-11-01', '2015-12-01']))
+        expected = Series([x[0], x[1], y[0], y[1]], dtype='object')
+        result = concat([x, y], ignore_index=True)
+        tm.assert_series_equal(result, expected)
+
+        x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D'))
+        y = Series(['A', 'B'])
+        expected = Series([x[0], x[1], y[0], y[1]], dtype='object')
+        result = concat([x, y], ignore_index=True)
+        tm.assert_series_equal(result, expected)
+
     def test_indicator(self):
         # PR #10054. xref #7412 and closes #8790.
         df1 = DataFrame({'col1': [0, 1], 'col_left': [
diff --git a/pandas/tools/tests/test_pivot.py b/pandas/tools/tests/test_pivot.py
index 845f50aa65d70..994269d36cd85 100644
--- a/pandas/tools/tests/test_pivot.py
+++ b/pandas/tools/tests/test_pivot.py
@@ -240,6 +240,39 @@ def test_pivot_with_tz(self):
         pv = df.pivot(index='dt1', columns='dt2', values='data1')
         tm.assert_frame_equal(pv, expected)
 
+    def test_pivot_periods(self):
+        df = DataFrame({'p1': [pd.Period('2013-01-01', 'D'),
+                               pd.Period('2013-01-02', 'D'),
+                               pd.Period('2013-01-01', 'D'),
+                               pd.Period('2013-01-02', 'D')],
+                        'p2': [pd.Period('2013-01', 'M'),
+                               pd.Period('2013-01', 'M'),
+                               pd.Period('2013-02', 'M'),
+                               pd.Period('2013-02', 'M')],
+                        'data1': np.arange(4, dtype='int64'),
+                        'data2': np.arange(4, dtype='int64')})
+
+        exp_col1 = Index(['data1', 'data1', 'data2', 'data2'])
+        exp_col2 = pd.PeriodIndex(['2013-01', '2013-02'] * 2,
+                                  name='p2', freq='M')
+        exp_col = pd.MultiIndex.from_arrays([exp_col1, exp_col2])
+        expected = DataFrame([[0, 2, 0, 2], [1, 3, 1, 3]],
+                             index=pd.PeriodIndex(['2013-01-01', '2013-01-02'],
+                                                  name='p1', freq='D'),
+                             columns=exp_col)
+
+        pv = df.pivot(index='p1', columns='p2')
+        tm.assert_frame_equal(pv, expected)
+
+        expected = DataFrame([[0, 2], [1, 3]],
+                             index=pd.PeriodIndex(['2013-01-01', '2013-01-02'],
+                                                  name='p1', freq='D'),
+                             columns=pd.PeriodIndex(['2013-01', '2013-02'],
+                                                    name='p2', freq='M'))
+
+        pv = df.pivot(index='p1', columns='p2', values='data1')
+        tm.assert_frame_equal(pv, expected)
+
     def test_margins(self):
         def _check_output(result, values_col, index=['A', 'B'],
                           columns=['C'],
diff --git a/pandas/tseries/tests/test_period.py b/pandas/tseries/tests/test_period.py
index ee6f1a8b0e2db..95d84bba4b5db 100644
--- a/pandas/tseries/tests/test_period.py
+++ b/pandas/tseries/tests/test_period.py
@@ -2877,6 +2877,17 @@ def test_union(self):
         index3 = period_range('1/1/2000', '1/20/2000', freq='2D')
         self.assertRaises(ValueError, index.join, index3)
 
+    def test_union_dataframe_index(self):
+        rng1 = pd.period_range('1/1/1999', '1/1/2012', freq='M')
+        s1 = pd.Series(np.random.randn(len(rng1)), rng1)
+
+        rng2 = pd.period_range('1/1/1980', '12/1/2001', freq='M')
+        s2 = pd.Series(np.random.randn(len(rng2)), rng2)
+        df = pd.DataFrame({'s1': s1, 's2': s2})
+
+        exp = pd.period_range('1/1/1980', '1/1/2012', freq='M')
+        self.assert_index_equal(df.index, exp)
+
     def test_intersection(self):
         index = period_range('1/1/2000', '1/20/2000', freq='D')
 
@@ -2897,6 +2908,63 @@ def test_intersection(self):
         index3 = period_range('1/1/2000', '1/20/2000', freq='2D')
         self.assertRaises(ValueError, index.intersection, index3)
 
+    def test_intersection_cases(self):
+        base = period_range('6/1/2000', '6/30/2000', freq='D', name='idx')
+
+        # if target has the same name, it is preserved
+        rng2 = period_range('5/15/2000', '6/20/2000', freq='D', name='idx')
+        expected2 = period_range('6/1/2000', '6/20/2000', freq='D',
+                                 name='idx')
+
+        # if target name is different, it will be reset
+        rng3 = period_range('5/15/2000', '6/20/2000', freq='D', name='other')
+        expected3 = period_range('6/1/2000', '6/20/2000', freq='D',
+                                 name=None)
+
+        rng4 = period_range('7/1/2000', '7/31/2000', freq='D', name='idx')
+        expected4 = PeriodIndex([], name='idx', freq='D')
+
+        for (rng, expected) in [(rng2, expected2), (rng3, expected3),
+                                (rng4, expected4)]:
+            result = base.intersection(rng)
+            self.assertTrue(result.equals(expected))
+            self.assertEqual(result.name, expected.name)
+            self.assertEqual(result.freq, expected.freq)
+
+        # non-monotonic
+        base = PeriodIndex(['2011-01-05', '2011-01-04', '2011-01-02',
+                            '2011-01-03'], freq='D', name='idx')
+
+        rng2 = PeriodIndex(['2011-01-04', '2011-01-02',
+                            '2011-02-02', '2011-02-03'],
+                           freq='D', name='idx')
+        expected2 = PeriodIndex(['2011-01-04', '2011-01-02'], freq='D',
+                                name='idx')
+
+        rng3 = PeriodIndex(['2011-01-04', '2011-01-02', '2011-02-02',
+                            '2011-02-03'],
+                           freq='D', name='other')
+        expected3 = PeriodIndex(['2011-01-04', '2011-01-02'], freq='D',
+                                name=None)
+
+        rng4 = period_range('7/1/2000', '7/31/2000', freq='D', name='idx')
+        expected4 = PeriodIndex([], freq='D', name='idx')
+
+        for (rng, expected) in [(rng2, expected2), (rng3, expected3),
+                                (rng4, expected4)]:
+            result = base.intersection(rng)
+            self.assertTrue(result.equals(expected))
+            self.assertEqual(result.name, expected.name)
+            self.assertEqual(result.freq, 'D')
+
+        # empty same freq
+        rng = date_range('6/1/2000', '6/15/2000', freq='T')
+        result = rng[0:0].intersection(rng)
+        self.assertEqual(len(result), 0)
+
+        result = rng.intersection(rng[0:0])
+        self.assertEqual(len(result), 0)
+
     def test_fields(self):
         # year, month, day, hour, minute
         # second, weekofyear, week, dayofweek, weekday, dayofyear, quarter
@@ -3734,6 +3802,86 @@ def test_pi_nat_comp(self):
             idx1 == diff
 
 
+
+class TestSeriesPeriod(tm.TestCase):
+
+    def setUp(self):
+        self.series = Series(period_range('2000-01-01', periods=10, freq='D'))
+
+    def test_auto_conversion(self):
+        series = Series(list(period_range('2000-01-01', periods=10, freq='D')))
+        self.assertEqual(series.dtype, 'object')
+
+    def test_constructor_cant_cast_period(self):
+        with tm.assertRaises(TypeError):
+            Series(period_range('2000-01-01', periods=10, freq='D'),
+                   dtype=float)
+
+    def test_series_comparison_scalars(self):
+        val = pd.Period('2000-01-04', freq='D')
+        result = self.series > val
+        expected = np.array([x > val for x in self.series])
+        self.assert_numpy_array_equal(result, expected)
+
+        val = self.series[5]
+        result = self.series > val
+        expected = np.array([x > val for x in self.series])
+        self.assert_numpy_array_equal(result, expected)
+
+    def test_between(self):
+        left, right = self.series[[2, 7]]
+        result = self.series.between(left, right)
+        expected = (self.series >= left) & (self.series <= right)
+        assert_series_equal(result, expected)
+
+    # ---------------------------------------------------------------------
+    # NaT support
+
+    """
+    # ToDo: Enable when support period dtype
+    def test_NaT_scalar(self):
+        series = Series([0, 1000, 2000, iNaT], dtype='period[D]')
+
+        val = series[3]
+        self.assertTrue(com.isnull(val))
+
+        series[2] = val
+        self.assertTrue(com.isnull(series[2]))
+
+    def test_NaT_cast(self):
+        result = Series([np.nan]).astype('period[D]')
+        expected = Series([NaT])
+        assert_series_equal(result, expected)
+    """
+
+    def test_set_none_nan(self):
+        # currently Period is stored as object dtype, not as NaT
+        self.series[3] = None
+        self.assertIs(self.series[3], None)
+
+        self.series[3:5] = None
+        self.assertIs(self.series[4], None)
+
+        self.series[5] = np.nan
+        self.assertTrue(np.isnan(self.series[5]))
+
+        self.series[5:7] = np.nan
+        self.assertTrue(np.isnan(self.series[6]))
+
+    def test_intercept_astype_object(self):
+        expected = self.series.astype('object')
+
+        df = DataFrame({'a': self.series,
+                        'b': np.random.randn(len(self.series))})
+
+        result = df.values.squeeze()
+        self.assertTrue((result[:, 0] == expected.values).all())
+
+        df = DataFrame({'a': self.series, 'b': ['foo'] * len(self.series)})
+
+        result = df.values.squeeze()
+        self.assertTrue((result[:, 0] == expected.values).all())
+
+
 if __name__ == '__main__':
     import nose
     nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
diff --git a/pandas/tseries/tests/test_timeseries.py b/pandas/tseries/tests/test_timeseries.py
index b83c51b6a3ab6..6167edd9499ab 100644
--- a/pandas/tseries/tests/test_timeseries.py
+++ b/pandas/tseries/tests/test_timeseries.py
@@ -1325,6 +1325,47 @@ def test_date_range_negative_freq(self):
         self.assert_index_equal(rng, exp)
         self.assertEqual(rng.freq, '-2M')
 
+    def test_date_range_bms_bug(self):
+        # #1645
+        rng = date_range('1/1/2000', periods=10, freq='BMS')
+
+        ex_first = Timestamp('2000-01-03')
+        self.assertEqual(rng[0], ex_first)
+
+    def test_date_range_businesshour(self):
+        idx = DatetimeIndex(['2014-07-04 09:00', '2014-07-04 10:00',
+                             '2014-07-04 11:00',
+                             '2014-07-04 12:00', '2014-07-04 13:00',
+                             '2014-07-04 14:00',
+                             '2014-07-04 15:00', '2014-07-04 16:00'],
+                            freq='BH')
+        rng = date_range('2014-07-04 09:00', '2014-07-04 16:00', freq='BH')
+        tm.assert_index_equal(idx, rng)
+
+        idx = DatetimeIndex(
+            ['2014-07-04 16:00', '2014-07-07 09:00'], freq='BH')
+        rng = date_range('2014-07-04 16:00', '2014-07-07 09:00', freq='BH')
+        tm.assert_index_equal(idx, rng)
+
+        idx = DatetimeIndex(['2014-07-04 09:00', '2014-07-04 10:00',
+                             '2014-07-04 11:00',
+                             '2014-07-04 12:00', '2014-07-04 13:00',
+                             '2014-07-04 14:00',
+                             '2014-07-04 15:00', '2014-07-04 16:00',
+                             '2014-07-07 09:00', '2014-07-07 10:00',
+                             '2014-07-07 11:00',
+                             '2014-07-07 12:00', '2014-07-07 13:00',
+                             '2014-07-07 14:00',
+                             '2014-07-07 15:00', '2014-07-07 16:00',
+                             '2014-07-08 09:00', '2014-07-08 10:00',
+                             '2014-07-08 11:00',
+                             '2014-07-08 12:00', '2014-07-08 13:00',
+                             '2014-07-08 14:00',
+                             '2014-07-08 15:00', '2014-07-08 16:00'],
+                            freq='BH')
+        rng = date_range('2014-07-04 09:00', '2014-07-08 16:00', freq='BH')
+        tm.assert_index_equal(idx, rng)
+
     def test_first_subset(self):
         ts = _simple_ts('1/1/2000', '1/1/2010', freq='12h')
         result = ts.first('10d')
@@ -2716,6 +2757,26 @@ def test_union_bug_4564(self):
         exp = DatetimeIndex(sorted(set(list(left)) | set(list(right))))
         self.assertTrue(result.equals(exp))
 
+    def test_union_freq_both_none(self):
+        # GH11086
+        expected = bdate_range('20150101', periods=10)
+        expected.freq = None
+
+        result = expected.union(expected)
+        tm.assert_index_equal(result, expected)
+        self.assertIsNone(result.freq)
+
+    def test_union_dataframe_index(self):
+        rng1 = date_range('1/1/1999', '1/1/2012', freq='MS')
+        s1 = Series(np.random.randn(len(rng1)), rng1)
+
+        rng2 = date_range('1/1/1980', '12/1/2001', freq='MS')
+        s2 = Series(np.random.randn(len(rng2)), rng2)
+        df = DataFrame({'s1': s1, 's2': s2})
+
+        exp = pd.date_range('1/1/1980', '1/1/2012', freq='MS')
+        self.assert_index_equal(df.index, exp)
+
     def test_intersection_bug_1708(self):
         from pandas import DateOffset
         index_1 = date_range('1/1/2012', periods=4, freq='12H')
@@ -2724,14 +2785,80 @@ def test_intersection_bug_1708(self):
         result = index_1 & index_2
         self.assertEqual(len(result), 0)
 
-    def test_union_freq_both_none(self):
-        # GH11086
-        expected = bdate_range('20150101', periods=10)
-        expected.freq = None
+    def test_intersection(self):
+        # GH 4690 (with tz)
+        for tz in [None, 'Asia/Tokyo', 'US/Eastern', 'dateutil/US/Pacific']:
+            base = date_range('6/1/2000', '6/30/2000', freq='D', name='idx')
 
-        result = expected.union(expected)
-        tm.assert_index_equal(result, expected)
-        self.assertIsNone(result.freq)
+            # if target has the same name, it is preserved
+            rng2 = date_range('5/15/2000', '6/20/2000', freq='D', name='idx')
+            expected2 = date_range('6/1/2000', '6/20/2000', freq='D',
+                                   name='idx')
+
+            # if target name is different, it will be reset
+            rng3 = date_range('5/15/2000', '6/20/2000', freq='D', name='other')
+            expected3 = date_range('6/1/2000', '6/20/2000', freq='D',
+                                   name=None)
+
+            rng4 = date_range('7/1/2000', '7/31/2000', freq='D', name='idx')
+            expected4 = DatetimeIndex([], name='idx')
+
+            for (rng, expected) in [(rng2, expected2), (rng3, expected3),
+                                    (rng4, expected4)]:
+                result = base.intersection(rng)
+                self.assertTrue(result.equals(expected))
+                self.assertEqual(result.name, expected.name)
+                self.assertEqual(result.freq, expected.freq)
+                self.assertEqual(result.tz, expected.tz)
+
+            # non-monotonic
+            base = DatetimeIndex(['2011-01-05', '2011-01-04',
+                                  '2011-01-02', '2011-01-03'],
+                                 tz=tz, name='idx')
+
+            rng2 = DatetimeIndex(['2011-01-04', '2011-01-02',
+                                  '2011-02-02', '2011-02-03'],
+                                 tz=tz, name='idx')
+            expected2 = DatetimeIndex(
+                ['2011-01-04', '2011-01-02'], tz=tz, name='idx')
+
+            rng3 = DatetimeIndex(['2011-01-04', '2011-01-02',
+                                  '2011-02-02', '2011-02-03'],
+                                 tz=tz, name='other')
+            expected3 = DatetimeIndex(
+                ['2011-01-04', '2011-01-02'], tz=tz, name=None)
+
+            # GH 7880
+            rng4 = date_range('7/1/2000', '7/31/2000', freq='D', tz=tz,
+                              name='idx')
+            expected4 = DatetimeIndex([], tz=tz, name='idx')
+
+            for (rng, expected) in [(rng2, expected2), (rng3, expected3),
+                                    (rng4, expected4)]:
+                result = base.intersection(rng)
+                self.assertTrue(result.equals(expected))
+                self.assertEqual(result.name, expected.name)
+                self.assertIsNone(result.freq)
+                self.assertEqual(result.tz, expected.tz)
+
+        # empty same freq GH2129
+        rng = date_range('6/1/2000', '6/15/2000', freq='T')
+        result = rng[0:0].intersection(rng)
+        self.assertEqual(len(result), 0)
+
+        result = rng.intersection(rng[0:0])
+        self.assertEqual(len(result), 0)
+
+    def test_string_index_series_name_converted(self):
+        # #1644
+        df = DataFrame(np.random.randn(10, 4),
+                       index=date_range('1/1/2000', periods=10))
+
+        result = df.ix['1/3/2000']
+        self.assertEqual(result.name, df.index[2])
+
+        result = df.T['1/3/2000']
+        self.assertEqual(result.name, df.index[2])
 
     # GH 10699
     def test_datetime64_with_DateOffset(self):
@@ -3823,131 +3950,6 @@ def test_intercept_astype_object(self):
         result = df.values.squeeze()
         self.assertTrue((result[:, 0] == expected.values).all())
 
-    def test_union(self):
-        rng1 = date_range('1/1/1999', '1/1/2012', freq='MS')
-        s1 = Series(np.random.randn(len(rng1)), rng1)
-
-        rng2 = date_range('1/1/1980', '12/1/2001', freq='MS')
-        s2 = Series(np.random.randn(len(rng2)), rng2)
-        df = DataFrame({'s1': s1, 's2': s2})
-        self.assertEqual(df.index.values.dtype, np.dtype('M8[ns]'))
-
-    def test_intersection(self):
-        # GH 4690 (with tz)
-        for tz in [None, 'Asia/Tokyo', 'US/Eastern', 'dateutil/US/Pacific']:
-            base = date_range('6/1/2000', '6/30/2000', freq='D', name='idx')
-
-            # if target has the same name, it is preserved
-            rng2 = date_range('5/15/2000', '6/20/2000', freq='D', name='idx')
-            expected2 = date_range('6/1/2000', '6/20/2000', freq='D',
-                                   name='idx')
-
-            # if target name is different, it will be reset
-            rng3 = date_range('5/15/2000', '6/20/2000', freq='D', name='other')
-            expected3 = date_range('6/1/2000', '6/20/2000', freq='D',
-                                   name=None)
-
-            rng4 = date_range('7/1/2000', '7/31/2000', freq='D', name='idx')
-            expected4 = DatetimeIndex([], name='idx')
-
-            for (rng, expected) in [(rng2, expected2), (rng3, expected3),
-                                    (rng4, expected4)]:
-                result = base.intersection(rng)
-                self.assertTrue(result.equals(expected))
-                self.assertEqual(result.name, expected.name)
-                self.assertEqual(result.freq, expected.freq)
-                self.assertEqual(result.tz, expected.tz)
-
-            # non-monotonic
-            base = DatetimeIndex(['2011-01-05', '2011-01-04',
-                                  '2011-01-02', '2011-01-03'],
-                                 tz=tz, name='idx')
-
-            rng2 = DatetimeIndex(['2011-01-04', '2011-01-02',
-                                  '2011-02-02', '2011-02-03'],
-                                 tz=tz, name='idx')
-            expected2 = DatetimeIndex(
-                ['2011-01-04', '2011-01-02'], tz=tz, name='idx')
-
-            rng3 = DatetimeIndex(['2011-01-04', '2011-01-02',
-                                  '2011-02-02', '2011-02-03'],
-                                 tz=tz, name='other')
-            expected3 = DatetimeIndex(
-                ['2011-01-04', '2011-01-02'], tz=tz, name=None)
-
-            # GH 7880
-            rng4 = date_range('7/1/2000', '7/31/2000', freq='D', tz=tz,
-                              name='idx')
-            expected4 = DatetimeIndex([], tz=tz, name='idx')
-
-            for (rng, expected) in [(rng2, expected2), (rng3, expected3),
-                                    (rng4, expected4)]:
-                result = base.intersection(rng)
-                self.assertTrue(result.equals(expected))
-                self.assertEqual(result.name, expected.name)
-                self.assertIsNone(result.freq)
-                self.assertEqual(result.tz, expected.tz)
-
-        # empty same freq GH2129
-        rng = date_range('6/1/2000', '6/15/2000', freq='T')
-        result = rng[0:0].intersection(rng)
-        self.assertEqual(len(result), 0)
-
-        result = rng.intersection(rng[0:0])
-        self.assertEqual(len(result), 0)
-
-    def test_date_range_bms_bug(self):
-        # #1645
-        rng = date_range('1/1/2000', periods=10, freq='BMS')
-
-        ex_first = Timestamp('2000-01-03')
-        self.assertEqual(rng[0], ex_first)
-
-    def test_date_range_businesshour(self):
-        idx = DatetimeIndex(['2014-07-04 09:00', '2014-07-04 10:00',
-                             '2014-07-04 11:00',
-                             '2014-07-04 12:00', '2014-07-04 13:00',
-                             '2014-07-04 14:00',
-                             '2014-07-04 15:00', '2014-07-04 16:00'],
-                            freq='BH')
-        rng = date_range('2014-07-04 09:00', '2014-07-04 16:00', freq='BH')
-        tm.assert_index_equal(idx, rng)
-
-        idx = DatetimeIndex(
-            ['2014-07-04 16:00', '2014-07-07 09:00'], freq='BH')
-        rng = date_range('2014-07-04 16:00', '2014-07-07 09:00', freq='BH')
-        tm.assert_index_equal(idx, rng)
-
-        idx = DatetimeIndex(['2014-07-04 09:00', '2014-07-04 10:00',
-                             '2014-07-04 11:00',
-                             '2014-07-04 12:00', '2014-07-04 13:00',
-                             '2014-07-04 14:00',
-                             '2014-07-04 15:00', '2014-07-04 16:00',
-                             '2014-07-07 09:00', '2014-07-07 10:00',
-                             '2014-07-07 11:00',
-                             '2014-07-07 12:00', '2014-07-07 13:00',
-                             '2014-07-07 14:00',
-                             '2014-07-07 15:00', '2014-07-07 16:00',
-                             '2014-07-08 09:00', '2014-07-08 10:00',
-                             '2014-07-08 11:00',
-                             '2014-07-08 12:00', '2014-07-08 13:00',
-                             '2014-07-08 14:00',
-                             '2014-07-08 15:00', '2014-07-08 16:00'],
-                            freq='BH')
-        rng = date_range('2014-07-04 09:00', '2014-07-08 16:00', freq='BH')
-        tm.assert_index_equal(idx, rng)
-
-    def test_string_index_series_name_converted(self):
-        # #1644
-        df = DataFrame(np.random.randn(10, 4),
-                       index=date_range('1/1/2000', periods=10))
-
-        result = df.ix['1/3/2000']
-        self.assertEqual(result.name, df.index[2])
-
-        result = df.T['1/3/2000']
-        self.assertEqual(result.name, df.index[2])
-
 
 class TestTimestamp(tm.TestCase):
     def test_class_ops_pytz(self):