
Commit ad2723c

Merge remote-tracking branch 'upstream/master' into dtype-only
2 parents: 9e4faf8 + d9a037e


46 files changed, +591 -465 lines (only a subset of the changed files is shown below)

doc/source/whatsnew/v0.24.0.rst

+1

@@ -1076,6 +1076,7 @@ Deprecations
 - Timezone converting a tz-aware ``datetime.datetime`` or :class:`Timestamp` with :class:`Timestamp` and the ``tz`` argument is now deprecated. Instead, use :meth:`Timestamp.tz_convert` (:issue:`23579`)
 - :func:`pandas.types.is_period` is deprecated in favor of `pandas.types.is_period_dtype` (:issue:`23917`)
 - :func:`pandas.types.is_datetimetz` is deprecated in favor of `pandas.types.is_datetime64tz` (:issue:`23917`)
+- Creating a :class:`TimedeltaIndex` or :class:`DatetimeIndex` by passing range arguments `start`, `end`, and `periods` is deprecated in favor of :func:`timedelta_range` and :func:`date_range` (:issue:`23919`)

 .. _whatsnew_0240.deprecations.datetimelike_int_ops:
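Illustration (not part of the commit): a minimal sketch of the deprecation described above, assuming the pandas 0.24 API. The deprecated index constructors appear only in comments; the supported replacements are pandas.date_range and pandas.timedelta_range.

    import pandas as pd

    # Recommended constructors after this deprecation (GH 23919):
    dti = pd.date_range(start="2018-01-01", periods=3, freq="D")
    tdi = pd.timedelta_range(start="1 day", periods=3, freq="D")

    # The deprecated spellings were, e.g.:
    #   pd.DatetimeIndex(start="2018-01-01", periods=3, freq="D")
    #   pd.TimedeltaIndex(start="1 day", periods=3, freq="D")
    # which emit a FutureWarning in pandas 0.24.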

pandas/_libs/tslibs/timestamps.pyx

+4 -2

@@ -377,13 +377,15 @@ cdef class _Timestamp(datetime):
             neg_other = -other
             return self + neg_other

+        typ = getattr(other, '_typ', None)
+
         # a Timestamp-DatetimeIndex -> yields a negative TimedeltaIndex
-        elif getattr(other, '_typ', None) == 'datetimeindex':
+        if typ in ('datetimeindex', 'datetimearray'):
             # timezone comparison is performed in DatetimeIndex._sub_datelike
             return -other.__sub__(self)

         # a Timestamp-TimedeltaIndex -> yields a negative TimedeltaIndex
-        elif getattr(other, '_typ', None) == 'timedeltaindex':
+        elif typ in ('timedeltaindex', 'timedeltaarray'):
             return (-other).__add__(self)

         elif other is NaT:
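Illustration (not part of the commit): a small usage sketch of the behavior this branch implements.

    import pandas as pd

    ts = pd.Timestamp("2018-01-03")
    dti = pd.date_range("2018-01-01", periods=3, freq="D")

    # Timestamp - DatetimeIndex dispatches through the branch above and
    # yields the negated element-wise difference as a TimedeltaIndex.
    print(ts - dti)   # TimedeltaIndex(['2 days', '1 days', '0 days'], ...)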

pandas/core/arrays/timedeltas.py

+36 -7

@@ -7,7 +7,7 @@

 import numpy as np

-from pandas._libs import tslibs
+from pandas._libs import algos, tslibs
 from pandas._libs.tslibs import NaT, Timedelta, Timestamp, iNaT
 from pandas._libs.tslibs.fields import get_timedelta_field
 from pandas._libs.tslibs.timedeltas import (

@@ -24,7 +24,7 @@
 from pandas.core.dtypes.missing import isna

 from pandas.core import ops
-from pandas.core.algorithms import checked_add_with_arr
+from pandas.core.algorithms import checked_add_with_arr, unique1d
 import pandas.core.common as com

 from pandas.tseries.frequencies import to_offset

@@ -162,15 +162,29 @@ def _simple_new(cls, values, freq=None, dtype=_TD_DTYPE):
         result._freq = freq
         return result

-    def __new__(cls, values, freq=None, dtype=_TD_DTYPE):
+    def __new__(cls, values, freq=None, dtype=_TD_DTYPE, copy=False):

         freq, freq_infer = dtl.maybe_infer_freq(freq)

-        values = np.array(values, copy=False)
-        if values.dtype == np.object_:
-            values = array_to_timedelta64(values)
+        values, inferred_freq = sequence_to_td64ns(
+            values, copy=copy, unit=None)
+        if inferred_freq is not None:
+            if freq is not None and freq != inferred_freq:
+                raise ValueError('Inferred frequency {inferred} from passed '
+                                 'values does not conform to passed frequency '
+                                 '{passed}'
+                                 .format(inferred=inferred_freq,
+                                         passed=freq.freqstr))
+            elif freq is None:
+                freq = inferred_freq
+                freq_infer = False

         result = cls._simple_new(values, freq=freq)
+        # check that we are matching freqs
+        if inferred_freq is None and len(result) > 0:
+            if freq is not None and not freq_infer:
+                cls._validate_frequency(result, freq)
+
         if freq_infer:
             result.freq = to_offset(result.inferred_freq)

@@ -227,6 +241,21 @@ def _validate_fill_value(self, fill_value):
                              "Got '{got}'.".format(got=fill_value))
         return fill_value

+    # monotonicity/uniqueness properties are called via frequencies.infer_freq,
+    # see GH#23789
+
+    @property
+    def _is_monotonic_increasing(self):
+        return algos.is_monotonic(self.asi8, timelike=True)[0]
+
+    @property
+    def _is_monotonic_decreasing(self):
+        return algos.is_monotonic(self.asi8, timelike=True)[1]
+
+    @property
+    def _is_unique(self):
+        return len(unique1d(self.asi8)) == len(self)
+
     # ----------------------------------------------------------------
     # Arithmetic Methods

@@ -283,7 +312,7 @@ def _add_datetimelike_scalar(self, other):
         result = checked_add_with_arr(i8, other.value,
                                       arr_mask=self._isnan)
         result = self._maybe_mask_results(result)
-        return DatetimeArrayMixin(result, tz=other.tz)
+        return DatetimeArrayMixin(result, tz=other.tz, freq=self.freq)

     def _addsub_offset_array(self, other, op):
         # Add or subtract Array-like of DateOffset objects
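Illustration (not part of the commit): a sketch of the frequency validation this rewritten __new__ wires up, exercised through the public TimedeltaIndex constructor, which routes through this array code in pandas 0.24.

    import pandas as pd

    # A frequency the values cannot satisfy raises the ValueError added above.
    try:
        pd.TimedeltaIndex(['1 days', '2 days', '3 days'], freq='2D')
    except ValueError as exc:
        print(exc)   # Inferred frequency ... does not conform to passed frequency 2D

    # A consistent frequency passes validation and is stored on the result.
    tdi = pd.TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D')
    print(tdi.freq)   # <Day>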

pandas/core/groupby/ops.py

-1

@@ -7,7 +7,6 @@
 """

 import collections
-import copy

 import numpy as np

pandas/core/indexes/datetimes.py

+40 -14

@@ -50,7 +50,13 @@ def _new_DatetimeIndex(cls, d):
     # so need to localize
     tz = d.pop('tz', None)

-    result = cls.__new__(cls, verify_integrity=False, **d)
+    with warnings.catch_warnings():
+        # we ignore warnings from passing verify_integrity=False
+        # TODO: If we knew what was going in to **d, we might be able to
+        # go through _simple_new instead
+        warnings.simplefilter("ignore")
+        result = cls.__new__(cls, verify_integrity=False, **d)
+
     if tz is not None:
         result = result.tz_localize('UTC').tz_convert(tz)
     return result

@@ -220,10 +226,20 @@ def __new__(cls, data=None,
                 freq=None, start=None, end=None, periods=None, tz=None,
                 normalize=False, closed=None, ambiguous='raise',
                 dayfirst=False, yearfirst=False, dtype=None,
-                copy=False, name=None, verify_integrity=True):
+                copy=False, name=None, verify_integrity=None):
+
+        if verify_integrity is not None:
+            warnings.warn("The 'verify_integrity' argument is deprecated, "
+                          "will be removed in a future version.",
+                          FutureWarning, stacklevel=2)
+        else:
+            verify_integrity = True

         if data is None:
-            # TODO: Remove this block and associated kwargs; GH#20535
+            warnings.warn("Creating a DatetimeIndex by passing range "
+                          "endpoints is deprecated. Use "
+                          "`pandas.date_range` instead.",
+                          FutureWarning, stacklevel=2)
             result = cls._generate_range(start, end, periods,
                                          freq=freq, tz=tz, normalize=normalize,
                                          closed=closed, ambiguous=ambiguous)
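Illustration (not part of the commit): a sketch of the two FutureWarnings introduced in this hunk, assuming pandas 0.24 behavior (later versions remove these code paths entirely).

    import warnings
    import pandas as pd

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # deprecated: passing verify_integrity explicitly
        pd.DatetimeIndex(['2018-01-01', '2018-01-02'], verify_integrity=True)
        # deprecated: constructing from range endpoints instead of date_range
        pd.DatetimeIndex(start='2018-01-01', periods=2, freq='D')

    print([str(w.message) for w in caught])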
@@ -756,8 +772,8 @@ def snap(self, freq='S'):
             snapped[i] = s

         # we know it conforms; skip check
-        return DatetimeIndex(snapped, freq=freq, verify_integrity=False)
-        # TODO: what about self.name? if so, use shallow_copy?
+        return DatetimeIndex._simple_new(snapped, freq=freq)
+        # TODO: what about self.name? tz? if so, use shallow_copy?

     def unique(self, level=None):
         if level is not None:

@@ -1135,6 +1151,11 @@ def slice_indexer(self, start=None, end=None, step=None, kind=None):
     # --------------------------------------------------------------------
     # Wrapping DatetimeArray

+    # Compat for frequency inference, see GH#23789
+    _is_monotonic_increasing = Index.is_monotonic_increasing
+    _is_monotonic_decreasing = Index.is_monotonic_decreasing
+    _is_unique = Index.is_unique
+
     _timezone = cache_readonly(DatetimeArray._timezone.fget)
     is_normalized = cache_readonly(DatetimeArray.is_normalized.fget)
     _resolution = cache_readonly(DatetimeArray._resolution.fget)

@@ -1514,9 +1535,13 @@ def date_range(start=None, end=None, periods=None, freq=None, tz=None,
     if freq is None and com._any_none(periods, start, end):
         freq = 'D'

-    return DatetimeIndex(start=start, end=end, periods=periods,
-                         freq=freq, tz=tz, normalize=normalize, name=name,
-                         closed=closed, **kwargs)
+    result = DatetimeIndex._generate_range(
+        start=start, end=end, periods=periods,
+        freq=freq, tz=tz, normalize=normalize,
+        closed=closed, **kwargs)
+
+    result.name = name
+    return result


 def bdate_range(start=None, end=None, periods=None, freq='B', tz=None,

@@ -1602,9 +1627,9 @@ def bdate_range(start=None, end=None, periods=None, freq='B', tz=None,
               'weekmask are passed, got frequency {freq}').format(freq=freq)
         raise ValueError(msg)

-    return DatetimeIndex(start=start, end=end, periods=periods,
-                         freq=freq, tz=tz, normalize=normalize, name=name,
-                         closed=closed, **kwargs)
+    return date_range(start=start, end=end, periods=periods,
+                      freq=freq, tz=tz, normalize=normalize, name=name,
+                      closed=closed, **kwargs)


 def cdate_range(start=None, end=None, periods=None, freq='C', tz=None,

@@ -1661,9 +1686,10 @@ def cdate_range(start=None, end=None, periods=None, freq='C', tz=None,
     holidays = kwargs.pop('holidays', [])
     weekmask = kwargs.pop('weekmask', 'Mon Tue Wed Thu Fri')
     freq = CDay(holidays=holidays, weekmask=weekmask)
-    return DatetimeIndex(start=start, end=end, periods=periods, freq=freq,
-                         tz=tz, normalize=normalize, name=name,
-                         closed=closed, **kwargs)
+
+    return date_range(start=start, end=end, periods=periods, freq=freq,
+                      tz=tz, normalize=normalize, name=name,
+                      closed=closed, **kwargs)


 def _time_to_micros(time):
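Illustration (not part of the commit): after these hunks bdate_range and cdate_range are thin wrappers around date_range. A usage sketch assuming the pandas 0.24 API (cdate_range itself is already deprecated in favor of bdate_range with freq='C').

    import pandas as pd

    # Business-day range; internally forwarded to date_range with freq='B'.
    bdays = pd.bdate_range(start="2018-01-01", periods=5)
    print(bdays.freq)   # <BusinessDay>

    # Custom business days via weekmask/holidays, forwarded the same way.
    custom = pd.bdate_range(start="2018-01-01", periods=5, freq="C",
                            weekmask="Mon Tue Wed Thu Fri",
                            holidays=["2018-01-02"])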

pandas/core/indexes/timedeltas.py

+23 -4

@@ -1,5 +1,6 @@
 """ implement the TimedeltaIndex """
 from datetime import datetime
+import warnings

 import numpy as np

@@ -132,12 +133,22 @@ def _join_i8_wrapper(joinf, **kwargs):

     def __new__(cls, data=None, unit=None, freq=None, start=None, end=None,
                 periods=None, closed=None, dtype=None, copy=False,
-                name=None, verify_integrity=True):
+                name=None, verify_integrity=None):
+
+        if verify_integrity is not None:
+            warnings.warn("The 'verify_integrity' argument is deprecated, "
+                          "will be removed in a future version.",
+                          FutureWarning, stacklevel=2)
+        else:
+            verify_integrity = True

         freq, freq_infer = dtl.maybe_infer_freq(freq)

         if data is None:
-            # TODO: Remove this block and associated kwargs; GH#20535
+            warnings.warn("Creating a TimedeltaIndex by passing range "
+                          "endpoints is deprecated. Use "
+                          "`pandas.timedelta_range` instead.",
+                          FutureWarning, stacklevel=2)
             result = cls._generate_range(start, end, periods, freq,
                                          closed=closed)
             result.name = name

@@ -250,6 +261,11 @@ def _format_native_types(self, na_rep=u'NaT', date_format=None, **kwargs):

     total_seconds = wrap_array_method(TimedeltaArray.total_seconds, True)

+    # Compat for frequency inference, see GH#23789
+    _is_monotonic_increasing = Index.is_monotonic_increasing
+    _is_monotonic_decreasing = Index.is_monotonic_decreasing
+    _is_unique = Index.is_unique
+
     # -------------------------------------------------------------------

     @Appender(_index_shared_docs['astype'])

@@ -735,5 +751,8 @@ def timedelta_range(start=None, end=None, periods=None, freq=None,
     if freq is None and com._any_none(periods, start, end):
         freq = 'D'

-    return TimedeltaIndex(start=start, end=end, periods=periods,
-                          freq=freq, name=name, closed=closed)
+    freq, freq_infer = dtl.maybe_infer_freq(freq)
+    result = TimedeltaIndex._generate_range(start, end, periods, freq,
+                                            closed=closed)
+    result.name = name
+    return result
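Illustration (not part of the commit): timedelta_range is the supported public entry point that this function now implements via _generate_range; any combination of start/end/periods/freq that pins down the range works.

    import pandas as pd

    # freq defaults to 'D' when one of start/end/periods is omitted (see above).
    print(pd.timedelta_range(start="1 day", periods=4))
    print(pd.timedelta_range(start="1 day", end="2 days", freq="6H"))
    # With start, end and periods all given, the points are spaced evenly.
    print(pd.timedelta_range(start="1 day", end="5 days", periods=5))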

pandas/core/resample.py

+10 -10

@@ -26,7 +26,7 @@
 from pandas.core.groupby.ops import BinGrouper
 from pandas.core.indexes.datetimes import DatetimeIndex, date_range
 from pandas.core.indexes.period import PeriodIndex
-from pandas.core.indexes.timedeltas import TimedeltaIndex
+from pandas.core.indexes.timedeltas import TimedeltaIndex, timedelta_range

 from pandas.tseries.frequencies import is_subperiod, is_superperiod, to_offset
 from pandas.tseries.offsets import (

@@ -1398,11 +1398,11 @@ def _get_time_bins(self, ax):
         # because replace() will swallow the nanosecond part
         # thus last bin maybe slightly before the end if the end contains
         # nanosecond part and lead to `Values falls after last bin` error
-        binner = labels = DatetimeIndex(freq=self.freq,
-                                        start=first,
-                                        end=last,
-                                        tz=tz,
-                                        name=ax.name)
+        binner = labels = date_range(freq=self.freq,
+                                     start=first,
+                                     end=last,
+                                     tz=tz,
+                                     name=ax.name)

         # GH 15549
         # In edge case of tz-aware resapmling binner last index can be

@@ -1484,10 +1484,10 @@ def _get_time_delta_bins(self, ax):
             return binner, [], labels

         start, end = ax.min(), ax.max()
-        labels = binner = TimedeltaIndex(start=start,
-                                         end=end,
-                                         freq=self.freq,
-                                         name=ax.name)
+        labels = binner = timedelta_range(start=start,
+                                          end=end,
+                                          freq=self.freq,
+                                          name=ax.name)

         end_stamps = labels + self.freq
         bins = ax.searchsorted(end_stamps, side='left')
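Illustration (not part of the commit): a usage sketch of the two resampling paths these hunks touch; datetime bins are now built with date_range and timedelta bins with timedelta_range.

    import numpy as np
    import pandas as pd

    # Datetime resampling goes through _get_time_bins.
    s = pd.Series(np.arange(6),
                  index=pd.date_range("2018-01-01", periods=6, freq="H"))
    print(s.resample("2H").sum())

    # Timedelta resampling goes through _get_time_delta_bins.
    t = pd.Series(np.arange(6),
                  index=pd.timedelta_range("1 hour", periods=6, freq="H"))
    print(t.resample("2H").sum())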

pandas/io/packers.py

+2 -2

@@ -604,8 +604,8 @@ def decode(obj):

     elif typ == u'datetime_index':
         data = unconvert(obj[u'data'], np.int64, obj.get(u'compress'))
-        d = dict(name=obj[u'name'], freq=obj[u'freq'], verify_integrity=False)
-        result = globals()[obj[u'klass']](data, **d)
+        d = dict(name=obj[u'name'], freq=obj[u'freq'])
+        result = DatetimeIndex._simple_new(data, **d)
         tz = obj[u'tz']

         # reverse tz conversion
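Illustration (not part of the commit): a round-trip sketch that exercises the decode path above, assuming the pandas 0.24 msgpack API (to_msgpack/read_msgpack were deprecated and removed in later versions).

    import pandas as pd

    df = pd.DataFrame({"x": range(3)},
                      index=pd.date_range("2018-01-01", periods=3,
                                          tz="US/Eastern"))
    packed = df.to_msgpack()            # returns bytes when no path is given
    restored = pd.read_msgpack(packed)  # DatetimeIndex rebuilt via _simple_new
    print(restored.index.tz, restored.index.freq)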
