diff --git a/README.md b/README.md
index 79a84440d6a5c..6a645dc64123d 100644
--- a/README.md
+++ b/README.md
@@ -90,7 +90,7 @@ pip install pandas
 ```
 
 ## Dependencies
-- [NumPy](http://www.numpy.org): 1.6.1 or higher
+- [NumPy](http://www.numpy.org): 1.7.0 or higher
 - [python-dateutil](http://labix.org/python-dateutil): 1.5 or higher
 - [pytz](http://pytz.sourceforge.net)
   - Needed for time zone support with ``pandas.date_range``
diff --git a/ci/requirements-2.6.txt b/ci/requirements-2.6.txt
index 117d14005e175..fec0a96a3d077 100644
--- a/ci/requirements-2.6.txt
+++ b/ci/requirements-2.6.txt
@@ -1,4 +1,4 @@
-numpy==1.6.1
+numpy==1.7.0
 cython==0.19.1
 python-dateutil==1.5
 pytz==2013b
diff --git a/ci/requirements-2.7_LOCALE.txt b/ci/requirements-2.7_LOCALE.txt
index a4d2b857f92c1..9af33fe96d58c 100644
--- a/ci/requirements-2.7_LOCALE.txt
+++ b/ci/requirements-2.7_LOCALE.txt
@@ -4,7 +4,7 @@ xlwt==0.7.5
 openpyxl==1.6.2
 xlsxwriter==0.4.6
 xlrd==0.9.2
-numpy==1.6.1
+numpy==1.7.1
 cython==0.19.1
 bottleneck==0.6.0
 matplotlib==1.3.0
diff --git a/doc/source/install.rst b/doc/source/install.rst
index 5595f60c6789c..fb22a86096b59 100644
--- a/doc/source/install.rst
+++ b/doc/source/install.rst
@@ -247,7 +247,7 @@ installed), make sure you have `nose
 Dependencies
 ------------
 
-  * `NumPy `__: 1.6.1 or higher
+  * `NumPy `__: 1.7.0 or higher
   * `python-dateutil `__ 1.5
   * `pytz `__
     * Needed for time zone support
diff --git a/doc/source/timeseries.rst b/doc/source/timeseries.rst
index d12e0fd7c7f9e..8f96ec98df6f2 100644
--- a/doc/source/timeseries.rst
+++ b/doc/source/timeseries.rst
@@ -1685,23 +1685,3 @@ yields another ``timedelta64[ns]`` dtypes Series.
 
    td * -1
    td * Series([1,2,3,4])
-
-Numpy < 1.7 Compatibility
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Numpy < 1.7 has a broken ``timedelta64`` type that does not correctly work
-for arithmetic. pandas bypasses this, but for frequency conversion as above,
-you need to create the divisor yourself. The ``np.timetimedelta64`` type only
-has 1 argument, the number of **micro** seconds.
-
-The following are equivalent statements in the two versions of numpy.
-
-.. code-block:: python
-
-   from distutils.version import LooseVersion
-   if LooseVersion(np.__version__) <= '1.6.2':
-      y / np.timedelta(86400*int(1e6))
-      y / np.timedelta(int(1e6))
-   else:
-      y / np.timedelta64(1,'D')
-      y / np.timedelta64(1,'s')
diff --git a/doc/source/v0.15.0.txt b/doc/source/v0.15.0.txt
index 322bcba9664d9..c6e784ac93e92 100644
--- a/doc/source/v0.15.0.txt
+++ b/doc/source/v0.15.0.txt
@@ -7,6 +7,12 @@
 This is a major release from 0.14.1 and includes a small number of API changes, enhancements,
 and performance improvements along with a large number of bug fixes. We
 recommend that all users upgrade to this version.
 
+.. warning::
+
+   pandas >= 0.15.0 requires NumPy >= 1.7.0 and no longer supports older
+   NumPy versions. To use the latest versions of pandas, please upgrade
+   NumPy to >= 1.7.0.
+ - Highlights include: - The ``Categorical`` type was integrated as a first-class pandas type, see :ref:`here ` diff --git a/pandas/__init__.py b/pandas/__init__.py index 6eda049835526..df5e6f567e3a6 100644 --- a/pandas/__init__.py +++ b/pandas/__init__.py @@ -1,5 +1,6 @@ # pylint: disable-msg=W0614,W0401,W0611,W0622 + __docformat__ = 'restructuredtext' try: @@ -18,6 +19,7 @@ from datetime import datetime import numpy as np + # XXX: HACK for NumPy 1.5.1 to suppress warnings try: np.seterr(all='ignore') @@ -27,14 +29,20 @@ # numpy versioning from distutils.version import LooseVersion _np_version = np.version.short_version -_np_version_under1p6 = LooseVersion(_np_version) < '1.6' -_np_version_under1p7 = LooseVersion(_np_version) < '1.7' _np_version_under1p8 = LooseVersion(_np_version) < '1.8' _np_version_under1p9 = LooseVersion(_np_version) < '1.9' + from pandas.version import version as __version__ from pandas.info import __doc__ + +if LooseVersion(_np_version) < '1.7.0': + raise ImportError('pandas {0} is incompatible with numpy < 1.7.0, ' + 'your numpy version is {1}. Please upgrade numpy to' + ' >= 1.7.0 to use pandas version {0}'.format(__version__, + _np_version)) + # let init-time option registration happen import pandas.core.config_init diff --git a/pandas/core/base.py b/pandas/core/base.py index 021f4474130bd..1655d2a4e4e23 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -298,7 +298,12 @@ def ndim(self): def item(self): """ return the first element of the underlying data as a python scalar """ - return self.values.item() + try: + return self.values.item() + except IndexError: + # copy numpy's message here because Py26 raises an IndexError + raise ValueError('can only convert an array of size 1 to a ' + 'Python scalar') @property def data(self): diff --git a/pandas/core/common.py b/pandas/core/common.py index bc4c95ed3323e..48fb75f59ac34 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -1848,6 +1848,8 @@ def _possibly_cast_to_datetime(value, dtype, coerce=False): """ try to cast the array/value to a datetimelike dtype, converting float nan to iNaT """ + from pandas.tseries.timedeltas import _possibly_cast_to_timedelta + from pandas.tseries.tools import to_datetime if dtype is not None: if isinstance(dtype, compat.string_types): @@ -1886,13 +1888,11 @@ def _possibly_cast_to_datetime(value, dtype, coerce=False): elif np.prod(value.shape) and value.dtype != dtype: try: if is_datetime64: - from pandas.tseries.tools import to_datetime value = to_datetime(value, coerce=coerce).values elif is_timedelta64: - from pandas.tseries.timedeltas import \ - _possibly_cast_to_timedelta - value = _possibly_cast_to_timedelta(value, coerce='compat', dtype=dtype) - except: + value = _possibly_cast_to_timedelta(value, + dtype=dtype) + except (AttributeError, ValueError): pass else: @@ -1901,28 +1901,20 @@ def _possibly_cast_to_datetime(value, dtype, coerce=False): # catch a datetime/timedelta that is not of ns variety # and no coercion specified - if (is_array and value.dtype.kind in ['M','m']): + if is_array and value.dtype.kind in ['M', 'm']: dtype = value.dtype if dtype.kind == 'M' and dtype != _NS_DTYPE: value = value.astype(_NS_DTYPE) elif dtype.kind == 'm' and dtype != _TD_DTYPE: - from pandas.tseries.timedeltas import \ - _possibly_cast_to_timedelta - value = _possibly_cast_to_timedelta(value, coerce='compat') + value = _possibly_cast_to_timedelta(value) # only do this if we have an array and the dtype of the array is not # setup already we are not an integer/object, 
so don't bother with this # conversion - elif (is_array and not ( - issubclass(value.dtype.type, np.integer) or - value.dtype == np.object_)): - pass - - # try to infer if we have a datetimelike here - # otherwise pass thru - else: + elif not (is_array and not (issubclass(value.dtype.type, np.integer) or + value.dtype == np.object_)): value = _possibly_infer_to_datetimelike(value) return value diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 83110d143e8bc..5064545404fb0 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -16,7 +16,7 @@ from pandas.core.internals import BlockManager import pandas.core.common as com import pandas.core.datetools as datetools -from pandas import compat, _np_version_under1p7 +from pandas import compat from pandas.compat import map, zip, lrange, string_types, isidentifier, lmap from pandas.core.common import (isnull, notnull, is_list_like, _values_from_object, _maybe_promote, @@ -3613,21 +3613,6 @@ def abs(self): ------- abs: type of caller """ - - # suprimo numpy 1.6 hacking - # for timedeltas - if _np_version_under1p7: - - def _convert_timedeltas(x): - if x.dtype.kind == 'm': - return np.abs(x.view('i8')).astype(x.dtype) - return np.abs(x) - - if self.ndim == 1: - return _convert_timedeltas(self) - elif self.ndim == 2: - return self.apply(_convert_timedeltas) - return np.abs(self) _shared_docs['describe'] = """ diff --git a/pandas/core/groupby.py b/pandas/core/groupby.py index 7c24c339d4f16..ce57a9c03d570 100644 --- a/pandas/core/groupby.py +++ b/pandas/core/groupby.py @@ -26,7 +26,6 @@ is_timedelta64_dtype, is_datetime64_dtype, is_categorical_dtype, _values_from_object) from pandas.core.config import option_context -from pandas import _np_version_under1p7 import pandas.lib as lib from pandas.lib import Timestamp import pandas.tslib as tslib @@ -2764,18 +2763,21 @@ def _wrap_applied_output(self, keys, values, not_indexed_same=False): # normally use vstack as its faster than concat # and if we have mi-columns - if not _np_version_under1p7 or isinstance(v.index,MultiIndex) or key_index is None: - stacked_values = np.vstack([np.asarray(x) for x in values]) - result = DataFrame(stacked_values,index=key_index,columns=index) + if isinstance(v.index, MultiIndex) or key_index is None: + stacked_values = np.vstack(map(np.asarray, values)) + result = DataFrame(stacked_values, index=key_index, + columns=index) else: # GH5788 instead of stacking; concat gets the dtypes correct from pandas.tools.merge import concat - result = concat(values,keys=key_index,names=key_index.names, + result = concat(values, keys=key_index, + names=key_index.names, axis=self.axis).unstack() result.columns = index else: - stacked_values = np.vstack([np.asarray(x) for x in values]) - result = DataFrame(stacked_values.T,index=v.index,columns=key_index) + stacked_values = np.vstack(map(np.asarray, values)) + result = DataFrame(stacked_values.T, index=v.index, + columns=key_index) except (ValueError, AttributeError): # GH1738: values is list of arrays of unequal lengths fall diff --git a/pandas/core/internals.py b/pandas/core/internals.py index da36d95a3ad9e..f3b8a54034d56 100644 --- a/pandas/core/internals.py +++ b/pandas/core/internals.py @@ -25,7 +25,7 @@ from pandas.util.decorators import cache_readonly from pandas.tslib import Timestamp -from pandas import compat, _np_version_under1p7 +from pandas import compat from pandas.compat import range, map, zip, u from pandas.tseries.timedeltas import _coerce_scalar_to_timedelta_type @@ -1298,10 +1298,8 @@ def 
to_native_types(self, slicer=None, na_rep=None, **kwargs): def get_values(self, dtype=None): # return object dtypes as datetime.timedeltas if dtype == object: - if _np_version_under1p7: - return self.values.astype('object') return lib.map_infer(self.values.ravel(), - lambda x: timedelta(microseconds=x.item()/1000) + lambda x: timedelta(microseconds=x.item() / 1000) ).reshape(self.values.shape) return self.values diff --git a/pandas/core/ops.py b/pandas/core/ops.py index 9f29570af6f4f..16e6e40802a95 100644 --- a/pandas/core/ops.py +++ b/pandas/core/ops.py @@ -258,9 +258,7 @@ def __init__(self, left, right, name): self.is_datetime_lhs = com.is_datetime64_dtype(left) self.is_integer_lhs = left.dtype.kind in ['i', 'u'] self.is_datetime_rhs = com.is_datetime64_dtype(rvalues) - self.is_timedelta_rhs = (com.is_timedelta64_dtype(rvalues) - or (not self.is_datetime_rhs - and pd._np_version_under1p7)) + self.is_timedelta_rhs = com.is_timedelta64_dtype(rvalues) self.is_integer_rhs = rvalues.dtype.kind in ('i', 'u') self._validate() @@ -318,7 +316,7 @@ def _convert_to_array(self, values, name=None, other=None): """converts values to ndarray""" from pandas.tseries.timedeltas import _possibly_cast_to_timedelta - coerce = 'compat' if pd._np_version_under1p7 else True + coerce = True if not is_list_like(values): values = np.array([values]) inferred_type = lib.infer_dtype(values) @@ -648,13 +646,7 @@ def _radd_compat(left, right): try: output = radd(left, right) except TypeError: - cond = (pd._np_version_under1p6 and - left.dtype == np.object_) - if cond: # pragma: no cover - output = np.empty_like(left) - output.flat[:] = [radd(x, right) for x in left.flat] - else: - raise + raise return output diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 989249994d953..5150729ed6f79 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -14,7 +14,7 @@ import numpy as np from pandas import (Series, TimeSeries, DataFrame, Panel, Panel4D, Index, - MultiIndex, Int64Index, Timestamp, _np_version_under1p7) + MultiIndex, Int64Index, Timestamp) from pandas.sparse.api import SparseSeries, SparseDataFrame, SparsePanel from pandas.sparse.array import BlockIndex, IntIndex from pandas.tseries.api import PeriodIndex, DatetimeIndex @@ -1721,9 +1721,6 @@ def set_atom(self, block, block_items, existing_col, min_itemsize, if inferred_type == 'datetime64': self.set_atom_datetime64(block) elif dtype == 'timedelta64[ns]': - if _np_version_under1p7: - raise TypeError( - "timdelta64 is not supported under under numpy < 1.7") self.set_atom_timedelta64(block) elif inferred_type == 'date': raise TypeError( @@ -2240,9 +2237,6 @@ def read_array(self, key): if dtype == u('datetime64'): ret = np.array(ret, dtype='M8[ns]') elif dtype == u('timedelta64'): - if _np_version_under1p7: - raise TypeError( - "timedelta64 is not supported under under numpy < 1.7") ret = np.array(ret, dtype='m8[ns]') if transposed: diff --git a/pandas/io/tests/test_json/test_pandas.py b/pandas/io/tests/test_json/test_pandas.py index 62d729ccdaa88..5732bc90573fd 100644 --- a/pandas/io/tests/test_json/test_pandas.py +++ b/pandas/io/tests/test_json/test_pandas.py @@ -601,8 +601,6 @@ def test_url(self): self.assertEqual(result[c].dtype, 'datetime64[ns]') def test_timedelta(self): - tm._skip_if_not_numpy17_friendly() - from datetime import timedelta converter = lambda x: pd.to_timedelta(x,unit='ms') diff --git a/pandas/io/tests/test_pytables.py b/pandas/io/tests/test_pytables.py index 2a0796e90e418..9cdecd16755c7 100644 --- 
a/pandas/io/tests/test_pytables.py +++ b/pandas/io/tests/test_pytables.py @@ -22,7 +22,7 @@ assert_frame_equal, assert_series_equal) from pandas import concat, Timestamp -from pandas import compat, _np_version_under1p7 +from pandas import compat from pandas.compat import range, lrange, u from pandas.util.testing import assert_produces_warning @@ -2159,8 +2159,6 @@ def setTZ(tz): setTZ(orig_tz) def test_append_with_timedelta(self): - tm._skip_if_not_numpy17_friendly() - # GH 3577 # append timedelta diff --git a/pandas/io/tests/test_sql.py b/pandas/io/tests/test_sql.py index eadcb2c9f1fdb..4d7eb2d04af21 100644 --- a/pandas/io/tests/test_sql.py +++ b/pandas/io/tests/test_sql.py @@ -36,7 +36,6 @@ import pandas.io.sql as sql import pandas.util.testing as tm -from pandas import _np_version_under1p7 try: @@ -509,8 +508,6 @@ def test_date_and_index(self): def test_timedelta(self): # see #6921 - tm._skip_if_not_numpy17_friendly() - df = to_timedelta(Series(['00:00:01', '00:00:03'], name='foo')).to_frame() with tm.assert_produces_warning(UserWarning): df.to_sql('test_timedelta', self.conn) diff --git a/pandas/tests/test_base.py b/pandas/tests/test_base.py index 5dd8d072595cb..b171b31528a55 100644 --- a/pandas/tests/test_base.py +++ b/pandas/tests/test_base.py @@ -1,3 +1,4 @@ +from __future__ import print_function import re from datetime import datetime, timedelta import numpy as np @@ -8,7 +9,6 @@ from pandas.util.testing import assertRaisesRegexp, assert_isinstance from pandas.tseries.common import is_datetimelike from pandas import Series, Index, Int64Index, DatetimeIndex, PeriodIndex -from pandas import _np_version_under1p7 import pandas.tslib as tslib import nose @@ -128,6 +128,7 @@ def test_values(self): self.assert_numpy_array_equal(self.container, original) self.assertEqual(vals[0], n) + class TestPandasDelegate(tm.TestCase): def setUp(self): @@ -175,6 +176,7 @@ def f(): delegate.foo() self.assertRaises(TypeError, f) + class Ops(tm.TestCase): def setUp(self): self.int_index = tm.makeIntIndex(10) @@ -238,6 +240,7 @@ def check_ops_properties(self, props, filter=None, ignore_failures=False): else: self.assertRaises(AttributeError, lambda : getattr(o,op)) + class TestIndexOps(Ops): def setUp(self): @@ -250,29 +253,25 @@ def test_ndarray_compat_properties(self): for o in self.objs: # check that we work - for p in ['shape','dtype','base','flags','T', - 'strides','itemsize','nbytes']: - self.assertIsNotNone(getattr(o,p,None)) + for p in ['shape', 'dtype', 'base', 'flags', 'T', + 'strides', 'itemsize', 'nbytes']: + self.assertIsNotNone(getattr(o, p, None)) # if we have a datetimelike dtype then needs a view to work # but the user is responsible for that try: self.assertIsNotNone(o.data) - except (ValueError): + except ValueError: pass - # len > 1 - self.assertRaises(ValueError, lambda : o.item()) - - self.assertTrue(o.ndim == 1) - - self.assertTrue(o.size == len(o)) + self.assertRaises(ValueError, o.item) # len > 1 + self.assertEqual(o.ndim, 1) + self.assertEqual(o.size, len(o)) - self.assertTrue(Index([1]).item() == 1) - self.assertTrue(Series([1]).item() == 1) + self.assertEqual(Index([1]).item(), 1) + self.assertEqual(Series([1]).item(), 1) def test_ops(self): - tm._skip_if_not_numpy17_friendly() for op in ['max','min']: for o in self.objs: result = getattr(o,op)() @@ -734,10 +733,7 @@ def test_add_iadd(self): tm.assert_index_equal(rng, expected) # offset - if _np_version_under1p7: - offsets = [pd.offsets.Hour(2), timedelta(hours=2)] - else: - offsets = [pd.offsets.Hour(2), timedelta(hours=2), 
np.timedelta64(2, 'h')] + offsets = [pd.offsets.Hour(2), timedelta(hours=2), np.timedelta64(2, 'h')] for delta in offsets: rng = pd.date_range('2000-01-01', '2000-02-01', tz=tz) @@ -781,10 +777,7 @@ def test_sub_isub(self): tm.assert_index_equal(rng, expected) # offset - if _np_version_under1p7: - offsets = [pd.offsets.Hour(2), timedelta(hours=2)] - else: - offsets = [pd.offsets.Hour(2), timedelta(hours=2), np.timedelta64(2, 'h')] + offsets = [pd.offsets.Hour(2), timedelta(hours=2), np.timedelta64(2, 'h')] for delta in offsets: rng = pd.date_range('2000-01-01', '2000-02-01', tz=tz) @@ -961,8 +954,6 @@ def test_resolution(self): self.assertEqual(idx.resolution, expected) def test_add_iadd(self): - tm._skip_if_not_numpy17_friendly() - # union rng1 = pd.period_range('1/1/2000', freq='D', periods=5) other1 = pd.period_range('1/6/2000', freq='D', periods=5) @@ -1085,8 +1076,6 @@ def test_add_iadd(self): tm.assert_index_equal(rng, expected) def test_sub_isub(self): - tm._skip_if_not_numpy17_friendly() - # diff rng1 = pd.period_range('1/1/2000', freq='D', periods=5) other1 = pd.period_range('1/6/2000', freq='D', periods=5) diff --git a/pandas/tests/test_categorical.py b/pandas/tests/test_categorical.py index 421e05f5a3bc7..d07adeadb640c 100644 --- a/pandas/tests/test_categorical.py +++ b/pandas/tests/test_categorical.py @@ -7,8 +7,7 @@ import numpy as np import pandas as pd -from pandas import (Categorical, Index, Series, DataFrame, PeriodIndex, - Timestamp, _np_version_under1p7) +from pandas import Categorical, Index, Series, DataFrame, PeriodIndex, Timestamp import pandas.core.common as com import pandas.compat as compat @@ -379,10 +378,7 @@ def f(): codes= c.codes def f(): codes[4] = 1 - if _np_version_under1p7: - self.assertRaises(RuntimeError, f) - else: - self.assertRaises(ValueError, f) + self.assertRaises(ValueError, f) # But even after getting the codes, the original array should still be writeable! 
c[4] = "a" diff --git a/pandas/tests/test_format.py b/pandas/tests/test_format.py index 27f5ab3c63d81..c6a9192d7bb79 100644 --- a/pandas/tests/test_format.py +++ b/pandas/tests/test_format.py @@ -14,7 +14,7 @@ from numpy.random import randn import numpy as np -from pandas import DataFrame, Series, Index, _np_version_under1p7, Timestamp, MultiIndex +from pandas import DataFrame, Series, Index, Timestamp, MultiIndex import pandas.core.format as fmt import pandas.util.testing as tm @@ -2727,10 +2727,6 @@ def test_format(self): class TestRepr_timedelta64(tm.TestCase): - @classmethod - def setUpClass(cls): - tm._skip_if_not_numpy17_friendly() - def test_legacy(self): delta_1d = pd.to_timedelta(1, unit='D') delta_0d = pd.to_timedelta(0, unit='D') @@ -2775,10 +2771,6 @@ def test_long(self): class TestTimedelta64Formatter(tm.TestCase): - @classmethod - def setUpClass(cls): - tm._skip_if_not_numpy17_friendly() - def test_mixed(self): x = pd.to_timedelta(list(range(5)) + [pd.NaT], unit='D') y = pd.to_timedelta(list(range(5)) + [pd.NaT], unit='s') diff --git a/pandas/tests/test_frame.py b/pandas/tests/test_frame.py index 7912debd0d409..cf845a18092af 100644 --- a/pandas/tests/test_frame.py +++ b/pandas/tests/test_frame.py @@ -3803,8 +3803,6 @@ def test_operators_timedelta64(self): self.assertTrue(df['off2'].dtype == 'timedelta64[ns]') def test_datetimelike_setitem_with_inference(self): - tm._skip_if_not_numpy17_friendly() - # GH 7592 # assignment of timedeltas with NaT @@ -9668,8 +9666,6 @@ def test_apply(self): self.assertRaises(ValueError, df.apply, lambda x: x, 2) def test_apply_mixed_datetimelike(self): - tm._skip_if_not_numpy17_friendly() - # mixed datetimelike # GH 7778 df = DataFrame({ 'A' : date_range('20130101',periods=3), 'B' : pd.to_timedelta(np.arange(3),unit='s') }) @@ -13112,7 +13108,6 @@ def test_select_dtypes_exclude_include(self): tm.assert_frame_equal(r, e) def test_select_dtypes_not_an_attr_but_still_valid_dtype(self): - tm._skip_if_not_numpy17_friendly() df = DataFrame({'a': list('abc'), 'b': list(range(1, 4)), 'c': np.arange(3, 6).astype('u1'), diff --git a/pandas/tests/test_generic.py b/pandas/tests/test_generic.py index 8d80962eb9902..001d6f489e934 100644 --- a/pandas/tests/test_generic.py +++ b/pandas/tests/test_generic.py @@ -160,8 +160,6 @@ def f(): self.assertRaises(ValueError, lambda : not obj1) def test_numpy_1_7_compat_numeric_methods(self): - tm._skip_if_not_numpy17_friendly() - # GH 4435 # numpy in 1.7 tries to pass addtional arguments to pandas functions diff --git a/pandas/tests/test_index.py b/pandas/tests/test_index.py index d0045c2282aba..60105719179ad 100644 --- a/pandas/tests/test_index.py +++ b/pandas/tests/test_index.py @@ -32,7 +32,6 @@ import pandas as pd from pandas.lib import Timestamp -from pandas import _np_version_under1p7 class Base(object): """ base class for index sub-class tests """ @@ -392,8 +391,6 @@ def test_asof(self): tm.assert_isinstance(self.dateIndex.asof(d), Timestamp) def test_nanosecond_index_access(self): - tm._skip_if_not_numpy17_friendly() - s = Series([Timestamp('20130101')]).values.view('i8')[0] r = DatetimeIndex([s + 50 + i for i in range(100)]) x = Series(np.random.randn(100), index=r) @@ -1630,7 +1627,7 @@ def test_pickle_compat_construction(self): def test_numeric_compat(self): super(TestDatetimeIndex, self).test_numeric_compat() - if not (_np_version_under1p7 or compat.PY3_2): + if not compat.PY3_2: for f in [lambda : np.timedelta64(1, 'D').astype('m8[ns]') * pd.date_range('2000-01-01', periods=3), lambda : 
pd.date_range('2000-01-01', periods=3) * np.timedelta64(1, 'D').astype('m8[ns]') ]: self.assertRaises(TypeError, f) @@ -2227,12 +2224,11 @@ def test_get_level_values_na(self): expected = np.array(['a', np.nan, 1],dtype=object) assert_array_equal(values.values, expected) - if not _np_version_under1p7: - arrays = [['a', 'b', 'b'], pd.DatetimeIndex([0, 1, pd.NaT])] - index = pd.MultiIndex.from_arrays(arrays) - values = index.get_level_values(1) - expected = pd.DatetimeIndex([0, 1, pd.NaT]) - assert_array_equal(values.values, expected.values) + arrays = [['a', 'b', 'b'], pd.DatetimeIndex([0, 1, pd.NaT])] + index = pd.MultiIndex.from_arrays(arrays) + values = index.get_level_values(1) + expected = pd.DatetimeIndex([0, 1, pd.NaT]) + assert_array_equal(values.values, expected.values) arrays = [[], []] index = pd.MultiIndex.from_arrays(arrays) diff --git a/pandas/tests/test_series.py b/pandas/tests/test_series.py index 4ecb9a1430eba..24282fdc280af 100644 --- a/pandas/tests/test_series.py +++ b/pandas/tests/test_series.py @@ -15,8 +15,8 @@ import numpy.ma as ma import pandas as pd -from pandas import (Index, Series, DataFrame, isnull, notnull, - bdate_range, date_range, period_range, _np_version_under1p7) +from pandas import (Index, Series, DataFrame, isnull, notnull, bdate_range, + date_range, period_range) from pandas.core.index import MultiIndex from pandas.core.indexing import IndexingError from pandas.tseries.index import Timestamp, DatetimeIndex @@ -27,7 +27,7 @@ import pandas.core.datetools as datetools import pandas.core.nanops as nanops -from pandas.compat import StringIO, lrange, range, zip, u, OrderedDict, long, PY3_2 +from pandas.compat import StringIO, lrange, range, zip, u, OrderedDict, long from pandas import compat from pandas.util.testing import (assert_series_equal, assert_almost_equal, @@ -80,7 +80,6 @@ def test_dt_namespace_accessor(self): ok_for_period = ok_for_base + ['qyear'] ok_for_dt = ok_for_base + ['date','time','microsecond','nanosecond', 'is_month_start', 'is_month_end', 'is_quarter_start', 'is_quarter_end', 'is_year_start', 'is_year_end'] - ok_for_both = ok_for_dt def get_expected(s, name): result = getattr(Index(s.values),prop) @@ -726,15 +725,14 @@ def test_constructor_dtype_datetime64(self): values2 = dates.view(np.ndarray).astype('datetime64[ns]') expected = Series(values2, dates) - # numpy < 1.7 is very odd about astyping - if not _np_version_under1p7: - for dtype in ['s','D','ms','us','ns']: - values1 = dates.view(np.ndarray).astype('M8[{0}]'.format(dtype)) - result = Series(values1, dates) - assert_series_equal(result,expected) + for dtype in ['s', 'D', 'ms', 'us', 'ns']: + values1 = dates.view(np.ndarray).astype('M8[{0}]'.format(dtype)) + result = Series(values1, dates) + assert_series_equal(result,expected) # leave datetime.date alone - dates2 = np.array([ d.date() for d in dates.to_pydatetime() ],dtype=object) + dates2 = np.array([d.date() for d in dates.to_pydatetime()], + dtype=object) series1 = Series(dates2, dates) self.assert_numpy_array_equal(series1.values,dates2) self.assertEqual(series1.dtype,object) @@ -1343,7 +1341,7 @@ def test_reshape_2d_return_array(self): self.assertNotIsInstance(result, Series) result2 = np.reshape(x, (-1, 1)) - self.assertNotIsInstance(result, Series) + self.assertNotIsInstance(result2, Series) result = x[:, None] expected = x.reshape((-1, 1)) @@ -1929,11 +1927,10 @@ def test_timeseries_repr_object_dtype(self): self.assertTrue(repr(ts).splitlines()[-1].startswith('Freq:')) ts2 = ts.ix[np.random.randint(0, len(ts) - 1, 400)] - 
repr(ts).splitlines()[-1] + repr(ts2).splitlines()[-1] def test_timeseries_periodindex(self): # GH2891 - import pickle from pandas import period_range prng = period_range('1/1/2011', '1/1/2012', freq='M') ts = Series(np.random.randn(len(prng)), prng) @@ -2297,11 +2294,10 @@ def test_quantile(self): q = dts.quantile(.2) self.assertEqual(q, Timestamp('2000-01-10 19:12:00')) - if not _np_version_under1p7: - # timedelta64[ns] dtype - tds = dts.diff() - q = tds.quantile(.25) - self.assertEqual(q, pd.to_timedelta('24:00:00')) + # timedelta64[ns] dtype + tds = dts.diff() + q = tds.quantile(.25) + self.assertEqual(q, pd.to_timedelta('24:00:00')) def test_quantile_multi(self): from numpy import percentile @@ -2488,9 +2484,7 @@ def check_comparators(series, other): def test_operators_empty_int_corner(self): s1 = Series([], [], dtype=np.int32) s2 = Series({'x': 0.}) - - # it works! - _ = s1 * s2 + tm.assert_series_equal(s1 * s2, Series([np.nan], index=['x'])) def test_constructor_dtype_timedelta64(self): @@ -2501,9 +2495,8 @@ def test_constructor_dtype_timedelta64(self): td = Series([timedelta(days=1)]) self.assertEqual(td.dtype, 'timedelta64[ns]') - if not _np_version_under1p7: - td = Series([timedelta(days=1),timedelta(days=2),np.timedelta64(1,'s')]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + td = Series([timedelta(days=1),timedelta(days=2),np.timedelta64(1,'s')]) + self.assertEqual(td.dtype, 'timedelta64[ns]') # mixed with NaT from pandas import tslib @@ -2530,9 +2523,8 @@ def test_constructor_dtype_timedelta64(self): td = Series([pd.NaT, np.timedelta64(300000000)]) self.assertEqual(td.dtype, 'timedelta64[ns]') - if not _np_version_under1p7: - td = Series([np.timedelta64(1,'s')]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + td = Series([np.timedelta64(1,'s')]) + self.assertEqual(td.dtype, 'timedelta64[ns]') # these are frequency conversion astypes #for t in ['s', 'D', 'us', 'ms']: @@ -2554,16 +2546,14 @@ def f(): self.assertEqual(td.dtype, 'object') # these will correctly infer a timedelta - # but only on numpy > 1.7 as the cython path will only be used - if not _np_version_under1p7: - s = Series([None, pd.NaT, '1 Day']) - self.assertEqual(s.dtype,'timedelta64[ns]') - s = Series([np.nan, pd.NaT, '1 Day']) - self.assertEqual(s.dtype,'timedelta64[ns]') - s = Series([pd.NaT, None, '1 Day']) - self.assertEqual(s.dtype,'timedelta64[ns]') - s = Series([pd.NaT, np.nan, '1 Day']) - self.assertEqual(s.dtype,'timedelta64[ns]') + s = Series([None, pd.NaT, '1 Day']) + self.assertEqual(s.dtype,'timedelta64[ns]') + s = Series([np.nan, pd.NaT, '1 Day']) + self.assertEqual(s.dtype,'timedelta64[ns]') + s = Series([pd.NaT, None, '1 Day']) + self.assertEqual(s.dtype,'timedelta64[ns]') + s = Series([pd.NaT, np.nan, '1 Day']) + self.assertEqual(s.dtype,'timedelta64[ns]') def test_operators_timedelta64(self): @@ -2666,22 +2656,20 @@ def test_timedeltas_with_DateOffset(self): [Timestamp('20130101 9:06:00.005'), Timestamp('20130101 9:07:00.005')]) assert_series_equal(result, expected) - if not _np_version_under1p7: - - # operate with np.timedelta64 correctly - result = s + np.timedelta64(1, 's') - result2 = np.timedelta64(1, 's') + s - expected = Series( - [Timestamp('20130101 9:01:01'), Timestamp('20130101 9:02:01')]) - assert_series_equal(result, expected) - assert_series_equal(result2, expected) + # operate with np.timedelta64 correctly + result = s + np.timedelta64(1, 's') + result2 = np.timedelta64(1, 's') + s + expected = Series( + [Timestamp('20130101 9:01:01'), Timestamp('20130101 9:02:01')]) + 
assert_series_equal(result, expected) + assert_series_equal(result2, expected) - result = s + np.timedelta64(5, 'ms') - result2 = np.timedelta64(5, 'ms') + s - expected = Series( - [Timestamp('20130101 9:01:00.005'), Timestamp('20130101 9:02:00.005')]) - assert_series_equal(result, expected) - assert_series_equal(result2, expected) + result = s + np.timedelta64(5, 'ms') + result2 = np.timedelta64(5, 'ms') + s + expected = Series( + [Timestamp('20130101 9:01:00.005'), Timestamp('20130101 9:02:00.005')]) + assert_series_equal(result, expected) + assert_series_equal(result2, expected) # valid DateOffsets for do in [ 'Hour', 'Minute', 'Second', 'Day', 'Micro', @@ -2720,22 +2708,21 @@ def test_timedelta64_operations_with_timedeltas(self): # Now again, using pd.to_timedelta, which should build # a Series or a scalar, depending on input. - if not _np_version_under1p7: - td1 = Series(pd.to_timedelta(['00:05:03'] * 3)) - td2 = pd.to_timedelta('00:05:04') - result = td1 - td2 - expected = Series([timedelta(seconds=0)] * 3) -Series( - [timedelta(seconds=1)] * 3) - self.assertEqual(result.dtype, 'm8[ns]') - assert_series_equal(result, expected) + td1 = Series(pd.to_timedelta(['00:05:03'] * 3)) + td2 = pd.to_timedelta('00:05:04') + result = td1 - td2 + expected = Series([timedelta(seconds=0)] * 3) -Series( + [timedelta(seconds=1)] * 3) + self.assertEqual(result.dtype, 'm8[ns]') + assert_series_equal(result, expected) - result2 = td2 - td1 - expected = (Series([timedelta(seconds=1)] * 3) - - Series([timedelta(seconds=0)] * 3)) - assert_series_equal(result2, expected) + result2 = td2 - td1 + expected = (Series([timedelta(seconds=1)] * 3) - + Series([timedelta(seconds=0)] * 3)) + assert_series_equal(result2, expected) - # roundtrip - assert_series_equal(result + td2,td1) + # roundtrip + assert_series_equal(result + td2,td1) def test_timedelta64_operations_with_integers(self): @@ -2800,8 +2787,6 @@ def test_timedelta64_operations_with_integers(self): self.assertRaises(TypeError, sop, s2.values) def test_timedelta64_conversions(self): - tm._skip_if_not_numpy17_friendly() - startdate = Series(date_range('2013-01-01', '2013-01-03')) enddate = Series(date_range('2013-03-01', '2013-03-03')) @@ -2836,11 +2821,6 @@ def test_timedelta64_equal_timedelta_supported_ops(self): 'm': 60 * 1000000, 's': 1000000, 'us': 1} def timedelta64(*args): - if _np_version_under1p7: - coeffs = np.array(args) - terms = np.array([npy16_mappings[interval] - for interval in intervals]) - return np.timedelta64(coeffs.dot(terms)) return sum(starmap(np.timedelta64, zip(args, intervals))) for op, d, h, m, s, us in product([operator.add, operator.sub], @@ -2913,8 +2893,6 @@ def run_ops(ops, get_ser, test_ser): dt1 + td1 def test_ops_datetimelike_align(self): - tm._skip_if_not_numpy17_friendly() - # GH 7500 # datetimelike ops need to align dt = Series(date_range('2012-1-1', periods=3, freq='D')) @@ -2976,8 +2954,6 @@ def test_timedelta64_functions(self): assert_series_equal(result, expected) def test_timedelta_fillna(self): - tm._skip_if_not_numpy17_friendly() - #GH 3371 s = Series([Timestamp('20130101'), Timestamp('20130101'), Timestamp('20130102'), Timestamp('20130103 9:01:01')]) @@ -3183,8 +3159,6 @@ def test_bfill(self): assert_series_equal(ts.bfill(), ts.fillna(method='bfill')) def test_sub_of_datetime_from_TimeSeries(self): - tm._skip_if_not_numpy17_friendly() - from pandas.tseries.timedeltas import _possibly_cast_to_timedelta from datetime import datetime a = Timestamp(datetime(1993, 0o1, 0o7, 13, 30, 00)) @@ -5640,9 +5614,8 @@ def 
test_isin_with_i8(self): assert_series_equal(result, expected) # fails on dtype conversion in the first place - if not _np_version_under1p7: - result = s.isin(s[0:2].values.astype('datetime64[D]')) - assert_series_equal(result, expected) + result = s.isin(s[0:2].values.astype('datetime64[D]')) + assert_series_equal(result, expected) result = s.isin([s[1]]) assert_series_equal(result, expected2) @@ -5651,19 +5624,15 @@ def test_isin_with_i8(self): assert_series_equal(result, expected2) # timedelta64[ns] - if not _np_version_under1p7: - s = Series(pd.to_timedelta(lrange(5),unit='d')) - result = s.isin(s[0:2]) - assert_series_equal(result, expected) + s = Series(pd.to_timedelta(lrange(5),unit='d')) + result = s.isin(s[0:2]) + assert_series_equal(result, expected) #------------------------------------------------------------------------------ # TimeSeries-specific def test_cummethods_bool(self): # GH 6270 # looks like a buggy np.maximum.accumulate for numpy 1.6.1, py 3.2 - if _np_version_under1p7 and sys.version_info[0] == 3 and sys.version_info[1] == 2: - raise nose.SkipTest("failure of GH6270 on numpy < 1.7 and py 3.2") - def cummin(x): return np.minimum.accumulate(x) diff --git a/pandas/tools/tests/test_merge.py b/pandas/tools/tests/test_merge.py index 7ac9f900c615e..749f15af0d916 100644 --- a/pandas/tools/tests/test_merge.py +++ b/pandas/tools/tests/test_merge.py @@ -10,7 +10,7 @@ import pandas as pd from pandas.compat import range, lrange, lzip, zip, StringIO -from pandas import compat, _np_version_under1p7 +from pandas import compat from pandas.tseries.index import DatetimeIndex from pandas.tools.merge import merge, concat, ordered_merge, MergeError from pandas.util.testing import (assert_frame_equal, assert_series_equal, @@ -822,7 +822,6 @@ def test_join_append_timedeltas(self): # timedelta64 issues with join/merge # GH 5695 - tm._skip_if_not_numpy17_friendly() d = {'d': dt.datetime(2013, 11, 5, 5, 56), 't': dt.timedelta(0, 22500)} df = DataFrame(columns=list('dt')) @@ -2013,9 +2012,6 @@ def test_concat_datetime64_block(self): self.assertTrue((result.iloc[10:]['time'] == rng).all()) def test_concat_timedelta64_block(self): - - # not friendly for < 1.7 - tm._skip_if_not_numpy17_friendly() from pandas import to_timedelta rng = to_timedelta(np.arange(10),unit='s') diff --git a/pandas/tseries/offsets.py b/pandas/tseries/offsets.py index d2c9acedcee94..cd37f4000e5a2 100644 --- a/pandas/tseries/offsets.py +++ b/pandas/tseries/offsets.py @@ -11,8 +11,6 @@ import pandas.tslib as tslib from pandas.tslib import Timestamp, OutOfBoundsDatetime -from pandas import _np_version_under1p7 - import functools __all__ = ['Day', 'BusinessDay', 'BDay', 'CustomBusinessDay', 'CDay', @@ -2062,7 +2060,7 @@ class Micro(Tick): class Nano(Tick): - _inc = np.timedelta64(1, 'ns') if not _np_version_under1p7 else 1 + _inc = np.timedelta64(1, 'ns') _prefix = 'N' @@ -2181,9 +2179,7 @@ def generate_range(start=None, end=None, periods=None, FY5253Quarter, ]) -if not _np_version_under1p7: - # Only 1.7+ supports nanosecond resolution - prefix_mapping['N'] = Nano +prefix_mapping['N'] = Nano def _make_offset(key): diff --git a/pandas/tseries/tests/test_frequencies.py b/pandas/tseries/tests/test_frequencies.py index 24deb8a298688..b251ae50e22d6 100644 --- a/pandas/tseries/tests/test_frequencies.py +++ b/pandas/tseries/tests/test_frequencies.py @@ -16,7 +16,6 @@ from pandas.tseries.period import PeriodIndex import pandas.compat as compat -from pandas import _np_version_under1p7 import pandas.util.testing as tm def 
test_to_offset_multiple(): @@ -48,11 +47,10 @@ def test_to_offset_multiple(): expected = offsets.Milli(10075) assert(result == expected) - if not _np_version_under1p7: - freqstr = '2800N' - result = frequencies.to_offset(freqstr) - expected = offsets.Nano(2800) - assert(result == expected) + freqstr = '2800N' + result = frequencies.to_offset(freqstr) + expected = offsets.Nano(2800) + assert(result == expected) # malformed try: @@ -137,7 +135,6 @@ def test_microsecond(self): self._check_tick(timedelta(microseconds=1), 'U') def test_nanosecond(self): - tm._skip_if_not_numpy17_friendly() self._check_tick(np.timedelta64(1, 'ns'), 'N') def _check_tick(self, base_delta, code): diff --git a/pandas/tseries/tests/test_offsets.py b/pandas/tseries/tests/test_offsets.py index 065aa9236e539..f6f91760e8ad8 100644 --- a/pandas/tseries/tests/test_offsets.py +++ b/pandas/tseries/tests/test_offsets.py @@ -30,8 +30,6 @@ LastWeekOfMonth, FY5253, FY5253Quarter, WeekDay from pandas.tseries.holiday import USFederalHolidayCalendar -from pandas import _np_version_under1p7 - _multiprocess_can_split_ = True @@ -96,18 +94,13 @@ class Base(tm.TestCase): _offset = None _offset_types = [getattr(offsets, o) for o in offsets.__all__] - skip_np_u1p7 = [offsets.CustomBusinessDay, offsets.CDay, offsets.CustomBusinessMonthBegin, - offsets.CustomBusinessMonthEnd, offsets.Nano] timezones = [None, 'UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/Asia/Tokyo', 'dateutil/US/Pacific'] @property def offset_types(self): - if _np_version_under1p7: - return [o for o in self._offset_types if o not in self.skip_np_u1p7] - else: - return self._offset_types + return self._offset_types def _get_offset(self, klass, value=1, normalize=False): # create instance from offset class @@ -133,8 +126,6 @@ def _get_offset(self, klass, value=1, normalize=False): def test_apply_out_of_range(self): if self._offset is None: return - if _np_version_under1p7 and self._offset in self.skip_np_u1p7: - raise nose.SkipTest('numpy >= 1.7 required') # try to create an out-of-bounds result timestamp; if we can't create the offset # skip @@ -2857,8 +2848,6 @@ def test_Microsecond(): def test_NanosecondGeneric(): - tm._skip_if_not_numpy17_friendly() - timestamp = Timestamp(datetime(2010, 1, 1)) assert timestamp.nanosecond == 0 @@ -2870,8 +2859,6 @@ def test_NanosecondGeneric(): def test_Nanosecond(): - tm._skip_if_not_numpy17_friendly() - timestamp = Timestamp(datetime(2010, 1, 1)) assertEq(Nano(), timestamp, timestamp + np.timedelta64(1, 'ns')) assertEq(Nano(-1), timestamp + np.timedelta64(1, 'ns'), timestamp) diff --git a/pandas/tseries/tests/test_period.py b/pandas/tseries/tests/test_period.py index 3fae251b433e6..e6e6b48ccb573 100644 --- a/pandas/tseries/tests/test_period.py +++ b/pandas/tseries/tests/test_period.py @@ -27,7 +27,7 @@ from pandas import Series, TimeSeries, DataFrame, _np_version_under1p9 from pandas import tslib from pandas.util.testing import(assert_series_equal, assert_almost_equal, - assertRaisesRegexp, _skip_if_not_numpy17_friendly) + assertRaisesRegexp) import pandas.util.testing as tm from pandas import compat from numpy.testing import assert_array_equal @@ -2486,8 +2486,6 @@ def test_add(self): dt1 + dt2 def test_add_offset(self): - _skip_if_not_numpy17_friendly() - # freq is DateOffset p = Period('2011', freq='A') self.assertEqual(p + offsets.YearEnd(2), Period('2013', freq='A')) @@ -2534,8 +2532,6 @@ def test_add_offset(self): p + o def test_add_offset_nat(self): - _skip_if_not_numpy17_friendly() - # freq is DateOffset p = Period('NaT', 
freq='A') for o in [offsets.YearEnd(2)]: @@ -2578,8 +2574,6 @@ def test_add_offset_nat(self): p + o def test_sub_offset(self): - _skip_if_not_numpy17_friendly() - # freq is DateOffset p = Period('2011', freq='A') self.assertEqual(p - offsets.YearEnd(2), Period('2009', freq='A')) @@ -2626,8 +2620,6 @@ def test_sub_offset(self): p - o def test_sub_offset_nat(self): - _skip_if_not_numpy17_friendly() - # freq is DateOffset p = Period('NaT', freq='A') for o in [offsets.YearEnd(2)]: diff --git a/pandas/tseries/tests/test_timedeltas.py b/pandas/tseries/tests/test_timedeltas.py index 9d85c599c840c..769062f293cf9 100644 --- a/pandas/tseries/tests/test_timedeltas.py +++ b/pandas/tseries/tests/test_timedeltas.py @@ -15,8 +15,7 @@ from pandas.util.testing import (assert_series_equal, assert_frame_equal, assert_almost_equal, - ensure_clean, - _skip_if_not_numpy17_friendly) + ensure_clean) import pandas.util.testing as tm class TestTimedeltas(tm.TestCase): @@ -26,8 +25,6 @@ def setUp(self): pass def test_numeric_conversions(self): - _skip_if_not_numpy17_friendly() - self.assertEqual(ct(0), np.timedelta64(0,'ns')) self.assertEqual(ct(10), np.timedelta64(10,'ns')) self.assertEqual(ct(10,unit='ns'), np.timedelta64(10,'ns').astype('m8[ns]')) @@ -38,15 +35,11 @@ def test_numeric_conversions(self): self.assertEqual(ct(10,unit='d'), np.timedelta64(10,'D').astype('m8[ns]')) def test_timedelta_conversions(self): - _skip_if_not_numpy17_friendly() - self.assertEqual(ct(timedelta(seconds=1)), np.timedelta64(1,'s').astype('m8[ns]')) self.assertEqual(ct(timedelta(microseconds=1)), np.timedelta64(1,'us').astype('m8[ns]')) self.assertEqual(ct(timedelta(days=1)), np.timedelta64(1,'D').astype('m8[ns]')) def test_short_format_converters(self): - _skip_if_not_numpy17_friendly() - def conv(v): return v.astype('m8[ns]') @@ -93,8 +86,6 @@ def conv(v): self.assertRaises(ValueError, ct, 'foo') def test_full_format_converters(self): - _skip_if_not_numpy17_friendly() - def conv(v): return v.astype('m8[ns]') d1 = np.timedelta64(1,'D') @@ -116,14 +107,10 @@ def conv(v): self.assertRaises(ValueError, ct, '- 1days, 00') def test_nat_converters(self): - _skip_if_not_numpy17_friendly() - self.assertEqual(to_timedelta('nat',box=False).astype('int64'), tslib.iNaT) self.assertEqual(to_timedelta('nan',box=False).astype('int64'), tslib.iNaT) def test_to_timedelta(self): - _skip_if_not_numpy17_friendly() - def conv(v): return v.astype('m8[ns]') d1 = np.timedelta64(1,'D') @@ -231,8 +218,6 @@ def testit(unit, transform): self.assertRaises(ValueError, lambda : to_timedelta(1,unit='foo')) def test_to_timedelta_via_apply(self): - _skip_if_not_numpy17_friendly() - # GH 5458 expected = Series([np.timedelta64(1,'s')]) result = Series(['00:00:01']).apply(to_timedelta) @@ -242,8 +227,6 @@ def test_to_timedelta_via_apply(self): tm.assert_series_equal(result, expected) def test_timedelta_ops(self): - _skip_if_not_numpy17_friendly() - # GH4984 # make sure ops return timedeltas s = Series([Timestamp('20130101') + timedelta(seconds=i*i) for i in range(10) ]) @@ -271,8 +254,6 @@ def test_timedelta_ops(self): tm.assert_almost_equal(result, expected) def test_timedelta_ops_scalar(self): - _skip_if_not_numpy17_friendly() - # GH 6808 base = pd.to_datetime('20130101 09:01:12.123456') expected_add = pd.to_datetime('20130101 09:01:22.123456') @@ -305,8 +286,6 @@ def test_timedelta_ops_scalar(self): self.assertEqual(result, expected_sub) def test_to_timedelta_on_missing_values(self): - _skip_if_not_numpy17_friendly() - # GH5438 timedelta_NaT = np.timedelta64('NaT') 
@@ -324,8 +303,6 @@ def test_to_timedelta_on_missing_values(self): self.assertEqual(actual.astype('int64'), timedelta_NaT.astype('int64')) def test_timedelta_ops_with_missing_values(self): - _skip_if_not_numpy17_friendly() - # setup s1 = pd.to_timedelta(Series(['00:00:01'])) s2 = pd.to_timedelta(Series(['00:00:02'])) @@ -403,8 +380,6 @@ def test_timedelta_ops_with_missing_values(self): assert_frame_equal(actual, dfn) def test_apply_to_timedelta(self): - _skip_if_not_numpy17_friendly() - timedelta_NaT = pd.to_timedelta('NaT') list_of_valid_strings = ['00:00:01', '00:00:02'] diff --git a/pandas/tseries/tests/test_timeseries.py b/pandas/tseries/tests/test_timeseries.py index f94910d9dec89..3da97074a93fd 100644 --- a/pandas/tseries/tests/test_timeseries.py +++ b/pandas/tseries/tests/test_timeseries.py @@ -34,7 +34,7 @@ import pandas.compat as compat import pandas.core.common as com from pandas import concat -from pandas import _np_version_under1p7, _np_version_under1p8 +from pandas import _np_version_under1p8 from numpy.testing.decorators import slow @@ -288,10 +288,7 @@ def test_indexing(self): self.assertRaises(KeyError, df.__getitem__, df.index[2],) def test_recreate_from_data(self): - if _np_version_under1p7: - freqs = ['M', 'Q', 'A', 'D', 'B', 'T', 'S', 'L', 'U', 'H'] - else: - freqs = ['M', 'Q', 'A', 'D', 'B', 'T', 'S', 'L', 'U', 'H', 'N', 'C'] + freqs = ['M', 'Q', 'A', 'D', 'B', 'T', 'S', 'L', 'U', 'H', 'N', 'C'] for f in freqs: org = DatetimeIndex(start='2001/02/01 09:00', freq=f, periods=1) @@ -768,19 +765,6 @@ def test_index_cast_datetime64_other_units(self): self.assertTrue((idx.values == tslib.cast_to_nanoseconds(arr)).all()) - def test_index_astype_datetime64(self): - # valid only under 1.7! - if not _np_version_under1p7: - raise nose.SkipTest("test only valid in numpy < 1.7") - - idx = Index([datetime(2012, 1, 1)], dtype=object) - casted = idx.astype(np.dtype('M8[D]')) - - casted = idx.astype(np.dtype('M8[D]')) - expected = DatetimeIndex(idx.values) - tm.assert_isinstance(casted, DatetimeIndex) - self.assertTrue(casted.equals(expected)) - def test_reindex_series_add_nat(self): rng = date_range('1/1/2000 00:00:00', periods=10, freq='10s') series = Series(rng) @@ -2713,8 +2697,6 @@ def assert_index_parameters(self, index): assert index.inferred_freq == '40960N' def test_ns_index(self): - tm._skip_if_not_numpy17_friendly() - nsamples = 400 ns = int(1e9 / 24414) dtstart = np.datetime64('2012-09-20T00:00:00') @@ -2862,10 +2844,9 @@ def test_datetimeindex_accessors(self): self.assertEqual(sum(dti.is_year_end), 1) # Ensure is_start/end accessors throw ValueError for CustomBusinessDay, CBD requires np >= 1.7 - if not _np_version_under1p7: - bday_egypt = offsets.CustomBusinessDay(weekmask='Sun Mon Tue Wed Thu') - dti = date_range(datetime(2013, 4, 30), periods=5, freq=bday_egypt) - self.assertRaises(ValueError, lambda: dti.is_month_start) + bday_egypt = offsets.CustomBusinessDay(weekmask='Sun Mon Tue Wed Thu') + dti = date_range(datetime(2013, 4, 30), periods=5, freq=bday_egypt) + self.assertRaises(ValueError, lambda: dti.is_month_start) dti = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-03']) @@ -3545,18 +3526,7 @@ def test_timestamp_compare_scalars(self): for left, right in ops.items(): left_f = getattr(operator, left) right_f = getattr(operator, right) - - if pd._np_version_under1p7: - # you have to convert to timestamp for this to work with numpy - # scalars - expected = left_f(Timestamp(lhs), rhs) - - # otherwise a TypeError is thrown - if left not in ('eq', 'ne'): - with 
tm.assertRaises(TypeError): - left_f(lhs, rhs) - else: - expected = left_f(lhs, rhs) + expected = left_f(lhs, rhs) result = right_f(rhs, lhs) self.assertEqual(result, expected) diff --git a/pandas/tseries/tests/test_timezones.py b/pandas/tseries/tests/test_timezones.py index bcfb2357b668d..5635bb75dd9ce 100644 --- a/pandas/tseries/tests/test_timezones.py +++ b/pandas/tseries/tests/test_timezones.py @@ -23,8 +23,6 @@ from pandas.util.testing import assert_frame_equal from pandas.compat import lrange, zip -from pandas import _np_version_under1p7 - try: import pytz @@ -1195,9 +1193,8 @@ def test_tzaware_offset(self): offset = dates + offsets.Hour(5) self.assertTrue(offset.equals(expected)) - if not _np_version_under1p7: - offset = dates + np.timedelta64(5, 'h') - self.assertTrue(offset.equals(expected)) + offset = dates + np.timedelta64(5, 'h') + self.assertTrue(offset.equals(expected)) offset = dates + timedelta(hours=5) self.assertTrue(offset.equals(expected)) @@ -1227,14 +1224,13 @@ def test_nat(self): expected = ['2010-12-01 05:00', '2010-12-02 05:00', NaT] self.assertTrue(idx.equals(DatetimeIndex(expected, tz='US/Pacific'))) - if not _np_version_under1p7: - idx = idx + np.timedelta64(3, 'h') - expected = ['2010-12-01 08:00', '2010-12-02 08:00', NaT] - self.assertTrue(idx.equals(DatetimeIndex(expected, tz='US/Pacific'))) + idx = idx + np.timedelta64(3, 'h') + expected = ['2010-12-01 08:00', '2010-12-02 08:00', NaT] + self.assertTrue(idx.equals(DatetimeIndex(expected, tz='US/Pacific'))) - idx = idx.tz_convert('US/Eastern') - expected = ['2010-12-01 11:00', '2010-12-02 11:00', NaT] - self.assertTrue(idx.equals(DatetimeIndex(expected, tz='US/Eastern'))) + idx = idx.tz_convert('US/Eastern') + expected = ['2010-12-01 11:00', '2010-12-02 11:00', NaT] + self.assertTrue(idx.equals(DatetimeIndex(expected, tz='US/Eastern'))) if __name__ == '__main__': diff --git a/pandas/tseries/tests/test_tslib.py b/pandas/tseries/tests/test_tslib.py index a700a617b0dee..57dc5f4404621 100644 --- a/pandas/tseries/tests/test_tslib.py +++ b/pandas/tseries/tests/test_tslib.py @@ -10,7 +10,6 @@ from pandas.tseries.index import date_range from pandas.tseries.frequencies import get_freq import pandas.tseries.offsets as offsets -from pandas import _np_version_under1p7 import pandas.util.testing as tm from pandas.util.testing import assert_series_equal @@ -140,10 +139,7 @@ def test_constructor_with_stringoffset(self): def test_repr(self): dates = ['2014-03-07', '2014-01-01 09:00', '2014-01-01 00:00:00.000000001'] timezones = ['UTC', 'Asia/Tokyo', 'US/Eastern', 'dateutil/US/Pacific'] - if _np_version_under1p7: - freqs = ['D', 'M', 'S'] - else: - freqs = ['D', 'M', 'S', 'N'] + freqs = ['D', 'M', 'S', 'N'] for date in dates: for tz in timezones: @@ -431,7 +427,6 @@ def test_parsing_timezone_offsets(self): class TestTimestampNsOperations(tm.TestCase): def setUp(self): - tm._skip_if_not_numpy17_friendly() self.timestamp = Timestamp(datetime.datetime.utcnow()) def assert_ns_timedelta(self, modified_timestamp, expected_value): @@ -539,15 +534,6 @@ def test_nat_arithmetic(self): with tm.assertRaises(TypeError): right - left - if _np_version_under1p7: - self.assertEqual(nat + np.timedelta64(1, 'h'), tslib.NaT) - with tm.assertRaises(TypeError): - np.timedelta64(1, 'h') + nat - - self.assertEqual(nat - np.timedelta64(1, 'h'), tslib.NaT) - with tm.assertRaises(TypeError): - np.timedelta64(1, 'h') - nat - class TestTslib(tm.TestCase): @@ -655,10 +641,9 @@ def test_timestamp_and_series(self): timestamp_series = 
Series(date_range('2014-03-17', periods=2, freq='D', tz='US/Eastern')) first_timestamp = timestamp_series[0] - if not _np_version_under1p7: - delta_series = Series([np.timedelta64(0, 'D'), np.timedelta64(1, 'D')]) - assert_series_equal(timestamp_series - first_timestamp, delta_series) - assert_series_equal(first_timestamp - timestamp_series, -delta_series) + delta_series = Series([np.timedelta64(0, 'D'), np.timedelta64(1, 'D')]) + assert_series_equal(timestamp_series - first_timestamp, delta_series) + assert_series_equal(first_timestamp - timestamp_series, -delta_series) def test_addition_subtraction_types(self): # Assert on the types resulting from Timestamp +/- various date/time objects @@ -676,11 +661,10 @@ def test_addition_subtraction_types(self): self.assertEqual(type(timestamp_instance + timedelta_instance), Timestamp) self.assertEqual(type(timestamp_instance - timedelta_instance), Timestamp) - if not _np_version_under1p7: - # Timestamp +/- datetime64 not supported, so not tested (could possibly assert error raised?) - timedelta64_instance = np.timedelta64(1, 'D') - self.assertEqual(type(timestamp_instance + timedelta64_instance), Timestamp) - self.assertEqual(type(timestamp_instance - timedelta64_instance), Timestamp) + # Timestamp +/- datetime64 not supported, so not tested (could possibly assert error raised?) + timedelta64_instance = np.timedelta64(1, 'D') + self.assertEqual(type(timestamp_instance + timedelta64_instance), Timestamp) + self.assertEqual(type(timestamp_instance - timedelta64_instance), Timestamp) def test_addition_subtraction_preserve_frequency(self): timestamp_instance = date_range('2014-03-05', periods=1, freq='D')[0] @@ -691,10 +675,9 @@ def test_addition_subtraction_preserve_frequency(self): self.assertEqual((timestamp_instance + timedelta_instance).freq, original_freq) self.assertEqual((timestamp_instance - timedelta_instance).freq, original_freq) - if not _np_version_under1p7: - timedelta64_instance = np.timedelta64(1, 'D') - self.assertEqual((timestamp_instance + timedelta64_instance).freq, original_freq) - self.assertEqual((timestamp_instance - timedelta64_instance).freq, original_freq) + timedelta64_instance = np.timedelta64(1, 'D') + self.assertEqual((timestamp_instance + timedelta64_instance).freq, original_freq) + self.assertEqual((timestamp_instance - timedelta64_instance).freq, original_freq) def test_resolution(self): diff --git a/pandas/tseries/timedeltas.py b/pandas/tseries/timedeltas.py index 0d6d74db6f18c..e762ebe9d85cf 100644 --- a/pandas/tseries/timedeltas.py +++ b/pandas/tseries/timedeltas.py @@ -7,9 +7,10 @@ import numpy as np import pandas.tslib as tslib -from pandas import compat, _np_version_under1p7 -from pandas.core.common import (ABCSeries, is_integer, is_integer_dtype, is_timedelta64_dtype, - _values_from_object, is_list_like, isnull, _ensure_object) +from pandas import compat +from pandas.core.common import (ABCSeries, is_integer, is_integer_dtype, + is_timedelta64_dtype, _values_from_object, + is_list_like, isnull, _ensure_object) repr_timedelta = tslib.repr_timedelta64 repr_timedelta64 = tslib.repr_timedelta64 @@ -29,9 +30,6 @@ def to_timedelta(arg, box=True, unit='ns'): ------- ret : timedelta64/arrays of timedelta64 if parsing succeeded """ - if _np_version_under1p7: - raise ValueError("to_timedelta is not support for numpy < 1.7") - unit = _validate_timedelta_unit(unit) def _convert_listlike(arg, box, unit): @@ -187,46 +185,9 @@ def _possibly_cast_to_timedelta(value, coerce=True, dtype=None): sure that we are [ns] (as numpy 
1.6.2 is very buggy in this regards, don't force the conversion unless coerce is True - if coerce='compat' force a compatibilty coercerion (to timedeltas) if needeed if dtype is passed then this is the target dtype """ - # coercion compatability - if coerce == 'compat' and _np_version_under1p7: - - def convert(td, dtype): - - # we have an array with a non-object dtype - if hasattr(td,'item'): - td = td.astype(np.int64).item() - if td == tslib.iNaT: - return td - if dtype == 'm8[us]': - td *= 1000 - return td - - if isnull(td) or td == tslib.compat_NaT or td == tslib.iNaT: - return tslib.iNaT - - # convert td value to a nanosecond value - d = td.days - s = td.seconds - us = td.microseconds - - if dtype == 'object' or dtype == 'm8[ns]': - td = 1000*us + (s + d * 24 * 3600) * 10 ** 9 - else: - raise ValueError("invalid conversion of dtype in np < 1.7 [%s]" % dtype) - - return td - - # < 1.7 coercion - if not is_list_like(value): - value = np.array([ value ]) - - dtype = value.dtype - return np.array([ convert(v,dtype) for v in value ], dtype='m8[ns]') - # deal with numpy not being able to handle certain timedelta operations if isinstance(value, (ABCSeries, np.ndarray)): diff --git a/pandas/tslib.pyx b/pandas/tslib.pyx index 7084184b7d423..3bdd422d9fc06 100644 --- a/pandas/tslib.pyx +++ b/pandas/tslib.pyx @@ -42,12 +42,6 @@ from pandas.compat import parse_date, string_types from sys import version_info -# numpy compat -from distutils.version import LooseVersion -_np_version = np.version.short_version -_np_version_under1p6 = LooseVersion(_np_version) < '1.6' -_np_version_under1p7 = LooseVersion(_np_version) < '1.7' - # GH3363 cdef bint PY2 = version_info[0] == 2 @@ -1472,33 +1466,16 @@ cdef inline convert_to_timedelta64(object ts, object unit, object coerce): if util.is_array(ts): ts = ts.astype('int64').item() if unit in ['Y','M','W']: - if _np_version_under1p7: - raise ValueError("unsupported unit for native timedelta under this numpy {0}".format(unit)) - else: - ts = np.timedelta64(ts,unit) + ts = np.timedelta64(ts, unit) else: ts = cast_from_unit(ts, unit) - if _np_version_under1p7: - ts = timedelta(microseconds=ts/1000.0) - else: - ts = np.timedelta64(ts) + ts = np.timedelta64(ts) elif util.is_string_object(ts): if ts in _nat_strings or coerce: return np.timedelta64(iNaT) else: raise ValueError("Invalid type for timedelta scalar: %s" % type(ts)) - if _np_version_under1p7: - if not isinstance(ts, timedelta): - if coerce: - return np.timedelta64(iNaT) - raise ValueError("Invalid type for timedelta scalar: %s" % type(ts)) - if not PY2: - # convert to microseconds in timedelta64 - ts = np.timedelta64(int(ts.total_seconds()*1e9 + ts.microseconds*1000)) - else: - return ts - if isinstance(ts, timedelta): ts = np.timedelta64(ts) elif not isinstance(ts, np.timedelta64): @@ -2124,9 +2101,6 @@ cdef object _get_transitions(object tz): arr = np.hstack([np.array([0], dtype='M8[s]'), # place holder for first item np.array(trans_list, dtype='M8[s]')]).astype('M8[ns]') # all trans listed arr = arr.view('i8') - # scale transitions correctly in numpy 1.6 - if _np_version_under1p7: - arr *= 1000000000 arr[0] = NPY_NAT + 1 elif _is_fixed_offset(tz): arr = np.array([NPY_NAT + 1], dtype=np.int64) diff --git a/pandas/util/testing.py b/pandas/util/testing.py index 42048ec9877fa..c6ddfd20cec7c 100644 --- a/pandas/util/testing.py +++ b/pandas/util/testing.py @@ -41,7 +41,7 @@ from pandas.tseries.index import DatetimeIndex from pandas.tseries.period import PeriodIndex -from pandas import _testing, 
_np_version_under1p7 +from pandas import _testing from pandas.io.common import urlopen @@ -225,11 +225,6 @@ def setUpClass(cls): cls.setUpClass = setUpClass return cls -def _skip_if_not_numpy17_friendly(): - # not friendly for < 1.7 - if _np_version_under1p7: - import nose - raise nose.SkipTest("numpy >= 1.7 is required") def _skip_if_no_scipy(): try: @@ -351,7 +346,6 @@ def get_locales(prefix=None, normalize=True, # raw_locales is "\n" seperated list of locales # it may contain non-decodable parts, so split # extract what we can and then rejoin. - locales = raw_locales.split(b'\n') raw_locales = [] for x in raw_locales: try: @@ -1231,7 +1225,7 @@ def dec(f): # and conditionally raise on these exception types _network_error_classes = (IOError, httplib.HTTPException) -if sys.version_info[:2] >= (3,3): +if sys.version_info >= (3, 3): _network_error_classes += (TimeoutError,) def can_connect(url, error_classes=_network_error_classes): diff --git a/setup.py b/setup.py index 844f5742c0e69..f93ade98c26cf 100755 --- a/setup.py +++ b/setup.py @@ -35,12 +35,9 @@ _have_setuptools = False setuptools_kwargs = {} -min_numpy_ver = '1.6' +min_numpy_ver = '1.7.0' if sys.version_info[0] >= 3: - if sys.version_info[1] >= 3: # 3.3 needs numpy 1.7+ - min_numpy_ver = "1.7.0b2" - setuptools_kwargs = { 'zip_safe': False, 'install_requires': ['python-dateutil >= 2', @@ -53,7 +50,6 @@ "\n$ pip install distribute") else: - min_numpy_ver = '1.6.1' setuptools_kwargs = { 'install_requires': ['python-dateutil', 'pytz >= 2011k',