Commit 27c78b7

Fix tslib issue with new version of pandas (pandas-dev#353)
* Remove unused imports
* starting with pandas 0.20.0 tslib is part of _libs
* Update code to work with pandas 0.20.+
* Sorting between 0.16 and 0.20 versions requires two different methods
* remove deprecated indexer from script
* Convert datetime slicing to loc indexing
1 parent 8dced44 commit 27c78b7
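
The pattern behind most of these changes is a guarded import: starting with pandas 0.20.0, tslib lives under pandas._libs, so the new location is tried first and the pre-0.20 path is kept as a fallback. A minimal sketch of the idiom, assuming only pandas is installed (the exact module paths apply to the 0.16.2 through 0.20.x range this commit targets; later pandas releases moved these symbols again):

    # Prefer the pandas >= 0.20 location, fall back to the pre-0.20 one.
    try:
        from pandas._libs.tslib import Timestamp   # pandas >= 0.20
    except ImportError:
        from pandas.tslib import Timestamp         # pandas < 0.20

    ts = Timestamp('2017-05-01 12:00', tz='UTC')   # works with either import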

13 files changed, +28 -17 lines changed

.travis.yml

+1 -1

@@ -25,7 +25,7 @@ install:
 - pip install cython --upgrade
 - pip install pymongo --upgrade
 - pip install numpy --upgrade
-- pip install pandas==0.19.2
+- pip install pandas --upgrade
 - pip install decorator --upgrade
 - pip install enum34 --upgrade
 - pip install lz4==0.8.2

arctic/chunkstore/date_chunker.py

+6 -1

@@ -29,7 +29,12 @@ def to_chunks(self, df, chunk_size='D', **kwargs):
         elif 'date' in df.columns:
             dates = pd.DatetimeIndex(df.date)
             if not dates.is_monotonic_increasing:
-                df = df.sort(columns='date')
+                # providing support for pandas 0.16.2 to 0.20.x
+                # neither sort method exists in both
+                try:
+                    df = df.sort_values('date')
+                except AttributeError:
+                    df = df.sort(columns='date')
             dates = pd.DatetimeIndex(df.date)
         else:
             raise Exception("Data must be datetime indexed or have a column named 'date'")
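
For the sorting half of the fix: DataFrame.sort_values only appeared in pandas 0.17.0 and the old DataFrame.sort(columns=...) was removed in 0.20.0, so no single call covers the supported 0.16.2 through 0.20.x range, and the hunk above prefers the new API before falling back. A small usage sketch with made-up data, assuming only pandas:

    import pandas as pd

    df = pd.DataFrame({'date': pd.to_datetime(['2016-01-03', '2016-01-01', '2016-01-02']),
                       'vals': [3, 1, 2]})

    # Prefer the modern API (pandas >= 0.17); older releases raise AttributeError.
    try:
        df = df.sort_values('date')
    except AttributeError:
        df = df.sort(columns='date')

    assert pd.DatetimeIndex(df.date).is_monotonic_increasing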

arctic/multi_index.py

+1 -1

@@ -5,7 +5,7 @@
 import logging
 import types

-from pandas.tseries.tools import to_datetime as dt
+from pandas import to_datetime as dt

 import numpy as np
 import pandas as pd

arctic/scripts/arctic_copy_data.py

+2 -2

@@ -47,8 +47,8 @@ def _copy_symbol(symbols):
         # No timezone on the original, should we even allow this?
         preserve_start = preserve_start.replace(tzinfo=None)
         preserve_end = preserve_end.replace(tzinfo=None)
-        before = original_data.ix[:preserve_start]
-        after = original_data.ix[preserve_end:]
+        before = original_data.loc[:preserve_start]
+        after = original_data[preserve_end:]
         new_data = before.append(new_data).append(after)

         mt.write(symbol, new_data, metadata=version.metadata)
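
The .ix indexer replaced here is deprecated as of pandas 0.20 because it guesses between label-based and position-based lookups; for these datetime boundaries, .loc (or a plain slice) does explicit label-based slicing on the index, inclusive of the endpoint when the index is sorted. A minimal sketch with hypothetical data, assuming only pandas:

    import pandas as pd

    idx = pd.date_range('2017-01-01', periods=5, freq='D')
    original_data = pd.DataFrame({'price': range(5)}, index=idx)

    preserve_start = pd.Timestamp('2017-01-02')
    preserve_end = pd.Timestamp('2017-01-04')

    before = original_data.loc[:preserve_start]   # rows up to and including Jan 2
    after = original_data[preserve_end:]          # rows from Jan 4 onward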

arctic/serialization/numpy_records.py

+4 -1

@@ -2,7 +2,10 @@
 import numpy as np

 from pandas import DataFrame, MultiIndex, Series, DatetimeIndex
-from pandas.tslib import Timestamp, get_timezone
+try:
+    from pandas._libs.tslib import Timestamp, get_timezone
+except ImportError:
+    from pandas.tslib import Timestamp, get_timezone


 log = logging.getLogger(__name__)

arctic/store/_pandas_ndarray_store.py

-2

@@ -3,8 +3,6 @@

 from bson.binary import Binary
 from pandas import DataFrame, Series, Panel
-from pandas.tslib import Timestamp, get_timezone
-
 import numpy as np

 from arctic.serialization.numpy_records import SeriesSerializer, DataFrameSerializer

arctic/tickstore/tickstore.py

+1 -1

@@ -343,7 +343,7 @@ def read(self, symbol, date_range=None, columns=None, include_images=False, allo
         rtn = rtn.sort_index(kind='mergesort')
         if date_range:
             # FIXME: support DateRange.interval...
-            rtn = rtn.ix[date_range.start:date_range.end]
+            rtn = rtn.loc[date_range.start:date_range.end]
         return rtn

     def _pad_and_fix_dtypes(self, cols, column_dtypes):

setup.py

+1 -1

@@ -96,7 +96,7 @@ def run_tests(self):
         "enum34",
         "lz4<=0.8.2",
         "mockextras",
-        "pandas<=0.19.2",
+        "pandas",
         "pymongo>=3.0",
         "python-dateutil",
         "pytz",

tests/integration/chunkstore/test_chunkstore.py

+5 -1

@@ -1354,7 +1354,11 @@ def test_unsorted_date_col(chunkstore_lib):
                        'vals': range(2)})

     chunkstore_lib.write('test_symbol', df)
-    assert_frame_equal(df.sort(columns='date').reset_index(drop=True), chunkstore_lib.read('test_symbol'))
+    try:
+        df = df.sort_values('date')
+    except AttributeError:
+        df = df.sort(columns='date')
+    assert_frame_equal(df.reset_index(drop=True), chunkstore_lib.read('test_symbol'))
     chunkstore_lib.update('test_symbol', df2)
     assert_frame_equal(chunkstore_lib.read('test_symbol'),
                        pd.DataFrame({'date': pd.date_range('2016-8-31',

tests/integration/store/test_pandas_store.py

+4 -3

@@ -525,7 +525,7 @@ def panel(i1, i2, i3):
                  list(rrule(DAILY, count=i3, dtstart=dt(1970, 1, 1), interval=1)))


-@pytest.mark.skipif(pd.__version__ >= '0.18.0', reason="issue #115")
+@pytest.mark.xfail(pd.__version__ >= '0.18.0', reason="see issue #115")
 @pytest.mark.parametrize("df_size", list(itertools.combinations_with_replacement([1, 2, 4], r=3)))
 def test_panel_save_read(library, df_size):
     '''Note - empties are not tested here as they don't work!'''
@@ -540,6 +540,7 @@ def test_panel_save_read(library, df_size):
             str(pn.axes[i].names) + "!=" + str(pn.axes[i].names)


+@pytest.mark.xfail(pd.__version__ >= '0.20.0', reason='Panel is deprecated')
 def test_panel_save_read_with_nans(library):
     '''Ensure that nan rows are not dropped when calling to_frame.'''
     df1 = DataFrame(data=np.arange(4).reshape((2, 2)), index=['r1', 'r2'], columns=['c1', 'c2'])
@@ -747,15 +748,15 @@ def test_daterange_append(library):
     saved_arr = library.read('MYARR').data
     assert_frame_equal(df, saved_arr, check_names=False)
     # append two more rows
-    rows = df.ix[-2:].copy()
+    rows = df.iloc[-2:].copy()
     rows.index = rows.index + dtd(days=1)
     library.append('MYARR', rows)
     # assert we can rows back out
     assert_frame_equal(rows, library.read('MYARR', date_range=DateRange(rows.index[0])).data)
     # assert we can read back the first array
     assert_frame_equal(df, library.read('MYARR', date_range=DateRange(df.index[0], df.index[-1])).data)
     # append two more rows
-    rows1 = df.ix[-2:].copy()
+    rows1 = df.iloc[-2:].copy()
     rows1.index = rows1.index + dtd(days=2)
     library.append('MYARR', rows1)
     # assert we can read a mix of data
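
Similarly, the .ix[-2:] calls in test_daterange_append left it to .ix to decide whether -2 was a label or a position; .iloc[-2:] pins it down as the last two rows by position, which is what the test intends. A tiny sketch of the same move on hypothetical data, assuming only pandas:

    import pandas as pd

    df = pd.DataFrame({'n': range(5)},
                      index=pd.date_range('2017-01-01', periods=5, freq='D'))

    rows = df.iloc[-2:].copy()                       # last two rows, by position
    rows.index = rows.index + pd.Timedelta(days=1)   # shift so they can be appended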

tests/integration/tickstore/test_ts_read.py

+1 -1

@@ -4,7 +4,7 @@
 from numpy.testing.utils import assert_array_equal
 from pandas.util.testing import assert_frame_equal
 import pandas as pd
-from pandas.tseries.index import DatetimeIndex
+from pandas import DatetimeIndex
 from pymongo import ReadPreference
 import pytest
 import pytz

tests/unit/test_multi_index.py

+1 -1

@@ -1,7 +1,7 @@
 from datetime import timedelta
 import itertools

-from pandas.tseries.tools import to_datetime as dt
+from pandas import to_datetime as dt
 from pandas.util.testing import assert_frame_equal

 from arctic.multi_index import groupby_asof, fancy_group_by, insert_at

tests/unit/tickstore/test_toplevel.py

+1 -1

@@ -118,7 +118,7 @@ def test_slice_pandas_dataframe(start, end, expected_start_index, expected_end_index):
     top_level_tick_store = TopLevelTickStore(Mock())
     dates = pd.date_range('20100101', periods=5, freq='2D')
     data = pd.DataFrame(np.random.randn(5, 4), index=dates, columns=list('ABCD'))
-    expected = data.ix[expected_start_index:expected_end_index]
+    expected = data.iloc[expected_start_index:expected_end_index]
     result = top_level_tick_store._slice(data, start, end)
     assert_frame_equal(expected, result), '{}\n{}'.format(expected, result)
