Skip to content

Commit 7e14828

Browse files
committed
Merge pull request pandas-dev#97 from manahl/top_level_read_fix
Top level read fix for pandas-dev#95
2 parents db7adb8 + c115ee9 commit 7e14828

File tree

2 files changed

+30
-8
lines changed

2 files changed

+30
-8
lines changed

arctic/tickstore/toplevel.py

+10-2
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,16 @@ def add(self, date_range, library_name):
100100

101101
def read(self, symbol, date_range, columns=['BID', 'ASK', 'TRDPRC_1', 'BIDSIZE', 'ASKSIZE', 'TRDVOL_1'], include_images=False):
    """Read ticks for *symbol* across every underlying library that
    overlaps *date_range*, concatenated into a single DataFrame.

    Parameters
    ----------
    symbol : str
        The symbol to read from each underlying tick library.
    date_range : DateRange
        Overall range to read; each library is queried only for its
        intersection with this range.
    columns : list of str
        Columns to fetch from the underlying libraries.
        NOTE(review): the default is a shared mutable list; it is never
        mutated here, but confirm downstream readers do not mutate it.
    include_images : bool
        Passed through to each underlying library's ``read``.

    Returns
    -------
    pandas.DataFrame
        Concatenation of the per-library results, in library order.

    Raises
    ------
    NoDataFoundException
        If no underlying library returned any data for the range.
    """
    libraries = self._get_libraries(date_range)
    dfs = []
    for lib in libraries:
        try:
            dfs.append(lib.library.read(symbol, lib.date_range.intersection(date_range), columns,
                                        include_images=include_images))
        except NoDataFoundException:
            # A library holding no rows for this symbol/sub-range is not
            # fatal: other libraries may still contribute data.
            continue
    if not dfs:
        raise NoDataFoundException("No Data found for {} in range: {}".format(symbol, date_range))
    return pd.concat(dfs)
106114

107115
def write(self, symbol, data):

tests/integration/tickstore/test_toplevel.py

+20-6
Original file line numberDiff line numberDiff line change
@@ -71,12 +71,8 @@ def test_should_return_data_when_date_range_spans_libraries(toplevel_tickstore,
7171
arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
7272
tickstore_2010 = arctic['FEED_2010.LEVEL1']
7373
tickstore_2011 = arctic['FEED_2011.LEVEL1']
74-
toplevel_tickstore._collection.insert_one({'start': dt(2010, 1, 1),
75-
'end': dt(2010, 12, 31, 23, 59, 59),
76-
'library_name': 'FEED_2010.LEVEL1'})
77-
toplevel_tickstore._collection.insert_one({'start': dt(2011, 1, 1),
78-
'end': dt(2011, 12, 31, 23, 59, 59),
79-
'library_name': 'FEED_2011.LEVEL1'})
74+
toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
75+
toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
8076
dates = pd.date_range('20100101', periods=6, tz=mktz('Europe/London'))
8177
df_10 = pd.DataFrame(np.random.randn(6, 4), index=dates, columns=list('ABCD'))
8278
tickstore_2010.write('blah', df_10)
@@ -88,6 +84,24 @@ def test_should_return_data_when_date_range_spans_libraries(toplevel_tickstore,
8884
assert_frame_equal(expected_df, res.tz_convert(mktz('Europe/London')))
8985

9086

87+
def test_should_return_data_when_date_range_spans_libraries_even_if_one_returns_nothing(toplevel_tickstore, arctic):
    """A read spanning two yearly libraries must succeed even when one of
    the libraries holds no rows inside the requested date range."""
    arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
    store_2010 = arctic['FEED_2010.LEVEL1']
    store_2011 = arctic['FEED_2011.LEVEL1']
    toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
    toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
    index_2010 = pd.date_range('20100101', periods=6, tz=mktz('Europe/London'))
    frame_2010 = pd.DataFrame(np.random.randn(6, 4), index=index_2010, columns=list('ABCD'))
    store_2010.write('blah', frame_2010)
    # The 2011 data starts in February, so a query ending on 4 Jan 2011
    # gets nothing back from the 2011 library.
    index_2011 = pd.date_range('20110201', periods=6, tz=mktz('Europe/London'))
    frame_2011 = pd.DataFrame(np.random.randn(6, 4), index=index_2011, columns=list('ABCD'))
    store_2011.write('blah', frame_2011)
    result = toplevel_tickstore.read('blah', DateRange(start=dt(2010, 1, 2), end=dt(2011, 1, 4)), list('ABCD'))
    # Only the 2010 rows after 1 Jan should come back.
    assert_frame_equal(frame_2010[1:], result.tz_convert(mktz('Europe/London')))
103+
104+
91105
def test_should_add_underlying_library_where_none_exists(toplevel_tickstore, arctic):
92106
arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
93107
toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')

0 commit comments

Comments (0)